1 /*
2 * Copyright (C) 2011, 2012, 2013 Apple Inc. All rights reserved.
3 *
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
6 * are met:
7 * 1. Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * 2. Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the
11 * documentation and/or other materials provided with the distribution.
12 *
13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
17 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24 */
25
26 #include "config.h"
27 #include "DFGSpeculativeJIT.h"
28
29 #if ENABLE(DFG_JIT)
30
31 #include "Arguments.h"
32 #include "ArrayPrototype.h"
33 #include "DFGCallArrayAllocatorSlowPathGenerator.h"
34 #include "DFGSlowPathGenerator.h"
35 #include "JSCJSValueInlines.h"
36 #include "ObjectPrototype.h"
37
38 namespace JSC { namespace DFG {
39
40 #if USE(JSVALUE64)
41
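// A quick orientation: everything in this file assumes the standard JSVALUE64
// boxing scheme (see JSCJSValue.h); the constant values given here are
// approximate reminders, and the authoritative definitions live there.
//   - tagTypeNumberRegister holds TagTypeNumber (0xFFFF000000000000). A boxed
//     int32 is TagTypeNumber | zero-extended payload, so any boxed value
//     numerically >= TagTypeNumber is an int32.
//   - Doubles are stored offset by 2^48; boxing subtracts TagTypeNumber
//     (equivalent, mod 2^64, to adding that offset) and unboxing adds it back.
//     A value with some, but not all, of its top 16 bits set is a boxed double.
//   - tagMaskRegister holds TagMask; a value with none of the TagMask bits set
//     is a cell (a pointer to a GC object).
//   - Booleans, null and undefined are small immediates (ValueFalse, ValueTrue,
//     ValueNull, ValueUndefined), with TagBitUndefined distinguishing undefined
//     from null.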
42 GPRReg SpeculativeJIT::fillInteger(Edge edge, DataFormat& returnFormat)
43 {
44 ASSERT(!needsTypeCheck(edge, SpecInt32));
45
46 VirtualRegister virtualRegister = edge->virtualRegister();
47 GenerationInfo& info = m_generationInfo[virtualRegister];
48
49 if (info.registerFormat() == DataFormatNone) {
50 GPRReg gpr = allocate();
51
52 if (edge->hasConstant()) {
53 m_gprs.retain(gpr, virtualRegister, SpillOrderConstant);
54 if (isInt32Constant(edge.node())) {
55 m_jit.move(MacroAssembler::Imm32(valueOfInt32Constant(edge.node())), gpr);
56 info.fillInteger(*m_stream, gpr);
57 returnFormat = DataFormatInteger;
58 return gpr;
59 }
60 if (isNumberConstant(edge.node())) {
61 JSValue jsValue = jsNumber(valueOfNumberConstant(edge.node()));
62 m_jit.move(MacroAssembler::Imm64(JSValue::encode(jsValue)), gpr);
63 } else {
64 ASSERT(isJSConstant(edge.node()));
65 JSValue jsValue = valueOfJSConstant(edge.node());
66 m_jit.move(MacroAssembler::TrustedImm64(JSValue::encode(jsValue)), gpr);
67 }
68 } else if (info.spillFormat() == DataFormatInteger) {
69 m_gprs.retain(gpr, virtualRegister, SpillOrderSpilled);
70 m_jit.load32(JITCompiler::payloadFor(virtualRegister), gpr);
71 // Tag it, since fillInteger() is used when we want a boxed integer.
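            // (payloadFor() loaded only the low 32 bits; OR'ing in TagTypeNumber
            // from the tag register produces the boxed int32 JSValue.)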
72 m_jit.or64(GPRInfo::tagTypeNumberRegister, gpr);
73 } else {
74 RELEASE_ASSERT(info.spillFormat() == DataFormatJS || info.spillFormat() == DataFormatJSInteger);
75 m_gprs.retain(gpr, virtualRegister, SpillOrderSpilled);
76 m_jit.load64(JITCompiler::addressFor(virtualRegister), gpr);
77 }
78
79 // Since we statically know that we're filling an integer, and values
80 // in the JSStack are boxed, this must be DataFormatJSInteger.
81 // We will check this with a jitAssert below.
82 info.fillJSValue(*m_stream, gpr, DataFormatJSInteger);
83 unlock(gpr);
84 }
85
86 switch (info.registerFormat()) {
87 case DataFormatNone:
88 // Should have filled, above.
89 case DataFormatJSDouble:
90 case DataFormatDouble:
91 case DataFormatJS:
92 case DataFormatCell:
93 case DataFormatJSCell:
94 case DataFormatBoolean:
95 case DataFormatJSBoolean:
96 case DataFormatStorage:
97         // Should only be calling this function if we know this operand to be an integer.
98 RELEASE_ASSERT_NOT_REACHED();
99
100 case DataFormatJSInteger: {
101 GPRReg gpr = info.gpr();
102 m_gprs.lock(gpr);
103 m_jit.jitAssertIsJSInt32(gpr);
104 returnFormat = DataFormatJSInteger;
105 return gpr;
106 }
107
108 case DataFormatInteger: {
109 GPRReg gpr = info.gpr();
110 m_gprs.lock(gpr);
111 m_jit.jitAssertIsInt32(gpr);
112 returnFormat = DataFormatInteger;
113 return gpr;
114 }
115
116 default:
117 RELEASE_ASSERT_NOT_REACHED();
118 return InvalidGPRReg;
119 }
120 }
121
122 GPRReg SpeculativeJIT::fillJSValue(Edge edge)
123 {
124 VirtualRegister virtualRegister = edge->virtualRegister();
125 GenerationInfo& info = m_generationInfo[virtualRegister];
126
127 switch (info.registerFormat()) {
128 case DataFormatNone: {
129 GPRReg gpr = allocate();
130
131 if (edge->hasConstant()) {
132 if (isInt32Constant(edge.node())) {
133 info.fillJSValue(*m_stream, gpr, DataFormatJSInteger);
134 JSValue jsValue = jsNumber(valueOfInt32Constant(edge.node()));
135 m_jit.move(MacroAssembler::Imm64(JSValue::encode(jsValue)), gpr);
136 } else if (isNumberConstant(edge.node())) {
137 info.fillJSValue(*m_stream, gpr, DataFormatJSDouble);
138 JSValue jsValue(JSValue::EncodeAsDouble, valueOfNumberConstant(edge.node()));
139 m_jit.move(MacroAssembler::Imm64(JSValue::encode(jsValue)), gpr);
140 } else {
141 ASSERT(isJSConstant(edge.node()));
142 JSValue jsValue = valueOfJSConstant(edge.node());
143 m_jit.move(MacroAssembler::TrustedImm64(JSValue::encode(jsValue)), gpr);
144 info.fillJSValue(*m_stream, gpr, DataFormatJS);
145 }
146
147 m_gprs.retain(gpr, virtualRegister, SpillOrderConstant);
148 } else {
149 DataFormat spillFormat = info.spillFormat();
150 m_gprs.retain(gpr, virtualRegister, SpillOrderSpilled);
151 if (spillFormat == DataFormatInteger) {
152 m_jit.load32(JITCompiler::addressFor(virtualRegister), gpr);
153 m_jit.or64(GPRInfo::tagTypeNumberRegister, gpr);
154 spillFormat = DataFormatJSInteger;
155 } else {
156 m_jit.load64(JITCompiler::addressFor(virtualRegister), gpr);
157 if (spillFormat == DataFormatDouble) {
158 // Need to box the double, since we want a JSValue.
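                // Subtracting TagTypeNumber is the boxing step: mod 2^64 it is
                // the same as adding the 2^48 double-encode offset used for
                // boxed doubles.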
159 m_jit.sub64(GPRInfo::tagTypeNumberRegister, gpr);
160 spillFormat = DataFormatJSDouble;
161 } else
162 RELEASE_ASSERT(spillFormat & DataFormatJS);
163 }
164 info.fillJSValue(*m_stream, gpr, spillFormat);
165 }
166 return gpr;
167 }
168
169 case DataFormatInteger: {
170 GPRReg gpr = info.gpr();
171 // If the register has already been locked we need to take a copy.
172         // If not, we'll box in place, so mark on the info that this is now type DataFormatJSInteger, not DataFormatInteger.
173 if (m_gprs.isLocked(gpr)) {
174 GPRReg result = allocate();
175 m_jit.or64(GPRInfo::tagTypeNumberRegister, gpr, result);
176 return result;
177 }
178 m_gprs.lock(gpr);
179 m_jit.or64(GPRInfo::tagTypeNumberRegister, gpr);
180 info.fillJSValue(*m_stream, gpr, DataFormatJSInteger);
181 return gpr;
182 }
183
184 case DataFormatDouble: {
185 FPRReg fpr = info.fpr();
186 GPRReg gpr = boxDouble(fpr);
187
188 // Update all info
189 info.fillJSValue(*m_stream, gpr, DataFormatJSDouble);
190 m_fprs.release(fpr);
191 m_gprs.retain(gpr, virtualRegister, SpillOrderJS);
192
193 return gpr;
194 }
195
196 case DataFormatCell:
197 // No retag required on JSVALUE64!
198 case DataFormatJS:
199 case DataFormatJSInteger:
200 case DataFormatJSDouble:
201 case DataFormatJSCell:
202 case DataFormatJSBoolean: {
203 GPRReg gpr = info.gpr();
204 m_gprs.lock(gpr);
205 return gpr;
206 }
207
208 case DataFormatBoolean:
209 case DataFormatStorage:
210         // These formats currently never occur here.
211 RELEASE_ASSERT_NOT_REACHED();
212
213 default:
214 RELEASE_ASSERT_NOT_REACHED();
215 return InvalidGPRReg;
216 }
217 }
218
219 void SpeculativeJIT::nonSpeculativeUInt32ToNumber(Node* node)
220 {
221 IntegerOperand op1(this, node->child1());
222 FPRTemporary boxer(this);
223 GPRTemporary result(this, op1);
224
225 JITCompiler::Jump positive = m_jit.branch32(MacroAssembler::GreaterThanOrEqual, op1.gpr(), TrustedImm32(0));
226
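    // op1 holds a uint32. When the sign bit is set the unsigned value is
    // >= 2^31 and does not fit in a boxed int32, so convert the (negative)
    // int32 to double, add 2^32 to recover the unsigned value, and box the
    // result as a double. Otherwise the value is simply re-tagged as an int32.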
227 m_jit.convertInt32ToDouble(op1.gpr(), boxer.fpr());
228 m_jit.addDouble(JITCompiler::AbsoluteAddress(&AssemblyHelpers::twoToThe32), boxer.fpr());
229
230 boxDouble(boxer.fpr(), result.gpr());
231
232 JITCompiler::Jump done = m_jit.jump();
233
234 positive.link(&m_jit);
235
236 m_jit.or64(GPRInfo::tagTypeNumberRegister, op1.gpr(), result.gpr());
237
238 done.link(&m_jit);
239
240 jsValueResult(result.gpr(), m_currentNode);
241 }
242
243 void SpeculativeJIT::cachedGetById(CodeOrigin codeOrigin, GPRReg baseGPR, GPRReg resultGPR, unsigned identifierNumber, JITCompiler::Jump slowPathTarget, SpillRegistersMode spillMode)
244 {
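    // Rough shape of the get_by_id inline cache: a patchable structure compare
    // (initially against a placeholder pointer that never matches), a
    // convertible load of the butterfly, and a load with a patchable compact
    // offset. The labels are recorded in a PropertyAccessRecord below so the
    // repatching code can fill in the real structure and offset later; until
    // then the structure check always fails and we call operationGetByIdOptimize
    // on the slow path.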
245 JITCompiler::DataLabelPtr structureToCompare;
246 JITCompiler::PatchableJump structureCheck = m_jit.patchableBranchPtrWithPatch(JITCompiler::NotEqual, JITCompiler::Address(baseGPR, JSCell::structureOffset()), structureToCompare, JITCompiler::TrustedImmPtr(reinterpret_cast<void*>(unusedPointer)));
247
248 JITCompiler::ConvertibleLoadLabel propertyStorageLoad =
249 m_jit.convertibleLoadPtr(JITCompiler::Address(baseGPR, JSObject::butterflyOffset()), resultGPR);
250 JITCompiler::DataLabelCompact loadWithPatch = m_jit.load64WithCompactAddressOffsetPatch(JITCompiler::Address(resultGPR, 0), resultGPR);
251
252 JITCompiler::Label doneLabel = m_jit.label();
253
254 OwnPtr<SlowPathGenerator> slowPath;
255 if (!slowPathTarget.isSet()) {
256 slowPath = slowPathCall(
257 structureCheck.m_jump, this, operationGetByIdOptimize, resultGPR, baseGPR,
258 identifier(identifierNumber), spillMode);
259 } else {
260 JITCompiler::JumpList slowCases;
261 slowCases.append(structureCheck.m_jump);
262 slowCases.append(slowPathTarget);
263 slowPath = slowPathCall(
264 slowCases, this, operationGetByIdOptimize, resultGPR, baseGPR,
265 identifier(identifierNumber), spillMode);
266 }
267 m_jit.addPropertyAccess(
268 PropertyAccessRecord(
269 codeOrigin, structureToCompare, structureCheck, propertyStorageLoad, loadWithPatch,
270 slowPath.get(), doneLabel, safeCast<int8_t>(baseGPR), safeCast<int8_t>(resultGPR),
271 usedRegisters(),
272 spillMode == NeedToSpill ? PropertyAccessRecord::RegistersInUse : PropertyAccessRecord::RegistersFlushed));
273 addSlowPathGenerator(slowPath.release());
274 }
275
276 void SpeculativeJIT::cachedPutById(CodeOrigin codeOrigin, GPRReg baseGPR, GPRReg valueGPR, Edge valueUse, GPRReg scratchGPR, unsigned identifierNumber, PutKind putKind, JITCompiler::Jump slowPathTarget)
277 {
278
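    // Same inline-cache pattern as cachedGetById, but for puts: a patchable
    // structure check, a butterfly load into the scratch register, and a store
    // with a patchable offset. The write barrier runs up front, and the scratch
    // register is dropped from the recorded register set below since the
    // patched stub is free to clobber it.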
279 JITCompiler::DataLabelPtr structureToCompare;
280 JITCompiler::PatchableJump structureCheck = m_jit.patchableBranchPtrWithPatch(JITCompiler::NotEqual, JITCompiler::Address(baseGPR, JSCell::structureOffset()), structureToCompare, JITCompiler::TrustedImmPtr(reinterpret_cast<void*>(unusedPointer)));
281
282 writeBarrier(baseGPR, valueGPR, valueUse, WriteBarrierForPropertyAccess, scratchGPR);
283
284 JITCompiler::ConvertibleLoadLabel propertyStorageLoad =
285 m_jit.convertibleLoadPtr(JITCompiler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
286 JITCompiler::DataLabel32 storeWithPatch = m_jit.store64WithAddressOffsetPatch(valueGPR, JITCompiler::Address(scratchGPR, 0));
287
288 JITCompiler::Label doneLabel = m_jit.label();
289
290 V_DFGOperation_EJCI optimizedCall;
291 if (m_jit.strictModeFor(m_currentNode->codeOrigin)) {
292 if (putKind == Direct)
293 optimizedCall = operationPutByIdDirectStrictOptimize;
294 else
295 optimizedCall = operationPutByIdStrictOptimize;
296 } else {
297 if (putKind == Direct)
298 optimizedCall = operationPutByIdDirectNonStrictOptimize;
299 else
300 optimizedCall = operationPutByIdNonStrictOptimize;
301 }
302 OwnPtr<SlowPathGenerator> slowPath;
303 if (!slowPathTarget.isSet()) {
304 slowPath = slowPathCall(
305 structureCheck.m_jump, this, optimizedCall, NoResult, valueGPR, baseGPR,
306 identifier(identifierNumber));
307 } else {
308 JITCompiler::JumpList slowCases;
309 slowCases.append(structureCheck.m_jump);
310 slowCases.append(slowPathTarget);
311 slowPath = slowPathCall(
312 slowCases, this, optimizedCall, NoResult, valueGPR, baseGPR,
313 identifier(identifierNumber));
314 }
315 RegisterSet currentlyUsedRegisters = usedRegisters();
316 currentlyUsedRegisters.clear(scratchGPR);
317 ASSERT(currentlyUsedRegisters.get(baseGPR));
318 ASSERT(currentlyUsedRegisters.get(valueGPR));
319 m_jit.addPropertyAccess(
320 PropertyAccessRecord(
321 codeOrigin, structureToCompare, structureCheck, propertyStorageLoad,
322 JITCompiler::DataLabelCompact(storeWithPatch.label()), slowPath.get(), doneLabel,
323 safeCast<int8_t>(baseGPR), safeCast<int8_t>(valueGPR), currentlyUsedRegisters));
324 addSlowPathGenerator(slowPath.release());
325 }
326
327 void SpeculativeJIT::nonSpeculativeNonPeepholeCompareNull(Edge operand, bool invert)
328 {
329 JSValueOperand arg(this, operand);
330 GPRReg argGPR = arg.gpr();
331
332 GPRTemporary result(this, arg);
333 GPRReg resultGPR = result.gpr();
334
335 JITCompiler::Jump notCell;
336
337 JITCompiler::Jump notMasqueradesAsUndefined;
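    // A cell compares equal to null/undefined only if its structure has the
    // MasqueradesAsUndefined flag (e.g. document.all). If the per-global-object
    // watchpoint is still intact, no such object exists yet, so the cell case is
    // a constant; we register a watchpoint so this code is invalidated if that
    // ever changes. Otherwise, test the structure flag and, when it is set,
    // compare the structure's global object against this code origin's.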
338 if (m_jit.graph().globalObjectFor(operand->codeOrigin)->masqueradesAsUndefinedWatchpoint()->isStillValid()) {
339 if (!isKnownCell(operand.node()))
340 notCell = m_jit.branchTest64(MacroAssembler::NonZero, argGPR, GPRInfo::tagMaskRegister);
341
342 m_jit.graph().globalObjectFor(operand->codeOrigin)->masqueradesAsUndefinedWatchpoint()->add(speculationWatchpoint());
343 m_jit.move(invert ? TrustedImm32(1) : TrustedImm32(0), resultGPR);
344 notMasqueradesAsUndefined = m_jit.jump();
345 } else {
346 GPRTemporary localGlobalObject(this);
347 GPRTemporary remoteGlobalObject(this);
348
349 if (!isKnownCell(operand.node()))
350 notCell = m_jit.branchTest64(MacroAssembler::NonZero, argGPR, GPRInfo::tagMaskRegister);
351
352 m_jit.loadPtr(JITCompiler::Address(argGPR, JSCell::structureOffset()), resultGPR);
353 JITCompiler::Jump isMasqueradesAsUndefined = m_jit.branchTest8(JITCompiler::NonZero, JITCompiler::Address(resultGPR, Structure::typeInfoFlagsOffset()), JITCompiler::TrustedImm32(MasqueradesAsUndefined));
354
355 m_jit.move(invert ? TrustedImm32(1) : TrustedImm32(0), resultGPR);
356 notMasqueradesAsUndefined = m_jit.jump();
357
358 isMasqueradesAsUndefined.link(&m_jit);
359 GPRReg localGlobalObjectGPR = localGlobalObject.gpr();
360 GPRReg remoteGlobalObjectGPR = remoteGlobalObject.gpr();
361 m_jit.move(JITCompiler::TrustedImmPtr(m_jit.graph().globalObjectFor(operand->codeOrigin)), localGlobalObjectGPR);
362 m_jit.loadPtr(JITCompiler::Address(resultGPR, Structure::globalObjectOffset()), remoteGlobalObjectGPR);
363 m_jit.comparePtr(invert ? JITCompiler::NotEqual : JITCompiler::Equal, localGlobalObjectGPR, remoteGlobalObjectGPR, resultGPR);
364 }
365
366 if (!isKnownCell(operand.node())) {
367 JITCompiler::Jump done = m_jit.jump();
368
369 notCell.link(&m_jit);
370
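        // Not a cell: null and undefined differ only in TagBitUndefined, so
        // masking that bit off maps both to ValueNull and one compare covers
        // comparison against either of them.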
371 m_jit.move(argGPR, resultGPR);
372 m_jit.and64(JITCompiler::TrustedImm32(~TagBitUndefined), resultGPR);
373 m_jit.compare64(invert ? JITCompiler::NotEqual : JITCompiler::Equal, resultGPR, JITCompiler::TrustedImm32(ValueNull), resultGPR);
374
375 done.link(&m_jit);
376 }
377
378 notMasqueradesAsUndefined.link(&m_jit);
379
380 m_jit.or32(TrustedImm32(ValueFalse), resultGPR);
381 jsValueResult(resultGPR, m_currentNode, DataFormatJSBoolean);
382 }
383
384 void SpeculativeJIT::nonSpeculativePeepholeBranchNull(Edge operand, Node* branchNode, bool invert)
385 {
386 BlockIndex taken = branchNode->takenBlockIndex();
387 BlockIndex notTaken = branchNode->notTakenBlockIndex();
388
389 if (taken == nextBlock()) {
390 invert = !invert;
391 BlockIndex tmp = taken;
392 taken = notTaken;
393 notTaken = tmp;
394 }
395
396 JSValueOperand arg(this, operand);
397 GPRReg argGPR = arg.gpr();
398
399 GPRTemporary result(this, arg);
400 GPRReg resultGPR = result.gpr();
401
402 JITCompiler::Jump notCell;
403
404 if (m_jit.graph().globalObjectFor(operand->codeOrigin)->masqueradesAsUndefinedWatchpoint()->isStillValid()) {
405 if (!isKnownCell(operand.node()))
406 notCell = m_jit.branchTest64(MacroAssembler::NonZero, argGPR, GPRInfo::tagMaskRegister);
407
408 m_jit.graph().globalObjectFor(operand->codeOrigin)->masqueradesAsUndefinedWatchpoint()->add(speculationWatchpoint());
409 jump(invert ? taken : notTaken, ForceJump);
410 } else {
411 GPRTemporary localGlobalObject(this);
412 GPRTemporary remoteGlobalObject(this);
413
414 if (!isKnownCell(operand.node()))
415 notCell = m_jit.branchTest64(MacroAssembler::NonZero, argGPR, GPRInfo::tagMaskRegister);
416
417 m_jit.loadPtr(JITCompiler::Address(argGPR, JSCell::structureOffset()), resultGPR);
418 branchTest8(JITCompiler::Zero, JITCompiler::Address(resultGPR, Structure::typeInfoFlagsOffset()), JITCompiler::TrustedImm32(MasqueradesAsUndefined), invert ? taken : notTaken);
419
420 GPRReg localGlobalObjectGPR = localGlobalObject.gpr();
421 GPRReg remoteGlobalObjectGPR = remoteGlobalObject.gpr();
422 m_jit.move(TrustedImmPtr(m_jit.graph().globalObjectFor(operand->codeOrigin)), localGlobalObjectGPR);
423 m_jit.loadPtr(JITCompiler::Address(resultGPR, Structure::globalObjectOffset()), remoteGlobalObjectGPR);
424 branchPtr(JITCompiler::Equal, localGlobalObjectGPR, remoteGlobalObjectGPR, invert ? notTaken : taken);
425 }
426
427 if (!isKnownCell(operand.node())) {
428 jump(notTaken, ForceJump);
429
430 notCell.link(&m_jit);
431
432 m_jit.move(argGPR, resultGPR);
433 m_jit.and64(JITCompiler::TrustedImm32(~TagBitUndefined), resultGPR);
434 branch64(invert ? JITCompiler::NotEqual : JITCompiler::Equal, resultGPR, JITCompiler::TrustedImm64(ValueNull), taken);
435 }
436
437 jump(notTaken);
438 }
439
440 bool SpeculativeJIT::nonSpeculativeCompareNull(Node* node, Edge operand, bool invert)
441 {
442 unsigned branchIndexInBlock = detectPeepHoleBranch();
443 if (branchIndexInBlock != UINT_MAX) {
444 Node* branchNode = m_jit.graph().m_blocks[m_block]->at(branchIndexInBlock);
445
446 RELEASE_ASSERT(node->adjustedRefCount() == 1);
447
448 nonSpeculativePeepholeBranchNull(operand, branchNode, invert);
449
450 use(node->child1());
451 use(node->child2());
452 m_indexInBlock = branchIndexInBlock;
453 m_currentNode = branchNode;
454
455 return true;
456 }
457
458 nonSpeculativeNonPeepholeCompareNull(operand, invert);
459
460 return false;
461 }
462
463 void SpeculativeJIT::nonSpeculativePeepholeBranch(Node* node, Node* branchNode, MacroAssembler::RelationalCondition cond, S_DFGOperation_EJJ helperFunction)
464 {
465 BlockIndex taken = branchNode->takenBlockIndex();
466 BlockIndex notTaken = branchNode->notTakenBlockIndex();
467
468 JITCompiler::ResultCondition callResultCondition = JITCompiler::NonZero;
469
470 // The branch instruction will branch to the taken block.
471 // If taken is next, switch taken with notTaken & invert the branch condition so we can fall through.
472 if (taken == nextBlock()) {
473 cond = JITCompiler::invert(cond);
474 callResultCondition = JITCompiler::Zero;
475 BlockIndex tmp = taken;
476 taken = notTaken;
477 notTaken = tmp;
478 }
479
480 JSValueOperand arg1(this, node->child1());
481 JSValueOperand arg2(this, node->child2());
482 GPRReg arg1GPR = arg1.gpr();
483 GPRReg arg2GPR = arg2.gpr();
484
485 JITCompiler::JumpList slowPath;
486
487 if (isKnownNotInteger(node->child1().node()) || isKnownNotInteger(node->child2().node())) {
488 GPRResult result(this);
489 GPRReg resultGPR = result.gpr();
490
491 arg1.use();
492 arg2.use();
493
494 flushRegisters();
495 callOperation(helperFunction, resultGPR, arg1GPR, arg2GPR);
496
497 branchTest32(callResultCondition, resultGPR, taken);
498 } else {
499 GPRTemporary result(this, arg2);
500 GPRReg resultGPR = result.gpr();
501
502 arg1.use();
503 arg2.use();
504
505 if (!isKnownInteger(node->child1().node()))
506 slowPath.append(m_jit.branch64(MacroAssembler::Below, arg1GPR, GPRInfo::tagTypeNumberRegister));
507 if (!isKnownInteger(node->child2().node()))
508 slowPath.append(m_jit.branch64(MacroAssembler::Below, arg2GPR, GPRInfo::tagTypeNumberRegister));
509
510 branch32(cond, arg1GPR, arg2GPR, taken);
511
512 if (!isKnownInteger(node->child1().node()) || !isKnownInteger(node->child2().node())) {
513 jump(notTaken, ForceJump);
514
515 slowPath.link(&m_jit);
516
517 silentSpillAllRegisters(resultGPR);
518 callOperation(helperFunction, resultGPR, arg1GPR, arg2GPR);
519 silentFillAllRegisters(resultGPR);
520
521 branchTest32(callResultCondition, resultGPR, taken);
522 }
523 }
524
525 jump(notTaken);
526
527 m_indexInBlock = m_jit.graph().m_blocks[m_block]->size() - 1;
528 m_currentNode = branchNode;
529 }
530
531 template<typename JumpType>
532 class CompareAndBoxBooleanSlowPathGenerator
533 : public CallSlowPathGenerator<JumpType, S_DFGOperation_EJJ, GPRReg> {
534 public:
535 CompareAndBoxBooleanSlowPathGenerator(
536 JumpType from, SpeculativeJIT* jit,
537 S_DFGOperation_EJJ function, GPRReg result, GPRReg arg1, GPRReg arg2)
538 : CallSlowPathGenerator<JumpType, S_DFGOperation_EJJ, GPRReg>(
539 from, jit, function, NeedToSpill, result)
540 , m_arg1(arg1)
541 , m_arg2(arg2)
542 {
543 }
544
545 protected:
546 virtual void generateInternal(SpeculativeJIT* jit)
547 {
548 this->setUp(jit);
549 this->recordCall(jit->callOperation(this->m_function, this->m_result, m_arg1, m_arg2));
550 jit->m_jit.and32(JITCompiler::TrustedImm32(1), this->m_result);
551 jit->m_jit.or32(JITCompiler::TrustedImm32(ValueFalse), this->m_result);
552 this->tearDown(jit);
553 }
554
555 private:
556 GPRReg m_arg1;
557 GPRReg m_arg2;
558 };
559
560 void SpeculativeJIT::nonSpeculativeNonPeepholeCompare(Node* node, MacroAssembler::RelationalCondition cond, S_DFGOperation_EJJ helperFunction)
561 {
562 JSValueOperand arg1(this, node->child1());
563 JSValueOperand arg2(this, node->child2());
564 GPRReg arg1GPR = arg1.gpr();
565 GPRReg arg2GPR = arg2.gpr();
566
567 JITCompiler::JumpList slowPath;
568
569 if (isKnownNotInteger(node->child1().node()) || isKnownNotInteger(node->child2().node())) {
570 GPRResult result(this);
571 GPRReg resultGPR = result.gpr();
572
573 arg1.use();
574 arg2.use();
575
576 flushRegisters();
577 callOperation(helperFunction, resultGPR, arg1GPR, arg2GPR);
578
579 m_jit.or32(TrustedImm32(ValueFalse), resultGPR);
580 jsValueResult(resultGPR, m_currentNode, DataFormatJSBoolean, UseChildrenCalledExplicitly);
581 } else {
582 GPRTemporary result(this, arg2);
583 GPRReg resultGPR = result.gpr();
584
585 arg1.use();
586 arg2.use();
587
588 if (!isKnownInteger(node->child1().node()))
589 slowPath.append(m_jit.branch64(MacroAssembler::Below, arg1GPR, GPRInfo::tagTypeNumberRegister));
590 if (!isKnownInteger(node->child2().node()))
591 slowPath.append(m_jit.branch64(MacroAssembler::Below, arg2GPR, GPRInfo::tagTypeNumberRegister));
592
593 m_jit.compare32(cond, arg1GPR, arg2GPR, resultGPR);
594 m_jit.or32(TrustedImm32(ValueFalse), resultGPR);
595
596 if (!isKnownInteger(node->child1().node()) || !isKnownInteger(node->child2().node())) {
597 addSlowPathGenerator(adoptPtr(
598 new CompareAndBoxBooleanSlowPathGenerator<JITCompiler::JumpList>(
599 slowPath, this, helperFunction, resultGPR, arg1GPR, arg2GPR)));
600 }
601
602 jsValueResult(resultGPR, m_currentNode, DataFormatJSBoolean, UseChildrenCalledExplicitly);
603 }
604 }
605
606 void SpeculativeJIT::nonSpeculativePeepholeStrictEq(Node* node, Node* branchNode, bool invert)
607 {
608 BlockIndex taken = branchNode->takenBlockIndex();
609 BlockIndex notTaken = branchNode->notTakenBlockIndex();
610
611 // The branch instruction will branch to the taken block.
612 // If taken is next, switch taken with notTaken & invert the branch condition so we can fall through.
613 if (taken == nextBlock()) {
614 invert = !invert;
615 BlockIndex tmp = taken;
616 taken = notTaken;
617 notTaken = tmp;
618 }
619
620 JSValueOperand arg1(this, node->child1());
621 JSValueOperand arg2(this, node->child2());
622 GPRReg arg1GPR = arg1.gpr();
623 GPRReg arg2GPR = arg2.gpr();
624
625 GPRTemporary result(this);
626 GPRReg resultGPR = result.gpr();
627
628 arg1.use();
629 arg2.use();
630
631 if (isKnownCell(node->child1().node()) && isKnownCell(node->child2().node())) {
632         // See if we get lucky: if the arguments are cells and they reference the same
633 // cell, then they must be strictly equal.
634 branch64(JITCompiler::Equal, arg1GPR, arg2GPR, invert ? notTaken : taken);
635
636 silentSpillAllRegisters(resultGPR);
637 callOperation(operationCompareStrictEqCell, resultGPR, arg1GPR, arg2GPR);
638 silentFillAllRegisters(resultGPR);
639
640 branchTest32(invert ? JITCompiler::Zero : JITCompiler::NonZero, resultGPR, taken);
641 } else {
642 m_jit.or64(arg1GPR, arg2GPR, resultGPR);
643
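        // Fast-path strategy: OR the operands together; if the result has no
        // tag bits, both are cells, and pointer equality decides that case
        // (falling into the slow call when the pointers differ, since two
        // distinct cells can still be equal strings). Otherwise, any operand
        // that is a boxed double is sent to the slow path (NaN and +0/-0 need a
        // real double comparison); everything else (ints, booleans,
        // null/undefined) compares correctly as raw 64-bit bit patterns.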
644 JITCompiler::Jump twoCellsCase = m_jit.branchTest64(JITCompiler::Zero, resultGPR, GPRInfo::tagMaskRegister);
645
646 JITCompiler::Jump leftOK = m_jit.branch64(JITCompiler::AboveOrEqual, arg1GPR, GPRInfo::tagTypeNumberRegister);
647 JITCompiler::Jump leftDouble = m_jit.branchTest64(JITCompiler::NonZero, arg1GPR, GPRInfo::tagTypeNumberRegister);
648 leftOK.link(&m_jit);
649 JITCompiler::Jump rightOK = m_jit.branch64(JITCompiler::AboveOrEqual, arg2GPR, GPRInfo::tagTypeNumberRegister);
650 JITCompiler::Jump rightDouble = m_jit.branchTest64(JITCompiler::NonZero, arg2GPR, GPRInfo::tagTypeNumberRegister);
651 rightOK.link(&m_jit);
652
653 branch64(invert ? JITCompiler::NotEqual : JITCompiler::Equal, arg1GPR, arg2GPR, taken);
654 jump(notTaken, ForceJump);
655
656 twoCellsCase.link(&m_jit);
657 branch64(JITCompiler::Equal, arg1GPR, arg2GPR, invert ? notTaken : taken);
658
659 leftDouble.link(&m_jit);
660 rightDouble.link(&m_jit);
661
662 silentSpillAllRegisters(resultGPR);
663 callOperation(operationCompareStrictEq, resultGPR, arg1GPR, arg2GPR);
664 silentFillAllRegisters(resultGPR);
665
666 branchTest32(invert ? JITCompiler::Zero : JITCompiler::NonZero, resultGPR, taken);
667 }
668
669 jump(notTaken);
670 }
671
672 void SpeculativeJIT::nonSpeculativeNonPeepholeStrictEq(Node* node, bool invert)
673 {
674 JSValueOperand arg1(this, node->child1());
675 JSValueOperand arg2(this, node->child2());
676 GPRReg arg1GPR = arg1.gpr();
677 GPRReg arg2GPR = arg2.gpr();
678
679 GPRTemporary result(this);
680 GPRReg resultGPR = result.gpr();
681
682 arg1.use();
683 arg2.use();
684
685 if (isKnownCell(node->child1().node()) && isKnownCell(node->child2().node())) {
686         // See if we get lucky: if the arguments are cells and they reference the same
687 // cell, then they must be strictly equal.
688 // FIXME: this should flush registers instead of silent spill/fill.
689 JITCompiler::Jump notEqualCase = m_jit.branch64(JITCompiler::NotEqual, arg1GPR, arg2GPR);
690
691 m_jit.move(JITCompiler::TrustedImm64(JSValue::encode(jsBoolean(!invert))), resultGPR);
692
693 JITCompiler::Jump done = m_jit.jump();
694
695 notEqualCase.link(&m_jit);
696
697 silentSpillAllRegisters(resultGPR);
698 callOperation(operationCompareStrictEqCell, resultGPR, arg1GPR, arg2GPR);
699 silentFillAllRegisters(resultGPR);
700
701 m_jit.and64(JITCompiler::TrustedImm32(1), resultGPR);
702 m_jit.or32(JITCompiler::TrustedImm32(ValueFalse), resultGPR);
703
704 done.link(&m_jit);
705 } else {
706 m_jit.or64(arg1GPR, arg2GPR, resultGPR);
707
708 JITCompiler::JumpList slowPathCases;
709
710 JITCompiler::Jump twoCellsCase = m_jit.branchTest64(JITCompiler::Zero, resultGPR, GPRInfo::tagMaskRegister);
711
712 JITCompiler::Jump leftOK = m_jit.branch64(JITCompiler::AboveOrEqual, arg1GPR, GPRInfo::tagTypeNumberRegister);
713 slowPathCases.append(m_jit.branchTest64(JITCompiler::NonZero, arg1GPR, GPRInfo::tagTypeNumberRegister));
714 leftOK.link(&m_jit);
715 JITCompiler::Jump rightOK = m_jit.branch64(JITCompiler::AboveOrEqual, arg2GPR, GPRInfo::tagTypeNumberRegister);
716 slowPathCases.append(m_jit.branchTest64(JITCompiler::NonZero, arg2GPR, GPRInfo::tagTypeNumberRegister));
717 rightOK.link(&m_jit);
718
719 m_jit.compare64(invert ? JITCompiler::NotEqual : JITCompiler::Equal, arg1GPR, arg2GPR, resultGPR);
720 m_jit.or32(JITCompiler::TrustedImm32(ValueFalse), resultGPR);
721
722 JITCompiler::Jump done = m_jit.jump();
723
724 twoCellsCase.link(&m_jit);
725 slowPathCases.append(m_jit.branch64(JITCompiler::NotEqual, arg1GPR, arg2GPR));
726
727 m_jit.move(JITCompiler::TrustedImm64(JSValue::encode(jsBoolean(!invert))), resultGPR);
728
729 addSlowPathGenerator(
730 adoptPtr(
731 new CompareAndBoxBooleanSlowPathGenerator<MacroAssembler::JumpList>(
732 slowPathCases, this, operationCompareStrictEq, resultGPR, arg1GPR,
733 arg2GPR)));
734
735 done.link(&m_jit);
736 }
737
738 jsValueResult(resultGPR, m_currentNode, DataFormatJSBoolean, UseChildrenCalledExplicitly);
739 }
740
741 void SpeculativeJIT::emitCall(Node* node)
742 {
743 if (node->op() != Call)
744 RELEASE_ASSERT(node->op() == Construct);
745
746 // For constructors, the this argument is not passed but we have to make space
747 // for it.
748 int dummyThisArgument = node->op() == Call ? 0 : 1;
749
750 CallLinkInfo::CallType callType = node->op() == Call ? CallLinkInfo::Call : CallLinkInfo::Construct;
751
752 Edge calleeEdge = m_jit.graph().m_varArgChildren[node->firstChild()];
753 JSValueOperand callee(this, calleeEdge);
754 GPRReg calleeGPR = callee.gpr();
755 use(calleeEdge);
756
757 // The call instruction's first child is the function; the subsequent children are the
758 // arguments.
759 int numPassedArgs = node->numChildren() - 1;
760
761 m_jit.store32(MacroAssembler::TrustedImm32(numPassedArgs + dummyThisArgument), callFramePayloadSlot(JSStack::ArgumentCount));
762 m_jit.store64(GPRInfo::callFrameRegister, callFrameSlot(JSStack::CallerFrame));
763 m_jit.store64(calleeGPR, callFrameSlot(JSStack::Callee));
764
765 for (int i = 0; i < numPassedArgs; i++) {
766 Edge argEdge = m_jit.graph().m_varArgChildren[node->firstChild() + 1 + i];
767 JSValueOperand arg(this, argEdge);
768 GPRReg argGPR = arg.gpr();
769 use(argEdge);
770
771 m_jit.store64(argGPR, argumentSlot(i + dummyThisArgument));
772 }
773
774 flushRegisters();
775
776 GPRResult result(this);
777 GPRReg resultGPR = result.gpr();
778
779 JITCompiler::DataLabelPtr targetToCheck;
780 JITCompiler::JumpList slowPath;
781
782 CallBeginToken token;
783 m_jit.beginCall(node->codeOrigin, token);
784
785 m_jit.addPtr(TrustedImm32(m_jit.codeBlock()->m_numCalleeRegisters * sizeof(Register)), GPRInfo::callFrameRegister);
786
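    // Call linking, roughly: the patchable pointer compare below starts out
    // comparing the callee against null, so the first time through we always
    // take the slow path. At link time the slow path's near call is wired to
    // the call link stub, which resolves the callee and patches targetToCheck
    // and the fast-path nearCall, so subsequent calls to the same callee go
    // straight through the fast path. addJSCall() at the end of this function
    // records the labels the linker and repatcher need.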
787 slowPath.append(m_jit.branchPtrWithPatch(MacroAssembler::NotEqual, calleeGPR, targetToCheck, MacroAssembler::TrustedImmPtr(0)));
788
789 m_jit.loadPtr(MacroAssembler::Address(calleeGPR, OBJECT_OFFSETOF(JSFunction, m_scope)), resultGPR);
790 m_jit.store64(resultGPR, MacroAssembler::Address(GPRInfo::callFrameRegister, static_cast<ptrdiff_t>(sizeof(Register)) * JSStack::ScopeChain));
791
792 CodeOrigin codeOrigin = m_currentNode->codeOrigin;
793 JITCompiler::Call fastCall = m_jit.nearCall();
794 m_jit.notifyCall(fastCall, codeOrigin, token);
795
796 JITCompiler::Jump done = m_jit.jump();
797
798 slowPath.link(&m_jit);
799
800 m_jit.move(calleeGPR, GPRInfo::nonArgGPR0);
801 m_jit.prepareForExceptionCheck();
802 JITCompiler::Call slowCall = m_jit.nearCall();
803 m_jit.notifyCall(slowCall, codeOrigin, token);
804
805 done.link(&m_jit);
806
807 m_jit.move(GPRInfo::returnValueGPR, resultGPR);
808
809 jsValueResult(resultGPR, m_currentNode, DataFormatJS, UseChildrenCalledExplicitly);
810
811 m_jit.addJSCall(fastCall, slowCall, targetToCheck, callType, calleeGPR, m_currentNode->codeOrigin);
812 }
813
814 template<bool strict>
815 GPRReg SpeculativeJIT::fillSpeculateIntInternal(Edge edge, DataFormat& returnFormat)
816 {
817 #if DFG_ENABLE(DEBUG_VERBOSE)
818 dataLogF("SpecInt@%d ", edge->index());
819 #endif
820 AbstractValue& value = m_state.forNode(edge);
821 SpeculatedType type = value.m_type;
822 ASSERT(edge.useKind() != KnownInt32Use || !(value.m_type & ~SpecInt32));
823 value.filter(SpecInt32);
824 VirtualRegister virtualRegister = edge->virtualRegister();
825 GenerationInfo& info = m_generationInfo[virtualRegister];
826
827 switch (info.registerFormat()) {
828 case DataFormatNone: {
829 if ((edge->hasConstant() && !isInt32Constant(edge.node())) || info.spillFormat() == DataFormatDouble) {
830 terminateSpeculativeExecution(Uncountable, JSValueRegs(), 0);
831 returnFormat = DataFormatInteger;
832 return allocate();
833 }
834
835 GPRReg gpr = allocate();
836
837 if (edge->hasConstant()) {
838 m_gprs.retain(gpr, virtualRegister, SpillOrderConstant);
839 ASSERT(isInt32Constant(edge.node()));
840 m_jit.move(MacroAssembler::Imm32(valueOfInt32Constant(edge.node())), gpr);
841 info.fillInteger(*m_stream, gpr);
842 returnFormat = DataFormatInteger;
843 return gpr;
844 }
845
846 DataFormat spillFormat = info.spillFormat();
847
848 RELEASE_ASSERT((spillFormat & DataFormatJS) || spillFormat == DataFormatInteger);
849
850 m_gprs.retain(gpr, virtualRegister, SpillOrderSpilled);
851
852 if (spillFormat == DataFormatJSInteger || spillFormat == DataFormatInteger) {
853 // If we know this was spilled as an integer we can fill without checking.
854 if (strict) {
855 m_jit.load32(JITCompiler::addressFor(virtualRegister), gpr);
856 info.fillInteger(*m_stream, gpr);
857 returnFormat = DataFormatInteger;
858 return gpr;
859 }
860 if (spillFormat == DataFormatInteger) {
861 m_jit.load32(JITCompiler::addressFor(virtualRegister), gpr);
862 m_jit.or64(GPRInfo::tagTypeNumberRegister, gpr);
863 } else
864 m_jit.load64(JITCompiler::addressFor(virtualRegister), gpr);
865 info.fillJSValue(*m_stream, gpr, DataFormatJSInteger);
866 returnFormat = DataFormatJSInteger;
867 return gpr;
868 }
869 m_jit.load64(JITCompiler::addressFor(virtualRegister), gpr);
870
871 // Fill as JSValue, and fall through.
872 info.fillJSValue(*m_stream, gpr, DataFormatJSInteger);
873 m_gprs.unlock(gpr);
874 }
875
876 case DataFormatJS: {
877 // Check the value is an integer.
878 GPRReg gpr = info.gpr();
879 m_gprs.lock(gpr);
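        // Boxed int32s are exactly the values >= TagTypeNumber, so one unsigned
        // compare against tagTypeNumberRegister is the whole int32 type check;
        // it is skipped when the abstract state already proves SpecInt32.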
880 if (type & ~SpecInt32)
881 speculationCheck(BadType, JSValueRegs(gpr), edge, m_jit.branch64(MacroAssembler::Below, gpr, GPRInfo::tagTypeNumberRegister));
882 info.fillJSValue(*m_stream, gpr, DataFormatJSInteger);
883 // If !strict we're done, return.
884 if (!strict) {
885 returnFormat = DataFormatJSInteger;
886 return gpr;
887 }
888 // else fall through & handle as DataFormatJSInteger.
889 m_gprs.unlock(gpr);
890 }
891
892 case DataFormatJSInteger: {
893 // In a strict fill we need to strip off the value tag.
894 if (strict) {
895 GPRReg gpr = info.gpr();
896 GPRReg result;
897 // If the register has already been locked we need to take a copy.
898 // If not, we'll zero extend in place, so mark on the info that this is now type DataFormatInteger, not DataFormatJSInteger.
899 if (m_gprs.isLocked(gpr))
900 result = allocate();
901 else {
902 m_gprs.lock(gpr);
903 info.fillInteger(*m_stream, gpr);
904 result = gpr;
905 }
906 m_jit.zeroExtend32ToPtr(gpr, result);
907 returnFormat = DataFormatInteger;
908 return result;
909 }
910
911 GPRReg gpr = info.gpr();
912 m_gprs.lock(gpr);
913 returnFormat = DataFormatJSInteger;
914 return gpr;
915 }
916
917 case DataFormatInteger: {
918 GPRReg gpr = info.gpr();
919 m_gprs.lock(gpr);
920 returnFormat = DataFormatInteger;
921 return gpr;
922 }
923
924 case DataFormatDouble:
925 case DataFormatJSDouble: {
926 if (edge->hasConstant() && isInt32Constant(edge.node())) {
927 GPRReg gpr = allocate();
928 ASSERT(isInt32Constant(edge.node()));
929 m_jit.move(MacroAssembler::Imm32(valueOfInt32Constant(edge.node())), gpr);
930 returnFormat = DataFormatInteger;
931 return gpr;
932 }
933 }
934 case DataFormatCell:
935 case DataFormatBoolean:
936 case DataFormatJSCell:
937 case DataFormatJSBoolean: {
938 terminateSpeculativeExecution(Uncountable, JSValueRegs(), 0);
939 returnFormat = DataFormatInteger;
940 return allocate();
941 }
942
943 case DataFormatStorage:
944 RELEASE_ASSERT_NOT_REACHED();
945
946 default:
947 RELEASE_ASSERT_NOT_REACHED();
948 return InvalidGPRReg;
949 }
950 }
951
952 GPRReg SpeculativeJIT::fillSpeculateInt(Edge edge, DataFormat& returnFormat)
953 {
954 return fillSpeculateIntInternal<false>(edge, returnFormat);
955 }
956
957 GPRReg SpeculativeJIT::fillSpeculateIntStrict(Edge edge)
958 {
959 DataFormat mustBeDataFormatInteger;
960 GPRReg result = fillSpeculateIntInternal<true>(edge, mustBeDataFormatInteger);
961 RELEASE_ASSERT(mustBeDataFormatInteger == DataFormatInteger);
962 return result;
963 }
964
965 FPRReg SpeculativeJIT::fillSpeculateDouble(Edge edge)
966 {
967 #if DFG_ENABLE(DEBUG_VERBOSE)
968 dataLogF("SpecDouble@%d ", edge->index());
969 #endif
970 AbstractValue& value = m_state.forNode(edge);
971 SpeculatedType type = value.m_type;
972 ASSERT(edge.useKind() != KnownNumberUse || !(value.m_type & ~SpecNumber));
973 value.filter(SpecNumber);
974 VirtualRegister virtualRegister = edge->virtualRegister();
975 GenerationInfo& info = m_generationInfo[virtualRegister];
976
977 if (info.registerFormat() == DataFormatNone) {
978 if (edge->hasConstant()) {
979 GPRReg gpr = allocate();
980
981 if (isInt32Constant(edge.node())) {
982 FPRReg fpr = fprAllocate();
983 m_jit.move(MacroAssembler::Imm64(reinterpretDoubleToInt64(static_cast<double>(valueOfInt32Constant(edge.node())))), gpr);
984 m_jit.move64ToDouble(gpr, fpr);
985 unlock(gpr);
986
987 m_fprs.retain(fpr, virtualRegister, SpillOrderDouble);
988 info.fillDouble(*m_stream, fpr);
989 return fpr;
990 }
991 if (isNumberConstant(edge.node())) {
992 FPRReg fpr = fprAllocate();
993 m_jit.move(MacroAssembler::Imm64(reinterpretDoubleToInt64(valueOfNumberConstant(edge.node()))), gpr);
994 m_jit.move64ToDouble(gpr, fpr);
995 unlock(gpr);
996
997 m_fprs.retain(fpr, virtualRegister, SpillOrderDouble);
998 info.fillDouble(*m_stream, fpr);
999 return fpr;
1000 }
1001 terminateSpeculativeExecution(Uncountable, JSValueRegs(), 0);
1002 return fprAllocate();
1003 }
1004
1005 DataFormat spillFormat = info.spillFormat();
1006 switch (spillFormat) {
1007 case DataFormatDouble: {
1008 FPRReg fpr = fprAllocate();
1009 m_jit.loadDouble(JITCompiler::addressFor(virtualRegister), fpr);
1010 m_fprs.retain(fpr, virtualRegister, SpillOrderDouble);
1011 info.fillDouble(*m_stream, fpr);
1012 return fpr;
1013 }
1014
1015 case DataFormatInteger: {
1016 GPRReg gpr = allocate();
1017
1018 m_gprs.retain(gpr, virtualRegister, SpillOrderSpilled);
1019 m_jit.load32(JITCompiler::addressFor(virtualRegister), gpr);
1020 info.fillInteger(*m_stream, gpr);
1021 unlock(gpr);
1022 break;
1023 }
1024
1025 default:
1026 GPRReg gpr = allocate();
1027
1028 RELEASE_ASSERT(spillFormat & DataFormatJS);
1029 m_gprs.retain(gpr, virtualRegister, SpillOrderSpilled);
1030 m_jit.load64(JITCompiler::addressFor(virtualRegister), gpr);
1031 info.fillJSValue(*m_stream, gpr, spillFormat);
1032 unlock(gpr);
1033 break;
1034 }
1035 }
1036
1037 switch (info.registerFormat()) {
1038 case DataFormatNone: // Should have filled, above.
1039 case DataFormatBoolean: // This type never occurs.
1040 case DataFormatStorage:
1041 RELEASE_ASSERT_NOT_REACHED();
1042
1043 case DataFormatCell:
1044 terminateSpeculativeExecution(Uncountable, JSValueRegs(), 0);
1045 return fprAllocate();
1046
1047 case DataFormatJSCell:
1048 case DataFormatJS:
1049 case DataFormatJSBoolean: {
1050 GPRReg jsValueGpr = info.gpr();
1051 m_gprs.lock(jsValueGpr);
1052 FPRReg fpr = fprAllocate();
1053 GPRReg tempGpr = allocate();
1054
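        // Dispatch on the boxed representation: values >= TagTypeNumber are
        // int32s and are converted with convertInt32ToDouble; values with none
        // of the TagTypeNumber bits set are not numbers and fail speculation;
        // anything else is a boxed double, unboxed by adding TagTypeNumber back.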
1055 JITCompiler::Jump isInteger = m_jit.branch64(MacroAssembler::AboveOrEqual, jsValueGpr, GPRInfo::tagTypeNumberRegister);
1056
1057 if (type & ~SpecNumber)
1058 speculationCheck(BadType, JSValueRegs(jsValueGpr), edge, m_jit.branchTest64(MacroAssembler::Zero, jsValueGpr, GPRInfo::tagTypeNumberRegister));
1059
1060 // First, if we get here we have a double encoded as a JSValue
1061 m_jit.move(jsValueGpr, tempGpr);
1062 unboxDouble(tempGpr, fpr);
1063 JITCompiler::Jump hasUnboxedDouble = m_jit.jump();
1064
1065 // Finally, handle integers.
1066 isInteger.link(&m_jit);
1067 m_jit.convertInt32ToDouble(jsValueGpr, fpr);
1068 hasUnboxedDouble.link(&m_jit);
1069
1070 m_gprs.release(jsValueGpr);
1071 m_gprs.unlock(jsValueGpr);
1072 m_gprs.unlock(tempGpr);
1073 m_fprs.retain(fpr, virtualRegister, SpillOrderDouble);
1074 info.fillDouble(*m_stream, fpr);
1075 info.killSpilled();
1076 return fpr;
1077 }
1078
1079 case DataFormatJSInteger:
1080 case DataFormatInteger: {
1081 FPRReg fpr = fprAllocate();
1082 GPRReg gpr = info.gpr();
1083 m_gprs.lock(gpr);
1084 m_jit.convertInt32ToDouble(gpr, fpr);
1085 m_gprs.unlock(gpr);
1086 return fpr;
1087 }
1088
1089 // Unbox the double
1090 case DataFormatJSDouble: {
1091 GPRReg gpr = info.gpr();
1092 FPRReg fpr = fprAllocate();
1093 if (m_gprs.isLocked(gpr)) {
1094 // Make sure we don't trample gpr if it is in use.
1095 GPRReg temp = allocate();
1096 m_jit.move(gpr, temp);
1097 unboxDouble(temp, fpr);
1098 unlock(temp);
1099 } else
1100 unboxDouble(gpr, fpr);
1101
1102 m_gprs.release(gpr);
1103 m_fprs.retain(fpr, virtualRegister, SpillOrderDouble);
1104
1105 info.fillDouble(*m_stream, fpr);
1106 return fpr;
1107 }
1108
1109 case DataFormatDouble: {
1110 FPRReg fpr = info.fpr();
1111 m_fprs.lock(fpr);
1112 return fpr;
1113 }
1114
1115 default:
1116 RELEASE_ASSERT_NOT_REACHED();
1117 return InvalidFPRReg;
1118 }
1119 }
1120
1121 GPRReg SpeculativeJIT::fillSpeculateCell(Edge edge)
1122 {
1123 #if DFG_ENABLE(DEBUG_VERBOSE)
1124 dataLogF("SpecCell@%d ", edge->index());
1125 #endif
1126 AbstractValue& value = m_state.forNode(edge);
1127 SpeculatedType type = value.m_type;
1128 ASSERT((edge.useKind() != KnownCellUse && edge.useKind() != KnownStringUse) || !(value.m_type & ~SpecCell));
1129 value.filter(SpecCell);
1130 VirtualRegister virtualRegister = edge->virtualRegister();
1131 GenerationInfo& info = m_generationInfo[virtualRegister];
1132
1133 switch (info.registerFormat()) {
1134 case DataFormatNone: {
1135 if (info.spillFormat() == DataFormatInteger || info.spillFormat() == DataFormatDouble) {
1136 terminateSpeculativeExecution(Uncountable, JSValueRegs(), 0);
1137 return allocate();
1138 }
1139
1140 GPRReg gpr = allocate();
1141
1142 if (edge->hasConstant()) {
1143 JSValue jsValue = valueOfJSConstant(edge.node());
1144 if (jsValue.isCell()) {
1145 m_gprs.retain(gpr, virtualRegister, SpillOrderConstant);
1146 m_jit.move(MacroAssembler::TrustedImm64(JSValue::encode(jsValue)), gpr);
1147 info.fillJSValue(*m_stream, gpr, DataFormatJSCell);
1148 return gpr;
1149 }
1150 terminateSpeculativeExecution(Uncountable, JSValueRegs(), 0);
1151 return gpr;
1152 }
1153 RELEASE_ASSERT(info.spillFormat() & DataFormatJS);
1154 m_gprs.retain(gpr, virtualRegister, SpillOrderSpilled);
1155 m_jit.load64(JITCompiler::addressFor(virtualRegister), gpr);
1156
1157 info.fillJSValue(*m_stream, gpr, DataFormatJS);
1158 if (type & ~SpecCell)
1159 speculationCheck(BadType, JSValueRegs(gpr), edge, m_jit.branchTest64(MacroAssembler::NonZero, gpr, GPRInfo::tagMaskRegister));
1160 info.fillJSValue(*m_stream, gpr, DataFormatJSCell);
1161 return gpr;
1162 }
1163
1164 case DataFormatCell:
1165 case DataFormatJSCell: {
1166 GPRReg gpr = info.gpr();
1167 m_gprs.lock(gpr);
1168 return gpr;
1169 }
1170
1171 case DataFormatJS: {
1172 GPRReg gpr = info.gpr();
1173 m_gprs.lock(gpr);
1174 if (type & ~SpecCell)
1175 speculationCheck(BadType, JSValueRegs(gpr), edge, m_jit.branchTest64(MacroAssembler::NonZero, gpr, GPRInfo::tagMaskRegister));
1176 info.fillJSValue(*m_stream, gpr, DataFormatJSCell);
1177 return gpr;
1178 }
1179
1180 case DataFormatJSInteger:
1181 case DataFormatInteger:
1182 case DataFormatJSDouble:
1183 case DataFormatDouble:
1184 case DataFormatJSBoolean:
1185 case DataFormatBoolean: {
1186 terminateSpeculativeExecution(Uncountable, JSValueRegs(), 0);
1187 return allocate();
1188 }
1189
1190 case DataFormatStorage:
1191 RELEASE_ASSERT_NOT_REACHED();
1192
1193 default:
1194 RELEASE_ASSERT_NOT_REACHED();
1195 return InvalidGPRReg;
1196 }
1197 }
1198
1199 GPRReg SpeculativeJIT::fillSpeculateBoolean(Edge edge)
1200 {
1201 #if DFG_ENABLE(DEBUG_VERBOSE)
1202 dataLogF("SpecBool@%d ", edge->index());
1203 #endif
1204 AbstractValue& value = m_state.forNode(edge);
1205 SpeculatedType type = value.m_type;
1206 value.filter(SpecBoolean);
1207 VirtualRegister virtualRegister = edge->virtualRegister();
1208 GenerationInfo& info = m_generationInfo[virtualRegister];
1209
1210 switch (info.registerFormat()) {
1211 case DataFormatNone: {
1212 if (info.spillFormat() == DataFormatInteger || info.spillFormat() == DataFormatDouble) {
1213 terminateSpeculativeExecution(Uncountable, JSValueRegs(), 0);
1214 return allocate();
1215 }
1216
1217 GPRReg gpr = allocate();
1218
1219 if (edge->hasConstant()) {
1220 JSValue jsValue = valueOfJSConstant(edge.node());
1221 if (jsValue.isBoolean()) {
1222 m_gprs.retain(gpr, virtualRegister, SpillOrderConstant);
1223 m_jit.move(MacroAssembler::TrustedImm64(JSValue::encode(jsValue)), gpr);
1224 info.fillJSValue(*m_stream, gpr, DataFormatJSBoolean);
1225 return gpr;
1226 }
1227 terminateSpeculativeExecution(Uncountable, JSValueRegs(), 0);
1228 return gpr;
1229 }
1230 RELEASE_ASSERT(info.spillFormat() & DataFormatJS);
1231 m_gprs.retain(gpr, virtualRegister, SpillOrderSpilled);
1232 m_jit.load64(JITCompiler::addressFor(virtualRegister), gpr);
1233
1234 info.fillJSValue(*m_stream, gpr, DataFormatJS);
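        // Boolean check via XOR: XOR'ing with ValueFalse maps false to 0 and
        // true to 1, while any other JSValue keeps bits outside the low bit
        // set, which the branchTest64 against ~1 catches. The second XOR
        // restores the original value; the SpeculationRecovery entry lets the
        // OSR exit code undo the first XOR if the check fails while the value
        // is still flipped.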
1235 if (type & ~SpecBoolean) {
1236 m_jit.xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), gpr);
1237 speculationCheck(BadType, JSValueRegs(gpr), edge, m_jit.branchTest64(MacroAssembler::NonZero, gpr, TrustedImm32(static_cast<int32_t>(~1))), SpeculationRecovery(BooleanSpeculationCheck, gpr, InvalidGPRReg));
1238 m_jit.xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), gpr);
1239 }
1240 info.fillJSValue(*m_stream, gpr, DataFormatJSBoolean);
1241 return gpr;
1242 }
1243
1244 case DataFormatBoolean:
1245 case DataFormatJSBoolean: {
1246 GPRReg gpr = info.gpr();
1247 m_gprs.lock(gpr);
1248 return gpr;
1249 }
1250
1251 case DataFormatJS: {
1252 GPRReg gpr = info.gpr();
1253 m_gprs.lock(gpr);
1254 if (type & ~SpecBoolean) {
1255 m_jit.xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), gpr);
1256 speculationCheck(BadType, JSValueRegs(gpr), edge, m_jit.branchTest64(MacroAssembler::NonZero, gpr, TrustedImm32(static_cast<int32_t>(~1))), SpeculationRecovery(BooleanSpeculationCheck, gpr, InvalidGPRReg));
1257 m_jit.xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), gpr);
1258 }
1259 info.fillJSValue(*m_stream, gpr, DataFormatJSBoolean);
1260 return gpr;
1261 }
1262
1263 case DataFormatJSInteger:
1264 case DataFormatInteger:
1265 case DataFormatJSDouble:
1266 case DataFormatDouble:
1267 case DataFormatJSCell:
1268 case DataFormatCell: {
1269 terminateSpeculativeExecution(Uncountable, JSValueRegs(), 0);
1270 return allocate();
1271 }
1272
1273 case DataFormatStorage:
1274 RELEASE_ASSERT_NOT_REACHED();
1275
1276 default:
1277 RELEASE_ASSERT_NOT_REACHED();
1278 return InvalidGPRReg;
1279 }
1280 }
1281
1282 JITCompiler::Jump SpeculativeJIT::convertToDouble(GPRReg value, FPRReg result, GPRReg tmp)
1283 {
1284 JITCompiler::Jump isInteger = m_jit.branch64(MacroAssembler::AboveOrEqual, value, GPRInfo::tagTypeNumberRegister);
1285
1286 JITCompiler::Jump notNumber = m_jit.branchTest64(MacroAssembler::Zero, value, GPRInfo::tagTypeNumberRegister);
1287
1288 m_jit.move(value, tmp);
1289 unboxDouble(tmp, result);
1290
1291 JITCompiler::Jump done = m_jit.jump();
1292
1293 isInteger.link(&m_jit);
1294
1295 m_jit.convertInt32ToDouble(value, result);
1296
1297 done.link(&m_jit);
1298
1299 return notNumber;
1300 }
1301
1302 void SpeculativeJIT::compileObjectEquality(Node* node)
1303 {
1304 SpeculateCellOperand op1(this, node->child1());
1305 SpeculateCellOperand op2(this, node->child2());
1306 GPRTemporary result(this, op1);
1307
1308 GPRReg op1GPR = op1.gpr();
1309 GPRReg op2GPR = op2.gpr();
1310 GPRReg resultGPR = result.gpr();
1311
1312 if (m_jit.graph().globalObjectFor(node->codeOrigin)->masqueradesAsUndefinedWatchpoint()->isStillValid()) {
1313 m_jit.graph().globalObjectFor(node->codeOrigin)->masqueradesAsUndefinedWatchpoint()->add(speculationWatchpoint());
1314 DFG_TYPE_CHECK(
1315 JSValueSource::unboxedCell(op1GPR), node->child1(), SpecObject, m_jit.branchPtr(
1316 MacroAssembler::Equal,
1317 MacroAssembler::Address(op1GPR, JSCell::structureOffset()),
1318 MacroAssembler::TrustedImmPtr(m_jit.vm()->stringStructure.get())));
1319 DFG_TYPE_CHECK(
1320 JSValueSource::unboxedCell(op2GPR), node->child2(), SpecObject, m_jit.branchPtr(
1321 MacroAssembler::Equal,
1322 MacroAssembler::Address(op2GPR, JSCell::structureOffset()),
1323 MacroAssembler::TrustedImmPtr(m_jit.vm()->stringStructure.get())));
1324 } else {
1325 GPRTemporary structure(this);
1326 GPRReg structureGPR = structure.gpr();
1327
1328 m_jit.loadPtr(MacroAssembler::Address(op1GPR, JSCell::structureOffset()), structureGPR);
1329 DFG_TYPE_CHECK(
1330 JSValueSource::unboxedCell(op1GPR), node->child1(), SpecObject, m_jit.branchPtr(
1331 MacroAssembler::Equal,
1332 structureGPR,
1333 MacroAssembler::TrustedImmPtr(m_jit.vm()->stringStructure.get())));
1334 speculationCheck(BadType, JSValueSource::unboxedCell(op1GPR), node->child1(),
1335 m_jit.branchTest8(
1336 MacroAssembler::NonZero,
1337 MacroAssembler::Address(structureGPR, Structure::typeInfoFlagsOffset()),
1338 MacroAssembler::TrustedImm32(MasqueradesAsUndefined)));
1339
1340 m_jit.loadPtr(MacroAssembler::Address(op2GPR, JSCell::structureOffset()), structureGPR);
1341 DFG_TYPE_CHECK(
1342 JSValueSource::unboxedCell(op2GPR), node->child2(), SpecObject, m_jit.branchPtr(
1343 MacroAssembler::Equal,
1344 structureGPR,
1345 MacroAssembler::TrustedImmPtr(m_jit.vm()->stringStructure.get())));
1346 speculationCheck(BadType, JSValueSource::unboxedCell(op2GPR), node->child2(),
1347 m_jit.branchTest8(
1348 MacroAssembler::NonZero,
1349 MacroAssembler::Address(structureGPR, Structure::typeInfoFlagsOffset()),
1350 MacroAssembler::TrustedImm32(MasqueradesAsUndefined)));
1351 }
1352
1353 MacroAssembler::Jump falseCase = m_jit.branch64(MacroAssembler::NotEqual, op1GPR, op2GPR);
1354 m_jit.move(TrustedImm32(ValueTrue), resultGPR);
1355 MacroAssembler::Jump done = m_jit.jump();
1356 falseCase.link(&m_jit);
1357 m_jit.move(TrustedImm32(ValueFalse), resultGPR);
1358 done.link(&m_jit);
1359
1360 jsValueResult(resultGPR, m_currentNode, DataFormatJSBoolean);
1361 }
1362
1363 void SpeculativeJIT::compileObjectToObjectOrOtherEquality(Edge leftChild, Edge rightChild)
1364 {
1365 SpeculateCellOperand op1(this, leftChild);
1366 JSValueOperand op2(this, rightChild, ManualOperandSpeculation);
1367 GPRTemporary result(this);
1368
1369 GPRReg op1GPR = op1.gpr();
1370 GPRReg op2GPR = op2.gpr();
1371 GPRReg resultGPR = result.gpr();
1372 GPRTemporary structure;
1373 GPRReg structureGPR = InvalidGPRReg;
1374
1375 bool masqueradesAsUndefinedWatchpointValid = m_jit.graph().globalObjectFor(m_currentNode->codeOrigin)->masqueradesAsUndefinedWatchpoint()->isStillValid();
1376
1377 if (!masqueradesAsUndefinedWatchpointValid) {
1378 // The masquerades as undefined case will use the structure register, so allocate it here.
1379 // Do this at the top of the function to avoid branching around a register allocation.
1380 GPRTemporary realStructure(this);
1381 structure.adopt(realStructure);
1382 structureGPR = structure.gpr();
1383 }
1384
1385 if (masqueradesAsUndefinedWatchpointValid) {
1386 m_jit.graph().globalObjectFor(m_currentNode->codeOrigin)->masqueradesAsUndefinedWatchpoint()->add(speculationWatchpoint());
1387 DFG_TYPE_CHECK(
1388 JSValueSource::unboxedCell(op1GPR), leftChild, SpecObject, m_jit.branchPtr(
1389 MacroAssembler::Equal,
1390 MacroAssembler::Address(op1GPR, JSCell::structureOffset()),
1391 MacroAssembler::TrustedImmPtr(m_jit.vm()->stringStructure.get())));
1392 } else {
1393 m_jit.loadPtr(MacroAssembler::Address(op1GPR, JSCell::structureOffset()), structureGPR);
1394 DFG_TYPE_CHECK(
1395 JSValueSource::unboxedCell(op1GPR), leftChild, SpecObject, m_jit.branchPtr(
1396 MacroAssembler::Equal,
1397 structureGPR,
1398 MacroAssembler::TrustedImmPtr(m_jit.vm()->stringStructure.get())));
1399 speculationCheck(BadType, JSValueSource::unboxedCell(op1GPR), leftChild,
1400 m_jit.branchTest8(
1401 MacroAssembler::NonZero,
1402 MacroAssembler::Address(structureGPR, Structure::typeInfoFlagsOffset()),
1403 MacroAssembler::TrustedImm32(MasqueradesAsUndefined)));
1404 }
1405
1406 // It seems that most of the time when programs do a == b where b may be either null/undefined
1407 // or an object, b is usually an object. Balance the branches to make that case fast.
1408 MacroAssembler::Jump rightNotCell =
1409 m_jit.branchTest64(MacroAssembler::NonZero, op2GPR, GPRInfo::tagMaskRegister);
1410
1411 // We know that within this branch, rightChild must be a cell.
1412 if (masqueradesAsUndefinedWatchpointValid) {
1413 m_jit.graph().globalObjectFor(m_currentNode->codeOrigin)->masqueradesAsUndefinedWatchpoint()->add(speculationWatchpoint());
1414 DFG_TYPE_CHECK(
1415 JSValueRegs(op2GPR), rightChild, (~SpecCell) | SpecObject, m_jit.branchPtr(
1416 MacroAssembler::Equal,
1417 MacroAssembler::Address(op2GPR, JSCell::structureOffset()),
1418 MacroAssembler::TrustedImmPtr(m_jit.vm()->stringStructure.get())));
1419 } else {
1420 m_jit.loadPtr(MacroAssembler::Address(op2GPR, JSCell::structureOffset()), structureGPR);
1421 DFG_TYPE_CHECK(
1422 JSValueRegs(op2GPR), rightChild, (~SpecCell) | SpecObject, m_jit.branchPtr(
1423 MacroAssembler::Equal,
1424 structureGPR,
1425 MacroAssembler::TrustedImmPtr(m_jit.vm()->stringStructure.get())));
1426 speculationCheck(BadType, JSValueRegs(op2GPR), rightChild,
1427 m_jit.branchTest8(
1428 MacroAssembler::NonZero,
1429 MacroAssembler::Address(structureGPR, Structure::typeInfoFlagsOffset()),
1430 MacroAssembler::TrustedImm32(MasqueradesAsUndefined)));
1431 }
1432
1433     // At this point we know that we can perform a straightforward equality comparison on pointer
1434 // values because both left and right are pointers to objects that have no special equality
1435 // protocols.
1436 MacroAssembler::Jump falseCase = m_jit.branch64(MacroAssembler::NotEqual, op1GPR, op2GPR);
1437 MacroAssembler::Jump trueCase = m_jit.jump();
1438
1439 rightNotCell.link(&m_jit);
1440
1441 // We know that within this branch, rightChild must not be a cell. Check if that is enough to
1442 // prove that it is either null or undefined.
1443 if (needsTypeCheck(rightChild, SpecCell | SpecOther)) {
1444 m_jit.move(op2GPR, resultGPR);
1445 m_jit.and64(MacroAssembler::TrustedImm32(~TagBitUndefined), resultGPR);
1446
1447 typeCheck(
1448 JSValueRegs(op2GPR), rightChild, SpecCell | SpecOther,
1449 m_jit.branch64(
1450 MacroAssembler::NotEqual, resultGPR,
1451 MacroAssembler::TrustedImm64(ValueNull)));
1452 }
1453
1454 falseCase.link(&m_jit);
1455 m_jit.move(TrustedImm32(ValueFalse), resultGPR);
1456 MacroAssembler::Jump done = m_jit.jump();
1457 trueCase.link(&m_jit);
1458 m_jit.move(TrustedImm32(ValueTrue), resultGPR);
1459 done.link(&m_jit);
1460
1461 jsValueResult(resultGPR, m_currentNode, DataFormatJSBoolean);
1462 }
1463
1464 void SpeculativeJIT::compilePeepHoleObjectToObjectOrOtherEquality(Edge leftChild, Edge rightChild, Node* branchNode)
1465 {
1466 BlockIndex taken = branchNode->takenBlockIndex();
1467 BlockIndex notTaken = branchNode->notTakenBlockIndex();
1468
1469 SpeculateCellOperand op1(this, leftChild);
1470 JSValueOperand op2(this, rightChild, ManualOperandSpeculation);
1471 GPRTemporary result(this);
1472
1473 GPRReg op1GPR = op1.gpr();
1474 GPRReg op2GPR = op2.gpr();
1475 GPRReg resultGPR = result.gpr();
1476 GPRTemporary structure;
1477 GPRReg structureGPR = InvalidGPRReg;
1478
1479 bool masqueradesAsUndefinedWatchpointValid = m_jit.graph().globalObjectFor(m_currentNode->codeOrigin)->masqueradesAsUndefinedWatchpoint()->isStillValid();
1480
1481 if (!masqueradesAsUndefinedWatchpointValid) {
1482 // The masquerades as undefined case will use the structure register, so allocate it here.
1483 // Do this at the top of the function to avoid branching around a register allocation.
1484 GPRTemporary realStructure(this);
1485 structure.adopt(realStructure);
1486 structureGPR = structure.gpr();
1487 }
1488
1489 if (masqueradesAsUndefinedWatchpointValid) {
1490 m_jit.graph().globalObjectFor(m_currentNode->codeOrigin)->masqueradesAsUndefinedWatchpoint()->add(speculationWatchpoint());
1491 DFG_TYPE_CHECK(
1492 JSValueSource::unboxedCell(op1GPR), leftChild, SpecObject, m_jit.branchPtr(
1493 MacroAssembler::Equal,
1494 MacroAssembler::Address(op1GPR, JSCell::structureOffset()),
1495 MacroAssembler::TrustedImmPtr(m_jit.vm()->stringStructure.get())));
1496 } else {
1497 m_jit.loadPtr(MacroAssembler::Address(op1GPR, JSCell::structureOffset()), structureGPR);
1498 DFG_TYPE_CHECK(
1499 JSValueSource::unboxedCell(op1GPR), leftChild, SpecObject, m_jit.branchPtr(
1500 MacroAssembler::Equal,
1501 structureGPR,
1502 MacroAssembler::TrustedImmPtr(m_jit.vm()->stringStructure.get())));
1503 speculationCheck(BadType, JSValueSource::unboxedCell(op1GPR), leftChild,
1504 m_jit.branchTest8(
1505 MacroAssembler::NonZero,
1506 MacroAssembler::Address(structureGPR, Structure::typeInfoFlagsOffset()),
1507 MacroAssembler::TrustedImm32(MasqueradesAsUndefined)));
1508 }
1509
1510     // In practice, when programs do a == b where b may be either null/undefined or an object,
1511     // b is usually an object. Balance the branches to make that case fast.
1512 MacroAssembler::Jump rightNotCell =
1513 m_jit.branchTest64(MacroAssembler::NonZero, op2GPR, GPRInfo::tagMaskRegister);
1514
1515 // We know that within this branch, rightChild must be a cell.
1516 if (masqueradesAsUndefinedWatchpointValid) {
1517 m_jit.graph().globalObjectFor(m_currentNode->codeOrigin)->masqueradesAsUndefinedWatchpoint()->add(speculationWatchpoint());
1518 DFG_TYPE_CHECK(
1519 JSValueRegs(op2GPR), rightChild, (~SpecCell) | SpecObject, m_jit.branchPtr(
1520 MacroAssembler::Equal,
1521 MacroAssembler::Address(op2GPR, JSCell::structureOffset()),
1522 MacroAssembler::TrustedImmPtr(m_jit.vm()->stringStructure.get())));
1523 } else {
1524 m_jit.loadPtr(MacroAssembler::Address(op2GPR, JSCell::structureOffset()), structureGPR);
1525 DFG_TYPE_CHECK(
1526 JSValueRegs(op2GPR), rightChild, (~SpecCell) | SpecObject, m_jit.branchPtr(
1527 MacroAssembler::Equal,
1528 structureGPR,
1529 MacroAssembler::TrustedImmPtr(m_jit.vm()->stringStructure.get())));
1530 speculationCheck(BadType, JSValueRegs(op2GPR), rightChild,
1531 m_jit.branchTest8(
1532 MacroAssembler::NonZero,
1533 MacroAssembler::Address(structureGPR, Structure::typeInfoFlagsOffset()),
1534 MacroAssembler::TrustedImm32(MasqueradesAsUndefined)));
1535 }
1536
1537 // At this point we know that we can perform a straight-forward equality comparison on pointer
1538 // values because both left and right are pointers to objects that have no special equality
1539 // protocols.
1540 branch64(MacroAssembler::Equal, op1GPR, op2GPR, taken);
1541
1542 // We know that within this branch, rightChild must not be a cell. Check if that is enough to
1543 // prove that it is either null or undefined.
1544 if (!needsTypeCheck(rightChild, SpecCell | SpecOther))
1545 rightNotCell.link(&m_jit);
1546 else {
1547 jump(notTaken, ForceJump);
1548
1549 rightNotCell.link(&m_jit);
1550 m_jit.move(op2GPR, resultGPR);
1551 m_jit.and64(MacroAssembler::TrustedImm32(~TagBitUndefined), resultGPR);
1552
1553 typeCheck(
1554 JSValueRegs(op2GPR), rightChild, SpecCell | SpecOther, m_jit.branch64(
1555 MacroAssembler::NotEqual, resultGPR,
1556 MacroAssembler::TrustedImm64(ValueNull)));
1557 }
1558
1559 jump(notTaken);
1560 }
1561
1562 void SpeculativeJIT::compileIntegerCompare(Node* node, MacroAssembler::RelationalCondition condition)
1563 {
1564 SpeculateIntegerOperand op1(this, node->child1());
1565 SpeculateIntegerOperand op2(this, node->child2());
1566 GPRTemporary result(this, op1, op2);
1567
1568 m_jit.compare32(condition, op1.gpr(), op2.gpr(), result.gpr());
1569
1570 // If we add a DataFormatBool, we should use it here.
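// compare32 leaves 0 or 1 in the result register; OR-ing in ValueFalse boxes that as a JSValue boolean, since ValueTrue is just ValueFalse with the low bit set.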
1571 m_jit.or32(TrustedImm32(ValueFalse), result.gpr());
1572 jsValueResult(result.gpr(), m_currentNode, DataFormatJSBoolean);
1573 }
1574
1575 void SpeculativeJIT::compileDoubleCompare(Node* node, MacroAssembler::DoubleCondition condition)
1576 {
1577 SpeculateDoubleOperand op1(this, node->child1());
1578 SpeculateDoubleOperand op2(this, node->child2());
1579 GPRTemporary result(this);
1580
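// Start with boxed true; if the double comparison fails (including the unordered NaN case for the ordered conditions used here), XOR the low bit to produce boxed false, since ValueTrue and ValueFalse differ only in bit 0.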
1581 m_jit.move(TrustedImm32(ValueTrue), result.gpr());
1582 MacroAssembler::Jump trueCase = m_jit.branchDouble(condition, op1.fpr(), op2.fpr());
1583 m_jit.xor64(TrustedImm32(true), result.gpr());
1584 trueCase.link(&m_jit);
1585
1586 jsValueResult(result.gpr(), node, DataFormatJSBoolean);
1587 }
1588
1589 void SpeculativeJIT::compileValueAdd(Node* node)
1590 {
1591 JSValueOperand op1(this, node->child1());
1592 JSValueOperand op2(this, node->child2());
1593
1594 GPRReg op1GPR = op1.gpr();
1595 GPRReg op2GPR = op2.gpr();
1596
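// Untyped ValueAdd always calls into the runtime, so flush all register-allocated state across the call. If either operand is known not to be a number, use the variant that can skip the numeric fast path.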
1597 flushRegisters();
1598
1599 GPRResult result(this);
1600 if (isKnownNotNumber(node->child1().node()) || isKnownNotNumber(node->child2().node()))
1601 callOperation(operationValueAddNotNumber, result.gpr(), op1GPR, op2GPR);
1602 else
1603 callOperation(operationValueAdd, result.gpr(), op1GPR, op2GPR);
1604
1605 jsValueResult(result.gpr(), node);
1606 }
1607
1608 void SpeculativeJIT::compileObjectOrOtherLogicalNot(Edge nodeUse)
1609 {
1610 JSValueOperand value(this, nodeUse, ManualOperandSpeculation);
1611 GPRTemporary result(this);
1612 GPRReg valueGPR = value.gpr();
1613 GPRReg resultGPR = result.gpr();
1614 GPRTemporary structure;
1615 GPRReg structureGPR = InvalidGPRReg;
1616
1617 bool masqueradesAsUndefinedWatchpointValid = m_jit.graph().globalObjectFor(m_currentNode->codeOrigin)->masqueradesAsUndefinedWatchpoint()->isStillValid();
1618
1619 if (!masqueradesAsUndefinedWatchpointValid) {
1620 // The masquerades as undefined case will use the structure register, so allocate it here.
1621 // Do this at the top of the function to avoid branching around a register allocation.
1622 GPRTemporary realStructure(this);
1623 structure.adopt(realStructure);
1624 structureGPR = structure.gpr();
1625 }
1626
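// Cells are the only JSValues with no tag bits set, so a non-zero result under the tag mask means the value is not a cell.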
1627 MacroAssembler::Jump notCell = m_jit.branchTest64(MacroAssembler::NonZero, valueGPR, GPRInfo::tagMaskRegister);
1628 if (masqueradesAsUndefinedWatchpointValid) {
1629 m_jit.graph().globalObjectFor(m_currentNode->codeOrigin)->masqueradesAsUndefinedWatchpoint()->add(speculationWatchpoint());
1630 DFG_TYPE_CHECK(
1631 JSValueRegs(valueGPR), nodeUse, (~SpecCell) | SpecObject, m_jit.branchPtr(
1632 MacroAssembler::Equal,
1633 MacroAssembler::Address(valueGPR, JSCell::structureOffset()),
1634 MacroAssembler::TrustedImmPtr(m_jit.vm()->stringStructure.get())));
1635 } else {
1636 m_jit.loadPtr(MacroAssembler::Address(valueGPR, JSCell::structureOffset()), structureGPR);
1637
1638 DFG_TYPE_CHECK(
1639 JSValueRegs(valueGPR), nodeUse, (~SpecCell) | SpecObject, m_jit.branchPtr(
1640 MacroAssembler::Equal,
1641 structureGPR,
1642 MacroAssembler::TrustedImmPtr(m_jit.vm()->stringStructure.get())));
1643
1644 MacroAssembler::Jump isNotMasqueradesAsUndefined =
1645 m_jit.branchTest8(
1646 MacroAssembler::Zero,
1647 MacroAssembler::Address(structureGPR, Structure::typeInfoFlagsOffset()),
1648 MacroAssembler::TrustedImm32(MasqueradesAsUndefined));
1649
1650 speculationCheck(BadType, JSValueRegs(valueGPR), nodeUse,
1651 m_jit.branchPtr(
1652 MacroAssembler::Equal,
1653 MacroAssembler::Address(structureGPR, Structure::globalObjectOffset()),
1654 MacroAssembler::TrustedImmPtr(m_jit.graph().globalObjectFor(m_currentNode->codeOrigin))));
1655
1656 isNotMasqueradesAsUndefined.link(&m_jit);
1657 }
1658 m_jit.move(TrustedImm32(ValueFalse), resultGPR);
1659 MacroAssembler::Jump done = m_jit.jump();
1660
1661 notCell.link(&m_jit);
1662
1663 if (needsTypeCheck(nodeUse, SpecCell | SpecOther)) {
1664 m_jit.move(valueGPR, resultGPR);
1665 m_jit.and64(MacroAssembler::TrustedImm32(~TagBitUndefined), resultGPR);
1666 typeCheck(
1667 JSValueRegs(valueGPR), nodeUse, SpecCell | SpecOther, m_jit.branch64(
1668 MacroAssembler::NotEqual,
1669 resultGPR,
1670 MacroAssembler::TrustedImm64(ValueNull)));
1671 }
1672 m_jit.move(TrustedImm32(ValueTrue), resultGPR);
1673
1674 done.link(&m_jit);
1675
1676 jsValueResult(resultGPR, m_currentNode, DataFormatJSBoolean);
1677 }
1678
1679 void SpeculativeJIT::compileLogicalNot(Node* node)
1680 {
1681 switch (node->child1().useKind()) {
1682 case ObjectOrOtherUse: {
1683 compileObjectOrOtherLogicalNot(node->child1());
1684 return;
1685 }
1686
1687 case Int32Use: {
1688 SpeculateIntegerOperand value(this, node->child1());
1689 GPRTemporary result(this, value);
1690 m_jit.compare32(MacroAssembler::Equal, value.gpr(), MacroAssembler::TrustedImm32(0), result.gpr());
1691 m_jit.or32(TrustedImm32(ValueFalse), result.gpr());
1692 jsValueResult(result.gpr(), node, DataFormatJSBoolean);
1693 return;
1694 }
1695
1696 case NumberUse: {
1697 SpeculateDoubleOperand value(this, node->child1());
1698 FPRTemporary scratch(this);
1699 GPRTemporary result(this);
1700 m_jit.move(TrustedImm32(ValueFalse), result.gpr());
1701 MacroAssembler::Jump nonZero = m_jit.branchDoubleNonZero(value.fpr(), scratch.fpr());
1702 m_jit.xor32(TrustedImm32(true), result.gpr());
1703 nonZero.link(&m_jit);
1704 jsValueResult(result.gpr(), node, DataFormatJSBoolean);
1705 return;
1706 }
1707
1708 case BooleanUse: {
1709 if (!needsTypeCheck(node->child1(), SpecBoolean)) {
1710 SpeculateBooleanOperand value(this, node->child1());
1711 GPRTemporary result(this, value);
1712
1713 m_jit.move(value.gpr(), result.gpr());
1714 m_jit.xor64(TrustedImm32(true), result.gpr());
1715
1716 jsValueResult(result.gpr(), node, DataFormatJSBoolean);
1717 return;
1718 }
1719
1720 JSValueOperand value(this, node->child1(), ManualOperandSpeculation);
1721 GPRTemporary result(this); // FIXME: We could reuse, but on speculation fail would need recovery to restore tag (akin to add).
1722
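// XOR-ing with ValueFalse maps boxed false/true to 0/1; any bits surviving the ~1 mask mean the value was not a boolean, which fails the type check. XOR-ing with ValueTrue then yields the boxed logical negation.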
1723 m_jit.move(value.gpr(), result.gpr());
1724 m_jit.xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), result.gpr());
1725 typeCheck(
1726 JSValueRegs(value.gpr()), node->child1(), SpecBoolean, m_jit.branchTest64(
1727 JITCompiler::NonZero, result.gpr(), TrustedImm32(static_cast<int32_t>(~1))));
1728 m_jit.xor64(TrustedImm32(static_cast<int32_t>(ValueTrue)), result.gpr());
1729
1730 // If we add a DataFormatBool, we should use it here.
1731 jsValueResult(result.gpr(), node, DataFormatJSBoolean);
1732 return;
1733 }
1734
1735 case UntypedUse: {
1736 JSValueOperand arg1(this, node->child1());
1737 GPRTemporary result(this);
1738
1739 GPRReg arg1GPR = arg1.gpr();
1740 GPRReg resultGPR = result.gpr();
1741
1742 arg1.use();
1743
1744 m_jit.move(arg1GPR, resultGPR);
1745 m_jit.xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), resultGPR);
1746 JITCompiler::Jump slowCase = m_jit.branchTest64(JITCompiler::NonZero, resultGPR, TrustedImm32(static_cast<int32_t>(~1)));
1747
1748 addSlowPathGenerator(
1749 slowPathCall(slowCase, this, dfgConvertJSValueToBoolean, resultGPR, arg1GPR));
1750
1751 m_jit.xor64(TrustedImm32(static_cast<int32_t>(ValueTrue)), resultGPR);
1752 jsValueResult(resultGPR, node, DataFormatJSBoolean, UseChildrenCalledExplicitly);
1753 return;
1754 }
1755
1756 default:
1757 RELEASE_ASSERT_NOT_REACHED();
1758 break;
1759 }
1760 }
1761
1762 void SpeculativeJIT::emitObjectOrOtherBranch(Edge nodeUse, BlockIndex taken, BlockIndex notTaken)
1763 {
1764 JSValueOperand value(this, nodeUse, ManualOperandSpeculation);
1765 GPRTemporary scratch(this);
1766 GPRReg valueGPR = value.gpr();
1767 GPRReg scratchGPR = scratch.gpr();
1768
1769 MacroAssembler::Jump notCell = m_jit.branchTest64(MacroAssembler::NonZero, valueGPR, GPRInfo::tagMaskRegister);
1770 if (m_jit.graph().globalObjectFor(m_currentNode->codeOrigin)->masqueradesAsUndefinedWatchpoint()->isStillValid()) {
1771 m_jit.graph().globalObjectFor(m_currentNode->codeOrigin)->masqueradesAsUndefinedWatchpoint()->add(speculationWatchpoint());
1772
1773 DFG_TYPE_CHECK(
1774 JSValueRegs(valueGPR), nodeUse, (~SpecCell) | SpecObject, m_jit.branchPtr(
1775 MacroAssembler::Equal,
1776 MacroAssembler::Address(valueGPR, JSCell::structureOffset()),
1777 MacroAssembler::TrustedImmPtr(m_jit.vm()->stringStructure.get())));
1778 } else {
1779 m_jit.loadPtr(MacroAssembler::Address(valueGPR, JSCell::structureOffset()), scratchGPR);
1780
1781 DFG_TYPE_CHECK(
1782 JSValueRegs(valueGPR), nodeUse, (~SpecCell) | SpecObject, m_jit.branchPtr(
1783 MacroAssembler::Equal,
1784 scratchGPR,
1785 MacroAssembler::TrustedImmPtr(m_jit.vm()->stringStructure.get())));
1786
1787 JITCompiler::Jump isNotMasqueradesAsUndefined = m_jit.branchTest8(JITCompiler::Zero, MacroAssembler::Address(scratchGPR, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
1788
1789 speculationCheck(BadType, JSValueRegs(valueGPR), nodeUse,
1790 m_jit.branchPtr(
1791 MacroAssembler::Equal,
1792 MacroAssembler::Address(scratchGPR, Structure::globalObjectOffset()),
1793 MacroAssembler::TrustedImmPtr(m_jit.graph().globalObjectFor(m_currentNode->codeOrigin))));
1794
1795 isNotMasqueradesAsUndefined.link(&m_jit);
1796 }
1797 jump(taken, ForceJump);
1798
1799 notCell.link(&m_jit);
1800
1801 if (needsTypeCheck(nodeUse, SpecCell | SpecOther)) {
1802 m_jit.move(valueGPR, scratchGPR);
1803 m_jit.and64(MacroAssembler::TrustedImm32(~TagBitUndefined), scratchGPR);
1804 typeCheck(
1805 JSValueRegs(valueGPR), nodeUse, SpecCell | SpecOther, m_jit.branch64(
1806 MacroAssembler::NotEqual, scratchGPR, MacroAssembler::TrustedImm64(ValueNull)));
1807 }
1808 jump(notTaken);
1809
1810 noResult(m_currentNode);
1811 }
1812
1813 void SpeculativeJIT::emitBranch(Node* node)
1814 {
1815 BlockIndex taken = node->takenBlockIndex();
1816 BlockIndex notTaken = node->notTakenBlockIndex();
1817
1818 switch (node->child1().useKind()) {
1819 case ObjectOrOtherUse: {
1820 emitObjectOrOtherBranch(node->child1(), taken, notTaken);
1821 return;
1822 }
1823
1824 case Int32Use:
1825 case NumberUse: {
1826 if (node->child1().useKind() == Int32Use) {
1827 bool invert = false;
1828
1829 if (taken == nextBlock()) {
1830 invert = true;
1831 BlockIndex tmp = taken;
1832 taken = notTaken;
1833 notTaken = tmp;
1834 }
1835
1836 SpeculateIntegerOperand value(this, node->child1());
1837 branchTest32(invert ? MacroAssembler::Zero : MacroAssembler::NonZero, value.gpr(), taken);
1838 } else {
1839 SpeculateDoubleOperand value(this, node->child1());
1840 FPRTemporary scratch(this);
1841 branchDoubleNonZero(value.fpr(), scratch.fpr(), taken);
1842 }
1843
1844 jump(notTaken);
1845
1846 noResult(node);
1847 return;
1848 }
1849
1850 case UntypedUse:
1851 case BooleanUse: {
1852 JSValueOperand value(this, node->child1(), ManualOperandSpeculation);
1853 GPRReg valueGPR = value.gpr();
1854
1855 if (node->child1().useKind() == BooleanUse) {
1856 if (!needsTypeCheck(node->child1(), SpecBoolean)) {
1857 MacroAssembler::ResultCondition condition = MacroAssembler::NonZero;
1858
1859 if (taken == nextBlock()) {
1860 condition = MacroAssembler::Zero;
1861 BlockIndex tmp = taken;
1862 taken = notTaken;
1863 notTaken = tmp;
1864 }
1865
1866 branchTest32(condition, valueGPR, TrustedImm32(true), taken);
1867 jump(notTaken);
1868 } else {
1869 branch64(MacroAssembler::Equal, valueGPR, MacroAssembler::TrustedImm64(JSValue::encode(jsBoolean(false))), notTaken);
1870 branch64(MacroAssembler::Equal, valueGPR, MacroAssembler::TrustedImm64(JSValue::encode(jsBoolean(true))), taken);
1871
1872 typeCheck(JSValueRegs(valueGPR), node->child1(), SpecBoolean, m_jit.jump());
1873 }
1874 value.use();
1875 } else {
1876 GPRTemporary result(this);
1877 GPRReg resultGPR = result.gpr();
1878
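// Try cheap, prediction-guided tests before falling back to the generic dfgConvertJSValueToBoolean call below. Boxed int32s are the only values at or above the tag-type-number constant, so once the boxed zero has been routed to notTaken, any remaining int32 is truthy.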
1879 if (node->child1()->prediction() & SpecInt32) {
1880 branch64(MacroAssembler::Equal, valueGPR, MacroAssembler::TrustedImm64(JSValue::encode(jsNumber(0))), notTaken);
1881 branch64(MacroAssembler::AboveOrEqual, valueGPR, GPRInfo::tagTypeNumberRegister, taken);
1882 }
1883
1884 if (node->child1()->prediction() & SpecBoolean) {
1885 branch64(MacroAssembler::Equal, valueGPR, MacroAssembler::TrustedImm64(JSValue::encode(jsBoolean(false))), notTaken);
1886 branch64(MacroAssembler::Equal, valueGPR, MacroAssembler::TrustedImm64(JSValue::encode(jsBoolean(true))), taken);
1887 }
1888
1889 value.use();
1890
1891 silentSpillAllRegisters(resultGPR);
1892 callOperation(dfgConvertJSValueToBoolean, resultGPR, valueGPR);
1893 silentFillAllRegisters(resultGPR);
1894
1895 branchTest32(MacroAssembler::NonZero, resultGPR, taken);
1896 jump(notTaken);
1897 }
1898
1899 noResult(node, UseChildrenCalledExplicitly);
1900 return;
1901 }
1902
1903 default:
1904 RELEASE_ASSERT_NOT_REACHED();
1905 }
1906 }
1907
1908 void SpeculativeJIT::compile(Node* node)
1909 {
1910 NodeType op = node->op();
1911
1912 #if ENABLE(DFG_REGISTER_ALLOCATION_VALIDATION)
1913 m_jit.clearRegisterAllocationOffsets();
1914 #endif
1915
1916 switch (op) {
1917 case JSConstant:
1918 initConstantInfo(node);
1919 break;
1920
1921 case PhantomArguments:
1922 initConstantInfo(node);
1923 break;
1924
1925 case WeakJSConstant:
1926 m_jit.addWeakReference(node->weakConstant());
1927 initConstantInfo(node);
1928 break;
1929
1930 case Identity: {
1931 // CSE should always eliminate this.
1932 RELEASE_ASSERT_NOT_REACHED();
1933 break;
1934 }
1935
1936 case GetLocal: {
1937 SpeculatedType prediction = node->variableAccessData()->prediction();
1938 AbstractValue& value = m_state.variables().operand(node->local());
1939
1940 // If we have no prediction for this local, then don't attempt to compile.
1941 if (prediction == SpecNone) {
1942 terminateSpeculativeExecution(InadequateCoverage, JSValueRegs(), 0);
1943 break;
1944 }
1945
1946 // If the CFA is tracking this variable and it found that the variable
1947 // cannot have been assigned, then don't attempt to proceed.
1948 if (value.isClear()) {
1949 // FIXME: We should trap instead.
1950 // https://bugs.webkit.org/show_bug.cgi?id=110383
1951 terminateSpeculativeExecution(InadequateCoverage, JSValueRegs(), 0);
1952 break;
1953 }
1954
1955 if (node->variableAccessData()->shouldUseDoubleFormat()) {
1956 FPRTemporary result(this);
1957 m_jit.loadDouble(JITCompiler::addressFor(node->local()), result.fpr());
1958 VirtualRegister virtualRegister = node->virtualRegister();
1959 m_fprs.retain(result.fpr(), virtualRegister, SpillOrderDouble);
1960 m_generationInfo[virtualRegister].initDouble(node, node->refCount(), result.fpr());
1961 break;
1962 }
1963
1964 if (isInt32Speculation(value.m_type)) {
1965 GPRTemporary result(this);
1966 m_jit.load32(JITCompiler::payloadFor(node->local()), result.gpr());
1967
1968 // Like integerResult, but don't useChildren - our children are phi nodes,
1969 // and don't represent values within this dataflow with virtual registers.
1970 VirtualRegister virtualRegister = node->virtualRegister();
1971 m_gprs.retain(result.gpr(), virtualRegister, SpillOrderInteger);
1972 m_generationInfo[virtualRegister].initInteger(node, node->refCount(), result.gpr());
1973 break;
1974 }
1975
1976 GPRTemporary result(this);
1977 m_jit.load64(JITCompiler::addressFor(node->local()), result.gpr());
1978
1979 // Like jsValueResult, but don't useChildren - our children are phi nodes,
1980 // and don't represent values within this dataflow with virtual registers.
1981 VirtualRegister virtualRegister = node->virtualRegister();
1982 m_gprs.retain(result.gpr(), virtualRegister, SpillOrderJS);
1983
1984 DataFormat format;
1985 if (isCellSpeculation(value.m_type))
1986 format = DataFormatJSCell;
1987 else if (isBooleanSpeculation(value.m_type))
1988 format = DataFormatJSBoolean;
1989 else
1990 format = DataFormatJS;
1991
1992 m_generationInfo[virtualRegister].initJSValue(node, node->refCount(), result.gpr(), format);
1993 break;
1994 }
1995
1996 case GetLocalUnlinked: {
1997 GPRTemporary result(this);
1998
1999 m_jit.load64(JITCompiler::addressFor(node->unlinkedLocal()), result.gpr());
2000
2001 jsValueResult(result.gpr(), node);
2002 break;
2003 }
2004
2005 case MovHintAndCheck: {
2006 compileMovHintAndCheck(node);
2007 break;
2008 }
2009
2010 case InlineStart: {
2011 compileInlineStart(node);
2012 break;
2013 }
2014
2015 case MovHint:
2016 case ZombieHint: {
2017 RELEASE_ASSERT_NOT_REACHED();
2018 break;
2019 }
2020
2021 case SetLocal: {
2022 // SetLocal doubles as a hint as to where a node will be stored and
2023 // as a speculation point. So before we speculate make sure that we
2024 // know where the child of this node needs to go in the virtual
2025 // stack.
2026 compileMovHint(node);
2027
2028 if (node->variableAccessData()->shouldUnboxIfPossible()) {
2029 if (node->variableAccessData()->shouldUseDoubleFormat()) {
2030 SpeculateDoubleOperand value(this, node->child1());
2031 m_jit.storeDouble(value.fpr(), JITCompiler::addressFor(node->local()));
2032 noResult(node);
2033 // Indicate that it's no longer necessary to retrieve the value of
2034 // this bytecode variable from registers or other locations in the stack,
2035 // but that it is stored as a double.
2036 recordSetLocal(node->local(), ValueSource(DoubleInJSStack));
2037 break;
2038 }
2039
2040 SpeculatedType predictedType = node->variableAccessData()->argumentAwarePrediction();
2041 if (isInt32Speculation(predictedType)) {
2042 SpeculateIntegerOperand value(this, node->child1());
2043 m_jit.store32(value.gpr(), JITCompiler::payloadFor(node->local()));
2044 noResult(node);
2045 recordSetLocal(node->local(), ValueSource(Int32InJSStack));
2046 break;
2047 }
2048 if (isCellSpeculation(predictedType)) {
2049 SpeculateCellOperand cell(this, node->child1());
2050 GPRReg cellGPR = cell.gpr();
2051 m_jit.store64(cellGPR, JITCompiler::addressFor(node->local()));
2052 noResult(node);
2053 recordSetLocal(node->local(), ValueSource(CellInJSStack));
2054 break;
2055 }
2056 if (isBooleanSpeculation(predictedType)) {
2057 SpeculateBooleanOperand boolean(this, node->child1());
2058 m_jit.store64(boolean.gpr(), JITCompiler::addressFor(node->local()));
2059 noResult(node);
2060 recordSetLocal(node->local(), ValueSource(BooleanInJSStack));
2061 break;
2062 }
2063 }
2064
2065 JSValueOperand value(this, node->child1());
2066 m_jit.store64(value.gpr(), JITCompiler::addressFor(node->local()));
2067 noResult(node);
2068
2069 recordSetLocal(node->local(), ValueSource(ValueInJSStack));
2070
2071 // If we're storing an arguments object that has been optimized away,
2072 // our variable event stream for OSR exit now reflects the optimized
2073 // value (JSValue()). On the slow path, we want an arguments object
2074 // instead. We add an additional move hint to show OSR exit that it
2075 // needs to reconstruct the arguments object.
2076 if (node->child1()->op() == PhantomArguments)
2077 compileMovHint(node);
2078
2079 break;
2080 }
2081
2082 case SetArgument:
2083 // This is a no-op; it just marks the fact that the argument is being used.
2084 // But it may be profitable to use this as a hook to run speculation checks
2085 // on arguments, thereby allowing us to trivially eliminate such checks if
2086 // the argument is not used.
2087 break;
2088
2089 case BitAnd:
2090 case BitOr:
2091 case BitXor:
2092 if (isInt32Constant(node->child1().node())) {
2093 SpeculateIntegerOperand op2(this, node->child2());
2094 GPRTemporary result(this, op2);
2095
2096 bitOp(op, valueOfInt32Constant(node->child1().node()), op2.gpr(), result.gpr());
2097
2098 integerResult(result.gpr(), node);
2099 } else if (isInt32Constant(node->child2().node())) {
2100 SpeculateIntegerOperand op1(this, node->child1());
2101 GPRTemporary result(this, op1);
2102
2103 bitOp(op, valueOfInt32Constant(node->child2().node()), op1.gpr(), result.gpr());
2104
2105 integerResult(result.gpr(), node);
2106 } else {
2107 SpeculateIntegerOperand op1(this, node->child1());
2108 SpeculateIntegerOperand op2(this, node->child2());
2109 GPRTemporary result(this, op1, op2);
2110
2111 GPRReg reg1 = op1.gpr();
2112 GPRReg reg2 = op2.gpr();
2113 bitOp(op, reg1, reg2, result.gpr());
2114
2115 integerResult(result.gpr(), node);
2116 }
2117 break;
2118
2119 case BitRShift:
2120 case BitLShift:
2121 case BitURShift:
2122 if (isInt32Constant(node->child2().node())) {
2123 SpeculateIntegerOperand op1(this, node->child1());
2124 GPRTemporary result(this, op1);
2125
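// Constant shift amounts are masked to five bits, matching ECMAScript's modulo-32 semantics for 32-bit shifts.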
2126 shiftOp(op, op1.gpr(), valueOfInt32Constant(node->child2().node()) & 0x1f, result.gpr());
2127
2128 integerResult(result.gpr(), node);
2129 } else {
2130                 // Do not allow the shift amount to be used as the result; MacroAssembler does not permit this.
2131 SpeculateIntegerOperand op1(this, node->child1());
2132 SpeculateIntegerOperand op2(this, node->child2());
2133 GPRTemporary result(this, op1);
2134
2135 GPRReg reg1 = op1.gpr();
2136 GPRReg reg2 = op2.gpr();
2137 shiftOp(op, reg1, reg2, result.gpr());
2138
2139 integerResult(result.gpr(), node);
2140 }
2141 break;
2142
2143 case UInt32ToNumber: {
2144 compileUInt32ToNumber(node);
2145 break;
2146 }
2147
2148 case DoubleAsInt32: {
2149 compileDoubleAsInt32(node);
2150 break;
2151 }
2152
2153 case ValueToInt32: {
2154 compileValueToInt32(node);
2155 break;
2156 }
2157
2158 case Int32ToDouble:
2159 case ForwardInt32ToDouble: {
2160 compileInt32ToDouble(node);
2161 break;
2162 }
2163
2164 case ValueAdd:
2165 case ArithAdd:
2166 compileAdd(node);
2167 break;
2168
2169 case MakeRope:
2170 compileMakeRope(node);
2171 break;
2172
2173 case ArithSub:
2174 compileArithSub(node);
2175 break;
2176
2177 case ArithNegate:
2178 compileArithNegate(node);
2179 break;
2180
2181 case ArithMul:
2182 compileArithMul(node);
2183 break;
2184
2185 case ArithIMul:
2186 compileArithIMul(node);
2187 break;
2188
2189 case ArithDiv: {
2190 switch (node->binaryUseKind()) {
2191 case Int32Use: {
2192 #if CPU(X86) || CPU(X86_64)
2193 compileIntegerArithDivForX86(node);
2194 #elif CPU(ARM64)
2195 compileIntegerArithDivForARM64(node);
2196 #else
2197         // See DFGFixupPhase - on any architecture other than X86[_64] or ARM64 we'll force the prediction to double.
2198 ASSERT_NOT_REACHED();
2199 #endif
2200 break;
2201 }
2202
2203 case NumberUse: {
2204 SpeculateDoubleOperand op1(this, node->child1());
2205 SpeculateDoubleOperand op2(this, node->child2());
2206 FPRTemporary result(this, op1);
2207
2208 FPRReg reg1 = op1.fpr();
2209 FPRReg reg2 = op2.fpr();
2210 m_jit.divDouble(reg1, reg2, result.fpr());
2211
2212 doubleResult(result.fpr(), node);
2213 break;
2214 }
2215
2216 default:
2217 RELEASE_ASSERT_NOT_REACHED();
2218 break;
2219 }
2220 break;
2221 }
2222
2223 case ArithMod: {
2224 compileArithMod(node);
2225 break;
2226 }
2227
2228 case ArithAbs: {
2229 switch (node->child1().useKind()) {
2230 case Int32Use: {
2231 SpeculateIntegerOperand op1(this, node->child1());
2232 GPRTemporary result(this);
2233 GPRTemporary scratch(this);
2234
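// Branchless abs(): arithmetic-shifting right by 31 yields the sign mask (0 or -1); adding and then XOR-ing the mask negates negative inputs. The only input that overflows is INT32_MIN, which maps back to 1 << 31 and is caught by the speculation check below.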
2235 m_jit.zeroExtend32ToPtr(op1.gpr(), result.gpr());
2236 m_jit.rshift32(result.gpr(), MacroAssembler::TrustedImm32(31), scratch.gpr());
2237 m_jit.add32(scratch.gpr(), result.gpr());
2238 m_jit.xor32(scratch.gpr(), result.gpr());
2239 speculationCheck(Overflow, JSValueRegs(), 0, m_jit.branch32(MacroAssembler::Equal, result.gpr(), MacroAssembler::TrustedImm32(1 << 31)));
2240 integerResult(result.gpr(), node);
2241 break;
2242 }
2243
2244 case NumberUse: {
2245 SpeculateDoubleOperand op1(this, node->child1());
2246 FPRTemporary result(this);
2247
2248 m_jit.absDouble(op1.fpr(), result.fpr());
2249 doubleResult(result.fpr(), node);
2250 break;
2251 }
2252
2253 default:
2254 RELEASE_ASSERT_NOT_REACHED();
2255 break;
2256 }
2257 break;
2258 }
2259
2260 case ArithMin:
2261 case ArithMax: {
2262 switch (node->binaryUseKind()) {
2263 case Int32Use: {
2264 SpeculateStrictInt32Operand op1(this, node->child1());
2265 SpeculateStrictInt32Operand op2(this, node->child2());
2266 GPRTemporary result(this, op1);
2267
2268 MacroAssembler::Jump op1Less = m_jit.branch32(op == ArithMin ? MacroAssembler::LessThan : MacroAssembler::GreaterThan, op1.gpr(), op2.gpr());
2269 m_jit.move(op2.gpr(), result.gpr());
2270 if (op1.gpr() != result.gpr()) {
2271 MacroAssembler::Jump done = m_jit.jump();
2272 op1Less.link(&m_jit);
2273 m_jit.move(op1.gpr(), result.gpr());
2274 done.link(&m_jit);
2275 } else
2276 op1Less.link(&m_jit);
2277
2278 integerResult(result.gpr(), node);
2279 break;
2280 }
2281
2282 case NumberUse: {
2283 SpeculateDoubleOperand op1(this, node->child1());
2284 SpeculateDoubleOperand op2(this, node->child2());
2285 FPRTemporary result(this, op1);
2286
2287 FPRReg op1FPR = op1.fpr();
2288 FPRReg op2FPR = op2.fpr();
2289 FPRReg resultFPR = result.fpr();
2290
2291 MacroAssembler::JumpList done;
2292
2293 MacroAssembler::Jump op1Less = m_jit.branchDouble(op == ArithMin ? MacroAssembler::DoubleLessThan : MacroAssembler::DoubleGreaterThan, op1FPR, op2FPR);
2294
2295             // Either op2 is the result (the lesser one for ArithMin, the greater one for ArithMax), or one of the operands is NaN.
2296 MacroAssembler::Jump op2Less = m_jit.branchDouble(op == ArithMin ? MacroAssembler::DoubleGreaterThanOrEqual : MacroAssembler::DoubleLessThanOrEqual, op1FPR, op2FPR);
2297
2298 // Unordered case. We don't know which of op1, op2 is NaN. Manufacture NaN by adding
2299 // op1 + op2 and putting it into result.
2300 m_jit.addDouble(op1FPR, op2FPR, resultFPR);
2301 done.append(m_jit.jump());
2302
2303 op2Less.link(&m_jit);
2304 m_jit.moveDouble(op2FPR, resultFPR);
2305
2306 if (op1FPR != resultFPR) {
2307 done.append(m_jit.jump());
2308
2309 op1Less.link(&m_jit);
2310 m_jit.moveDouble(op1FPR, resultFPR);
2311 } else
2312 op1Less.link(&m_jit);
2313
2314 done.link(&m_jit);
2315
2316 doubleResult(resultFPR, node);
2317 break;
2318 }
2319
2320 default:
2321 RELEASE_ASSERT_NOT_REACHED();
2322 break;
2323 }
2324 break;
2325 }
2326
2327 case ArithSqrt: {
2328 SpeculateDoubleOperand op1(this, node->child1());
2329 FPRTemporary result(this, op1);
2330
2331 m_jit.sqrtDouble(op1.fpr(), result.fpr());
2332
2333 doubleResult(result.fpr(), node);
2334 break;
2335 }
2336
2337 case LogicalNot:
2338 compileLogicalNot(node);
2339 break;
2340
2341 case CompareLess:
2342 if (compare(node, JITCompiler::LessThan, JITCompiler::DoubleLessThan, operationCompareLess))
2343 return;
2344 break;
2345
2346 case CompareLessEq:
2347 if (compare(node, JITCompiler::LessThanOrEqual, JITCompiler::DoubleLessThanOrEqual, operationCompareLessEq))
2348 return;
2349 break;
2350
2351 case CompareGreater:
2352 if (compare(node, JITCompiler::GreaterThan, JITCompiler::DoubleGreaterThan, operationCompareGreater))
2353 return;
2354 break;
2355
2356 case CompareGreaterEq:
2357 if (compare(node, JITCompiler::GreaterThanOrEqual, JITCompiler::DoubleGreaterThanOrEqual, operationCompareGreaterEq))
2358 return;
2359 break;
2360
2361 case CompareEqConstant:
2362 ASSERT(isNullConstant(node->child2().node()));
2363 if (nonSpeculativeCompareNull(node, node->child1()))
2364 return;
2365 break;
2366
2367 case CompareEq:
2368 if (compare(node, JITCompiler::Equal, JITCompiler::DoubleEqual, operationCompareEq))
2369 return;
2370 break;
2371
2372 case CompareStrictEqConstant:
2373 if (compileStrictEqForConstant(node, node->child1(), valueOfJSConstant(node->child2().node())))
2374 return;
2375 break;
2376
2377 case CompareStrictEq:
2378 if (compileStrictEq(node))
2379 return;
2380 break;
2381
2382 case StringCharCodeAt: {
2383 compileGetCharCodeAt(node);
2384 break;
2385 }
2386
2387 case StringCharAt: {
2388         // Relies on the StringCharAt node having the same basic layout as GetByVal.
2389 compileGetByValOnString(node);
2390 break;
2391 }
2392
2393 case StringFromCharCode: {
2394 compileFromCharCode(node);
2395 break;
2396 }
2397
2398 case CheckArray: {
2399 checkArray(node);
2400 break;
2401 }
2402
2403 case Arrayify:
2404 case ArrayifyToStructure: {
2405 arrayify(node);
2406 break;
2407 }
2408
2409 case GetByVal: {
2410 switch (node->arrayMode().type()) {
2411 case Array::SelectUsingPredictions:
2412 case Array::ForceExit:
2413 RELEASE_ASSERT_NOT_REACHED();
2414 terminateSpeculativeExecution(InadequateCoverage, JSValueRegs(), 0);
2415 break;
2416 case Array::Generic: {
2417 JSValueOperand base(this, node->child1());
2418 JSValueOperand property(this, node->child2());
2419 GPRReg baseGPR = base.gpr();
2420 GPRReg propertyGPR = property.gpr();
2421
2422 flushRegisters();
2423 GPRResult result(this);
2424 callOperation(operationGetByVal, result.gpr(), baseGPR, propertyGPR);
2425
2426 jsValueResult(result.gpr(), node);
2427 break;
2428 }
2429 case Array::Int32:
2430 case Array::Contiguous: {
2431 if (node->arrayMode().isInBounds()) {
2432 SpeculateStrictInt32Operand property(this, node->child2());
2433 StorageOperand storage(this, node->child3());
2434
2435 GPRReg propertyReg = property.gpr();
2436 GPRReg storageReg = storage.gpr();
2437
2438 if (!m_compileOkay)
2439 return;
2440
2441 speculationCheck(OutOfBounds, JSValueRegs(), 0, m_jit.branch32(MacroAssembler::AboveOrEqual, propertyReg, MacroAssembler::Address(storageReg, Butterfly::offsetOfPublicLength())));
2442
2443 GPRTemporary result(this);
2444 m_jit.load64(MacroAssembler::BaseIndex(storageReg, propertyReg, MacroAssembler::TimesEight), result.gpr());
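// Holes in Int32/Contiguous storage hold the empty value (all zero bits), which is never a valid JSValue encoding, so a zero load means the read hit a hole.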
2445 speculationCheck(LoadFromHole, JSValueRegs(), 0, m_jit.branchTest64(MacroAssembler::Zero, result.gpr()));
2446 jsValueResult(result.gpr(), node, node->arrayMode().type() == Array::Int32 ? DataFormatJSInteger : DataFormatJS);
2447 break;
2448 }
2449
2450 SpeculateCellOperand base(this, node->child1());
2451 SpeculateStrictInt32Operand property(this, node->child2());
2452 StorageOperand storage(this, node->child3());
2453
2454 GPRReg baseReg = base.gpr();
2455 GPRReg propertyReg = property.gpr();
2456 GPRReg storageReg = storage.gpr();
2457
2458 if (!m_compileOkay)
2459 return;
2460
2461 GPRTemporary result(this);
2462 GPRReg resultReg = result.gpr();
2463
2464 MacroAssembler::JumpList slowCases;
2465
2466 slowCases.append(m_jit.branch32(MacroAssembler::AboveOrEqual, propertyReg, MacroAssembler::Address(storageReg, Butterfly::offsetOfPublicLength())));
2467
2468 m_jit.load64(MacroAssembler::BaseIndex(storageReg, propertyReg, MacroAssembler::TimesEight), resultReg);
2469 slowCases.append(m_jit.branchTest64(MacroAssembler::Zero, resultReg));
2470
2471 addSlowPathGenerator(
2472 slowPathCall(
2473 slowCases, this, operationGetByValArrayInt,
2474 result.gpr(), baseReg, propertyReg));
2475
2476 jsValueResult(resultReg, node);
2477 break;
2478 }
2479
2480 case Array::Double: {
2481 if (node->arrayMode().isInBounds()) {
2482 if (node->arrayMode().isSaneChain()) {
2483 JSGlobalObject* globalObject = m_jit.globalObjectFor(node->codeOrigin);
2484 ASSERT(globalObject->arrayPrototypeChainIsSane());
2485 globalObject->arrayPrototype()->structure()->addTransitionWatchpoint(speculationWatchpoint());
2486 globalObject->objectPrototype()->structure()->addTransitionWatchpoint(speculationWatchpoint());
2487 }
2488
2489 SpeculateStrictInt32Operand property(this, node->child2());
2490 StorageOperand storage(this, node->child3());
2491
2492 GPRReg propertyReg = property.gpr();
2493 GPRReg storageReg = storage.gpr();
2494
2495 if (!m_compileOkay)
2496 return;
2497
2498 speculationCheck(OutOfBounds, JSValueRegs(), 0, m_jit.branch32(MacroAssembler::AboveOrEqual, propertyReg, MacroAssembler::Address(storageReg, Butterfly::offsetOfPublicLength())));
2499
2500 FPRTemporary result(this);
2501 m_jit.loadDouble(MacroAssembler::BaseIndex(storageReg, propertyReg, MacroAssembler::TimesEight), result.fpr());
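// Double storage marks holes with a NaN bit pattern, so an unordered self-comparison detects a hole read; with a sane array prototype chain (watchpointed above) the check is skipped and the NaN is allowed to flow through.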
2502 if (!node->arrayMode().isSaneChain())
2503 speculationCheck(LoadFromHole, JSValueRegs(), 0, m_jit.branchDouble(MacroAssembler::DoubleNotEqualOrUnordered, result.fpr(), result.fpr()));
2504 doubleResult(result.fpr(), node);
2505 break;
2506 }
2507
2508 SpeculateCellOperand base(this, node->child1());
2509 SpeculateStrictInt32Operand property(this, node->child2());
2510 StorageOperand storage(this, node->child3());
2511
2512 GPRReg baseReg = base.gpr();
2513 GPRReg propertyReg = property.gpr();
2514 GPRReg storageReg = storage.gpr();
2515
2516 if (!m_compileOkay)
2517 return;
2518
2519 GPRTemporary result(this);
2520 FPRTemporary temp(this);
2521 GPRReg resultReg = result.gpr();
2522 FPRReg tempReg = temp.fpr();
2523
2524 MacroAssembler::JumpList slowCases;
2525
2526 slowCases.append(m_jit.branch32(MacroAssembler::AboveOrEqual, propertyReg, MacroAssembler::Address(storageReg, Butterfly::offsetOfPublicLength())));
2527
2528 m_jit.loadDouble(MacroAssembler::BaseIndex(storageReg, propertyReg, MacroAssembler::TimesEight), tempReg);
2529 slowCases.append(m_jit.branchDouble(MacroAssembler::DoubleNotEqualOrUnordered, tempReg, tempReg));
2530 boxDouble(tempReg, resultReg);
2531
2532 addSlowPathGenerator(
2533 slowPathCall(
2534 slowCases, this, operationGetByValArrayInt,
2535 result.gpr(), baseReg, propertyReg));
2536
2537 jsValueResult(resultReg, node);
2538 break;
2539 }
2540
2541 case Array::ArrayStorage:
2542 case Array::SlowPutArrayStorage: {
2543 if (node->arrayMode().isInBounds()) {
2544 SpeculateStrictInt32Operand property(this, node->child2());
2545 StorageOperand storage(this, node->child3());
2546
2547 GPRReg propertyReg = property.gpr();
2548 GPRReg storageReg = storage.gpr();
2549
2550 if (!m_compileOkay)
2551 return;
2552
2553 speculationCheck(OutOfBounds, JSValueRegs(), 0, m_jit.branch32(MacroAssembler::AboveOrEqual, propertyReg, MacroAssembler::Address(storageReg, ArrayStorage::vectorLengthOffset())));
2554
2555 GPRTemporary result(this);
2556 m_jit.load64(MacroAssembler::BaseIndex(storageReg, propertyReg, MacroAssembler::TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0])), result.gpr());
2557 speculationCheck(LoadFromHole, JSValueRegs(), 0, m_jit.branchTest64(MacroAssembler::Zero, result.gpr()));
2558
2559 jsValueResult(result.gpr(), node);
2560 break;
2561 }
2562
2563 SpeculateCellOperand base(this, node->child1());
2564 SpeculateStrictInt32Operand property(this, node->child2());
2565 StorageOperand storage(this, node->child3());
2566
2567 GPRReg baseReg = base.gpr();
2568 GPRReg propertyReg = property.gpr();
2569 GPRReg storageReg = storage.gpr();
2570
2571 if (!m_compileOkay)
2572 return;
2573
2574 GPRTemporary result(this);
2575 GPRReg resultReg = result.gpr();
2576
2577 MacroAssembler::JumpList slowCases;
2578
2579 slowCases.append(m_jit.branch32(MacroAssembler::AboveOrEqual, propertyReg, MacroAssembler::Address(storageReg, ArrayStorage::vectorLengthOffset())));
2580
2581 m_jit.load64(MacroAssembler::BaseIndex(storageReg, propertyReg, MacroAssembler::TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0])), resultReg);
2582 slowCases.append(m_jit.branchTest64(MacroAssembler::Zero, resultReg));
2583
2584 addSlowPathGenerator(
2585 slowPathCall(
2586 slowCases, this, operationGetByValArrayInt,
2587 result.gpr(), baseReg, propertyReg));
2588
2589 jsValueResult(resultReg, node);
2590 break;
2591 }
2592 case Array::String:
2593 compileGetByValOnString(node);
2594 break;
2595 case Array::Arguments:
2596 compileGetByValOnArguments(node);
2597 break;
2598 case Array::Int8Array:
2599 compileGetByValOnIntTypedArray(m_jit.vm()->int8ArrayDescriptor(), node, sizeof(int8_t), SignedTypedArray);
2600 break;
2601 case Array::Int16Array:
2602 compileGetByValOnIntTypedArray(m_jit.vm()->int16ArrayDescriptor(), node, sizeof(int16_t), SignedTypedArray);
2603 break;
2604 case Array::Int32Array:
2605 compileGetByValOnIntTypedArray(m_jit.vm()->int32ArrayDescriptor(), node, sizeof(int32_t), SignedTypedArray);
2606 break;
2607 case Array::Uint8Array:
2608 compileGetByValOnIntTypedArray(m_jit.vm()->uint8ArrayDescriptor(), node, sizeof(uint8_t), UnsignedTypedArray);
2609 break;
2610 case Array::Uint8ClampedArray:
2611 compileGetByValOnIntTypedArray(m_jit.vm()->uint8ClampedArrayDescriptor(), node, sizeof(uint8_t), UnsignedTypedArray);
2612 break;
2613 case Array::Uint16Array:
2614 compileGetByValOnIntTypedArray(m_jit.vm()->uint16ArrayDescriptor(), node, sizeof(uint16_t), UnsignedTypedArray);
2615 break;
2616 case Array::Uint32Array:
2617 compileGetByValOnIntTypedArray(m_jit.vm()->uint32ArrayDescriptor(), node, sizeof(uint32_t), UnsignedTypedArray);
2618 break;
2619 case Array::Float32Array:
2620 compileGetByValOnFloatTypedArray(m_jit.vm()->float32ArrayDescriptor(), node, sizeof(float));
2621 break;
2622 case Array::Float64Array:
2623 compileGetByValOnFloatTypedArray(m_jit.vm()->float64ArrayDescriptor(), node, sizeof(double));
2624 break;
2625 default:
2626 RELEASE_ASSERT_NOT_REACHED();
2627 break;
2628 }
2629 break;
2630 }
2631
2632 case PutByVal:
2633 case PutByValAlias: {
2634 Edge child1 = m_jit.graph().varArgChild(node, 0);
2635 Edge child2 = m_jit.graph().varArgChild(node, 1);
2636 Edge child3 = m_jit.graph().varArgChild(node, 2);
2637 Edge child4 = m_jit.graph().varArgChild(node, 3);
2638
2639 ArrayMode arrayMode = node->arrayMode().modeForPut();
2640 bool alreadyHandled = false;
2641
2642 switch (arrayMode.type()) {
2643 case Array::SelectUsingPredictions:
2644 case Array::ForceExit:
2645 RELEASE_ASSERT_NOT_REACHED();
2646 terminateSpeculativeExecution(InadequateCoverage, JSValueRegs(), 0);
2647 alreadyHandled = true;
2648 break;
2649 case Array::Generic: {
2650 RELEASE_ASSERT(node->op() == PutByVal);
2651
2652 JSValueOperand arg1(this, child1);
2653 JSValueOperand arg2(this, child2);
2654 JSValueOperand arg3(this, child3);
2655 GPRReg arg1GPR = arg1.gpr();
2656 GPRReg arg2GPR = arg2.gpr();
2657 GPRReg arg3GPR = arg3.gpr();
2658 flushRegisters();
2659
2660 callOperation(m_jit.strictModeFor(node->codeOrigin) ? operationPutByValStrict : operationPutByValNonStrict, arg1GPR, arg2GPR, arg3GPR);
2661
2662 noResult(node);
2663 alreadyHandled = true;
2664 break;
2665 }
2666 default:
2667 break;
2668 }
2669
2670 if (alreadyHandled)
2671 break;
2672
2673 // FIXME: the base may not be necessary for some array access modes. But we have to
2674 // keep it alive to this point, so it's likely to be in a register anyway. Likely
2675 // no harm in locking it here.
2676 SpeculateCellOperand base(this, child1);
2677 SpeculateStrictInt32Operand property(this, child2);
2678
2679 GPRReg baseReg = base.gpr();
2680 GPRReg propertyReg = property.gpr();
2681
2682 switch (arrayMode.type()) {
2683 case Array::Int32:
2684 case Array::Contiguous: {
2685 JSValueOperand value(this, child3, ManualOperandSpeculation);
2686
2687 GPRReg valueReg = value.gpr();
2688
2689 if (!m_compileOkay)
2690 return;
2691
2692 if (arrayMode.type() == Array::Int32) {
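// Boxed int32s are the only values at or above the tag-type-number constant in the 64-bit encoding, so Below means the value fails the Int32 speculation.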
2693 DFG_TYPE_CHECK(
2694 JSValueRegs(valueReg), child3, SpecInt32,
2695 m_jit.branch64(
2696 MacroAssembler::Below, valueReg, GPRInfo::tagTypeNumberRegister));
2697 }
2698
2699 if (arrayMode.type() == Array::Contiguous && Heap::isWriteBarrierEnabled()) {
2700 GPRTemporary scratch(this);
2701 writeBarrier(baseReg, value.gpr(), child3, WriteBarrierForPropertyAccess, scratch.gpr());
2702 }
2703
2704 StorageOperand storage(this, child4);
2705 GPRReg storageReg = storage.gpr();
2706
2707 if (node->op() == PutByValAlias) {
2708 // Store the value to the array.
2709 GPRReg propertyReg = property.gpr();
2710 GPRReg valueReg = value.gpr();
2711 m_jit.store64(valueReg, MacroAssembler::BaseIndex(storageReg, propertyReg, MacroAssembler::TimesEight));
2712
2713 noResult(node);
2714 break;
2715 }
2716
2717 GPRTemporary temporary;
2718 GPRReg temporaryReg = temporaryRegisterForPutByVal(temporary, node);
2719
2720 MacroAssembler::Jump slowCase;
2721
2722 if (arrayMode.isInBounds()) {
2723 speculationCheck(
2724 StoreToHoleOrOutOfBounds, JSValueRegs(), 0,
2725 m_jit.branch32(MacroAssembler::AboveOrEqual, propertyReg, MacroAssembler::Address(storageReg, Butterfly::offsetOfPublicLength())));
2726 } else {
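// The index may be at or past the public length: if it is still within the vector, bump the public length to index + 1; if it is past the vector length, take the slow path (or fail the speculation when out-of-bounds stores were not expected).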
2727 MacroAssembler::Jump inBounds = m_jit.branch32(MacroAssembler::Below, propertyReg, MacroAssembler::Address(storageReg, Butterfly::offsetOfPublicLength()));
2728
2729 slowCase = m_jit.branch32(MacroAssembler::AboveOrEqual, propertyReg, MacroAssembler::Address(storageReg, Butterfly::offsetOfVectorLength()));
2730
2731 if (!arrayMode.isOutOfBounds())
2732 speculationCheck(OutOfBounds, JSValueRegs(), 0, slowCase);
2733
2734 m_jit.add32(TrustedImm32(1), propertyReg, temporaryReg);
2735 m_jit.store32(temporaryReg, MacroAssembler::Address(storageReg, Butterfly::offsetOfPublicLength()));
2736
2737 inBounds.link(&m_jit);
2738 }
2739
2740 m_jit.store64(valueReg, MacroAssembler::BaseIndex(storageReg, propertyReg, MacroAssembler::TimesEight));
2741
2742 base.use();
2743 property.use();
2744 value.use();
2745 storage.use();
2746
2747 if (arrayMode.isOutOfBounds()) {
2748 addSlowPathGenerator(
2749 slowPathCall(
2750 slowCase, this,
2751 m_jit.codeBlock()->isStrictMode() ? operationPutByValBeyondArrayBoundsStrict : operationPutByValBeyondArrayBoundsNonStrict,
2752 NoResult, baseReg, propertyReg, valueReg));
2753 }
2754
2755 noResult(node, UseChildrenCalledExplicitly);
2756 break;
2757 }
2758
2759 case Array::Double: {
2760 compileDoublePutByVal(node, base, property);
2761 break;
2762 }
2763
2764 case Array::ArrayStorage:
2765 case Array::SlowPutArrayStorage: {
2766 JSValueOperand value(this, child3);
2767
2768 GPRReg valueReg = value.gpr();
2769
2770 if (!m_compileOkay)
2771 return;
2772
2773 if (Heap::isWriteBarrierEnabled()) {
2774 GPRTemporary scratch(this);
2775 writeBarrier(baseReg, value.gpr(), child3, WriteBarrierForPropertyAccess, scratch.gpr());
2776 }
2777
2778 StorageOperand storage(this, child4);
2779 GPRReg storageReg = storage.gpr();
2780
2781 if (node->op() == PutByValAlias) {
2782 // Store the value to the array.
2783 GPRReg propertyReg = property.gpr();
2784 GPRReg valueReg = value.gpr();
2785 m_jit.store64(valueReg, MacroAssembler::BaseIndex(storageReg, propertyReg, MacroAssembler::TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0])));
2786
2787 noResult(node);
2788 break;
2789 }
2790
2791 GPRTemporary temporary;
2792 GPRReg temporaryReg = temporaryRegisterForPutByVal(temporary, node);
2793
2794 MacroAssembler::JumpList slowCases;
2795
2796 MacroAssembler::Jump beyondArrayBounds = m_jit.branch32(MacroAssembler::AboveOrEqual, propertyReg, MacroAssembler::Address(storageReg, ArrayStorage::vectorLengthOffset()));
2797 if (!arrayMode.isOutOfBounds())
2798 speculationCheck(OutOfBounds, JSValueRegs(), 0, beyondArrayBounds);
2799 else
2800 slowCases.append(beyondArrayBounds);
2801
2802 // Check if we're writing to a hole; if so increment m_numValuesInVector.
2803 if (arrayMode.isInBounds()) {
2804 speculationCheck(
2805 StoreToHole, JSValueRegs(), 0,
2806 m_jit.branchTest64(MacroAssembler::Zero, MacroAssembler::BaseIndex(storageReg, propertyReg, MacroAssembler::TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]))));
2807 } else {
2808 MacroAssembler::Jump notHoleValue = m_jit.branchTest64(MacroAssembler::NonZero, MacroAssembler::BaseIndex(storageReg, propertyReg, MacroAssembler::TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0])));
2809 if (arrayMode.isSlowPut()) {
2810 // This is sort of strange. If we wanted to optimize this code path, we would invert
2811 // the above branch. But it's simply not worth it since this only happens if we're
2812 // already having a bad time.
2813 slowCases.append(m_jit.jump());
2814 } else {
2815 m_jit.add32(TrustedImm32(1), MacroAssembler::Address(storageReg, ArrayStorage::numValuesInVectorOffset()));
2816
2817                 // If we're writing to a hole we might be growing the array; if so, update the length as well.
2818 MacroAssembler::Jump lengthDoesNotNeedUpdate = m_jit.branch32(MacroAssembler::Below, propertyReg, MacroAssembler::Address(storageReg, ArrayStorage::lengthOffset()));
2819 m_jit.add32(TrustedImm32(1), propertyReg, temporaryReg);
2820 m_jit.store32(temporaryReg, MacroAssembler::Address(storageReg, ArrayStorage::lengthOffset()));
2821
2822 lengthDoesNotNeedUpdate.link(&m_jit);
2823 }
2824 notHoleValue.link(&m_jit);
2825 }
2826
2827 // Store the value to the array.
2828 m_jit.store64(valueReg, MacroAssembler::BaseIndex(storageReg, propertyReg, MacroAssembler::TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0])));
2829
2830 base.use();
2831 property.use();
2832 value.use();
2833 storage.use();
2834
2835 if (!slowCases.empty()) {
2836 addSlowPathGenerator(
2837 slowPathCall(
2838 slowCases, this,
2839 m_jit.codeBlock()->isStrictMode() ? operationPutByValBeyondArrayBoundsStrict : operationPutByValBeyondArrayBoundsNonStrict,
2840 NoResult, baseReg, propertyReg, valueReg));
2841 }
2842
2843 noResult(node, UseChildrenCalledExplicitly);
2844 break;
2845 }
2846
2847 case Array::Arguments: {
2848 JSValueOperand value(this, child3);
2849 GPRTemporary scratch(this);
2850 GPRTemporary scratch2(this);
2851
2852 GPRReg valueReg = value.gpr();
2853 GPRReg scratchReg = scratch.gpr();
2854 GPRReg scratch2Reg = scratch2.gpr();
2855
2856 if (!m_compileOkay)
2857 return;
2858
2859         // Two conservative checks: the index must be within the argument count, and there must be no slow arguments.
2860 speculationCheck(
2861 Uncountable, JSValueSource(), 0,
2862 m_jit.branch32(
2863 MacroAssembler::AboveOrEqual, propertyReg,
2864 MacroAssembler::Address(baseReg, OBJECT_OFFSETOF(Arguments, m_numArguments))));
2865 speculationCheck(
2866 Uncountable, JSValueSource(), 0,
2867 m_jit.branchTestPtr(
2868 MacroAssembler::NonZero,
2869 MacroAssembler::Address(
2870 baseReg, OBJECT_OFFSETOF(Arguments, m_slowArguments))));
2871
2872 m_jit.move(propertyReg, scratch2Reg);
2873 m_jit.neg32(scratch2Reg);
2874 m_jit.signExtend32ToPtr(scratch2Reg, scratch2Reg);
2875 m_jit.loadPtr(
2876 MacroAssembler::Address(baseReg, OBJECT_OFFSETOF(Arguments, m_registers)),
2877 scratchReg);
2878
2879 m_jit.store64(
2880 valueReg,
2881 MacroAssembler::BaseIndex(
2882 scratchReg, scratch2Reg, MacroAssembler::TimesEight,
2883 CallFrame::thisArgumentOffset() * sizeof(Register) - sizeof(Register)));
2884
2885 noResult(node);
2886 break;
2887 }
2888
2889 case Array::Int8Array:
2890 compilePutByValForIntTypedArray(m_jit.vm()->int8ArrayDescriptor(), base.gpr(), property.gpr(), node, sizeof(int8_t), SignedTypedArray);
2891 break;
2892
2893 case Array::Int16Array:
2894 compilePutByValForIntTypedArray(m_jit.vm()->int16ArrayDescriptor(), base.gpr(), property.gpr(), node, sizeof(int16_t), SignedTypedArray);
2895 break;
2896
2897 case Array::Int32Array:
2898 compilePutByValForIntTypedArray(m_jit.vm()->int32ArrayDescriptor(), base.gpr(), property.gpr(), node, sizeof(int32_t), SignedTypedArray);
2899 break;
2900
2901 case Array::Uint8Array:
2902 compilePutByValForIntTypedArray(m_jit.vm()->uint8ArrayDescriptor(), base.gpr(), property.gpr(), node, sizeof(uint8_t), UnsignedTypedArray);
2903 break;
2904
2905 case Array::Uint8ClampedArray:
2906 compilePutByValForIntTypedArray(m_jit.vm()->uint8ClampedArrayDescriptor(), base.gpr(), property.gpr(), node, sizeof(uint8_t), UnsignedTypedArray, ClampRounding);
2907 break;
2908
2909 case Array::Uint16Array:
2910 compilePutByValForIntTypedArray(m_jit.vm()->uint16ArrayDescriptor(), base.gpr(), property.gpr(), node, sizeof(uint16_t), UnsignedTypedArray);
2911 break;
2912
2913 case Array::Uint32Array:
2914 compilePutByValForIntTypedArray(m_jit.vm()->uint32ArrayDescriptor(), base.gpr(), property.gpr(), node, sizeof(uint32_t), UnsignedTypedArray);
2915 break;
2916
2917 case Array::Float32Array:
2918 compilePutByValForFloatTypedArray(m_jit.vm()->float32ArrayDescriptor(), base.gpr(), property.gpr(), node, sizeof(float));
2919 break;
2920
2921 case Array::Float64Array:
2922 compilePutByValForFloatTypedArray(m_jit.vm()->float64ArrayDescriptor(), base.gpr(), property.gpr(), node, sizeof(double));
2923 break;
2924
2925 default:
2926 RELEASE_ASSERT_NOT_REACHED();
2927 break;
2928 }
2929
2930 break;
2931 }
2932
2933 case RegExpExec: {
2934 if (compileRegExpExec(node))
2935 return;
2936 if (!node->adjustedRefCount()) {
2937 SpeculateCellOperand base(this, node->child1());
2938 SpeculateCellOperand argument(this, node->child2());
2939 GPRReg baseGPR = base.gpr();
2940 GPRReg argumentGPR = argument.gpr();
2941
2942 flushRegisters();
2943 GPRResult result(this);
2944 callOperation(operationRegExpTest, result.gpr(), baseGPR, argumentGPR);
2945
2946 // Must use jsValueResult because otherwise we screw up register
2947 // allocation, which thinks that this node has a result.
2948 jsValueResult(result.gpr(), node);
2949 break;
2950 }
2951
2952 SpeculateCellOperand base(this, node->child1());
2953 SpeculateCellOperand argument(this, node->child2());
2954 GPRReg baseGPR = base.gpr();
2955 GPRReg argumentGPR = argument.gpr();
2956
2957 flushRegisters();
2958 GPRResult result(this);
2959 callOperation(operationRegExpExec, result.gpr(), baseGPR, argumentGPR);
2960
2961 jsValueResult(result.gpr(), node);
2962 break;
2963 }
2964
2965 case RegExpTest: {
2966 SpeculateCellOperand base(this, node->child1());
2967 SpeculateCellOperand argument(this, node->child2());
2968 GPRReg baseGPR = base.gpr();
2969 GPRReg argumentGPR = argument.gpr();
2970
2971 flushRegisters();
2972 GPRResult result(this);
2973 callOperation(operationRegExpTest, result.gpr(), baseGPR, argumentGPR);
2974
2975 // If we add a DataFormatBool, we should use it here.
2976 m_jit.or32(TrustedImm32(ValueFalse), result.gpr());
2977 jsValueResult(result.gpr(), node, DataFormatJSBoolean);
2978 break;
2979 }
2980
2981 case ArrayPush: {
2982 ASSERT(node->arrayMode().isJSArray());
2983
2984 SpeculateCellOperand base(this, node->child1());
2985 GPRTemporary storageLength(this);
2986
2987 GPRReg baseGPR = base.gpr();
2988 GPRReg storageLengthGPR = storageLength.gpr();
2989
2990 StorageOperand storage(this, node->child3());
2991 GPRReg storageGPR = storage.gpr();
2992
2993 switch (node->arrayMode().type()) {
2994 case Array::Int32:
2995 case Array::Contiguous: {
2996 JSValueOperand value(this, node->child2(), ManualOperandSpeculation);
2997 GPRReg valueGPR = value.gpr();
2998
2999 if (node->arrayMode().type() == Array::Int32) {
3000 DFG_TYPE_CHECK(
3001 JSValueRegs(valueGPR), node->child2(), SpecInt32,
3002 m_jit.branch64(
3003 MacroAssembler::Below, valueGPR, GPRInfo::tagTypeNumberRegister));
3004 }
3005
3006 if (node->arrayMode().type() != Array::Int32 && Heap::isWriteBarrierEnabled()) {
3007 GPRTemporary scratch(this);
3008 writeBarrier(baseGPR, valueGPR, node->child2(), WriteBarrierForPropertyAccess, scratch.gpr(), storageLengthGPR);
3009 }
3010
3011 m_jit.load32(MacroAssembler::Address(storageGPR, Butterfly::offsetOfPublicLength()), storageLengthGPR);
3012 MacroAssembler::Jump slowPath = m_jit.branch32(MacroAssembler::AboveOrEqual, storageLengthGPR, MacroAssembler::Address(storageGPR, Butterfly::offsetOfVectorLength()));
3013 m_jit.store64(valueGPR, MacroAssembler::BaseIndex(storageGPR, storageLengthGPR, MacroAssembler::TimesEight));
3014 m_jit.add32(TrustedImm32(1), storageLengthGPR);
3015 m_jit.store32(storageLengthGPR, MacroAssembler::Address(storageGPR, Butterfly::offsetOfPublicLength()));
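// Box the new length as an int32 JSValue; ArrayPush's result is the new array length.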
3016 m_jit.or64(GPRInfo::tagTypeNumberRegister, storageLengthGPR);
3017
3018 addSlowPathGenerator(
3019 slowPathCall(
3020 slowPath, this, operationArrayPush, NoResult, storageLengthGPR,
3021 valueGPR, baseGPR));
3022
3023 jsValueResult(storageLengthGPR, node);
3024 break;
3025 }
3026
3027 case Array::Double: {
3028 SpeculateDoubleOperand value(this, node->child2());
3029 FPRReg valueFPR = value.fpr();
3030
3031 DFG_TYPE_CHECK(
3032 JSValueRegs(), node->child2(), SpecRealNumber,
3033 m_jit.branchDouble(MacroAssembler::DoubleNotEqualOrUnordered, valueFPR, valueFPR));
3034
3035 m_jit.load32(MacroAssembler::Address(storageGPR, Butterfly::offsetOfPublicLength()), storageLengthGPR);
3036 MacroAssembler::Jump slowPath = m_jit.branch32(MacroAssembler::AboveOrEqual, storageLengthGPR, MacroAssembler::Address(storageGPR, Butterfly::offsetOfVectorLength()));
3037 m_jit.storeDouble(valueFPR, MacroAssembler::BaseIndex(storageGPR, storageLengthGPR, MacroAssembler::TimesEight));
3038 m_jit.add32(TrustedImm32(1), storageLengthGPR);
3039 m_jit.store32(storageLengthGPR, MacroAssembler::Address(storageGPR, Butterfly::offsetOfPublicLength()));
3040 m_jit.or64(GPRInfo::tagTypeNumberRegister, storageLengthGPR);
3041
3042 addSlowPathGenerator(
3043 slowPathCall(
3044 slowPath, this, operationArrayPushDouble, NoResult, storageLengthGPR,
3045 valueFPR, baseGPR));
3046
3047 jsValueResult(storageLengthGPR, node);
3048 break;
3049 }
3050
3051 case Array::ArrayStorage: {
3052 JSValueOperand value(this, node->child2());
3053 GPRReg valueGPR = value.gpr();
3054
3055 if (Heap::isWriteBarrierEnabled()) {
3056 GPRTemporary scratch(this);
3057 writeBarrier(baseGPR, valueGPR, node->child2(), WriteBarrierForPropertyAccess, scratch.gpr(), storageLengthGPR);
3058 }
3059
3060 m_jit.load32(MacroAssembler::Address(storageGPR, ArrayStorage::lengthOffset()), storageLengthGPR);
3061
3062 // Refuse to handle bizarre lengths.
3063 speculationCheck(Uncountable, JSValueRegs(), 0, m_jit.branch32(MacroAssembler::Above, storageLengthGPR, TrustedImm32(0x7ffffffe)));
3064
3065 MacroAssembler::Jump slowPath = m_jit.branch32(MacroAssembler::AboveOrEqual, storageLengthGPR, MacroAssembler::Address(storageGPR, ArrayStorage::vectorLengthOffset()));
3066
3067 m_jit.store64(valueGPR, MacroAssembler::BaseIndex(storageGPR, storageLengthGPR, MacroAssembler::TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0])));
3068
3069 m_jit.add32(TrustedImm32(1), storageLengthGPR);
3070 m_jit.store32(storageLengthGPR, MacroAssembler::Address(storageGPR, ArrayStorage::lengthOffset()));
3071 m_jit.add32(TrustedImm32(1), MacroAssembler::Address(storageGPR, OBJECT_OFFSETOF(ArrayStorage, m_numValuesInVector)));
3072 m_jit.or64(GPRInfo::tagTypeNumberRegister, storageLengthGPR);
3073
3074 addSlowPathGenerator(
3075 slowPathCall(
3076 slowPath, this, operationArrayPush, NoResult, storageLengthGPR,
3077 valueGPR, baseGPR));
3078
3079 jsValueResult(storageLengthGPR, node);
3080 break;
3081 }
3082
3083 default:
3084 CRASH();
3085 break;
3086 }
3087 break;
3088 }
3089
3090 case ArrayPop: {
3091 ASSERT(node->arrayMode().isJSArray());
3092
3093 SpeculateCellOperand base(this, node->child1());
3094 StorageOperand storage(this, node->child2());
3095 GPRTemporary value(this);
3096 GPRTemporary storageLength(this);
3097 FPRTemporary temp(this); // This is kind of lame, since we don't always need it. I'm relying on the fact that we don't have FPR pressure, especially in code that uses pop().
3098
3099 GPRReg baseGPR = base.gpr();
3100 GPRReg storageGPR = storage.gpr();
3101 GPRReg valueGPR = value.gpr();
3102 GPRReg storageLengthGPR = storageLength.gpr();
3103 FPRReg tempFPR = temp.fpr();
3104
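// Fast paths below: load the butterfly's public length, fall through to the undefined/slow
// cases if the array is empty, otherwise decrement the length, load the last element, and
// clear the vacated slot.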
3105 switch (node->arrayMode().type()) {
3106 case Array::Int32:
3107 case Array::Double:
3108 case Array::Contiguous: {
3109 m_jit.load32(
3110 MacroAssembler::Address(storageGPR, Butterfly::offsetOfPublicLength()), storageLengthGPR);
3111 MacroAssembler::Jump undefinedCase =
3112 m_jit.branchTest32(MacroAssembler::Zero, storageLengthGPR);
3113 m_jit.sub32(TrustedImm32(1), storageLengthGPR);
3114 m_jit.store32(
3115 storageLengthGPR, MacroAssembler::Address(storageGPR, Butterfly::offsetOfPublicLength()));
3116 MacroAssembler::Jump slowCase;
3117 if (node->arrayMode().type() == Array::Double) {
3118 m_jit.loadDouble(
3119 MacroAssembler::BaseIndex(storageGPR, storageLengthGPR, MacroAssembler::TimesEight),
3120 tempFPR);
3121 // FIXME: This would not have to be here if changing the publicLength also zeroed the values between the old
3122 // length and the new length.
3123 m_jit.store64(
3124 MacroAssembler::TrustedImm64((int64_t)0), MacroAssembler::BaseIndex(storageGPR, storageLengthGPR, MacroAssembler::TimesEight));
3125 slowCase = m_jit.branchDouble(MacroAssembler::DoubleNotEqualOrUnordered, tempFPR, tempFPR);
3126 boxDouble(tempFPR, valueGPR);
3127 } else {
3128 m_jit.load64(
3129 MacroAssembler::BaseIndex(storageGPR, storageLengthGPR, MacroAssembler::TimesEight),
3130 valueGPR);
3131 // FIXME: This would not have to be here if changing the publicLength also zeroed the values between the old
3132 // length and the new length.
3133 m_jit.store64(
3134 MacroAssembler::TrustedImm64((int64_t)0), MacroAssembler::BaseIndex(storageGPR, storageLengthGPR, MacroAssembler::TimesEight));
3135 slowCase = m_jit.branchTest64(MacroAssembler::Zero, valueGPR);
3136 }
3137
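// Slow cases: an empty array simply produces jsUndefined(); a hole (NaN pattern for double
// storage, the empty value otherwise) means the fast path can't tell what pop() should return,
// so operationArrayPopAndRecoverLength performs the pop in C++ and restores the length we
// already decremented.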
3138 addSlowPathGenerator(
3139 slowPathMove(
3140 undefinedCase, this,
3141 MacroAssembler::TrustedImm64(JSValue::encode(jsUndefined())), valueGPR));
3142 addSlowPathGenerator(
3143 slowPathCall(
3144 slowCase, this, operationArrayPopAndRecoverLength, valueGPR, baseGPR));
3145
3146 // Because of the slow paths, we can't pin down the format of the result, so produce a generic JSValue.
3147 jsValueResult(valueGPR, node);
3148 break;
3149 }
3150
3151 case Array::ArrayStorage: {
3152 m_jit.load32(MacroAssembler::Address(storageGPR, ArrayStorage::lengthOffset()), storageLengthGPR);
3153
3154 JITCompiler::Jump undefinedCase =
3155 m_jit.branchTest32(MacroAssembler::Zero, storageLengthGPR);
3156
3157 m_jit.sub32(TrustedImm32(1), storageLengthGPR);
3158
3159 JITCompiler::JumpList slowCases;
3160 slowCases.append(m_jit.branch32(MacroAssembler::AboveOrEqual, storageLengthGPR, MacroAssembler::Address(storageGPR, ArrayStorage::vectorLengthOffset())));
3161
3162 m_jit.load64(MacroAssembler::BaseIndex(storageGPR, storageLengthGPR, MacroAssembler::TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0])), valueGPR);
3163 slowCases.append(m_jit.branchTest64(MacroAssembler::Zero, valueGPR));
3164
3165 m_jit.store32(storageLengthGPR, MacroAssembler::Address(storageGPR, ArrayStorage::lengthOffset()));
3166
3167 m_jit.store64(MacroAssembler::TrustedImm64((int64_t)0), MacroAssembler::BaseIndex(storageGPR, storageLengthGPR, MacroAssembler::TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0])));
3168 m_jit.sub32(MacroAssembler::TrustedImm32(1), MacroAssembler::Address(storageGPR, OBJECT_OFFSETOF(ArrayStorage, m_numValuesInVector)));
3169
3170 addSlowPathGenerator(
3171 slowPathMove(
3172 undefinedCase, this,
3173 MacroAssembler::TrustedImm64(JSValue::encode(jsUndefined())), valueGPR));
3174
3175 addSlowPathGenerator(
3176 slowPathCall(
3177 slowCases, this, operationArrayPop, valueGPR, baseGPR));
3178
3179 jsValueResult(valueGPR, node);
3180 break;
3181 }
3182
3183 default:
3184 CRASH();
3185 break;
3186 }
3187 break;
3188 }
3189
3190 case DFG::Jump: {
3191 BlockIndex taken = node->takenBlockIndex();
3192 jump(taken);
3193 noResult(node);
3194 break;
3195 }
3196
3197 case Branch:
3198 emitBranch(node);
3199 break;
3200
3201 case Return: {
3202 ASSERT(GPRInfo::callFrameRegister != GPRInfo::regT1);
3203 ASSERT(GPRInfo::regT1 != GPRInfo::returnValueGPR);
3204 ASSERT(GPRInfo::returnValueGPR != GPRInfo::callFrameRegister);
3205
3206 #if DFG_ENABLE(SUCCESS_STATS)
3207 static SamplingCounter counter("SpeculativeJIT");
3208 m_jit.emitCount(counter);
3209 #endif
3210
3211 // Return the result in returnValueGPR.
3212 JSValueOperand op1(this, node->child1());
3213 m_jit.move(op1.gpr(), GPRInfo::returnValueGPR);
3214
3215 // Grab the return address.
3216 m_jit.emitGetFromCallFrameHeaderPtr(JSStack::ReturnPC, GPRInfo::regT1);
3217 // Restore our caller's "r".
3218 m_jit.emitGetFromCallFrameHeaderPtr(JSStack::CallerFrame, GPRInfo::callFrameRegister);
3219 // Return.
3220 m_jit.restoreReturnAddressBeforeReturn(GPRInfo::regT1);
3221 m_jit.ret();
3222
3223 noResult(node);
3224 break;
3225 }
3226
3227 case Throw:
3228 case ThrowReferenceError: {
3229 // We expect that throw statements are rare and are intended to exit the code block
3230 // anyway, so we just OSR back to the old JIT for now.
3231 terminateSpeculativeExecution(Uncountable, JSValueRegs(), 0);
3232 break;
3233 }
3234
3235 case ToPrimitive: {
3236 RELEASE_ASSERT(node->child1().useKind() == UntypedUse);
3237 JSValueOperand op1(this, node->child1());
3238 GPRTemporary result(this, op1);
3239
3240 GPRReg op1GPR = op1.gpr();
3241 GPRReg resultGPR = result.gpr();
3242
3243 op1.use();
3244
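// If the abstract interpreter already proved the input to be a number or boolean, it is
// primitive and passes through unchanged. Otherwise, non-cells and string cells pass
// through, and all other cells take the operationToPrimitive slow path.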
3245 if (!(m_state.forNode(node->child1()).m_type & ~(SpecNumber | SpecBoolean)))
3246 m_jit.move(op1GPR, resultGPR);
3247 else {
3248 MacroAssembler::Jump alreadyPrimitive = m_jit.branchTest64(MacroAssembler::NonZero, op1GPR, GPRInfo::tagMaskRegister);
3249 MacroAssembler::Jump notPrimitive = m_jit.branchPtr(MacroAssembler::NotEqual, MacroAssembler::Address(op1GPR, JSCell::structureOffset()), MacroAssembler::TrustedImmPtr(m_jit.vm()->stringStructure.get()));
3250
3251 alreadyPrimitive.link(&m_jit);
3252 m_jit.move(op1GPR, resultGPR);
3253
3254 addSlowPathGenerator(
3255 slowPathCall(notPrimitive, this, operationToPrimitive, resultGPR, op1GPR));
3256 }
3257
3258 jsValueResult(resultGPR, node, UseChildrenCalledExplicitly);
3259 break;
3260 }
3261
3262 case ToString: {
3263 if (node->child1().useKind() == UntypedUse) {
3264 JSValueOperand op1(this, node->child1());
3265 GPRReg op1GPR = op1.gpr();
3266
3267 GPRResult result(this);
3268 GPRReg resultGPR = result.gpr();
3269
3270 flushRegisters();
3271
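// If profiling suggests the input is often already a string, emit an inline check:
// a cell whose structure is the VM's string structure is returned as-is, and everything
// else falls through to operationToString.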
3272 JITCompiler::Jump done;
3273 if (node->child1()->prediction() & SpecString) {
3274 JITCompiler::Jump slowPath1 = m_jit.branchTest64(
3275 JITCompiler::NonZero, op1GPR, GPRInfo::tagMaskRegister);
3276 JITCompiler::Jump slowPath2 = m_jit.branchPtr(
3277 JITCompiler::NotEqual,
3278 JITCompiler::Address(op1GPR, JSCell::structureOffset()),
3279 TrustedImmPtr(m_jit.vm()->stringStructure.get()));
3280 m_jit.move(op1GPR, resultGPR);
3281 done = m_jit.jump();
3282 slowPath1.link(&m_jit);
3283 slowPath2.link(&m_jit);
3284 }
3285 callOperation(operationToString, resultGPR, op1GPR);
3286 if (done.isSet())
3287 done.link(&m_jit);
3288 cellResult(resultGPR, node);
3289 break;
3290 }
3291
3292 compileToStringOnCell(node);
3293 break;
3294 }
3295
3296 case NewStringObject: {
3297 compileNewStringObject(node);
3298 break;
3299 }
3300
3301 case NewArray: {
3302 JSGlobalObject* globalObject = m_jit.graph().globalObjectFor(node->codeOrigin);
3303 if (!globalObject->isHavingABadTime() && !hasArrayStorage(node->indexingType())) {
3304 globalObject->havingABadTimeWatchpoint()->add(speculationWatchpoint());
3305
3306 Structure* structure = globalObject->arrayStructureForIndexingTypeDuringAllocation(node->indexingType());
3307 RELEASE_ASSERT(structure->indexingType() == node->indexingType());
3308 ASSERT(
3309 hasUndecided(structure->indexingType())
3310 || hasInt32(structure->indexingType())
3311 || hasDouble(structure->indexingType())
3312 || hasContiguous(structure->indexingType()));
3313
3314 unsigned numElements = node->numChildren();
3315
3316 GPRTemporary result(this);
3317 GPRTemporary storage(this);
3318
3319 GPRReg resultGPR = result.gpr();
3320 GPRReg storageGPR = storage.gpr();
3321
3322 emitAllocateJSArray(resultGPR, structure, storageGPR, numElements);
3323
3324 // At this point, one way or another, resultGPR and storageGPR have pointers to
3325 // the JSArray and the Butterfly, respectively.
3326
3327 ASSERT(!hasUndecided(structure->indexingType()) || !node->numChildren());
3328
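// Store each element directly into the new butterfly. Int32 and Double indexing types
// advertise the element representation in the array's structure, so those elements are
// type-checked (with OSR exit) before being stored.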
3329 for (unsigned operandIdx = 0; operandIdx < node->numChildren(); ++operandIdx) {
3330 Edge use = m_jit.graph().m_varArgChildren[node->firstChild() + operandIdx];
3331 switch (node->indexingType()) {
3332 case ALL_BLANK_INDEXING_TYPES:
3333 case ALL_UNDECIDED_INDEXING_TYPES:
3334 CRASH();
3335 break;
3336 case ALL_DOUBLE_INDEXING_TYPES: {
3337 SpeculateDoubleOperand operand(this, use);
3338 FPRReg opFPR = operand.fpr();
3339 DFG_TYPE_CHECK(
3340 JSValueRegs(), use, SpecRealNumber,
3341 m_jit.branchDouble(
3342 MacroAssembler::DoubleNotEqualOrUnordered, opFPR, opFPR));
3343 m_jit.storeDouble(opFPR, MacroAssembler::Address(storageGPR, sizeof(double) * operandIdx));
3344 break;
3345 }
3346 case ALL_INT32_INDEXING_TYPES:
3347 case ALL_CONTIGUOUS_INDEXING_TYPES: {
3348 JSValueOperand operand(this, use, ManualOperandSpeculation);
3349 GPRReg opGPR = operand.gpr();
3350 if (hasInt32(node->indexingType())) {
3351 DFG_TYPE_CHECK(
3352 JSValueRegs(opGPR), use, SpecInt32,
3353 m_jit.branch64(
3354 MacroAssembler::Below, opGPR, GPRInfo::tagTypeNumberRegister));
3355 }
3356 m_jit.store64(opGPR, MacroAssembler::Address(storageGPR, sizeof(JSValue) * operandIdx));
3357 break;
3358 }
3359 default:
3360 CRASH();
3361 break;
3362 }
3363 }
3364
3365 // Ideally we would also have a way of returning the storageGPR, but that is the
3366 // least of this code's problems. We really should not be allocating the array
3367 // only after having computed - and probably spilled to the stack - all of the
3368 // values that will go into it. Solving that bigger problem would also likely
3369 // remove the redundancy of reloading the storage pointer that we currently
3370 // have.
3371
3372 cellResult(resultGPR, node);
3373 break;
3374 }
3375
3376 if (!node->numChildren()) {
3377 flushRegisters();
3378 GPRResult result(this);
3379 callOperation(operationNewEmptyArray, result.gpr(), globalObject->arrayStructureForIndexingTypeDuringAllocation(node->indexingType()));
3380 cellResult(result.gpr(), node);
3381 break;
3382 }
3383
3384 size_t scratchSize = sizeof(EncodedJSValue) * node->numChildren();
3385 ScratchBuffer* scratchBuffer = m_jit.vm()->scratchBufferForSize(scratchSize);
3386 EncodedJSValue* buffer = scratchBuffer ? static_cast<EncodedJSValue*>(scratchBuffer->dataBuffer()) : 0;
3387
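// Generic path: each element is boxed into a side scratch buffer here, and operationNewArray
// then builds the array from that buffer.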
3388 for (unsigned operandIdx = 0; operandIdx < node->numChildren(); ++operandIdx) {
3389 // Perform the speculations that this node promises to perform. If we are emitting
3390 // this generic path and the indexing type is not array storage, then something has
3391 // probably already gone wrong upstream, but emitting the checks here at least
3392 // keeps us sound.
3393 Edge use = m_jit.graph().m_varArgChildren[node->firstChild() + operandIdx];
3394 switch (node->indexingType()) {
3395 case ALL_BLANK_INDEXING_TYPES:
3396 case ALL_UNDECIDED_INDEXING_TYPES:
3397 CRASH();
3398 break;
3399 case ALL_DOUBLE_INDEXING_TYPES: {
3400 SpeculateDoubleOperand operand(this, use);
3401 GPRTemporary scratch(this);
3402 FPRReg opFPR = operand.fpr();
3403 GPRReg scratchGPR = scratch.gpr();
3404 DFG_TYPE_CHECK(
3405 JSValueRegs(), use, SpecRealNumber,
3406 m_jit.branchDouble(
3407 MacroAssembler::DoubleNotEqualOrUnordered, opFPR, opFPR));
3408 m_jit.boxDouble(opFPR, scratchGPR);
3409 m_jit.store64(scratchGPR, buffer + operandIdx);
3410 break;
3411 }
3412 case ALL_INT32_INDEXING_TYPES: {
3413 JSValueOperand operand(this, use, ManualOperandSpeculation);
3414 GPRReg opGPR = operand.gpr();
3415 if (hasInt32(node->indexingType())) {
3416 DFG_TYPE_CHECK(
3417 JSValueRegs(opGPR), use, SpecInt32,
3418 m_jit.branch64(
3419 MacroAssembler::Below, opGPR, GPRInfo::tagTypeNumberRegister));
3420 }
3421 m_jit.store64(opGPR, buffer + operandIdx);
3422 break;
3423 }
3424 case ALL_CONTIGUOUS_INDEXING_TYPES:
3425 case ALL_ARRAY_STORAGE_INDEXING_TYPES: {
3426 JSValueOperand operand(this, use);
3427 GPRReg opGPR = operand.gpr();
3428 m_jit.store64(opGPR, buffer + operandIdx);
3429 operand.use();
3430 break;
3431 }
3432 default:
3433 CRASH();
3434 break;
3435 }
3436 }
3437
3438 switch (node->indexingType()) {
3439 case ALL_DOUBLE_INDEXING_TYPES:
3440 case ALL_INT32_INDEXING_TYPES:
3441 useChildren(node);
3442 break;
3443 default:
3444 break;
3445 }
3446
3447 flushRegisters();
3448
3449 if (scratchSize) {
3450 GPRTemporary scratch(this);
3451
3452 // Tell GC mark phase how much of the scratch buffer is active during call.
3453 m_jit.move(TrustedImmPtr(scratchBuffer->activeLengthPtr()), scratch.gpr());
3454 m_jit.storePtr(TrustedImmPtr(scratchSize), scratch.gpr());
3455 }
3456
3457 GPRResult result(this);
3458
3459 callOperation(
3460 operationNewArray, result.gpr(), globalObject->arrayStructureForIndexingTypeDuringAllocation(node->indexingType()),
3461 static_cast<void*>(buffer), node->numChildren());
3462
3463 if (scratchSize) {
3464 GPRTemporary scratch(this);
3465
3466 m_jit.move(TrustedImmPtr(scratchBuffer->activeLengthPtr()), scratch.gpr());
3467 m_jit.storePtr(TrustedImmPtr(0), scratch.gpr());
3468 }
3469
3470 cellResult(result.gpr(), node, UseChildrenCalledExplicitly);
3471 break;
3472 }
3473
3474 case NewArrayWithSize: {
3475 JSGlobalObject* globalObject = m_jit.graph().globalObjectFor(node->codeOrigin);
3476 if (!globalObject->isHavingABadTime() && !hasArrayStorage(node->indexingType())) {
3477 globalObject->havingABadTimeWatchpoint()->add(speculationWatchpoint());
3478
3479 SpeculateStrictInt32Operand size(this, node->child1());
3480 GPRTemporary result(this);
3481 GPRTemporary storage(this);
3482 GPRTemporary scratch(this);
3483 GPRTemporary scratch2(this);
3484
3485 GPRReg sizeGPR = size.gpr();
3486 GPRReg resultGPR = result.gpr();
3487 GPRReg storageGPR = storage.gpr();
3488 GPRReg scratchGPR = scratch.gpr();
3489 GPRReg scratch2GPR = scratch2.gpr();
3490
3491 MacroAssembler::JumpList slowCases;
3492 slowCases.append(m_jit.branch32(MacroAssembler::AboveOrEqual, sizeGPR, TrustedImm32(MIN_SPARSE_ARRAY_INDEX)));
3493
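// Compute the butterfly size in bytes (publicLength * sizeof(JSValue) plus the
// IndexingHeader), bump-allocate that much storage, then allocate the JSArray cell
// itself. emitAllocateBasicStorage appears to hand back the end of the allocation, so
// subtracting the payload size leaves storageGPR pointing just past the IndexingHeader,
// which is where a butterfly pointer points.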
3494 ASSERT((1 << 3) == sizeof(JSValue));
3495 m_jit.move(sizeGPR, scratchGPR);
3496 m_jit.lshift32(TrustedImm32(3), scratchGPR);
3497 m_jit.add32(TrustedImm32(sizeof(IndexingHeader)), scratchGPR, resultGPR);
3498 slowCases.append(
3499 emitAllocateBasicStorage(resultGPR, storageGPR));
3500 m_jit.subPtr(scratchGPR, storageGPR);
3501 Structure* structure = globalObject->arrayStructureForIndexingTypeDuringAllocation(node->indexingType());
3502 emitAllocateJSObject<JSArray>(resultGPR, ImmPtr(structure), storageGPR, scratchGPR, scratch2GPR, slowCases);
3503
3504 m_jit.store32(sizeGPR, MacroAssembler::Address(storageGPR, Butterfly::offsetOfPublicLength()));
3505 m_jit.store32(sizeGPR, MacroAssembler::Address(storageGPR, Butterfly::offsetOfVectorLength()));
3506
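// Double storage represents holes with the QNaN bit pattern, so the freshly allocated
// double butterfly is filled with QNaN before the array becomes visible.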
3507 if (hasDouble(node->indexingType())) {
3508 m_jit.move(TrustedImm64(bitwise_cast<int64_t>(QNaN)), scratchGPR);
3509 m_jit.move(sizeGPR, scratch2GPR);
3510 MacroAssembler::Jump done = m_jit.branchTest32(MacroAssembler::Zero, scratch2GPR);
3511 MacroAssembler::Label loop = m_jit.label();
3512 m_jit.sub32(TrustedImm32(1), scratch2GPR);
3513 m_jit.store64(scratchGPR, MacroAssembler::BaseIndex(storageGPR, scratch2GPR, MacroAssembler::TimesEight));
3514 m_jit.branchTest32(MacroAssembler::NonZero, scratch2GPR).linkTo(loop, &m_jit);
3515 done.link(&m_jit);
3516 }
3517
3518 addSlowPathGenerator(adoptPtr(
3519 new CallArrayAllocatorWithVariableSizeSlowPathGenerator(
3520 slowCases, this, operationNewArrayWithSize, resultGPR,
3521 globalObject->arrayStructureForIndexingTypeDuringAllocation(node->indexingType()),
3522 globalObject->arrayStructureForIndexingTypeDuringAllocation(ArrayWithArrayStorage),
3523 sizeGPR)));
3524
3525 cellResult(resultGPR, node);
3526 break;
3527 }
3528
3529 SpeculateStrictInt32Operand size(this, node->child1());
3530 GPRReg sizeGPR = size.gpr();
3531 flushRegisters();
3532 GPRResult result(this);
3533 GPRReg resultGPR = result.gpr();
3534 GPRReg structureGPR = selectScratchGPR(sizeGPR);
3535 MacroAssembler::Jump bigLength = m_jit.branch32(MacroAssembler::AboveOrEqual, sizeGPR, TrustedImm32(MIN_SPARSE_ARRAY_INDEX));
3536 m_jit.move(TrustedImmPtr(globalObject->arrayStructureForIndexingTypeDuringAllocation(node->indexingType())), structureGPR);
3537 MacroAssembler::Jump done = m_jit.jump();
3538 bigLength.link(&m_jit);
3539 m_jit.move(TrustedImmPtr(globalObject->arrayStructureForIndexingTypeDuringAllocation(ArrayWithArrayStorage)), structureGPR);
3540 done.link(&m_jit);
3541 callOperation(operationNewArrayWithSize, resultGPR, structureGPR, sizeGPR);
3542 cellResult(resultGPR, node);
3543 break;
3544 }
3545
3546 case NewArrayBuffer: {
3547 JSGlobalObject* globalObject = m_jit.graph().globalObjectFor(node->codeOrigin);
3548 IndexingType indexingType = node->indexingType();
3549 if (!globalObject->isHavingABadTime() && !hasArrayStorage(indexingType)) {
3550 globalObject->havingABadTimeWatchpoint()->add(speculationWatchpoint());
3551
3552 unsigned numElements = node->numConstants();
3553
3554 GPRTemporary result(this);
3555 GPRTemporary storage(this);
3556
3557 GPRReg resultGPR = result.gpr();
3558 GPRReg storageGPR = storage.gpr();
3559
3560 emitAllocateJSArray(resultGPR, globalObject->arrayStructureForIndexingTypeDuringAllocation(indexingType), storageGPR, numElements);
3561
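// Copy the literal's constants straight from the code block's constant buffer into the
// new butterfly. Double arrays hold unboxed doubles, so those are stored as raw bits;
// everything else is stored as an encoded JSValue.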
3562 RELEASE_ASSERT(indexingType & IsArray);
3563 JSValue* data = m_jit.codeBlock()->constantBuffer(node->startConstant());
3564 if (indexingType == ArrayWithDouble) {
3565 for (unsigned index = 0; index < node->numConstants(); ++index) {
3566 double value = data[index].asNumber();
3567 m_jit.store64(
3568 Imm64(bitwise_cast<int64_t>(value)),
3569 MacroAssembler::Address(storageGPR, sizeof(double) * index));
3570 }
3571 } else {
3572 for (unsigned index = 0; index < node->numConstants(); ++index) {
3573 m_jit.store64(
3574 Imm64(JSValue::encode(data[index])),
3575 MacroAssembler::Address(storageGPR, sizeof(JSValue) * index));
3576 }
3577 }
3578
3579 cellResult(resultGPR, node);
3580 break;
3581 }
3582
3583 flushRegisters();
3584 GPRResult result(this);
3585
3586 callOperation(operationNewArrayBuffer, result.gpr(), globalObject->arrayStructureForIndexingTypeDuringAllocation(node->indexingType()), node->startConstant(), node->numConstants());
3587
3588 cellResult(result.gpr(), node);
3589 break;
3590 }
3591
3592 case NewRegexp: {
3593 flushRegisters();
3594 GPRResult result(this);
3595
3596 callOperation(operationNewRegexp, result.gpr(), m_jit.codeBlock()->regexp(node->regexpIndex()));
3597
3598 cellResult(result.gpr(), node);
3599 break;
3600 }
3601
3602 case ConvertThis: {
3603 ASSERT(node->child1().useKind() == UntypedUse);
3604 JSValueOperand thisValue(this, node->child1());
3605 GPRReg thisValueGPR = thisValue.gpr();
3606
3607 flushRegisters();
3608
3609 GPRResult result(this);
3610 callOperation(operationConvertThis, result.gpr(), thisValueGPR);
3611
3612 cellResult(result.gpr(), node);
3613 break;
3614 }
3615
3616 case CreateThis: {
3617 // Note that there is not much profit in speculating here. The only things we
3618 // speculate on are (1) that the callee is a cell, since that eliminates cell checks
3619 // later if the prototype is reused, and (2) if we have a FinalObject prediction,
3620 // we speculate so that we get recompiled if the prediction turns out to be wrong
3621 // (since otherwise we would start taking the slow path a lot).
3622
3623 SpeculateCellOperand callee(this, node->child1());
3624 GPRTemporary result(this);
3625 GPRTemporary allocator(this);
3626 GPRTemporary structure(this);
3627 GPRTemporary scratch(this);
3628
3629 GPRReg calleeGPR = callee.gpr();
3630 GPRReg resultGPR = result.gpr();
3631 GPRReg allocatorGPR = allocator.gpr();
3632 GPRReg structureGPR = structure.gpr();
3633 GPRReg scratchGPR = scratch.gpr();
3634
3635 MacroAssembler::JumpList slowPath;
3636
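// The callee's ObjectAllocationProfile caches both the allocator and the Structure to
// use for |this|. A null allocator presumably means the profile has not been primed yet,
// so that case (and inline allocation failure) goes to operationCreateThis.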
3637 m_jit.loadPtr(JITCompiler::Address(calleeGPR, JSFunction::offsetOfAllocationProfile() + ObjectAllocationProfile::offsetOfAllocator()), allocatorGPR);
3638 m_jit.loadPtr(JITCompiler::Address(calleeGPR, JSFunction::offsetOfAllocationProfile() + ObjectAllocationProfile::offsetOfStructure()), structureGPR);
3639 slowPath.append(m_jit.branchTestPtr(MacroAssembler::Zero, allocatorGPR));
3640 emitAllocateJSObject(resultGPR, allocatorGPR, structureGPR, TrustedImmPtr(0), scratchGPR, slowPath);
3641
3642 addSlowPathGenerator(slowPathCall(slowPath, this, operationCreateThis, resultGPR, calleeGPR, node->inlineCapacity()));
3643
3644 cellResult(resultGPR, node);
3645 break;
3646 }
3647
3648 case AllocationProfileWatchpoint: {
3649 jsCast<JSFunction*>(node->function())->addAllocationProfileWatchpoint(speculationWatchpoint());
3650 noResult(node);
3651 break;
3652 }
3653
3654 case NewObject: {
3655 GPRTemporary result(this);
3656 GPRTemporary allocator(this);
3657 GPRTemporary scratch(this);
3658
3659 GPRReg resultGPR = result.gpr();
3660 GPRReg allocatorGPR = allocator.gpr();
3661 GPRReg scratchGPR = scratch.gpr();
3662
3663 MacroAssembler::JumpList slowPath;
3664
3665 Structure* structure = node->structure();
3666 size_t allocationSize = JSObject::allocationSize(structure->inlineCapacity());
3667 MarkedAllocator* allocatorPtr = &m_jit.vm()->heap.allocatorForObjectWithoutDestructor(allocationSize);
3668
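// Inline allocation: emitAllocateJSObject allocates out of the chosen MarkedAllocator
// and branches to the slow path (operationNewObject) if that fails.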
3669 m_jit.move(TrustedImmPtr(allocatorPtr), allocatorGPR);
3670 emitAllocateJSObject(resultGPR, allocatorGPR, TrustedImmPtr(structure), TrustedImmPtr(0), scratchGPR, slowPath);
3671
3672 addSlowPathGenerator(slowPathCall(slowPath, this, operationNewObject, resultGPR, structure));
3673
3674 cellResult(resultGPR, node);
3675 break;
3676 }
3677
3678 case GetCallee: {
3679 GPRTemporary result(this);
3680 m_jit.loadPtr(JITCompiler::addressFor(static_cast<VirtualRegister>(node->codeOrigin.stackOffset() + static_cast<int>(JSStack::Callee))), result.gpr());
3681 cellResult(result.gpr(), node);
3682 break;
3683 }
3684
3685 case SetCallee: {
3686 SpeculateCellOperand callee(this, node->child1());
3687 m_jit.storePtr(callee.gpr(), JITCompiler::addressFor(static_cast<VirtualRegister>(node->codeOrigin.stackOffset() + static_cast<int>(JSStack::Callee))));
3688 noResult(node);
3689 break;
3690 }
3691
3692 case GetScope: {
3693 SpeculateCellOperand function(this, node->child1());
3694 GPRTemporary result(this, function);
3695 m_jit.loadPtr(JITCompiler::Address(function.gpr(), JSFunction::offsetOfScopeChain()), result.gpr());
3696 cellResult(result.gpr(), node);
3697 break;
3698 }
3699
3700 case GetMyScope: {
3701 GPRTemporary result(this);
3702 GPRReg resultGPR = result.gpr();
3703
3704 m_jit.loadPtr(JITCompiler::addressFor(static_cast<VirtualRegister>(node->codeOrigin.stackOffset() + static_cast<int>(JSStack::ScopeChain))), resultGPR);
3705 cellResult(resultGPR, node);
3706 break;
3707 }
3708
3709 case SetMyScope: {
3710 SpeculateCellOperand callee(this, node->child1());
3711 m_jit.storePtr(callee.gpr(), JITCompiler::addressFor(static_cast<VirtualRegister>(node->codeOrigin.stackOffset() + static_cast<int>(JSStack::ScopeChain))));
3712 noResult(node);
3713 break;
3714 }
3715
3716 case SkipTopScope: {
3717 SpeculateCellOperand scope(this, node->child1());
3718 GPRTemporary result(this, scope);
3719 GPRReg resultGPR = result.gpr();
3720 m_jit.move(scope.gpr(), resultGPR);
3721 JITCompiler::Jump activationNotCreated =
3722 m_jit.branchTest64(
3723 JITCompiler::Zero,
3724 JITCompiler::addressFor(
3725 static_cast<VirtualRegister>(m_jit.codeBlock()->activationRegister())));
3726 m_jit.loadPtr(JITCompiler::Address(resultGPR, JSScope::offsetOfNext()), resultGPR);
3727 activationNotCreated.link(&m_jit);
3728 cellResult(resultGPR, node);
3729 break;
3730 }
3731
3732 case SkipScope: {
3733 SpeculateCellOperand scope(this, node->child1());
3734 GPRTemporary result(this, scope);
3735 m_jit.loadPtr(JITCompiler::Address(scope.gpr(), JSScope::offsetOfNext()), result.gpr());
3736 cellResult(result.gpr(), node);
3737 break;
3738 }
3739
3740 case GetScopeRegisters: {
3741 SpeculateCellOperand scope(this, node->child1());
3742 GPRTemporary result(this);
3743 GPRReg scopeGPR = scope.gpr();
3744 GPRReg resultGPR = result.gpr();
3745
3746 m_jit.loadPtr(JITCompiler::Address(scopeGPR, JSVariableObject::offsetOfRegisters()), resultGPR);
3747 storageResult(resultGPR, node);
3748 break;
3749 }
3750 case GetScopedVar: {
3751 StorageOperand registers(this, node->child1());
3752 GPRTemporary result(this);
3753 GPRReg registersGPR = registers.gpr();
3754 GPRReg resultGPR = result.gpr();
3755
3756 m_jit.load64(JITCompiler::Address(registersGPR, node->varNumber() * sizeof(Register)), resultGPR);
3757 jsValueResult(resultGPR, node);
3758 break;
3759 }
3760 case PutScopedVar: {
3761 SpeculateCellOperand scope(this, node->child1());
3762 StorageOperand registers(this, node->child2());
3763 JSValueOperand value(this, node->child3());
3764 GPRTemporary scratchRegister(this);
3765
3766 GPRReg scopeGPR = scope.gpr();
3767 GPRReg registersGPR = registers.gpr();
3768 GPRReg valueGPR = value.gpr();
3769 GPRReg scratchGPR = scratchRegister.gpr();
3770
3771 m_jit.store64(valueGPR, JITCompiler::Address(registersGPR, node->varNumber() * sizeof(Register)));
3772 writeBarrier(scopeGPR, valueGPR, node->child3(), WriteBarrierForVariableAccess, scratchGPR);
3773 noResult(node);
3774 break;
3775 }
3776 case GetById: {
3777 if (!node->prediction()) {
3778 terminateSpeculativeExecution(InadequateCoverage, JSValueRegs(), 0);
3779 break;
3780 }
3781
3782 switch (node->child1().useKind()) {
3783 case CellUse: {
3784 SpeculateCellOperand base(this, node->child1());
3785 GPRTemporary result(this, base);
3786
3787 GPRReg baseGPR = base.gpr();
3788 GPRReg resultGPR = result.gpr();
3789
3790 base.use();
3791
3792 cachedGetById(node->codeOrigin, baseGPR, resultGPR, node->identifierNumber());
3793
3794 jsValueResult(resultGPR, node, UseChildrenCalledExplicitly);
3795 break;
3796 }
3797
3798 case UntypedUse: {
3799 JSValueOperand base(this, node->child1());
3800 GPRTemporary result(this, base);
3801
3802 GPRReg baseGPR = base.gpr();
3803 GPRReg resultGPR = result.gpr();
3804
3805 base.use();
3806
3807 JITCompiler::Jump notCell = m_jit.branchTest64(JITCompiler::NonZero, baseGPR, GPRInfo::tagMaskRegister);
3808
3809 cachedGetById(node->codeOrigin, baseGPR, resultGPR, node->identifierNumber(), notCell);
3810
3811 jsValueResult(resultGPR, node, UseChildrenCalledExplicitly);
3812 break;
3813 }
3814
3815 default:
3816 RELEASE_ASSERT_NOT_REACHED();
3817 break;
3818 }
3819 break;
3820 }
3821
3822 case GetByIdFlush: {
3823 if (!node->prediction()) {
3824 terminateSpeculativeExecution(InadequateCoverage, JSValueRegs(), 0);
3825 break;
3826 }
3827
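// GetByIdFlush is like GetById except that all registers are flushed up front, so the
// inline cache is generated with DontSpill: its slow path can call out without having
// to spill anything.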
3828 switch (node->child1().useKind()) {
3829 case CellUse: {
3830 SpeculateCellOperand base(this, node->child1());
3831 GPRReg baseGPR = base.gpr();
3832
3833 GPRResult result(this);
3834
3835 GPRReg resultGPR = result.gpr();
3836
3837 base.use();
3838
3839 flushRegisters();
3840
3841 cachedGetById(node->codeOrigin, baseGPR, resultGPR, node->identifierNumber(), JITCompiler::Jump(), DontSpill);
3842
3843 jsValueResult(resultGPR, node, UseChildrenCalledExplicitly);
3844 break;
3845 }
3846
3847 case UntypedUse: {
3848 JSValueOperand base(this, node->child1());
3849 GPRReg baseGPR = base.gpr();
3850
3851 GPRResult result(this);
3852 GPRReg resultGPR = result.gpr();
3853
3854 base.use();
3855 flushRegisters();
3856
3857 JITCompiler::Jump notCell = m_jit.branchTest64(JITCompiler::NonZero, baseGPR, GPRInfo::tagMaskRegister);
3858
3859 cachedGetById(node->codeOrigin, baseGPR, resultGPR, node->identifierNumber(), notCell, DontSpill);
3860
3861 jsValueResult(resultGPR, node, UseChildrenCalledExplicitly);
3862 break;
3863 }
3864
3865 default:
3866 RELEASE_ASSERT_NOT_REACHED();
3867 break;
3868 }
3869 break;
3870 }
3871
3872 case GetArrayLength:
3873 compileGetArrayLength(node);
3874 break;
3875
3876 case CheckFunction: {
3877 SpeculateCellOperand function(this, node->child1());
3878 speculationCheck(BadFunction, JSValueSource::unboxedCell(function.gpr()), node->child1(), m_jit.branchWeakPtr(JITCompiler::NotEqual, function.gpr(), node->function()));
3879 noResult(node);
3880 break;
3881 }
3882
3883 case CheckExecutable: {
3884 SpeculateCellOperand function(this, node->child1());
3885 speculationCheck(BadExecutable, JSValueSource::unboxedCell(function.gpr()), node->child1(), m_jit.branchWeakPtr(JITCompiler::NotEqual, JITCompiler::Address(function.gpr(), JSFunction::offsetOfExecutable()), node->executable()));
3886 noResult(node);
3887 break;
3888 }
3889
3890 case CheckStructure:
3891 case ForwardCheckStructure: {
3892 SpeculateCellOperand base(this, node->child1());
3893
3894 ASSERT(node->structureSet().size());
3895
3896 ExitKind exitKind;
3897 if (node->child1()->op() == WeakJSConstant)
3898 exitKind = BadWeakConstantCache;
3899 else
3900 exitKind = BadCache;
3901
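// A singleton structure set needs just one weak-pointer compare against the cell's
// structure. Larger sets load the structure once, accept any match, and OSR-exit only
// if the last candidate also fails to match.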
3902 if (node->structureSet().size() == 1) {
3903 speculationCheck(
3904 exitKind, JSValueSource::unboxedCell(base.gpr()), 0,
3905 m_jit.branchWeakPtr(
3906 JITCompiler::NotEqual,
3907 JITCompiler::Address(base.gpr(), JSCell::structureOffset()),
3908 node->structureSet()[0]));
3909 } else {
3910 GPRTemporary structure(this);
3911
3912 m_jit.loadPtr(JITCompiler::Address(base.gpr(), JSCell::structureOffset()), structure.gpr());
3913
3914 JITCompiler::JumpList done;
3915
3916 for (size_t i = 0; i < node->structureSet().size() - 1; ++i)
3917 done.append(m_jit.branchWeakPtr(JITCompiler::Equal, structure.gpr(), node->structureSet()[i]));
3918
3919 speculationCheck(
3920 exitKind, JSValueSource::unboxedCell(base.gpr()), 0,
3921 m_jit.branchWeakPtr(
3922 JITCompiler::NotEqual, structure.gpr(), node->structureSet().last()));
3923
3924 done.link(&m_jit);
3925 }
3926
3927 noResult(node);
3928 break;
3929 }
3930
3931 case StructureTransitionWatchpoint:
3932 case ForwardStructureTransitionWatchpoint: {
3933 // There is an open question here of what to do about array profiling.
3934 // We could try to tell the OSR exit where the base of the access is.
3935 // The DFG will have kept it alive, though it may not be in a register, and
3936 // loading it just for this purpose could be a waste. For now, we simply
3937 // rely on the fact that a watchpoint firing is already a strong hint by
3938 // itself.
3939
3940 m_jit.addWeakReference(node->structure());
3941 node->structure()->addTransitionWatchpoint(
3942 speculationWatchpoint(
3943 node->child1()->op() == WeakJSConstant ? BadWeakConstantCache : BadCache));
3944
3945 #if !ASSERT_DISABLED
3946 SpeculateCellOperand op1(this, node->child1());
3947 JITCompiler::Jump isOK = m_jit.branchPtr(JITCompiler::Equal, JITCompiler::Address(op1.gpr(), JSCell::structureOffset()), TrustedImmPtr(node->structure()));
3948 m_jit.breakpoint();
3949 isOK.link(&m_jit);
3950 #else
3951 speculateCell(node->child1());
3952 #endif
3953
3954 noResult(node);
3955 break;
3956 }
3957
3958 case PhantomPutStructure: {
3959 ASSERT(isKnownCell(node->child1().node()));
3960
3961 ASSERT(node->structureTransitionData().previousStructure->transitionWatchpointSetHasBeenInvalidated());
3962 m_jit.addWeakReferenceTransition(
3963 node->codeOrigin.codeOriginOwner(),
3964 node->structureTransitionData().previousStructure,
3965 node->structureTransitionData().newStructure);
3966 noResult(node);
3967 break;
3968 }
3969
3970 case PutStructure: {
3971 ASSERT(node->structureTransitionData().previousStructure->transitionWatchpointSetHasBeenInvalidated());
3972
3973 SpeculateCellOperand base(this, node->child1());
3974 GPRReg baseGPR = base.gpr();
3975
3976 m_jit.addWeakReferenceTransition(
3977 node->codeOrigin.codeOriginOwner(),
3978 node->structureTransitionData().previousStructure,
3979 node->structureTransitionData().newStructure);
3980
3981 #if ENABLE(WRITE_BARRIER_PROFILING)
3982 // Must always emit this write barrier as the structure transition itself requires it
3983 writeBarrier(baseGPR, node->structureTransitionData().newStructure, WriteBarrierForGenericAccess);
3984 #endif
3985
3986 m_jit.storePtr(MacroAssembler::TrustedImmPtr(node->structureTransitionData().newStructure), MacroAssembler::Address(baseGPR, JSCell::structureOffset()));
3987
3988 noResult(node);
3989 break;
3990 }
3991
3992 case AllocatePropertyStorage:
3993 compileAllocatePropertyStorage(node);
3994 break;
3995
3996 case ReallocatePropertyStorage:
3997 compileReallocatePropertyStorage(node);
3998 break;
3999
4000 case GetButterfly: {
4001 SpeculateCellOperand base(this, node->child1());
4002 GPRTemporary result(this, base);
4003
4004 GPRReg baseGPR = base.gpr();
4005 GPRReg resultGPR = result.gpr();
4006
4007 m_jit.loadPtr(JITCompiler::Address(baseGPR, JSObject::butterflyOffset()), resultGPR);
4008
4009 storageResult(resultGPR, node);
4010 break;
4011 }
4012
4013 case GetIndexedPropertyStorage: {
4014 compileGetIndexedPropertyStorage(node);
4015 break;
4016 }
4017
4018 case GetByOffset: {
4019 StorageOperand storage(this, node->child1());
4020 GPRTemporary result(this, storage);
4021
4022 GPRReg storageGPR = storage.gpr();
4023 GPRReg resultGPR = result.gpr();
4024
4025 StorageAccessData& storageAccessData = m_jit.graph().m_storageAccessData[node->storageAccessDataIndex()];
4026
4027 m_jit.load64(JITCompiler::Address(storageGPR, storageAccessData.offset * sizeof(EncodedJSValue)), resultGPR);
4028
4029 jsValueResult(resultGPR, node);
4030 break;
4031 }
4032
4033 case PutByOffset: {
4034 #if ENABLE(WRITE_BARRIER_PROFILING)
4035 SpeculateCellOperand base(this, node->child2());
4036 #endif
4037 StorageOperand storage(this, node->child1());
4038 JSValueOperand value(this, node->child3());
4039
4040 GPRReg storageGPR = storage.gpr();
4041 GPRReg valueGPR = value.gpr();
4042
4043 #if ENABLE(WRITE_BARRIER_PROFILING)
4044 writeBarrier(base.gpr(), value.gpr(), node->child3(), WriteBarrierForPropertyAccess);
4045 #endif
4046
4047 StorageAccessData& storageAccessData = m_jit.graph().m_storageAccessData[node->storageAccessDataIndex()];
4048
4049 m_jit.store64(valueGPR, JITCompiler::Address(storageGPR, storageAccessData.offset * sizeof(EncodedJSValue)));
4050
4051 noResult(node);
4052 break;
4053 }
4054
4055 case PutById: {
4056 SpeculateCellOperand base(this, node->child1());
4057 JSValueOperand value(this, node->child2());
4058 GPRTemporary scratch(this);
4059
4060 GPRReg baseGPR = base.gpr();
4061 GPRReg valueGPR = value.gpr();
4062 GPRReg scratchGPR = scratch.gpr();
4063
4064 base.use();
4065 value.use();
4066
4067 cachedPutById(node->codeOrigin, baseGPR, valueGPR, node->child2(), scratchGPR, node->identifierNumber(), NotDirect);
4068
4069 noResult(node, UseChildrenCalledExplicitly);
4070 break;
4071 }
4072
4073 case PutByIdDirect: {
4074 SpeculateCellOperand base(this, node->child1());
4075 JSValueOperand value(this, node->child2());
4076 GPRTemporary scratch(this);
4077
4078 GPRReg baseGPR = base.gpr();
4079 GPRReg valueGPR = value.gpr();
4080 GPRReg scratchGPR = scratch.gpr();
4081
4082 base.use();
4083 value.use();
4084
4085 cachedPutById(node->codeOrigin, baseGPR, valueGPR, node->child2(), scratchGPR, node->identifierNumber(), Direct);
4086
4087 noResult(node, UseChildrenCalledExplicitly);
4088 break;
4089 }
4090
4091 case GetGlobalVar: {
4092 GPRTemporary result(this);
4093
4094 m_jit.load64(node->registerPointer(), result.gpr());
4095
4096 jsValueResult(result.gpr(), node);
4097 break;
4098 }
4099
4100 case PutGlobalVar: {
4101 JSValueOperand value(this, node->child1());
4102
4103 if (Heap::isWriteBarrierEnabled()) {
4104 GPRTemporary scratch(this);
4105 GPRReg scratchReg = scratch.gpr();
4106
4107 writeBarrier(m_jit.globalObjectFor(node->codeOrigin), value.gpr(), node->child1(), WriteBarrierForVariableAccess, scratchReg);
4108 }
4109
4110 m_jit.store64(value.gpr(), node->registerPointer());
4111
4112 noResult(node);
4113 break;
4114 }
4115
4116 case PutGlobalVarCheck: {
4117 JSValueOperand value(this, node->child1());
4118
4119 WatchpointSet* watchpointSet =
4120 m_jit.globalObjectFor(node->codeOrigin)->symbolTable()->get(
4121 identifier(node->identifierNumberForCheck())->impl()).watchpointSet();
4122 addSlowPathGenerator(
4123 slowPathCall(
4124 m_jit.branchTest8(
4125 JITCompiler::NonZero,
4126 JITCompiler::AbsoluteAddress(watchpointSet->addressOfIsWatched())),
4127 this, operationNotifyGlobalVarWrite, NoResult, watchpointSet));
4128
4129 if (Heap::isWriteBarrierEnabled()) {
4130 GPRTemporary scratch(this);
4131 GPRReg scratchReg = scratch.gpr();
4132
4133 writeBarrier(m_jit.globalObjectFor(node->codeOrigin), value.gpr(), node->child1(), WriteBarrierForVariableAccess, scratchReg);
4134 }
4135
4136 m_jit.store64(value.gpr(), node->registerPointer());
4137
4138 noResult(node);
4139 break;
4140 }
4141
4142 case GlobalVarWatchpoint: {
4143 m_jit.globalObjectFor(node->codeOrigin)->symbolTable()->get(
4144 identifier(node->identifierNumberForCheck())->impl()).addWatchpoint(
4145 speculationWatchpoint());
4146
4147 #if DFG_ENABLE(JIT_ASSERT)
4148 GPRTemporary scratch(this);
4149 GPRReg scratchGPR = scratch.gpr();
4150 m_jit.load64(node->registerPointer(), scratchGPR);
4151 JITCompiler::Jump ok = m_jit.branch64(
4152 JITCompiler::Equal, scratchGPR,
4153 TrustedImm64(JSValue::encode(node->registerPointer()->get())));
4154 m_jit.breakpoint();
4155 ok.link(&m_jit);
4156 #endif
4157
4158 noResult(node);
4159 break;
4160 }
4161
4162 case CheckHasInstance: {
4163 SpeculateCellOperand base(this, node->child1());
4164 GPRTemporary structure(this);
4165
4166 // Speculate that the base has the ImplementsDefaultHasInstance type-info flag set.
4167 m_jit.loadPtr(MacroAssembler::Address(base.gpr(), JSCell::structureOffset()), structure.gpr());
4168 speculationCheck(Uncountable, JSValueRegs(), 0, m_jit.branchTest8(MacroAssembler::Zero, MacroAssembler::Address(structure.gpr(), Structure::typeInfoFlagsOffset()), MacroAssembler::TrustedImm32(ImplementsDefaultHasInstance)));
4169
4170 noResult(node);
4171 break;
4172 }
4173
4174 case InstanceOf: {
4175 compileInstanceOf(node);
4176 break;
4177 }
4178
4179 case IsUndefined: {
4180 JSValueOperand value(this, node->child1());
4181 GPRTemporary result(this);
4182 GPRTemporary localGlobalObject(this);
4183 GPRTemporary remoteGlobalObject(this);
4184
4185 JITCompiler::Jump isCell = m_jit.branchTest64(JITCompiler::Zero, value.gpr(), GPRInfo::tagMaskRegister);
4186
4187 m_jit.compare64(JITCompiler::Equal, value.gpr(), TrustedImm32(ValueUndefined), result.gpr());
4188 JITCompiler::Jump done = m_jit.jump();
4189
4190 isCell.link(&m_jit);
4191 JITCompiler::Jump notMasqueradesAsUndefined;
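// If the global object's masquerades-as-undefined watchpoint is still intact, no cell
// can masquerade as undefined, so register a speculation watchpoint and answer false
// for all cells. Otherwise, check the MasqueradesAsUndefined type-info flag and, if it
// is set, compare the structure's global object against ours to decide the answer.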
4192 if (m_jit.graph().globalObjectFor(node->codeOrigin)->masqueradesAsUndefinedWatchpoint()->isStillValid()) {
4193 m_jit.graph().globalObjectFor(node->codeOrigin)->masqueradesAsUndefinedWatchpoint()->add(speculationWatchpoint());
4194 m_jit.move(TrustedImm32(0), result.gpr());
4195 notMasqueradesAsUndefined = m_jit.jump();
4196 } else {
4197 m_jit.loadPtr(JITCompiler::Address(value.gpr(), JSCell::structureOffset()), result.gpr());
4198 JITCompiler::Jump isMasqueradesAsUndefined = m_jit.branchTest8(JITCompiler::NonZero, JITCompiler::Address(result.gpr(), Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
4199 m_jit.move(TrustedImm32(0), result.gpr());
4200 notMasqueradesAsUndefined = m_jit.jump();
4201
4202 isMasqueradesAsUndefined.link(&m_jit);
4203 GPRReg localGlobalObjectGPR = localGlobalObject.gpr();
4204 GPRReg remoteGlobalObjectGPR = remoteGlobalObject.gpr();
4205 m_jit.move(TrustedImmPtr(m_jit.globalObjectFor(node->codeOrigin)), localGlobalObjectGPR);
4206 m_jit.loadPtr(JITCompiler::Address(result.gpr(), Structure::globalObjectOffset()), remoteGlobalObjectGPR);
4207 m_jit.comparePtr(JITCompiler::Equal, localGlobalObjectGPR, remoteGlobalObjectGPR, result.gpr());
4208 }
4209
4210 notMasqueradesAsUndefined.link(&m_jit);
4211 done.link(&m_jit);
4212 m_jit.or32(TrustedImm32(ValueFalse), result.gpr());
4213 jsValueResult(result.gpr(), node, DataFormatJSBoolean);
4214 break;
4215 }
4216
4217 case IsBoolean: {
4218 JSValueOperand value(this, node->child1());
4219 GPRTemporary result(this, value);
4220
4221 m_jit.move(value.gpr(), result.gpr());
4222 m_jit.xor64(JITCompiler::TrustedImm32(ValueFalse), result.gpr());
4223 m_jit.test64(JITCompiler::Zero, result.gpr(), JITCompiler::TrustedImm32(static_cast<int32_t>(~1)), result.gpr());
4224 m_jit.or32(TrustedImm32(ValueFalse), result.gpr());
4225 jsValueResult(result.gpr(), node, DataFormatJSBoolean);
4226 break;
4227 }
4228
4229 case IsNumber: {
4230 JSValueOperand value(this, node->child1());
4231 GPRTemporary result(this, value);
4232
4233 m_jit.test64(JITCompiler::NonZero, value.gpr(), GPRInfo::tagTypeNumberRegister, result.gpr());
4234 m_jit.or32(TrustedImm32(ValueFalse), result.gpr());
4235 jsValueResult(result.gpr(), node, DataFormatJSBoolean);
4236 break;
4237 }
4238
4239 case IsString: {
4240 JSValueOperand value(this, node->child1());
4241 GPRTemporary result(this, value);
4242
4243 JITCompiler::Jump isNotCell = m_jit.branchTest64(JITCompiler::NonZero, value.gpr(), GPRInfo::tagMaskRegister);
4244
4245 m_jit.loadPtr(JITCompiler::Address(value.gpr(), JSCell::structureOffset()), result.gpr());
4246 m_jit.compare8(JITCompiler::Equal, JITCompiler::Address(result.gpr(), Structure::typeInfoTypeOffset()), TrustedImm32(StringType), result.gpr());
4247 m_jit.or32(TrustedImm32(ValueFalse), result.gpr());
4248 JITCompiler::Jump done = m_jit.jump();
4249
4250 isNotCell.link(&m_jit);
4251 m_jit.move(TrustedImm32(ValueFalse), result.gpr());
4252
4253 done.link(&m_jit);
4254 jsValueResult(result.gpr(), node, DataFormatJSBoolean);
4255 break;
4256 }
4257
4258 case IsObject: {
4259 JSValueOperand value(this, node->child1());
4260 GPRReg valueGPR = value.gpr();
4261 GPRResult result(this);
4262 GPRReg resultGPR = result.gpr();
4263 flushRegisters();
4264 callOperation(operationIsObject, resultGPR, valueGPR);
4265 m_jit.or32(TrustedImm32(ValueFalse), resultGPR);
4266 jsValueResult(result.gpr(), node, DataFormatJSBoolean);
4267 break;
4268 }
4269
4270 case IsFunction: {
4271 JSValueOperand value(this, node->child1());
4272 GPRReg valueGPR = value.gpr();
4273 GPRResult result(this);
4274 GPRReg resultGPR = result.gpr();
4275 flushRegisters();
4276 callOperation(operationIsFunction, resultGPR, valueGPR);
4277 m_jit.or32(TrustedImm32(ValueFalse), resultGPR);
4278 jsValueResult(result.gpr(), node, DataFormatJSBoolean);
4279 break;
4280 }
4281
4282 case TypeOf: {
4283 JSValueOperand value(this, node->child1(), ManualOperandSpeculation);
4284 GPRReg valueGPR = value.gpr();
4285 GPRTemporary temp(this);
4286 GPRReg tempGPR = temp.gpr();
4287 GPRResult result(this);
4288 GPRReg resultGPR = result.gpr();
4289 JITCompiler::JumpList doneJumps;
4290
4291 flushRegisters();
4292
4293 ASSERT(node->child1().useKind() == UntypedUse || node->child1().useKind() == CellUse || node->child1().useKind() == StringUse);
4294
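// For cells we either inline the string check (answering "string") or defer everything
// to operationTypeOf. In the untyped case, non-cells are then classified by their tag
// bits: number, undefined, null (which typeof reports as "object"), and finally boolean.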
4295 JITCompiler::Jump isNotCell = m_jit.branchTest64(JITCompiler::NonZero, valueGPR, GPRInfo::tagMaskRegister);
4296 if (node->child1().useKind() != UntypedUse)
4297 DFG_TYPE_CHECK(JSValueSource(valueGPR), node->child1(), SpecCell, isNotCell);
4298
4299 if (!node->child1()->shouldSpeculateObject() || node->child1().useKind() == StringUse) {
4300 m_jit.loadPtr(JITCompiler::Address(valueGPR, JSCell::structureOffset()), tempGPR);
4301 JITCompiler::Jump notString = m_jit.branch8(JITCompiler::NotEqual, JITCompiler::Address(tempGPR, Structure::typeInfoTypeOffset()), TrustedImm32(StringType));
4302 if (node->child1().useKind() == StringUse)
4303 DFG_TYPE_CHECK(JSValueSource(valueGPR), node->child1(), SpecString, notString);
4304 m_jit.move(TrustedImmPtr(m_jit.vm()->smallStrings.stringString()), resultGPR);
4305 doneJumps.append(m_jit.jump());
4306 if (node->child1().useKind() != StringUse) {
4307 notString.link(&m_jit);
4308 callOperation(operationTypeOf, resultGPR, valueGPR);
4309 doneJumps.append(m_jit.jump());
4310 }
4311 } else {
4312 callOperation(operationTypeOf, resultGPR, valueGPR);
4313 doneJumps.append(m_jit.jump());
4314 }
4315
4316 if (node->child1().useKind() == UntypedUse) {
4317 isNotCell.link(&m_jit);
4318 JITCompiler::Jump notNumber = m_jit.branchTest64(JITCompiler::Zero, valueGPR, GPRInfo::tagTypeNumberRegister);
4319 m_jit.move(TrustedImmPtr(m_jit.vm()->smallStrings.numberString()), resultGPR);
4320 doneJumps.append(m_jit.jump());
4321 notNumber.link(&m_jit);
4322
4323 JITCompiler::Jump notUndefined = m_jit.branch64(JITCompiler::NotEqual, valueGPR, JITCompiler::TrustedImm64(ValueUndefined));
4324 m_jit.move(TrustedImmPtr(m_jit.vm()->smallStrings.undefinedString()), resultGPR);
4325 doneJumps.append(m_jit.jump());
4326 notUndefined.link(&m_jit);
4327
4328 JITCompiler::Jump notNull = m_jit.branch64(JITCompiler::NotEqual, valueGPR, JITCompiler::TrustedImm64(ValueNull));
4329 m_jit.move(TrustedImmPtr(m_jit.vm()->smallStrings.objectString()), resultGPR);
4330 doneJumps.append(m_jit.jump());
4331 notNull.link(&m_jit);
4332
4333 // Only boolean left
4334 m_jit.move(TrustedImmPtr(m_jit.vm()->smallStrings.booleanString()), resultGPR);
4335 }
4336 doneJumps.link(&m_jit);
4337 cellResult(resultGPR, node);
4338 break;
4339 }
4340
4341 case Flush:
4342 case Phi:
4343 break;
4344
4345 case Breakpoint:
4346 #if ENABLE(DEBUG_WITH_BREAKPOINT)
4347 m_jit.breakpoint();
4348 #else
4349 RELEASE_ASSERT_NOT_REACHED();
4350 #endif
4351 break;
4352
4353 case Call:
4354 case Construct:
4355 emitCall(node);
4356 break;
4357
4358 case Resolve: {
4359 flushRegisters();
4360 GPRResult result(this);
4361 ResolveOperationData& data = m_jit.graph().m_resolveOperationsData[node->resolveOperationsDataIndex()];
4362 callOperation(operationResolve, result.gpr(), identifier(data.identifierNumber), data.resolveOperations);
4363 jsValueResult(result.gpr(), node);
4364 break;
4365 }
4366
4367 case ResolveBase: {
4368 flushRegisters();
4369 GPRResult result(this);
4370 ResolveOperationData& data = m_jit.graph().m_resolveOperationsData[node->resolveOperationsDataIndex()];
4371 callOperation(operationResolveBase, result.gpr(), identifier(data.identifierNumber), data.resolveOperations, data.putToBaseOperation);
4372 jsValueResult(result.gpr(), node);
4373 break;
4374 }
4375
4376 case ResolveBaseStrictPut: {
4377 flushRegisters();
4378 GPRResult result(this);
4379 ResolveOperationData& data = m_jit.graph().m_resolveOperationsData[node->resolveOperationsDataIndex()];
4380 callOperation(operationResolveBaseStrictPut, result.gpr(), identifier(data.identifierNumber), data.resolveOperations, data.putToBaseOperation);
4381 jsValueResult(result.gpr(), node);
4382 break;
4383 }
4384
4385 case ResolveGlobal: {
4386 GPRTemporary globalObject(this);
4387 GPRTemporary resolveInfo(this);
4388 GPRTemporary result(this);
4389
4390 GPRReg globalObjectGPR = globalObject.gpr();
4391 GPRReg resolveInfoGPR = resolveInfo.gpr();
4392 GPRReg resultGPR = result.gpr();
4393
4394 ResolveGlobalData& data = m_jit.graph().m_resolveGlobalData[node->resolveGlobalDataIndex()];
4395 ResolveOperation* resolveOperationAddress = &(data.resolveOperations->data()[data.resolvePropertyIndex]);
4396
4397 // Check Structure of global object
4398 m_jit.move(JITCompiler::TrustedImmPtr(m_jit.globalObjectFor(node->codeOrigin)), globalObjectGPR);
4399 m_jit.move(JITCompiler::TrustedImmPtr(resolveOperationAddress), resolveInfoGPR);
4400 m_jit.loadPtr(JITCompiler::Address(resolveInfoGPR, OBJECT_OFFSETOF(ResolveOperation, m_structure)), resultGPR);
4401 JITCompiler::Jump structuresDontMatch = m_jit.branchPtr(JITCompiler::NotEqual, resultGPR, JITCompiler::Address(globalObjectGPR, JSCell::structureOffset()));
4402
4403 // Fast case
4404 m_jit.load32(JITCompiler::Address(resolveInfoGPR, OBJECT_OFFSETOF(ResolveOperation, m_offset)), resolveInfoGPR);
4405 #if DFG_ENABLE(JIT_ASSERT)
4406 JITCompiler::Jump isOutOfLine = m_jit.branch32(JITCompiler::GreaterThanOrEqual, resolveInfoGPR, TrustedImm32(firstOutOfLineOffset));
4407 m_jit.breakpoint();
4408 isOutOfLine.link(&m_jit);
4409 #endif
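// The cached offset is an out-of-line property offset (asserted above when JIT_ASSERT is
// enabled), and out-of-line properties live at negative indices relative to the butterfly,
// hence the negate-and-sign-extend before indexing.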
4410 m_jit.neg32(resolveInfoGPR);
4411 m_jit.signExtend32ToPtr(resolveInfoGPR, resolveInfoGPR);
4412 m_jit.loadPtr(JITCompiler::Address(globalObjectGPR, JSObject::butterflyOffset()), resultGPR);
4413 m_jit.load64(JITCompiler::BaseIndex(resultGPR, resolveInfoGPR, JITCompiler::TimesEight, (firstOutOfLineOffset - 2) * static_cast<ptrdiff_t>(sizeof(JSValue))), resultGPR);
4414
4415 addSlowPathGenerator(
4416 slowPathCall(
4417 structuresDontMatch, this, operationResolveGlobal,
4418 resultGPR, resolveInfoGPR, globalObjectGPR,
4419 &m_jit.codeBlock()->identifier(data.identifierNumber)));
4420
4421 jsValueResult(resultGPR, node);
4422 break;
4423 }
4424
4425 case CreateActivation: {
4426 RELEASE_ASSERT(!node->codeOrigin.inlineCallFrame);
4427
4428 JSValueOperand value(this, node->child1());
4429 GPRTemporary result(this, value);
4430
4431 GPRReg valueGPR = value.gpr();
4432 GPRReg resultGPR = result.gpr();
4433
4434 m_jit.move(valueGPR, resultGPR);
4435
4436 JITCompiler::Jump notCreated = m_jit.branchTest64(JITCompiler::Zero, resultGPR);
4437
4438 addSlowPathGenerator(
4439 slowPathCall(notCreated, this, operationCreateActivation, resultGPR));
4440
4441 cellResult(resultGPR, node);
4442 break;
4443 }
4444
4445 case CreateArguments: {
4446 JSValueOperand value(this, node->child1());
4447 GPRTemporary result(this, value);
4448
4449 GPRReg valueGPR = value.gpr();
4450 GPRReg resultGPR = result.gpr();
4451
4452 m_jit.move(valueGPR, resultGPR);
4453
4454 JITCompiler::Jump notCreated = m_jit.branchTest64(JITCompiler::Zero, resultGPR);
4455
4456 if (node->codeOrigin.inlineCallFrame) {
4457 addSlowPathGenerator(
4458 slowPathCall(
4459 notCreated, this, operationCreateInlinedArguments, resultGPR,
4460 node->codeOrigin.inlineCallFrame));
4461 } else {
4462 addSlowPathGenerator(
4463 slowPathCall(notCreated, this, operationCreateArguments, resultGPR));
4464 }
4465
4466 cellResult(resultGPR, node);
4467 break;
4468 }
4469
4470 case TearOffActivation: {
4471 RELEASE_ASSERT(!node->codeOrigin.inlineCallFrame);
4472
4473 JSValueOperand activationValue(this, node->child1());
4474 GPRTemporary scratch(this);
4475 GPRReg activationValueGPR = activationValue.gpr();
4476 GPRReg scratchGPR = scratch.gpr();
4477
4478 JITCompiler::Jump notCreated = m_jit.branchTest64(JITCompiler::Zero, activationValueGPR);
4479
4480 SharedSymbolTable* symbolTable = m_jit.symbolTableFor(node->codeOrigin);
4481 int registersOffset = JSActivation::registersOffset(symbolTable);
4482
4483 int captureEnd = symbolTable->captureEnd();
4484 for (int i = symbolTable->captureStart(); i < captureEnd; ++i) {
4485 m_jit.load64(
4486 JITCompiler::Address(
4487 GPRInfo::callFrameRegister, i * sizeof(Register)), scratchGPR);
4488 m_jit.store64(
4489 scratchGPR, JITCompiler::Address(
4490 activationValueGPR, registersOffset + i * sizeof(Register)));
4491 }
4492 m_jit.addPtr(TrustedImm32(registersOffset), activationValueGPR, scratchGPR);
4493 m_jit.storePtr(scratchGPR, JITCompiler::Address(activationValueGPR, JSActivation::offsetOfRegisters()));
4494
4495 notCreated.link(&m_jit);
4496 noResult(node);
4497 break;
4498 }
4499
4500 case TearOffArguments: {
4501 JSValueOperand unmodifiedArgumentsValue(this, node->child1());
4502 JSValueOperand activationValue(this, node->child2());
4503 GPRReg unmodifiedArgumentsValueGPR = unmodifiedArgumentsValue.gpr();
4504 GPRReg activationValueGPR = activationValue.gpr();
4505
4506 JITCompiler::Jump created = m_jit.branchTest64(JITCompiler::NonZero, unmodifiedArgumentsValueGPR);
4507
4508 if (node->codeOrigin.inlineCallFrame) {
4509 addSlowPathGenerator(
4510 slowPathCall(
4511 created, this, operationTearOffInlinedArguments, NoResult,
4512 unmodifiedArgumentsValueGPR, activationValueGPR, node->codeOrigin.inlineCallFrame));
4513 } else {
4514 addSlowPathGenerator(
4515 slowPathCall(
4516 created, this, operationTearOffArguments, NoResult, unmodifiedArgumentsValueGPR, activationValueGPR));
4517 }
4518
4519 noResult(node);
4520 break;
4521 }
4522
4523 case GetMyArgumentsLength: {
4524 GPRTemporary result(this);
4525 GPRReg resultGPR = result.gpr();
4526
4527 if (!isEmptySpeculation(
4528 m_state.variables().operand(
4529 m_jit.graph().argumentsRegisterFor(node->codeOrigin)).m_type)) {
4530 speculationCheck(
4531 ArgumentsEscaped, JSValueRegs(), 0,
4532 m_jit.branchTest64(
4533 JITCompiler::NonZero,
4534 JITCompiler::addressFor(
4535 m_jit.argumentsRegisterFor(node->codeOrigin))));
4536 }
4537
4538 RELEASE_ASSERT(!node->codeOrigin.inlineCallFrame);
4539 m_jit.load32(JITCompiler::payloadFor(JSStack::ArgumentCount), resultGPR);
4540 m_jit.sub32(TrustedImm32(1), resultGPR);
4541 integerResult(resultGPR, node);
4542 break;
4543 }
4544
4545 case GetMyArgumentsLengthSafe: {
4546 GPRTemporary result(this);
4547 GPRReg resultGPR = result.gpr();
4548
4549 JITCompiler::Jump created = m_jit.branchTest64(
4550 JITCompiler::NonZero,
4551 JITCompiler::addressFor(
4552 m_jit.argumentsRegisterFor(node->codeOrigin)));
4553
4554 if (node->codeOrigin.inlineCallFrame) {
4555 m_jit.move(
4556 Imm64(JSValue::encode(jsNumber(node->codeOrigin.inlineCallFrame->arguments.size() - 1))),
4557 resultGPR);
4558 } else {
4559 m_jit.load32(JITCompiler::payloadFor(JSStack::ArgumentCount), resultGPR);
4560 m_jit.sub32(TrustedImm32(1), resultGPR);
4561 m_jit.or64(GPRInfo::tagTypeNumberRegister, resultGPR);
4562 }
4563
4564 // FIXME: the slow path generator should perform a forward speculation that the
4565 // result is an integer. For now we postpone the speculation by having this return
4566 // a JSValue.
4567
4568 addSlowPathGenerator(
4569 slowPathCall(
4570 created, this, operationGetArgumentsLength, resultGPR,
4571 m_jit.argumentsRegisterFor(node->codeOrigin)));
4572
4573 jsValueResult(resultGPR, node);
4574 break;
4575 }
4576
4577 case GetMyArgumentByVal: {
4578 SpeculateStrictInt32Operand index(this, node->child1());
4579 GPRTemporary result(this);
4580 GPRReg indexGPR = index.gpr();
4581 GPRReg resultGPR = result.gpr();
4582
4583 if (!isEmptySpeculation(
4584 m_state.variables().operand(
4585 m_jit.graph().argumentsRegisterFor(node->codeOrigin)).m_type)) {
4586 speculationCheck(
4587 ArgumentsEscaped, JSValueRegs(), 0,
4588 m_jit.branchTest64(
4589 JITCompiler::NonZero,
4590 JITCompiler::addressFor(
4591 m_jit.argumentsRegisterFor(node->codeOrigin))));
4592 }
4593
4594 m_jit.add32(TrustedImm32(1), indexGPR, resultGPR);
4595 if (node->codeOrigin.inlineCallFrame) {
4596 speculationCheck(
4597 Uncountable, JSValueRegs(), 0,
4598 m_jit.branch32(
4599 JITCompiler::AboveOrEqual,
4600 resultGPR,
4601 Imm32(node->codeOrigin.inlineCallFrame->arguments.size())));
4602 } else {
4603 speculationCheck(
4604 Uncountable, JSValueRegs(), 0,
4605 m_jit.branch32(
4606 JITCompiler::AboveOrEqual,
4607 resultGPR,
4608 JITCompiler::payloadFor(JSStack::ArgumentCount)));
4609 }
4610
4611 JITCompiler::JumpList slowArgument;
4612 JITCompiler::JumpList slowArgumentOutOfBounds;
4613 if (const SlowArgument* slowArguments = m_jit.symbolTableFor(node->codeOrigin)->slowArguments()) {
4614 slowArgumentOutOfBounds.append(
4615 m_jit.branch32(
4616 JITCompiler::AboveOrEqual, indexGPR,
4617 Imm32(m_jit.symbolTableFor(node->codeOrigin)->parameterCount())));
4618
4619 COMPILE_ASSERT(sizeof(SlowArgument) == 8, SlowArgument_size_is_eight_bytes);
4620 m_jit.move(ImmPtr(slowArguments), resultGPR);
4621 m_jit.load32(
4622 JITCompiler::BaseIndex(
4623 resultGPR, indexGPR, JITCompiler::TimesEight,
4624 OBJECT_OFFSETOF(SlowArgument, index)),
4625 resultGPR);
4626 m_jit.signExtend32ToPtr(resultGPR, resultGPR);
4627 m_jit.load64(
4628 JITCompiler::BaseIndex(
4629 GPRInfo::callFrameRegister, resultGPR, JITCompiler::TimesEight, m_jit.offsetOfLocals(node->codeOrigin)),
4630 resultGPR);
4631 slowArgument.append(m_jit.jump());
4632 }
4633 slowArgumentOutOfBounds.link(&m_jit);
4634
4635 m_jit.neg32(resultGPR);
4636 m_jit.signExtend32ToPtr(resultGPR, resultGPR);
4637
4638 m_jit.load64(
4639 JITCompiler::BaseIndex(
4640 GPRInfo::callFrameRegister, resultGPR, JITCompiler::TimesEight, m_jit.offsetOfArgumentsIncludingThis(node->codeOrigin)),
4641 resultGPR);
4642
4643 slowArgument.link(&m_jit);
4644 jsValueResult(resultGPR, node);
4645 break;
4646 }
4647
4648 case GetMyArgumentByValSafe: {
4649 SpeculateStrictInt32Operand index(this, node->child1());
4650 GPRTemporary result(this);
4651 GPRReg indexGPR = index.gpr();
4652 GPRReg resultGPR = result.gpr();
4653
4654 JITCompiler::JumpList slowPath;
4655 slowPath.append(
4656 m_jit.branchTest64(
4657 JITCompiler::NonZero,
4658 JITCompiler::addressFor(
4659 m_jit.argumentsRegisterFor(node->codeOrigin))));
4660
4661 m_jit.add32(TrustedImm32(1), indexGPR, resultGPR);
4662 if (node->codeOrigin.inlineCallFrame) {
4663 slowPath.append(
4664 m_jit.branch32(
4665 JITCompiler::AboveOrEqual,
4666 resultGPR,
4667 Imm32(node->codeOrigin.inlineCallFrame->arguments.size())));
4668 } else {
4669 slowPath.append(
4670 m_jit.branch32(
4671 JITCompiler::AboveOrEqual,
4672 resultGPR,
4673 JITCompiler::payloadFor(JSStack::ArgumentCount)));
4674 }
4675
4676 JITCompiler::JumpList slowArgument;
4677 JITCompiler::JumpList slowArgumentOutOfBounds;
4678 if (const SlowArgument* slowArguments = m_jit.symbolTableFor(node->codeOrigin)->slowArguments()) {
4679 slowArgumentOutOfBounds.append(
4680 m_jit.branch32(
4681 JITCompiler::AboveOrEqual, indexGPR,
4682 Imm32(m_jit.symbolTableFor(node->codeOrigin)->parameterCount())));
4683
4684 COMPILE_ASSERT(sizeof(SlowArgument) == 8, SlowArgument_size_is_eight_bytes);
4685 m_jit.move(ImmPtr(slowArguments), resultGPR);
4686 m_jit.load32(
4687 JITCompiler::BaseIndex(
4688 resultGPR, indexGPR, JITCompiler::TimesEight,
4689 OBJECT_OFFSETOF(SlowArgument, index)),
4690 resultGPR);
4691 m_jit.signExtend32ToPtr(resultGPR, resultGPR);
4692 m_jit.load64(
4693 JITCompiler::BaseIndex(
4694 GPRInfo::callFrameRegister, resultGPR, JITCompiler::TimesEight, m_jit.offsetOfLocals(node->codeOrigin)),
4695 resultGPR);
4696 slowArgument.append(m_jit.jump());
4697 }
4698 slowArgumentOutOfBounds.link(&m_jit);
4699
4700 m_jit.neg32(resultGPR);
4701 m_jit.signExtend32ToPtr(resultGPR, resultGPR);
4702
4703 m_jit.load64(
4704 JITCompiler::BaseIndex(
4705 GPRInfo::callFrameRegister, resultGPR, JITCompiler::TimesEight, m_jit.offsetOfArgumentsIncludingThis(node->codeOrigin)),
4706 resultGPR);
4707
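        // The slow path goes through the runtime: operationGetInlinedArgumentByVal for an
        // inlined frame (which also needs the InlineCallFrame), operationGetArgumentByVal
        // otherwise.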
4708 if (node->codeOrigin.inlineCallFrame) {
4709 addSlowPathGenerator(
4710 slowPathCall(
4711 slowPath, this, operationGetInlinedArgumentByVal, resultGPR,
4712 m_jit.argumentsRegisterFor(node->codeOrigin),
4713 node->codeOrigin.inlineCallFrame,
4714 indexGPR));
4715 } else {
4716 addSlowPathGenerator(
4717 slowPathCall(
4718 slowPath, this, operationGetArgumentByVal, resultGPR,
4719 m_jit.argumentsRegisterFor(node->codeOrigin),
4720 indexGPR));
4721 }
4722
4723 slowArgument.link(&m_jit);
4724 jsValueResult(resultGPR, node);
4725 break;
4726 }
4727
4728 case CheckArgumentsNotCreated: {
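        // The arguments object must not have been materialized for this frame; OSR exit
        // (ArgumentsEscaped) if the arguments register is non-empty.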
4729 ASSERT(!isEmptySpeculation(
4730 m_state.variables().operand(
4731 m_jit.graph().argumentsRegisterFor(node->codeOrigin)).m_type));
4732 speculationCheck(
4733 ArgumentsEscaped, JSValueRegs(), 0,
4734 m_jit.branchTest64(
4735 JITCompiler::NonZero,
4736 JITCompiler::addressFor(
4737 m_jit.argumentsRegisterFor(node->codeOrigin))));
4738 noResult(node);
4739 break;
4740 }
4741
4742 case NewFunctionNoCheck:
4743 compileNewFunctionNoCheck(node);
4744 break;
4745
4746 case NewFunction: {
4747 JSValueOperand value(this, node->child1());
4748 GPRTemporary result(this, value);
4749
4750 GPRReg valueGPR = value.gpr();
4751 GPRReg resultGPR = result.gpr();
4752
4753 m_jit.move(valueGPR, resultGPR);
4754
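        // An empty (zero) value means the function object has not been created yet; the
        // slow path allocates it via operationNewFunction from the declared FunctionExecutable.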
4755 JITCompiler::Jump notCreated = m_jit.branchTest64(JITCompiler::Zero, resultGPR);
4756
4757 addSlowPathGenerator(
4758 slowPathCall(
4759 notCreated, this, operationNewFunction,
4760 resultGPR, m_jit.codeBlock()->functionDecl(node->functionDeclIndex())));
4761
4762 jsValueResult(resultGPR, node);
4763 break;
4764 }
4765
4766 case NewFunctionExpression:
4767 compileNewFunctionExpression(node);
4768 break;
4769
4770 case CountExecution:
4771 m_jit.add64(TrustedImm32(1), MacroAssembler::AbsoluteAddress(node->executionCounter()->address()));
4772 break;
4773
4774 case GarbageValue:
4775                 // We should never reach code emission for a GarbageValue node.
4776 CRASH();
4777 break;
4778
4779 case ForceOSRExit: {
4780 terminateSpeculativeExecution(InadequateCoverage, JSValueRegs(), 0);
4781 break;
4782 }
4783
4784 case CheckWatchdogTimer:
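        // OSR exit when the watchdog's timer-did-fire flag is set, so that long-running
        // DFG code can be interrupted by the watchdog.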
4785 speculationCheck(
4786 WatchdogTimerFired, JSValueRegs(), 0,
4787 m_jit.branchTest8(
4788 JITCompiler::NonZero,
4789 JITCompiler::AbsoluteAddress(m_jit.vm()->watchdog.timerDidFireAddress())));
4790 break;
4791
4792 case Phantom:
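        // Phantom performs whatever type checks its edges require, but produces no value.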
4793 DFG_NODE_DO_TO_CHILDREN(m_jit.graph(), node, speculate);
4794 noResult(node);
4795 break;
4796
4797 case PhantomLocal:
4798 // This is a no-op.
4799 noResult(node);
4800 break;
4801
4802 case Nop:
4803 RELEASE_ASSERT_NOT_REACHED();
4804 break;
4805
4806 case LastNodeType:
4807 RELEASE_ASSERT_NOT_REACHED();
4808 break;
4809 }
4810
4811 #if ENABLE(DFG_REGISTER_ALLOCATION_VALIDATION)
4812 m_jit.clearRegisterAllocationOffsets();
4813 #endif
4814
4815 if (!m_compileOkay)
4816 return;
4817
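    // A node flagged mustGenerate is kept alive independently of its users, so once it has
    // been emitted we consume the extra reference to its result here.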
4818 if (node->hasResult() && node->mustGenerate())
4819 use(node);
4820 }
4821
4822 #endif
4823
4824 } } // namespace JSC::DFG
4825
4826 #endif