apple/javascriptcore.git: dfg/DFGSpeculativeJIT64.cpp (JavaScriptCore-1218.34)
1 /*
2 * Copyright (C) 2011, 2012, 2013 Apple Inc. All rights reserved.
3 *
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
6 * are met:
7 * 1. Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * 2. Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the
11 * documentation and/or other materials provided with the distribution.
12 *
13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
17 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24 */
25
26 #include "config.h"
27 #include "DFGSpeculativeJIT.h"
28
29 #if ENABLE(DFG_JIT)
30
31 #include "Arguments.h"
32 #include "ArrayPrototype.h"
33 #include "DFGCallArrayAllocatorSlowPathGenerator.h"
34 #include "DFGSlowPathGenerator.h"
35 #include "JSCJSValueInlines.h"
36 #include "ObjectPrototype.h"
37
38 namespace JSC { namespace DFG {
39
40 #if USE(JSVALUE64)
41
42 GPRReg SpeculativeJIT::fillInteger(Edge edge, DataFormat& returnFormat)
43 {
44 ASSERT(!needsTypeCheck(edge, SpecInt32));
45
46 VirtualRegister virtualRegister = edge->virtualRegister();
47 GenerationInfo& info = m_generationInfo[virtualRegister];
48
49 if (info.registerFormat() == DataFormatNone) {
50 GPRReg gpr = allocate();
51
52 if (edge->hasConstant()) {
53 m_gprs.retain(gpr, virtualRegister, SpillOrderConstant);
54 if (isInt32Constant(edge.node())) {
55 m_jit.move(MacroAssembler::Imm32(valueOfInt32Constant(edge.node())), gpr);
56 info.fillInteger(*m_stream, gpr);
57 returnFormat = DataFormatInteger;
58 return gpr;
59 }
60 if (isNumberConstant(edge.node())) {
61 JSValue jsValue = jsNumber(valueOfNumberConstant(edge.node()));
62 m_jit.move(MacroAssembler::Imm64(JSValue::encode(jsValue)), gpr);
63 } else {
64 ASSERT(isJSConstant(edge.node()));
65 JSValue jsValue = valueOfJSConstant(edge.node());
66 m_jit.move(MacroAssembler::TrustedImm64(JSValue::encode(jsValue)), gpr);
67 }
68 } else if (info.spillFormat() == DataFormatInteger) {
69 m_gprs.retain(gpr, virtualRegister, SpillOrderSpilled);
70 m_jit.load32(JITCompiler::payloadFor(virtualRegister), gpr);
71 // Tag it, since fillInteger() is used when we want a boxed integer.
72 m_jit.or64(GPRInfo::tagTypeNumberRegister, gpr);
73 } else {
74 RELEASE_ASSERT(info.spillFormat() == DataFormatJS || info.spillFormat() == DataFormatJSInteger);
75 m_gprs.retain(gpr, virtualRegister, SpillOrderSpilled);
76 m_jit.load64(JITCompiler::addressFor(virtualRegister), gpr);
77 }
78
79 // Since we statically know that we're filling an integer, and values
80 // in the JSStack are boxed, this must be DataFormatJSInteger.
81 // We will check this with a jitAssert below.
82 info.fillJSValue(*m_stream, gpr, DataFormatJSInteger);
83 unlock(gpr);
84 }
85
86 switch (info.registerFormat()) {
87 case DataFormatNone:
88 // Should have filled, above.
89 case DataFormatJSDouble:
90 case DataFormatDouble:
91 case DataFormatJS:
92 case DataFormatCell:
93 case DataFormatJSCell:
94 case DataFormatBoolean:
95 case DataFormatJSBoolean:
96 case DataFormatStorage:
97 // Should only be calling this function if we know this operand to be integer.
98 RELEASE_ASSERT_NOT_REACHED();
99
100 case DataFormatJSInteger: {
101 GPRReg gpr = info.gpr();
102 m_gprs.lock(gpr);
103 m_jit.jitAssertIsJSInt32(gpr);
104 returnFormat = DataFormatJSInteger;
105 return gpr;
106 }
107
108 case DataFormatInteger: {
109 GPRReg gpr = info.gpr();
110 m_gprs.lock(gpr);
111 m_jit.jitAssertIsInt32(gpr);
112 returnFormat = DataFormatInteger;
113 return gpr;
114 }
115
116 default:
117 RELEASE_ASSERT_NOT_REACHED();
118 return InvalidGPRReg;
119 }
120 }
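// Background sketch of the JSVALUE64 number encoding that the fills above and below
// rely on. The exact constants are the standard 64-bit JSValue encoding and are stated
// here as an assumption for readability, not read out of this file:
//
//     tagTypeNumberRegister = 0xFFFF000000000000
//     boxed int32  = tagTypeNumber | zeroExtend32(value)          (a single or64)
//     boxed double = doubleBits - tagTypeNumber                   (== doubleBits + 2^48)
//
// Consequently "value unsigned-compares >= tagTypeNumber" means "boxed int32", and
// "value & tagTypeNumber != 0" means "some kind of number".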
121
122 GPRReg SpeculativeJIT::fillJSValue(Edge edge)
123 {
124 VirtualRegister virtualRegister = edge->virtualRegister();
125 GenerationInfo& info = m_generationInfo[virtualRegister];
126
127 switch (info.registerFormat()) {
128 case DataFormatNone: {
129 GPRReg gpr = allocate();
130
131 if (edge->hasConstant()) {
132 if (isInt32Constant(edge.node())) {
133 info.fillJSValue(*m_stream, gpr, DataFormatJSInteger);
134 JSValue jsValue = jsNumber(valueOfInt32Constant(edge.node()));
135 m_jit.move(MacroAssembler::Imm64(JSValue::encode(jsValue)), gpr);
136 } else if (isNumberConstant(edge.node())) {
137 info.fillJSValue(*m_stream, gpr, DataFormatJSDouble);
138 JSValue jsValue(JSValue::EncodeAsDouble, valueOfNumberConstant(edge.node()));
139 m_jit.move(MacroAssembler::Imm64(JSValue::encode(jsValue)), gpr);
140 } else {
141 ASSERT(isJSConstant(edge.node()));
142 JSValue jsValue = valueOfJSConstant(edge.node());
143 m_jit.move(MacroAssembler::TrustedImm64(JSValue::encode(jsValue)), gpr);
144 info.fillJSValue(*m_stream, gpr, DataFormatJS);
145 }
146
147 m_gprs.retain(gpr, virtualRegister, SpillOrderConstant);
148 } else {
149 DataFormat spillFormat = info.spillFormat();
150 m_gprs.retain(gpr, virtualRegister, SpillOrderSpilled);
151 if (spillFormat == DataFormatInteger) {
152 m_jit.load32(JITCompiler::addressFor(virtualRegister), gpr);
153 m_jit.or64(GPRInfo::tagTypeNumberRegister, gpr);
154 spillFormat = DataFormatJSInteger;
155 } else {
156 m_jit.load64(JITCompiler::addressFor(virtualRegister), gpr);
157 if (spillFormat == DataFormatDouble) {
158 // Need to box the double, since we want a JSValue.
159 m_jit.sub64(GPRInfo::tagTypeNumberRegister, gpr);
160 spillFormat = DataFormatJSDouble;
161 } else
162 RELEASE_ASSERT(spillFormat & DataFormatJS);
163 }
164 info.fillJSValue(*m_stream, gpr, spillFormat);
165 }
166 return gpr;
167 }
168
169 case DataFormatInteger: {
170 GPRReg gpr = info.gpr();
171 // If the register has already been locked we need to take a copy.
172 // If not, we'll tag the value in place, so mark on the info that this is now type DataFormatJSInteger, not DataFormatInteger.
173 if (m_gprs.isLocked(gpr)) {
174 GPRReg result = allocate();
175 m_jit.or64(GPRInfo::tagTypeNumberRegister, gpr, result);
176 return result;
177 }
178 m_gprs.lock(gpr);
179 m_jit.or64(GPRInfo::tagTypeNumberRegister, gpr);
180 info.fillJSValue(*m_stream, gpr, DataFormatJSInteger);
181 return gpr;
182 }
183
184 case DataFormatDouble: {
185 FPRReg fpr = info.fpr();
186 GPRReg gpr = boxDouble(fpr);
187
188 // Update all info
189 info.fillJSValue(*m_stream, gpr, DataFormatJSDouble);
190 m_fprs.release(fpr);
191 m_gprs.retain(gpr, virtualRegister, SpillOrderJS);
192
193 return gpr;
194 }
195
196 case DataFormatCell:
197 // No retag required on JSVALUE64!
198 case DataFormatJS:
199 case DataFormatJSInteger:
200 case DataFormatJSDouble:
201 case DataFormatJSCell:
202 case DataFormatJSBoolean: {
203 GPRReg gpr = info.gpr();
204 m_gprs.lock(gpr);
205 return gpr;
206 }
207
208 case DataFormatBoolean:
209 case DataFormatStorage:
210 // this type currently never occurs
211 RELEASE_ASSERT_NOT_REACHED();
212
213 default:
214 RELEASE_ASSERT_NOT_REACHED();
215 return InvalidGPRReg;
216 }
217 }
218
219 void SpeculativeJIT::nonSpeculativeUInt32ToNumber(Node* node)
220 {
221 IntegerOperand op1(this, node->child1());
222 FPRTemporary boxer(this);
223 GPRTemporary result(this, op1);
224
225 JITCompiler::Jump positive = m_jit.branch32(MacroAssembler::GreaterThanOrEqual, op1.gpr(), TrustedImm32(0));
226
227 m_jit.convertInt32ToDouble(op1.gpr(), boxer.fpr());
228 m_jit.addDouble(JITCompiler::AbsoluteAddress(&AssemblyHelpers::twoToThe32), boxer.fpr());
229
230 boxDouble(boxer.fpr(), result.gpr());
231
232 JITCompiler::Jump done = m_jit.jump();
233
234 positive.link(&m_jit);
235
236 m_jit.or64(GPRInfo::tagTypeNumberRegister, op1.gpr(), result.gpr());
237
238 done.link(&m_jit);
239
240 jsValueResult(result.gpr(), m_currentNode);
241 }
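// How the uint32 -> number conversion above works: the operand holds 32 bits that should
// be read as unsigned. If the sign bit is clear the value is already a valid int32 and is
// boxed directly with or64(tagTypeNumber). If the sign bit is set, reading it as signed
// yields value - 2^32, so the code converts that signed value to double and adds 2^32
// back; for example 0xFFFFFFFF becomes -1.0 + 4294967296.0 = 4294967295.0, boxed as a
// double.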
242
243 void SpeculativeJIT::cachedGetById(CodeOrigin codeOrigin, GPRReg baseGPR, GPRReg resultGPR, unsigned identifierNumber, JITCompiler::Jump slowPathTarget, SpillRegistersMode spillMode)
244 {
245 JITCompiler::DataLabelPtr structureToCompare;
246 JITCompiler::PatchableJump structureCheck = m_jit.patchableBranchPtrWithPatch(JITCompiler::NotEqual, JITCompiler::Address(baseGPR, JSCell::structureOffset()), structureToCompare, JITCompiler::TrustedImmPtr(reinterpret_cast<void*>(unusedPointer)));
247
248 JITCompiler::ConvertibleLoadLabel propertyStorageLoad =
249 m_jit.convertibleLoadPtr(JITCompiler::Address(baseGPR, JSObject::butterflyOffset()), resultGPR);
250 JITCompiler::DataLabelCompact loadWithPatch = m_jit.load64WithCompactAddressOffsetPatch(JITCompiler::Address(resultGPR, 0), resultGPR);
251
252 JITCompiler::Label doneLabel = m_jit.label();
253
254 OwnPtr<SlowPathGenerator> slowPath;
255 if (!slowPathTarget.isSet()) {
256 slowPath = slowPathCall(
257 structureCheck.m_jump, this, operationGetByIdOptimize, resultGPR, baseGPR,
258 identifier(identifierNumber), spillMode);
259 } else {
260 JITCompiler::JumpList slowCases;
261 slowCases.append(structureCheck.m_jump);
262 slowCases.append(slowPathTarget);
263 slowPath = slowPathCall(
264 slowCases, this, operationGetByIdOptimize, resultGPR, baseGPR,
265 identifier(identifierNumber), spillMode);
266 }
267 m_jit.addPropertyAccess(
268 PropertyAccessRecord(
269 codeOrigin, structureToCompare, structureCheck, propertyStorageLoad, loadWithPatch,
270 slowPath.get(), doneLabel, safeCast<int8_t>(baseGPR), safeCast<int8_t>(resultGPR),
271 usedRegisters(),
272 spillMode == NeedToSpill ? PropertyAccessRecord::RegistersInUse : PropertyAccessRecord::RegistersFlushed));
273 addSlowPathGenerator(slowPath.release());
274 }
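// Rough shape of the get_by_id inline cache emitted above (a sketch of the intent, not a
// spec of the repatching code, which lives elsewhere): structureToCompare is a patchable
// pointer that starts out as unusedPointer, so the structure check always fails and the
// slow path calls operationGetByIdOptimize until the cache is primed. Once patched, the
// fast path is: compare the cell's structure, load the butterfly via the convertible
// load, then load the property through the compact-offset patchable load. The
// PropertyAccessRecord remembers every label so the repatching code can find them.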
275
276 void SpeculativeJIT::cachedPutById(CodeOrigin codeOrigin, GPRReg baseGPR, GPRReg valueGPR, Edge valueUse, GPRReg scratchGPR, unsigned identifierNumber, PutKind putKind, JITCompiler::Jump slowPathTarget)
277 {
278
279 JITCompiler::DataLabelPtr structureToCompare;
280 JITCompiler::PatchableJump structureCheck = m_jit.patchableBranchPtrWithPatch(JITCompiler::NotEqual, JITCompiler::Address(baseGPR, JSCell::structureOffset()), structureToCompare, JITCompiler::TrustedImmPtr(reinterpret_cast<void*>(unusedPointer)));
281
282 writeBarrier(baseGPR, valueGPR, valueUse, WriteBarrierForPropertyAccess, scratchGPR);
283
284 JITCompiler::ConvertibleLoadLabel propertyStorageLoad =
285 m_jit.convertibleLoadPtr(JITCompiler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
286 JITCompiler::DataLabel32 storeWithPatch = m_jit.store64WithAddressOffsetPatch(valueGPR, JITCompiler::Address(scratchGPR, 0));
287
288 JITCompiler::Label doneLabel = m_jit.label();
289
290 V_DFGOperation_EJCI optimizedCall;
291 if (m_jit.strictModeFor(m_currentNode->codeOrigin)) {
292 if (putKind == Direct)
293 optimizedCall = operationPutByIdDirectStrictOptimize;
294 else
295 optimizedCall = operationPutByIdStrictOptimize;
296 } else {
297 if (putKind == Direct)
298 optimizedCall = operationPutByIdDirectNonStrictOptimize;
299 else
300 optimizedCall = operationPutByIdNonStrictOptimize;
301 }
302 OwnPtr<SlowPathGenerator> slowPath;
303 if (!slowPathTarget.isSet()) {
304 slowPath = slowPathCall(
305 structureCheck.m_jump, this, optimizedCall, NoResult, valueGPR, baseGPR,
306 identifier(identifierNumber));
307 } else {
308 JITCompiler::JumpList slowCases;
309 slowCases.append(structureCheck.m_jump);
310 slowCases.append(slowPathTarget);
311 slowPath = slowPathCall(
312 slowCases, this, optimizedCall, NoResult, valueGPR, baseGPR,
313 identifier(identifierNumber));
314 }
315 RegisterSet currentlyUsedRegisters = usedRegisters();
316 currentlyUsedRegisters.clear(scratchGPR);
317 ASSERT(currentlyUsedRegisters.get(baseGPR));
318 ASSERT(currentlyUsedRegisters.get(valueGPR));
319 m_jit.addPropertyAccess(
320 PropertyAccessRecord(
321 codeOrigin, structureToCompare, structureCheck, propertyStorageLoad,
322 JITCompiler::DataLabelCompact(storeWithPatch.label()), slowPath.get(), doneLabel,
323 safeCast<int8_t>(baseGPR), safeCast<int8_t>(valueGPR), currentlyUsedRegisters));
324 addSlowPathGenerator(slowPath.release());
325 }
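// The put_by_id cache mirrors the get_by_id one: the same patchable structure check
// guards a butterfly load into scratchGPR and a store with a patchable address offset.
// The slow-path operation is chosen up front from the two axes visible above (strict vs.
// non-strict code origin, Direct put vs. ordinary put), and scratchGPR is dropped from
// the recorded used-register set because it does not hold a live value at that point.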
326
327 void SpeculativeJIT::nonSpeculativeNonPeepholeCompareNull(Edge operand, bool invert)
328 {
329 JSValueOperand arg(this, operand);
330 GPRReg argGPR = arg.gpr();
331
332 GPRTemporary result(this, arg);
333 GPRReg resultGPR = result.gpr();
334
335 JITCompiler::Jump notCell;
336
337 JITCompiler::Jump notMasqueradesAsUndefined;
338 if (m_jit.graph().globalObjectFor(operand->codeOrigin)->masqueradesAsUndefinedWatchpoint()->isStillValid()) {
339 if (!isKnownCell(operand.node()))
340 notCell = m_jit.branchTest64(MacroAssembler::NonZero, argGPR, GPRInfo::tagMaskRegister);
341
342 m_jit.graph().globalObjectFor(operand->codeOrigin)->masqueradesAsUndefinedWatchpoint()->add(speculationWatchpoint());
343 m_jit.move(invert ? TrustedImm32(1) : TrustedImm32(0), resultGPR);
344 notMasqueradesAsUndefined = m_jit.jump();
345 } else {
346 GPRTemporary localGlobalObject(this);
347 GPRTemporary remoteGlobalObject(this);
348
349 if (!isKnownCell(operand.node()))
350 notCell = m_jit.branchTest64(MacroAssembler::NonZero, argGPR, GPRInfo::tagMaskRegister);
351
352 m_jit.loadPtr(JITCompiler::Address(argGPR, JSCell::structureOffset()), resultGPR);
353 JITCompiler::Jump isMasqueradesAsUndefined = m_jit.branchTest8(JITCompiler::NonZero, JITCompiler::Address(resultGPR, Structure::typeInfoFlagsOffset()), JITCompiler::TrustedImm32(MasqueradesAsUndefined));
354
355 m_jit.move(invert ? TrustedImm32(1) : TrustedImm32(0), resultGPR);
356 notMasqueradesAsUndefined = m_jit.jump();
357
358 isMasqueradesAsUndefined.link(&m_jit);
359 GPRReg localGlobalObjectGPR = localGlobalObject.gpr();
360 GPRReg remoteGlobalObjectGPR = remoteGlobalObject.gpr();
361 m_jit.move(JITCompiler::TrustedImmPtr(m_jit.graph().globalObjectFor(operand->codeOrigin)), localGlobalObjectGPR);
362 m_jit.loadPtr(JITCompiler::Address(resultGPR, Structure::globalObjectOffset()), remoteGlobalObjectGPR);
363 m_jit.comparePtr(invert ? JITCompiler::NotEqual : JITCompiler::Equal, localGlobalObjectGPR, remoteGlobalObjectGPR, resultGPR);
364 }
365
366 if (!isKnownCell(operand.node())) {
367 JITCompiler::Jump done = m_jit.jump();
368
369 notCell.link(&m_jit);
370
371 m_jit.move(argGPR, resultGPR);
372 m_jit.and64(JITCompiler::TrustedImm32(~TagBitUndefined), resultGPR);
373 m_jit.compare64(invert ? JITCompiler::NotEqual : JITCompiler::Equal, resultGPR, JITCompiler::TrustedImm32(ValueNull), resultGPR);
374
375 done.link(&m_jit);
376 }
377
378 notMasqueradesAsUndefined.link(&m_jit);
379
380 m_jit.or32(TrustedImm32(ValueFalse), resultGPR);
381 jsValueResult(resultGPR, m_currentNode, DataFormatJSBoolean);
382 }
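// The non-cell path above uses the usual JSVALUE64 trick for "== null": null is encoded
// as 0x02 and undefined as 0x0A (null | TagBitUndefined, 0x08), so and64(~TagBitUndefined)
// maps both to ValueNull and maps nothing else to it. A single compare64 against ValueNull
// then answers "is null or undefined". (The constants are the standard 64-bit JSValue
// encoding, stated here as an assumption for readability.)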
383
384 void SpeculativeJIT::nonSpeculativePeepholeBranchNull(Edge operand, Node* branchNode, bool invert)
385 {
386 BlockIndex taken = branchNode->takenBlockIndex();
387 BlockIndex notTaken = branchNode->notTakenBlockIndex();
388
389 if (taken == nextBlock()) {
390 invert = !invert;
391 BlockIndex tmp = taken;
392 taken = notTaken;
393 notTaken = tmp;
394 }
395
396 JSValueOperand arg(this, operand);
397 GPRReg argGPR = arg.gpr();
398
399 GPRTemporary result(this, arg);
400 GPRReg resultGPR = result.gpr();
401
402 JITCompiler::Jump notCell;
403
404 if (m_jit.graph().globalObjectFor(operand->codeOrigin)->masqueradesAsUndefinedWatchpoint()->isStillValid()) {
405 if (!isKnownCell(operand.node()))
406 notCell = m_jit.branchTest64(MacroAssembler::NonZero, argGPR, GPRInfo::tagMaskRegister);
407
408 m_jit.graph().globalObjectFor(operand->codeOrigin)->masqueradesAsUndefinedWatchpoint()->add(speculationWatchpoint());
409 jump(invert ? taken : notTaken, ForceJump);
410 } else {
411 GPRTemporary localGlobalObject(this);
412 GPRTemporary remoteGlobalObject(this);
413
414 if (!isKnownCell(operand.node()))
415 notCell = m_jit.branchTest64(MacroAssembler::NonZero, argGPR, GPRInfo::tagMaskRegister);
416
417 m_jit.loadPtr(JITCompiler::Address(argGPR, JSCell::structureOffset()), resultGPR);
418 branchTest8(JITCompiler::Zero, JITCompiler::Address(resultGPR, Structure::typeInfoFlagsOffset()), JITCompiler::TrustedImm32(MasqueradesAsUndefined), invert ? taken : notTaken);
419
420 GPRReg localGlobalObjectGPR = localGlobalObject.gpr();
421 GPRReg remoteGlobalObjectGPR = remoteGlobalObject.gpr();
422 m_jit.move(TrustedImmPtr(m_jit.graph().globalObjectFor(operand->codeOrigin)), localGlobalObjectGPR);
423 m_jit.loadPtr(JITCompiler::Address(resultGPR, Structure::globalObjectOffset()), remoteGlobalObjectGPR);
424 branchPtr(JITCompiler::Equal, localGlobalObjectGPR, remoteGlobalObjectGPR, invert ? notTaken : taken);
425 }
426
427 if (!isKnownCell(operand.node())) {
428 jump(notTaken, ForceJump);
429
430 notCell.link(&m_jit);
431
432 m_jit.move(argGPR, resultGPR);
433 m_jit.and64(JITCompiler::TrustedImm32(~TagBitUndefined), resultGPR);
434 branch64(invert ? JITCompiler::NotEqual : JITCompiler::Equal, resultGPR, JITCompiler::TrustedImm64(ValueNull), taken);
435 }
436
437 jump(notTaken);
438 }
439
440 bool SpeculativeJIT::nonSpeculativeCompareNull(Node* node, Edge operand, bool invert)
441 {
442 unsigned branchIndexInBlock = detectPeepHoleBranch();
443 if (branchIndexInBlock != UINT_MAX) {
444 Node* branchNode = m_jit.graph().m_blocks[m_block]->at(branchIndexInBlock);
445
446 RELEASE_ASSERT(node->adjustedRefCount() == 1);
447
448 nonSpeculativePeepholeBranchNull(operand, branchNode, invert);
449
450 use(node->child1());
451 use(node->child2());
452 m_indexInBlock = branchIndexInBlock;
453 m_currentNode = branchNode;
454
455 return true;
456 }
457
458 nonSpeculativeNonPeepholeCompareNull(operand, invert);
459
460 return false;
461 }
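// Peephole fusion: if the only use of this compare is the Branch that ends the block,
// detectPeepHoleBranch() finds it and we emit a fused compare-and-branch instead of
// materializing a boolean. The children are then used explicitly and
// m_indexInBlock/m_currentNode are advanced onto the Branch so the main loop does not
// compile it a second time; returning true tells the caller the branch was consumed.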
462
463 void SpeculativeJIT::nonSpeculativePeepholeBranch(Node* node, Node* branchNode, MacroAssembler::RelationalCondition cond, S_DFGOperation_EJJ helperFunction)
464 {
465 BlockIndex taken = branchNode->takenBlockIndex();
466 BlockIndex notTaken = branchNode->notTakenBlockIndex();
467
468 JITCompiler::ResultCondition callResultCondition = JITCompiler::NonZero;
469
470 // The branch instruction will branch to the taken block.
471 // If taken is next, switch taken with notTaken & invert the branch condition so we can fall through.
472 if (taken == nextBlock()) {
473 cond = JITCompiler::invert(cond);
474 callResultCondition = JITCompiler::Zero;
475 BlockIndex tmp = taken;
476 taken = notTaken;
477 notTaken = tmp;
478 }
479
480 JSValueOperand arg1(this, node->child1());
481 JSValueOperand arg2(this, node->child2());
482 GPRReg arg1GPR = arg1.gpr();
483 GPRReg arg2GPR = arg2.gpr();
484
485 JITCompiler::JumpList slowPath;
486
487 if (isKnownNotInteger(node->child1().node()) || isKnownNotInteger(node->child2().node())) {
488 GPRResult result(this);
489 GPRReg resultGPR = result.gpr();
490
491 arg1.use();
492 arg2.use();
493
494 flushRegisters();
495 callOperation(helperFunction, resultGPR, arg1GPR, arg2GPR);
496
497 branchTest32(callResultCondition, resultGPR, taken);
498 } else {
499 GPRTemporary result(this, arg2);
500 GPRReg resultGPR = result.gpr();
501
502 arg1.use();
503 arg2.use();
504
505 if (!isKnownInteger(node->child1().node()))
506 slowPath.append(m_jit.branch64(MacroAssembler::Below, arg1GPR, GPRInfo::tagTypeNumberRegister));
507 if (!isKnownInteger(node->child2().node()))
508 slowPath.append(m_jit.branch64(MacroAssembler::Below, arg2GPR, GPRInfo::tagTypeNumberRegister));
509
510 branch32(cond, arg1GPR, arg2GPR, taken);
511
512 if (!isKnownInteger(node->child1().node()) || !isKnownInteger(node->child2().node())) {
513 jump(notTaken, ForceJump);
514
515 slowPath.link(&m_jit);
516
517 silentSpillAllRegisters(resultGPR);
518 callOperation(helperFunction, resultGPR, arg1GPR, arg2GPR);
519 silentFillAllRegisters(resultGPR);
520
521 branchTest32(callResultCondition, resultGPR, taken);
522 }
523 }
524
525 jump(notTaken);
526
527 m_indexInBlock = m_jit.graph().m_blocks[m_block]->size() - 1;
528 m_currentNode = branchNode;
529 }
530
531 template<typename JumpType>
532 class CompareAndBoxBooleanSlowPathGenerator
533 : public CallSlowPathGenerator<JumpType, S_DFGOperation_EJJ, GPRReg> {
534 public:
535 CompareAndBoxBooleanSlowPathGenerator(
536 JumpType from, SpeculativeJIT* jit,
537 S_DFGOperation_EJJ function, GPRReg result, GPRReg arg1, GPRReg arg2)
538 : CallSlowPathGenerator<JumpType, S_DFGOperation_EJJ, GPRReg>(
539 from, jit, function, NeedToSpill, result)
540 , m_arg1(arg1)
541 , m_arg2(arg2)
542 {
543 }
544
545 protected:
546 virtual void generateInternal(SpeculativeJIT* jit)
547 {
548 this->setUp(jit);
549 this->recordCall(jit->callOperation(this->m_function, this->m_result, m_arg1, m_arg2));
550 jit->m_jit.and32(JITCompiler::TrustedImm32(1), this->m_result);
551 jit->m_jit.or32(JITCompiler::TrustedImm32(ValueFalse), this->m_result);
552 this->tearDown(jit);
553 }
554
555 private:
556 GPRReg m_arg1;
557 GPRReg m_arg2;
558 };
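// Why the slow path masks with 1 and then ORs in ValueFalse: the helper returns the
// comparison result as an integer, and booleans are encoded as ValueFalse | bit
// (false = 0x06, true = 0x07 under the usual JSVALUE64 encoding), so
// "and32(1); or32(ValueFalse)" re-boxes the raw result as a JS boolean in place.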
559
560 void SpeculativeJIT::nonSpeculativeNonPeepholeCompare(Node* node, MacroAssembler::RelationalCondition cond, S_DFGOperation_EJJ helperFunction)
561 {
562 JSValueOperand arg1(this, node->child1());
563 JSValueOperand arg2(this, node->child2());
564 GPRReg arg1GPR = arg1.gpr();
565 GPRReg arg2GPR = arg2.gpr();
566
567 JITCompiler::JumpList slowPath;
568
569 if (isKnownNotInteger(node->child1().node()) || isKnownNotInteger(node->child2().node())) {
570 GPRResult result(this);
571 GPRReg resultGPR = result.gpr();
572
573 arg1.use();
574 arg2.use();
575
576 flushRegisters();
577 callOperation(helperFunction, resultGPR, arg1GPR, arg2GPR);
578
579 m_jit.or32(TrustedImm32(ValueFalse), resultGPR);
580 jsValueResult(resultGPR, m_currentNode, DataFormatJSBoolean, UseChildrenCalledExplicitly);
581 } else {
582 GPRTemporary result(this, arg2);
583 GPRReg resultGPR = result.gpr();
584
585 arg1.use();
586 arg2.use();
587
588 if (!isKnownInteger(node->child1().node()))
589 slowPath.append(m_jit.branch64(MacroAssembler::Below, arg1GPR, GPRInfo::tagTypeNumberRegister));
590 if (!isKnownInteger(node->child2().node()))
591 slowPath.append(m_jit.branch64(MacroAssembler::Below, arg2GPR, GPRInfo::tagTypeNumberRegister));
592
593 m_jit.compare32(cond, arg1GPR, arg2GPR, resultGPR);
594 m_jit.or32(TrustedImm32(ValueFalse), resultGPR);
595
596 if (!isKnownInteger(node->child1().node()) || !isKnownInteger(node->child2().node())) {
597 addSlowPathGenerator(adoptPtr(
598 new CompareAndBoxBooleanSlowPathGenerator<JITCompiler::JumpList>(
599 slowPath, this, helperFunction, resultGPR, arg1GPR, arg2GPR)));
600 }
601
602 jsValueResult(resultGPR, m_currentNode, DataFormatJSBoolean, UseChildrenCalledExplicitly);
603 }
604 }
605
606 void SpeculativeJIT::nonSpeculativePeepholeStrictEq(Node* node, Node* branchNode, bool invert)
607 {
608 BlockIndex taken = branchNode->takenBlockIndex();
609 BlockIndex notTaken = branchNode->notTakenBlockIndex();
610
611 // The branch instruction will branch to the taken block.
612 // If taken is next, switch taken with notTaken & invert the branch condition so we can fall through.
613 if (taken == nextBlock()) {
614 invert = !invert;
615 BlockIndex tmp = taken;
616 taken = notTaken;
617 notTaken = tmp;
618 }
619
620 JSValueOperand arg1(this, node->child1());
621 JSValueOperand arg2(this, node->child2());
622 GPRReg arg1GPR = arg1.gpr();
623 GPRReg arg2GPR = arg2.gpr();
624
625 GPRTemporary result(this);
626 GPRReg resultGPR = result.gpr();
627
628 arg1.use();
629 arg2.use();
630
631 if (isKnownCell(node->child1().node()) && isKnownCell(node->child2().node())) {
632 // see if we get lucky: if the arguments are cells and they reference the same
633 // cell, then they must be strictly equal.
634 branch64(JITCompiler::Equal, arg1GPR, arg2GPR, invert ? notTaken : taken);
635
636 silentSpillAllRegisters(resultGPR);
637 callOperation(operationCompareStrictEqCell, resultGPR, arg1GPR, arg2GPR);
638 silentFillAllRegisters(resultGPR);
639
640 branchTest32(invert ? JITCompiler::Zero : JITCompiler::NonZero, resultGPR, taken);
641 } else {
642 m_jit.or64(arg1GPR, arg2GPR, resultGPR);
643
644 JITCompiler::Jump twoCellsCase = m_jit.branchTest64(JITCompiler::Zero, resultGPR, GPRInfo::tagMaskRegister);
645
646 JITCompiler::Jump leftOK = m_jit.branch64(JITCompiler::AboveOrEqual, arg1GPR, GPRInfo::tagTypeNumberRegister);
647 JITCompiler::Jump leftDouble = m_jit.branchTest64(JITCompiler::NonZero, arg1GPR, GPRInfo::tagTypeNumberRegister);
648 leftOK.link(&m_jit);
649 JITCompiler::Jump rightOK = m_jit.branch64(JITCompiler::AboveOrEqual, arg2GPR, GPRInfo::tagTypeNumberRegister);
650 JITCompiler::Jump rightDouble = m_jit.branchTest64(JITCompiler::NonZero, arg2GPR, GPRInfo::tagTypeNumberRegister);
651 rightOK.link(&m_jit);
652
653 branch64(invert ? JITCompiler::NotEqual : JITCompiler::Equal, arg1GPR, arg2GPR, taken);
654 jump(notTaken, ForceJump);
655
656 twoCellsCase.link(&m_jit);
657 branch64(JITCompiler::Equal, arg1GPR, arg2GPR, invert ? notTaken : taken);
658
659 leftDouble.link(&m_jit);
660 rightDouble.link(&m_jit);
661
662 silentSpillAllRegisters(resultGPR);
663 callOperation(operationCompareStrictEq, resultGPR, arg1GPR, arg2GPR);
664 silentFillAllRegisters(resultGPR);
665
666 branchTest32(invert ? JITCompiler::Zero : JITCompiler::NonZero, resultGPR, taken);
667 }
668
669 jump(notTaken);
670 }
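// Tag tests used by the mixed case above, spelled out: or64(arg1, arg2) followed by
// branchTest64(Zero, ..., tagMaskRegister) succeeds only when neither operand has any
// tag bits set, i.e. both are cells. On the non-cell side, a value that unsigned-compares
// >= tagTypeNumber is a boxed int32, while a value below tagTypeNumber that still has
// some of those top bits set is a boxed double and must take the slow path, since
// 1 === 1.0 is true even though the boxed bit patterns differ.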
671
672 void SpeculativeJIT::nonSpeculativeNonPeepholeStrictEq(Node* node, bool invert)
673 {
674 JSValueOperand arg1(this, node->child1());
675 JSValueOperand arg2(this, node->child2());
676 GPRReg arg1GPR = arg1.gpr();
677 GPRReg arg2GPR = arg2.gpr();
678
679 GPRTemporary result(this);
680 GPRReg resultGPR = result.gpr();
681
682 arg1.use();
683 arg2.use();
684
685 if (isKnownCell(node->child1().node()) && isKnownCell(node->child2().node())) {
686 // see if we get lucky: if the arguments are cells and they reference the same
687 // cell, then they must be strictly equal.
688 // FIXME: this should flush registers instead of silent spill/fill.
689 JITCompiler::Jump notEqualCase = m_jit.branch64(JITCompiler::NotEqual, arg1GPR, arg2GPR);
690
691 m_jit.move(JITCompiler::TrustedImm64(JSValue::encode(jsBoolean(!invert))), resultGPR);
692
693 JITCompiler::Jump done = m_jit.jump();
694
695 notEqualCase.link(&m_jit);
696
697 silentSpillAllRegisters(resultGPR);
698 callOperation(operationCompareStrictEqCell, resultGPR, arg1GPR, arg2GPR);
699 silentFillAllRegisters(resultGPR);
700
701 m_jit.and64(JITCompiler::TrustedImm32(1), resultGPR);
702 m_jit.or32(JITCompiler::TrustedImm32(ValueFalse), resultGPR);
703
704 done.link(&m_jit);
705 } else {
706 m_jit.or64(arg1GPR, arg2GPR, resultGPR);
707
708 JITCompiler::JumpList slowPathCases;
709
710 JITCompiler::Jump twoCellsCase = m_jit.branchTest64(JITCompiler::Zero, resultGPR, GPRInfo::tagMaskRegister);
711
712 JITCompiler::Jump leftOK = m_jit.branch64(JITCompiler::AboveOrEqual, arg1GPR, GPRInfo::tagTypeNumberRegister);
713 slowPathCases.append(m_jit.branchTest64(JITCompiler::NonZero, arg1GPR, GPRInfo::tagTypeNumberRegister));
714 leftOK.link(&m_jit);
715 JITCompiler::Jump rightOK = m_jit.branch64(JITCompiler::AboveOrEqual, arg2GPR, GPRInfo::tagTypeNumberRegister);
716 slowPathCases.append(m_jit.branchTest64(JITCompiler::NonZero, arg2GPR, GPRInfo::tagTypeNumberRegister));
717 rightOK.link(&m_jit);
718
719 m_jit.compare64(invert ? JITCompiler::NotEqual : JITCompiler::Equal, arg1GPR, arg2GPR, resultGPR);
720 m_jit.or32(JITCompiler::TrustedImm32(ValueFalse), resultGPR);
721
722 JITCompiler::Jump done = m_jit.jump();
723
724 twoCellsCase.link(&m_jit);
725 slowPathCases.append(m_jit.branch64(JITCompiler::NotEqual, arg1GPR, arg2GPR));
726
727 m_jit.move(JITCompiler::TrustedImm64(JSValue::encode(jsBoolean(!invert))), resultGPR);
728
729 addSlowPathGenerator(
730 adoptPtr(
731 new CompareAndBoxBooleanSlowPathGenerator<MacroAssembler::JumpList>(
732 slowPathCases, this, operationCompareStrictEq, resultGPR, arg1GPR,
733 arg2GPR)));
734
735 done.link(&m_jit);
736 }
737
738 jsValueResult(resultGPR, m_currentNode, DataFormatJSBoolean, UseChildrenCalledExplicitly);
739 }
740
741 void SpeculativeJIT::emitCall(Node* node)
742 {
743 if (node->op() != Call)
744 RELEASE_ASSERT(node->op() == Construct);
745
746 // For constructors, the this argument is not passed but we have to make space
747 // for it.
748 int dummyThisArgument = node->op() == Call ? 0 : 1;
749
750 CallLinkInfo::CallType callType = node->op() == Call ? CallLinkInfo::Call : CallLinkInfo::Construct;
751
752 Edge calleeEdge = m_jit.graph().m_varArgChildren[node->firstChild()];
753 JSValueOperand callee(this, calleeEdge);
754 GPRReg calleeGPR = callee.gpr();
755 use(calleeEdge);
756
757 // The call instruction's first child is the function; the subsequent children are the
758 // arguments.
759 int numPassedArgs = node->numChildren() - 1;
760
761 m_jit.store32(MacroAssembler::TrustedImm32(numPassedArgs + dummyThisArgument), callFramePayloadSlot(JSStack::ArgumentCount));
762 m_jit.store64(GPRInfo::callFrameRegister, callFrameSlot(JSStack::CallerFrame));
763 m_jit.store64(calleeGPR, callFrameSlot(JSStack::Callee));
764
765 for (int i = 0; i < numPassedArgs; i++) {
766 Edge argEdge = m_jit.graph().m_varArgChildren[node->firstChild() + 1 + i];
767 JSValueOperand arg(this, argEdge);
768 GPRReg argGPR = arg.gpr();
769 use(argEdge);
770
771 m_jit.store64(argGPR, argumentSlot(i + dummyThisArgument));
772 }
773
774 flushRegisters();
775
776 GPRResult result(this);
777 GPRReg resultGPR = result.gpr();
778
779 JITCompiler::DataLabelPtr targetToCheck;
780 JITCompiler::JumpList slowPath;
781
782 CallBeginToken token;
783 m_jit.beginCall(node->codeOrigin, token);
784
785 m_jit.addPtr(TrustedImm32(m_jit.codeBlock()->m_numCalleeRegisters * sizeof(Register)), GPRInfo::callFrameRegister);
786
787 slowPath.append(m_jit.branchPtrWithPatch(MacroAssembler::NotEqual, calleeGPR, targetToCheck, MacroAssembler::TrustedImmPtr(0)));
788
789 m_jit.loadPtr(MacroAssembler::Address(calleeGPR, OBJECT_OFFSETOF(JSFunction, m_scope)), resultGPR);
790 m_jit.store64(resultGPR, MacroAssembler::Address(GPRInfo::callFrameRegister, static_cast<ptrdiff_t>(sizeof(Register)) * JSStack::ScopeChain));
791
792 CodeOrigin codeOrigin = m_currentNode->codeOrigin;
793 JITCompiler::Call fastCall = m_jit.nearCall();
794 m_jit.notifyCall(fastCall, codeOrigin, token);
795
796 JITCompiler::Jump done = m_jit.jump();
797
798 slowPath.link(&m_jit);
799
800 m_jit.move(calleeGPR, GPRInfo::nonArgGPR0);
801 m_jit.prepareForExceptionCheck();
802 JITCompiler::Call slowCall = m_jit.nearCall();
803 m_jit.notifyCall(slowCall, codeOrigin, token);
804
805 done.link(&m_jit);
806
807 m_jit.move(GPRInfo::returnValueGPR, resultGPR);
808
809 jsValueResult(resultGPR, m_currentNode, DataFormatJS, UseChildrenCalledExplicitly);
810
811 m_jit.addJSCall(fastCall, slowCall, targetToCheck, callType, calleeGPR, m_currentNode->codeOrigin);
812 }
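// Call sequence summary (describing what the code above visibly does; the thunk details
// live elsewhere): the callee frame is populated in place (argument count, caller frame,
// callee, then the arguments), the frame register is bumped past this code block's callee
// registers, and a patchable pointer compare on the callee guards an inline fast call.
// The slow path hands the callee to the out-of-line link/virtual-call machinery in
// nonArgGPR0, and addJSCall() records the fast call, slow call and patch label so the
// CallLinkInfo can be wired up at link time.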
813
814 template<bool strict>
815 GPRReg SpeculativeJIT::fillSpeculateIntInternal(Edge edge, DataFormat& returnFormat)
816 {
817 #if DFG_ENABLE(DEBUG_VERBOSE)
818 dataLogF("SpecInt@%d ", edge->index());
819 #endif
820 AbstractValue& value = m_state.forNode(edge);
821 SpeculatedType type = value.m_type;
822 ASSERT(edge.useKind() != KnownInt32Use || !(value.m_type & ~SpecInt32));
823 value.filter(SpecInt32);
824 VirtualRegister virtualRegister = edge->virtualRegister();
825 GenerationInfo& info = m_generationInfo[virtualRegister];
826
827 switch (info.registerFormat()) {
828 case DataFormatNone: {
829 if ((edge->hasConstant() && !isInt32Constant(edge.node())) || info.spillFormat() == DataFormatDouble) {
830 terminateSpeculativeExecution(Uncountable, JSValueRegs(), 0);
831 returnFormat = DataFormatInteger;
832 return allocate();
833 }
834
835 GPRReg gpr = allocate();
836
837 if (edge->hasConstant()) {
838 m_gprs.retain(gpr, virtualRegister, SpillOrderConstant);
839 ASSERT(isInt32Constant(edge.node()));
840 m_jit.move(MacroAssembler::Imm32(valueOfInt32Constant(edge.node())), gpr);
841 info.fillInteger(*m_stream, gpr);
842 returnFormat = DataFormatInteger;
843 return gpr;
844 }
845
846 DataFormat spillFormat = info.spillFormat();
847
848 RELEASE_ASSERT((spillFormat & DataFormatJS) || spillFormat == DataFormatInteger);
849
850 m_gprs.retain(gpr, virtualRegister, SpillOrderSpilled);
851
852 if (spillFormat == DataFormatJSInteger || spillFormat == DataFormatInteger) {
853 // If we know this was spilled as an integer we can fill without checking.
854 if (strict) {
855 m_jit.load32(JITCompiler::addressFor(virtualRegister), gpr);
856 info.fillInteger(*m_stream, gpr);
857 returnFormat = DataFormatInteger;
858 return gpr;
859 }
860 if (spillFormat == DataFormatInteger) {
861 m_jit.load32(JITCompiler::addressFor(virtualRegister), gpr);
862 m_jit.or64(GPRInfo::tagTypeNumberRegister, gpr);
863 } else
864 m_jit.load64(JITCompiler::addressFor(virtualRegister), gpr);
865 info.fillJSValue(*m_stream, gpr, DataFormatJSInteger);
866 returnFormat = DataFormatJSInteger;
867 return gpr;
868 }
869 m_jit.load64(JITCompiler::addressFor(virtualRegister), gpr);
870
871 // Fill as JSValue, and fall through.
872 info.fillJSValue(*m_stream, gpr, DataFormatJSInteger);
873 m_gprs.unlock(gpr);
874 }
875
876 case DataFormatJS: {
877 // Check the value is an integer.
878 GPRReg gpr = info.gpr();
879 m_gprs.lock(gpr);
880 if (type & ~SpecInt32)
881 speculationCheck(BadType, JSValueRegs(gpr), edge, m_jit.branch64(MacroAssembler::Below, gpr, GPRInfo::tagTypeNumberRegister));
882 info.fillJSValue(*m_stream, gpr, DataFormatJSInteger);
883 // If !strict we're done, return.
884 if (!strict) {
885 returnFormat = DataFormatJSInteger;
886 return gpr;
887 }
888 // else fall through & handle as DataFormatJSInteger.
889 m_gprs.unlock(gpr);
890 }
891
892 case DataFormatJSInteger: {
893 // In a strict fill we need to strip off the value tag.
894 if (strict) {
895 GPRReg gpr = info.gpr();
896 GPRReg result;
897 // If the register has already been locked we need to take a copy.
898 // If not, we'll zero extend in place, so mark on the info that this is now type DataFormatInteger, not DataFormatJSInteger.
899 if (m_gprs.isLocked(gpr))
900 result = allocate();
901 else {
902 m_gprs.lock(gpr);
903 info.fillInteger(*m_stream, gpr);
904 result = gpr;
905 }
906 m_jit.zeroExtend32ToPtr(gpr, result);
907 returnFormat = DataFormatInteger;
908 return result;
909 }
910
911 GPRReg gpr = info.gpr();
912 m_gprs.lock(gpr);
913 returnFormat = DataFormatJSInteger;
914 return gpr;
915 }
916
917 case DataFormatInteger: {
918 GPRReg gpr = info.gpr();
919 m_gprs.lock(gpr);
920 returnFormat = DataFormatInteger;
921 return gpr;
922 }
923
924 case DataFormatDouble:
925 case DataFormatJSDouble: {
926 if (edge->hasConstant() && isInt32Constant(edge.node())) {
927 GPRReg gpr = allocate();
928 ASSERT(isInt32Constant(edge.node()));
929 m_jit.move(MacroAssembler::Imm32(valueOfInt32Constant(edge.node())), gpr);
930 returnFormat = DataFormatInteger;
931 return gpr;
932 }
933 }
934 case DataFormatCell:
935 case DataFormatBoolean:
936 case DataFormatJSCell:
937 case DataFormatJSBoolean: {
938 terminateSpeculativeExecution(Uncountable, JSValueRegs(), 0);
939 returnFormat = DataFormatInteger;
940 return allocate();
941 }
942
943 case DataFormatStorage:
944 RELEASE_ASSERT_NOT_REACHED();
945
946 default:
947 RELEASE_ASSERT_NOT_REACHED();
948 return InvalidGPRReg;
949 }
950 }
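// The <strict> flag decides what the caller gets back: a strict fill always returns a
// raw, zero-extended int32 in DataFormatInteger, while a non-strict fill may hand back
// the tagged DataFormatJSInteger form if that is what already sits in the register or on
// the stack, saving the untagging work. fillSpeculateIntStrict() below asserts that the
// strict path really did produce DataFormatInteger.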
951
952 GPRReg SpeculativeJIT::fillSpeculateInt(Edge edge, DataFormat& returnFormat)
953 {
954 return fillSpeculateIntInternal<false>(edge, returnFormat);
955 }
956
957 GPRReg SpeculativeJIT::fillSpeculateIntStrict(Edge edge)
958 {
959 DataFormat mustBeDataFormatInteger;
960 GPRReg result = fillSpeculateIntInternal<true>(edge, mustBeDataFormatInteger);
961 RELEASE_ASSERT(mustBeDataFormatInteger == DataFormatInteger);
962 return result;
963 }
964
965 FPRReg SpeculativeJIT::fillSpeculateDouble(Edge edge)
966 {
967 #if DFG_ENABLE(DEBUG_VERBOSE)
968 dataLogF("SpecDouble@%d ", edge->index());
969 #endif
970 AbstractValue& value = m_state.forNode(edge);
971 SpeculatedType type = value.m_type;
972 ASSERT(edge.useKind() != KnownNumberUse || !(value.m_type & ~SpecNumber));
973 value.filter(SpecNumber);
974 VirtualRegister virtualRegister = edge->virtualRegister();
975 GenerationInfo& info = m_generationInfo[virtualRegister];
976
977 if (info.registerFormat() == DataFormatNone) {
978 if (edge->hasConstant()) {
979 GPRReg gpr = allocate();
980
981 if (isInt32Constant(edge.node())) {
982 FPRReg fpr = fprAllocate();
983 m_jit.move(MacroAssembler::Imm64(reinterpretDoubleToInt64(static_cast<double>(valueOfInt32Constant(edge.node())))), gpr);
984 m_jit.move64ToDouble(gpr, fpr);
985 unlock(gpr);
986
987 return fpr;
988 }
989 if (isNumberConstant(edge.node())) {
990 FPRReg fpr = fprAllocate();
991 m_jit.move(MacroAssembler::Imm64(reinterpretDoubleToInt64(valueOfNumberConstant(edge.node()))), gpr);
992 m_jit.move64ToDouble(gpr, fpr);
993 unlock(gpr);
994
995 m_fprs.retain(fpr, virtualRegister, SpillOrderDouble);
996 info.fillDouble(*m_stream, fpr);
997 return fpr;
998 }
999 terminateSpeculativeExecution(Uncountable, JSValueRegs(), 0);
1000 return fprAllocate();
1001 }
1002
1003 DataFormat spillFormat = info.spillFormat();
1004 switch (spillFormat) {
1005 case DataFormatDouble: {
1006 FPRReg fpr = fprAllocate();
1007 m_jit.loadDouble(JITCompiler::addressFor(virtualRegister), fpr);
1008 m_fprs.retain(fpr, virtualRegister, SpillOrderDouble);
1009 info.fillDouble(*m_stream, fpr);
1010 return fpr;
1011 }
1012
1013 case DataFormatInteger: {
1014 GPRReg gpr = allocate();
1015
1016 m_gprs.retain(gpr, virtualRegister, SpillOrderSpilled);
1017 m_jit.load32(JITCompiler::addressFor(virtualRegister), gpr);
1018 info.fillInteger(*m_stream, gpr);
1019 unlock(gpr);
1020 break;
1021 }
1022
1023 default:
1024 GPRReg gpr = allocate();
1025
1026 RELEASE_ASSERT(spillFormat & DataFormatJS);
1027 m_gprs.retain(gpr, virtualRegister, SpillOrderSpilled);
1028 m_jit.load64(JITCompiler::addressFor(virtualRegister), gpr);
1029 info.fillJSValue(*m_stream, gpr, spillFormat);
1030 unlock(gpr);
1031 break;
1032 }
1033 }
1034
1035 switch (info.registerFormat()) {
1036 case DataFormatNone: // Should have filled, above.
1037 case DataFormatBoolean: // This type never occurs.
1038 case DataFormatStorage:
1039 RELEASE_ASSERT_NOT_REACHED();
1040
1041 case DataFormatCell:
1042 terminateSpeculativeExecution(Uncountable, JSValueRegs(), 0);
1043 return fprAllocate();
1044
1045 case DataFormatJSCell:
1046 case DataFormatJS:
1047 case DataFormatJSBoolean: {
1048 GPRReg jsValueGpr = info.gpr();
1049 m_gprs.lock(jsValueGpr);
1050 FPRReg fpr = fprAllocate();
1051 GPRReg tempGpr = allocate();
1052
1053 JITCompiler::Jump isInteger = m_jit.branch64(MacroAssembler::AboveOrEqual, jsValueGpr, GPRInfo::tagTypeNumberRegister);
1054
1055 if (type & ~SpecNumber)
1056 speculationCheck(BadType, JSValueRegs(jsValueGpr), edge, m_jit.branchTest64(MacroAssembler::Zero, jsValueGpr, GPRInfo::tagTypeNumberRegister));
1057
1058 // First, if we get here we have a double encoded as a JSValue
1059 m_jit.move(jsValueGpr, tempGpr);
1060 unboxDouble(tempGpr, fpr);
1061 JITCompiler::Jump hasUnboxedDouble = m_jit.jump();
1062
1063 // Finally, handle integers.
1064 isInteger.link(&m_jit);
1065 m_jit.convertInt32ToDouble(jsValueGpr, fpr);
1066 hasUnboxedDouble.link(&m_jit);
1067
1068 m_gprs.release(jsValueGpr);
1069 m_gprs.unlock(jsValueGpr);
1070 m_gprs.unlock(tempGpr);
1071 m_fprs.retain(fpr, virtualRegister, SpillOrderDouble);
1072 info.fillDouble(*m_stream, fpr);
1073 info.killSpilled();
1074 return fpr;
1075 }
1076
1077 case DataFormatJSInteger:
1078 case DataFormatInteger: {
1079 FPRReg fpr = fprAllocate();
1080 GPRReg gpr = info.gpr();
1081 m_gprs.lock(gpr);
1082 m_jit.convertInt32ToDouble(gpr, fpr);
1083 m_gprs.unlock(gpr);
1084 return fpr;
1085 }
1086
1087 // Unbox the double
1088 case DataFormatJSDouble: {
1089 GPRReg gpr = info.gpr();
1090 FPRReg fpr = fprAllocate();
1091 if (m_gprs.isLocked(gpr)) {
1092 // Make sure we don't trample gpr if it is in use.
1093 GPRReg temp = allocate();
1094 m_jit.move(gpr, temp);
1095 unboxDouble(temp, fpr);
1096 unlock(temp);
1097 } else
1098 unboxDouble(gpr, fpr);
1099
1100 m_gprs.release(gpr);
1101 m_fprs.retain(fpr, virtualRegister, SpillOrderDouble);
1102
1103 info.fillDouble(*m_stream, fpr);
1104 return fpr;
1105 }
1106
1107 case DataFormatDouble: {
1108 FPRReg fpr = info.fpr();
1109 m_fprs.lock(fpr);
1110 return fpr;
1111 }
1112
1113 default:
1114 RELEASE_ASSERT_NOT_REACHED();
1115 return InvalidFPRReg;
1116 }
1117 }
1118
1119 GPRReg SpeculativeJIT::fillSpeculateCell(Edge edge)
1120 {
1121 #if DFG_ENABLE(DEBUG_VERBOSE)
1122 dataLogF("SpecCell@%d ", edge->index());
1123 #endif
1124 AbstractValue& value = m_state.forNode(edge);
1125 SpeculatedType type = value.m_type;
1126 ASSERT((edge.useKind() != KnownCellUse && edge.useKind() != KnownStringUse) || !(value.m_type & ~SpecCell));
1127 value.filter(SpecCell);
1128 VirtualRegister virtualRegister = edge->virtualRegister();
1129 GenerationInfo& info = m_generationInfo[virtualRegister];
1130
1131 switch (info.registerFormat()) {
1132 case DataFormatNone: {
1133 if (info.spillFormat() == DataFormatInteger || info.spillFormat() == DataFormatDouble) {
1134 terminateSpeculativeExecution(Uncountable, JSValueRegs(), 0);
1135 return allocate();
1136 }
1137
1138 GPRReg gpr = allocate();
1139
1140 if (edge->hasConstant()) {
1141 JSValue jsValue = valueOfJSConstant(edge.node());
1142 if (jsValue.isCell()) {
1143 m_gprs.retain(gpr, virtualRegister, SpillOrderConstant);
1144 m_jit.move(MacroAssembler::TrustedImm64(JSValue::encode(jsValue)), gpr);
1145 info.fillJSValue(*m_stream, gpr, DataFormatJSCell);
1146 return gpr;
1147 }
1148 terminateSpeculativeExecution(Uncountable, JSValueRegs(), 0);
1149 return gpr;
1150 }
1151 RELEASE_ASSERT(info.spillFormat() & DataFormatJS);
1152 m_gprs.retain(gpr, virtualRegister, SpillOrderSpilled);
1153 m_jit.load64(JITCompiler::addressFor(virtualRegister), gpr);
1154
1155 info.fillJSValue(*m_stream, gpr, DataFormatJS);
1156 if (type & ~SpecCell)
1157 speculationCheck(BadType, JSValueRegs(gpr), edge, m_jit.branchTest64(MacroAssembler::NonZero, gpr, GPRInfo::tagMaskRegister));
1158 info.fillJSValue(*m_stream, gpr, DataFormatJSCell);
1159 return gpr;
1160 }
1161
1162 case DataFormatCell:
1163 case DataFormatJSCell: {
1164 GPRReg gpr = info.gpr();
1165 m_gprs.lock(gpr);
1166 return gpr;
1167 }
1168
1169 case DataFormatJS: {
1170 GPRReg gpr = info.gpr();
1171 m_gprs.lock(gpr);
1172 if (type & ~SpecCell)
1173 speculationCheck(BadType, JSValueRegs(gpr), edge, m_jit.branchTest64(MacroAssembler::NonZero, gpr, GPRInfo::tagMaskRegister));
1174 info.fillJSValue(*m_stream, gpr, DataFormatJSCell);
1175 return gpr;
1176 }
1177
1178 case DataFormatJSInteger:
1179 case DataFormatInteger:
1180 case DataFormatJSDouble:
1181 case DataFormatDouble:
1182 case DataFormatJSBoolean:
1183 case DataFormatBoolean: {
1184 terminateSpeculativeExecution(Uncountable, JSValueRegs(), 0);
1185 return allocate();
1186 }
1187
1188 case DataFormatStorage:
1189 RELEASE_ASSERT_NOT_REACHED();
1190
1191 default:
1192 RELEASE_ASSERT_NOT_REACHED();
1193 return InvalidGPRReg;
1194 }
1195 }
1196
1197 GPRReg SpeculativeJIT::fillSpeculateBoolean(Edge edge)
1198 {
1199 #if DFG_ENABLE(DEBUG_VERBOSE)
1200 dataLogF("SpecBool@%d ", edge->index());
1201 #endif
1202 AbstractValue& value = m_state.forNode(edge);
1203 SpeculatedType type = value.m_type;
1204 value.filter(SpecBoolean);
1205 VirtualRegister virtualRegister = edge->virtualRegister();
1206 GenerationInfo& info = m_generationInfo[virtualRegister];
1207
1208 switch (info.registerFormat()) {
1209 case DataFormatNone: {
1210 if (info.spillFormat() == DataFormatInteger || info.spillFormat() == DataFormatDouble) {
1211 terminateSpeculativeExecution(Uncountable, JSValueRegs(), 0);
1212 return allocate();
1213 }
1214
1215 GPRReg gpr = allocate();
1216
1217 if (edge->hasConstant()) {
1218 JSValue jsValue = valueOfJSConstant(edge.node());
1219 if (jsValue.isBoolean()) {
1220 m_gprs.retain(gpr, virtualRegister, SpillOrderConstant);
1221 m_jit.move(MacroAssembler::TrustedImm64(JSValue::encode(jsValue)), gpr);
1222 info.fillJSValue(*m_stream, gpr, DataFormatJSBoolean);
1223 return gpr;
1224 }
1225 terminateSpeculativeExecution(Uncountable, JSValueRegs(), 0);
1226 return gpr;
1227 }
1228 RELEASE_ASSERT(info.spillFormat() & DataFormatJS);
1229 m_gprs.retain(gpr, virtualRegister, SpillOrderSpilled);
1230 m_jit.load64(JITCompiler::addressFor(virtualRegister), gpr);
1231
1232 info.fillJSValue(*m_stream, gpr, DataFormatJS);
1233 if (type & ~SpecBoolean) {
1234 m_jit.xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), gpr);
1235 speculationCheck(BadType, JSValueRegs(gpr), edge, m_jit.branchTest64(MacroAssembler::NonZero, gpr, TrustedImm32(static_cast<int32_t>(~1))), SpeculationRecovery(BooleanSpeculationCheck, gpr, InvalidGPRReg));
1236 m_jit.xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), gpr);
1237 }
1238 info.fillJSValue(*m_stream, gpr, DataFormatJSBoolean);
1239 return gpr;
1240 }
1241
1242 case DataFormatBoolean:
1243 case DataFormatJSBoolean: {
1244 GPRReg gpr = info.gpr();
1245 m_gprs.lock(gpr);
1246 return gpr;
1247 }
1248
1249 case DataFormatJS: {
1250 GPRReg gpr = info.gpr();
1251 m_gprs.lock(gpr);
1252 if (type & ~SpecBoolean) {
1253 m_jit.xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), gpr);
1254 speculationCheck(BadType, JSValueRegs(gpr), edge, m_jit.branchTest64(MacroAssembler::NonZero, gpr, TrustedImm32(static_cast<int32_t>(~1))), SpeculationRecovery(BooleanSpeculationCheck, gpr, InvalidGPRReg));
1255 m_jit.xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), gpr);
1256 }
1257 info.fillJSValue(*m_stream, gpr, DataFormatJSBoolean);
1258 return gpr;
1259 }
1260
1261 case DataFormatJSInteger:
1262 case DataFormatInteger:
1263 case DataFormatJSDouble:
1264 case DataFormatDouble:
1265 case DataFormatJSCell:
1266 case DataFormatCell: {
1267 terminateSpeculativeExecution(Uncountable, JSValueRegs(), 0);
1268 return allocate();
1269 }
1270
1271 case DataFormatStorage:
1272 RELEASE_ASSERT_NOT_REACHED();
1273
1274 default:
1275 RELEASE_ASSERT_NOT_REACHED();
1276 return InvalidGPRReg;
1277 }
1278 }
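// The boolean speculation check above: XORing with ValueFalse (0x06 under the usual
// JSVALUE64 encoding) maps false to 0 and true to 1, so branchTest64 against ~1 fires
// for anything that was not a boolean. The second XOR restores the original value on the
// non-failing path, and the SpeculationRecovery entry lets the OSR exit code undo the
// first XOR if the check does fail.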
1279
1280 JITCompiler::Jump SpeculativeJIT::convertToDouble(GPRReg value, FPRReg result, GPRReg tmp)
1281 {
1282 JITCompiler::Jump isInteger = m_jit.branch64(MacroAssembler::AboveOrEqual, value, GPRInfo::tagTypeNumberRegister);
1283
1284 JITCompiler::Jump notNumber = m_jit.branchTest64(MacroAssembler::Zero, value, GPRInfo::tagTypeNumberRegister);
1285
1286 m_jit.move(value, tmp);
1287 unboxDouble(tmp, result);
1288
1289 JITCompiler::Jump done = m_jit.jump();
1290
1291 isInteger.link(&m_jit);
1292
1293 m_jit.convertInt32ToDouble(value, result);
1294
1295 done.link(&m_jit);
1296
1297 return notNumber;
1298 }
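// convertToDouble() contract, as used by callers: on the fall-through path the double
// value of a numeric JSValue is left in 'result' (int32s via convertInt32ToDouble, boxed
// doubles by re-adding the tag offset in unboxDouble), and the returned jump is taken
// exactly when the input was not a number at all, leaving the caller to decide how to
// handle that case.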
1299
1300 void SpeculativeJIT::compileObjectEquality(Node* node)
1301 {
1302 SpeculateCellOperand op1(this, node->child1());
1303 SpeculateCellOperand op2(this, node->child2());
1304 GPRTemporary result(this, op1);
1305
1306 GPRReg op1GPR = op1.gpr();
1307 GPRReg op2GPR = op2.gpr();
1308 GPRReg resultGPR = result.gpr();
1309
1310 if (m_jit.graph().globalObjectFor(node->codeOrigin)->masqueradesAsUndefinedWatchpoint()->isStillValid()) {
1311 m_jit.graph().globalObjectFor(node->codeOrigin)->masqueradesAsUndefinedWatchpoint()->add(speculationWatchpoint());
1312 DFG_TYPE_CHECK(
1313 JSValueSource::unboxedCell(op1GPR), node->child1(), SpecObject, m_jit.branchPtr(
1314 MacroAssembler::Equal,
1315 MacroAssembler::Address(op1GPR, JSCell::structureOffset()),
1316 MacroAssembler::TrustedImmPtr(m_jit.vm()->stringStructure.get())));
1317 DFG_TYPE_CHECK(
1318 JSValueSource::unboxedCell(op2GPR), node->child2(), SpecObject, m_jit.branchPtr(
1319 MacroAssembler::Equal,
1320 MacroAssembler::Address(op2GPR, JSCell::structureOffset()),
1321 MacroAssembler::TrustedImmPtr(m_jit.vm()->stringStructure.get())));
1322 } else {
1323 GPRTemporary structure(this);
1324 GPRReg structureGPR = structure.gpr();
1325
1326 m_jit.loadPtr(MacroAssembler::Address(op1GPR, JSCell::structureOffset()), structureGPR);
1327 DFG_TYPE_CHECK(
1328 JSValueSource::unboxedCell(op1GPR), node->child1(), SpecObject, m_jit.branchPtr(
1329 MacroAssembler::Equal,
1330 structureGPR,
1331 MacroAssembler::TrustedImmPtr(m_jit.vm()->stringStructure.get())));
1332 speculationCheck(BadType, JSValueSource::unboxedCell(op1GPR), node->child1(),
1333 m_jit.branchTest8(
1334 MacroAssembler::NonZero,
1335 MacroAssembler::Address(structureGPR, Structure::typeInfoFlagsOffset()),
1336 MacroAssembler::TrustedImm32(MasqueradesAsUndefined)));
1337
1338 m_jit.loadPtr(MacroAssembler::Address(op2GPR, JSCell::structureOffset()), structureGPR);
1339 DFG_TYPE_CHECK(
1340 JSValueSource::unboxedCell(op2GPR), node->child2(), SpecObject, m_jit.branchPtr(
1341 MacroAssembler::Equal,
1342 structureGPR,
1343 MacroAssembler::TrustedImmPtr(m_jit.vm()->stringStructure.get())));
1344 speculationCheck(BadType, JSValueSource::unboxedCell(op2GPR), node->child2(),
1345 m_jit.branchTest8(
1346 MacroAssembler::NonZero,
1347 MacroAssembler::Address(structureGPR, Structure::typeInfoFlagsOffset()),
1348 MacroAssembler::TrustedImm32(MasqueradesAsUndefined)));
1349 }
1350
1351 MacroAssembler::Jump falseCase = m_jit.branch64(MacroAssembler::NotEqual, op1GPR, op2GPR);
1352 m_jit.move(TrustedImm32(ValueTrue), resultGPR);
1353 MacroAssembler::Jump done = m_jit.jump();
1354 falseCase.link(&m_jit);
1355 m_jit.move(TrustedImm32(ValueFalse), resultGPR);
1356 done.link(&m_jit);
1357
1358 jsValueResult(resultGPR, m_currentNode, DataFormatJSBoolean);
1359 }
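// Both object-equality paths hinge on the MasqueradesAsUndefined watchpoint: if no object
// in this global object has ever masqueraded as undefined, the cheap path only has to
// rule out strings (the string-structure compare) and registers a speculation watchpoint
// so the code is jettisoned if that assumption is later violated. Otherwise each
// operand's structure is loaded and its type-info flags are checked explicitly, which is
// why the slower path needs the extra structure register.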
1360
1361 void SpeculativeJIT::compileObjectToObjectOrOtherEquality(Edge leftChild, Edge rightChild)
1362 {
1363 SpeculateCellOperand op1(this, leftChild);
1364 JSValueOperand op2(this, rightChild, ManualOperandSpeculation);
1365 GPRTemporary result(this);
1366
1367 GPRReg op1GPR = op1.gpr();
1368 GPRReg op2GPR = op2.gpr();
1369 GPRReg resultGPR = result.gpr();
1370 GPRTemporary structure;
1371 GPRReg structureGPR = InvalidGPRReg;
1372
1373 bool masqueradesAsUndefinedWatchpointValid = m_jit.graph().globalObjectFor(m_currentNode->codeOrigin)->masqueradesAsUndefinedWatchpoint()->isStillValid();
1374
1375 if (!masqueradesAsUndefinedWatchpointValid) {
1376 // The masquerades as undefined case will use the structure register, so allocate it here.
1377 // Do this at the top of the function to avoid branching around a register allocation.
1378 GPRTemporary realStructure(this);
1379 structure.adopt(realStructure);
1380 structureGPR = structure.gpr();
1381 }
1382
1383 if (masqueradesAsUndefinedWatchpointValid) {
1384 m_jit.graph().globalObjectFor(m_currentNode->codeOrigin)->masqueradesAsUndefinedWatchpoint()->add(speculationWatchpoint());
1385 DFG_TYPE_CHECK(
1386 JSValueSource::unboxedCell(op1GPR), leftChild, SpecObject, m_jit.branchPtr(
1387 MacroAssembler::Equal,
1388 MacroAssembler::Address(op1GPR, JSCell::structureOffset()),
1389 MacroAssembler::TrustedImmPtr(m_jit.vm()->stringStructure.get())));
1390 } else {
1391 m_jit.loadPtr(MacroAssembler::Address(op1GPR, JSCell::structureOffset()), structureGPR);
1392 DFG_TYPE_CHECK(
1393 JSValueSource::unboxedCell(op1GPR), leftChild, SpecObject, m_jit.branchPtr(
1394 MacroAssembler::Equal,
1395 structureGPR,
1396 MacroAssembler::TrustedImmPtr(m_jit.vm()->stringStructure.get())));
1397 speculationCheck(BadType, JSValueSource::unboxedCell(op1GPR), leftChild,
1398 m_jit.branchTest8(
1399 MacroAssembler::NonZero,
1400 MacroAssembler::Address(structureGPR, Structure::typeInfoFlagsOffset()),
1401 MacroAssembler::TrustedImm32(MasqueradesAsUndefined)));
1402 }
1403
1404 // It seems that most of the time when programs do a == b where b may be either null/undefined
1405 // or an object, b is usually an object. Balance the branches to make that case fast.
1406 MacroAssembler::Jump rightNotCell =
1407 m_jit.branchTest64(MacroAssembler::NonZero, op2GPR, GPRInfo::tagMaskRegister);
1408
1409 // We know that within this branch, rightChild must be a cell.
1410 if (masqueradesAsUndefinedWatchpointValid) {
1411 m_jit.graph().globalObjectFor(m_currentNode->codeOrigin)->masqueradesAsUndefinedWatchpoint()->add(speculationWatchpoint());
1412 DFG_TYPE_CHECK(
1413 JSValueRegs(op2GPR), rightChild, (~SpecCell) | SpecObject, m_jit.branchPtr(
1414 MacroAssembler::Equal,
1415 MacroAssembler::Address(op2GPR, JSCell::structureOffset()),
1416 MacroAssembler::TrustedImmPtr(m_jit.vm()->stringStructure.get())));
1417 } else {
1418 m_jit.loadPtr(MacroAssembler::Address(op2GPR, JSCell::structureOffset()), structureGPR);
1419 DFG_TYPE_CHECK(
1420 JSValueRegs(op2GPR), rightChild, (~SpecCell) | SpecObject, m_jit.branchPtr(
1421 MacroAssembler::Equal,
1422 structureGPR,
1423 MacroAssembler::TrustedImmPtr(m_jit.vm()->stringStructure.get())));
1424 speculationCheck(BadType, JSValueRegs(op2GPR), rightChild,
1425 m_jit.branchTest8(
1426 MacroAssembler::NonZero,
1427 MacroAssembler::Address(structureGPR, Structure::typeInfoFlagsOffset()),
1428 MacroAssembler::TrustedImm32(MasqueradesAsUndefined)));
1429 }
1430
1431 // At this point we know that we can perform a straight-forward equality comparison on pointer
1432 // values because both left and right are pointers to objects that have no special equality
1433 // protocols.
1434 MacroAssembler::Jump falseCase = m_jit.branch64(MacroAssembler::NotEqual, op1GPR, op2GPR);
1435 MacroAssembler::Jump trueCase = m_jit.jump();
1436
1437 rightNotCell.link(&m_jit);
1438
1439 // We know that within this branch, rightChild must not be a cell. Check if that is enough to
1440 // prove that it is either null or undefined.
1441 if (needsTypeCheck(rightChild, SpecCell | SpecOther)) {
1442 m_jit.move(op2GPR, resultGPR);
1443 m_jit.and64(MacroAssembler::TrustedImm32(~TagBitUndefined), resultGPR);
1444
1445 typeCheck(
1446 JSValueRegs(op2GPR), rightChild, SpecCell | SpecOther,
1447 m_jit.branch64(
1448 MacroAssembler::NotEqual, resultGPR,
1449 MacroAssembler::TrustedImm64(ValueNull)));
1450 }
1451
1452 falseCase.link(&m_jit);
1453 m_jit.move(TrustedImm32(ValueFalse), resultGPR);
1454 MacroAssembler::Jump done = m_jit.jump();
1455 trueCase.link(&m_jit);
1456 m_jit.move(TrustedImm32(ValueTrue), resultGPR);
1457 done.link(&m_jit);
1458
1459 jsValueResult(resultGPR, m_currentNode, DataFormatJSBoolean);
1460 }
1461
1462 void SpeculativeJIT::compilePeepHoleObjectToObjectOrOtherEquality(Edge leftChild, Edge rightChild, Node* branchNode)
1463 {
1464 BlockIndex taken = branchNode->takenBlockIndex();
1465 BlockIndex notTaken = branchNode->notTakenBlockIndex();
1466
1467 SpeculateCellOperand op1(this, leftChild);
1468 JSValueOperand op2(this, rightChild, ManualOperandSpeculation);
1469 GPRTemporary result(this);
1470
1471 GPRReg op1GPR = op1.gpr();
1472 GPRReg op2GPR = op2.gpr();
1473 GPRReg resultGPR = result.gpr();
1474 GPRTemporary structure;
1475 GPRReg structureGPR = InvalidGPRReg;
1476
1477 bool masqueradesAsUndefinedWatchpointValid = m_jit.graph().globalObjectFor(m_currentNode->codeOrigin)->masqueradesAsUndefinedWatchpoint()->isStillValid();
1478
1479 if (!masqueradesAsUndefinedWatchpointValid) {
1480 // The masquerades as undefined case will use the structure register, so allocate it here.
1481 // Do this at the top of the function to avoid branching around a register allocation.
1482 GPRTemporary realStructure(this);
1483 structure.adopt(realStructure);
1484 structureGPR = structure.gpr();
1485 }
1486
1487 if (masqueradesAsUndefinedWatchpointValid) {
1488 m_jit.graph().globalObjectFor(m_currentNode->codeOrigin)->masqueradesAsUndefinedWatchpoint()->add(speculationWatchpoint());
1489 DFG_TYPE_CHECK(
1490 JSValueSource::unboxedCell(op1GPR), leftChild, SpecObject, m_jit.branchPtr(
1491 MacroAssembler::Equal,
1492 MacroAssembler::Address(op1GPR, JSCell::structureOffset()),
1493 MacroAssembler::TrustedImmPtr(m_jit.vm()->stringStructure.get())));
1494 } else {
1495 m_jit.loadPtr(MacroAssembler::Address(op1GPR, JSCell::structureOffset()), structureGPR);
1496 DFG_TYPE_CHECK(
1497 JSValueSource::unboxedCell(op1GPR), leftChild, SpecObject, m_jit.branchPtr(
1498 MacroAssembler::Equal,
1499 structureGPR,
1500 MacroAssembler::TrustedImmPtr(m_jit.vm()->stringStructure.get())));
1501 speculationCheck(BadType, JSValueSource::unboxedCell(op1GPR), leftChild,
1502 m_jit.branchTest8(
1503 MacroAssembler::NonZero,
1504 MacroAssembler::Address(structureGPR, Structure::typeInfoFlagsOffset()),
1505 MacroAssembler::TrustedImm32(MasqueradesAsUndefined)));
1506 }
1507
1508 // It seems that when programs do a == b where b may be either null/undefined
1509 // or an object, b is usually an object. Balance the branches to make that case fast.
1510 MacroAssembler::Jump rightNotCell =
1511 m_jit.branchTest64(MacroAssembler::NonZero, op2GPR, GPRInfo::tagMaskRegister);
1512
1513 // We know that within this branch, rightChild must be a cell.
1514 if (masqueradesAsUndefinedWatchpointValid) {
1515 m_jit.graph().globalObjectFor(m_currentNode->codeOrigin)->masqueradesAsUndefinedWatchpoint()->add(speculationWatchpoint());
1516 DFG_TYPE_CHECK(
1517 JSValueRegs(op2GPR), rightChild, (~SpecCell) | SpecObject, m_jit.branchPtr(
1518 MacroAssembler::Equal,
1519 MacroAssembler::Address(op2GPR, JSCell::structureOffset()),
1520 MacroAssembler::TrustedImmPtr(m_jit.vm()->stringStructure.get())));
1521 } else {
1522 m_jit.loadPtr(MacroAssembler::Address(op2GPR, JSCell::structureOffset()), structureGPR);
1523 DFG_TYPE_CHECK(
1524 JSValueRegs(op2GPR), rightChild, (~SpecCell) | SpecObject, m_jit.branchPtr(
1525 MacroAssembler::Equal,
1526 structureGPR,
1527 MacroAssembler::TrustedImmPtr(m_jit.vm()->stringStructure.get())));
1528 speculationCheck(BadType, JSValueRegs(op2GPR), rightChild,
1529 m_jit.branchTest8(
1530 MacroAssembler::NonZero,
1531 MacroAssembler::Address(structureGPR, Structure::typeInfoFlagsOffset()),
1532 MacroAssembler::TrustedImm32(MasqueradesAsUndefined)));
1533 }
1534
1535 // At this point we know that we can perform a straightforward equality comparison on pointer
1536 // values because both left and right are pointers to objects that have no special equality
1537 // protocols.
1538 branch64(MacroAssembler::Equal, op1GPR, op2GPR, taken);
1539
1540 // We know that within this branch, rightChild must not be a cell. Check if that is enough to
1541 // prove that it is either null or undefined.
1542 if (!needsTypeCheck(rightChild, SpecCell | SpecOther))
1543 rightNotCell.link(&m_jit);
1544 else {
1545 jump(notTaken, ForceJump);
1546
1547 rightNotCell.link(&m_jit);
1548 m_jit.move(op2GPR, resultGPR);
1549 m_jit.and64(MacroAssembler::TrustedImm32(~TagBitUndefined), resultGPR);
1550
1551 typeCheck(
1552 JSValueRegs(op2GPR), rightChild, SpecCell | SpecOther, m_jit.branch64(
1553 MacroAssembler::NotEqual, resultGPR,
1554 MacroAssembler::TrustedImm64(ValueNull)));
1555 }
1556
1557 jump(notTaken);
1558 }
1559
1560 void SpeculativeJIT::compileIntegerCompare(Node* node, MacroAssembler::RelationalCondition condition)
1561 {
1562 SpeculateIntegerOperand op1(this, node->child1());
1563 SpeculateIntegerOperand op2(this, node->child2());
1564 GPRTemporary result(this, op1, op2);
1565
1566 m_jit.compare32(condition, op1.gpr(), op2.gpr(), result.gpr());
1567
1568 // If we add a DataFormatBool, we should use it here.
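// compare32 leaves 0 or 1 in the result register. ValueFalse (0x6) and ValueTrue (0x7)
// differ only in that low bit, so OR-ing in ValueFalse yields the boxed JS boolean directly.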
1569 m_jit.or32(TrustedImm32(ValueFalse), result.gpr());
1570 jsValueResult(result.gpr(), m_currentNode, DataFormatJSBoolean);
1571 }
1572
1573 void SpeculativeJIT::compileDoubleCompare(Node* node, MacroAssembler::DoubleCondition condition)
1574 {
1575 SpeculateDoubleOperand op1(this, node->child1());
1576 SpeculateDoubleOperand op2(this, node->child2());
1577 GPRTemporary result(this);
1578
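// Materialize boxed true up front; on the fall-through path (comparison false) the
// xor below flips the low bit, turning ValueTrue into ValueFalse.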
1579 m_jit.move(TrustedImm32(ValueTrue), result.gpr());
1580 MacroAssembler::Jump trueCase = m_jit.branchDouble(condition, op1.fpr(), op2.fpr());
1581 m_jit.xor64(TrustedImm32(true), result.gpr());
1582 trueCase.link(&m_jit);
1583
1584 jsValueResult(result.gpr(), node, DataFormatJSBoolean);
1585 }
1586
1587 void SpeculativeJIT::compileValueAdd(Node* node)
1588 {
1589 JSValueOperand op1(this, node->child1());
1590 JSValueOperand op2(this, node->child2());
1591
1592 GPRReg op1GPR = op1.gpr();
1593 GPRReg op2GPR = op2.gpr();
1594
1595 flushRegisters();
1596
1597 GPRResult result(this);
1598 if (isKnownNotNumber(node->child1().node()) || isKnownNotNumber(node->child2().node()))
1599 callOperation(operationValueAddNotNumber, result.gpr(), op1GPR, op2GPR);
1600 else
1601 callOperation(operationValueAdd, result.gpr(), op1GPR, op2GPR);
1602
1603 jsValueResult(result.gpr(), node);
1604 }
1605
1606 void SpeculativeJIT::compileObjectOrOtherLogicalNot(Edge nodeUse)
1607 {
1608 JSValueOperand value(this, nodeUse, ManualOperandSpeculation);
1609 GPRTemporary result(this);
1610 GPRReg valueGPR = value.gpr();
1611 GPRReg resultGPR = result.gpr();
1612 GPRTemporary structure;
1613 GPRReg structureGPR = InvalidGPRReg;
1614
1615 bool masqueradesAsUndefinedWatchpointValid = m_jit.graph().globalObjectFor(m_currentNode->codeOrigin)->masqueradesAsUndefinedWatchpoint()->isStillValid();
1616
1617 if (!masqueradesAsUndefinedWatchpointValid) {
1618 // The masquerades as undefined case will use the structure register, so allocate it here.
1619 // Do this at the top of the function to avoid branching around a register allocation.
1620 GPRTemporary realStructure(this);
1621 structure.adopt(realStructure);
1622 structureGPR = structure.gpr();
1623 }
1624
1625 MacroAssembler::Jump notCell = m_jit.branchTest64(MacroAssembler::NonZero, valueGPR, GPRInfo::tagMaskRegister);
1626 if (masqueradesAsUndefinedWatchpointValid) {
1627 m_jit.graph().globalObjectFor(m_currentNode->codeOrigin)->masqueradesAsUndefinedWatchpoint()->add(speculationWatchpoint());
1628 DFG_TYPE_CHECK(
1629 JSValueRegs(valueGPR), nodeUse, (~SpecCell) | SpecObject, m_jit.branchPtr(
1630 MacroAssembler::Equal,
1631 MacroAssembler::Address(valueGPR, JSCell::structureOffset()),
1632 MacroAssembler::TrustedImmPtr(m_jit.vm()->stringStructure.get())));
1633 } else {
1634 m_jit.loadPtr(MacroAssembler::Address(valueGPR, JSCell::structureOffset()), structureGPR);
1635
1636 DFG_TYPE_CHECK(
1637 JSValueRegs(valueGPR), nodeUse, (~SpecCell) | SpecObject, m_jit.branchPtr(
1638 MacroAssembler::Equal,
1639 structureGPR,
1640 MacroAssembler::TrustedImmPtr(m_jit.vm()->stringStructure.get())));
1641
1642 MacroAssembler::Jump isNotMasqueradesAsUndefined =
1643 m_jit.branchTest8(
1644 MacroAssembler::Zero,
1645 MacroAssembler::Address(structureGPR, Structure::typeInfoFlagsOffset()),
1646 MacroAssembler::TrustedImm32(MasqueradesAsUndefined));
1647
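// A cell flagged MasqueradesAsUndefined only behaves like undefined when observed
// from its own global object, so we only need to bail when the structure's global
// object matches the one we are compiling for.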
1648 speculationCheck(BadType, JSValueRegs(valueGPR), nodeUse,
1649 m_jit.branchPtr(
1650 MacroAssembler::Equal,
1651 MacroAssembler::Address(structureGPR, Structure::globalObjectOffset()),
1652 MacroAssembler::TrustedImmPtr(m_jit.graph().globalObjectFor(m_currentNode->codeOrigin))));
1653
1654 isNotMasqueradesAsUndefined.link(&m_jit);
1655 }
1656 m_jit.move(TrustedImm32(ValueFalse), resultGPR);
1657 MacroAssembler::Jump done = m_jit.jump();
1658
1659 notCell.link(&m_jit);
1660
1661 if (needsTypeCheck(nodeUse, SpecCell | SpecOther)) {
1662 m_jit.move(valueGPR, resultGPR);
1663 m_jit.and64(MacroAssembler::TrustedImm32(~TagBitUndefined), resultGPR);
1664 typeCheck(
1665 JSValueRegs(valueGPR), nodeUse, SpecCell | SpecOther, m_jit.branch64(
1666 MacroAssembler::NotEqual,
1667 resultGPR,
1668 MacroAssembler::TrustedImm64(ValueNull)));
1669 }
1670 m_jit.move(TrustedImm32(ValueTrue), resultGPR);
1671
1672 done.link(&m_jit);
1673
1674 jsValueResult(resultGPR, m_currentNode, DataFormatJSBoolean);
1675 }
1676
1677 void SpeculativeJIT::compileLogicalNot(Node* node)
1678 {
1679 switch (node->child1().useKind()) {
1680 case ObjectOrOtherUse: {
1681 compileObjectOrOtherLogicalNot(node->child1());
1682 return;
1683 }
1684
1685 case Int32Use: {
1686 SpeculateIntegerOperand value(this, node->child1());
1687 GPRTemporary result(this, value);
1688 m_jit.compare32(MacroAssembler::Equal, value.gpr(), MacroAssembler::TrustedImm32(0), result.gpr());
1689 m_jit.or32(TrustedImm32(ValueFalse), result.gpr());
1690 jsValueResult(result.gpr(), node, DataFormatJSBoolean);
1691 return;
1692 }
1693
1694 case NumberUse: {
1695 SpeculateDoubleOperand value(this, node->child1());
1696 FPRTemporary scratch(this);
1697 GPRTemporary result(this);
1698 m_jit.move(TrustedImm32(ValueFalse), result.gpr());
1699 MacroAssembler::Jump nonZero = m_jit.branchDoubleNonZero(value.fpr(), scratch.fpr());
1700 m_jit.xor32(TrustedImm32(true), result.gpr());
1701 nonZero.link(&m_jit);
1702 jsValueResult(result.gpr(), node, DataFormatJSBoolean);
1703 return;
1704 }
1705
1706 case BooleanUse: {
1707 if (!needsTypeCheck(node->child1(), SpecBoolean)) {
1708 SpeculateBooleanOperand value(this, node->child1());
1709 GPRTemporary result(this, value);
1710
1711 m_jit.move(value.gpr(), result.gpr());
1712 m_jit.xor64(TrustedImm32(true), result.gpr());
1713
1714 jsValueResult(result.gpr(), node, DataFormatJSBoolean);
1715 return;
1716 }
1717
1718 JSValueOperand value(this, node->child1(), ManualOperandSpeculation);
1719 GPRTemporary result(this); // FIXME: We could reuse, but on speculation fail would need recovery to restore tag (akin to add).
1720
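// XOR-ing with ValueFalse maps the boxed booleans onto 0 and 1; any other bits
// left set mean the input was not a boolean, so we bail. XOR-ing with ValueTrue
// afterwards produces the boxed, negated boolean.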
1721 m_jit.move(value.gpr(), result.gpr());
1722 m_jit.xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), result.gpr());
1723 typeCheck(
1724 JSValueRegs(value.gpr()), node->child1(), SpecBoolean, m_jit.branchTest64(
1725 JITCompiler::NonZero, result.gpr(), TrustedImm32(static_cast<int32_t>(~1))));
1726 m_jit.xor64(TrustedImm32(static_cast<int32_t>(ValueTrue)), result.gpr());
1727
1728 // If we add a DataFormatBool, we should use it here.
1729 jsValueResult(result.gpr(), node, DataFormatJSBoolean);
1730 return;
1731 }
1732
1733 case UntypedUse: {
1734 JSValueOperand arg1(this, node->child1());
1735 GPRTemporary result(this);
1736
1737 GPRReg arg1GPR = arg1.gpr();
1738 GPRReg resultGPR = result.gpr();
1739
1740 arg1.use();
1741
1742 m_jit.move(arg1GPR, resultGPR);
1743 m_jit.xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), resultGPR);
1744 JITCompiler::Jump slowCase = m_jit.branchTest64(JITCompiler::NonZero, resultGPR, TrustedImm32(static_cast<int32_t>(~1)));
1745
1746 addSlowPathGenerator(
1747 slowPathCall(slowCase, this, dfgConvertJSValueToBoolean, resultGPR, arg1GPR));
1748
1749 m_jit.xor64(TrustedImm32(static_cast<int32_t>(ValueTrue)), resultGPR);
1750 jsValueResult(resultGPR, node, DataFormatJSBoolean, UseChildrenCalledExplicitly);
1751 return;
1752 }
1753
1754 default:
1755 RELEASE_ASSERT_NOT_REACHED();
1756 break;
1757 }
1758 }
1759
1760 void SpeculativeJIT::emitObjectOrOtherBranch(Edge nodeUse, BlockIndex taken, BlockIndex notTaken)
1761 {
1762 JSValueOperand value(this, nodeUse, ManualOperandSpeculation);
1763 GPRTemporary scratch(this);
1764 GPRReg valueGPR = value.gpr();
1765 GPRReg scratchGPR = scratch.gpr();
1766
1767 MacroAssembler::Jump notCell = m_jit.branchTest64(MacroAssembler::NonZero, valueGPR, GPRInfo::tagMaskRegister);
1768 if (m_jit.graph().globalObjectFor(m_currentNode->codeOrigin)->masqueradesAsUndefinedWatchpoint()->isStillValid()) {
1769 m_jit.graph().globalObjectFor(m_currentNode->codeOrigin)->masqueradesAsUndefinedWatchpoint()->add(speculationWatchpoint());
1770
1771 DFG_TYPE_CHECK(
1772 JSValueRegs(valueGPR), nodeUse, (~SpecCell) | SpecObject, m_jit.branchPtr(
1773 MacroAssembler::Equal,
1774 MacroAssembler::Address(valueGPR, JSCell::structureOffset()),
1775 MacroAssembler::TrustedImmPtr(m_jit.vm()->stringStructure.get())));
1776 } else {
1777 m_jit.loadPtr(MacroAssembler::Address(valueGPR, JSCell::structureOffset()), scratchGPR);
1778
1779 DFG_TYPE_CHECK(
1780 JSValueRegs(valueGPR), nodeUse, (~SpecCell) | SpecObject, m_jit.branchPtr(
1781 MacroAssembler::Equal,
1782 scratchGPR,
1783 MacroAssembler::TrustedImmPtr(m_jit.vm()->stringStructure.get())));
1784
1785 JITCompiler::Jump isNotMasqueradesAsUndefined = m_jit.branchTest8(JITCompiler::Zero, MacroAssembler::Address(scratchGPR, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
1786
1787 speculationCheck(BadType, JSValueRegs(valueGPR), nodeUse,
1788 m_jit.branchPtr(
1789 MacroAssembler::Equal,
1790 MacroAssembler::Address(scratchGPR, Structure::globalObjectOffset()),
1791 MacroAssembler::TrustedImmPtr(m_jit.graph().globalObjectFor(m_currentNode->codeOrigin))));
1792
1793 isNotMasqueradesAsUndefined.link(&m_jit);
1794 }
1795 jump(taken, ForceJump);
1796
1797 notCell.link(&m_jit);
1798
1799 if (needsTypeCheck(nodeUse, SpecCell | SpecOther)) {
1800 m_jit.move(valueGPR, scratchGPR);
1801 m_jit.and64(MacroAssembler::TrustedImm32(~TagBitUndefined), scratchGPR);
1802 typeCheck(
1803 JSValueRegs(valueGPR), nodeUse, SpecCell | SpecOther, m_jit.branch64(
1804 MacroAssembler::NotEqual, scratchGPR, MacroAssembler::TrustedImm64(ValueNull)));
1805 }
1806 jump(notTaken);
1807
1808 noResult(m_currentNode);
1809 }
1810
1811 void SpeculativeJIT::emitBranch(Node* node)
1812 {
1813 BlockIndex taken = node->takenBlockIndex();
1814 BlockIndex notTaken = node->notTakenBlockIndex();
1815
1816 switch (node->child1().useKind()) {
1817 case ObjectOrOtherUse: {
1818 emitObjectOrOtherBranch(node->child1(), taken, notTaken);
1819 return;
1820 }
1821
1822 case Int32Use:
1823 case NumberUse: {
1824 if (node->child1().useKind() == Int32Use) {
1825 bool invert = false;
1826
1827 if (taken == nextBlock()) {
1828 invert = true;
1829 BlockIndex tmp = taken;
1830 taken = notTaken;
1831 notTaken = tmp;
1832 }
1833
1834 SpeculateIntegerOperand value(this, node->child1());
1835 branchTest32(invert ? MacroAssembler::Zero : MacroAssembler::NonZero, value.gpr(), taken);
1836 } else {
1837 SpeculateDoubleOperand value(this, node->child1());
1838 FPRTemporary scratch(this);
1839 branchDoubleNonZero(value.fpr(), scratch.fpr(), taken);
1840 }
1841
1842 jump(notTaken);
1843
1844 noResult(node);
1845 return;
1846 }
1847
1848 case UntypedUse:
1849 case BooleanUse: {
1850 JSValueOperand value(this, node->child1(), ManualOperandSpeculation);
1851 GPRReg valueGPR = value.gpr();
1852
1853 if (node->child1().useKind() == BooleanUse) {
1854 if (!needsTypeCheck(node->child1(), SpecBoolean)) {
1855 MacroAssembler::ResultCondition condition = MacroAssembler::NonZero;
1856
1857 if (taken == nextBlock()) {
1858 condition = MacroAssembler::Zero;
1859 BlockIndex tmp = taken;
1860 taken = notTaken;
1861 notTaken = tmp;
1862 }
1863
1864 branchTest32(condition, valueGPR, TrustedImm32(true), taken);
1865 jump(notTaken);
1866 } else {
1867 branch64(MacroAssembler::Equal, valueGPR, MacroAssembler::TrustedImm64(JSValue::encode(jsBoolean(false))), notTaken);
1868 branch64(MacroAssembler::Equal, valueGPR, MacroAssembler::TrustedImm64(JSValue::encode(jsBoolean(true))), taken);
1869
1870 typeCheck(JSValueRegs(valueGPR), node->child1(), SpecBoolean, m_jit.jump());
1871 }
1872 value.use();
1873 } else {
1874 GPRTemporary result(this);
1875 GPRReg resultGPR = result.gpr();
1876
1877 if (node->child1()->prediction() & SpecInt32) {
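// Boxed int32s are exactly the values at or above TagTypeNumber, so once the
// encoding of 0 has been sent to notTaken, any remaining int32 is nonzero and
// therefore truthy.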
1878 branch64(MacroAssembler::Equal, valueGPR, MacroAssembler::TrustedImm64(JSValue::encode(jsNumber(0))), notTaken);
1879 branch64(MacroAssembler::AboveOrEqual, valueGPR, GPRInfo::tagTypeNumberRegister, taken);
1880 }
1881
1882 if (node->child1()->prediction() & SpecBoolean) {
1883 branch64(MacroAssembler::Equal, valueGPR, MacroAssembler::TrustedImm64(JSValue::encode(jsBoolean(false))), notTaken);
1884 branch64(MacroAssembler::Equal, valueGPR, MacroAssembler::TrustedImm64(JSValue::encode(jsBoolean(true))), taken);
1885 }
1886
1887 value.use();
1888
1889 silentSpillAllRegisters(resultGPR);
1890 callOperation(dfgConvertJSValueToBoolean, resultGPR, valueGPR);
1891 silentFillAllRegisters(resultGPR);
1892
1893 branchTest32(MacroAssembler::NonZero, resultGPR, taken);
1894 jump(notTaken);
1895 }
1896
1897 noResult(node, UseChildrenCalledExplicitly);
1898 return;
1899 }
1900
1901 default:
1902 RELEASE_ASSERT_NOT_REACHED();
1903 }
1904 }
1905
1906 void SpeculativeJIT::compile(Node* node)
1907 {
1908 NodeType op = node->op();
1909
1910 #if ENABLE(DFG_REGISTER_ALLOCATION_VALIDATION)
1911 m_jit.clearRegisterAllocationOffsets();
1912 #endif
1913
1914 switch (op) {
1915 case JSConstant:
1916 initConstantInfo(node);
1917 break;
1918
1919 case PhantomArguments:
1920 initConstantInfo(node);
1921 break;
1922
1923 case WeakJSConstant:
1924 m_jit.addWeakReference(node->weakConstant());
1925 initConstantInfo(node);
1926 break;
1927
1928 case Identity: {
1929 // CSE should always eliminate this.
1930 RELEASE_ASSERT_NOT_REACHED();
1931 break;
1932 }
1933
1934 case GetLocal: {
1935 SpeculatedType prediction = node->variableAccessData()->prediction();
1936 AbstractValue& value = m_state.variables().operand(node->local());
1937
1938 // If we have no prediction for this local, then don't attempt to compile.
1939 if (prediction == SpecNone) {
1940 terminateSpeculativeExecution(InadequateCoverage, JSValueRegs(), 0);
1941 break;
1942 }
1943
1944 // If the CFA is tracking this variable and it found that the variable
1945 // cannot have been assigned, then don't attempt to proceed.
1946 if (value.isClear()) {
1947 // FIXME: We should trap instead.
1948 // https://bugs.webkit.org/show_bug.cgi?id=110383
1949 terminateSpeculativeExecution(InadequateCoverage, JSValueRegs(), 0);
1950 break;
1951 }
1952
1953 if (node->variableAccessData()->shouldUseDoubleFormat()) {
1954 FPRTemporary result(this);
1955 m_jit.loadDouble(JITCompiler::addressFor(node->local()), result.fpr());
1956 VirtualRegister virtualRegister = node->virtualRegister();
1957 m_fprs.retain(result.fpr(), virtualRegister, SpillOrderDouble);
1958 m_generationInfo[virtualRegister].initDouble(node, node->refCount(), result.fpr());
1959 break;
1960 }
1961
1962 if (isInt32Speculation(value.m_type)) {
1963 GPRTemporary result(this);
1964 m_jit.load32(JITCompiler::payloadFor(node->local()), result.gpr());
1965
1966 // Like integerResult, but don't useChildren - our children are phi nodes,
1967 // and don't represent values within this dataflow with virtual registers.
1968 VirtualRegister virtualRegister = node->virtualRegister();
1969 m_gprs.retain(result.gpr(), virtualRegister, SpillOrderInteger);
1970 m_generationInfo[virtualRegister].initInteger(node, node->refCount(), result.gpr());
1971 break;
1972 }
1973
1974 GPRTemporary result(this);
1975 m_jit.load64(JITCompiler::addressFor(node->local()), result.gpr());
1976
1977 // Like jsValueResult, but don't useChildren - our children are phi nodes,
1978 // and don't represent values within this dataflow with virtual registers.
1979 VirtualRegister virtualRegister = node->virtualRegister();
1980 m_gprs.retain(result.gpr(), virtualRegister, SpillOrderJS);
1981
1982 DataFormat format;
1983 if (isCellSpeculation(value.m_type))
1984 format = DataFormatJSCell;
1985 else if (isBooleanSpeculation(value.m_type))
1986 format = DataFormatJSBoolean;
1987 else
1988 format = DataFormatJS;
1989
1990 m_generationInfo[virtualRegister].initJSValue(node, node->refCount(), result.gpr(), format);
1991 break;
1992 }
1993
1994 case GetLocalUnlinked: {
1995 GPRTemporary result(this);
1996
1997 m_jit.load64(JITCompiler::addressFor(node->unlinkedLocal()), result.gpr());
1998
1999 jsValueResult(result.gpr(), node);
2000 break;
2001 }
2002
2003 case MovHintAndCheck: {
2004 compileMovHintAndCheck(node);
2005 break;
2006 }
2007
2008 case InlineStart: {
2009 compileInlineStart(node);
2010 break;
2011 }
2012
2013 case MovHint:
2014 case ZombieHint: {
2015 RELEASE_ASSERT_NOT_REACHED();
2016 break;
2017 }
2018
2019 case SetLocal: {
2020 // SetLocal doubles as a hint as to where a node will be stored and
2021 // as a speculation point. So before we speculate make sure that we
2022 // know where the child of this node needs to go in the virtual
2023 // stack.
2024 compileMovHint(node);
2025
2026 if (node->variableAccessData()->shouldUnboxIfPossible()) {
2027 if (node->variableAccessData()->shouldUseDoubleFormat()) {
2028 SpeculateDoubleOperand value(this, node->child1());
2029 m_jit.storeDouble(value.fpr(), JITCompiler::addressFor(node->local()));
2030 noResult(node);
2031 // Indicate that it's no longer necessary to retrieve the value of
2032 // this bytecode variable from registers or other locations in the stack,
2033 // but that it is stored as a double.
2034 recordSetLocal(node->local(), ValueSource(DoubleInJSStack));
2035 break;
2036 }
2037
2038 SpeculatedType predictedType = node->variableAccessData()->argumentAwarePrediction();
2039 if (isInt32Speculation(predictedType)) {
2040 SpeculateIntegerOperand value(this, node->child1());
2041 m_jit.store32(value.gpr(), JITCompiler::payloadFor(node->local()));
2042 noResult(node);
2043 recordSetLocal(node->local(), ValueSource(Int32InJSStack));
2044 break;
2045 }
2046 if (isCellSpeculation(predictedType)) {
2047 SpeculateCellOperand cell(this, node->child1());
2048 GPRReg cellGPR = cell.gpr();
2049 m_jit.store64(cellGPR, JITCompiler::addressFor(node->local()));
2050 noResult(node);
2051 recordSetLocal(node->local(), ValueSource(CellInJSStack));
2052 break;
2053 }
2054 if (isBooleanSpeculation(predictedType)) {
2055 SpeculateBooleanOperand boolean(this, node->child1());
2056 m_jit.store64(boolean.gpr(), JITCompiler::addressFor(node->local()));
2057 noResult(node);
2058 recordSetLocal(node->local(), ValueSource(BooleanInJSStack));
2059 break;
2060 }
2061 }
2062
2063 JSValueOperand value(this, node->child1());
2064 m_jit.store64(value.gpr(), JITCompiler::addressFor(node->local()));
2065 noResult(node);
2066
2067 recordSetLocal(node->local(), ValueSource(ValueInJSStack));
2068
2069 // If we're storing an arguments object that has been optimized away,
2070 // our variable event stream for OSR exit now reflects the optimized
2071 // value (JSValue()). On the slow path, we want an arguments object
2072 // instead. We add an additional move hint to show OSR exit that it
2073 // needs to reconstruct the arguments object.
2074 if (node->child1()->op() == PhantomArguments)
2075 compileMovHint(node);
2076
2077 break;
2078 }
2079
2080 case SetArgument:
2081 // This is a no-op; it just marks the fact that the argument is being used.
2082 // But it may be profitable to use this as a hook to run speculation checks
2083 // on arguments, thereby allowing us to trivially eliminate such checks if
2084 // the argument is not used.
2085 break;
2086
2087 case BitAnd:
2088 case BitOr:
2089 case BitXor:
2090 if (isInt32Constant(node->child1().node())) {
2091 SpeculateIntegerOperand op2(this, node->child2());
2092 GPRTemporary result(this, op2);
2093
2094 bitOp(op, valueOfInt32Constant(node->child1().node()), op2.gpr(), result.gpr());
2095
2096 integerResult(result.gpr(), node);
2097 } else if (isInt32Constant(node->child2().node())) {
2098 SpeculateIntegerOperand op1(this, node->child1());
2099 GPRTemporary result(this, op1);
2100
2101 bitOp(op, valueOfInt32Constant(node->child2().node()), op1.gpr(), result.gpr());
2102
2103 integerResult(result.gpr(), node);
2104 } else {
2105 SpeculateIntegerOperand op1(this, node->child1());
2106 SpeculateIntegerOperand op2(this, node->child2());
2107 GPRTemporary result(this, op1, op2);
2108
2109 GPRReg reg1 = op1.gpr();
2110 GPRReg reg2 = op2.gpr();
2111 bitOp(op, reg1, reg2, result.gpr());
2112
2113 integerResult(result.gpr(), node);
2114 }
2115 break;
2116
2117 case BitRShift:
2118 case BitLShift:
2119 case BitURShift:
2120 if (isInt32Constant(node->child2().node())) {
2121 SpeculateIntegerOperand op1(this, node->child1());
2122 GPRTemporary result(this, op1);
2123
2124 shiftOp(op, op1.gpr(), valueOfInt32Constant(node->child2().node()) & 0x1f, result.gpr());
2125
2126 integerResult(result.gpr(), node);
2127 } else {
2128 // Do not allow the shift amount to be used as the result; the MacroAssembler does not permit this.
2129 SpeculateIntegerOperand op1(this, node->child1());
2130 SpeculateIntegerOperand op2(this, node->child2());
2131 GPRTemporary result(this, op1);
2132
2133 GPRReg reg1 = op1.gpr();
2134 GPRReg reg2 = op2.gpr();
2135 shiftOp(op, reg1, reg2, result.gpr());
2136
2137 integerResult(result.gpr(), node);
2138 }
2139 break;
2140
2141 case UInt32ToNumber: {
2142 compileUInt32ToNumber(node);
2143 break;
2144 }
2145
2146 case DoubleAsInt32: {
2147 compileDoubleAsInt32(node);
2148 break;
2149 }
2150
2151 case ValueToInt32: {
2152 compileValueToInt32(node);
2153 break;
2154 }
2155
2156 case Int32ToDouble:
2157 case ForwardInt32ToDouble: {
2158 compileInt32ToDouble(node);
2159 break;
2160 }
2161
2162 case ValueAdd:
2163 case ArithAdd:
2164 compileAdd(node);
2165 break;
2166
2167 case MakeRope:
2168 compileMakeRope(node);
2169 break;
2170
2171 case ArithSub:
2172 compileArithSub(node);
2173 break;
2174
2175 case ArithNegate:
2176 compileArithNegate(node);
2177 break;
2178
2179 case ArithMul:
2180 compileArithMul(node);
2181 break;
2182
2183 case ArithIMul:
2184 compileArithIMul(node);
2185 break;
2186
2187 case ArithDiv: {
2188 switch (node->binaryUseKind()) {
2189 case Int32Use: {
2190 #if CPU(X86) || CPU(X86_64)
2191 compileIntegerArithDivForX86(node);
2192 #elif CPU(ARM64)
2193 compileIntegerArithDivForARM64(node);
2194 #else
2195 // See DFGFixupPhase - on any architecture other than X86[_64] or ARM64 we'll force the prediction to double.
2196 ASSERT_NOT_REACHED();
2197 #endif
2198 break;
2199 }
2200
2201 case NumberUse: {
2202 SpeculateDoubleOperand op1(this, node->child1());
2203 SpeculateDoubleOperand op2(this, node->child2());
2204 FPRTemporary result(this, op1);
2205
2206 FPRReg reg1 = op1.fpr();
2207 FPRReg reg2 = op2.fpr();
2208 m_jit.divDouble(reg1, reg2, result.fpr());
2209
2210 doubleResult(result.fpr(), node);
2211 break;
2212 }
2213
2214 default:
2215 RELEASE_ASSERT_NOT_REACHED();
2216 break;
2217 }
2218 break;
2219 }
2220
2221 case ArithMod: {
2222 compileArithMod(node);
2223 break;
2224 }
2225
2226 case ArithAbs: {
2227 switch (node->child1().useKind()) {
2228 case Int32Use: {
2229 SpeculateIntegerOperand op1(this, node->child1());
2230 GPRTemporary result(this);
2231 GPRTemporary scratch(this);
2232
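// Branchless abs: scratch = result >> 31 (all ones if negative), and
// (result + scratch) ^ scratch negates negative values. INT_MIN survives this
// unchanged, hence the overflow check against 1 << 31 below.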
2233 m_jit.zeroExtend32ToPtr(op1.gpr(), result.gpr());
2234 m_jit.rshift32(result.gpr(), MacroAssembler::TrustedImm32(31), scratch.gpr());
2235 m_jit.add32(scratch.gpr(), result.gpr());
2236 m_jit.xor32(scratch.gpr(), result.gpr());
2237 speculationCheck(Overflow, JSValueRegs(), 0, m_jit.branch32(MacroAssembler::Equal, result.gpr(), MacroAssembler::TrustedImm32(1 << 31)));
2238 integerResult(result.gpr(), node);
2239 break;
2240 }
2241
2242 case NumberUse: {
2243 SpeculateDoubleOperand op1(this, node->child1());
2244 FPRTemporary result(this);
2245
2246 m_jit.absDouble(op1.fpr(), result.fpr());
2247 doubleResult(result.fpr(), node);
2248 break;
2249 }
2250
2251 default:
2252 RELEASE_ASSERT_NOT_REACHED();
2253 break;
2254 }
2255 break;
2256 }
2257
2258 case ArithMin:
2259 case ArithMax: {
2260 switch (node->binaryUseKind()) {
2261 case Int32Use: {
2262 SpeculateStrictInt32Operand op1(this, node->child1());
2263 SpeculateStrictInt32Operand op2(this, node->child2());
2264 GPRTemporary result(this, op1);
2265
2266 MacroAssembler::Jump op1Less = m_jit.branch32(op == ArithMin ? MacroAssembler::LessThan : MacroAssembler::GreaterThan, op1.gpr(), op2.gpr());
2267 m_jit.move(op2.gpr(), result.gpr());
2268 if (op1.gpr() != result.gpr()) {
2269 MacroAssembler::Jump done = m_jit.jump();
2270 op1Less.link(&m_jit);
2271 m_jit.move(op1.gpr(), result.gpr());
2272 done.link(&m_jit);
2273 } else
2274 op1Less.link(&m_jit);
2275
2276 integerResult(result.gpr(), node);
2277 break;
2278 }
2279
2280 case NumberUse: {
2281 SpeculateDoubleOperand op1(this, node->child1());
2282 SpeculateDoubleOperand op2(this, node->child2());
2283 FPRTemporary result(this, op1);
2284
2285 FPRReg op1FPR = op1.fpr();
2286 FPRReg op2FPR = op2.fpr();
2287 FPRReg resultFPR = result.fpr();
2288
2289 MacroAssembler::JumpList done;
2290
2291 MacroAssembler::Jump op1Less = m_jit.branchDouble(op == ArithMin ? MacroAssembler::DoubleLessThan : MacroAssembler::DoubleGreaterThan, op1FPR, op2FPR);
2292
2293 // op2 is either the lesser one, or one of them is NaN.
2294 MacroAssembler::Jump op2Less = m_jit.branchDouble(op == ArithMin ? MacroAssembler::DoubleGreaterThanOrEqual : MacroAssembler::DoubleLessThanOrEqual, op1FPR, op2FPR);
2295
2296 // Unordered case. We don't know which of op1, op2 is NaN. Manufacture NaN by adding
2297 // op1 + op2 and putting it into result.
2298 m_jit.addDouble(op1FPR, op2FPR, resultFPR);
2299 done.append(m_jit.jump());
2300
2301 op2Less.link(&m_jit);
2302 m_jit.moveDouble(op2FPR, resultFPR);
2303
2304 if (op1FPR != resultFPR) {
2305 done.append(m_jit.jump());
2306
2307 op1Less.link(&m_jit);
2308 m_jit.moveDouble(op1FPR, resultFPR);
2309 } else
2310 op1Less.link(&m_jit);
2311
2312 done.link(&m_jit);
2313
2314 doubleResult(resultFPR, node);
2315 break;
2316 }
2317
2318 default:
2319 RELEASE_ASSERT_NOT_REACHED();
2320 break;
2321 }
2322 break;
2323 }
2324
2325 case ArithSqrt: {
2326 SpeculateDoubleOperand op1(this, node->child1());
2327 FPRTemporary result(this, op1);
2328
2329 m_jit.sqrtDouble(op1.fpr(), result.fpr());
2330
2331 doubleResult(result.fpr(), node);
2332 break;
2333 }
2334
2335 case LogicalNot:
2336 compileLogicalNot(node);
2337 break;
2338
2339 case CompareLess:
2340 if (compare(node, JITCompiler::LessThan, JITCompiler::DoubleLessThan, operationCompareLess))
2341 return;
2342 break;
2343
2344 case CompareLessEq:
2345 if (compare(node, JITCompiler::LessThanOrEqual, JITCompiler::DoubleLessThanOrEqual, operationCompareLessEq))
2346 return;
2347 break;
2348
2349 case CompareGreater:
2350 if (compare(node, JITCompiler::GreaterThan, JITCompiler::DoubleGreaterThan, operationCompareGreater))
2351 return;
2352 break;
2353
2354 case CompareGreaterEq:
2355 if (compare(node, JITCompiler::GreaterThanOrEqual, JITCompiler::DoubleGreaterThanOrEqual, operationCompareGreaterEq))
2356 return;
2357 break;
2358
2359 case CompareEqConstant:
2360 ASSERT(isNullConstant(node->child2().node()));
2361 if (nonSpeculativeCompareNull(node, node->child1()))
2362 return;
2363 break;
2364
2365 case CompareEq:
2366 if (compare(node, JITCompiler::Equal, JITCompiler::DoubleEqual, operationCompareEq))
2367 return;
2368 break;
2369
2370 case CompareStrictEqConstant:
2371 if (compileStrictEqForConstant(node, node->child1(), valueOfJSConstant(node->child2().node())))
2372 return;
2373 break;
2374
2375 case CompareStrictEq:
2376 if (compileStrictEq(node))
2377 return;
2378 break;
2379
2380 case StringCharCodeAt: {
2381 compileGetCharCodeAt(node);
2382 break;
2383 }
2384
2385 case StringCharAt: {
2386 // Relies on the StringCharAt node having the same basic layout as GetByVal.
2387 compileGetByValOnString(node);
2388 break;
2389 }
2390
2391 case StringFromCharCode: {
2392 compileFromCharCode(node);
2393 break;
2394 }
2395
2396 case CheckArray: {
2397 checkArray(node);
2398 break;
2399 }
2400
2401 case Arrayify:
2402 case ArrayifyToStructure: {
2403 arrayify(node);
2404 break;
2405 }
2406
2407 case GetByVal: {
2408 switch (node->arrayMode().type()) {
2409 case Array::SelectUsingPredictions:
2410 case Array::ForceExit:
2411 RELEASE_ASSERT_NOT_REACHED();
2412 terminateSpeculativeExecution(InadequateCoverage, JSValueRegs(), 0);
2413 break;
2414 case Array::Generic: {
2415 JSValueOperand base(this, node->child1());
2416 JSValueOperand property(this, node->child2());
2417 GPRReg baseGPR = base.gpr();
2418 GPRReg propertyGPR = property.gpr();
2419
2420 flushRegisters();
2421 GPRResult result(this);
2422 callOperation(operationGetByVal, result.gpr(), baseGPR, propertyGPR);
2423
2424 jsValueResult(result.gpr(), node);
2425 break;
2426 }
2427 case Array::Int32:
2428 case Array::Contiguous: {
2429 if (node->arrayMode().isInBounds()) {
2430 SpeculateStrictInt32Operand property(this, node->child2());
2431 StorageOperand storage(this, node->child3());
2432
2433 GPRReg propertyReg = property.gpr();
2434 GPRReg storageReg = storage.gpr();
2435
2436 if (!m_compileOkay)
2437 return;
2438
2439 speculationCheck(OutOfBounds, JSValueRegs(), 0, m_jit.branch32(MacroAssembler::AboveOrEqual, propertyReg, MacroAssembler::Address(storageReg, Butterfly::offsetOfPublicLength())));
2440
2441 GPRTemporary result(this);
2442 m_jit.load64(MacroAssembler::BaseIndex(storageReg, propertyReg, MacroAssembler::TimesEight), result.gpr());
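// A hole in Int32/Contiguous storage is the empty JSValue, which encodes as all
// zero bits, so a zero load means we read a hole and must bail out.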
2443 speculationCheck(LoadFromHole, JSValueRegs(), 0, m_jit.branchTest64(MacroAssembler::Zero, result.gpr()));
2444 jsValueResult(result.gpr(), node, node->arrayMode().type() == Array::Int32 ? DataFormatJSInteger : DataFormatJS);
2445 break;
2446 }
2447
2448 SpeculateCellOperand base(this, node->child1());
2449 SpeculateStrictInt32Operand property(this, node->child2());
2450 StorageOperand storage(this, node->child3());
2451
2452 GPRReg baseReg = base.gpr();
2453 GPRReg propertyReg = property.gpr();
2454 GPRReg storageReg = storage.gpr();
2455
2456 if (!m_compileOkay)
2457 return;
2458
2459 GPRTemporary result(this);
2460 GPRReg resultReg = result.gpr();
2461
2462 MacroAssembler::JumpList slowCases;
2463
2464 slowCases.append(m_jit.branch32(MacroAssembler::AboveOrEqual, propertyReg, MacroAssembler::Address(storageReg, Butterfly::offsetOfPublicLength())));
2465
2466 m_jit.load64(MacroAssembler::BaseIndex(storageReg, propertyReg, MacroAssembler::TimesEight), resultReg);
2467 slowCases.append(m_jit.branchTest64(MacroAssembler::Zero, resultReg));
2468
2469 addSlowPathGenerator(
2470 slowPathCall(
2471 slowCases, this, operationGetByValArrayInt,
2472 result.gpr(), baseReg, propertyReg));
2473
2474 jsValueResult(resultReg, node);
2475 break;
2476 }
2477
2478 case Array::Double: {
2479 if (node->arrayMode().isInBounds()) {
2480 if (node->arrayMode().isSaneChain()) {
2481 JSGlobalObject* globalObject = m_jit.globalObjectFor(node->codeOrigin);
2482 ASSERT(globalObject->arrayPrototypeChainIsSane());
2483 globalObject->arrayPrototype()->structure()->addTransitionWatchpoint(speculationWatchpoint());
2484 globalObject->objectPrototype()->structure()->addTransitionWatchpoint(speculationWatchpoint());
2485 }
2486
2487 SpeculateStrictInt32Operand property(this, node->child2());
2488 StorageOperand storage(this, node->child3());
2489
2490 GPRReg propertyReg = property.gpr();
2491 GPRReg storageReg = storage.gpr();
2492
2493 if (!m_compileOkay)
2494 return;
2495
2496 speculationCheck(OutOfBounds, JSValueRegs(), 0, m_jit.branch32(MacroAssembler::AboveOrEqual, propertyReg, MacroAssembler::Address(storageReg, Butterfly::offsetOfPublicLength())));
2497
2498 FPRTemporary result(this);
2499 m_jit.loadDouble(MacroAssembler::BaseIndex(storageReg, propertyReg, MacroAssembler::TimesEight), result.fpr());
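// Holes in double storage are represented by a NaN pattern (real NaNs are never
// stored in this indexing mode), so a value that is unordered with itself is a hole.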
2500 if (!node->arrayMode().isSaneChain())
2501 speculationCheck(LoadFromHole, JSValueRegs(), 0, m_jit.branchDouble(MacroAssembler::DoubleNotEqualOrUnordered, result.fpr(), result.fpr()));
2502 doubleResult(result.fpr(), node);
2503 break;
2504 }
2505
2506 SpeculateCellOperand base(this, node->child1());
2507 SpeculateStrictInt32Operand property(this, node->child2());
2508 StorageOperand storage(this, node->child3());
2509
2510 GPRReg baseReg = base.gpr();
2511 GPRReg propertyReg = property.gpr();
2512 GPRReg storageReg = storage.gpr();
2513
2514 if (!m_compileOkay)
2515 return;
2516
2517 GPRTemporary result(this);
2518 FPRTemporary temp(this);
2519 GPRReg resultReg = result.gpr();
2520 FPRReg tempReg = temp.fpr();
2521
2522 MacroAssembler::JumpList slowCases;
2523
2524 slowCases.append(m_jit.branch32(MacroAssembler::AboveOrEqual, propertyReg, MacroAssembler::Address(storageReg, Butterfly::offsetOfPublicLength())));
2525
2526 m_jit.loadDouble(MacroAssembler::BaseIndex(storageReg, propertyReg, MacroAssembler::TimesEight), tempReg);
2527 slowCases.append(m_jit.branchDouble(MacroAssembler::DoubleNotEqualOrUnordered, tempReg, tempReg));
2528 boxDouble(tempReg, resultReg);
2529
2530 addSlowPathGenerator(
2531 slowPathCall(
2532 slowCases, this, operationGetByValArrayInt,
2533 result.gpr(), baseReg, propertyReg));
2534
2535 jsValueResult(resultReg, node);
2536 break;
2537 }
2538
2539 case Array::ArrayStorage:
2540 case Array::SlowPutArrayStorage: {
2541 if (node->arrayMode().isInBounds()) {
2542 SpeculateStrictInt32Operand property(this, node->child2());
2543 StorageOperand storage(this, node->child3());
2544
2545 GPRReg propertyReg = property.gpr();
2546 GPRReg storageReg = storage.gpr();
2547
2548 if (!m_compileOkay)
2549 return;
2550
2551 speculationCheck(OutOfBounds, JSValueRegs(), 0, m_jit.branch32(MacroAssembler::AboveOrEqual, propertyReg, MacroAssembler::Address(storageReg, ArrayStorage::vectorLengthOffset())));
2552
2553 GPRTemporary result(this);
2554 m_jit.load64(MacroAssembler::BaseIndex(storageReg, propertyReg, MacroAssembler::TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0])), result.gpr());
2555 speculationCheck(LoadFromHole, JSValueRegs(), 0, m_jit.branchTest64(MacroAssembler::Zero, result.gpr()));
2556
2557 jsValueResult(result.gpr(), node);
2558 break;
2559 }
2560
2561 SpeculateCellOperand base(this, node->child1());
2562 SpeculateStrictInt32Operand property(this, node->child2());
2563 StorageOperand storage(this, node->child3());
2564
2565 GPRReg baseReg = base.gpr();
2566 GPRReg propertyReg = property.gpr();
2567 GPRReg storageReg = storage.gpr();
2568
2569 if (!m_compileOkay)
2570 return;
2571
2572 GPRTemporary result(this);
2573 GPRReg resultReg = result.gpr();
2574
2575 MacroAssembler::JumpList slowCases;
2576
2577 slowCases.append(m_jit.branch32(MacroAssembler::AboveOrEqual, propertyReg, MacroAssembler::Address(storageReg, ArrayStorage::vectorLengthOffset())));
2578
2579 m_jit.load64(MacroAssembler::BaseIndex(storageReg, propertyReg, MacroAssembler::TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0])), resultReg);
2580 slowCases.append(m_jit.branchTest64(MacroAssembler::Zero, resultReg));
2581
2582 addSlowPathGenerator(
2583 slowPathCall(
2584 slowCases, this, operationGetByValArrayInt,
2585 result.gpr(), baseReg, propertyReg));
2586
2587 jsValueResult(resultReg, node);
2588 break;
2589 }
2590 case Array::String:
2591 compileGetByValOnString(node);
2592 break;
2593 case Array::Arguments:
2594 compileGetByValOnArguments(node);
2595 break;
2596 case Array::Int8Array:
2597 compileGetByValOnIntTypedArray(m_jit.vm()->int8ArrayDescriptor(), node, sizeof(int8_t), SignedTypedArray);
2598 break;
2599 case Array::Int16Array:
2600 compileGetByValOnIntTypedArray(m_jit.vm()->int16ArrayDescriptor(), node, sizeof(int16_t), SignedTypedArray);
2601 break;
2602 case Array::Int32Array:
2603 compileGetByValOnIntTypedArray(m_jit.vm()->int32ArrayDescriptor(), node, sizeof(int32_t), SignedTypedArray);
2604 break;
2605 case Array::Uint8Array:
2606 compileGetByValOnIntTypedArray(m_jit.vm()->uint8ArrayDescriptor(), node, sizeof(uint8_t), UnsignedTypedArray);
2607 break;
2608 case Array::Uint8ClampedArray:
2609 compileGetByValOnIntTypedArray(m_jit.vm()->uint8ClampedArrayDescriptor(), node, sizeof(uint8_t), UnsignedTypedArray);
2610 break;
2611 case Array::Uint16Array:
2612 compileGetByValOnIntTypedArray(m_jit.vm()->uint16ArrayDescriptor(), node, sizeof(uint16_t), UnsignedTypedArray);
2613 break;
2614 case Array::Uint32Array:
2615 compileGetByValOnIntTypedArray(m_jit.vm()->uint32ArrayDescriptor(), node, sizeof(uint32_t), UnsignedTypedArray);
2616 break;
2617 case Array::Float32Array:
2618 compileGetByValOnFloatTypedArray(m_jit.vm()->float32ArrayDescriptor(), node, sizeof(float));
2619 break;
2620 case Array::Float64Array:
2621 compileGetByValOnFloatTypedArray(m_jit.vm()->float64ArrayDescriptor(), node, sizeof(double));
2622 break;
2623 default:
2624 RELEASE_ASSERT_NOT_REACHED();
2625 break;
2626 }
2627 break;
2628 }
2629
2630 case PutByVal:
2631 case PutByValAlias: {
2632 Edge child1 = m_jit.graph().varArgChild(node, 0);
2633 Edge child2 = m_jit.graph().varArgChild(node, 1);
2634 Edge child3 = m_jit.graph().varArgChild(node, 2);
2635 Edge child4 = m_jit.graph().varArgChild(node, 3);
2636
2637 ArrayMode arrayMode = node->arrayMode().modeForPut();
2638 bool alreadyHandled = false;
2639
2640 switch (arrayMode.type()) {
2641 case Array::SelectUsingPredictions:
2642 case Array::ForceExit:
2643 RELEASE_ASSERT_NOT_REACHED();
2644 terminateSpeculativeExecution(InadequateCoverage, JSValueRegs(), 0);
2645 alreadyHandled = true;
2646 break;
2647 case Array::Generic: {
2648 RELEASE_ASSERT(node->op() == PutByVal);
2649
2650 JSValueOperand arg1(this, child1);
2651 JSValueOperand arg2(this, child2);
2652 JSValueOperand arg3(this, child3);
2653 GPRReg arg1GPR = arg1.gpr();
2654 GPRReg arg2GPR = arg2.gpr();
2655 GPRReg arg3GPR = arg3.gpr();
2656 flushRegisters();
2657
2658 callOperation(m_jit.strictModeFor(node->codeOrigin) ? operationPutByValStrict : operationPutByValNonStrict, arg1GPR, arg2GPR, arg3GPR);
2659
2660 noResult(node);
2661 alreadyHandled = true;
2662 break;
2663 }
2664 default:
2665 break;
2666 }
2667
2668 if (alreadyHandled)
2669 break;
2670
2671 // FIXME: the base may not be necessary for some array access modes. But we have to
2672 // keep it alive to this point, so it's likely to be in a register anyway. Likely
2673 // no harm in locking it here.
2674 SpeculateCellOperand base(this, child1);
2675 SpeculateStrictInt32Operand property(this, child2);
2676
2677 GPRReg baseReg = base.gpr();
2678 GPRReg propertyReg = property.gpr();
2679
2680 switch (arrayMode.type()) {
2681 case Array::Int32:
2682 case Array::Contiguous: {
2683 JSValueOperand value(this, child3, ManualOperandSpeculation);
2684
2685 GPRReg valueReg = value.gpr();
2686
2687 if (!m_compileOkay)
2688 return;
2689
2690 if (arrayMode.type() == Array::Int32) {
2691 DFG_TYPE_CHECK(
2692 JSValueRegs(valueReg), child3, SpecInt32,
2693 m_jit.branch64(
2694 MacroAssembler::Below, valueReg, GPRInfo::tagTypeNumberRegister));
2695 }
2696
2697 if (arrayMode.type() == Array::Contiguous && Heap::isWriteBarrierEnabled()) {
2698 GPRTemporary scratch(this);
2699 writeBarrier(baseReg, value.gpr(), child3, WriteBarrierForPropertyAccess, scratch.gpr());
2700 }
2701
2702 StorageOperand storage(this, child4);
2703 GPRReg storageReg = storage.gpr();
2704
2705 if (node->op() == PutByValAlias) {
2706 // Store the value to the array.
2707 GPRReg propertyReg = property.gpr();
2708 GPRReg valueReg = value.gpr();
2709 m_jit.store64(valueReg, MacroAssembler::BaseIndex(storageReg, propertyReg, MacroAssembler::TimesEight));
2710
2711 noResult(node);
2712 break;
2713 }
2714
2715 GPRTemporary temporary;
2716 GPRReg temporaryReg = temporaryRegisterForPutByVal(temporary, node);
2717
2718 MacroAssembler::Jump slowCase;
2719
2720 if (arrayMode.isInBounds()) {
2721 speculationCheck(
2722 StoreToHoleOrOutOfBounds, JSValueRegs(), 0,
2723 m_jit.branch32(MacroAssembler::AboveOrEqual, propertyReg, MacroAssembler::Address(storageReg, Butterfly::offsetOfPublicLength())));
2724 } else {
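// Possibly-out-of-bounds store: a write past the public length but still within
// the vector just bumps the public length; a write past the vector length goes to
// the slow path (or fails the speculation if this mode forbids out-of-bounds).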
2725 MacroAssembler::Jump inBounds = m_jit.branch32(MacroAssembler::Below, propertyReg, MacroAssembler::Address(storageReg, Butterfly::offsetOfPublicLength()));
2726
2727 slowCase = m_jit.branch32(MacroAssembler::AboveOrEqual, propertyReg, MacroAssembler::Address(storageReg, Butterfly::offsetOfVectorLength()));
2728
2729 if (!arrayMode.isOutOfBounds())
2730 speculationCheck(OutOfBounds, JSValueRegs(), 0, slowCase);
2731
2732 m_jit.add32(TrustedImm32(1), propertyReg, temporaryReg);
2733 m_jit.store32(temporaryReg, MacroAssembler::Address(storageReg, Butterfly::offsetOfPublicLength()));
2734
2735 inBounds.link(&m_jit);
2736 }
2737
2738 m_jit.store64(valueReg, MacroAssembler::BaseIndex(storageReg, propertyReg, MacroAssembler::TimesEight));
2739
2740 base.use();
2741 property.use();
2742 value.use();
2743 storage.use();
2744
2745 if (arrayMode.isOutOfBounds()) {
2746 addSlowPathGenerator(
2747 slowPathCall(
2748 slowCase, this,
2749 m_jit.codeBlock()->isStrictMode() ? operationPutByValBeyondArrayBoundsStrict : operationPutByValBeyondArrayBoundsNonStrict,
2750 NoResult, baseReg, propertyReg, valueReg));
2751 }
2752
2753 noResult(node, UseChildrenCalledExplicitly);
2754 break;
2755 }
2756
2757 case Array::Double: {
2758 compileDoublePutByVal(node, base, property);
2759 break;
2760 }
2761
2762 case Array::ArrayStorage:
2763 case Array::SlowPutArrayStorage: {
2764 JSValueOperand value(this, child3);
2765
2766 GPRReg valueReg = value.gpr();
2767
2768 if (!m_compileOkay)
2769 return;
2770
2771 if (Heap::isWriteBarrierEnabled()) {
2772 GPRTemporary scratch(this);
2773 writeBarrier(baseReg, value.gpr(), child3, WriteBarrierForPropertyAccess, scratch.gpr());
2774 }
2775
2776 StorageOperand storage(this, child4);
2777 GPRReg storageReg = storage.gpr();
2778
2779 if (node->op() == PutByValAlias) {
2780 // Store the value to the array.
2781 GPRReg propertyReg = property.gpr();
2782 GPRReg valueReg = value.gpr();
2783 m_jit.store64(valueReg, MacroAssembler::BaseIndex(storageReg, propertyReg, MacroAssembler::TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0])));
2784
2785 noResult(node);
2786 break;
2787 }
2788
2789 GPRTemporary temporary;
2790 GPRReg temporaryReg = temporaryRegisterForPutByVal(temporary, node);
2791
2792 MacroAssembler::JumpList slowCases;
2793
2794 MacroAssembler::Jump beyondArrayBounds = m_jit.branch32(MacroAssembler::AboveOrEqual, propertyReg, MacroAssembler::Address(storageReg, ArrayStorage::vectorLengthOffset()));
2795 if (!arrayMode.isOutOfBounds())
2796 speculationCheck(OutOfBounds, JSValueRegs(), 0, beyondArrayBounds);
2797 else
2798 slowCases.append(beyondArrayBounds);
2799
2800 // Check if we're writing to a hole; if so, increment m_numValuesInVector.
2801 if (arrayMode.isInBounds()) {
2802 speculationCheck(
2803 StoreToHole, JSValueRegs(), 0,
2804 m_jit.branchTest64(MacroAssembler::Zero, MacroAssembler::BaseIndex(storageReg, propertyReg, MacroAssembler::TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]))));
2805 } else {
2806 MacroAssembler::Jump notHoleValue = m_jit.branchTest64(MacroAssembler::NonZero, MacroAssembler::BaseIndex(storageReg, propertyReg, MacroAssembler::TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0])));
2807 if (arrayMode.isSlowPut()) {
2808 // This is sort of strange. If we wanted to optimize this code path, we would invert
2809 // the above branch. But it's simply not worth it since this only happens if we're
2810 // already having a bad time.
2811 slowCases.append(m_jit.jump());
2812 } else {
2813 m_jit.add32(TrustedImm32(1), MacroAssembler::Address(storageReg, ArrayStorage::numValuesInVectorOffset()));
2814
2815 // If we're writing to a hole, we might be growing the array.
2816 MacroAssembler::Jump lengthDoesNotNeedUpdate = m_jit.branch32(MacroAssembler::Below, propertyReg, MacroAssembler::Address(storageReg, ArrayStorage::lengthOffset()));
2817 m_jit.add32(TrustedImm32(1), propertyReg, temporaryReg);
2818 m_jit.store32(temporaryReg, MacroAssembler::Address(storageReg, ArrayStorage::lengthOffset()));
2819
2820 lengthDoesNotNeedUpdate.link(&m_jit);
2821 }
2822 notHoleValue.link(&m_jit);
2823 }
2824
2825 // Store the value to the array.
2826 m_jit.store64(valueReg, MacroAssembler::BaseIndex(storageReg, propertyReg, MacroAssembler::TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0])));
2827
2828 base.use();
2829 property.use();
2830 value.use();
2831 storage.use();
2832
2833 if (!slowCases.empty()) {
2834 addSlowPathGenerator(
2835 slowPathCall(
2836 slowCases, this,
2837 m_jit.codeBlock()->isStrictMode() ? operationPutByValBeyondArrayBoundsStrict : operationPutByValBeyondArrayBoundsNonStrict,
2838 NoResult, baseReg, propertyReg, valueReg));
2839 }
2840
2841 noResult(node, UseChildrenCalledExplicitly);
2842 break;
2843 }
2844
2845 case Array::Arguments: {
2846 JSValueOperand value(this, child3);
2847 GPRTemporary scratch(this);
2848 GPRTemporary scratch2(this);
2849
2850 GPRReg valueReg = value.gpr();
2851 GPRReg scratchReg = scratch.gpr();
2852 GPRReg scratch2Reg = scratch2.gpr();
2853
2854 if (!m_compileOkay)
2855 return;
2856
2857 // Two really lame checks.
2858 speculationCheck(
2859 Uncountable, JSValueSource(), 0,
2860 m_jit.branch32(
2861 MacroAssembler::AboveOrEqual, propertyReg,
2862 MacroAssembler::Address(baseReg, OBJECT_OFFSETOF(Arguments, m_numArguments))));
2863 speculationCheck(
2864 Uncountable, JSValueSource(), 0,
2865 m_jit.branchTestPtr(
2866 MacroAssembler::NonZero,
2867 MacroAssembler::Address(
2868 baseReg, OBJECT_OFFSETOF(Arguments, m_slowArguments))));
2869
2870 m_jit.move(propertyReg, scratch2Reg);
2871 m_jit.neg32(scratch2Reg);
2872 m_jit.signExtend32ToPtr(scratch2Reg, scratch2Reg);
2873 m_jit.loadPtr(
2874 MacroAssembler::Address(baseReg, OBJECT_OFFSETOF(Arguments, m_registers)),
2875 scratchReg);
2876
2877 m_jit.store64(
2878 valueReg,
2879 MacroAssembler::BaseIndex(
2880 scratchReg, scratch2Reg, MacroAssembler::TimesEight,
2881 CallFrame::thisArgumentOffset() * sizeof(Register) - sizeof(Register)));
2882
2883 noResult(node);
2884 break;
2885 }
2886
2887 case Array::Int8Array:
2888 compilePutByValForIntTypedArray(m_jit.vm()->int8ArrayDescriptor(), base.gpr(), property.gpr(), node, sizeof(int8_t), SignedTypedArray);
2889 break;
2890
2891 case Array::Int16Array:
2892 compilePutByValForIntTypedArray(m_jit.vm()->int16ArrayDescriptor(), base.gpr(), property.gpr(), node, sizeof(int16_t), SignedTypedArray);
2893 break;
2894
2895 case Array::Int32Array:
2896 compilePutByValForIntTypedArray(m_jit.vm()->int32ArrayDescriptor(), base.gpr(), property.gpr(), node, sizeof(int32_t), SignedTypedArray);
2897 break;
2898
2899 case Array::Uint8Array:
2900 compilePutByValForIntTypedArray(m_jit.vm()->uint8ArrayDescriptor(), base.gpr(), property.gpr(), node, sizeof(uint8_t), UnsignedTypedArray);
2901 break;
2902
2903 case Array::Uint8ClampedArray:
2904 compilePutByValForIntTypedArray(m_jit.vm()->uint8ClampedArrayDescriptor(), base.gpr(), property.gpr(), node, sizeof(uint8_t), UnsignedTypedArray, ClampRounding);
2905 break;
2906
2907 case Array::Uint16Array:
2908 compilePutByValForIntTypedArray(m_jit.vm()->uint16ArrayDescriptor(), base.gpr(), property.gpr(), node, sizeof(uint16_t), UnsignedTypedArray);
2909 break;
2910
2911 case Array::Uint32Array:
2912 compilePutByValForIntTypedArray(m_jit.vm()->uint32ArrayDescriptor(), base.gpr(), property.gpr(), node, sizeof(uint32_t), UnsignedTypedArray);
2913 break;
2914
2915 case Array::Float32Array:
2916 compilePutByValForFloatTypedArray(m_jit.vm()->float32ArrayDescriptor(), base.gpr(), property.gpr(), node, sizeof(float));
2917 break;
2918
2919 case Array::Float64Array:
2920 compilePutByValForFloatTypedArray(m_jit.vm()->float64ArrayDescriptor(), base.gpr(), property.gpr(), node, sizeof(double));
2921 break;
2922
2923 default:
2924 RELEASE_ASSERT_NOT_REACHED();
2925 break;
2926 }
2927
2928 break;
2929 }
2930
2931 case RegExpExec: {
2932 if (compileRegExpExec(node))
2933 return;
2934 if (!node->adjustedRefCount()) {
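// The result of the exec is never used, so the cheaper RegExpTest operation
// is sufficient here.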
2935 SpeculateCellOperand base(this, node->child1());
2936 SpeculateCellOperand argument(this, node->child2());
2937 GPRReg baseGPR = base.gpr();
2938 GPRReg argumentGPR = argument.gpr();
2939
2940 flushRegisters();
2941 GPRResult result(this);
2942 callOperation(operationRegExpTest, result.gpr(), baseGPR, argumentGPR);
2943
2944 // Must use jsValueResult because otherwise we screw up register
2945 // allocation, which thinks that this node has a result.
2946 jsValueResult(result.gpr(), node);
2947 break;
2948 }
2949
2950 SpeculateCellOperand base(this, node->child1());
2951 SpeculateCellOperand argument(this, node->child2());
2952 GPRReg baseGPR = base.gpr();
2953 GPRReg argumentGPR = argument.gpr();
2954
2955 flushRegisters();
2956 GPRResult result(this);
2957 callOperation(operationRegExpExec, result.gpr(), baseGPR, argumentGPR);
2958
2959 jsValueResult(result.gpr(), node);
2960 break;
2961 }
2962
2963 case RegExpTest: {
2964 SpeculateCellOperand base(this, node->child1());
2965 SpeculateCellOperand argument(this, node->child2());
2966 GPRReg baseGPR = base.gpr();
2967 GPRReg argumentGPR = argument.gpr();
2968
2969 flushRegisters();
2970 GPRResult result(this);
2971 callOperation(operationRegExpTest, result.gpr(), baseGPR, argumentGPR);
2972
2973 // If we add a DataFormatBool, we should use it here.
2974 m_jit.or32(TrustedImm32(ValueFalse), result.gpr());
2975 jsValueResult(result.gpr(), node, DataFormatJSBoolean);
2976 break;
2977 }
2978
2979 case ArrayPush: {
2980 ASSERT(node->arrayMode().isJSArray());
2981
2982 SpeculateCellOperand base(this, node->child1());
2983 GPRTemporary storageLength(this);
2984
2985 GPRReg baseGPR = base.gpr();
2986 GPRReg storageLengthGPR = storageLength.gpr();
2987
2988 StorageOperand storage(this, node->child3());
2989 GPRReg storageGPR = storage.gpr();
2990
2991 switch (node->arrayMode().type()) {
2992 case Array::Int32:
2993 case Array::Contiguous: {
2994 JSValueOperand value(this, node->child2(), ManualOperandSpeculation);
2995 GPRReg valueGPR = value.gpr();
2996
2997 if (node->arrayMode().type() == Array::Int32) {
2998 DFG_TYPE_CHECK(
2999 JSValueRegs(valueGPR), node->child2(), SpecInt32,
3000 m_jit.branch64(
3001 MacroAssembler::Below, valueGPR, GPRInfo::tagTypeNumberRegister));
3002 }
3003
3004 if (node->arrayMode().type() != Array::Int32 && Heap::isWriteBarrierEnabled()) {
3005 GPRTemporary scratch(this);
3006 writeBarrier(baseGPR, valueGPR, node->child2(), WriteBarrierForPropertyAccess, scratch.gpr(), storageLengthGPR);
3007 }
3008
3009 m_jit.load32(MacroAssembler::Address(storageGPR, Butterfly::offsetOfPublicLength()), storageLengthGPR);
3010 MacroAssembler::Jump slowPath = m_jit.branch32(MacroAssembler::AboveOrEqual, storageLengthGPR, MacroAssembler::Address(storageGPR, Butterfly::offsetOfVectorLength()));
3011 m_jit.store64(valueGPR, MacroAssembler::BaseIndex(storageGPR, storageLengthGPR, MacroAssembler::TimesEight));
3012 m_jit.add32(TrustedImm32(1), storageLengthGPR);
3013 m_jit.store32(storageLengthGPR, MacroAssembler::Address(storageGPR, Butterfly::offsetOfPublicLength()));
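// push() returns the new length; box it as an int32 JSValue by OR-ing in the
// TagTypeNumber bits.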
3014 m_jit.or64(GPRInfo::tagTypeNumberRegister, storageLengthGPR);
3015
3016 addSlowPathGenerator(
3017 slowPathCall(
3018 slowPath, this, operationArrayPush, NoResult, storageLengthGPR,
3019 valueGPR, baseGPR));
3020
3021 jsValueResult(storageLengthGPR, node);
3022 break;
3023 }
3024
3025 case Array::Double: {
3026 SpeculateDoubleOperand value(this, node->child2());
3027 FPRReg valueFPR = value.fpr();
3028
3029 DFG_TYPE_CHECK(
3030 JSValueRegs(), node->child2(), SpecRealNumber,
3031 m_jit.branchDouble(MacroAssembler::DoubleNotEqualOrUnordered, valueFPR, valueFPR));
3032
3033 m_jit.load32(MacroAssembler::Address(storageGPR, Butterfly::offsetOfPublicLength()), storageLengthGPR);
3034 MacroAssembler::Jump slowPath = m_jit.branch32(MacroAssembler::AboveOrEqual, storageLengthGPR, MacroAssembler::Address(storageGPR, Butterfly::offsetOfVectorLength()));
3035 m_jit.storeDouble(valueFPR, MacroAssembler::BaseIndex(storageGPR, storageLengthGPR, MacroAssembler::TimesEight));
3036 m_jit.add32(TrustedImm32(1), storageLengthGPR);
3037 m_jit.store32(storageLengthGPR, MacroAssembler::Address(storageGPR, Butterfly::offsetOfPublicLength()));
3038 m_jit.or64(GPRInfo::tagTypeNumberRegister, storageLengthGPR);
3039
3040 addSlowPathGenerator(
3041 slowPathCall(
3042 slowPath, this, operationArrayPushDouble, NoResult, storageLengthGPR,
3043 valueFPR, baseGPR));
3044
3045 jsValueResult(storageLengthGPR, node);
3046 break;
3047 }
3048
3049 case Array::ArrayStorage: {
3050 JSValueOperand value(this, node->child2());
3051 GPRReg valueGPR = value.gpr();
3052
3053 if (Heap::isWriteBarrierEnabled()) {
3054 GPRTemporary scratch(this);
3055 writeBarrier(baseGPR, valueGPR, node->child2(), WriteBarrierForPropertyAccess, scratch.gpr(), storageLengthGPR);
3056 }
3057
3058 m_jit.load32(MacroAssembler::Address(storageGPR, ArrayStorage::lengthOffset()), storageLengthGPR);
3059
3060 // Refuse to handle bizarre lengths.
3061 speculationCheck(Uncountable, JSValueRegs(), 0, m_jit.branch32(MacroAssembler::Above, storageLengthGPR, TrustedImm32(0x7ffffffe)));
3062
3063 MacroAssembler::Jump slowPath = m_jit.branch32(MacroAssembler::AboveOrEqual, storageLengthGPR, MacroAssembler::Address(storageGPR, ArrayStorage::vectorLengthOffset()));
3064
3065 m_jit.store64(valueGPR, MacroAssembler::BaseIndex(storageGPR, storageLengthGPR, MacroAssembler::TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0])));
3066
3067 m_jit.add32(TrustedImm32(1), storageLengthGPR);
3068 m_jit.store32(storageLengthGPR, MacroAssembler::Address(storageGPR, ArrayStorage::lengthOffset()));
3069 m_jit.add32(TrustedImm32(1), MacroAssembler::Address(storageGPR, OBJECT_OFFSETOF(ArrayStorage, m_numValuesInVector)));
3070 m_jit.or64(GPRInfo::tagTypeNumberRegister, storageLengthGPR);
3071
3072 addSlowPathGenerator(
3073 slowPathCall(
3074 slowPath, this, operationArrayPush, NoResult, storageLengthGPR,
3075 valueGPR, baseGPR));
3076
3077 jsValueResult(storageLengthGPR, node);
3078 break;
3079 }
3080
3081 default:
3082 CRASH();
3083 break;
3084 }
3085 break;
3086 }
3087
3088 case ArrayPop: {
3089 ASSERT(node->arrayMode().isJSArray());
3090
3091 SpeculateCellOperand base(this, node->child1());
3092 StorageOperand storage(this, node->child2());
3093 GPRTemporary value(this);
3094 GPRTemporary storageLength(this);
3095 FPRTemporary temp(this); // This is kind of lame, since we don't always need it. I'm relying on the fact that we don't have FPR pressure, especially in code that uses pop().
3096
3097 GPRReg baseGPR = base.gpr();
3098 GPRReg storageGPR = storage.gpr();
3099 GPRReg valueGPR = value.gpr();
3100 GPRReg storageLengthGPR = storageLength.gpr();
3101 FPRReg tempFPR = temp.fpr();
3102
3103 switch (node->arrayMode().type()) {
3104 case Array::Int32:
3105 case Array::Double:
3106 case Array::Contiguous: {
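// Pop fast path: an empty array yields undefined via the slow path move. Otherwise
// decrement the public length, load the last element, and clear its slot. Reading a
// hole (the zero pattern) or, for double arrays, a NaN forces the
// operationArrayPopAndRecoverLength slow path.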
3107 m_jit.load32(
3108 MacroAssembler::Address(storageGPR, Butterfly::offsetOfPublicLength()), storageLengthGPR);
3109 MacroAssembler::Jump undefinedCase =
3110 m_jit.branchTest32(MacroAssembler::Zero, storageLengthGPR);
3111 m_jit.sub32(TrustedImm32(1), storageLengthGPR);
3112 m_jit.store32(
3113 storageLengthGPR, MacroAssembler::Address(storageGPR, Butterfly::offsetOfPublicLength()));
3114 MacroAssembler::Jump slowCase;
3115 if (node->arrayMode().type() == Array::Double) {
3116 m_jit.loadDouble(
3117 MacroAssembler::BaseIndex(storageGPR, storageLengthGPR, MacroAssembler::TimesEight),
3118 tempFPR);
3119 // FIXME: This would not have to be here if changing the publicLength also zeroed the values between the old
3120 // length and the new length.
3121 m_jit.store64(
3122 MacroAssembler::TrustedImm64((int64_t)0), MacroAssembler::BaseIndex(storageGPR, storageLengthGPR, MacroAssembler::TimesEight));
3123 slowCase = m_jit.branchDouble(MacroAssembler::DoubleNotEqualOrUnordered, tempFPR, tempFPR);
3124 boxDouble(tempFPR, valueGPR);
3125 } else {
3126 m_jit.load64(
3127 MacroAssembler::BaseIndex(storageGPR, storageLengthGPR, MacroAssembler::TimesEight),
3128 valueGPR);
3129 // FIXME: This would not have to be here if changing the publicLength also zeroed the values between the old
3130 // length and the new length.
3131 m_jit.store64(
3132 MacroAssembler::TrustedImm64((int64_t)0), MacroAssembler::BaseIndex(storageGPR, storageLengthGPR, MacroAssembler::TimesEight));
3133 slowCase = m_jit.branchTest64(MacroAssembler::Zero, valueGPR);
3134 }
3135
3136 addSlowPathGenerator(
3137 slowPathMove(
3138 undefinedCase, this,
3139 MacroAssembler::TrustedImm64(JSValue::encode(jsUndefined())), valueGPR));
3140 addSlowPathGenerator(
3141 slowPathCall(
3142 slowCase, this, operationArrayPopAndRecoverLength, valueGPR, baseGPR));
3143
3144 // Because of the slow paths, we can't know for sure that the result is an int.
3145 jsValueResult(valueGPR, node);
3146 break;
3147 }
3148
3149 case Array::ArrayStorage: {
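// ArrayStorage pop: bail to operationArrayPop if the decremented index is outside the
// vector or the loaded slot is a hole; otherwise store the shrunken length, clear the
// vacated slot, and decrement m_numValuesInVector.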
3150 m_jit.load32(MacroAssembler::Address(storageGPR, ArrayStorage::lengthOffset()), storageLengthGPR);
3151
3152 JITCompiler::Jump undefinedCase =
3153 m_jit.branchTest32(MacroAssembler::Zero, storageLengthGPR);
3154
3155 m_jit.sub32(TrustedImm32(1), storageLengthGPR);
3156
3157 JITCompiler::JumpList slowCases;
3158 slowCases.append(m_jit.branch32(MacroAssembler::AboveOrEqual, storageLengthGPR, MacroAssembler::Address(storageGPR, ArrayStorage::vectorLengthOffset())));
3159
3160 m_jit.load64(MacroAssembler::BaseIndex(storageGPR, storageLengthGPR, MacroAssembler::TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0])), valueGPR);
3161 slowCases.append(m_jit.branchTest64(MacroAssembler::Zero, valueGPR));
3162
3163 m_jit.store32(storageLengthGPR, MacroAssembler::Address(storageGPR, ArrayStorage::lengthOffset()));
3164
3165 m_jit.store64(MacroAssembler::TrustedImm64((int64_t)0), MacroAssembler::BaseIndex(storageGPR, storageLengthGPR, MacroAssembler::TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0])));
3166 m_jit.sub32(MacroAssembler::TrustedImm32(1), MacroAssembler::Address(storageGPR, OBJECT_OFFSETOF(ArrayStorage, m_numValuesInVector)));
3167
3168 addSlowPathGenerator(
3169 slowPathMove(
3170 undefinedCase, this,
3171 MacroAssembler::TrustedImm64(JSValue::encode(jsUndefined())), valueGPR));
3172
3173 addSlowPathGenerator(
3174 slowPathCall(
3175 slowCases, this, operationArrayPop, valueGPR, baseGPR));
3176
3177 jsValueResult(valueGPR, node);
3178 break;
3179 }
3180
3181 default:
3182 CRASH();
3183 break;
3184 }
3185 break;
3186 }
3187
3188 case DFG::Jump: {
3189 BlockIndex taken = node->takenBlockIndex();
3190 jump(taken);
3191 noResult(node);
3192 break;
3193 }
3194
3195 case Branch:
3196 emitBranch(node);
3197 break;
3198
3199 case Return: {
3200 ASSERT(GPRInfo::callFrameRegister != GPRInfo::regT1);
3201 ASSERT(GPRInfo::regT1 != GPRInfo::returnValueGPR);
3202 ASSERT(GPRInfo::returnValueGPR != GPRInfo::callFrameRegister);
3203
3204 #if DFG_ENABLE(SUCCESS_STATS)
3205 static SamplingCounter counter("SpeculativeJIT");
3206 m_jit.emitCount(counter);
3207 #endif
3208
3209 // Return the result in returnValueGPR.
3210 JSValueOperand op1(this, node->child1());
3211 m_jit.move(op1.gpr(), GPRInfo::returnValueGPR);
3212
3213 // Grab the return address.
3214 m_jit.emitGetFromCallFrameHeaderPtr(JSStack::ReturnPC, GPRInfo::regT1);
3215 // Restore our caller's "r".
3216 m_jit.emitGetFromCallFrameHeaderPtr(JSStack::CallerFrame, GPRInfo::callFrameRegister);
3217 // Return.
3218 m_jit.restoreReturnAddressBeforeReturn(GPRInfo::regT1);
3219 m_jit.ret();
3220
3221 noResult(node);
3222 break;
3223 }
3224
3225 case Throw:
3226 case ThrowReferenceError: {
3227 // We expect that throw statements are rare and are intended to exit the code block
3228 // anyway, so we just OSR back to the old JIT for now.
3229 terminateSpeculativeExecution(Uncountable, JSValueRegs(), 0);
3230 break;
3231 }
3232
3233 case ToPrimitive: {
3234 RELEASE_ASSERT(node->child1().useKind() == UntypedUse);
3235 JSValueOperand op1(this, node->child1());
3236 GPRTemporary result(this, op1);
3237
3238 GPRReg op1GPR = op1.gpr();
3239 GPRReg resultGPR = result.gpr();
3240
3241 op1.use();
3242
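// If the abstract interpreter proved the input is already a number or boolean, it is
// its own primitive. Otherwise, non-cells and string cells pass through unchanged and
// every other cell goes to operationToPrimitive.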
3243 if (!(m_state.forNode(node->child1()).m_type & ~(SpecNumber | SpecBoolean)))
3244 m_jit.move(op1GPR, resultGPR);
3245 else {
3246 MacroAssembler::Jump alreadyPrimitive = m_jit.branchTest64(MacroAssembler::NonZero, op1GPR, GPRInfo::tagMaskRegister);
3247 MacroAssembler::Jump notPrimitive = m_jit.branchPtr(MacroAssembler::NotEqual, MacroAssembler::Address(op1GPR, JSCell::structureOffset()), MacroAssembler::TrustedImmPtr(m_jit.vm()->stringStructure.get()));
3248
3249 alreadyPrimitive.link(&m_jit);
3250 m_jit.move(op1GPR, resultGPR);
3251
3252 addSlowPathGenerator(
3253 slowPathCall(notPrimitive, this, operationToPrimitive, resultGPR, op1GPR));
3254 }
3255
3256 jsValueResult(resultGPR, node, UseChildrenCalledExplicitly);
3257 break;
3258 }
3259
3260 case ToString: {
3261 if (node->child1().useKind() == UntypedUse) {
3262 JSValueOperand op1(this, node->child1());
3263 GPRReg op1GPR = op1.gpr();
3264
3265 GPRResult result(this);
3266 GPRReg resultGPR = result.gpr();
3267
3268 flushRegisters();
3269
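// If the value is predicted to possibly be a string, check for a string cell inline
// and pass it through unchanged; everything else falls back to operationToString.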
3270 JITCompiler::Jump done;
3271 if (node->child1()->prediction() & SpecString) {
3272 JITCompiler::Jump slowPath1 = m_jit.branchTest64(
3273 JITCompiler::NonZero, op1GPR, GPRInfo::tagMaskRegister);
3274 JITCompiler::Jump slowPath2 = m_jit.branchPtr(
3275 JITCompiler::NotEqual,
3276 JITCompiler::Address(op1GPR, JSCell::structureOffset()),
3277 TrustedImmPtr(m_jit.vm()->stringStructure.get()));
3278 m_jit.move(op1GPR, resultGPR);
3279 done = m_jit.jump();
3280 slowPath1.link(&m_jit);
3281 slowPath2.link(&m_jit);
3282 }
3283 callOperation(operationToString, resultGPR, op1GPR);
3284 if (done.isSet())
3285 done.link(&m_jit);
3286 cellResult(resultGPR, node);
3287 break;
3288 }
3289
3290 compileToStringOnCell(node);
3291 break;
3292 }
3293
3294 case NewStringObject: {
3295 compileNewStringObject(node);
3296 break;
3297 }
3298
3299 case NewArray: {
3300 JSGlobalObject* globalObject = m_jit.graph().globalObjectFor(node->codeOrigin);
3301 if (!globalObject->isHavingABadTime() && !hasArrayStorage(node->indexingType())) {
3302 globalObject->havingABadTimeWatchpoint()->add(speculationWatchpoint());
3303
3304 Structure* structure = globalObject->arrayStructureForIndexingTypeDuringAllocation(node->indexingType());
3305 RELEASE_ASSERT(structure->indexingType() == node->indexingType());
3306 ASSERT(
3307 hasUndecided(structure->indexingType())
3308 || hasInt32(structure->indexingType())
3309 || hasDouble(structure->indexingType())
3310 || hasContiguous(structure->indexingType()));
3311
3312 unsigned numElements = node->numChildren();
3313
3314 GPRTemporary result(this);
3315 GPRTemporary storage(this);
3316
3317 GPRReg resultGPR = result.gpr();
3318 GPRReg storageGPR = storage.gpr();
3319
3320 emitAllocateJSArray(resultGPR, structure, storageGPR, numElements);
3321
3322 // At this point, one way or another, resultGPR and storageGPR have pointers to
3323 // the JSArray and the Butterfly, respectively.
3324
3325 ASSERT(!hasUndecided(structure->indexingType()) || !node->numChildren());
3326
3327 for (unsigned operandIdx = 0; operandIdx < node->numChildren(); ++operandIdx) {
3328 Edge use = m_jit.graph().m_varArgChildren[node->firstChild() + operandIdx];
3329 switch (node->indexingType()) {
3330 case ALL_BLANK_INDEXING_TYPES:
3331 case ALL_UNDECIDED_INDEXING_TYPES:
3332 CRASH();
3333 break;
3334 case ALL_DOUBLE_INDEXING_TYPES: {
3335 SpeculateDoubleOperand operand(this, use);
3336 FPRReg opFPR = operand.fpr();
3337 DFG_TYPE_CHECK(
3338 JSValueRegs(), use, SpecRealNumber,
3339 m_jit.branchDouble(
3340 MacroAssembler::DoubleNotEqualOrUnordered, opFPR, opFPR));
3341 m_jit.storeDouble(opFPR, MacroAssembler::Address(storageGPR, sizeof(double) * operandIdx));
3342 break;
3343 }
3344 case ALL_INT32_INDEXING_TYPES:
3345 case ALL_CONTIGUOUS_INDEXING_TYPES: {
3346 JSValueOperand operand(this, use, ManualOperandSpeculation);
3347 GPRReg opGPR = operand.gpr();
3348 if (hasInt32(node->indexingType())) {
3349 DFG_TYPE_CHECK(
3350 JSValueRegs(opGPR), use, SpecInt32,
3351 m_jit.branch64(
3352 MacroAssembler::Below, opGPR, GPRInfo::tagTypeNumberRegister));
3353 }
3354 m_jit.store64(opGPR, MacroAssembler::Address(storageGPR, sizeof(JSValue) * operandIdx));
3355 break;
3356 }
3357 default:
3358 CRASH();
3359 break;
3360 }
3361 }
3362
3363 // FIXME: we should really have a way of also returning the storageGPR. But
3364 // the bigger problem is that we shouldn't be allocating the array after
3365 // having computed - and probably spilled to the stack - all of the things
3366 // that will go into the array. The solution to that bigger problem will
3367 // also likely remove the redundant reload of the storage pointer that we
3368 // currently have.
3369
3370 cellResult(resultGPR, node);
3371 break;
3372 }
3373
3374 if (!node->numChildren()) {
3375 flushRegisters();
3376 GPRResult result(this);
3377 callOperation(operationNewEmptyArray, result.gpr(), globalObject->arrayStructureForIndexingTypeDuringAllocation(node->indexingType()));
3378 cellResult(result.gpr(), node);
3379 break;
3380 }
3381
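// General case: box each operand into a GC-visible scratch buffer (performing the
// per-indexing-type speculations along the way), then have operationNewArray build
// the array from that buffer.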
3382 size_t scratchSize = sizeof(EncodedJSValue) * node->numChildren();
3383 ScratchBuffer* scratchBuffer = m_jit.vm()->scratchBufferForSize(scratchSize);
3384 EncodedJSValue* buffer = scratchBuffer ? static_cast<EncodedJSValue*>(scratchBuffer->dataBuffer()) : 0;
3385
3386 for (unsigned operandIdx = 0; operandIdx < node->numChildren(); ++operandIdx) {
3387 // Need to perform the speculations that this node promises to perform. If we're
3388 // emitting code here and the indexing type is not array storage, then something
3389 // upstream has probably gone badly wrong already, but at least this path will
3390 // still be sound.
3391 Edge use = m_jit.graph().m_varArgChildren[node->firstChild() + operandIdx];
3392 switch (node->indexingType()) {
3393 case ALL_BLANK_INDEXING_TYPES:
3394 case ALL_UNDECIDED_INDEXING_TYPES:
3395 CRASH();
3396 break;
3397 case ALL_DOUBLE_INDEXING_TYPES: {
3398 SpeculateDoubleOperand operand(this, use);
3399 GPRTemporary scratch(this);
3400 FPRReg opFPR = operand.fpr();
3401 GPRReg scratchGPR = scratch.gpr();
3402 DFG_TYPE_CHECK(
3403 JSValueRegs(), use, SpecRealNumber,
3404 m_jit.branchDouble(
3405 MacroAssembler::DoubleNotEqualOrUnordered, opFPR, opFPR));
3406 m_jit.boxDouble(opFPR, scratchGPR);
3407 m_jit.store64(scratchGPR, buffer + operandIdx);
3408 break;
3409 }
3410 case ALL_INT32_INDEXING_TYPES: {
3411 JSValueOperand operand(this, use, ManualOperandSpeculation);
3412 GPRReg opGPR = operand.gpr();
3413 if (hasInt32(node->indexingType())) {
3414 DFG_TYPE_CHECK(
3415 JSValueRegs(opGPR), use, SpecInt32,
3416 m_jit.branch64(
3417 MacroAssembler::Below, opGPR, GPRInfo::tagTypeNumberRegister));
3418 }
3419 m_jit.store64(opGPR, buffer + operandIdx);
3420 break;
3421 }
3422 case ALL_CONTIGUOUS_INDEXING_TYPES:
3423 case ALL_ARRAY_STORAGE_INDEXING_TYPES: {
3424 JSValueOperand operand(this, use);
3425 GPRReg opGPR = operand.gpr();
3426 m_jit.store64(opGPR, buffer + operandIdx);
3427 operand.use();
3428 break;
3429 }
3430 default:
3431 CRASH();
3432 break;
3433 }
3434 }
3435
3436 switch (node->indexingType()) {
3437 case ALL_DOUBLE_INDEXING_TYPES:
3438 case ALL_INT32_INDEXING_TYPES:
3439 useChildren(node);
3440 break;
3441 default:
3442 break;
3443 }
3444
3445 flushRegisters();
3446
3447 if (scratchSize) {
3448 GPRTemporary scratch(this);
3449
3450 // Tell GC mark phase how much of the scratch buffer is active during call.
3451 m_jit.move(TrustedImmPtr(scratchBuffer->activeLengthPtr()), scratch.gpr());
3452 m_jit.storePtr(TrustedImmPtr(scratchSize), scratch.gpr());
3453 }
3454
3455 GPRResult result(this);
3456
3457 callOperation(
3458 operationNewArray, result.gpr(), globalObject->arrayStructureForIndexingTypeDuringAllocation(node->indexingType()),
3459 static_cast<void*>(buffer), node->numChildren());
3460
3461 if (scratchSize) {
3462 GPRTemporary scratch(this);
3463
3464 m_jit.move(TrustedImmPtr(scratchBuffer->activeLengthPtr()), scratch.gpr());
3465 m_jit.storePtr(TrustedImmPtr(0), scratch.gpr());
3466 }
3467
3468 cellResult(result.gpr(), node, UseChildrenCalledExplicitly);
3469 break;
3470 }
3471
3472 case NewArrayWithSize: {
3473 JSGlobalObject* globalObject = m_jit.graph().globalObjectFor(node->codeOrigin);
3474 if (!globalObject->isHavingABadTime() && !hasArrayStorage(node->indexingType())) {
3475 globalObject->havingABadTimeWatchpoint()->add(speculationWatchpoint());
3476
3477 SpeculateStrictInt32Operand size(this, node->child1());
3478 GPRTemporary result(this);
3479 GPRTemporary storage(this);
3480 GPRTemporary scratch(this);
3481 GPRTemporary scratch2(this);
3482
3483 GPRReg sizeGPR = size.gpr();
3484 GPRReg resultGPR = result.gpr();
3485 GPRReg storageGPR = storage.gpr();
3486 GPRReg scratchGPR = scratch.gpr();
3487 GPRReg scratch2GPR = scratch2.gpr();
3488
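// Inline allocation: sizes at or above MIN_SPARSE_ARRAY_INDEX go to the slow path.
// Allocate the butterfly (size * 8 bytes plus the indexing header), then the JSArray
// itself, store the public and vector lengths, and for double arrays fill the storage
// with the NaN pattern that represents a hole.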
3489 MacroAssembler::JumpList slowCases;
3490 slowCases.append(m_jit.branch32(MacroAssembler::AboveOrEqual, sizeGPR, TrustedImm32(MIN_SPARSE_ARRAY_INDEX)));
3491
3492 ASSERT((1 << 3) == sizeof(JSValue));
3493 m_jit.move(sizeGPR, scratchGPR);
3494 m_jit.lshift32(TrustedImm32(3), scratchGPR);
3495 m_jit.add32(TrustedImm32(sizeof(IndexingHeader)), scratchGPR, resultGPR);
3496 slowCases.append(
3497 emitAllocateBasicStorage(resultGPR, storageGPR));
3498 m_jit.subPtr(scratchGPR, storageGPR);
3499 Structure* structure = globalObject->arrayStructureForIndexingTypeDuringAllocation(node->indexingType());
3500 emitAllocateJSObject<JSArray>(resultGPR, ImmPtr(structure), storageGPR, scratchGPR, scratch2GPR, slowCases);
3501
3502 m_jit.store32(sizeGPR, MacroAssembler::Address(storageGPR, Butterfly::offsetOfPublicLength()));
3503 m_jit.store32(sizeGPR, MacroAssembler::Address(storageGPR, Butterfly::offsetOfVectorLength()));
3504
3505 if (hasDouble(node->indexingType())) {
3506 m_jit.move(TrustedImm64(bitwise_cast<int64_t>(QNaN)), scratchGPR);
3507 m_jit.move(sizeGPR, scratch2GPR);
3508 MacroAssembler::Jump done = m_jit.branchTest32(MacroAssembler::Zero, scratch2GPR);
3509 MacroAssembler::Label loop = m_jit.label();
3510 m_jit.sub32(TrustedImm32(1), scratch2GPR);
3511 m_jit.store64(scratchGPR, MacroAssembler::BaseIndex(storageGPR, scratch2GPR, MacroAssembler::TimesEight));
3512 m_jit.branchTest32(MacroAssembler::NonZero, scratch2GPR).linkTo(loop, &m_jit);
3513 done.link(&m_jit);
3514 }
3515
3516 addSlowPathGenerator(adoptPtr(
3517 new CallArrayAllocatorWithVariableSizeSlowPathGenerator(
3518 slowCases, this, operationNewArrayWithSize, resultGPR,
3519 globalObject->arrayStructureForIndexingTypeDuringAllocation(node->indexingType()),
3520 globalObject->arrayStructureForIndexingTypeDuringAllocation(ArrayWithArrayStorage),
3521 sizeGPR)));
3522
3523 cellResult(resultGPR, node);
3524 break;
3525 }
3526
3527 SpeculateStrictInt32Operand size(this, node->child1());
3528 GPRReg sizeGPR = size.gpr();
3529 flushRegisters();
3530 GPRResult result(this);
3531 GPRReg resultGPR = result.gpr();
3532 GPRReg structureGPR = selectScratchGPR(sizeGPR);
3533 MacroAssembler::Jump bigLength = m_jit.branch32(MacroAssembler::AboveOrEqual, sizeGPR, TrustedImm32(MIN_SPARSE_ARRAY_INDEX));
3534 m_jit.move(TrustedImmPtr(globalObject->arrayStructureForIndexingTypeDuringAllocation(node->indexingType())), structureGPR);
3535 MacroAssembler::Jump done = m_jit.jump();
3536 bigLength.link(&m_jit);
3537 m_jit.move(TrustedImmPtr(globalObject->arrayStructureForIndexingTypeDuringAllocation(ArrayWithArrayStorage)), structureGPR);
3538 done.link(&m_jit);
3539 callOperation(operationNewArrayWithSize, resultGPR, structureGPR, sizeGPR);
3540 cellResult(resultGPR, node);
3541 break;
3542 }
3543
3544 case NewArrayBuffer: {
3545 JSGlobalObject* globalObject = m_jit.graph().globalObjectFor(node->codeOrigin);
3546 IndexingType indexingType = node->indexingType();
3547 if (!globalObject->isHavingABadTime() && !hasArrayStorage(indexingType)) {
3548 globalObject->havingABadTimeWatchpoint()->add(speculationWatchpoint());
3549
3550 unsigned numElements = node->numConstants();
3551
3552 GPRTemporary result(this);
3553 GPRTemporary storage(this);
3554
3555 GPRReg resultGPR = result.gpr();
3556 GPRReg storageGPR = storage.gpr();
3557
3558 emitAllocateJSArray(resultGPR, globalObject->arrayStructureForIndexingTypeDuringAllocation(indexingType), storageGPR, numElements);
3559
3560 RELEASE_ASSERT(indexingType & IsArray);
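// Copy the constant elements straight out of the code block's constant buffer:
// raw double bits for ArrayWithDouble, encoded JSValues for everything else.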
3561 JSValue* data = m_jit.codeBlock()->constantBuffer(node->startConstant());
3562 if (indexingType == ArrayWithDouble) {
3563 for (unsigned index = 0; index < node->numConstants(); ++index) {
3564 double value = data[index].asNumber();
3565 m_jit.store64(
3566 Imm64(bitwise_cast<int64_t>(value)),
3567 MacroAssembler::Address(storageGPR, sizeof(double) * index));
3568 }
3569 } else {
3570 for (unsigned index = 0; index < node->numConstants(); ++index) {
3571 m_jit.store64(
3572 Imm64(JSValue::encode(data[index])),
3573 MacroAssembler::Address(storageGPR, sizeof(JSValue) * index));
3574 }
3575 }
3576
3577 cellResult(resultGPR, node);
3578 break;
3579 }
3580
3581 flushRegisters();
3582 GPRResult result(this);
3583
3584 callOperation(operationNewArrayBuffer, result.gpr(), globalObject->arrayStructureForIndexingTypeDuringAllocation(node->indexingType()), node->startConstant(), node->numConstants());
3585
3586 cellResult(result.gpr(), node);
3587 break;
3588 }
3589
3590 case NewRegexp: {
3591 flushRegisters();
3592 GPRResult result(this);
3593
3594 callOperation(operationNewRegexp, result.gpr(), m_jit.codeBlock()->regexp(node->regexpIndex()));
3595
3596 cellResult(result.gpr(), node);
3597 break;
3598 }
3599
3600 case ConvertThis: {
3601 ASSERT(node->child1().useKind() == UntypedUse);
3602 JSValueOperand thisValue(this, node->child1());
3603 GPRReg thisValueGPR = thisValue.gpr();
3604
3605 flushRegisters();
3606
3607 GPRResult result(this);
3608 callOperation(operationConvertThis, result.gpr(), thisValueGPR);
3609
3610 cellResult(result.gpr(), node);
3611 break;
3612 }
3613
3614 case CreateThis: {
3615 // Note that there is not much profit in speculating here. The only things we
3616 // speculate on are (1) that it's a cell, since that eliminates cell checks
3617 // later if the proto is reused, and (2) a FinalObject prediction, so that we
3618 // get recompiled if the prediction turns out to be wrong (since otherwise
3619 // we'd start taking the slow path a lot).
3620
3621 SpeculateCellOperand callee(this, node->child1());
3622 GPRTemporary result(this);
3623 GPRTemporary allocator(this);
3624 GPRTemporary structure(this);
3625 GPRTemporary scratch(this);
3626
3627 GPRReg calleeGPR = callee.gpr();
3628 GPRReg resultGPR = result.gpr();
3629 GPRReg allocatorGPR = allocator.gpr();
3630 GPRReg structureGPR = structure.gpr();
3631 GPRReg scratchGPR = scratch.gpr();
3632
3633 MacroAssembler::JumpList slowPath;
3634
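// Fast path: pull the allocator and structure out of the callee's ObjectAllocationProfile.
// A missing allocator, or a failed inline allocation, falls back to operationCreateThis.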
3635 m_jit.loadPtr(JITCompiler::Address(calleeGPR, JSFunction::offsetOfAllocationProfile() + ObjectAllocationProfile::offsetOfAllocator()), allocatorGPR);
3636 m_jit.loadPtr(JITCompiler::Address(calleeGPR, JSFunction::offsetOfAllocationProfile() + ObjectAllocationProfile::offsetOfStructure()), structureGPR);
3637 slowPath.append(m_jit.branchTestPtr(MacroAssembler::Zero, allocatorGPR));
3638 emitAllocateJSObject(resultGPR, allocatorGPR, structureGPR, TrustedImmPtr(0), scratchGPR, slowPath);
3639
3640 addSlowPathGenerator(slowPathCall(slowPath, this, operationCreateThis, resultGPR, calleeGPR, node->inlineCapacity()));
3641
3642 cellResult(resultGPR, node);
3643 break;
3644 }
3645
3646 case AllocationProfileWatchpoint: {
3647 jsCast<JSFunction*>(node->function())->addAllocationProfileWatchpoint(speculationWatchpoint());
3648 noResult(node);
3649 break;
3650 }
3651
3652 case NewObject: {
3653 GPRTemporary result(this);
3654 GPRTemporary allocator(this);
3655 GPRTemporary scratch(this);
3656
3657 GPRReg resultGPR = result.gpr();
3658 GPRReg allocatorGPR = allocator.gpr();
3659 GPRReg scratchGPR = scratch.gpr();
3660
3661 MacroAssembler::JumpList slowPath;
3662
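// Inline-allocate the object from the MarkedAllocator that serves this structure's
// inline capacity; operationNewObject handles the case where the free list is empty.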
3663 Structure* structure = node->structure();
3664 size_t allocationSize = JSObject::allocationSize(structure->inlineCapacity());
3665 MarkedAllocator* allocatorPtr = &m_jit.vm()->heap.allocatorForObjectWithoutDestructor(allocationSize);
3666
3667 m_jit.move(TrustedImmPtr(allocatorPtr), allocatorGPR);
3668 emitAllocateJSObject(resultGPR, allocatorGPR, TrustedImmPtr(structure), TrustedImmPtr(0), scratchGPR, slowPath);
3669
3670 addSlowPathGenerator(slowPathCall(slowPath, this, operationNewObject, resultGPR, structure));
3671
3672 cellResult(resultGPR, node);
3673 break;
3674 }
3675
3676 case GetCallee: {
3677 GPRTemporary result(this);
3678 m_jit.loadPtr(JITCompiler::addressFor(static_cast<VirtualRegister>(node->codeOrigin.stackOffset() + static_cast<int>(JSStack::Callee))), result.gpr());
3679 cellResult(result.gpr(), node);
3680 break;
3681 }
3682
3683 case SetCallee: {
3684 SpeculateCellOperand callee(this, node->child1());
3685 m_jit.storePtr(callee.gpr(), JITCompiler::addressFor(static_cast<VirtualRegister>(node->codeOrigin.stackOffset() + static_cast<int>(JSStack::Callee))));
3686 noResult(node);
3687 break;
3688 }
3689
3690 case GetScope: {
3691 SpeculateCellOperand function(this, node->child1());
3692 GPRTemporary result(this, function);
3693 m_jit.loadPtr(JITCompiler::Address(function.gpr(), JSFunction::offsetOfScopeChain()), result.gpr());
3694 cellResult(result.gpr(), node);
3695 break;
3696 }
3697
3698 case GetMyScope: {
3699 GPRTemporary result(this);
3700 GPRReg resultGPR = result.gpr();
3701
3702 m_jit.loadPtr(JITCompiler::addressFor(static_cast<VirtualRegister>(node->codeOrigin.stackOffset() + static_cast<int>(JSStack::ScopeChain))), resultGPR);
3703 cellResult(resultGPR, node);
3704 break;
3705 }
3706
3707 case SetMyScope: {
3708 SpeculateCellOperand callee(this, node->child1());
3709 m_jit.storePtr(callee.gpr(), JITCompiler::addressFor(static_cast<VirtualRegister>(node->codeOrigin.stackOffset() + static_cast<int>(JSStack::ScopeChain))));
3710 noResult(node);
3711 break;
3712 }
3713
3714 case SkipTopScope: {
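// The top scope is skipped only if the activation has actually been created;
// otherwise the scope chain does not yet contain it.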
3715 SpeculateCellOperand scope(this, node->child1());
3716 GPRTemporary result(this, scope);
3717 GPRReg resultGPR = result.gpr();
3718 m_jit.move(scope.gpr(), resultGPR);
3719 JITCompiler::Jump activationNotCreated =
3720 m_jit.branchTest64(
3721 JITCompiler::Zero,
3722 JITCompiler::addressFor(
3723 static_cast<VirtualRegister>(m_jit.codeBlock()->activationRegister())));
3724 m_jit.loadPtr(JITCompiler::Address(resultGPR, JSScope::offsetOfNext()), resultGPR);
3725 activationNotCreated.link(&m_jit);
3726 cellResult(resultGPR, node);
3727 break;
3728 }
3729
3730 case SkipScope: {
3731 SpeculateCellOperand scope(this, node->child1());
3732 GPRTemporary result(this, scope);
3733 m_jit.loadPtr(JITCompiler::Address(scope.gpr(), JSScope::offsetOfNext()), result.gpr());
3734 cellResult(result.gpr(), node);
3735 break;
3736 }
3737
3738 case GetScopeRegisters: {
3739 SpeculateCellOperand scope(this, node->child1());
3740 GPRTemporary result(this);
3741 GPRReg scopeGPR = scope.gpr();
3742 GPRReg resultGPR = result.gpr();
3743
3744 m_jit.loadPtr(JITCompiler::Address(scopeGPR, JSVariableObject::offsetOfRegisters()), resultGPR);
3745 storageResult(resultGPR, node);
3746 break;
3747 }
3748 case GetScopedVar: {
3749 StorageOperand registers(this, node->child1());
3750 GPRTemporary result(this);
3751 GPRReg registersGPR = registers.gpr();
3752 GPRReg resultGPR = result.gpr();
3753
3754 m_jit.load64(JITCompiler::Address(registersGPR, node->varNumber() * sizeof(Register)), resultGPR);
3755 jsValueResult(resultGPR, node);
3756 break;
3757 }
3758 case PutScopedVar: {
3759 SpeculateCellOperand scope(this, node->child1());
3760 StorageOperand registers(this, node->child2());
3761 JSValueOperand value(this, node->child3());
3762 GPRTemporary scratchRegister(this);
3763
3764 GPRReg scopeGPR = scope.gpr();
3765 GPRReg registersGPR = registers.gpr();
3766 GPRReg valueGPR = value.gpr();
3767 GPRReg scratchGPR = scratchRegister.gpr();
3768
3769 m_jit.store64(valueGPR, JITCompiler::Address(registersGPR, node->varNumber() * sizeof(Register)));
3770 writeBarrier(scopeGPR, valueGPR, node->child3(), WriteBarrierForVariableAccess, scratchGPR);
3771 noResult(node);
3772 break;
3773 }
3774 case GetById: {
3775 if (!node->prediction()) {
3776 terminateSpeculativeExecution(InadequateCoverage, JSValueRegs(), 0);
3777 break;
3778 }
3779
3780 switch (node->child1().useKind()) {
3781 case CellUse: {
3782 SpeculateCellOperand base(this, node->child1());
3783 GPRTemporary result(this, base);
3784
3785 GPRReg baseGPR = base.gpr();
3786 GPRReg resultGPR = result.gpr();
3787
3788 base.use();
3789
3790 cachedGetById(node->codeOrigin, baseGPR, resultGPR, node->identifierNumber());
3791
3792 jsValueResult(resultGPR, node, UseChildrenCalledExplicitly);
3793 break;
3794 }
3795
3796 case UntypedUse: {
3797 JSValueOperand base(this, node->child1());
3798 GPRTemporary result(this, base);
3799
3800 GPRReg baseGPR = base.gpr();
3801 GPRReg resultGPR = result.gpr();
3802
3803 base.use();
3804
3805 JITCompiler::Jump notCell = m_jit.branchTest64(JITCompiler::NonZero, baseGPR, GPRInfo::tagMaskRegister);
3806
3807 cachedGetById(node->codeOrigin, baseGPR, resultGPR, node->identifierNumber(), notCell);
3808
3809 jsValueResult(resultGPR, node, UseChildrenCalledExplicitly);
3810 break;
3811 }
3812
3813 default:
3814 RELEASE_ASSERT_NOT_REACHED();
3815 break;
3816 }
3817 break;
3818 }
3819
3820 case GetByIdFlush: {
3821 if (!node->prediction()) {
3822 terminateSpeculativeExecution(InadequateCoverage, JSValueRegs(), 0);
3823 break;
3824 }
3825
3826 switch (node->child1().useKind()) {
3827 case CellUse: {
3828 SpeculateCellOperand base(this, node->child1());
3829 GPRReg baseGPR = base.gpr();
3830
3831 GPRResult result(this);
3832
3833 GPRReg resultGPR = result.gpr();
3834
3835 base.use();
3836
3837 flushRegisters();
3838
3839 cachedGetById(node->codeOrigin, baseGPR, resultGPR, node->identifierNumber(), JITCompiler::Jump(), DontSpill);
3840
3841 jsValueResult(resultGPR, node, UseChildrenCalledExplicitly);
3842 break;
3843 }
3844
3845 case UntypedUse: {
3846 JSValueOperand base(this, node->child1());
3847 GPRReg baseGPR = base.gpr();
3848
3849 GPRResult result(this);
3850 GPRReg resultGPR = result.gpr();
3851
3852 base.use();
3853 flushRegisters();
3854
3855 JITCompiler::Jump notCell = m_jit.branchTest64(JITCompiler::NonZero, baseGPR, GPRInfo::tagMaskRegister);
3856
3857 cachedGetById(node->codeOrigin, baseGPR, resultGPR, node->identifierNumber(), notCell, DontSpill);
3858
3859 jsValueResult(resultGPR, node, UseChildrenCalledExplicitly);
3860 break;
3861 }
3862
3863 default:
3864 RELEASE_ASSERT_NOT_REACHED();
3865 break;
3866 }
3867 break;
3868 }
3869
3870 case GetArrayLength:
3871 compileGetArrayLength(node);
3872 break;
3873
3874 case CheckFunction: {
3875 SpeculateCellOperand function(this, node->child1());
3876 speculationCheck(BadFunction, JSValueSource::unboxedCell(function.gpr()), node->child1(), m_jit.branchWeakPtr(JITCompiler::NotEqual, function.gpr(), node->function()));
3877 noResult(node);
3878 break;
3879 }
3880
3881 case CheckExecutable: {
3882 SpeculateCellOperand function(this, node->child1());
3883 speculationCheck(BadExecutable, JSValueSource::unboxedCell(function.gpr()), node->child1(), m_jit.branchWeakPtr(JITCompiler::NotEqual, JITCompiler::Address(function.gpr(), JSFunction::offsetOfExecutable()), node->executable()));
3884 noResult(node);
3885 break;
3886 }
3887
3888 case CheckStructure:
3889 case ForwardCheckStructure: {
3890 SpeculateCellOperand base(this, node->child1());
3891
3892 ASSERT(node->structureSet().size());
3893
3894 ExitKind exitKind;
3895 if (node->child1()->op() == WeakJSConstant)
3896 exitKind = BadWeakConstantCache;
3897 else
3898 exitKind = BadCache;
3899
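// A singleton structure set needs just one weak pointer compare. Larger sets load the
// cell's structure once, compare it against each member, and exit if none match.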
3900 if (node->structureSet().size() == 1) {
3901 speculationCheck(
3902 exitKind, JSValueSource::unboxedCell(base.gpr()), 0,
3903 m_jit.branchWeakPtr(
3904 JITCompiler::NotEqual,
3905 JITCompiler::Address(base.gpr(), JSCell::structureOffset()),
3906 node->structureSet()[0]));
3907 } else {
3908 GPRTemporary structure(this);
3909
3910 m_jit.loadPtr(JITCompiler::Address(base.gpr(), JSCell::structureOffset()), structure.gpr());
3911
3912 JITCompiler::JumpList done;
3913
3914 for (size_t i = 0; i < node->structureSet().size() - 1; ++i)
3915 done.append(m_jit.branchWeakPtr(JITCompiler::Equal, structure.gpr(), node->structureSet()[i]));
3916
3917 speculationCheck(
3918 exitKind, JSValueSource::unboxedCell(base.gpr()), 0,
3919 m_jit.branchWeakPtr(
3920 JITCompiler::NotEqual, structure.gpr(), node->structureSet().last()));
3921
3922 done.link(&m_jit);
3923 }
3924
3925 noResult(node);
3926 break;
3927 }
3928
3929 case StructureTransitionWatchpoint:
3930 case ForwardStructureTransitionWatchpoint: {
3931 // There is a fascinating question here of what to do about array profiling.
3932 // We *could* try to tell the OSR exit about where the base of the access is.
3933 // The DFG will have kept it alive, though it may not be in a register, and
3934 // we shouldn't really load it since that could be a waste. For now though,
3935 // we'll just rely on the fact that a watchpoint firing is already quite a
3936 // strong hint.
3937
3938 m_jit.addWeakReference(node->structure());
3939 node->structure()->addTransitionWatchpoint(
3940 speculationWatchpoint(
3941 node->child1()->op() == WeakJSConstant ? BadWeakConstantCache : BadCache));
3942
3943 #if !ASSERT_DISABLED
3944 SpeculateCellOperand op1(this, node->child1());
3945 JITCompiler::Jump isOK = m_jit.branchPtr(JITCompiler::Equal, JITCompiler::Address(op1.gpr(), JSCell::structureOffset()), TrustedImmPtr(node->structure()));
3946 m_jit.breakpoint();
3947 isOK.link(&m_jit);
3948 #else
3949 speculateCell(node->child1());
3950 #endif
3951
3952 noResult(node);
3953 break;
3954 }
3955
3956 case PhantomPutStructure: {
3957 ASSERT(isKnownCell(node->child1().node()));
3958
3959 ASSERT(node->structureTransitionData().previousStructure->transitionWatchpointSetHasBeenInvalidated());
3960 m_jit.addWeakReferenceTransition(
3961 node->codeOrigin.codeOriginOwner(),
3962 node->structureTransitionData().previousStructure,
3963 node->structureTransitionData().newStructure);
3964 noResult(node);
3965 break;
3966 }
3967
3968 case PutStructure: {
3969 ASSERT(node->structureTransitionData().previousStructure->transitionWatchpointSetHasBeenInvalidated());
3970
3971 SpeculateCellOperand base(this, node->child1());
3972 GPRReg baseGPR = base.gpr();
3973
3974 m_jit.addWeakReferenceTransition(
3975 node->codeOrigin.codeOriginOwner(),
3976 node->structureTransitionData().previousStructure,
3977 node->structureTransitionData().newStructure);
3978
3979 #if ENABLE(WRITE_BARRIER_PROFILING)
3980 // Must always emit this write barrier as the structure transition itself requires it
3981 writeBarrier(baseGPR, node->structureTransitionData().newStructure, WriteBarrierForGenericAccess);
3982 #endif
3983
3984 m_jit.storePtr(MacroAssembler::TrustedImmPtr(node->structureTransitionData().newStructure), MacroAssembler::Address(baseGPR, JSCell::structureOffset()));
3985
3986 noResult(node);
3987 break;
3988 }
3989
3990 case AllocatePropertyStorage:
3991 compileAllocatePropertyStorage(node);
3992 break;
3993
3994 case ReallocatePropertyStorage:
3995 compileReallocatePropertyStorage(node);
3996 break;
3997
3998 case GetButterfly: {
3999 SpeculateCellOperand base(this, node->child1());
4000 GPRTemporary result(this, base);
4001
4002 GPRReg baseGPR = base.gpr();
4003 GPRReg resultGPR = result.gpr();
4004
4005 m_jit.loadPtr(JITCompiler::Address(baseGPR, JSObject::butterflyOffset()), resultGPR);
4006
4007 storageResult(resultGPR, node);
4008 break;
4009 }
4010
4011 case GetIndexedPropertyStorage: {
4012 compileGetIndexedPropertyStorage(node);
4013 break;
4014 }
4015
4016 case GetByOffset: {
4017 StorageOperand storage(this, node->child1());
4018 GPRTemporary result(this, storage);
4019
4020 GPRReg storageGPR = storage.gpr();
4021 GPRReg resultGPR = result.gpr();
4022
4023 StorageAccessData& storageAccessData = m_jit.graph().m_storageAccessData[node->storageAccessDataIndex()];
4024
4025 m_jit.load64(JITCompiler::Address(storageGPR, storageAccessData.offset * sizeof(EncodedJSValue)), resultGPR);
4026
4027 jsValueResult(resultGPR, node);
4028 break;
4029 }
4030
4031 case PutByOffset: {
4032 #if ENABLE(WRITE_BARRIER_PROFILING)
4033 SpeculateCellOperand base(this, node->child2());
4034 #endif
4035 StorageOperand storage(this, node->child1());
4036 JSValueOperand value(this, node->child3());
4037
4038 GPRReg storageGPR = storage.gpr();
4039 GPRReg valueGPR = value.gpr();
4040
4041 #if ENABLE(WRITE_BARRIER_PROFILING)
4042 writeBarrier(base.gpr(), value.gpr(), node->child3(), WriteBarrierForPropertyAccess);
4043 #endif
4044
4045 StorageAccessData& storageAccessData = m_jit.graph().m_storageAccessData[node->storageAccessDataIndex()];
4046
4047 m_jit.store64(valueGPR, JITCompiler::Address(storageGPR, storageAccessData.offset * sizeof(EncodedJSValue)));
4048
4049 noResult(node);
4050 break;
4051 }
4052
4053 case PutById: {
4054 SpeculateCellOperand base(this, node->child1());
4055 JSValueOperand value(this, node->child2());
4056 GPRTemporary scratch(this);
4057
4058 GPRReg baseGPR = base.gpr();
4059 GPRReg valueGPR = value.gpr();
4060 GPRReg scratchGPR = scratch.gpr();
4061
4062 base.use();
4063 value.use();
4064
4065 cachedPutById(node->codeOrigin, baseGPR, valueGPR, node->child2(), scratchGPR, node->identifierNumber(), NotDirect);
4066
4067 noResult(node, UseChildrenCalledExplicitly);
4068 break;
4069 }
4070
4071 case PutByIdDirect: {
4072 SpeculateCellOperand base(this, node->child1());
4073 JSValueOperand value(this, node->child2());
4074 GPRTemporary scratch(this);
4075
4076 GPRReg baseGPR = base.gpr();
4077 GPRReg valueGPR = value.gpr();
4078 GPRReg scratchGPR = scratch.gpr();
4079
4080 base.use();
4081 value.use();
4082
4083 cachedPutById(node->codeOrigin, baseGPR, valueGPR, node->child2(), scratchGPR, node->identifierNumber(), Direct);
4084
4085 noResult(node, UseChildrenCalledExplicitly);
4086 break;
4087 }
4088
4089 case GetGlobalVar: {
4090 GPRTemporary result(this);
4091
4092 m_jit.load64(node->registerPointer(), result.gpr());
4093
4094 jsValueResult(result.gpr(), node);
4095 break;
4096 }
4097
4098 case PutGlobalVar: {
4099 JSValueOperand value(this, node->child1());
4100
4101 if (Heap::isWriteBarrierEnabled()) {
4102 GPRTemporary scratch(this);
4103 GPRReg scratchReg = scratch.gpr();
4104
4105 writeBarrier(m_jit.globalObjectFor(node->codeOrigin), value.gpr(), node->child1(), WriteBarrierForVariableAccess, scratchReg);
4106 }
4107
4108 m_jit.store64(value.gpr(), node->registerPointer());
4109
4110 noResult(node);
4111 break;
4112 }
4113
4114 case PutGlobalVarCheck: {
4115 JSValueOperand value(this, node->child1());
4116
4117 WatchpointSet* watchpointSet =
4118 m_jit.globalObjectFor(node->codeOrigin)->symbolTable()->get(
4119 identifier(node->identifierNumberForCheck())->impl()).watchpointSet();
4120 addSlowPathGenerator(
4121 slowPathCall(
4122 m_jit.branchTest8(
4123 JITCompiler::NonZero,
4124 JITCompiler::AbsoluteAddress(watchpointSet->addressOfIsWatched())),
4125 this, operationNotifyGlobalVarWrite, NoResult, watchpointSet));
4126
4127 if (Heap::isWriteBarrierEnabled()) {
4128 GPRTemporary scratch(this);
4129 GPRReg scratchReg = scratch.gpr();
4130
4131 writeBarrier(m_jit.globalObjectFor(node->codeOrigin), value.gpr(), node->child1(), WriteBarrierForVariableAccess, scratchReg);
4132 }
4133
4134 m_jit.store64(value.gpr(), node->registerPointer());
4135
4136 noResult(node);
4137 break;
4138 }
4139
4140 case GlobalVarWatchpoint: {
4141 m_jit.globalObjectFor(node->codeOrigin)->symbolTable()->get(
4142 identifier(node->identifierNumberForCheck())->impl()).addWatchpoint(
4143 speculationWatchpoint());
4144
4145 #if DFG_ENABLE(JIT_ASSERT)
4146 GPRTemporary scratch(this);
4147 GPRReg scratchGPR = scratch.gpr();
4148 m_jit.load64(node->registerPointer(), scratchGPR);
4149 JITCompiler::Jump ok = m_jit.branch64(
4150 JITCompiler::Equal, scratchGPR,
4151 TrustedImm64(JSValue::encode(node->registerPointer()->get())));
4152 m_jit.breakpoint();
4153 ok.link(&m_jit);
4154 #endif
4155
4156 noResult(node);
4157 break;
4158 }
4159
4160 case CheckHasInstance: {
4161 SpeculateCellOperand base(this, node->child1());
4162 GPRTemporary structure(this);
4163
4164 // Speculate that the base has the ImplementsDefaultHasInstance type-info flag set.
4165 m_jit.loadPtr(MacroAssembler::Address(base.gpr(), JSCell::structureOffset()), structure.gpr());
4166 speculationCheck(Uncountable, JSValueRegs(), 0, m_jit.branchTest8(MacroAssembler::Zero, MacroAssembler::Address(structure.gpr(), Structure::typeInfoFlagsOffset()), MacroAssembler::TrustedImm32(ImplementsDefaultHasInstance)));
4167
4168 noResult(node);
4169 break;
4170 }
4171
4172 case InstanceOf: {
4173 compileInstanceOf(node);
4174 break;
4175 }
4176
4177 case IsUndefined: {
4178 JSValueOperand value(this, node->child1());
4179 GPRTemporary result(this);
4180 GPRTemporary localGlobalObject(this);
4181 GPRTemporary remoteGlobalObject(this);
4182
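// A non-cell is undefined exactly when it equals the undefined encoding. A cell is
// never undefined unless its structure masquerades as undefined (if the watchpoint is
// still valid we just register it and assume no cell does), and even then only when
// the structure's global object matches the one this code runs in.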
4183 JITCompiler::Jump isCell = m_jit.branchTest64(JITCompiler::Zero, value.gpr(), GPRInfo::tagMaskRegister);
4184
4185 m_jit.compare64(JITCompiler::Equal, value.gpr(), TrustedImm32(ValueUndefined), result.gpr());
4186 JITCompiler::Jump done = m_jit.jump();
4187
4188 isCell.link(&m_jit);
4189 JITCompiler::Jump notMasqueradesAsUndefined;
4190 if (m_jit.graph().globalObjectFor(node->codeOrigin)->masqueradesAsUndefinedWatchpoint()->isStillValid()) {
4191 m_jit.graph().globalObjectFor(node->codeOrigin)->masqueradesAsUndefinedWatchpoint()->add(speculationWatchpoint());
4192 m_jit.move(TrustedImm32(0), result.gpr());
4193 notMasqueradesAsUndefined = m_jit.jump();
4194 } else {
4195 m_jit.loadPtr(JITCompiler::Address(value.gpr(), JSCell::structureOffset()), result.gpr());
4196 JITCompiler::Jump isMasqueradesAsUndefined = m_jit.branchTest8(JITCompiler::NonZero, JITCompiler::Address(result.gpr(), Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
4197 m_jit.move(TrustedImm32(0), result.gpr());
4198 notMasqueradesAsUndefined = m_jit.jump();
4199
4200 isMasqueradesAsUndefined.link(&m_jit);
4201 GPRReg localGlobalObjectGPR = localGlobalObject.gpr();
4202 GPRReg remoteGlobalObjectGPR = remoteGlobalObject.gpr();
4203 m_jit.move(TrustedImmPtr(m_jit.globalObjectFor(node->codeOrigin)), localGlobalObjectGPR);
4204 m_jit.loadPtr(JITCompiler::Address(result.gpr(), Structure::globalObjectOffset()), remoteGlobalObjectGPR);
4205 m_jit.comparePtr(JITCompiler::Equal, localGlobalObjectGPR, remoteGlobalObjectGPR, result.gpr());
4206 }
4207
4208 notMasqueradesAsUndefined.link(&m_jit);
4209 done.link(&m_jit);
4210 m_jit.or32(TrustedImm32(ValueFalse), result.gpr());
4211 jsValueResult(result.gpr(), node, DataFormatJSBoolean);
4212 break;
4213 }
4214
4215 case IsBoolean: {
4216 JSValueOperand value(this, node->child1());
4217 GPRTemporary result(this, value);
4218
4219 m_jit.move(value.gpr(), result.gpr());
4220 m_jit.xor64(JITCompiler::TrustedImm32(ValueFalse), result.gpr());
4221 m_jit.test64(JITCompiler::Zero, result.gpr(), JITCompiler::TrustedImm32(static_cast<int32_t>(~1)), result.gpr());
4222 m_jit.or32(TrustedImm32(ValueFalse), result.gpr());
4223 jsValueResult(result.gpr(), node, DataFormatJSBoolean);
4224 break;
4225 }
4226
4227 case IsNumber: {
4228 JSValueOperand value(this, node->child1());
4229 GPRTemporary result(this, value);
4230
4231 m_jit.test64(JITCompiler::NonZero, value.gpr(), GPRInfo::tagTypeNumberRegister, result.gpr());
4232 m_jit.or32(TrustedImm32(ValueFalse), result.gpr());
4233 jsValueResult(result.gpr(), node, DataFormatJSBoolean);
4234 break;
4235 }
4236
4237 case IsString: {
4238 JSValueOperand value(this, node->child1());
4239 GPRTemporary result(this, value);
4240
4241 JITCompiler::Jump isNotCell = m_jit.branchTest64(JITCompiler::NonZero, value.gpr(), GPRInfo::tagMaskRegister);
4242
4243 m_jit.loadPtr(JITCompiler::Address(value.gpr(), JSCell::structureOffset()), result.gpr());
4244 m_jit.compare8(JITCompiler::Equal, JITCompiler::Address(result.gpr(), Structure::typeInfoTypeOffset()), TrustedImm32(StringType), result.gpr());
4245 m_jit.or32(TrustedImm32(ValueFalse), result.gpr());
4246 JITCompiler::Jump done = m_jit.jump();
4247
4248 isNotCell.link(&m_jit);
4249 m_jit.move(TrustedImm32(ValueFalse), result.gpr());
4250
4251 done.link(&m_jit);
4252 jsValueResult(result.gpr(), node, DataFormatJSBoolean);
4253 break;
4254 }
4255
4256 case IsObject: {
4257 JSValueOperand value(this, node->child1());
4258 GPRReg valueGPR = value.gpr();
4259 GPRResult result(this);
4260 GPRReg resultGPR = result.gpr();
4261 flushRegisters();
4262 callOperation(operationIsObject, resultGPR, valueGPR);
4263 m_jit.or32(TrustedImm32(ValueFalse), resultGPR);
4264 jsValueResult(result.gpr(), node, DataFormatJSBoolean);
4265 break;
4266 }
4267
4268 case IsFunction: {
4269 JSValueOperand value(this, node->child1());
4270 GPRReg valueGPR = value.gpr();
4271 GPRResult result(this);
4272 GPRReg resultGPR = result.gpr();
4273 flushRegisters();
4274 callOperation(operationIsFunction, resultGPR, valueGPR);
4275 m_jit.or32(TrustedImm32(ValueFalse), resultGPR);
4276 jsValueResult(result.gpr(), node, DataFormatJSBoolean);
4277 break;
4278 }
4279
4280 case TypeOf: {
4281 JSValueOperand value(this, node->child1(), ManualOperandSpeculation);
4282 GPRReg valueGPR = value.gpr();
4283 GPRTemporary temp(this);
4284 GPRReg tempGPR = temp.gpr();
4285 GPRResult result(this);
4286 GPRReg resultGPR = result.gpr();
4287 JITCompiler::JumpList doneJumps;
4288
4289 flushRegisters();
4290
4291 ASSERT(node->child1().useKind() == UntypedUse || node->child1().useKind() == CellUse || node->child1().useKind() == StringUse);
4292
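// Cells: answer "string" inline when a string is plausible, otherwise defer to
// operationTypeOf. Non-cells: number, undefined, null (which is typeof "object"),
// and boolean are all resolved inline against the VM's small strings.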
4293 JITCompiler::Jump isNotCell = m_jit.branchTest64(JITCompiler::NonZero, valueGPR, GPRInfo::tagMaskRegister);
4294 if (node->child1().useKind() != UntypedUse)
4295 DFG_TYPE_CHECK(JSValueSource(valueGPR), node->child1(), SpecCell, isNotCell);
4296
4297 if (!node->child1()->shouldSpeculateObject() || node->child1().useKind() == StringUse) {
4298 m_jit.loadPtr(JITCompiler::Address(valueGPR, JSCell::structureOffset()), tempGPR);
4299 JITCompiler::Jump notString = m_jit.branch8(JITCompiler::NotEqual, JITCompiler::Address(tempGPR, Structure::typeInfoTypeOffset()), TrustedImm32(StringType));
4300 if (node->child1().useKind() == StringUse)
4301 DFG_TYPE_CHECK(JSValueSource(valueGPR), node->child1(), SpecString, notString);
4302 m_jit.move(TrustedImmPtr(m_jit.vm()->smallStrings.stringString()), resultGPR);
4303 doneJumps.append(m_jit.jump());
4304 if (node->child1().useKind() != StringUse) {
4305 notString.link(&m_jit);
4306 callOperation(operationTypeOf, resultGPR, valueGPR);
4307 doneJumps.append(m_jit.jump());
4308 }
4309 } else {
4310 callOperation(operationTypeOf, resultGPR, valueGPR);
4311 doneJumps.append(m_jit.jump());
4312 }
4313
4314 if (node->child1().useKind() == UntypedUse) {
4315 isNotCell.link(&m_jit);
4316 JITCompiler::Jump notNumber = m_jit.branchTest64(JITCompiler::Zero, valueGPR, GPRInfo::tagTypeNumberRegister);
4317 m_jit.move(TrustedImmPtr(m_jit.vm()->smallStrings.numberString()), resultGPR);
4318 doneJumps.append(m_jit.jump());
4319 notNumber.link(&m_jit);
4320
4321 JITCompiler::Jump notUndefined = m_jit.branch64(JITCompiler::NotEqual, valueGPR, JITCompiler::TrustedImm64(ValueUndefined));
4322 m_jit.move(TrustedImmPtr(m_jit.vm()->smallStrings.undefinedString()), resultGPR);
4323 doneJumps.append(m_jit.jump());
4324 notUndefined.link(&m_jit);
4325
4326 JITCompiler::Jump notNull = m_jit.branch64(JITCompiler::NotEqual, valueGPR, JITCompiler::TrustedImm64(ValueNull));
4327 m_jit.move(TrustedImmPtr(m_jit.vm()->smallStrings.objectString()), resultGPR);
4328 doneJumps.append(m_jit.jump());
4329 notNull.link(&m_jit);
4330
4331 // Only boolean left
4332 m_jit.move(TrustedImmPtr(m_jit.vm()->smallStrings.booleanString()), resultGPR);
4333 }
4334 doneJumps.link(&m_jit);
4335 cellResult(resultGPR, node);
4336 break;
4337 }
4338
4339 case Flush:
4340 case Phi:
4341 break;
4342
4343 case Breakpoint:
4344 #if ENABLE(DEBUG_WITH_BREAKPOINT)
4345 m_jit.breakpoint();
4346 #else
4347 RELEASE_ASSERT_NOT_REACHED();
4348 #endif
4349 break;
4350
4351 case Call:
4352 case Construct:
4353 emitCall(node);
4354 break;
4355
4356 case Resolve: {
4357 flushRegisters();
4358 GPRResult result(this);
4359 ResolveOperationData& data = m_jit.graph().m_resolveOperationsData[node->resolveOperationsDataIndex()];
4360 callOperation(operationResolve, result.gpr(), identifier(data.identifierNumber), data.resolveOperations);
4361 jsValueResult(result.gpr(), node);
4362 break;
4363 }
4364
4365 case ResolveBase: {
4366 flushRegisters();
4367 GPRResult result(this);
4368 ResolveOperationData& data = m_jit.graph().m_resolveOperationsData[node->resolveOperationsDataIndex()];
4369 callOperation(operationResolveBase, result.gpr(), identifier(data.identifierNumber), data.resolveOperations, data.putToBaseOperation);
4370 jsValueResult(result.gpr(), node);
4371 break;
4372 }
4373
4374 case ResolveBaseStrictPut: {
4375 flushRegisters();
4376 GPRResult result(this);
4377 ResolveOperationData& data = m_jit.graph().m_resolveOperationsData[node->resolveOperationsDataIndex()];
4378 callOperation(operationResolveBaseStrictPut, result.gpr(), identifier(data.identifierNumber), data.resolveOperations, data.putToBaseOperation);
4379 jsValueResult(result.gpr(), node);
4380 break;
4381 }
4382
4383 case ResolveGlobal: {
4384 GPRTemporary globalObject(this);
4385 GPRTemporary resolveInfo(this);
4386 GPRTemporary result(this);
4387
4388 GPRReg globalObjectGPR = globalObject.gpr();
4389 GPRReg resolveInfoGPR = resolveInfo.gpr();
4390 GPRReg resultGPR = result.gpr();
4391
4392 ResolveGlobalData& data = m_jit.graph().m_resolveGlobalData[node->resolveGlobalDataIndex()];
4393 ResolveOperation* resolveOperationAddress = &(data.resolveOperations->data()[data.resolvePropertyIndex]);
4394
4395 // Check Structure of global object
4396 m_jit.move(JITCompiler::TrustedImmPtr(m_jit.globalObjectFor(node->codeOrigin)), globalObjectGPR);
4397 m_jit.move(JITCompiler::TrustedImmPtr(resolveOperationAddress), resolveInfoGPR);
4398 m_jit.loadPtr(JITCompiler::Address(resolveInfoGPR, OBJECT_OFFSETOF(ResolveOperation, m_structure)), resultGPR);
4399 JITCompiler::Jump structuresDontMatch = m_jit.branchPtr(JITCompiler::NotEqual, resultGPR, JITCompiler::Address(globalObjectGPR, JSCell::structureOffset()));
4400
4401 // Fast case
4402 m_jit.load32(JITCompiler::Address(resolveInfoGPR, OBJECT_OFFSETOF(ResolveOperation, m_offset)), resolveInfoGPR);
4403 #if DFG_ENABLE(JIT_ASSERT)
4404 JITCompiler::Jump isOutOfLine = m_jit.branch32(JITCompiler::GreaterThanOrEqual, resolveInfoGPR, TrustedImm32(firstOutOfLineOffset));
4405 m_jit.breakpoint();
4406 isOutOfLine.link(&m_jit);
4407 #endif
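// The cached offset is an out-of-line property offset; negate it and index backwards
// from the butterfly to reach the property slot.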
4408 m_jit.neg32(resolveInfoGPR);
4409 m_jit.signExtend32ToPtr(resolveInfoGPR, resolveInfoGPR);
4410 m_jit.loadPtr(JITCompiler::Address(globalObjectGPR, JSObject::butterflyOffset()), resultGPR);
4411 m_jit.load64(JITCompiler::BaseIndex(resultGPR, resolveInfoGPR, JITCompiler::TimesEight, (firstOutOfLineOffset - 2) * static_cast<ptrdiff_t>(sizeof(JSValue))), resultGPR);
4412
4413 addSlowPathGenerator(
4414 slowPathCall(
4415 structuresDontMatch, this, operationResolveGlobal,
4416 resultGPR, resolveInfoGPR, globalObjectGPR,
4417 &m_jit.codeBlock()->identifier(data.identifierNumber)));
4418
4419 jsValueResult(resultGPR, node);
4420 break;
4421 }
4422
4423 case CreateActivation: {
4424 RELEASE_ASSERT(!node->codeOrigin.inlineCallFrame);
4425
4426 JSValueOperand value(this, node->child1());
4427 GPRTemporary result(this, value);
4428
4429 GPRReg valueGPR = value.gpr();
4430 GPRReg resultGPR = result.gpr();
4431
4432 m_jit.move(valueGPR, resultGPR);
4433
4434 JITCompiler::Jump notCreated = m_jit.branchTest64(JITCompiler::Zero, resultGPR);
4435
4436 addSlowPathGenerator(
4437 slowPathCall(notCreated, this, operationCreateActivation, resultGPR));
4438
4439 cellResult(resultGPR, node);
4440 break;
4441 }
4442
4443 case CreateArguments: {
4444 JSValueOperand value(this, node->child1());
4445 GPRTemporary result(this, value);
4446
4447 GPRReg valueGPR = value.gpr();
4448 GPRReg resultGPR = result.gpr();
4449
4450 m_jit.move(valueGPR, resultGPR);
4451
4452 JITCompiler::Jump notCreated = m_jit.branchTest64(JITCompiler::Zero, resultGPR);
4453
4454 if (node->codeOrigin.inlineCallFrame) {
4455 addSlowPathGenerator(
4456 slowPathCall(
4457 notCreated, this, operationCreateInlinedArguments, resultGPR,
4458 node->codeOrigin.inlineCallFrame));
4459 } else {
4460 addSlowPathGenerator(
4461 slowPathCall(notCreated, this, operationCreateArguments, resultGPR));
4462 }
4463
4464 cellResult(resultGPR, node);
4465 break;
4466 }
4467
4468 case TearOffActivation: {
4469 RELEASE_ASSERT(!node->codeOrigin.inlineCallFrame);
4470
4471 JSValueOperand activationValue(this, node->child1());
4472 GPRTemporary scratch(this);
4473 GPRReg activationValueGPR = activationValue.gpr();
4474 GPRReg scratchGPR = scratch.gpr();
4475
4476 JITCompiler::Jump notCreated = m_jit.branchTest64(JITCompiler::Zero, activationValueGPR);
4477
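// Copy every captured register from the call frame into the activation object, then
// point the activation's registers pointer at its own internal storage.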
4478 SharedSymbolTable* symbolTable = m_jit.symbolTableFor(node->codeOrigin);
4479 int registersOffset = JSActivation::registersOffset(symbolTable);
4480
4481 int captureEnd = symbolTable->captureEnd();
4482 for (int i = symbolTable->captureStart(); i < captureEnd; ++i) {
4483 m_jit.load64(
4484 JITCompiler::Address(
4485 GPRInfo::callFrameRegister, i * sizeof(Register)), scratchGPR);
4486 m_jit.store64(
4487 scratchGPR, JITCompiler::Address(
4488 activationValueGPR, registersOffset + i * sizeof(Register)));
4489 }
4490 m_jit.addPtr(TrustedImm32(registersOffset), activationValueGPR, scratchGPR);
4491 m_jit.storePtr(scratchGPR, JITCompiler::Address(activationValueGPR, JSActivation::offsetOfRegisters()));
4492
4493 notCreated.link(&m_jit);
4494 noResult(node);
4495 break;
4496 }
4497
4498 case TearOffArguments: {
4499 JSValueOperand unmodifiedArgumentsValue(this, node->child1());
4500 JSValueOperand activationValue(this, node->child2());
4501 GPRReg unmodifiedArgumentsValueGPR = unmodifiedArgumentsValue.gpr();
4502 GPRReg activationValueGPR = activationValue.gpr();
4503
4504 JITCompiler::Jump created = m_jit.branchTest64(JITCompiler::NonZero, unmodifiedArgumentsValueGPR);
4505
4506 if (node->codeOrigin.inlineCallFrame) {
4507 addSlowPathGenerator(
4508 slowPathCall(
4509 created, this, operationTearOffInlinedArguments, NoResult,
4510 unmodifiedArgumentsValueGPR, activationValueGPR, node->codeOrigin.inlineCallFrame));
4511 } else {
4512 addSlowPathGenerator(
4513 slowPathCall(
4514 created, this, operationTearOffArguments, NoResult, unmodifiedArgumentsValueGPR, activationValueGPR));
4515 }
4516
4517 noResult(node);
4518 break;
4519 }
4520
4521 case GetMyArgumentsLength: {
4522 GPRTemporary result(this);
4523 GPRReg resultGPR = result.gpr();
4524
4525 if (!isEmptySpeculation(
4526 m_state.variables().operand(
4527 m_jit.graph().argumentsRegisterFor(node->codeOrigin)).m_type)) {
4528 speculationCheck(
4529 ArgumentsEscaped, JSValueRegs(), 0,
4530 m_jit.branchTest64(
4531 JITCompiler::NonZero,
4532 JITCompiler::addressFor(
4533 m_jit.argumentsRegisterFor(node->codeOrigin))));
4534 }
4535
4536 RELEASE_ASSERT(!node->codeOrigin.inlineCallFrame);
4537 m_jit.load32(JITCompiler::payloadFor(JSStack::ArgumentCount), resultGPR);
4538 m_jit.sub32(TrustedImm32(1), resultGPR);
4539 integerResult(resultGPR, node);
4540 break;
4541 }
4542
4543 case GetMyArgumentsLengthSafe: {
4544 GPRTemporary result(this);
4545 GPRReg resultGPR = result.gpr();
4546
4547 JITCompiler::Jump created = m_jit.branchTest64(
4548 JITCompiler::NonZero,
4549 JITCompiler::addressFor(
4550 m_jit.argumentsRegisterFor(node->codeOrigin)));
4551
4552 if (node->codeOrigin.inlineCallFrame) {
4553 m_jit.move(
4554 Imm64(JSValue::encode(jsNumber(node->codeOrigin.inlineCallFrame->arguments.size() - 1))),
4555 resultGPR);
4556 } else {
4557 m_jit.load32(JITCompiler::payloadFor(JSStack::ArgumentCount), resultGPR);
4558 m_jit.sub32(TrustedImm32(1), resultGPR);
4559 m_jit.or64(GPRInfo::tagTypeNumberRegister, resultGPR);
4560 }
4561
4562 // FIXME: the slow path generator should perform a forward speculation that the
4563 // result is an integer. For now we postpone the speculation by having this return
4564 // a JSValue.
4565
4566 addSlowPathGenerator(
4567 slowPathCall(
4568 created, this, operationGetArgumentsLength, resultGPR,
4569 m_jit.argumentsRegisterFor(node->codeOrigin)));
4570
4571 jsValueResult(resultGPR, node);
4572 break;
4573 }
4574
4575 case GetMyArgumentByVal: {
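// Direct argument read for when the arguments object is known not to have been
// created: check that it still hasn't escaped, bounds-check index + 1 against the
// argument count, honor the SlowArgument table for captured arguments, and otherwise
// load straight from the call frame.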
4576 SpeculateStrictInt32Operand index(this, node->child1());
4577 GPRTemporary result(this);
4578 GPRReg indexGPR = index.gpr();
4579 GPRReg resultGPR = result.gpr();
4580
4581 if (!isEmptySpeculation(
4582 m_state.variables().operand(
4583 m_jit.graph().argumentsRegisterFor(node->codeOrigin)).m_type)) {
4584 speculationCheck(
4585 ArgumentsEscaped, JSValueRegs(), 0,
4586 m_jit.branchTest64(
4587 JITCompiler::NonZero,
4588 JITCompiler::addressFor(
4589 m_jit.argumentsRegisterFor(node->codeOrigin))));
4590 }
4591
4592 m_jit.add32(TrustedImm32(1), indexGPR, resultGPR);
4593 if (node->codeOrigin.inlineCallFrame) {
4594 speculationCheck(
4595 Uncountable, JSValueRegs(), 0,
4596 m_jit.branch32(
4597 JITCompiler::AboveOrEqual,
4598 resultGPR,
4599 Imm32(node->codeOrigin.inlineCallFrame->arguments.size())));
4600 } else {
4601 speculationCheck(
4602 Uncountable, JSValueRegs(), 0,
4603 m_jit.branch32(
4604 JITCompiler::AboveOrEqual,
4605 resultGPR,
4606 JITCompiler::payloadFor(JSStack::ArgumentCount)));
4607 }
4608
4609 JITCompiler::JumpList slowArgument;
4610 JITCompiler::JumpList slowArgumentOutOfBounds;
4611 if (const SlowArgument* slowArguments = m_jit.symbolTableFor(node->codeOrigin)->slowArguments()) {
4612 slowArgumentOutOfBounds.append(
4613 m_jit.branch32(
4614 JITCompiler::AboveOrEqual, indexGPR,
4615 Imm32(m_jit.symbolTableFor(node->codeOrigin)->parameterCount())));
4616
4617 COMPILE_ASSERT(sizeof(SlowArgument) == 8, SlowArgument_size_is_eight_bytes);
4618 m_jit.move(ImmPtr(slowArguments), resultGPR);
4619 m_jit.load32(
4620 JITCompiler::BaseIndex(
4621 resultGPR, indexGPR, JITCompiler::TimesEight,
4622 OBJECT_OFFSETOF(SlowArgument, index)),
4623 resultGPR);
4624 m_jit.signExtend32ToPtr(resultGPR, resultGPR);
4625 m_jit.load64(
4626 JITCompiler::BaseIndex(
4627 GPRInfo::callFrameRegister, resultGPR, JITCompiler::TimesEight, m_jit.offsetOfLocals(node->codeOrigin)),
4628 resultGPR);
4629 slowArgument.append(m_jit.jump());
4630 }
4631 slowArgumentOutOfBounds.link(&m_jit);
4632
4633 m_jit.neg32(resultGPR);
4634 m_jit.signExtend32ToPtr(resultGPR, resultGPR);
4635
4636 m_jit.load64(
4637 JITCompiler::BaseIndex(
4638 GPRInfo::callFrameRegister, resultGPR, JITCompiler::TimesEight, m_jit.offsetOfArgumentsIncludingThis(node->codeOrigin)),
4639 resultGPR);
4640
4641 slowArgument.link(&m_jit);
4642 jsValueResult(resultGPR, node);
4643 break;
4644 }
4645
    case GetMyArgumentByValSafe: {
        SpeculateStrictInt32Operand index(this, node->child1());
        GPRTemporary result(this);
        GPRReg indexGPR = index.gpr();
        GPRReg resultGPR = result.gpr();

        JITCompiler::JumpList slowPath;
        slowPath.append(
            m_jit.branchTest64(
                JITCompiler::NonZero,
                JITCompiler::addressFor(
                    m_jit.argumentsRegisterFor(node->codeOrigin))));

        m_jit.add32(TrustedImm32(1), indexGPR, resultGPR);
        if (node->codeOrigin.inlineCallFrame) {
            slowPath.append(
                m_jit.branch32(
                    JITCompiler::AboveOrEqual,
                    resultGPR,
                    Imm32(node->codeOrigin.inlineCallFrame->arguments.size())));
        } else {
            slowPath.append(
                m_jit.branch32(
                    JITCompiler::AboveOrEqual,
                    resultGPR,
                    JITCompiler::payloadFor(JSStack::ArgumentCount)));
        }

        JITCompiler::JumpList slowArgument;
        JITCompiler::JumpList slowArgumentOutOfBounds;
        if (const SlowArgument* slowArguments = m_jit.symbolTableFor(node->codeOrigin)->slowArguments()) {
            slowArgumentOutOfBounds.append(
                m_jit.branch32(
                    JITCompiler::AboveOrEqual, indexGPR,
                    Imm32(m_jit.symbolTableFor(node->codeOrigin)->parameterCount())));

            COMPILE_ASSERT(sizeof(SlowArgument) == 8, SlowArgument_size_is_eight_bytes);
            m_jit.move(ImmPtr(slowArguments), resultGPR);
            m_jit.load32(
                JITCompiler::BaseIndex(
                    resultGPR, indexGPR, JITCompiler::TimesEight,
                    OBJECT_OFFSETOF(SlowArgument, index)),
                resultGPR);
            m_jit.signExtend32ToPtr(resultGPR, resultGPR);
            m_jit.load64(
                JITCompiler::BaseIndex(
                    GPRInfo::callFrameRegister, resultGPR, JITCompiler::TimesEight, m_jit.offsetOfLocals(node->codeOrigin)),
                resultGPR);
            slowArgument.append(m_jit.jump());
        }
        slowArgumentOutOfBounds.link(&m_jit);

        m_jit.neg32(resultGPR);
        m_jit.signExtend32ToPtr(resultGPR, resultGPR);

        m_jit.load64(
            JITCompiler::BaseIndex(
                GPRInfo::callFrameRegister, resultGPR, JITCompiler::TimesEight, m_jit.offsetOfArgumentsIncludingThis(node->codeOrigin)),
            resultGPR);

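        // Slow cases go through the runtime: both operations receive the arguments
        // virtual register and the index, and the inlined variant additionally passes
        // the InlineCallFrame so the runtime can locate the right frame's arguments.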
        if (node->codeOrigin.inlineCallFrame) {
            addSlowPathGenerator(
                slowPathCall(
                    slowPath, this, operationGetInlinedArgumentByVal, resultGPR,
                    m_jit.argumentsRegisterFor(node->codeOrigin),
                    node->codeOrigin.inlineCallFrame,
                    indexGPR));
        } else {
            addSlowPathGenerator(
                slowPathCall(
                    slowPath, this, operationGetArgumentByVal, resultGPR,
                    m_jit.argumentsRegisterFor(node->codeOrigin),
                    indexGPR));
        }

        slowArgument.link(&m_jit);
        jsValueResult(resultGPR, node);
        break;
    }

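    // CheckArgumentsNotCreated is a pure guard. It is only emitted when the abstract
    // state cannot already prove that no arguments object exists, so we simply OSR-exit
    // if the arguments register turns out to be populated at run time.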
    case CheckArgumentsNotCreated: {
        ASSERT(!isEmptySpeculation(
            m_state.variables().operand(
                m_jit.graph().argumentsRegisterFor(node->codeOrigin)).m_type));
        speculationCheck(
            ArgumentsEscaped, JSValueRegs(), 0,
            m_jit.branchTest64(
                JITCompiler::NonZero,
                JITCompiler::addressFor(
                    m_jit.argumentsRegisterFor(node->codeOrigin))));
        noResult(node);
        break;
    }

    case NewFunctionNoCheck:
        compileNewFunctionNoCheck(node);
        break;

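    // NewFunction lazily allocates the function object: the child carries either the
    // already-created function or the empty JSValue (encoded as zero), in which case the
    // slow path calls operationNewFunction with the function's declared executable.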
    case NewFunction: {
        JSValueOperand value(this, node->child1());
        GPRTemporary result(this, value);

        GPRReg valueGPR = value.gpr();
        GPRReg resultGPR = result.gpr();

        m_jit.move(valueGPR, resultGPR);

        JITCompiler::Jump notCreated = m_jit.branchTest64(JITCompiler::Zero, resultGPR);

        addSlowPathGenerator(
            slowPathCall(
                notCreated, this, operationNewFunction,
                resultGPR, m_jit.codeBlock()->functionDecl(node->functionDeclIndex())));

        jsValueResult(resultGPR, node);
        break;
    }

    case NewFunctionExpression:
        compileNewFunctionExpression(node);
        break;

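    // CountExecution bumps the 64-bit profiling counter that this node points at, in
    // place, through its absolute address.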
    case CountExecution:
        m_jit.add64(TrustedImm32(1), MacroAssembler::AbsoluteAddress(node->executionCounter()->address()));
        break;

    case GarbageValue:
        // We should never get to the point of code emission for a GarbageValue
        CRASH();
        break;

    case ForceOSRExit: {
        terminateSpeculativeExecution(InadequateCoverage, JSValueRegs(), 0);
        break;
    }

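    // CheckWatchdogTimer polls the VM watchdog's "timer did fire" flag and OSR-exits if
    // it is set, so the interruption is handled outside of DFG-compiled code.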
    case CheckWatchdogTimer:
        speculationCheck(
            WatchdogTimerFired, JSValueRegs(), 0,
            m_jit.branchTest8(
                JITCompiler::NonZero,
                JITCompiler::AbsoluteAddress(m_jit.vm()->watchdog.timerDidFireAddress())));
        break;

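    // Phantom keeps its children alive and performs any type checks attached to its
    // edges; PhantomLocal is purely a liveness marker and generates no code. Neither
    // produces a result.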
    case Phantom:
        DFG_NODE_DO_TO_CHILDREN(m_jit.graph(), node, speculate);
        noResult(node);
        break;

    case PhantomLocal:
        // This is a no-op.
        noResult(node);
        break;

    case Unreachable:
        RELEASE_ASSERT_NOT_REACHED();
        break;

    case Nop:
        RELEASE_ASSERT_NOT_REACHED();
        break;

    case LastNodeType:
        RELEASE_ASSERT_NOT_REACHED();
        break;
    }

#if ENABLE(DFG_REGISTER_ALLOCATION_VALIDATION)
    m_jit.clearRegisterAllocationOffsets();
#endif

    if (!m_compileOkay)
        return;

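    // A node that must generate code but whose result has no consumers still needs its
    // result "used" once here, so the reference implied by mustGenerate() is consumed
    // and the value's register/stack resources get released.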
    if (node->hasResult() && node->mustGenerate())
        use(node);
}

#endif

} } // namespace JSC::DFG

#endif