# Copyright (C) 2011, 2012, 2013, 2014 Apple Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
#    notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
#    notice, this list of conditions and the following disclaimer in the
#    documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
# BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
# THE POSSIBILITY OF SUCH DAMAGE.

# First come the common protocols that both interpreters use. Note that each
# of these must have an ASSERT() in LLIntData.cpp

# Work-around for the fact that the toolchain's awareness of armv7s results in
# a separate slab in the fat binary, yet the offlineasm doesn't know to expect
# it.
if ARMv7s
end

# These declarations must match interpreter/JSStack.h.

if JSVALUE64
    const PtrSize = 8
    const CallFrameHeaderSlots = 6
else
    const PtrSize = 4
    const CallFrameHeaderSlots = 5
    const CallFrameAlignSlots = 1
end
const SlotSize = 8

const CallerFrameAndPCSize = 2 * PtrSize

const CallerFrame = 0
const ReturnPC = CallerFrame + PtrSize
const CodeBlock = ReturnPC + PtrSize
const ScopeChain = CodeBlock + SlotSize
const Callee = ScopeChain + SlotSize
const ArgumentCount = Callee + SlotSize
const ThisArgumentOffset = ArgumentCount + SlotSize
const CallFrameHeaderSize = ThisArgumentOffset
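
# For reference, the byte offsets these definitions work out to:
#   JSVALUE64 (PtrSize = 8):  CallerFrame 0, ReturnPC 8, CodeBlock 16,
#     ScopeChain 24, Callee 32, ArgumentCount 40, ThisArgumentOffset 48,
#     i.e. CallFrameHeaderSlots (6) * SlotSize (8) = 48 bytes of header.
#   JSVALUE32_64 (PtrSize = 4): CallerFrame 0, ReturnPC 4, CodeBlock 8,
#     ScopeChain 16, Callee 24, ArgumentCount 32, ThisArgumentOffset 40,
#     i.e. CallFrameHeaderSlots (5) * SlotSize (8) = 40 bytes of header.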

# Some value representation constants.
if JSVALUE64
    const TagBitTypeOther = 0x2
    const TagBitBool = 0x4
    const TagBitUndefined = 0x8
    const ValueEmpty = 0x0
    const ValueFalse = TagBitTypeOther | TagBitBool
    const ValueTrue = TagBitTypeOther | TagBitBool | 1
    const ValueUndefined = TagBitTypeOther | TagBitUndefined
    const ValueNull = TagBitTypeOther
    const TagTypeNumber = 0xffff000000000000
    const TagMask = TagTypeNumber | TagBitTypeOther
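
    # Derived values, for reference: ValueFalse = 0x6, ValueTrue = 0x7,
    # ValueUndefined = 0xa, ValueNull = 0x2, and
    # TagMask = 0xffff000000000002.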
else
    const Int32Tag = -1
    const BooleanTag = -2
    const NullTag = -3
    const UndefinedTag = -4
    const CellTag = -5
    const EmptyValueTag = -6
    const DeletedValueTag = -7
    const LowestTag = DeletedValueTag
end

const CallOpCodeSize = 9

if X86_64 or ARM64 or C_LOOP
    const maxFrameExtentForSlowPathCall = 0
elsif ARM or ARMv7_TRADITIONAL or ARMv7 or SH4
    const maxFrameExtentForSlowPathCall = 24
elsif X86 or X86_WIN
    const maxFrameExtentForSlowPathCall = 40
elsif MIPS
    const maxFrameExtentForSlowPathCall = 40
elsif X86_64_WIN
    const maxFrameExtentForSlowPathCall = 64
end

# Watchpoint states
const ClearWatchpoint = 0
const IsWatched = 1
const IsInvalidated = 2

# Some register conventions.
if JSVALUE64
    # - Use a pair of registers to represent the PC: one register for the
    #   base of the bytecodes, and one register for the index.
    # - The PC base (or PB for short) should be stored in the csr. It will
    #   get clobbered on calls to other JS code, but will get saved on calls
    #   to C functions.
    # - C calls are still given the Instruction* rather than the PC index.
    #   This requires an add before the call, and a sub after.
    const PC = t5
    const PB = t6
    const tagTypeNumber = csr1
    const tagMask = csr2

    macro loadisFromInstruction(offset, dest)
        loadis offset * 8[PB, PC, 8], dest
    end

    macro loadpFromInstruction(offset, dest)
        loadp offset * 8[PB, PC, 8], dest
    end

    macro storepToInstruction(value, offset)
        storep value, offset * 8[PB, PC, 8]
    end
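
    # For reference: these operand accessors address the bytecode stream at
    # PB + PC * 8 + offset * 8, so e.g. loadisFromInstruction(2, t0) reads
    # the 32-bit operand two instruction slots past the current PC.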

else
    const PC = t5
    macro loadisFromInstruction(offset, dest)
        loadis offset * 4[PC], dest
    end

    macro loadpFromInstruction(offset, dest)
        loadp offset * 4[PC], dest
    end
end

# Constants for reasoning about value representation.
if BIG_ENDIAN
    const TagOffset = 0
    const PayloadOffset = 4
else
    const TagOffset = 4
    const PayloadOffset = 0
end
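
# For reference: a JSVALUE32_64 value occupies one 8-byte slot whose 4-byte
# tag lives in the high-order half and whose 4-byte payload lives in the
# low-order half, which is why the two byte offsets swap with endianness.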

# Constants for reasoning about butterflies.
const IsArray = 1
const IndexingShapeMask = 30
const NoIndexingShape = 0
const Int32Shape = 20
const DoubleShape = 22
const ContiguousShape = 26
const ArrayStorageShape = 28
const SlowPutArrayStorageShape = 30
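
# For reference: an indexing type is classified by masking with
# IndexingShapeMask (0b11110) and comparing against one of the shapes above;
# the low IsArray bit records whether the cell is a JSArray.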

# Type constants.
const StringType = 5
const ObjectType = 18
const FinalObjectType = 19

# Type flags constants.
const MasqueradesAsUndefined = 1
const ImplementsHasInstance = 2
const ImplementsDefaultHasInstance = 8

# Bytecode operand constants.
const FirstConstantRegisterIndex = 0x40000000

# Code type constants.
const GlobalCode = 0
const EvalCode = 1
const FunctionCode = 2

# The interpreter steals the tag word of the argument count.
const LLIntReturnPC = ArgumentCount + TagOffset

# String flags.
const HashFlags8BitBuffer = 32

# Copied from PropertyOffset.h
const firstOutOfLineOffset = 100

# ResolveType
const GlobalProperty = 0
const GlobalVar = 1
const ClosureVar = 2
const GlobalPropertyWithVarInjectionChecks = 3
const GlobalVarWithVarInjectionChecks = 4
const ClosureVarWithVarInjectionChecks = 5
const Dynamic = 6

const ResolveModeMask = 0xffff

const MarkedBlockSize = 64 * 1024
const MarkedBlockMask = ~(MarkedBlockSize - 1)
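# For reference: MarkedBlockSize is a power of two, so masking a cell pointer
# with MarkedBlockMask yields the base address of the 64KB MarkedBlock that
# contains the cell.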
# Constants for checking mark bits.
const AtomNumberShift = 3
const BitMapWordShift = 4

# Allocation constants
if JSVALUE64
    const JSFinalObjectSizeClassIndex = 1
else
    const JSFinalObjectSizeClassIndex = 3
end

# This must match wtf/Vector.h
const VectorBufferOffset = 0
if JSVALUE64
    const VectorSizeOffset = 12
else
    const VectorSizeOffset = 8
end

# Some common utilities.
macro crash()
    if C_LOOP
        cloopCrash
    else
        call _llint_crash
    end
end

macro assert(assertion)
    if ASSERT_ENABLED
        assertion(.ok)
        crash()
    .ok:
    end
end
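
# Usage note: the assertion argument is itself a macro that takes a label and
# branches to it when the condition holds; otherwise control falls through to
# crash(). For example,
#     assert(macro (ok) bpgteq t0, 0, ok end)
# traps unless t0 >= 0.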

macro checkStackPointerAlignment(tempReg, location)
    if ARM64 or C_LOOP or SH4
        # ARM64 will check for us!
        # C_LOOP does not need the alignment, and can use a little perf
        # improvement from avoiding useless work.
        # SH4 does not need specific alignment (4 bytes).
    else
        if ARM or ARMv7 or ARMv7_TRADITIONAL
            # ARM can't do logical ops with the sp as a source
            move sp, tempReg
            andp 0xf, tempReg
        else
            andp sp, 0xf, tempReg
        end
        btpz tempReg, .stackPointerOkay
        move location, tempReg
        break
    .stackPointerOkay:
    end
end

macro preserveCallerPCAndCFR()
    if C_LOOP or ARM or ARMv7 or ARMv7_TRADITIONAL or MIPS or SH4
        push lr
        push cfr
    elsif X86 or X86_WIN or X86_64 or X86_64_WIN
        push cfr
    elsif ARM64
        pushLRAndFP
    else
        error
    end
    move sp, cfr
end

macro restoreCallerPCAndCFR()
    move cfr, sp
    if C_LOOP or ARM or ARMv7 or ARMv7_TRADITIONAL or MIPS or SH4
        pop cfr
        pop lr
    elsif X86 or X86_WIN or X86_64 or X86_64_WIN
        pop cfr
    elsif ARM64
        popLRAndFP
    end
end

macro preserveReturnAddressAfterCall(destinationRegister)
    if C_LOOP or ARM or ARMv7 or ARMv7_TRADITIONAL or ARM64 or MIPS or SH4
        # In C_LOOP case, we're only preserving the bytecode vPC.
        move lr, destinationRegister
    elsif X86 or X86_WIN or X86_64 or X86_64_WIN
        pop destinationRegister
    else
        error
    end
end

macro restoreReturnAddressBeforeReturn(sourceRegister)
    if C_LOOP or ARM or ARMv7 or ARMv7_TRADITIONAL or ARM64 or MIPS or SH4
        # In C_LOOP case, we're only restoring the bytecode vPC.
        move sourceRegister, lr
    elsif X86 or X86_WIN or X86_64 or X86_64_WIN
        push sourceRegister
    else
        error
    end
end

macro functionPrologue()
    if X86 or X86_WIN or X86_64 or X86_64_WIN
        push cfr
    elsif ARM64
        pushLRAndFP
    elsif C_LOOP or ARM or ARMv7 or ARMv7_TRADITIONAL or MIPS or SH4
        push lr
        push cfr
    end
    move sp, cfr
end

macro functionEpilogue()
    if X86 or X86_WIN or X86_64 or X86_64_WIN
        pop cfr
    elsif ARM64
        popLRAndFP
    elsif C_LOOP or ARM or ARMv7 or ARMv7_TRADITIONAL or MIPS or SH4
        pop cfr
        pop lr
    end
end

macro callToJavaScriptPrologue()
    if X86_64 or X86_64_WIN
        push cfr
        push t0
    elsif X86 or X86_WIN
        push cfr
    elsif ARM64
        pushLRAndFP
    elsif C_LOOP or ARM or ARMv7 or ARMv7_TRADITIONAL or MIPS or SH4
        push lr
        push cfr
    end
    pushCalleeSaves
    if X86
        subp 12, sp
    elsif X86_WIN
        subp 16, sp
        move sp, t4
        move t4, t0
        move t4, t2
        andp 0xf, t2
        andp 0xfffffff0, t0
        move t0, sp
        storep t4, [sp]
    elsif ARM or ARMv7 or ARMv7_TRADITIONAL
        subp 4, sp
        move sp, t4
        clrbp t4, 0xf, t5
        move t5, sp
        storep t4, [sp]
    end
end

macro callToJavaScriptEpilogue()
    if ARMv7
        addp CallFrameHeaderSlots * 8, cfr, t4
        move t4, sp
    else
        addp CallFrameHeaderSlots * 8, cfr, sp
    end

    loadp CallerFrame[cfr], cfr

    if X86
        addp 12, sp
    elsif X86_WIN
        pop t4
        move t4, sp
        addp 16, sp
    elsif ARM or ARMv7 or ARMv7_TRADITIONAL
        pop t4
        move t4, sp
        addp 4, sp
    end

    popCalleeSaves
    if X86_64 or X86_64_WIN
        pop t2
        pop cfr
    elsif X86 or X86_WIN
        pop cfr
    elsif ARM64
        popLRAndFP
    elsif C_LOOP or ARM or ARMv7 or ARMv7_TRADITIONAL or MIPS or SH4
        pop cfr
        pop lr
    end
end

macro moveStackPointerForCodeBlock(codeBlock, scratch)
    loadi CodeBlock::m_numCalleeRegisters[codeBlock], scratch
    lshiftp 3, scratch
    addp maxFrameExtentForSlowPathCall, scratch
    if ARMv7
        subp cfr, scratch, scratch
        move scratch, sp
    else
        subp cfr, scratch, sp
    end
end
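
# For reference: the macro above computes sp = cfr - (m_numCalleeRegisters * 8
# + maxFrameExtentForSlowPathCall), i.e. room for the frame's registers plus
# scratch space for slow path calls, with the stack growing down from cfr.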

macro restoreStackPointerAfterCall()
    loadp CodeBlock[cfr], t2
    moveStackPointerForCodeBlock(t2, t4)
end

macro traceExecution()
    if EXECUTION_TRACING
        callSlowPath(_llint_trace)
    end
end

macro callTargetFunction(callLinkInfo, calleeFramePtr)
    move calleeFramePtr, sp
    if C_LOOP
        cloopCallJSFunction LLIntCallLinkInfo::machineCodeTarget[callLinkInfo]
    else
        call LLIntCallLinkInfo::machineCodeTarget[callLinkInfo]
    end
    restoreStackPointerAfterCall()
    dispatchAfterCall()
end

macro slowPathForCall(slowPath)
    callCallSlowPath(
        slowPath,
        macro (callee)
            btpz t1, .dontUpdateSP
            if ARMv7
                addp CallerFrameAndPCSize, t1, t1
                move t1, sp
            else
                addp CallerFrameAndPCSize, t1, sp
            end
        .dontUpdateSP:
            if C_LOOP
                cloopCallJSFunction callee
            else
                call callee
            end
            restoreStackPointerAfterCall()
            dispatchAfterCall()
        end)
end

macro arrayProfile(cellAndIndexingType, profile, scratch)
    const cell = cellAndIndexingType
    const indexingType = cellAndIndexingType
    loadi JSCell::m_structureID[cell], scratch
    storei scratch, ArrayProfile::m_lastSeenStructureID[profile]
    loadb JSCell::m_indexingType[cell], indexingType
end

macro checkMarkByte(cell, scratch1, scratch2, continuation)
    loadb JSCell::m_gcData[cell], scratch1
    continuation(scratch1)
end

macro checkSwitchToJIT(increment, action)
    loadp CodeBlock[cfr], t0
    baddis increment, CodeBlock::m_llintExecuteCounter + BaselineExecutionCounter::m_counter[t0], .continue
    action()
    .continue:
end

macro checkSwitchToJITForEpilogue()
    checkSwitchToJIT(
        10,
        macro ()
            callSlowPath(_llint_replace)
        end)
end

macro assertNotConstant(index)
    assert(macro (ok) bilt index, FirstConstantRegisterIndex, ok end)
end

macro functionForCallCodeBlockGetter(targetRegister)
    loadp Callee[cfr], targetRegister
    loadp JSFunction::m_executable[targetRegister], targetRegister
    loadp FunctionExecutable::m_codeBlockForCall[targetRegister], targetRegister
end

macro functionForConstructCodeBlockGetter(targetRegister)
    loadp Callee[cfr], targetRegister
    loadp JSFunction::m_executable[targetRegister], targetRegister
    loadp FunctionExecutable::m_codeBlockForConstruct[targetRegister], targetRegister
end

macro notFunctionCodeBlockGetter(targetRegister)
    loadp CodeBlock[cfr], targetRegister
end

macro functionCodeBlockSetter(sourceRegister)
    storep sourceRegister, CodeBlock[cfr]
end

macro notFunctionCodeBlockSetter(sourceRegister)
    # Nothing to do!
end

# Do the bare minimum required to execute code. Sets up the PC, leaves the
# CodeBlock* in t1. May also trigger prologue entry OSR.
macro prologue(codeBlockGetter, codeBlockSetter, osrSlowPath, traceSlowPath)
    # Set up the call frame and check if we should OSR.
    preserveCallerPCAndCFR()

    if EXECUTION_TRACING
        subp maxFrameExtentForSlowPathCall, sp
        callSlowPath(traceSlowPath)
        addp maxFrameExtentForSlowPathCall, sp
    end
    codeBlockGetter(t1)
    if C_LOOP
    else
        baddis 5, CodeBlock::m_llintExecuteCounter + BaselineExecutionCounter::m_counter[t1], .continue
        if JSVALUE64
            cCall2(osrSlowPath, cfr, PC)
        else
            # We are after the function prologue, but before we have set up sp from the CodeBlock.
            # Temporarily align stack pointer for this call.
            subp 8, sp
            cCall2(osrSlowPath, cfr, PC)
            addp 8, sp
        end
        btpz t0, .recover
        move cfr, sp # restore the previous sp
        # pop the callerFrame since we will jump to a function that wants to save it
        if ARM64
            popLRAndFP
        elsif ARM or ARMv7 or ARMv7_TRADITIONAL or MIPS or SH4
            pop cfr
            pop lr
        else
            pop cfr
        end
        jmp t0
    .recover:
        codeBlockGetter(t1)
    .continue:
    end

    codeBlockSetter(t1)

    moveStackPointerForCodeBlock(t1, t2)

    # Set up the PC.
    if JSVALUE64
        loadp CodeBlock::m_instructions[t1], PB
        move 0, PC
    else
        loadp CodeBlock::m_instructions[t1], PC
    end
end

# Expects that CodeBlock is in t1, which is what prologue() leaves behind.
# Must call dispatch(0) after calling this.
macro functionInitialization(profileArgSkip)
    # Profile the arguments. Unfortunately, we have no choice but to do this. This
    # code is pretty horrendous because of the difference in ordering between
    # arguments and value profiles, the desire to have a simple loop-down-to-zero
    # loop, and the desire to use only three registers so as to preserve the PC and
    # the code block. It is likely that this code should be rewritten in a more
    # optimal way for architectures that have more than five registers available
    # for arbitrary use in the interpreter.
    loadi CodeBlock::m_numParameters[t1], t0
    addp -profileArgSkip, t0 # Use addi because that's what has the peephole
    assert(macro (ok) bpgteq t0, 0, ok end)
    btpz t0, .argumentProfileDone
    loadp CodeBlock::m_argumentValueProfiles + VectorBufferOffset[t1], t3
    mulp sizeof ValueProfile, t0, t2 # Aaaaahhhh! Need strength reduction!
    lshiftp 3, t0
    addp t2, t3
.argumentProfileLoop:
    if JSVALUE64
        loadq ThisArgumentOffset - 8 + profileArgSkip * 8[cfr, t0], t2
        subp sizeof ValueProfile, t3
        storeq t2, profileArgSkip * sizeof ValueProfile + ValueProfile::m_buckets[t3]
    else
        loadi ThisArgumentOffset + TagOffset - 8 + profileArgSkip * 8[cfr, t0], t2
        subp sizeof ValueProfile, t3
        storei t2, profileArgSkip * sizeof ValueProfile + ValueProfile::m_buckets + TagOffset[t3]
        loadi ThisArgumentOffset + PayloadOffset - 8 + profileArgSkip * 8[cfr, t0], t2
        storei t2, profileArgSkip * sizeof ValueProfile + ValueProfile::m_buckets + PayloadOffset[t3]
    end
    baddpnz -8, t0, .argumentProfileLoop
.argumentProfileDone:

    # Check stack height.
    loadi CodeBlock::m_numCalleeRegisters[t1], t0
    loadp CodeBlock::m_vm[t1], t2
    lshiftp 3, t0
    addi maxFrameExtentForSlowPathCall, t0
    subp cfr, t0, t0
    bpbeq VM::m_jsStackLimit[t2], t0, .stackHeightOK

    # Stack height check failed - need to call a slow_path.
    callSlowPath(_llint_stack_check)
    bpeq t1, 0, .stackHeightOK
    move t1, cfr
.stackHeightOK:
end

macro allocateJSObject(allocator, structure, result, scratch1, slowCase)
    if ALWAYS_ALLOCATE_SLOW
        jmp slowCase
    else
        const offsetOfFirstFreeCell =
            MarkedAllocator::m_freeList +
            MarkedBlock::FreeList::head

        # Get the object from the free list.
        loadp offsetOfFirstFreeCell[allocator], result
        btpz result, slowCase

        # Remove the object from the free list.
        loadp [result], scratch1
        storep scratch1, offsetOfFirstFreeCell[allocator]

        # Initialize the object.
        storep 0, JSObject::m_butterfly[result]
        storeStructureWithTypeInfo(result, structure, scratch1)
    end
end
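
# For reference: the free list is threaded through the free cells themselves,
# so "loadp [result], scratch1" above reads the next-free-cell pointer stored
# in the head cell, and storing it back into the allocator pops the head.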

macro doReturn()
    restoreCallerPCAndCFR()
    ret
end

# stub to call into JavaScript or Native functions
# EncodedJSValue callToJavaScript(void* code, ExecState** vmTopCallFrame, ProtoCallFrame* protoFrame)
# EncodedJSValue callToNativeFunction(void* code, ExecState** vmTopCallFrame, ProtoCallFrame* protoFrame)

if C_LOOP
_llint_call_to_javascript:
else
global _callToJavaScript
_callToJavaScript:
end
    doCallToJavaScript(makeJavaScriptCall)


if C_LOOP
_llint_call_to_native_function:
else
global _callToNativeFunction
_callToNativeFunction:
end
    doCallToJavaScript(makeHostFunctionCall)


if C_LOOP
else
# void sanitizeStackForVMImpl(VM* vm)
global _sanitizeStackForVMImpl
_sanitizeStackForVMImpl:
    if X86_64
        const vm = t4
        const address = t1
        const zeroValue = t0
    elsif X86_64_WIN
        const vm = t2
        const address = t1
        const zeroValue = t0
    elsif X86 or X86_WIN
        const vm = t2
        const address = t1
        const zeroValue = t0
    else
        const vm = a0
        const address = t1
        const zeroValue = t2
    end

    if X86 or X86_WIN
        loadp 4[sp], vm
    end

    loadp VM::m_lastStackTop[vm], address
    bpbeq sp, address, .zeroFillDone

    move 0, zeroValue
.zeroFillLoop:
    storep zeroValue, [address]
    addp PtrSize, address
    bpa sp, address, .zeroFillLoop

.zeroFillDone:
    move sp, address
    storep address, VM::m_lastStackTop[vm]
    ret
end
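
# For reference: sanitizeStackForVMImpl zero-fills the region between the
# current sp and the VM's last recorded stack top, scrubbing stale values so
# that later conservative scans of the stack do not mistake them for live
# JSValues.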


if C_LOOP
# Dummy entry point the C Loop uses to initialize.
_llint_entry:
    crash()
else
macro initPCRelative(pcBase)
    if X86_64 or X86_64_WIN
        call _relativePCBase
    _relativePCBase:
        pop pcBase
    elsif X86 or X86_WIN
        call _relativePCBase
    _relativePCBase:
        pop pcBase
        loadp 20[sp], t4
    elsif ARM64
    elsif ARMv7
    _relativePCBase:
        move pc, pcBase
        subp 3, pcBase # Need to back up the PC and set the Thumb2 bit
    elsif ARM or ARMv7_TRADITIONAL
    _relativePCBase:
        move pc, pcBase
        subp 8, pcBase
    elsif MIPS
        crash() # Need to replace with any initialization steps needed to set up PC relative address calculation
    elsif SH4
        mova _relativePCBase, t0
        move t0, pcBase
        alignformova
    _relativePCBase:
    end
end

macro setEntryAddress(index, label)
    if X86_64
        leap (label - _relativePCBase)[t1], t0
        move index, t2
        storep t0, [t4, t2, 8]
    elsif X86_64_WIN
        leap (label - _relativePCBase)[t1], t0
        move index, t4
        storep t0, [t2, t4, 8]
    elsif X86 or X86_WIN
        leap (label - _relativePCBase)[t1], t0
        move index, t2
        storep t0, [t4, t2, 4]
    elsif ARM64
        pcrtoaddr label, t1
        move index, t2
        storep t1, [a0, t2, 8]
    elsif ARM or ARMv7 or ARMv7_TRADITIONAL
        mvlbl (label - _relativePCBase), t2
        addp t2, t1, t2
        move index, t3
        storep t2, [a0, t3, 4]
    elsif SH4
        move (label - _relativePCBase), t2
        addp t2, t1, t2
        move index, t3
        storep t2, [a0, t3, 4]
        flushcp # Force constant pool flush to avoid "pcrel too far" link error.
    elsif MIPS
        crash() # Need to replace with code to turn label into an absolute address and save it at index
    end
end

global _llint_entry
# Entry point for the llint to initialize.
_llint_entry:
    functionPrologue()
    pushCalleeSaves
    initPCRelative(t1)

    # Include generated bytecode initialization file.
    include InitBytecodes

    popCalleeSaves
    functionEpilogue()
    ret
end

_llint_program_prologue:
    prologue(notFunctionCodeBlockGetter, notFunctionCodeBlockSetter, _llint_entry_osr, _llint_trace_prologue)
    dispatch(0)


_llint_eval_prologue:
    prologue(notFunctionCodeBlockGetter, notFunctionCodeBlockSetter, _llint_entry_osr, _llint_trace_prologue)
    dispatch(0)


_llint_function_for_call_prologue:
    prologue(functionForCallCodeBlockGetter, functionCodeBlockSetter, _llint_entry_osr_function_for_call, _llint_trace_prologue_function_for_call)
    functionInitialization(0)
    dispatch(0)


_llint_function_for_construct_prologue:
    prologue(functionForConstructCodeBlockGetter, functionCodeBlockSetter, _llint_entry_osr_function_for_construct, _llint_trace_prologue_function_for_construct)
    functionInitialization(1)
    dispatch(0)


_llint_function_for_call_arity_check:
    prologue(functionForCallCodeBlockGetter, functionCodeBlockSetter, _llint_entry_osr_function_for_call_arityCheck, _llint_trace_arityCheck_for_call)
    functionArityCheck(.functionForCallBegin, _slow_path_call_arityCheck)
.functionForCallBegin:
    functionInitialization(0)
    dispatch(0)


_llint_function_for_construct_arity_check:
    prologue(functionForConstructCodeBlockGetter, functionCodeBlockSetter, _llint_entry_osr_function_for_construct_arityCheck, _llint_trace_arityCheck_for_construct)
    functionArityCheck(.functionForConstructBegin, _slow_path_construct_arityCheck)
.functionForConstructBegin:
    functionInitialization(1)
    dispatch(0)


# Value-representation-specific code.
if JSVALUE64
    include LowLevelInterpreter64
else
    include LowLevelInterpreter32_64
end


# Value-representation-agnostic code.
_llint_op_touch_entry:
    traceExecution()
    callSlowPath(_slow_path_touch_entry)
    dispatch(1)


_llint_op_new_array:
    traceExecution()
    callSlowPath(_llint_slow_path_new_array)
    dispatch(5)


_llint_op_new_array_with_size:
    traceExecution()
    callSlowPath(_llint_slow_path_new_array_with_size)
    dispatch(4)


_llint_op_new_array_buffer:
    traceExecution()
    callSlowPath(_llint_slow_path_new_array_buffer)
    dispatch(5)


_llint_op_new_regexp:
    traceExecution()
    callSlowPath(_llint_slow_path_new_regexp)
    dispatch(3)


_llint_op_less:
    traceExecution()
    callSlowPath(_slow_path_less)
    dispatch(4)


_llint_op_lesseq:
    traceExecution()
    callSlowPath(_slow_path_lesseq)
    dispatch(4)


_llint_op_greater:
    traceExecution()
    callSlowPath(_slow_path_greater)
    dispatch(4)


_llint_op_greatereq:
    traceExecution()
    callSlowPath(_slow_path_greatereq)
    dispatch(4)


_llint_op_mod:
    traceExecution()
    callSlowPath(_slow_path_mod)
    dispatch(4)


_llint_op_typeof:
    traceExecution()
    callSlowPath(_slow_path_typeof)
    dispatch(3)


_llint_op_is_object:
    traceExecution()
    callSlowPath(_slow_path_is_object)
    dispatch(3)


_llint_op_is_function:
    traceExecution()
    callSlowPath(_slow_path_is_function)
    dispatch(3)


_llint_op_in:
    traceExecution()
    callSlowPath(_slow_path_in)
    dispatch(4)

macro withInlineStorage(object, propertyStorage, continuation)
    # Indicate that the object is the property storage, and that the
    # property storage register is unused.
    continuation(object, propertyStorage)
end

macro withOutOfLineStorage(object, propertyStorage, continuation)
    loadp JSObject::m_butterfly[object], propertyStorage
    # Indicate that the propertyStorage register now points to the
    # property storage, and that the object register may be reused
    # if the object pointer is not needed anymore.
    continuation(propertyStorage, object)
end


_llint_op_del_by_id:
    traceExecution()
    callSlowPath(_llint_slow_path_del_by_id)
    dispatch(4)


_llint_op_del_by_val:
    traceExecution()
    callSlowPath(_llint_slow_path_del_by_val)
    dispatch(4)


_llint_op_put_by_index:
    traceExecution()
    callSlowPath(_llint_slow_path_put_by_index)
    dispatch(4)


_llint_op_put_getter_setter:
    traceExecution()
    callSlowPath(_llint_slow_path_put_getter_setter)
    dispatch(5)


_llint_op_jtrue:
    traceExecution()
    jumpTrueOrFalse(
        macro (value, target) btinz value, target end,
        _llint_slow_path_jtrue)


_llint_op_jfalse:
    traceExecution()
    jumpTrueOrFalse(
        macro (value, target) btiz value, target end,
        _llint_slow_path_jfalse)


_llint_op_jless:
    traceExecution()
    compare(
        macro (left, right, target) bilt left, right, target end,
        macro (left, right, target) bdlt left, right, target end,
        _llint_slow_path_jless)


_llint_op_jnless:
    traceExecution()
    compare(
        macro (left, right, target) bigteq left, right, target end,
        macro (left, right, target) bdgtequn left, right, target end,
        _llint_slow_path_jnless)


_llint_op_jgreater:
    traceExecution()
    compare(
        macro (left, right, target) bigt left, right, target end,
        macro (left, right, target) bdgt left, right, target end,
        _llint_slow_path_jgreater)


_llint_op_jngreater:
    traceExecution()
    compare(
        macro (left, right, target) bilteq left, right, target end,
        macro (left, right, target) bdltequn left, right, target end,
        _llint_slow_path_jngreater)


_llint_op_jlesseq:
    traceExecution()
    compare(
        macro (left, right, target) bilteq left, right, target end,
        macro (left, right, target) bdlteq left, right, target end,
        _llint_slow_path_jlesseq)


_llint_op_jnlesseq:
    traceExecution()
    compare(
        macro (left, right, target) bigt left, right, target end,
        macro (left, right, target) bdgtun left, right, target end,
        _llint_slow_path_jnlesseq)


_llint_op_jgreatereq:
    traceExecution()
    compare(
        macro (left, right, target) bigteq left, right, target end,
        macro (left, right, target) bdgteq left, right, target end,
        _llint_slow_path_jgreatereq)


_llint_op_jngreatereq:
    traceExecution()
    compare(
        macro (left, right, target) bilt left, right, target end,
        macro (left, right, target) bdltun left, right, target end,
        _llint_slow_path_jngreatereq)
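
# Note on the inverted comparisons above: the double branches use the
# unordered forms (bdgtequn, bdltequn, bdgtun, bdltun) so that a NaN operand,
# which compares unordered, takes the "jn..." branch, matching JS semantics
# where any relational comparison involving NaN is false.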


_llint_op_loop_hint:
    traceExecution()
    loadp CodeBlock[cfr], t1
    loadp CodeBlock::m_vm[t1], t1
    loadb VM::watchdog+Watchdog::m_timerDidFire[t1], t0
    btbnz t0, .handleWatchdogTimer
.afterWatchdogTimerCheck:
    checkSwitchToJITForLoop()
    dispatch(1)
.handleWatchdogTimer:
    callWatchdogTimerHandler(.throwHandler)
    jmp .afterWatchdogTimerCheck
.throwHandler:
    jmp _llint_throw_from_slow_path_trampoline

_llint_op_switch_string:
    traceExecution()
    callSlowPath(_llint_slow_path_switch_string)
    dispatch(0)


_llint_op_new_func_exp:
    traceExecution()
    callSlowPath(_llint_slow_path_new_func_exp)
    dispatch(3)


_llint_op_call:
    traceExecution()
    arrayProfileForCall()
    doCall(_llint_slow_path_call)


_llint_op_construct:
    traceExecution()
    doCall(_llint_slow_path_construct)


_llint_op_call_varargs:
    traceExecution()
    callSlowPath(_llint_slow_path_size_frame_for_varargs)
    branchIfException(_llint_throw_from_slow_path_trampoline)
    # calleeFrame in t1
    if JSVALUE64
        move t1, sp
    else
        # The calleeFrame is not stack aligned, move down by CallerFrameAndPCSize to align
        if ARMv7
            subp t1, CallerFrameAndPCSize, t2
            move t2, sp
        else
            subp t1, CallerFrameAndPCSize, sp
        end
    end
    slowPathForCall(_llint_slow_path_call_varargs)

_llint_op_construct_varargs:
    traceExecution()
    callSlowPath(_llint_slow_path_size_frame_for_varargs)
    branchIfException(_llint_throw_from_slow_path_trampoline)
    # calleeFrame in t1
    if JSVALUE64
        move t1, sp
    else
        # The calleeFrame is not stack aligned, move down by CallerFrameAndPCSize to align
        if ARMv7
            subp t1, CallerFrameAndPCSize, t2
            move t2, sp
        else
            subp t1, CallerFrameAndPCSize, sp
        end
    end
    slowPathForCall(_llint_slow_path_construct_varargs)


_llint_op_call_eval:
    traceExecution()

    # Eval is executed in one of two modes:
    #
    # 1) We find that we're really invoking eval() in which case the
    #    execution is performed entirely inside the slow_path, and it
    #    returns the PC of a function that just returns the return value
    #    that the eval returned.
    #
    # 2) We find that we're invoking something called eval() that is not
    #    the real eval. Then the slow_path returns the PC of the thing to
    #    call, and we call it.
    #
    # This allows us to handle two cases, which would require a total of
    # up to four pieces of state that cannot be easily packed into two
    # registers (C functions can return up to two registers, easily):
    #
    # - The call frame register. This may or may not have been modified
    #   by the slow_path, but the convention is that it returns it. It's not
    #   totally clear if that's necessary, since the cfr is callee save.
    #   But that's our style in this here interpreter so we stick with it.
    #
    # - A bit to say if the slow_path successfully executed the eval and has
    #   the return value, or did not execute the eval but has a PC for us
    #   to call.
    #
    # - Either:
    #   - The JS return value (two registers), or
    #
    #   - The PC to call.
    #
    # It turns out to be easier to just always have this return the cfr
    # and a PC to call, and that PC may be a dummy thunk that just
    # returns the JS value that the eval returned.

    slowPathForCall(_llint_slow_path_call_eval)


_llint_generic_return_point:
    dispatchAfterCall()


_llint_op_strcat:
    traceExecution()
    callSlowPath(_slow_path_strcat)
    dispatch(4)


_llint_op_get_pnames:
    traceExecution()
    callSlowPath(_llint_slow_path_get_pnames)
    dispatch(0) # The slow_path either advances the PC or jumps us to somewhere else.


_llint_op_push_with_scope:
    traceExecution()
    callSlowPath(_llint_slow_path_push_with_scope)
    dispatch(2)


_llint_op_pop_scope:
    traceExecution()
    callSlowPath(_llint_slow_path_pop_scope)
    dispatch(1)


_llint_op_push_name_scope:
    traceExecution()
    callSlowPath(_llint_slow_path_push_name_scope)
    dispatch(4)


_llint_op_throw:
    traceExecution()
    callSlowPath(_llint_slow_path_throw)
    dispatch(2)


_llint_op_throw_static_error:
    traceExecution()
    callSlowPath(_llint_slow_path_throw_static_error)
    dispatch(3)


_llint_op_profile_will_call:
    traceExecution()
    loadp CodeBlock[cfr], t0
    loadp CodeBlock::m_vm[t0], t0
    loadi VM::m_enabledProfiler[t0], t0
    btpz t0, .opProfilerWillCallDone
    callSlowPath(_llint_slow_path_profile_will_call)
.opProfilerWillCallDone:
    dispatch(2)


_llint_op_profile_did_call:
    traceExecution()
    loadp CodeBlock[cfr], t0
    loadp CodeBlock::m_vm[t0], t0
    loadi VM::m_enabledProfiler[t0], t0
    btpz t0, .opProfilerDidCallDone
    callSlowPath(_llint_slow_path_profile_did_call)
.opProfilerDidCallDone:
    dispatch(2)


_llint_op_debug:
    traceExecution()
    loadp CodeBlock[cfr], t0
    loadi CodeBlock::m_debuggerRequests[t0], t0
    btiz t0, .opDebugDone
    callSlowPath(_llint_slow_path_debug)
.opDebugDone:
    dispatch(3)


_llint_native_call_trampoline:
    nativeCallTrampoline(NativeExecutable::m_function)


_llint_native_construct_trampoline:
    nativeCallTrampoline(NativeExecutable::m_constructor)


# Lastly, make sure that we can link even though we don't support all opcodes.
# These opcodes should never arise when using LLInt or either JIT. We assert
# as much.

macro notSupported()
    if ASSERT_ENABLED
        crash()
    else
        # We should use whatever the smallest possible instruction is, just to
        # ensure that there is a gap between instruction labels. If multiple
        # smallest instructions exist, we should pick the one that is most
        # likely to result in execution being halted. Currently that is the break
        # instruction on all architectures we're interested in. (Break is int3
        # on Intel, which is 1 byte, and bkpt on ARMv7, which is 2 bytes.)
        break
    end
end

_llint_op_init_global_const_nop:
    dispatch(5)