# Copyright (C) 2011, 2012 Apple Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
#    notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
#    notice, this list of conditions and the following disclaimer in the
#    documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
# BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
# THE POSSIBILITY OF SUCH DAMAGE.

# First come the common protocols that both interpreters use. Note that each
# of these must have an ASSERT() in LLIntData.cpp

# Work-around for the fact that the toolchain's awareness of armv7s results in
# a separate slab in the fat binary, yet the offlineasm doesn't know to expect
# it.
if ARMv7s
end

# These declarations must match interpreter/JSStack.h.
const CallFrameHeaderSize = 48
const ArgumentCount = -48
const CallerFrame = -40
const Callee = -32
const ScopeChain = -24
const ReturnPC = -16
const CodeBlock = -8

const ThisArgumentOffset = -CallFrameHeaderSize - 8
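# All of the header slots above are addressed at negative offsets from cfr, so
# ThisArgumentOffset names the first argument slot ('this'), which sits just
# below the header.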
43 | ||
44 | # Some register conventions. | |
45 | if JSVALUE64 | |
46 | # - Use a pair of registers to represent the PC: one register for the | |
93a37866 | 47 | # base of the stack, and one register for the index. |
6fe7ccc8 A |
48 | # - The PC base (or PB for short) should be stored in the csr. It will |
49 | # get clobbered on calls to other JS code, but will get saved on calls | |
50 | # to C functions. | |
51 | # - C calls are still given the Instruction* rather than the PC index. | |
52 | # This requires an add before the call, and a sub after. | |
53 | const PC = t4 | |
54 | const PB = t6 | |
55 | const tagTypeNumber = csr1 | |
56 | const tagMask = csr2 | |
93a37866 A |
57 | |
58 | macro loadisFromInstruction(offset, dest) | |
59 | loadis offset * 8[PB, PC, 8], dest | |
60 | end | |
61 | ||
62 | macro loadpFromInstruction(offset, dest) | |
63 | loadp offset * 8[PB, PC, 8], dest | |
64 | end | |
65 | ||
66 | macro storepToInstruction(value, offset) | |
67 | storep value, offset * 8[PB, PC, 8] | |
68 | end | |
69 | ||
6fe7ccc8 A |
70 | else |
71 | const PC = t4 | |
93a37866 A |
72 | macro loadisFromInstruction(offset, dest) |
73 | loadis offset * 4[PC], dest | |
74 | end | |
75 | ||
76 | macro loadpFromInstruction(offset, dest) | |
77 | loadp offset * 4[PC], dest | |
78 | end | |
6fe7ccc8 A |
79 | end |
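
# Both variants of loadisFromInstruction()/loadpFromInstruction() fetch operand
# `offset` of the bytecode currently being executed: with JSVALUE64 the operand
# lives at PB + (PC + offset) * 8 since PC is an index, otherwise at
# PC + offset * 4 since PC is an Instruction*.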
80 | ||
81 | # Constants for reasoning about value representation. | |
82 | if BIG_ENDIAN | |
83 | const TagOffset = 0 | |
84 | const PayloadOffset = 4 | |
85 | else | |
86 | const TagOffset = 4 | |
87 | const PayloadOffset = 0 | |
88 | end | |
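# For example, on a little-endian 32-bit target a JSValue occupies an 8-byte
# slot with the payload in the low word and the tag in the high word; the
# 64-bit build loads the whole slot at once instead.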
89 | ||
93a37866 A |
90 | # Constant for reasoning about butterflies. |
91 | const IsArray = 1 | |
92 | const IndexingShapeMask = 30 | |
93 | const NoIndexingShape = 0 | |
94 | const Int32Shape = 20 | |
95 | const DoubleShape = 22 | |
96 | const ContiguousShape = 26 | |
97 | const ArrayStorageShape = 28 | |
98 | const SlowPutArrayStorageShape = 30 | |
99 | ||
6fe7ccc8 A |
100 | # Type constants. |
101 | const StringType = 5 | |
93a37866 | 102 | const ObjectType = 17 |
6fe7ccc8 A |
103 | |
104 | # Type flags constants. | |
105 | const MasqueradesAsUndefined = 1 | |
106 | const ImplementsHasInstance = 2 | |
107 | const ImplementsDefaultHasInstance = 8 | |
108 | ||
109 | # Bytecode operand constants. | |
110 | const FirstConstantRegisterIndex = 0x40000000 | |
111 | ||
112 | # Code type constants. | |
113 | const GlobalCode = 0 | |
114 | const EvalCode = 1 | |
115 | const FunctionCode = 2 | |
116 | ||
117 | # The interpreter steals the tag word of the argument count. | |
118 | const LLIntReturnPC = ArgumentCount + TagOffset | |
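# (The LLInt stashes the caller's bytecode PC in that tag half across calls;
# see dispatchAfterCall() in the per-representation files included below.)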
119 | ||
120 | # String flags. | |
121 | const HashFlags8BitBuffer = 64 | |
122 | ||
93a37866 A |
123 | # Copied from PropertyOffset.h |
124 | const firstOutOfLineOffset = 100 | |
125 | ||
126 | # From ResolveOperations.h | |
127 | const ResolveOperationFail = 0 | |
128 | const ResolveOperationSetBaseToUndefined = 1 | |
129 | const ResolveOperationReturnScopeAsBase = 2 | |
130 | const ResolveOperationSetBaseToScope = 3 | |
131 | const ResolveOperationSetBaseToGlobal = 4 | |
132 | const ResolveOperationGetAndReturnScopedVar = 5 | |
133 | const ResolveOperationGetAndReturnGlobalVar = 6 | |
134 | const ResolveOperationGetAndReturnGlobalVarWatchable = 7 | |
135 | const ResolveOperationSkipTopScopeNode = 8 | |
136 | const ResolveOperationSkipScopes = 9 | |
137 | const ResolveOperationReturnGlobalObjectAsBase = 10 | |
138 | const ResolveOperationGetAndReturnGlobalProperty = 11 | |
139 | const ResolveOperationCheckForDynamicEntriesBeforeGlobalScope = 12 | |
140 | ||
141 | const PutToBaseOperationKindUninitialised = 0 | |
142 | const PutToBaseOperationKindGeneric = 1 | |
143 | const PutToBaseOperationKindReadonly = 2 | |
144 | const PutToBaseOperationKindGlobalVariablePut = 3 | |
145 | const PutToBaseOperationKindGlobalVariablePutChecked = 4 | |
146 | const PutToBaseOperationKindGlobalPropertyPut = 5 | |
147 | const PutToBaseOperationKindVariablePut = 6 | |
148 | ||
6fe7ccc8 A |
149 | # Allocation constants |
150 | if JSVALUE64 | |
151 | const JSFinalObjectSizeClassIndex = 1 | |
152 | else | |
153 | const JSFinalObjectSizeClassIndex = 3 | |
154 | end | |
155 | ||
156 | # This must match wtf/Vector.h | |
93a37866 | 157 | const VectorBufferOffset = 0 |
6fe7ccc8 | 158 | if JSVALUE64 |
93a37866 | 159 | const VectorSizeOffset = 12 |
6fe7ccc8 | 160 | else |
93a37866 | 161 | const VectorSizeOffset = 8 |
6fe7ccc8 A |
162 | end |
163 | ||
164 | ||
165 | # Some common utilities. | |
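# crash() forces an easily recognizable failure: the store to the 0xbbadbeef
# sentinel address faults (matching WebKit's CRASH() convention), and the call
# through a zeroed register is a backstop in case the store somehow survives.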
macro crash()
    if C_LOOP
        cloopCrash
    else
        storei t0, 0xbbadbeef[]
        move 0, t0
        call t0
    end
end

macro assert(assertion)
    if ASSERT_ENABLED
        assertion(.ok)
        crash()
    .ok:
    end
end

macro preserveReturnAddressAfterCall(destinationRegister)
    if C_LOOP or ARM or ARMv7 or ARMv7_TRADITIONAL or ARM64 or MIPS
        # In C_LOOP case, we're only preserving the bytecode vPC.
        move lr, destinationRegister
    elsif SH4
        stspr destinationRegister
    elsif X86 or X86_64
        pop destinationRegister
    else
        error
    end
end

macro restoreReturnAddressBeforeReturn(sourceRegister)
    if C_LOOP or ARM or ARMv7 or ARMv7_TRADITIONAL or ARM64 or MIPS
        # In C_LOOP case, we're only restoring the bytecode vPC.
        move sourceRegister, lr
    elsif SH4
        ldspr sourceRegister
    elsif X86 or X86_64
        push sourceRegister
    else
        error
    end
end

macro traceExecution()
    if EXECUTION_TRACING
        callSlowPath(_llint_trace)
    end
end

macro callTargetFunction(callLinkInfo)
    if C_LOOP
        cloopCallJSFunction LLIntCallLinkInfo::machineCodeTarget[callLinkInfo]
    else
        call LLIntCallLinkInfo::machineCodeTarget[callLinkInfo]
        dispatchAfterCall()
    end
end

macro slowPathForCall(advance, slowPath)
    callCallSlowPath(
        advance,
        slowPath,
        macro (callee)
            if C_LOOP
                cloopCallJSFunction callee
            else
                call callee
                dispatchAfterCall()
            end
        end)
end

macro arrayProfile(structureAndIndexingType, profile, scratch)
    const structure = structureAndIndexingType
    const indexingType = structureAndIndexingType
    if VALUE_PROFILER
        storep structure, ArrayProfile::m_lastSeenStructure[profile]
    end
    loadb Structure::m_indexingType[structure], indexingType
end
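
# Note that arrayProfile() reuses a single register: it first holds the
# structure (recorded in the ArrayProfile when value profiling is enabled) and
# is then overwritten with that structure's indexing-type byte. The scratch
# argument is currently unused.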
247 | ||
6fe7ccc8 A |
248 | macro checkSwitchToJIT(increment, action) |
249 | if JIT_ENABLED | |
250 | loadp CodeBlock[cfr], t0 | |
251 | baddis increment, CodeBlock::m_llintExecuteCounter + ExecutionCounter::m_counter[t0], .continue | |
252 | action() | |
253 | .continue: | |
254 | end | |
255 | end | |
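
# The LLInt execute counter counts up from a negative threshold: baddis adds
# `increment` and branches to .continue while the sum is still negative, so
# `action` - typically an attempt to tier up to the JIT - only runs once the
# counter crosses zero. prologue() below performs the same kind of check on
# entry.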
256 | ||
257 | macro checkSwitchToJITForEpilogue() | |
258 | checkSwitchToJIT( | |
259 | 10, | |
260 | macro () | |
261 | callSlowPath(_llint_replace) | |
262 | end) | |
263 | end | |
264 | ||
265 | macro assertNotConstant(index) | |
266 | assert(macro (ok) bilt index, FirstConstantRegisterIndex, ok end) | |
267 | end | |
268 | ||
269 | macro functionForCallCodeBlockGetter(targetRegister) | |
270 | loadp Callee[cfr], targetRegister | |
271 | loadp JSFunction::m_executable[targetRegister], targetRegister | |
272 | loadp FunctionExecutable::m_codeBlockForCall[targetRegister], targetRegister | |
273 | end | |
274 | ||
275 | macro functionForConstructCodeBlockGetter(targetRegister) | |
276 | loadp Callee[cfr], targetRegister | |
277 | loadp JSFunction::m_executable[targetRegister], targetRegister | |
278 | loadp FunctionExecutable::m_codeBlockForConstruct[targetRegister], targetRegister | |
279 | end | |
280 | ||
281 | macro notFunctionCodeBlockGetter(targetRegister) | |
282 | loadp CodeBlock[cfr], targetRegister | |
283 | end | |
284 | ||
285 | macro functionCodeBlockSetter(sourceRegister) | |
286 | storep sourceRegister, CodeBlock[cfr] | |
287 | end | |
288 | ||
289 | macro notFunctionCodeBlockSetter(sourceRegister) | |
290 | # Nothing to do! | |
291 | end | |
292 | ||
293 | # Do the bare minimum required to execute code. Sets up the PC, leave the CodeBlock* | |
294 | # in t1. May also trigger prologue entry OSR. | |
295 | macro prologue(codeBlockGetter, codeBlockSetter, osrSlowPath, traceSlowPath) | |
296 | preserveReturnAddressAfterCall(t2) | |
297 | ||
298 | # Set up the call frame and check if we should OSR. | |
299 | storep t2, ReturnPC[cfr] | |
300 | if EXECUTION_TRACING | |
301 | callSlowPath(traceSlowPath) | |
302 | end | |
303 | codeBlockGetter(t1) | |
304 | if JIT_ENABLED | |
305 | baddis 5, CodeBlock::m_llintExecuteCounter + ExecutionCounter::m_counter[t1], .continue | |
306 | cCall2(osrSlowPath, cfr, PC) | |
307 | move t1, cfr | |
308 | btpz t0, .recover | |
309 | loadp ReturnPC[cfr], t2 | |
310 | restoreReturnAddressBeforeReturn(t2) | |
311 | jmp t0 | |
312 | .recover: | |
313 | codeBlockGetter(t1) | |
314 | .continue: | |
315 | end | |
316 | codeBlockSetter(t1) | |
317 | ||
318 | # Set up the PC. | |
319 | if JSVALUE64 | |
320 | loadp CodeBlock::m_instructions[t1], PB | |
321 | move 0, PC | |
322 | else | |
323 | loadp CodeBlock::m_instructions[t1], PC | |
324 | end | |
325 | end | |
326 | ||
327 | # Expects that CodeBlock is in t1, which is what prologue() leaves behind. | |
328 | # Must call dispatch(0) after calling this. | |
329 | macro functionInitialization(profileArgSkip) | |
330 | if VALUE_PROFILER | |
331 | # Profile the arguments. Unfortunately, we have no choice but to do this. This | |
332 | # code is pretty horrendous because of the difference in ordering between | |
333 | # arguments and value profiles, the desire to have a simple loop-down-to-zero | |
334 | # loop, and the desire to use only three registers so as to preserve the PC and | |
335 | # the code block. It is likely that this code should be rewritten in a more | |
336 | # optimal way for architectures that have more than five registers available | |
337 | # for arbitrary use in the interpreter. | |
338 | loadi CodeBlock::m_numParameters[t1], t0 | |
339 | addp -profileArgSkip, t0 # Use addi because that's what has the peephole | |
340 | assert(macro (ok) bpgteq t0, 0, ok end) | |
341 | btpz t0, .argumentProfileDone | |
342 | loadp CodeBlock::m_argumentValueProfiles + VectorBufferOffset[t1], t3 | |
343 | mulp sizeof ValueProfile, t0, t2 # Aaaaahhhh! Need strength reduction! | |
344 | negp t0 | |
345 | lshiftp 3, t0 | |
346 | addp t2, t3 | |
347 | .argumentProfileLoop: | |
348 | if JSVALUE64 | |
93a37866 | 349 | loadq ThisArgumentOffset + 8 - profileArgSkip * 8[cfr, t0], t2 |
6fe7ccc8 | 350 | subp sizeof ValueProfile, t3 |
93a37866 | 351 | storeq t2, profileArgSkip * sizeof ValueProfile + ValueProfile::m_buckets[t3] |
6fe7ccc8 A |
352 | else |
353 | loadi ThisArgumentOffset + TagOffset + 8 - profileArgSkip * 8[cfr, t0], t2 | |
354 | subp sizeof ValueProfile, t3 | |
355 | storei t2, profileArgSkip * sizeof ValueProfile + ValueProfile::m_buckets + TagOffset[t3] | |
356 | loadi ThisArgumentOffset + PayloadOffset + 8 - profileArgSkip * 8[cfr, t0], t2 | |
357 | storei t2, profileArgSkip * sizeof ValueProfile + ValueProfile::m_buckets + PayloadOffset[t3] | |
358 | end | |
359 | baddpnz 8, t0, .argumentProfileLoop | |
360 | .argumentProfileDone: | |
361 | end | |
362 | ||
363 | # Check stack height. | |
364 | loadi CodeBlock::m_numCalleeRegisters[t1], t0 | |
93a37866 A |
365 | loadp CodeBlock::m_vm[t1], t2 |
366 | loadp VM::interpreter[t2], t2 # FIXME: Can get to the JSStack from the JITStackFrame | |
6fe7ccc8 A |
367 | lshifti 3, t0 |
368 | addp t0, cfr, t0 | |
93a37866 | 369 | bpaeq Interpreter::m_stack + JSStack::m_end[t2], t0, .stackHeightOK |
6fe7ccc8 A |
370 | |
371 | # Stack height check failed - need to call a slow_path. | |
93a37866 | 372 | callSlowPath(_llint_stack_check) |
6fe7ccc8 A |
373 | .stackHeightOK: |
374 | end | |
375 | ||
93a37866 | 376 | macro allocateJSObject(allocator, structure, result, scratch1, slowCase) |
6fe7ccc8 A |
377 | if ALWAYS_ALLOCATE_SLOW |
378 | jmp slowCase | |
379 | else | |
6fe7ccc8 A |
380 | const offsetOfFirstFreeCell = |
381 | MarkedAllocator::m_freeList + | |
382 | MarkedBlock::FreeList::head | |
383 | ||
6fe7ccc8 | 384 | # Get the object from the free list. |
93a37866 | 385 | loadp offsetOfFirstFreeCell[allocator], result |
6fe7ccc8 A |
386 | btpz result, slowCase |
387 | ||
388 | # Remove the object from the free list. | |
93a37866 A |
389 | loadp [result], scratch1 |
390 | storep scratch1, offsetOfFirstFreeCell[allocator] | |
6fe7ccc8 A |
391 | |
392 | # Initialize the object. | |
6fe7ccc8 | 393 | storep structure, JSCell::m_structure[result] |
93a37866 | 394 | storep 0, JSObject::m_butterfly[result] |
6fe7ccc8 A |
395 | end |
396 | end | |
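
# Storing 0 to m_butterfly means the freshly allocated object starts out with
# no out-of-line properties and no indexed storage; a butterfly is only
# attached later if one is needed.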
397 | ||
398 | macro doReturn() | |
399 | loadp ReturnPC[cfr], t2 | |
400 | loadp CallerFrame[cfr], cfr | |
401 | restoreReturnAddressBeforeReturn(t2) | |
402 | ret | |
403 | end | |
404 | ||
405 | ||
406 | # Indicate the beginning of LLInt. | |
407 | _llint_begin: | |
408 | crash() | |
409 | ||
410 | ||
411 | _llint_program_prologue: | |
412 | prologue(notFunctionCodeBlockGetter, notFunctionCodeBlockSetter, _llint_entry_osr, _llint_trace_prologue) | |
413 | dispatch(0) | |
414 | ||
415 | ||
416 | _llint_eval_prologue: | |
417 | prologue(notFunctionCodeBlockGetter, notFunctionCodeBlockSetter, _llint_entry_osr, _llint_trace_prologue) | |
418 | dispatch(0) | |
419 | ||
420 | ||
421 | _llint_function_for_call_prologue: | |
422 | prologue(functionForCallCodeBlockGetter, functionCodeBlockSetter, _llint_entry_osr_function_for_call, _llint_trace_prologue_function_for_call) | |
423 | .functionForCallBegin: | |
424 | functionInitialization(0) | |
425 | dispatch(0) | |
426 | ||
427 | ||
428 | _llint_function_for_construct_prologue: | |
429 | prologue(functionForConstructCodeBlockGetter, functionCodeBlockSetter, _llint_entry_osr_function_for_construct, _llint_trace_prologue_function_for_construct) | |
430 | .functionForConstructBegin: | |
431 | functionInitialization(1) | |
432 | dispatch(0) | |
433 | ||
434 | ||
435 | _llint_function_for_call_arity_check: | |
436 | prologue(functionForCallCodeBlockGetter, functionCodeBlockSetter, _llint_entry_osr_function_for_call_arityCheck, _llint_trace_arityCheck_for_call) | |
437 | functionArityCheck(.functionForCallBegin, _llint_slow_path_call_arityCheck) | |
438 | ||
439 | ||
440 | _llint_function_for_construct_arity_check: | |
441 | prologue(functionForConstructCodeBlockGetter, functionCodeBlockSetter, _llint_entry_osr_function_for_construct_arityCheck, _llint_trace_arityCheck_for_construct) | |
442 | functionArityCheck(.functionForConstructBegin, _llint_slow_path_construct_arityCheck) | |
443 | ||
444 | ||
445 | # Value-representation-specific code. | |
446 | if JSVALUE64 | |
447 | include LowLevelInterpreter64 | |
448 | else | |
449 | include LowLevelInterpreter32_64 | |
450 | end | |
451 | ||
452 | ||
453 | # Value-representation-agnostic code. | |
454 | _llint_op_new_array: | |
455 | traceExecution() | |
456 | callSlowPath(_llint_slow_path_new_array) | |
93a37866 A |
457 | dispatch(5) |
458 | ||
459 | ||
460 | _llint_op_new_array_with_size: | |
461 | traceExecution() | |
462 | callSlowPath(_llint_slow_path_new_array_with_size) | |
6fe7ccc8 A |
463 | dispatch(4) |
464 | ||
465 | ||
466 | _llint_op_new_array_buffer: | |
467 | traceExecution() | |
468 | callSlowPath(_llint_slow_path_new_array_buffer) | |
93a37866 | 469 | dispatch(5) |
6fe7ccc8 A |
470 | |
471 | ||
472 | _llint_op_new_regexp: | |
473 | traceExecution() | |
474 | callSlowPath(_llint_slow_path_new_regexp) | |
475 | dispatch(3) | |
476 | ||
477 | ||
478 | _llint_op_less: | |
479 | traceExecution() | |
480 | callSlowPath(_llint_slow_path_less) | |
481 | dispatch(4) | |
482 | ||
483 | ||
484 | _llint_op_lesseq: | |
485 | traceExecution() | |
486 | callSlowPath(_llint_slow_path_lesseq) | |
487 | dispatch(4) | |
488 | ||
489 | ||
490 | _llint_op_greater: | |
491 | traceExecution() | |
492 | callSlowPath(_llint_slow_path_greater) | |
493 | dispatch(4) | |
494 | ||
495 | ||
496 | _llint_op_greatereq: | |
497 | traceExecution() | |
498 | callSlowPath(_llint_slow_path_greatereq) | |
499 | dispatch(4) | |
500 | ||
501 | ||
502 | _llint_op_mod: | |
503 | traceExecution() | |
504 | callSlowPath(_llint_slow_path_mod) | |
505 | dispatch(4) | |
506 | ||
507 | ||
508 | _llint_op_typeof: | |
509 | traceExecution() | |
510 | callSlowPath(_llint_slow_path_typeof) | |
511 | dispatch(3) | |
512 | ||
513 | ||
514 | _llint_op_is_object: | |
515 | traceExecution() | |
516 | callSlowPath(_llint_slow_path_is_object) | |
517 | dispatch(3) | |
518 | ||
519 | ||
520 | _llint_op_is_function: | |
521 | traceExecution() | |
522 | callSlowPath(_llint_slow_path_is_function) | |
523 | dispatch(3) | |
524 | ||
525 | ||
526 | _llint_op_in: | |
527 | traceExecution() | |
528 | callSlowPath(_llint_slow_path_in) | |
529 | dispatch(4) | |
530 | ||
93a37866 A |
531 | macro getPutToBaseOperationField(scratch, scratch1, fieldOffset, fieldGetter) |
532 | loadpFromInstruction(4, scratch) | |
533 | fieldGetter(fieldOffset[scratch]) | |
534 | end | |
535 | ||
536 | macro moveJSValueFromRegisterWithoutProfiling(value, destBuffer, destOffsetReg) | |
537 | storeq value, [destBuffer, destOffsetReg, 8] | |
538 | end | |
539 | ||
540 | ||
541 | macro moveJSValueFromRegistersWithoutProfiling(tag, payload, destBuffer, destOffsetReg) | |
542 | storei tag, TagOffset[destBuffer, destOffsetReg, 8] | |
543 | storei payload, PayloadOffset[destBuffer, destOffsetReg, 8] | |
544 | end | |
545 | ||
546 | macro putToBaseVariableBody(variableOffset, scratch1, scratch2, scratch3) | |
547 | loadisFromInstruction(1, scratch1) | |
548 | loadp PayloadOffset[cfr, scratch1, 8], scratch1 | |
549 | loadp JSVariableObject::m_registers[scratch1], scratch1 | |
550 | loadisFromInstruction(3, scratch2) | |
551 | if JSVALUE64 | |
552 | loadConstantOrVariable(scratch2, scratch3) | |
553 | moveJSValueFromRegisterWithoutProfiling(scratch3, scratch1, variableOffset) | |
554 | else | |
555 | loadConstantOrVariable2Reg(scratch2, scratch3, scratch2) # scratch3=tag, scratch2=payload | |
556 | moveJSValueFromRegistersWithoutProfiling(scratch3, scratch2, scratch1, variableOffset) | |
557 | end | |
558 | end | |
559 | ||
560 | _llint_op_put_to_base_variable: | |
561 | traceExecution() | |
562 | getPutToBaseOperationField(t0, t1, PutToBaseOperation::m_offset, macro(addr) | |
563 | loadis addr, t0 | |
564 | end) | |
565 | putToBaseVariableBody(t0, t1, t2, t3) | |
566 | dispatch(5) | |
567 | ||
568 | _llint_op_put_to_base: | |
569 | traceExecution() | |
570 | getPutToBaseOperationField(t0, t1, 0, macro(addr) | |
571 | leap addr, t0 | |
572 | bbneq PutToBaseOperation::m_kindAsUint8[t0], PutToBaseOperationKindVariablePut, .notPutToBaseVariable | |
573 | loadis PutToBaseOperation::m_offset[t0], t0 | |
574 | putToBaseVariableBody(t0, t1, t2, t3) | |
575 | dispatch(5) | |
576 | .notPutToBaseVariable: | |
577 | end) | |
578 | callSlowPath(_llint_slow_path_put_to_base) | |
579 | dispatch(5) | |
580 | ||
581 | macro getResolveOperation(resolveOperationIndex, dest) | |
582 | loadpFromInstruction(resolveOperationIndex, dest) | |
583 | loadp VectorBufferOffset[dest], dest | |
584 | end | |
585 | ||
macro getScope(loadInitialScope, scopeCount, dest, scratch)
    loadInitialScope(dest)
    loadi scopeCount, scratch

    btiz scratch, .done
.loop:
    loadp JSScope::m_next[dest], dest
    subi 1, scratch
    btinz scratch, .loop

.done:
end

macro moveJSValue(sourceBuffer, sourceOffsetReg, destBuffer, destOffsetReg, profileOffset, scratchRegister)
    if JSVALUE64
        loadq [sourceBuffer, sourceOffsetReg, 8], scratchRegister
        storeq scratchRegister, [destBuffer, destOffsetReg, 8]
        loadpFromInstruction(profileOffset, destOffsetReg)
        valueProfile(scratchRegister, destOffsetReg)
    else
        loadi PayloadOffset[sourceBuffer, sourceOffsetReg, 8], scratchRegister
        storei scratchRegister, PayloadOffset[destBuffer, destOffsetReg, 8]
        loadi TagOffset[sourceBuffer, sourceOffsetReg, 8], sourceOffsetReg
        storei sourceOffsetReg, TagOffset[destBuffer, destOffsetReg, 8]
        loadpFromInstruction(profileOffset, destOffsetReg)
        valueProfile(sourceOffsetReg, scratchRegister, destOffsetReg)
    end
end

macro moveJSValueFromSlot(slot, destBuffer, destOffsetReg, profileOffset, scratchRegister)
    if JSVALUE64
        loadq [slot], scratchRegister
        storeq scratchRegister, [destBuffer, destOffsetReg, 8]
        loadpFromInstruction(profileOffset, destOffsetReg)
        valueProfile(scratchRegister, destOffsetReg)
    else
        loadi PayloadOffset[slot], scratchRegister
        storei scratchRegister, PayloadOffset[destBuffer, destOffsetReg, 8]
        loadi TagOffset[slot], slot
        storei slot, TagOffset[destBuffer, destOffsetReg, 8]
        loadpFromInstruction(profileOffset, destOffsetReg)
        valueProfile(slot, scratchRegister, destOffsetReg)
    end
end

macro moveJSValueFromRegister(value, destBuffer, destOffsetReg, profileOffset)
    storeq value, [destBuffer, destOffsetReg, 8]
    loadpFromInstruction(profileOffset, destOffsetReg)
    valueProfile(value, destOffsetReg)
end

macro moveJSValueFromRegisters(tag, payload, destBuffer, destOffsetReg, profileOffset)
    storei tag, TagOffset[destBuffer, destOffsetReg, 8]
    storei payload, PayloadOffset[destBuffer, destOffsetReg, 8]
    loadpFromInstruction(profileOffset, destOffsetReg)
    valueProfile(tag, payload, destOffsetReg)
end

_llint_op_resolve_global_property:
    traceExecution()
    getResolveOperation(3, t0)
    loadp CodeBlock[cfr], t1
    loadp CodeBlock::m_globalObject[t1], t1
    loadp ResolveOperation::m_structure[t0], t2
    bpneq JSCell::m_structure[t1], t2, .llint_op_resolve_local
    loadis ResolveOperation::m_offset[t0], t0
    if JSVALUE64
        loadPropertyAtVariableOffsetKnownNotInline(t0, t1, t2)
        loadisFromInstruction(1, t0)
        moveJSValueFromRegister(t2, cfr, t0, 4)
    else
        loadPropertyAtVariableOffsetKnownNotInline(t0, t1, t2, t3)
        loadisFromInstruction(1, t0)
        moveJSValueFromRegisters(t2, t3, cfr, t0, 4)
    end
    dispatch(5)

_llint_op_resolve_global_var:
    traceExecution()
    getResolveOperation(3, t0)
    loadp ResolveOperation::m_registerAddress[t0], t0
    loadisFromInstruction(1, t1)
    moveJSValueFromSlot(t0, cfr, t1, 4, t3)
    dispatch(5)

macro resolveScopedVarBody(resolveOperations)
    # First ResolveOperation is to skip scope chain nodes
    getScope(macro(dest)
                 loadp ScopeChain + PayloadOffset[cfr], dest
             end,
             ResolveOperation::m_scopesToSkip[resolveOperations], t1, t2)
    loadp JSVariableObject::m_registers[t1], t1 # t1 now contains the activation registers

    # Second ResolveOperation tells us what offset to use
    loadis ResolveOperation::m_offset + sizeof ResolveOperation[resolveOperations], t2
    loadisFromInstruction(1, t3)
    moveJSValue(t1, t2, cfr, t3, 4, t0)
end

_llint_op_resolve_scoped_var:
    traceExecution()
    getResolveOperation(3, t0)
    resolveScopedVarBody(t0)
    dispatch(5)

_llint_op_resolve_scoped_var_on_top_scope:
    traceExecution()
    getResolveOperation(3, t0)

    # Load destination index
    loadisFromInstruction(1, t3)

    # We know we want the top scope chain entry
    loadp ScopeChain + PayloadOffset[cfr], t1
    loadp JSVariableObject::m_registers[t1], t1 # t1 now contains the activation registers

    # Second ResolveOperation tells us what offset to use
    loadis ResolveOperation::m_offset + sizeof ResolveOperation[t0], t2

    moveJSValue(t1, t2, cfr, t3, 4, t0)
    dispatch(5)

_llint_op_resolve_scoped_var_with_top_scope_check:
    traceExecution()
    getResolveOperation(3, t0)
    # First ResolveOperation tells us what register to check
    loadis ResolveOperation::m_activationRegister[t0], t1

    loadp PayloadOffset[cfr, t1, 8], t1

    getScope(macro(dest)
                 btpz t1, .scopeChainNotCreated
                 loadp JSScope::m_next[t1], dest
                 jmp .done
             .scopeChainNotCreated:
                 loadp ScopeChain + PayloadOffset[cfr], dest
             .done:
             end,
             # Second ResolveOperation tells us how many more nodes to skip
             ResolveOperation::m_scopesToSkip + sizeof ResolveOperation[t0], t1, t2)
    loadp JSVariableObject::m_registers[t1], t1 # t1 now contains the activation registers

    # Third operation tells us what offset to use
    loadis ResolveOperation::m_offset + 2 * sizeof ResolveOperation[t0], t2
    loadisFromInstruction(1, t3)
    moveJSValue(t1, t2, cfr, t3, 4, t0)
    dispatch(5)

_llint_op_resolve:
.llint_op_resolve_local:
    traceExecution()
    getResolveOperation(3, t0)
    btpz t0, .noInstructions
    loadis ResolveOperation::m_operation[t0], t1
    bineq t1, ResolveOperationSkipScopes, .notSkipScopes
    resolveScopedVarBody(t0)
    dispatch(5)
.notSkipScopes:
    bineq t1, ResolveOperationGetAndReturnGlobalVar, .notGetAndReturnGlobalVar
    loadp ResolveOperation::m_registerAddress[t0], t0
    loadisFromInstruction(1, t1)
    moveJSValueFromSlot(t0, cfr, t1, 4, t3)
    dispatch(5)
.notGetAndReturnGlobalVar:

.noInstructions:
    callSlowPath(_llint_slow_path_resolve)
    dispatch(5)

_llint_op_resolve_base_to_global:
    traceExecution()
    loadp CodeBlock[cfr], t1
    loadp CodeBlock::m_globalObject[t1], t1
    loadisFromInstruction(1, t3)
    if JSVALUE64
        moveJSValueFromRegister(t1, cfr, t3, 6)
    else
        move CellTag, t2
        moveJSValueFromRegisters(t2, t1, cfr, t3, 6)
    end
    dispatch(7)

_llint_op_resolve_base_to_global_dynamic:
    jmp _llint_op_resolve_base

_llint_op_resolve_base_to_scope:
    traceExecution()
    getResolveOperation(4, t0)
    # First ResolveOperation is to skip scope chain nodes
    getScope(macro(dest)
                 loadp ScopeChain + PayloadOffset[cfr], dest
             end,
             ResolveOperation::m_scopesToSkip[t0], t1, t2)
    loadisFromInstruction(1, t3)
    if JSVALUE64
        moveJSValueFromRegister(t1, cfr, t3, 6)
    else
        move CellTag, t2
        moveJSValueFromRegisters(t2, t1, cfr, t3, 6)
    end
    dispatch(7)

_llint_op_resolve_base_to_scope_with_top_scope_check:
    traceExecution()
    getResolveOperation(4, t0)
    # First ResolveOperation tells us what register to check
    loadis ResolveOperation::m_activationRegister[t0], t1

    loadp PayloadOffset[cfr, t1, 8], t1

    getScope(macro(dest)
                 btpz t1, .scopeChainNotCreated
                 loadp JSScope::m_next[t1], dest
                 jmp .done
             .scopeChainNotCreated:
                 loadp ScopeChain + PayloadOffset[cfr], dest
             .done:
             end,
             # Second ResolveOperation tells us how many more nodes to skip
             ResolveOperation::m_scopesToSkip + sizeof ResolveOperation[t0], t1, t2)

    loadisFromInstruction(1, t3)
    if JSVALUE64
        moveJSValueFromRegister(t1, cfr, t3, 6)
    else
        move CellTag, t2
        moveJSValueFromRegisters(t2, t1, cfr, t3, 6)
    end
    dispatch(7)

_llint_op_resolve_base:
    traceExecution()
    callSlowPath(_llint_slow_path_resolve_base)
    dispatch(7)

macro interpretResolveWithBase(opcodeLength, slowPath)
    traceExecution()
    getResolveOperation(4, t0)
    btpz t0, .slowPath

    loadp ScopeChain[cfr], t3
    # Get the base
    loadis ResolveOperation::m_operation[t0], t2

    bineq t2, ResolveOperationSkipScopes, .notSkipScopes
    getScope(macro(dest) move t3, dest end,
             ResolveOperation::m_scopesToSkip[t0], t1, t2)
    move t1, t3
    addp sizeof ResolveOperation, t0, t0
    jmp .haveCorrectScope

.notSkipScopes:

    bineq t2, ResolveOperationSkipTopScopeNode, .notSkipTopScopeNode
    loadis ResolveOperation::m_activationRegister[t0], t1
    loadp PayloadOffset[cfr, t1, 8], t1

    getScope(macro(dest)
                 btpz t1, .scopeChainNotCreated
                 loadp JSScope::m_next[t1], dest
                 jmp .done
             .scopeChainNotCreated:
                 loadp ScopeChain + PayloadOffset[cfr], dest
             .done:
             end,
             # Second ResolveOperation tells us how many more nodes to skip
             sizeof ResolveOperation + ResolveOperation::m_scopesToSkip[t0], t1, t2)
    move t1, t3
    # We've handled two opcodes here
    addp 2 * sizeof ResolveOperation, t0, t0

.notSkipTopScopeNode:

.haveCorrectScope:

    # t3 now contains the correct Scope
    # t0 contains a pointer to the current ResolveOperation

    loadis ResolveOperation::m_operation[t0], t2
    # t2 contains the next instruction

    loadisFromInstruction(1, t1)
    # t1 now contains the index for the base register

    bineq t2, ResolveOperationSetBaseToScope, .notSetBaseToScope
    if JSVALUE64
        storeq t3, [cfr, t1, 8]
    else
        storei t3, PayloadOffset[cfr, t1, 8]
        storei CellTag, TagOffset[cfr, t1, 8]
    end
    jmp .haveSetBase

.notSetBaseToScope:

    bineq t2, ResolveOperationSetBaseToUndefined, .notSetBaseToUndefined
    if JSVALUE64
        storeq ValueUndefined, [cfr, t1, 8]
    else
        storei 0, PayloadOffset[cfr, t1, 8]
        storei UndefinedTag, TagOffset[cfr, t1, 8]
    end
    jmp .haveSetBase

.notSetBaseToUndefined:
    bineq t2, ResolveOperationSetBaseToGlobal, .slowPath
    loadp JSCell::m_structure[t3], t2
    loadp Structure::m_globalObject[t2], t2
    if JSVALUE64
        storeq t2, [cfr, t1, 8]
    else
        storei t2, PayloadOffset[cfr, t1, 8]
        storei CellTag, TagOffset[cfr, t1, 8]
    end

.haveSetBase:

    # Get the value

    # Load the operation into t2
    loadis ResolveOperation::m_operation + sizeof ResolveOperation[t0], t2

    # Load the index for the value register into t1
    loadisFromInstruction(2, t1)

    bineq t2, ResolveOperationGetAndReturnScopedVar, .notGetAndReturnScopedVar
    loadp JSVariableObject::m_registers[t3], t3 # t3 now contains the activation registers

    # Second ResolveOperation tells us what offset to use
    loadis ResolveOperation::m_offset + sizeof ResolveOperation[t0], t2
    moveJSValue(t3, t2, cfr, t1, opcodeLength - 1, t0)
    dispatch(opcodeLength)

.notGetAndReturnScopedVar:
    bineq t2, ResolveOperationGetAndReturnGlobalProperty, .slowPath
    callSlowPath(slowPath)
    dispatch(opcodeLength)

.slowPath:
    callSlowPath(slowPath)
    dispatch(opcodeLength)
end

_llint_op_resolve_with_base:
    interpretResolveWithBase(7, _llint_slow_path_resolve_with_base)


_llint_op_resolve_with_this:
    interpretResolveWithBase(6, _llint_slow_path_resolve_with_this)


macro withInlineStorage(object, propertyStorage, continuation)
    # Indicate that the object is the property storage, and that the
    # property storage register is unused.
    continuation(object, propertyStorage)
end

macro withOutOfLineStorage(object, propertyStorage, continuation)
    loadp JSObject::m_butterfly[object], propertyStorage
    # Indicate that the propertyStorage register now points to the
    # property storage, and that the object register may be reused
    # if the object pointer is not needed anymore.
    continuation(propertyStorage, object)
end


_llint_op_del_by_id:
    traceExecution()
    callSlowPath(_llint_slow_path_del_by_id)
    dispatch(4)


_llint_op_del_by_val:
    traceExecution()
    callSlowPath(_llint_slow_path_del_by_val)
    dispatch(4)


_llint_op_put_by_index:
    traceExecution()
    callSlowPath(_llint_slow_path_put_by_index)
    dispatch(4)


_llint_op_put_getter_setter:
    traceExecution()
    callSlowPath(_llint_slow_path_put_getter_setter)
    dispatch(5)


_llint_op_jtrue:
    traceExecution()
    jumpTrueOrFalse(
        macro (value, target) btinz value, target end,
        _llint_slow_path_jtrue)


_llint_op_jfalse:
    traceExecution()
    jumpTrueOrFalse(
        macro (value, target) btiz value, target end,
        _llint_slow_path_jfalse)


_llint_op_jless:
    traceExecution()
    compare(
        macro (left, right, target) bilt left, right, target end,
        macro (left, right, target) bdlt left, right, target end,
        _llint_slow_path_jless)


_llint_op_jnless:
    traceExecution()
    compare(
        macro (left, right, target) bigteq left, right, target end,
        macro (left, right, target) bdgtequn left, right, target end,
        _llint_slow_path_jnless)


_llint_op_jgreater:
    traceExecution()
    compare(
        macro (left, right, target) bigt left, right, target end,
        macro (left, right, target) bdgt left, right, target end,
        _llint_slow_path_jgreater)


_llint_op_jngreater:
    traceExecution()
    compare(
        macro (left, right, target) bilteq left, right, target end,
        macro (left, right, target) bdltequn left, right, target end,
        _llint_slow_path_jngreater)


_llint_op_jlesseq:
    traceExecution()
    compare(
        macro (left, right, target) bilteq left, right, target end,
        macro (left, right, target) bdlteq left, right, target end,
        _llint_slow_path_jlesseq)


_llint_op_jnlesseq:
    traceExecution()
    compare(
        macro (left, right, target) bigt left, right, target end,
        macro (left, right, target) bdgtun left, right, target end,
        _llint_slow_path_jnlesseq)


_llint_op_jgreatereq:
    traceExecution()
    compare(
        macro (left, right, target) bigteq left, right, target end,
        macro (left, right, target) bdgteq left, right, target end,
        _llint_slow_path_jgreatereq)


_llint_op_jngreatereq:
    traceExecution()
    compare(
        macro (left, right, target) bilt left, right, target end,
        macro (left, right, target) bdltun left, right, target end,
        _llint_slow_path_jngreatereq)


_llint_op_loop_hint:
    traceExecution()
    loadp JITStackFrame::vm[sp], t1
    loadb VM::watchdog+Watchdog::m_timerDidFire[t1], t0
    btbnz t0, .handleWatchdogTimer
.afterWatchdogTimerCheck:
    checkSwitchToJITForLoop()
    dispatch(1)
.handleWatchdogTimer:
    callWatchdogTimerHandler(.throwHandler)
    jmp .afterWatchdogTimerCheck
.throwHandler:
    jmp _llint_throw_from_slow_path_trampoline

_llint_op_switch_string:
    traceExecution()
    callSlowPath(_llint_slow_path_switch_string)
    dispatch(0)


_llint_op_new_func_exp:
    traceExecution()
    callSlowPath(_llint_slow_path_new_func_exp)
    dispatch(3)


_llint_op_call:
    traceExecution()
    arrayProfileForCall()
    doCall(_llint_slow_path_call)


_llint_op_construct:
    traceExecution()
    doCall(_llint_slow_path_construct)


_llint_op_call_varargs:
    traceExecution()
    slowPathForCall(6, _llint_slow_path_call_varargs)


_llint_op_call_eval:
    traceExecution()

    # Eval is executed in one of two modes:
    #
    # 1) We find that we're really invoking eval() in which case the
    #    execution is performed entirely inside the slow_path, and it
    #    returns the PC of a function that just returns the return value
    #    that the eval returned.
    #
    # 2) We find that we're invoking something called eval() that is not
    #    the real eval. Then the slow_path returns the PC of the thing to
    #    call, and we call it.
    #
    # This allows us to handle two cases, which would require a total of
    # up to four pieces of state that cannot be easily packed into two
    # registers (C functions can return up to two registers, easily):
    #
    # - The call frame register. This may or may not have been modified
    #   by the slow_path, but the convention is that it returns it. It's not
    #   totally clear if that's necessary, since the cfr is callee save.
    #   But that's our style in this here interpreter so we stick with it.
    #
    # - A bit to say if the slow_path successfully executed the eval and has
    #   the return value, or did not execute the eval but has a PC for us
    #   to call.
    #
    # - Either:
    #   - The JS return value (two registers), or
    #
    #   - The PC to call.
    #
    # It turns out to be easier to just always have this return the cfr
    # and a PC to call, and that PC may be a dummy thunk that just
    # returns the JS value that the eval returned.

    slowPathForCall(4, _llint_slow_path_call_eval)
1132 | ||
1133 | ||
1134 | _llint_generic_return_point: | |
1135 | dispatchAfterCall() | |
1136 | ||
1137 | ||
1138 | _llint_op_strcat: | |
1139 | traceExecution() | |
1140 | callSlowPath(_llint_slow_path_strcat) | |
1141 | dispatch(4) | |
1142 | ||
1143 | ||
6fe7ccc8 A |
1144 | _llint_op_get_pnames: |
1145 | traceExecution() | |
1146 | callSlowPath(_llint_slow_path_get_pnames) | |
1147 | dispatch(0) # The slow_path either advances the PC or jumps us to somewhere else. | |
1148 | ||
1149 | ||
93a37866 | 1150 | _llint_op_push_with_scope: |
6fe7ccc8 | 1151 | traceExecution() |
93a37866 | 1152 | callSlowPath(_llint_slow_path_push_with_scope) |
6fe7ccc8 A |
1153 | dispatch(2) |
1154 | ||
1155 | ||
1156 | _llint_op_pop_scope: | |
1157 | traceExecution() | |
1158 | callSlowPath(_llint_slow_path_pop_scope) | |
1159 | dispatch(1) | |
1160 | ||
1161 | ||
93a37866 | 1162 | _llint_op_push_name_scope: |
6fe7ccc8 | 1163 | traceExecution() |
93a37866 | 1164 | callSlowPath(_llint_slow_path_push_name_scope) |
6fe7ccc8 A |
1165 | dispatch(4) |
1166 | ||
1167 | ||
1168 | _llint_op_throw: | |
1169 | traceExecution() | |
1170 | callSlowPath(_llint_slow_path_throw) | |
1171 | dispatch(2) | |
1172 | ||
1173 | ||
93a37866 | 1174 | _llint_op_throw_static_error: |
6fe7ccc8 | 1175 | traceExecution() |
93a37866 A |
1176 | callSlowPath(_llint_slow_path_throw_static_error) |
1177 | dispatch(3) | |
6fe7ccc8 A |
1178 | |
1179 | ||
1180 | _llint_op_profile_will_call: | |
1181 | traceExecution() | |
6fe7ccc8 | 1182 | callSlowPath(_llint_slow_path_profile_will_call) |
6fe7ccc8 A |
1183 | dispatch(2) |
1184 | ||
1185 | ||
1186 | _llint_op_profile_did_call: | |
1187 | traceExecution() | |
6fe7ccc8 | 1188 | callSlowPath(_llint_slow_path_profile_did_call) |
6fe7ccc8 A |
1189 | dispatch(2) |
1190 | ||
1191 | ||
1192 | _llint_op_debug: | |
1193 | traceExecution() | |
1194 | callSlowPath(_llint_slow_path_debug) | |
93a37866 | 1195 | dispatch(5) |
6fe7ccc8 A |
1196 | |
1197 | ||
1198 | _llint_native_call_trampoline: | |
1199 | nativeCallTrampoline(NativeExecutable::m_function) | |
1200 | ||
1201 | ||
1202 | _llint_native_construct_trampoline: | |
1203 | nativeCallTrampoline(NativeExecutable::m_constructor) | |
1204 | ||
1205 | ||
1206 | # Lastly, make sure that we can link even though we don't support all opcodes. | |
1207 | # These opcodes should never arise when using LLInt or either JIT. We assert | |
1208 | # as much. | |
1209 | ||
macro notSupported()
    if ASSERT_ENABLED
        crash()
    else
        # We should use whatever the smallest possible instruction is, just to
        # ensure that there is a gap between instruction labels. If multiple
        # smallest instructions exist, we should pick the one that is most
        # likely to result in execution being halted. Currently that is the break
        # instruction on all architectures we're interested in. (Break is int3
        # on Intel, which is 1 byte, and bkpt on ARMv7, which is 2 bytes.)
        break
    end
end
1223 | ||
6fe7ccc8 A |
1224 | _llint_op_get_by_id_chain: |
1225 | notSupported() | |
1226 | ||
1227 | _llint_op_get_by_id_custom_chain: | |
1228 | notSupported() | |
1229 | ||
1230 | _llint_op_get_by_id_custom_proto: | |
1231 | notSupported() | |
1232 | ||
1233 | _llint_op_get_by_id_custom_self: | |
1234 | notSupported() | |
1235 | ||
1236 | _llint_op_get_by_id_generic: | |
1237 | notSupported() | |
1238 | ||
1239 | _llint_op_get_by_id_getter_chain: | |
1240 | notSupported() | |
1241 | ||
1242 | _llint_op_get_by_id_getter_proto: | |
1243 | notSupported() | |
1244 | ||
1245 | _llint_op_get_by_id_getter_self: | |
1246 | notSupported() | |
1247 | ||
1248 | _llint_op_get_by_id_proto: | |
1249 | notSupported() | |
1250 | ||
1251 | _llint_op_get_by_id_self: | |
1252 | notSupported() | |
1253 | ||
1254 | _llint_op_get_string_length: | |
1255 | notSupported() | |
1256 | ||
1257 | _llint_op_put_by_id_generic: | |
1258 | notSupported() | |
1259 | ||
1260 | _llint_op_put_by_id_replace: | |
1261 | notSupported() | |
1262 | ||
1263 | _llint_op_put_by_id_transition: | |
1264 | notSupported() | |
1265 | ||
93a37866 A |
1266 | _llint_op_init_global_const_nop: |
1267 | dispatch(5) | |
6fe7ccc8 A |
1268 | |
1269 | # Indicate the end of LLInt. | |
1270 | _llint_end: | |
1271 | crash() | |
1272 |