]> git.saurik.com Git - apple/javascriptcore.git/blame - offlineasm/arm64.rb
JavaScriptCore-7600.1.4.15.12.tar.gz
[apple/javascriptcore.git] / offlineasm / arm64.rb
CommitLineData
81345200 1# Copyright (C) 2011, 2012, 2014 Apple Inc. All rights reserved.
93a37866
A
2#
3# Redistribution and use in source and binary forms, with or without
4# modification, are permitted provided that the following conditions
5# are met:
6# 1. Redistributions of source code must retain the above copyright
7# notice, this list of conditions and the following disclaimer.
8# 2. Redistributions in binary form must reproduce the above copyright
9# notice, this list of conditions and the following disclaimer in the
10# documentation and/or other materials provided with the distribution.
11#
12# THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
13# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
14# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
15# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
16# BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
17# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
18# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
19# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
20# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
21# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
22# THE POSSIBILITY OF SUCH DAMAGE.
23
24require "ast"
25require "opt"
26require "risc"
93a37866
A
27
28# Naming conventions:
29#
30# x<number> => GPR. This is both the generic name of the register, and the name used
31# to indicate that the register is used in 64-bit mode.
32# w<number> => GPR in 32-bit mode. This is the low 32-bits of the GPR. If it is
33# mutated then the high 32-bit part of the register is zero filled.
34# q<number> => FPR. This is the generic name of the register.
35# d<number> => FPR used as an IEEE 64-bit binary floating point number (i.e. double).
36#
37# GPR conventions, to match the baseline JIT:
38#
39# x0 => return value, cached result, first argument, t0, a0, r0
40# x1 => t1, a1, r1
81345200
A
41# x2 => t2, a2
42# x3 => a3
43# x5 => t4
44# x6 => t6
93a37866 45# x9 => (nonArgGPR1 in baseline)
93a37866
A
46# x13 => scratch (unused in baseline)
47# x16 => scratch
48# x17 => scratch
49# x23 => t3
81345200 50# x24 => t5
93a37866
A
51# x27 => csr1 (tagTypeNumber)
52# x28 => csr2 (tagMask)
81345200 53# x29 => cfr
93a37866
A
54# sp => sp
55# lr => lr
56#
57# FPR conventions, to match the baseline JIT:
58#
59# q0 => ft0
60# q1 => ft1
61# q2 => ft2
62# q3 => ft3
63# q4 => ft4 (unused in baseline)
64# q5 => ft5 (unused in baseline)
65# q31 => scratch
66
# Map a generic GPR name ("x<number>") to the concrete assembly register name
# for the requested access kind: "w<number>" selects the 32-bit view (:int),
# "x<number>" the 64-bit view (:ptr). Raises on any other kind or name.
def arm64GPRName(name, kind)
    raise "bad GPR name #{name}" unless name.start_with?("x")
    suffix = name[1..-1]
    prefix =
        case kind
        when :int then "w"
        when :ptr then "x"
        else raise "Wrong kind: #{kind}"
        end
    prefix + suffix
end
79
# Map a generic FPR name ("q<number>") to its double-precision assembly name
# ("d<number>"). Only :double access is supported by the backend.
def arm64FPRName(name, kind)
    raise "bad FPR kind #{kind}" if kind != :double
    raise "bad FPR name #{name}" if name !~ /^q/
    name.sub(/^q/, "d")
end
85
class SpecialRegister
    # Render this scratch register as an assembly operand. GPR scratches are
    # named "x…", FPR scratches "q…"; anything else is a programming error.
    def arm64Operand(kind)
        if @name.start_with?("x")
            arm64GPRName(@name, kind)
        elsif @name.start_with?("q")
            arm64FPRName(@name, kind)
        else
            raise "Bad name: #{@name}"
        end
    end
end
98
# Scratch registers handed to assignRegistersToTemporaries: per the register
# conventions above, x16/x17/x13 and q31 are unused by the baseline JIT.
ARM64_EXTRA_GPRS = [SpecialRegister.new("x16"), SpecialRegister.new("x17"), SpecialRegister.new("x13")]
ARM64_EXTRA_FPRS = [SpecialRegister.new("q31")]
101
class RegisterID
    # Offlineasm register name -> underlying ARM64 GPR, matching the register
    # conventions documented at the top of this file.
    ARM64_GPR_FOR_NAME = {
        't0' => 'x0', 'a0' => 'x0', 'r0' => 'x0',
        't1' => 'x1', 'a1' => 'x1', 'r1' => 'x1',
        't2' => 'x2', 'a2' => 'x2',
        'a3' => 'x3',
        't3' => 'x23',
        't4' => 'x5',
        't5' => 'x24',
        't6' => 'x6',
        't7' => 'x7',
        'cfr' => 'x29',
        'csr1' => 'x27',
        'csr2' => 'x28',
    }

    # Render this register as an assembly operand of the given kind.
    # sp and lr are special-cased; everything else goes through the table.
    def arm64Operand(kind)
        case @name
        when 'sp'
            'sp'
        when 'lr'
            'x30'
        else
            gpr = ARM64_GPR_FOR_NAME[@name]
            raise "Bad register name #{@name} at #{codeOriginString}" unless gpr
            arm64GPRName(gpr, kind)
        end
    end
end
138
class FPRegisterID
    # Map an offlineasm FPR (ft0..ft5) to its ARM64 register, per the FPR
    # conventions documented at the top of this file.
    #
    # BUG FIX: the else arm previously *returned* the error string instead of
    # raising it, so an unknown FPR name would be silently emitted as an
    # operand in the generated assembly. It now raises, matching RegisterID.
    def arm64Operand(kind)
        case @name
        when 'ft0'
            arm64FPRName('q0', kind)
        when 'ft1'
            arm64FPRName('q1', kind)
        when 'ft2'
            arm64FPRName('q2', kind)
        when 'ft3'
            arm64FPRName('q3', kind)
        when 'ft4'
            arm64FPRName('q4', kind)
        when 'ft5'
            arm64FPRName('q5', kind)
        else
            raise "Bad register name #{@name} at #{codeOriginString}"
        end
    end
end
158
class Immediate
    # Immediate operand. ARM64 arithmetic immediates are unsigned 12-bit
    # values (0..4095); anything else must have been lowered away earlier.
    def arm64Operand(kind)
        unless (0..4095).include? value
            raise "Invalid immediate #{value} at #{codeOriginString}"
        end
        "##{value}"
    end
end
165
class Address
    # base + immediate-offset address. The accepted offset range matches what
    # the lowering phase allows (-255..4095): negative offsets use the
    # unscaled-offset instruction forms, positive ones the scaled forms.
    def arm64Operand(kind)
        unless (-255..4095).include? offset.value
            raise "Invalid offset #{offset.value} at #{codeOriginString}"
        end
        "[#{base.arm64Operand(:ptr)}, ##{offset.value}]"
    end

    # Materialize the effective address (base + offset) into destination.
    def arm64EmitLea(destination, kind)
        $asm.puts "add #{destination.arm64Operand(kind)}, #{base.arm64Operand(kind)}, ##{offset.value}"
    end
end
176
class BaseIndex
    # base + (index << scaleShift) addressing. ARM64 cannot also fold in a
    # displacement, so any nonzero offset must already have been lowered into
    # a separate add by riscLowerMalformedAddresses.
    def arm64Operand(kind)
        unless offset.value.zero?
            raise "Invalid offset #{offset.value} at #{codeOriginString}"
        end
        "[#{base.arm64Operand(:ptr)}, #{index.arm64Operand(:ptr)}, lsl ##{scaleShift}]"
    end

    # Materialize base + (index << scaleShift) into destination.
    def arm64EmitLea(destination, kind)
        $asm.puts "add #{destination.arm64Operand(kind)}, #{base.arm64Operand(kind)}, #{index.arm64Operand(kind)}, lsl ##{scaleShift}"
    end
end
187
class AbsoluteAddress
    # Absolute addresses cannot be encoded directly on ARM64; the lowering
    # phases must have converted them before emission, so reaching this is
    # always an error.
    def arm64Operand(kind)
        raise RuntimeError, "Unconverted absolute address #{address.value} at #{codeOriginString}"
    end
end
193
81345200 194# FIXME: We could support AbsoluteAddress for lea, but we don't.
93a37866
A
195
196#
197# Actual lowering code follows.
198#
199
class Sequence
    # Run the ordered pipeline of RISC lowering passes that turns generic
    # offlineasm into a form every remaining instruction of which maps
    # directly onto ARM64. Pass order matters: address/immediate legalization
    # must run before temporaries are assigned to the scratch registers.
    def getModifiedListARM64
        result = @list
        result = riscLowerNot(result)
        result = riscLowerSimpleBranchOps(result)
        result = riscLowerHardBranchOps64(result)
        result = riscLowerShiftOps(result)
        # First legalization pass: decide which address forms each opcode can
        # encode. The block returns true if the address is representable.
        result = riscLowerMalformedAddresses(result) {
            | node, address |
            # size = the access width implied by the opcode, used to validate
            # BaseIndex scales below.
            case node.opcode
            when "loadb", "loadbs", "storeb", /^bb/, /^btb/, /^cb/, /^tb/
                size = 1
            when "loadh", "loadhs"
                size = 2
            when "loadi", "loadis", "storei", "addi", "andi", "lshifti", "muli", "negi",
                "noti", "ori", "rshifti", "urshifti", "subi", "xori", /^bi/, /^bti/,
                /^ci/, /^ti/, "addis", "subis", "mulis", "smulli", "leai"
                size = 4
            when "loadp", "storep", "loadq", "storeq", "loadd", "stored", "lshiftp", "lshiftq", "negp", "negq", "rshiftp", "rshiftq",
                "urshiftp", "urshiftq", "addp", "addq", "mulp", "mulq", "andp", "andq", "orp", "orq", "subp", "subq", "xorp", "xorq", "addd",
                "divd", "subd", "muld", "sqrtd", /^bp/, /^bq/, /^btp/, /^btq/, /^cp/, /^cq/, /^tp/, /^tq/, /^bd/,
                "jmp", "call", "leap", "leaq"
                size = 8
            else
                raise "Bad instruction #{node.opcode} for heap access at #{node.codeOriginString}"
            end

            if address.is_a? BaseIndex
                # [base, index, lsl #shift] has no displacement field, and the
                # scale must be 1 or match the access size (lea is exempt).
                address.offset.value == 0 and
                    (node.opcode =~ /^lea/ or address.scale == 1 or address.scale == size)
            elsif address.is_a? Address
                # ldur/ldr cover signed-9-bit through unsigned-12-bit offsets.
                (-255..4095).include? address.offset.value
            else
                false
            end
        }
        result = riscLowerMisplacedImmediates(result, ["storeb", "storei", "storep", "storeq"])
        result = riscLowerMalformedImmediates(result, 0..4095)
        result = riscLowerMisplacedAddresses(result)
        # Second legalization pass over the now-normalized addresses: stores
        # have no negative-offset (stur is not emitted here) form we use.
        result = riscLowerMalformedAddresses(result) {
            | node, address |
            case node.opcode
            when /^load/
                true
            when /^store/
                not (address.is_a? Address and address.offset.value < 0)
            when /^lea/
                true
            else
                raise "Bad instruction #{node.opcode} for heap access at #{node.codeOriginString}"
            end
        }
        result = riscLowerTest(result)
        # Finally bind any leftover temporaries to the dedicated scratch regs.
        result = assignRegistersToTemporaries(result, :gpr, ARM64_EXTRA_GPRS)
        result = assignRegistersToTemporaries(result, :fpr, ARM64_EXTRA_FPRS)
        return result
    end
end
258
# Render a list of operands as a comma-separated assembly operand string.
# kinds may be a single kind applied to every operand, or a parallel array
# (one kind per operand), in which case the lengths must agree.
def arm64Operands(operands, kinds)
    kinds = operands.map { kinds } unless kinds.is_a? Array
    raise "Mismatched operand lists: #{operands.inspect} and #{kinds.inspect}" if operands.size != kinds.size
    operands.zip(kinds).map { |operand, kind| operand.arm64Operand(kind) }.join(', ')
end
270
# Like arm64Operands, but rotates the last operand to the front: offlineasm
# puts the destination last, while ARM64 syntax puts it first. A kinds array
# is rotated the same way so each operand keeps its kind.
def arm64FlippedOperands(operands, kinds)
    kinds = [kinds[-1], *kinds[0..-2]] if kinds.is_a? Array
    arm64Operands([operands[-1], *operands[0..-2]], kinds)
end
277
# TAC = three address code.
# Render operands for a three-address instruction. A two-operand offlineasm
# form (src, dest) is expanded to dest, dest, src by repeating the
# destination as the first source.
def arm64TACOperands(operands, kind)
    case operands.size
    when 3
        arm64FlippedOperands(operands, kind)
    when 2
        "#{operands[1].arm64Operand(kind)}, #{arm64FlippedOperands(operands, kind)}"
    else
        raise
    end
end
288
# Emit an add/adds (opcode), handling both two- and three-operand offlineasm
# forms and eliding additions of immediate 0 when the opcode does not set
# flags (no "s" suffix).
#
# BUG FIX: the three-operand zero-immediate test previously read an undefined
# local `flag` (`flag !~ /s$/`), raising NameError whenever that path was hit.
# The two-operand path below correctly tests `opcode`; this now matches it.
def emitARM64Add(opcode, operands, kind)
    if operands.size == 3
        raise unless operands[1].register?
        raise unless operands[2].register?

        if operands[0].immediate?
            # add dst, src, #0 is just a move (or nothing if dst == src),
            # unless we need the flags side effect of "adds".
            if operands[0].value == 0 and opcode !~ /s$/
                unless operands[1] == operands[2]
                    $asm.puts "mov #{arm64FlippedOperands(operands[1..2], kind)}"
                end
            else
                $asm.puts "#{opcode} #{arm64Operands(operands.reverse, kind)}"
            end
            return
        end

        raise unless operands[0].register?
        $asm.puts "#{opcode} #{arm64FlippedOperands(operands, kind)}"
        return
    end

    raise unless operands.size == 2

    # Two-operand add of 0 without flags is a complete no-op.
    if operands[0].immediate? and operands[0].value == 0 and opcode !~ /s$/
        return
    end

    $asm.puts "#{opcode} #{arm64TACOperands(operands, kind)}"
end
318
# Emit opcode with the operands in the order given (no destination-first
# flip) — used for stores, compares, and indirect jumps/calls.
def emitARM64Unflipped(opcode, operands, kind)
    $asm.puts "#{opcode} #{arm64Operands(operands, kind)}"
end
322
# Emit a three-address instruction; two-operand offlineasm forms are expanded
# by arm64TACOperands to repeat the destination as the first source.
def emitARM64TAC(opcode, operands, kind)
    $asm.puts "#{opcode} #{arm64TACOperands(operands, kind)}"
end
326
# Emit opcode with the destination (offlineasm's last operand) moved to the
# front, as ARM64 syntax requires.
def emitARM64(opcode, operands, kind)
    $asm.puts "#{opcode} #{arm64FlippedOperands(operands, kind)}"
end
330
# Emit a load/store, choosing between the scaled-offset form (opcode, e.g.
# "ldr") and the unscaled-offset form (opcodeNegativeOffset, e.g. "ldur"):
# negative Address offsets can only be encoded by the latter.
def emitARM64Access(opcode, opcodeNegativeOffset, register, memory, kind)
    mnemonic =
        if memory.is_a? Address and memory.offset.value < 0
            opcodeNegativeOffset
        else
            opcode
        end
    $asm.puts "#{mnemonic} #{register.arm64Operand(kind)}, #{memory.arm64Operand(kind)}"
end
339
# Emit a shift. Register-amount shifts use opcodeRegs (lslv/asrv/lsrv);
# immediate-amount shifts are encoded as bitfield moves via opcodeImmediate
# (ubfm/sbfm), with the caller's block mapping the shift amount to the
# [immr, imms] field pair.
def emitARM64Shift(opcodeRegs, opcodeImmediate, operands, kind)
    if operands.size == 3 and operands[1].immediate?
        dest, src, amount = operands[2], operands[0], operands[1].value
    elsif operands.size == 2 and operands[0].immediate?
        dest, src, amount = operands[1], operands[1], operands[0].value
    else
        return emitARM64TAC(opcodeRegs, operands, kind)
    end
    immr, imms = yield amount
    $asm.puts "#{opcodeImmediate} #{dest.arm64Operand(kind)}, #{src.arm64Operand(kind)}, ##{immr}, ##{imms}"
end
355
# Emit a compare-then-branch sequence: opcode over all operands except the
# last (which is the target label), then a conditional branch to that label.
def emitARM64Branch(opcode, operands, kind, branchOpcode)
    emitARM64Unflipped(opcode, operands[0..-2], kind)
    $asm.puts "#{branchOpcode} #{operands[-1].asmLabel}"
end
360
# Materialize a comparison result (0 or 1) into the last operand.
# "subs wzr/xzr, a, b" is a compare (result discarded into the zero
# register); csinc with the INVERTED condition compareCode then yields
# wzr+1 == 1 when the original condition holds, and wzr == 0 otherwise.
# Note arm64GPRName('xzr', kind) picks wzr for :int, xzr for :ptr.
def emitARM64Compare(operands, kind, compareCode)
    emitARM64Unflipped("subs #{arm64GPRName('xzr', kind)}, ", operands[0..-2], kind)
    $asm.puts "csinc #{operands[-1].arm64Operand(:int)}, wzr, wzr, #{compareCode}"
end
365
# Load a 64-bit immediate into target using at most four instructions:
# movz/movn establishes the first significant 16-bit chunk (movn for negative
# values, so the untouched chunks default to all-ones), then movk patches in
# each remaining chunk. Chunks equal to the sign-filler are skipped, except
# that shift 0 is always emitted if nothing else was.
def emitARM64MoveImmediate(value, target)
    negative = value < 0
    filler = negative ? 0xffff : 0
    emittedFirst = false
    48.step(0, -16) do |shift|
        chunk = (value >> shift) & 0xffff
        next if chunk == filler && shift != 0
        dest = target.arm64Operand(:ptr)
        if emittedFirst
            $asm.puts "movk #{dest}, ##{chunk}, lsl ##{shift}"
        elsif negative
            $asm.puts "movn #{dest}, ##{(~chunk) & 0xffff}, lsl ##{shift}"
            emittedFirst = true
        else
            $asm.puts "movz #{dest}, ##{chunk}, lsl ##{shift}"
            emittedFirst = true
        end
    end
end
385
class Instruction
    # Lower one offlineasm instruction to ARM64 assembly. By the time this
    # runs, the Sequence lowering passes have normalized addresses,
    # immediates, shifts, and branch forms, so most opcodes map directly onto
    # a single ARM64 instruction. Opcode suffix conventions: i = 32-bit int,
    # p/q = 64-bit pointer/quad, d = double, b = byte, h = halfword.
    def lowerARM64
        $asm.comment codeOriginString
        $asm.annotation annotation if $enableInstrAnnotations

        case opcode
        # --- Integer/pointer arithmetic and bitwise ops ---
        when 'addi'
            emitARM64Add("add", operands, :int)
        when 'addis'
            emitARM64Add("adds", operands, :int)
        when 'addp'
            emitARM64Add("add", operands, :ptr)
        when 'addps'
            emitARM64Add("adds", operands, :ptr)
        when 'addq'
            emitARM64Add("add", operands, :ptr)
        when "andi"
            emitARM64TAC("and", operands, :int)
        when "andp"
            emitARM64TAC("and", operands, :ptr)
        when "andq"
            emitARM64TAC("and", operands, :ptr)
        when "ori"
            emitARM64TAC("orr", operands, :int)
        when "orp"
            emitARM64TAC("orr", operands, :ptr)
        when "orq"
            emitARM64TAC("orr", operands, :ptr)
        when "xori"
            emitARM64TAC("eor", operands, :int)
        when "xorp"
            emitARM64TAC("eor", operands, :ptr)
        when "xorq"
            emitARM64TAC("eor", operands, :ptr)
        # --- Shifts ---
        # Immediate shifts become bitfield moves (ubfm/sbfm); the blocks
        # compute the [immr, imms] fields for the given shift amount.
        when "lshifti"
            emitARM64Shift("lslv", "ubfm", operands, :int) {
                | value |
                [32 - value, 31 - value]
            }
        when "lshiftp"
            emitARM64Shift("lslv", "ubfm", operands, :ptr) {
                | value |
                [64 - value, 63 - value]
            }
        when "lshiftq"
            emitARM64Shift("lslv", "ubfm", operands, :ptr) {
                | value |
                [64 - value, 63 - value]
            }
        when "rshifti"
            emitARM64Shift("asrv", "sbfm", operands, :int) {
                | value |
                [value, 31]
            }
        when "rshiftp"
            emitARM64Shift("asrv", "sbfm", operands, :ptr) {
                | value |
                [value, 63]
            }
        when "rshiftq"
            emitARM64Shift("asrv", "sbfm", operands, :ptr) {
                | value |
                [value, 63]
            }
        when "urshifti"
            emitARM64Shift("lsrv", "ubfm", operands, :int) {
                | value |
                [value, 31]
            }
        when "urshiftp"
            emitARM64Shift("lsrv", "ubfm", operands, :ptr) {
                | value |
                [value, 63]
            }
        when "urshiftq"
            emitARM64Shift("lsrv", "ubfm", operands, :ptr) {
                | value |
                [value, 63]
            }
        # Multiply is emitted as madd with the zero register as addend.
        when "muli"
            $asm.puts "madd #{arm64TACOperands(operands, :int)}, wzr"
        when "mulp"
            $asm.puts "madd #{arm64TACOperands(operands, :ptr)}, xzr"
        when "mulq"
            $asm.puts "madd #{arm64TACOperands(operands, :ptr)}, xzr"
        when "subi"
            emitARM64TAC("sub", operands, :int)
        when "subp"
            emitARM64TAC("sub", operands, :ptr)
        when "subq"
            emitARM64TAC("sub", operands, :ptr)
        when "subis"
            emitARM64TAC("subs", operands, :int)
        # Negate = subtract from the zero register.
        when "negi"
            $asm.puts "sub #{operands[0].arm64Operand(:int)}, wzr, #{operands[0].arm64Operand(:int)}"
        when "negp"
            $asm.puts "sub #{operands[0].arm64Operand(:ptr)}, xzr, #{operands[0].arm64Operand(:ptr)}"
        when "negq"
            $asm.puts "sub #{operands[0].arm64Operand(:ptr)}, xzr, #{operands[0].arm64Operand(:ptr)}"
        # --- Loads and stores ---
        # emitARM64Access picks the unscaled (ldur-family) form for negative
        # offsets. Offlineasm order is (address, register) for loads and
        # (register, address) for stores.
        when "loadi"
            emitARM64Access("ldr", "ldur", operands[1], operands[0], :int)
        when "loadis"
            emitARM64Access("ldrsw", "ldursw", operands[1], operands[0], :ptr)
        when "loadp"
            emitARM64Access("ldr", "ldur", operands[1], operands[0], :ptr)
        when "loadq"
            emitARM64Access("ldr", "ldur", operands[1], operands[0], :ptr)
        when "storei"
            emitARM64Unflipped("str", operands, :int)
        when "storep"
            emitARM64Unflipped("str", operands, :ptr)
        when "storeq"
            emitARM64Unflipped("str", operands, :ptr)
        when "loadb"
            emitARM64Access("ldrb", "ldurb", operands[1], operands[0], :int)
        when "loadbs"
            emitARM64Access("ldrsb", "ldursb", operands[1], operands[0], :int)
        when "storeb"
            emitARM64Unflipped("strb", operands, :int)
        when "loadh"
            emitARM64Access("ldrh", "ldurh", operands[1], operands[0], :int)
        when "loadhs"
            emitARM64Access("ldrsh", "ldursh", operands[1], operands[0], :int)
        when "storeh"
            emitARM64Unflipped("strh", operands, :int)
        when "loadd"
            emitARM64Access("ldr", "ldur", operands[1], operands[0], :double)
        when "stored"
            emitARM64Unflipped("str", operands, :double)
        # --- Double-precision arithmetic ---
        when "addd"
            emitARM64TAC("fadd", operands, :double)
        when "divd"
            emitARM64TAC("fdiv", operands, :double)
        when "subd"
            emitARM64TAC("fsub", operands, :double)
        when "muld"
            emitARM64TAC("fmul", operands, :double)
        when "sqrtd"
            emitARM64("fsqrt", operands, :double)
        when "ci2d"
            emitARM64("scvtf", operands, [:int, :double])
        # --- Double-precision branches ---
        # Condition codes are chosen so that NaN (which sets the unordered
        # flag combination on fcmp) takes or skips the branch as the
        # ordered/unordered ("un") variant requires.
        when "bdeq"
            emitARM64Branch("fcmp", operands, :double, "b.eq")
        when "bdneq"
            # Ordered not-equal: must not branch on NaN, so test the overflow
            # (unordered) flag first and skip past the b.ne if it is set.
            emitARM64Unflipped("fcmp", operands[0..1], :double)
            isUnordered = LocalLabel.unique("bdneq")
            $asm.puts "b.vs #{LocalLabelReference.new(codeOrigin, isUnordered).asmLabel}"
            $asm.puts "b.ne #{operands[2].asmLabel}"
            isUnordered.lower("ARM64")
        when "bdgt"
            emitARM64Branch("fcmp", operands, :double, "b.gt")
        when "bdgteq"
            emitARM64Branch("fcmp", operands, :double, "b.ge")
        when "bdlt"
            emitARM64Branch("fcmp", operands, :double, "b.mi")
        when "bdlteq"
            emitARM64Branch("fcmp", operands, :double, "b.ls")
        when "bdequn"
            # Unordered equal: branch on NaN (b.vs) or on equality.
            emitARM64Unflipped("fcmp", operands[0..1], :double)
            $asm.puts "b.vs #{operands[2].asmLabel}"
            $asm.puts "b.eq #{operands[2].asmLabel}"
        when "bdnequn"
            emitARM64Branch("fcmp", operands, :double, "b.ne")
        when "bdgtun"
            emitARM64Branch("fcmp", operands, :double, "b.hi")
        when "bdgtequn"
            emitARM64Branch("fcmp", operands, :double, "b.pl")
        when "bdltun"
            emitARM64Branch("fcmp", operands, :double, "b.lt")
        when "bdltequn"
            emitARM64Branch("fcmp", operands, :double, "b.le")
        when "btd2i"
            # FIXME: May be a good idea to just get rid of this instruction, since the interpreter
            # currently does not use it.
            raise "ARM64 does not support this opcode yet, #{codeOriginString}"
        when "td2i"
            emitARM64("fcvtzs", operands, [:double, :int])
        when "bcd2i"
            # FIXME: Remove this instruction, or use it and implement it. Currently it's not
            # used.
            raise "ARM64 does not support this opcode yet, #{codeOriginString}"
        when "movdz"
            # FIXME: Remove it or support it.
            raise "ARM64 does not support this opcode yet, #{codeOriginString}"
        # --- Stack manipulation (pairs of registers per 16-byte slot) ---
        when "pop"
            operands.each_slice(2) {
                | ops |
                # Note that the operands are in the reverse order of the case for push.
                # This is due to the fact that order matters for pushing and popping, and
                # on platforms that only push/pop one slot at a time they pop their
                # arguments in the reverse order that they were pushed. In order to remain
                # compatible with those platforms we assume here that that's what has been done.

                # So for example, if we did push(A, B, C, D), we would then pop(D, C, B, A).
                # But since the ordering of arguments doesn't change on arm64 between the stp and ldp
                # instructions we need to flip flop the argument positions that were passed to us.
                $asm.puts "ldp #{ops[1].arm64Operand(:ptr)}, #{ops[0].arm64Operand(:ptr)}, [sp], #16"
            }
        when "push"
            operands.each_slice(2) {
                | ops |
                $asm.puts "stp #{ops[0].arm64Operand(:ptr)}, #{ops[1].arm64Operand(:ptr)}, [sp, #-16]!"
            }
        when "popLRAndFP"
            $asm.puts "ldp x29, x30, [sp], #16"
        when "pushLRAndFP"
            $asm.puts "stp x29, x30, [sp, #-16]!"
        when "popCalleeSaves"
            # Mirror image of pushCalleeSaves below.
            $asm.puts "ldp x28, x27, [sp], #16"
            $asm.puts "ldp x26, x25, [sp], #16"
            $asm.puts "ldp x24, x23, [sp], #16"
            $asm.puts "ldp x22, x21, [sp], #16"
            $asm.puts "ldp x20, x19, [sp], #16"
        when "pushCalleeSaves"
            $asm.puts "stp x20, x19, [sp, #-16]!"
            $asm.puts "stp x22, x21, [sp, #-16]!"
            $asm.puts "stp x24, x23, [sp, #-16]!"
            $asm.puts "stp x26, x25, [sp, #-16]!"
            $asm.puts "stp x28, x27, [sp, #-16]!"
        when "move"
            if operands[0].immediate?
                emitARM64MoveImmediate(operands[0].value, operands[1])
            else
                emitARM64("mov", operands, :ptr)
            end
        # Sign/zero extension of 32-bit values to 64-bit.
        when "sxi2p"
            emitARM64("sxtw", operands, [:int, :ptr])
        when "sxi2q"
            emitARM64("sxtw", operands, [:int, :ptr])
        when "zxi2p"
            emitARM64("uxtw", operands, [:int, :ptr])
        when "zxi2q"
            emitARM64("uxtw", operands, [:int, :ptr])
        when "nop"
            $asm.puts "nop"
        # --- Integer branches ---
        # Comparisons against immediate 0 use cbz/cbnz; everything else is a
        # subs into the zero register followed by a conditional branch.
        when "bieq", "bbeq"
            if operands[0].immediate? and operands[0].value == 0
                $asm.puts "cbz #{operands[1].arm64Operand(:int)}, #{operands[2].asmLabel}"
            elsif operands[1].immediate? and operands[1].value == 0
                $asm.puts "cbz #{operands[0].arm64Operand(:int)}, #{operands[2].asmLabel}"
            else
                emitARM64Branch("subs wzr, ", operands, :int, "b.eq")
            end
        when "bpeq"
            if operands[0].immediate? and operands[0].value == 0
                $asm.puts "cbz #{operands[1].arm64Operand(:ptr)}, #{operands[2].asmLabel}"
            elsif operands[1].immediate? and operands[1].value == 0
                $asm.puts "cbz #{operands[0].arm64Operand(:ptr)}, #{operands[2].asmLabel}"
            else
                emitARM64Branch("subs xzr, ", operands, :ptr, "b.eq")
            end
        when "bqeq"
            if operands[0].immediate? and operands[0].value == 0
                $asm.puts "cbz #{operands[1].arm64Operand(:ptr)}, #{operands[2].asmLabel}"
            elsif operands[1].immediate? and operands[1].value == 0
                $asm.puts "cbz #{operands[0].arm64Operand(:ptr)}, #{operands[2].asmLabel}"
            else
                emitARM64Branch("subs xzr, ", operands, :ptr, "b.eq")
            end
        when "bineq", "bbneq"
            if operands[0].immediate? and operands[0].value == 0
                $asm.puts "cbnz #{operands[1].arm64Operand(:int)}, #{operands[2].asmLabel}"
            elsif operands[1].immediate? and operands[1].value == 0
                $asm.puts "cbnz #{operands[0].arm64Operand(:int)}, #{operands[2].asmLabel}"
            else
                emitARM64Branch("subs wzr, ", operands, :int, "b.ne")
            end
        when "bpneq"
            if operands[0].immediate? and operands[0].value == 0
                $asm.puts "cbnz #{operands[1].arm64Operand(:ptr)}, #{operands[2].asmLabel}"
            elsif operands[1].immediate? and operands[1].value == 0
                $asm.puts "cbnz #{operands[0].arm64Operand(:ptr)}, #{operands[2].asmLabel}"
            else
                emitARM64Branch("subs xzr, ", operands, :ptr, "b.ne")
            end
        when "bqneq"
            if operands[0].immediate? and operands[0].value == 0
                $asm.puts "cbnz #{operands[1].arm64Operand(:ptr)}, #{operands[2].asmLabel}"
            elsif operands[1].immediate? and operands[1].value == 0
                $asm.puts "cbnz #{operands[0].arm64Operand(:ptr)}, #{operands[2].asmLabel}"
            else
                emitARM64Branch("subs xzr, ", operands, :ptr, "b.ne")
            end
        # Unsigned comparisons: a = above (hi), aeq (hs), b = below (lo),
        # beq (ls). Signed: gt/ge/lt/le.
        when "bia", "bba"
            emitARM64Branch("subs wzr, ", operands, :int, "b.hi")
        when "bpa"
            emitARM64Branch("subs xzr, ", operands, :ptr, "b.hi")
        when "bqa"
            emitARM64Branch("subs xzr, ", operands, :ptr, "b.hi")
        when "biaeq", "bbaeq"
            emitARM64Branch("subs wzr, ", operands, :int, "b.hs")
        when "bpaeq"
            emitARM64Branch("subs xzr, ", operands, :ptr, "b.hs")
        when "bqaeq"
            emitARM64Branch("subs xzr, ", operands, :ptr, "b.hs")
        when "bib", "bbb"
            emitARM64Branch("subs wzr, ", operands, :int, "b.lo")
        when "bpb"
            emitARM64Branch("subs xzr, ", operands, :ptr, "b.lo")
        when "bqb"
            emitARM64Branch("subs xzr, ", operands, :ptr, "b.lo")
        when "bibeq", "bbbeq"
            emitARM64Branch("subs wzr, ", operands, :int, "b.ls")
        when "bpbeq"
            emitARM64Branch("subs xzr, ", operands, :ptr, "b.ls")
        when "bqbeq"
            emitARM64Branch("subs xzr, ", operands, :ptr, "b.ls")
        when "bigt", "bbgt"
            emitARM64Branch("subs wzr, ", operands, :int, "b.gt")
        when "bpgt"
            emitARM64Branch("subs xzr, ", operands, :ptr, "b.gt")
        when "bqgt"
            emitARM64Branch("subs xzr, ", operands, :ptr, "b.gt")
        when "bigteq", "bbgteq"
            emitARM64Branch("subs wzr, ", operands, :int, "b.ge")
        when "bpgteq"
            emitARM64Branch("subs xzr, ", operands, :ptr, "b.ge")
        when "bqgteq"
            emitARM64Branch("subs xzr, ", operands, :ptr, "b.ge")
        when "bilt", "bblt"
            emitARM64Branch("subs wzr, ", operands, :int, "b.lt")
        when "bplt"
            emitARM64Branch("subs xzr, ", operands, :ptr, "b.lt")
        when "bqlt"
            emitARM64Branch("subs xzr, ", operands, :ptr, "b.lt")
        when "bilteq", "bblteq"
            emitARM64Branch("subs wzr, ", operands, :int, "b.le")
        when "bplteq"
            emitARM64Branch("subs xzr, ", operands, :ptr, "b.le")
        when "bqlteq"
            emitARM64Branch("subs xzr, ", operands, :ptr, "b.le")
        # --- Jumps and calls (direct to a label, or indirect via register) ---
        when "jmp"
            if operands[0].label?
                $asm.puts "b #{operands[0].asmLabel}"
            else
                emitARM64Unflipped("br", operands, :ptr)
            end
        when "call"
            if operands[0].label?
                $asm.puts "bl #{operands[0].asmLabel}"
            else
                emitARM64Unflipped("blr", operands, :ptr)
            end
        when "break"
            $asm.puts "brk \#0"
        when "ret"
            $asm.puts "ret"
        # --- Boolean compares ---
        # emitARM64Compare takes the INVERTED condition code (csinc trick);
        # e.g. cieq passes "ne" to produce 1 when the values are equal.
        when "cieq", "cbeq"
            emitARM64Compare(operands, :int, "ne")
        when "cpeq"
            emitARM64Compare(operands, :ptr, "ne")
        when "cqeq"
            emitARM64Compare(operands, :ptr, "ne")
        when "cineq", "cbneq"
            emitARM64Compare(operands, :int, "eq")
        when "cpneq"
            emitARM64Compare(operands, :ptr, "eq")
        when "cqneq"
            emitARM64Compare(operands, :ptr, "eq")
        when "cia", "cba"
            emitARM64Compare(operands, :int, "ls")
        when "cpa"
            emitARM64Compare(operands, :ptr, "ls")
        when "cqa"
            emitARM64Compare(operands, :ptr, "ls")
        when "ciaeq", "cbaeq"
            emitARM64Compare(operands, :int, "lo")
        when "cpaeq"
            emitARM64Compare(operands, :ptr, "lo")
        when "cqaeq"
            emitARM64Compare(operands, :ptr, "lo")
        when "cib", "cbb"
            emitARM64Compare(operands, :int, "hs")
        when "cpb"
            emitARM64Compare(operands, :ptr, "hs")
        when "cqb"
            emitARM64Compare(operands, :ptr, "hs")
        when "cibeq", "cbbeq"
            emitARM64Compare(operands, :int, "hi")
        when "cpbeq"
            emitARM64Compare(operands, :ptr, "hi")
        when "cqbeq"
            emitARM64Compare(operands, :ptr, "hi")
        when "cilt", "cblt"
            emitARM64Compare(operands, :int, "ge")
        when "cplt"
            emitARM64Compare(operands, :ptr, "ge")
        when "cqlt"
            emitARM64Compare(operands, :ptr, "ge")
        when "cilteq", "cblteq"
            emitARM64Compare(operands, :int, "gt")
        when "cplteq"
            emitARM64Compare(operands, :ptr, "gt")
        when "cqlteq"
            emitARM64Compare(operands, :ptr, "gt")
        when "cigt", "cbgt"
            emitARM64Compare(operands, :int, "le")
        when "cpgt"
            emitARM64Compare(operands, :ptr, "le")
        when "cqgt"
            emitARM64Compare(operands, :ptr, "le")
        when "cigteq", "cbgteq"
            emitARM64Compare(operands, :int, "lt")
        when "cpgteq"
            emitARM64Compare(operands, :ptr, "lt")
        when "cqgteq"
            emitARM64Compare(operands, :ptr, "lt")
        # --- Direct stack-slot access (slot index * 8 bytes) ---
        when "peek"
            $asm.puts "ldr #{operands[1].arm64Operand(:ptr)}, [sp, \##{operands[0].value * 8}]"
        when "poke"
            $asm.puts "str #{operands[1].arm64Operand(:ptr)}, [sp, \##{operands[0].value * 8}]"
        # Bitwise GPR<->FPR moves.
        when "fp2d"
            emitARM64("fmov", operands, [:ptr, :double])
        when "fq2d"
            emitARM64("fmov", operands, [:ptr, :double])
        when "fd2p"
            emitARM64("fmov", operands, [:double, :ptr])
        when "fd2q"
            emitARM64("fmov", operands, [:double, :ptr])
        # Flag branches: overflow, sign, zero, not-zero.
        when "bo"
            $asm.puts "b.vs #{operands[0].asmLabel}"
        when "bs"
            $asm.puts "b.mi #{operands[0].asmLabel}"
        when "bz"
            $asm.puts "b.eq #{operands[0].asmLabel}"
        when "bnz"
            $asm.puts "b.ne #{operands[0].asmLabel}"
        when "leai"
            operands[0].arm64EmitLea(operands[1], :int)
        when "leap"
            operands[0].arm64EmitLea(operands[1], :ptr)
        when "leaq"
            operands[0].arm64EmitLea(operands[1], :ptr)
        # Signed 32x32 -> 64 multiply via smaddl with zero addend.
        when "smulli"
            $asm.puts "smaddl #{operands[2].arm64Operand(:ptr)}, #{operands[0].arm64Operand(:int)}, #{operands[1].arm64Operand(:int)}, xzr"
        when "memfence"
            $asm.puts "dmb sy"
        when "pcrtoaddr"
            $asm.puts "adr #{operands[1].arm64Operand(:ptr)}, #{operands[0].value}"
        else
            # Opcodes not specific to ARM64 (labels, etc.) fall through to the
            # generic lowering.
            lowerDefault
        end
    end
end
830