]> git.saurik.com Git - apple/javascriptcore.git/blame - offlineasm/arm64.rb
JavaScriptCore-1218.0.1.tar.gz
[apple/javascriptcore.git] / offlineasm / arm64.rb
CommitLineData
93a37866
A
1# Copyright (C) 2011, 2012 Apple Inc. All rights reserved.
2#
3# Redistribution and use in source and binary forms, with or without
4# modification, are permitted provided that the following conditions
5# are met:
6# 1. Redistributions of source code must retain the above copyright
7# notice, this list of conditions and the following disclaimer.
8# 2. Redistributions in binary form must reproduce the above copyright
9# notice, this list of conditions and the following disclaimer in the
10# documentation and/or other materials provided with the distribution.
11#
12# THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
13# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
14# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
15# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
16# BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
17# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
18# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
19# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
20# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
21# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
22# THE POSSIBILITY OF SUCH DAMAGE.
23
24require "ast"
25require "opt"
26require "risc"
27require "risc_arm64"
28
29# Naming conventions:
30#
31# x<number> => GPR. This is both the generic name of the register, and the name used
32# to indicate that the register is used in 64-bit mode.
33# w<number> => GPR in 32-bit mode. This is the low 32-bits of the GPR. If it is
34# mutated then the high 32-bit part of the register is zero filled.
35# q<number> => FPR. This is the generic name of the register.
36# d<number> => FPR used as an IEEE 64-bit binary floating point number (i.e. double).
37#
38# GPR conventions, to match the baseline JIT:
39#
40# x0 => return value, cached result, first argument, t0, a0, r0
41# x1 => t1, a1, r1
42# x2 => t2
43# x9 => (nonArgGPR1 in baseline)
44# x10 => t4 (unused in baseline)
45# x11 => t5 (unused in baseline)
46# x12 => t6 (unused in baseline)
47# x13 => scratch (unused in baseline)
48# x16 => scratch
49# x17 => scratch
50# x23 => t3
51# x25 => cfr
52# x26 => timeout check (i.e. not touched by LLInt)
53# x27 => csr1 (tagTypeNumber)
54# x28 => csr2 (tagMask)
55# sp => sp
56# lr => lr
57#
# FPR conventions, to match the baseline JIT:
59#
60# q0 => ft0
61# q1 => ft1
62# q2 => ft2
63# q3 => ft3
64# q4 => ft4 (unused in baseline)
65# q5 => ft5 (unused in baseline)
66# q31 => scratch
67
# Translates a generic GPR name ("x<n>") into the concrete ARM64 register
# name for the requested access width: "w<n>" for 32-bit (:int) accesses,
# "x<n>" for 64-bit (:ptr) accesses.
# Raises if the name is not a GPR or the kind is unsupported.
def arm64GPRName(name, kind)
    raise "bad GPR name #{name}" unless name.start_with?("x")
    suffix = name[1..-1]
    prefix =
        case kind
        when :int then "w"
        when :ptr then "x"
        else raise "Wrong kind: #{kind}"
        end
    prefix + suffix
end
80
# Translates a generic FPR name ("q<n>") into the double-precision register
# name "d<n>". Only :double is a legal kind for FPRs in this backend.
def arm64FPRName(name, kind)
    raise "bad FPR kind #{kind}" if kind != :double
    raise "bad FPR name #{name}" unless name.start_with?("q")
    name.sub(/\Aq/, "d")
end
86
class SpecialRegister
    # Renders this scratch register as an ARM64 operand of the given kind.
    # The register class is inferred from the name's prefix: "x..." is a GPR,
    # "q..." is an FPR.
    def arm64Operand(kind)
        if @name.start_with?("x")
            arm64GPRName(@name, kind)
        elsif @name.start_with?("q")
            arm64FPRName(@name, kind)
        else
            raise "Bad name: #{@name}"
        end
    end
end
99
# Scratch registers handed to the temporary allocator at the end of lowering.
# x16/x17/x13 are the GPRs reserved as scratch in the register conventions
# above; q31 is the scratch FPR.
ARM64_EXTRA_GPRS = [SpecialRegister.new("x16"), SpecialRegister.new("x17"), SpecialRegister.new("x13")]
ARM64_EXTRA_FPRS = [SpecialRegister.new("q31")]
102
class RegisterID
    # Maps a symbolic offlineasm register name (t0-t6, a0/a1, r0/r1, cfr,
    # csr1/csr2, sp, lr) to its concrete ARM64 operand of the given kind.
    # The assignments mirror the GPR conventions documented at the top of
    # this file.
    def arm64Operand(kind)
        gpr =
            case @name
            when 't0', 'a0', 'r0' then 'x0'
            when 't1', 'a1', 'r1' then 'x1'
            when 't2'             then 'x2'
            when 't3'             then 'x23'
            when 't4'             then 'x10'
            when 't5'             then 'x11'
            when 't6'             then 'x12'
            when 'cfr'            then 'x25'
            when 'csr1'           then 'x27'
            when 'csr2'           then 'x28'
            end
        return arm64GPRName(gpr, kind) if gpr

        # sp and lr are special-cased: they have no w-form alias.
        case @name
        when 'sp' then 'sp'
        when 'lr' then 'lr'
        else
            raise "Bad register name #{@name} at #{codeOriginString}"
        end
    end
end
135
class FPRegisterID
    # Maps a symbolic FPR name (ft0-ft5) to its concrete ARM64 operand,
    # following the FPR conventions documented at the top of this file.
    #
    # BUG FIX: the else branch previously *returned* the error string instead
    # of raising it (the `raise` keyword was missing), so an unknown FPR name
    # would be silently emitted into the generated assembly as an operand.
    # Raise, matching RegisterID#arm64Operand.
    def arm64Operand(kind)
        case @name
        when 'ft0'
            arm64FPRName('q0', kind)
        when 'ft1'
            arm64FPRName('q1', kind)
        when 'ft2'
            arm64FPRName('q2', kind)
        when 'ft3'
            arm64FPRName('q3', kind)
        when 'ft4'
            arm64FPRName('q4', kind)
        when 'ft5'
            arm64FPRName('q5', kind)
        else
            raise "Bad register name #{@name} at #{codeOriginString}"
        end
    end
end
155
class Immediate
    # Renders an immediate operand. ARM64 arithmetic immediates are limited
    # to 12 unsigned bits (0..4095); anything else must have been lowered
    # into a register beforehand.
    def arm64Operand(kind)
        unless (0..4095).include? value
            raise "Invalid immediate #{value} at #{codeOriginString}"
        end
        "##{value}"
    end
end
162
class Address
    # Renders a base+offset memory operand. Offsets must fit the unscaled
    # (-255..-1, via ldur/stur) or scaled unsigned (0..4095) addressing forms.
    def arm64Operand(kind)
        displacement = offset.value
        unless (-255..4095).include? displacement
            raise "Invalid offset #{displacement} at #{codeOriginString}"
        end
        "[#{base.arm64Operand(:ptr)}, ##{displacement}]"
    end

    # Emits an address computation (lea) as a single add-with-immediate.
    def arm64EmitLea(destination, kind)
        $asm.puts "add #{destination.arm64Operand(kind)}, #{base.arm64Operand(kind)}, ##{offset.value}"
    end
end
173
class BaseIndex
    # Renders a base+index memory operand using the shifted-register
    # addressing form. ARM64 has no displacement in this form, so a nonzero
    # offset must have been lowered away already.
    def arm64Operand(kind)
        unless offset.value.zero?
            raise "Invalid offset #{offset.value} at #{codeOriginString}"
        end
        "[#{base.arm64Operand(:ptr)}, #{index.arm64Operand(:ptr)}, lsl ##{scaleShift}]"
    end

    # Emits an address computation as add with a shifted index register.
    def arm64EmitLea(destination, kind)
        $asm.puts "add #{destination.arm64Operand(kind)}, #{base.arm64Operand(kind)}, #{index.arm64Operand(kind)}, lsl ##{scaleShift}"
    end
end
184
class AbsoluteAddress
    # Absolute addresses cannot be encoded directly on ARM64; the lowering
    # passes are expected to have converted them before emission, so reaching
    # this method is always an error.
    def arm64Operand(kind)
        raise "Unconverted absolute address #{address.value} at #{codeOriginString}"
    end
end
190
191# FIXME: we could support AbsoluteAddress for lea, but we don't.
192
193#
194# Actual lowering code follows.
195#
196
class Sequence
    # Runs the RISC lowering pipeline over the instruction list and returns a
    # new list that is directly emittable as ARM64. The pass order matters:
    # address legalization must see the final opcodes, and temporaries created
    # by earlier passes are only assigned real registers at the very end.
    def getModifiedListARM64
        result = @list
        result = riscLowerNot(result)
        result = riscLowerSimpleBranchOps(result)
        result = riscLowerHardBranchOps64(result)
        result = riscLowerShiftOps(result)
        # First legalization pass: decide, per opcode, which addressing modes
        # the hardware accepts. The block returns true when `address` is
        # already valid for `node`, false to force it to be rewritten.
        result = riscLowerMalformedAddresses(result) {
            | node, address |
            # Determine the access size implied by the opcode; base+index
            # operands may only be scaled by 1 or by this size.
            case node.opcode
            when "loadb", "loadbs", "storeb", /^bb/, /^btb/, /^cb/, /^tb/
                size = 1
            when "loadh", "loadhs"
                size = 2
            when "loadi", "loadis", "storei", "addi", "andi", "lshifti", "muli", "negi",
                "noti", "ori", "rshifti", "urshifti", "subi", "xori", /^bi/, /^bti/,
                /^ci/, /^ti/, "addis", "subis", "mulis", "smulli", "leai"
                size = 4
            when "loadp", "storep", "loadq", "storeq", "loadd", "stored", "lshiftp", "lshiftq", "negp", "negq", "rshiftp", "rshiftq",
                "urshiftp", "urshiftq", "addp", "addq", "mulp", "mulq", "andp", "andq", "orp", "orq", "subp", "subq", "xorp", "xorq", "addd",
                "divd", "subd", "muld", "sqrtd", /^bp/, /^bq/, /^btp/, /^btq/, /^cp/, /^cq/, /^tp/, /^tq/, /^bd/,
                "jmp", "call", "leap", "leaq"
                size = 8
            else
                raise "Bad instruction #{node.opcode} for heap access at #{node.codeOriginString}"
            end

            if address.is_a? BaseIndex
                # Base+index has no displacement on ARM64, and the scale must
                # match the shifted-register form (lea tolerates any scale).
                address.offset.value == 0 and
                (node.opcode =~ /^lea/ or address.scale == 1 or address.scale == size)
            elsif address.is_a? Address
                # -255..-1 maps to the unscaled (ldur/stur) encodings;
                # 0..4095 to the scaled unsigned-immediate encodings.
                (-255..4095).include? address.offset.value
            else
                false
            end
        }
        result = riscLowerMisplacedImmediates(result, ["storeb", "storei", "storep", "storeq"])
        result = riscLowerMalformedImmediates(result, 0..4095)
        result = riscLowerMisplacedAddresses(result)
        # Second legalization pass over the addresses introduced by the
        # previous passes: stores have no negative-offset scaled form.
        result = riscLowerMalformedAddresses(result) {
            | node, address |
            case node.opcode
            when /^load/
                true
            when /^store/
                not (address.is_a? Address and address.offset.value < 0)
            when /^lea/
                true
            else
                raise "Bad instruction #{node.opcode} for heap access at #{node.codeOriginString}"
            end
        }
        result = riscLowerTest(result)
        # Finally bind any temporaries created above to the scratch registers.
        result = assignRegistersToTemporaries(result, :gpr, ARM64_EXTRA_GPRS)
        result = assignRegistersToTemporaries(result, :fpr, ARM64_EXTRA_FPRS)
        return result
    end
end
255
# Renders a comma-separated operand list. `kinds` is either a single kind
# applied to every operand, or an Array giving one kind per operand (whose
# length must then match).
def arm64Operands(operands, kinds)
    if kinds.is_a? Array
        raise "Mismatched operand lists: #{operands.inspect} and #{kinds.inspect}" if operands.size != kinds.size
    else
        kinds = [kinds] * operands.size
    end
    operands.zip(kinds).map { |operand, kind| operand.arm64Operand(kind) }.join(', ')
end
267
# Renders operands with the destination moved to the front: offlineasm puts
# the destination last, while ARM64 assembly syntax puts it first. The kinds
# array (if given) is rotated the same way so each kind stays with its operand.
def arm64FlippedOperands(operands, kinds)
    kinds = kinds.last(1) + kinds[0..-2] if kinds.is_a? Array
    arm64Operands(operands.last(1) + operands[0..-2], kinds)
end
274
275# TAC = three address code.
# TAC = three address code.
# Renders operands for a three-address instruction. A two-operand offlineasm
# form (src, dst) is expanded to the ARM64 three-address form by repeating the
# destination as the first source (dst, dst, src after flipping).
def arm64TACOperands(operands, kind)
    case operands.size
    when 3
        arm64FlippedOperands(operands, kind)
    when 2
        "#{operands[1].arm64Operand(kind)}, #{arm64FlippedOperands(operands, kind)}"
    else
        raise
    end
end
285
# Emits an add-family instruction (add/adds), handling both the two- and
# three-operand offlineasm forms and eliding additions of zero when the
# flags-setting variant ("...s") is not requested.
#
# BUG FIX: the zero-immediate test in the three-operand path previously read
# `flag !~ /s$/`, but no local `flag` exists anywhere in this function --
# reaching that branch raised NameError. It must test `opcode`, exactly as
# the two-operand path at the bottom of this function already does.
def emitARM64Add(opcode, operands, kind)
    if operands.size == 3
        raise unless operands[1].register?
        raise unless operands[2].register?

        if operands[0].immediate?
            if operands[0].value == 0 and opcode !~ /s$/
                # add dst, src, #0 degenerates to a move (or nothing at all
                # when source and destination coincide).
                unless operands[1] == operands[2]
                    $asm.puts "mov #{arm64FlippedOperands(operands[1..2], kind)}"
                end
            else
                $asm.puts "#{opcode} #{arm64Operands(operands.reverse, kind)}"
            end
            return
        end

        raise unless operands[0].register?
        $asm.puts "#{opcode} #{arm64FlippedOperands(operands, kind)}"
        return
    end

    raise unless operands.size == 2

    # Adding zero in place is a no-op unless the flags are wanted.
    if operands[0].immediate? and operands[0].value == 0 and opcode !~ /s$/
        return
    end

    $asm.puts "#{opcode} #{arm64TACOperands(operands, kind)}"
end
315
# Emits an instruction with the operands in offlineasm order (no flipping of
# the destination to the front).
def emitARM64Unflipped(opcode, operands, kind)
    $asm.puts [opcode, arm64Operands(operands, kind)].join(' ')
end
319
# Emits a three-address-code instruction, expanding two-operand forms via
# arm64TACOperands.
def emitARM64TAC(opcode, operands, kind)
    rendered = arm64TACOperands(operands, kind)
    $asm.puts "#{opcode} #{rendered}"
end
323
# Emits an instruction with the destination operand moved to the front,
# matching ARM64 assembly syntax.
def emitARM64(opcode, operands, kind)
    rendered = arm64FlippedOperands(operands, kind)
    $asm.puts "#{opcode} #{rendered}"
end
327
# Emits a memory access, selecting the unscaled-offset mnemonic
# (ldur/stur family) when the address has a negative displacement, since the
# scaled forms only accept unsigned immediates.
def emitARM64Access(opcode, opcodeNegativeOffset, register, memory, kind)
    needsUnscaled = memory.is_a?(Address) && memory.offset.value < 0
    mnemonic = needsUnscaled ? opcodeNegativeOffset : opcode
    $asm.puts "#{mnemonic} #{register.arm64Operand(kind)}, #{memory.arm64Operand(kind)}"
end
336
# Emits a shift. Register shifts use `opcodeRegs` (lslv/asrv/lsrv); immediate
# shifts are encoded as bitfield moves via `opcodeImmediate` (ubfm/sbfm), with
# the caller's block translating the shift amount into the (immr, imms) pair.
def emitARM64Shift(opcodeRegs, opcodeImmediate, operands, kind)
    if operands.size == 3 && operands[1].immediate?
        immr, imms = yield operands[1].value
        $asm.puts "#{opcodeImmediate} #{operands[2].arm64Operand(kind)}, #{operands[0].arm64Operand(kind)}, ##{immr}, ##{imms}"
    elsif operands.size == 2 && operands[0].immediate?
        immr, imms = yield operands[0].value
        target = operands[1].arm64Operand(kind)
        $asm.puts "#{opcodeImmediate} #{target}, #{target}, ##{immr}, ##{imms}"
    else
        emitARM64TAC(opcodeRegs, operands, kind)
    end
end
352
# Emits a compare-and-branch pair: `opcode` over all operands except the
# trailing label, followed by the conditional branch to that label.
def emitARM64Branch(opcode, operands, kind, branchOpcode)
    comparisonOperands = operands[0..-2]
    emitARM64Unflipped(opcode, comparisonOperands, kind)
    $asm.puts "#{branchOpcode} #{operands.last.asmLabel}"
end
357
# Emits a boolean comparison: subtract into the zero register to set flags,
# then materialize 0/1 in the result register with csinc. Note csinc produces
# 1 when `compareCode` is FALSE, so callers pass the inverted condition.
def emitARM64Compare(operands, kind, compareCode)
    zeroRegister = arm64GPRName('xzr', kind)
    emitARM64Unflipped("subs #{zeroRegister}, ", operands[0..-2], kind)
    $asm.puts "csinc #{operands.last.arm64Operand(:int)}, wzr, wzr, #{compareCode}"
end
362
# Materializes a 64-bit immediate into `target` using the minimal
# movz/movn + movk sequence: the first significant 16-bit chunk is set with
# movz (or movn for negative values, which inverts its operand), and each
# further chunk is patched in with movk. Chunks equal to the sign-filler
# (0x0000, or 0xffff when negative) are skipped, except that the shift-0
# chunk is always emitted so value 0 / -1 still produce one instruction.
def emitARM64MoveImmediate(value, target)
    negative = value < 0
    filler = negative ? 0xffff : 0
    emitted = false
    [48, 32, 16, 0].each do |shift|
        chunk = (value >> shift) & 0xffff
        next if chunk == filler && shift != 0
        destination = target.arm64Operand(:ptr)
        if emitted
            $asm.puts "movk #{destination}, ##{chunk}, lsl ##{shift}"
        elsif negative
            $asm.puts "movn #{destination}, ##{(~chunk) & 0xffff}, lsl ##{shift}"
            emitted = true
        else
            $asm.puts "movz #{destination}, ##{chunk}, lsl ##{shift}"
            emitted = true
        end
    end
end
382
class Instruction
    # Lowers one offlineasm instruction to ARM64 assembly text on $asm.
    # Dispatches on the opcode; suffix conventions: i = 32-bit int,
    # p/q = 64-bit pointer/quad, d = double, b = byte, h = halfword,
    # s suffix on arithmetic = sets flags.
    def lowerARM64
        $asm.comment codeOriginString
        case opcode
        when 'addi'
            emitARM64Add("add", operands, :int)
        when 'addis'
            emitARM64Add("adds", operands, :int)
        when 'addp'
            emitARM64Add("add", operands, :ptr)
        when 'addps'
            emitARM64Add("adds", operands, :ptr)
        when 'addq'
            emitARM64Add("add", operands, :ptr)
        when "andi"
            emitARM64TAC("and", operands, :int)
        when "andp"
            emitARM64TAC("and", operands, :ptr)
        when "andq"
            emitARM64TAC("and", operands, :ptr)
        when "ori"
            emitARM64TAC("orr", operands, :int)
        when "orp"
            emitARM64TAC("orr", operands, :ptr)
        when "orq"
            emitARM64TAC("orr", operands, :ptr)
        when "xori"
            emitARM64TAC("eor", operands, :int)
        when "xorp"
            emitARM64TAC("eor", operands, :ptr)
        when "xorq"
            emitARM64TAC("eor", operands, :ptr)
        # Shifts: immediate forms are encoded as ubfm/sbfm bitfield moves;
        # the blocks compute the (immr, imms) pair for the shift amount.
        when "lshifti"
            emitARM64Shift("lslv", "ubfm", operands, :int) {
                | value |
                [32 - value, 31 - value]
            }
        when "lshiftp"
            emitARM64Shift("lslv", "ubfm", operands, :ptr) {
                | value |
                [64 - value, 63 - value]
            }
        when "lshiftq"
            emitARM64Shift("lslv", "ubfm", operands, :ptr) {
                | value |
                [64 - value, 63 - value]
            }
        when "rshifti"
            emitARM64Shift("asrv", "sbfm", operands, :int) {
                | value |
                [value, 31]
            }
        when "rshiftp"
            emitARM64Shift("asrv", "sbfm", operands, :ptr) {
                | value |
                [value, 63]
            }
        when "rshiftq"
            emitARM64Shift("asrv", "sbfm", operands, :ptr) {
                | value |
                [value, 63]
            }
        when "urshifti"
            emitARM64Shift("lsrv", "ubfm", operands, :int) {
                | value |
                [value, 31]
            }
        when "urshiftp"
            emitARM64Shift("lsrv", "ubfm", operands, :ptr) {
                | value |
                [value, 63]
            }
        when "urshiftq"
            emitARM64Shift("lsrv", "ubfm", operands, :ptr) {
                | value |
                [value, 63]
            }
        # Multiply is expressed as multiply-add with the zero register.
        when "muli"
            $asm.puts "madd #{arm64TACOperands(operands, :int)}, wzr"
        when "mulp"
            $asm.puts "madd #{arm64TACOperands(operands, :ptr)}, xzr"
        when "mulq"
            $asm.puts "madd #{arm64TACOperands(operands, :ptr)}, xzr"
        when "subi"
            emitARM64TAC("sub", operands, :int)
        when "subp"
            emitARM64TAC("sub", operands, :ptr)
        when "subq"
            emitARM64TAC("sub", operands, :ptr)
        when "subis"
            emitARM64TAC("subs", operands, :int)
        # Negate is subtract-from-zero-register.
        when "negi"
            $asm.puts "sub #{operands[0].arm64Operand(:int)}, wzr, #{operands[0].arm64Operand(:int)}"
        when "negp"
            $asm.puts "sub #{operands[0].arm64Operand(:ptr)}, xzr, #{operands[0].arm64Operand(:ptr)}"
        when "negq"
            $asm.puts "sub #{operands[0].arm64Operand(:ptr)}, xzr, #{operands[0].arm64Operand(:ptr)}"
        # Loads/stores: emitARM64Access picks the unscaled (ldur-family)
        # mnemonic when the offset is negative.
        when "loadi"
            emitARM64Access("ldr", "ldur", operands[1], operands[0], :int)
        when "loadis"
            emitARM64Access("ldrsw", "ldursw", operands[1], operands[0], :ptr)
        when "loadp"
            emitARM64Access("ldr", "ldur", operands[1], operands[0], :ptr)
        when "loadq"
            emitARM64Access("ldr", "ldur", operands[1], operands[0], :ptr)
        when "storei"
            emitARM64Unflipped("str", operands, :int)
        when "storep"
            emitARM64Unflipped("str", operands, :ptr)
        when "storeq"
            emitARM64Unflipped("str", operands, :ptr)
        when "loadb"
            emitARM64Access("ldrb", "ldurb", operands[1], operands[0], :int)
        when "loadbs"
            emitARM64Access("ldrsb", "ldursb", operands[1], operands[0], :int)
        when "storeb"
            emitARM64Unflipped("strb", operands, :int)
        when "loadh"
            emitARM64Access("ldrh", "ldurh", operands[1], operands[0], :int)
        when "loadhs"
            emitARM64Access("ldrsh", "ldursh", operands[1], operands[0], :int)
        when "storeh"
            emitARM64Unflipped("strh", operands, :int)
        when "loadd"
            emitARM64Access("ldr", "ldur", operands[1], operands[0], :double)
        when "stored"
            emitARM64Unflipped("str", operands, :double)
        when "addd"
            emitARM64TAC("fadd", operands, :double)
        when "divd"
            emitARM64TAC("fdiv", operands, :double)
        when "subd"
            emitARM64TAC("fsub", operands, :double)
        when "muld"
            emitARM64TAC("fmul", operands, :double)
        when "sqrtd"
            emitARM64("fsqrt", operands, :double)
        when "ci2d"
            emitARM64("scvtf", operands, [:int, :double])
        # Double branches: the condition codes are chosen for the required
        # ordered/unordered behavior after fcmp (unordered sets C and V).
        when "bdeq"
            emitARM64Branch("fcmp", operands, :double, "b.eq")
        when "bdneq"
            # "not equal, ordered": b.ne alone would also take unordered
            # results, so branch around via a local label when V is set.
            emitARM64Unflipped("fcmp", operands[0..1], :double)
            isUnordered = LocalLabel.unique("bdneq")
            $asm.puts "b.vs #{LocalLabelReference.new(codeOrigin, isUnordered).asmLabel}"
            $asm.puts "b.ne #{operands[2].asmLabel}"
            isUnordered.lower("ARM64")
        when "bdgt"
            emitARM64Branch("fcmp", operands, :double, "b.gt")
        when "bdgteq"
            emitARM64Branch("fcmp", operands, :double, "b.ge")
        when "bdlt"
            emitARM64Branch("fcmp", operands, :double, "b.mi")
        when "bdlteq"
            emitARM64Branch("fcmp", operands, :double, "b.ls")
        when "bdequn"
            # "equal or unordered": take the branch on V (unordered) or Z.
            emitARM64Unflipped("fcmp", operands[0..1], :double)
            $asm.puts "b.vs #{operands[2].asmLabel}"
            $asm.puts "b.eq #{operands[2].asmLabel}"
        when "bdnequn"
            emitARM64Branch("fcmp", operands, :double, "b.ne")
        when "bdgtun"
            emitARM64Branch("fcmp", operands, :double, "b.hi")
        when "bdgtequn"
            emitARM64Branch("fcmp", operands, :double, "b.pl")
        when "bdltun"
            emitARM64Branch("fcmp", operands, :double, "b.lt")
        when "bdltequn"
            emitARM64Branch("fcmp", operands, :double, "b.le")
        when "btd2i"
            # FIXME: may be a good idea to just get rid of this instruction, since the interpreter
            # currently does not use it.
            raise "ARM64 does not support this opcode yet, #{codeOriginString}"
        when "td2i"
            emitARM64("fcvtzs", operands, [:double, :int])
        when "bcd2i"
            # FIXME: remove this instruction, or use it and implement it. Currently it's not
            # used.
            raise "ARM64 does not support this opcode yet, #{codeOriginString}"
        when "movdz"
            # FIXME: remove it or support it.
            raise "ARM64 does not support this opcode yet, #{codeOriginString}"
        when "pop"
            emitARM64Unflipped("pop", operands, :ptr)
        when "push"
            emitARM64Unflipped("push", operands, :ptr)
        when "move"
            if operands[0].immediate?
                emitARM64MoveImmediate(operands[0].value, operands[1])
            else
                emitARM64("mov", operands, :ptr)
            end
        when "sxi2p"
            emitARM64("sxtw", operands, :ptr)
        when "sxi2q"
            emitARM64("sxtw", operands, :ptr)
        when "zxi2p"
            emitARM64("uxtw", operands, :ptr)
        when "zxi2q"
            emitARM64("uxtw", operands, :ptr)
        when "nop"
            $asm.puts "nop"
        # Integer equality branches: comparisons against zero use the
        # dedicated cbz/cbnz forms; otherwise subs into the zero register.
        when "bieq", "bbeq"
            if operands[0].immediate? and operands[0].value == 0
                $asm.puts "cbz #{operands[1].arm64Operand(:int)}, #{operands[2].asmLabel}"
            elsif operands[1].immediate? and operands[1].value == 0
                $asm.puts "cbz #{operands[0].arm64Operand(:int)}, #{operands[2].asmLabel}"
            else
                emitARM64Branch("subs wzr, ", operands, :int, "b.eq")
            end
        when "bpeq"
            if operands[0].immediate? and operands[0].value == 0
                $asm.puts "cbz #{operands[1].arm64Operand(:ptr)}, #{operands[2].asmLabel}"
            elsif operands[1].immediate? and operands[1].value == 0
                $asm.puts "cbz #{operands[0].arm64Operand(:ptr)}, #{operands[2].asmLabel}"
            else
                emitARM64Branch("subs xzr, ", operands, :ptr, "b.eq")
            end
        when "bqeq"
            if operands[0].immediate? and operands[0].value == 0
                $asm.puts "cbz #{operands[1].arm64Operand(:ptr)}, #{operands[2].asmLabel}"
            elsif operands[1].immediate? and operands[1].value == 0
                $asm.puts "cbz #{operands[0].arm64Operand(:ptr)}, #{operands[2].asmLabel}"
            else
                emitARM64Branch("subs xzr, ", operands, :ptr, "b.eq")
            end
        when "bineq", "bbneq"
            if operands[0].immediate? and operands[0].value == 0
                $asm.puts "cbnz #{operands[1].arm64Operand(:int)}, #{operands[2].asmLabel}"
            elsif operands[1].immediate? and operands[1].value == 0
                $asm.puts "cbnz #{operands[0].arm64Operand(:int)}, #{operands[2].asmLabel}"
            else
                emitARM64Branch("subs wzr, ", operands, :int, "b.ne")
            end
        when "bpneq"
            if operands[0].immediate? and operands[0].value == 0
                $asm.puts "cbnz #{operands[1].arm64Operand(:ptr)}, #{operands[2].asmLabel}"
            elsif operands[1].immediate? and operands[1].value == 0
                $asm.puts "cbnz #{operands[0].arm64Operand(:ptr)}, #{operands[2].asmLabel}"
            else
                emitARM64Branch("subs xzr, ", operands, :ptr, "b.ne")
            end
        when "bqneq"
            if operands[0].immediate? and operands[0].value == 0
                $asm.puts "cbnz #{operands[1].arm64Operand(:ptr)}, #{operands[2].asmLabel}"
            elsif operands[1].immediate? and operands[1].value == 0
                $asm.puts "cbnz #{operands[0].arm64Operand(:ptr)}, #{operands[2].asmLabel}"
            else
                emitARM64Branch("subs xzr, ", operands, :ptr, "b.ne")
            end
        # Relational branches: a/aeq/b/beq are the unsigned comparisons
        # (hi/hs/lo/ls), gt/gteq/lt/lteq the signed ones.
        when "bia", "bba"
            emitARM64Branch("subs wzr, ", operands, :int, "b.hi")
        when "bpa"
            emitARM64Branch("subs xzr, ", operands, :ptr, "b.hi")
        when "bqa"
            emitARM64Branch("subs xzr, ", operands, :ptr, "b.hi")
        when "biaeq", "bbaeq"
            emitARM64Branch("subs wzr, ", operands, :int, "b.hs")
        when "bpaeq"
            emitARM64Branch("subs xzr, ", operands, :ptr, "b.hs")
        when "bqaeq"
            emitARM64Branch("subs xzr, ", operands, :ptr, "b.hs")
        when "bib", "bbb"
            emitARM64Branch("subs wzr, ", operands, :int, "b.lo")
        when "bpb"
            emitARM64Branch("subs xzr, ", operands, :ptr, "b.lo")
        when "bqb"
            emitARM64Branch("subs xzr, ", operands, :ptr, "b.lo")
        when "bibeq", "bbbeq"
            emitARM64Branch("subs wzr, ", operands, :int, "b.ls")
        when "bpbeq"
            emitARM64Branch("subs xzr, ", operands, :ptr, "b.ls")
        when "bqbeq"
            emitARM64Branch("subs xzr, ", operands, :ptr, "b.ls")
        when "bigt", "bbgt"
            emitARM64Branch("subs wzr, ", operands, :int, "b.gt")
        when "bpgt"
            emitARM64Branch("subs xzr, ", operands, :ptr, "b.gt")
        when "bqgt"
            emitARM64Branch("subs xzr, ", operands, :ptr, "b.gt")
        when "bigteq", "bbgteq"
            emitARM64Branch("subs wzr, ", operands, :int, "b.ge")
        when "bpgteq"
            emitARM64Branch("subs xzr, ", operands, :ptr, "b.ge")
        when "bqgteq"
            emitARM64Branch("subs xzr, ", operands, :ptr, "b.ge")
        when "bilt", "bblt"
            emitARM64Branch("subs wzr, ", operands, :int, "b.lt")
        when "bplt"
            emitARM64Branch("subs xzr, ", operands, :ptr, "b.lt")
        when "bqlt"
            emitARM64Branch("subs xzr, ", operands, :ptr, "b.lt")
        when "bilteq", "bblteq"
            emitARM64Branch("subs wzr, ", operands, :int, "b.le")
        when "bplteq"
            emitARM64Branch("subs xzr, ", operands, :ptr, "b.le")
        when "bqlteq"
            emitARM64Branch("subs xzr, ", operands, :ptr, "b.le")
        when "jmp"
            if operands[0].label?
                $asm.puts "b #{operands[0].asmLabel}"
            else
                emitARM64Unflipped("br", operands, :ptr)
            end
        when "call"
            if operands[0].label?
                $asm.puts "bl #{operands[0].asmLabel}"
            else
                emitARM64Unflipped("blr", operands, :ptr)
            end
        when "break"
            $asm.puts "brk \#0"
        when "ret"
            $asm.puts "ret"
        # Boolean compares: emitARM64Compare uses csinc, which produces 1 when
        # the given condition is FALSE, so each opcode passes the INVERTED
        # ARM64 condition code (eq -> ne, lt -> ge, etc.).
        when "cieq", "cbeq"
            emitARM64Compare(operands, :int, "ne")
        when "cpeq"
            emitARM64Compare(operands, :ptr, "ne")
        when "cqeq"
            emitARM64Compare(operands, :ptr, "ne")
        when "cineq", "cbneq"
            emitARM64Compare(operands, :int, "eq")
        when "cpneq"
            emitARM64Compare(operands, :ptr, "eq")
        when "cqneq"
            emitARM64Compare(operands, :ptr, "eq")
        when "cia", "cba"
            emitARM64Compare(operands, :int, "ls")
        when "cpa"
            emitARM64Compare(operands, :ptr, "ls")
        when "cqa"
            emitARM64Compare(operands, :ptr, "ls")
        when "ciaeq", "cbaeq"
            emitARM64Compare(operands, :int, "lo")
        when "cpaeq"
            emitARM64Compare(operands, :ptr, "lo")
        when "cqaeq"
            emitARM64Compare(operands, :ptr, "lo")
        when "cib", "cbb"
            emitARM64Compare(operands, :int, "hs")
        when "cpb"
            emitARM64Compare(operands, :ptr, "hs")
        when "cqb"
            emitARM64Compare(operands, :ptr, "hs")
        when "cibeq", "cbbeq"
            emitARM64Compare(operands, :int, "hi")
        when "cpbeq"
            emitARM64Compare(operands, :ptr, "hi")
        when "cqbeq"
            emitARM64Compare(operands, :ptr, "hi")
        when "cilt", "cblt"
            emitARM64Compare(operands, :int, "ge")
        when "cplt"
            emitARM64Compare(operands, :ptr, "ge")
        when "cqlt"
            emitARM64Compare(operands, :ptr, "ge")
        when "cilteq", "cblteq"
            emitARM64Compare(operands, :int, "gt")
        when "cplteq"
            emitARM64Compare(operands, :ptr, "gt")
        when "cqlteq"
            emitARM64Compare(operands, :ptr, "gt")
        when "cigt", "cbgt"
            emitARM64Compare(operands, :int, "le")
        when "cpgt"
            emitARM64Compare(operands, :ptr, "le")
        when "cqgt"
            emitARM64Compare(operands, :ptr, "le")
        when "cigteq", "cbgteq"
            emitARM64Compare(operands, :int, "lt")
        when "cpgteq"
            emitARM64Compare(operands, :ptr, "lt")
        when "cqgteq"
            emitARM64Compare(operands, :ptr, "lt")
        # peek/poke access stack slots as 8-byte words.
        when "peek"
            $asm.puts "ldr #{operands[1].arm64Operand(:ptr)}, [sp, \##{operands[0].value * 8}]"
        when "poke"
            $asm.puts "str #{operands[1].arm64Operand(:ptr)}, [sp, \##{operands[0].value * 8}]"
        # Bit-level GPR<->FPR transfers via fmov.
        when "fp2d"
            emitARM64("fmov", operands, [:ptr, :double])
        when "fq2d"
            emitARM64("fmov", operands, [:ptr, :double])
        when "fd2p"
            emitARM64("fmov", operands, [:double, :ptr])
        when "fd2q"
            emitARM64("fmov", operands, [:double, :ptr])
        # Flag branches: overflow, sign, zero, not-zero.
        when "bo"
            $asm.puts "b.vs #{operands[0].asmLabel}"
        when "bs"
            $asm.puts "b.mi #{operands[0].asmLabel}"
        when "bz"
            $asm.puts "b.eq #{operands[0].asmLabel}"
        when "bnz"
            $asm.puts "b.ne #{operands[0].asmLabel}"
        when "leai"
            operands[0].arm64EmitLea(operands[1], :int)
        when "leap"
            operands[0].arm64EmitLea(operands[1], :ptr)
        when "leaq"
            operands[0].arm64EmitLea(operands[1], :ptr)
        # Signed 32x32 -> 64 multiply via smaddl with a zero addend.
        when "smulli"
            $asm.puts "smaddl #{operands[2].arm64Operand(:ptr)}, #{operands[0].arm64Operand(:int)}, #{operands[1].arm64Operand(:int)}, xzr"
        else
            raise "Unhandled opcode #{opcode} at #{codeOriginString}"
        end
    end
end
790