# Copyright (C) 2011 Apple Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
#    notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
#    notice, this list of conditions and the following disclaimer in the
#    documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
# BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
# THE POSSIBILITY OF SUCH DAMAGE.

require "ast"
require "opt"

class Node
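    # On ARMv7 VFP, double register dN aliases the pair of single registers
    # s(2N) and s(2N+1); armV7Single maps an operand like "d1" to "s2", the
    # low single-precision half of the pair.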
    def armV7Single
        doubleOperand = armV7Operand
        raise "Bogus register name #{doubleOperand}" unless doubleOperand =~ /^d/
        "s" + ($~.post_match.to_i * 2).to_s
    end
end

class SpecialRegister < NoChildren
    def armV7Operand
        @name
    end
end

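# Scratch registers used to back the Tmp operands introduced by the lowering
# phases below (see assignRegistersToTemporaries at the end of lowering).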
ARMv7_EXTRA_GPRS = [SpecialRegister.new("r9"), SpecialRegister.new("r8"), SpecialRegister.new("r3")]
ARMv7_EXTRA_FPRS = [SpecialRegister.new("d7")]
ARMv7_SCRATCH_FPR = SpecialRegister.new("d8")

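# Emits the cheapest instruction sequence that loads the given constant into
# the given register. For example, a value of 0x10001 becomes:
#
#     movw r0, #1
#     movt r0, #1
#
# (assuming the register maps to r0), while values in 0..255 collapse to a
# single movw and values whose bitwise complement is in 0..255 collapse to a
# single mvn.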
def armV7MoveImmediate(value, register)
    # Currently we only handle the simple cases, and fall back to movw/movt for the complex ones.
    if value >= 0 && value < 256
        $asm.puts "movw #{register.armV7Operand}, \##{value}"
    elsif (~value) >= 0 && (~value) < 256
        $asm.puts "mvn #{register.armV7Operand}, \##{~value}"
    else
        $asm.puts "movw #{register.armV7Operand}, \##{value & 0xffff}"
        if (value & 0xffff0000) != 0
            $asm.puts "movt #{register.armV7Operand}, \##{value >> 16}"
        end
    end
end

class RegisterID
    def armV7Operand
        case name
        when "t0", "a0", "r0"
            "r0"
        when "t1", "a1", "r1"
            "r1"
        when "t2", "a2"
            "r2"
        when "a3"
            "r3"
        when "t3"
            "r4"
        when "t4"
            "r10"
        when "cfr"
            "r5"
        when "lr"
            "lr"
        when "sp"
            "sp"
        else
            raise "Bad register #{name} for ARMv7 at #{codeOriginString}"
        end
    end
end

class FPRegisterID
    def armV7Operand
        case name
        when "ft0", "fr"
            "d0"
        when "ft1"
            "d1"
        when "ft2"
            "d2"
        when "ft3"
            "d3"
        when "ft4"
            "d4"
        when "ft5"
            "d5"
        else
            raise "Bad register #{name} for ARMv7 at #{codeOriginString}"
        end
    end
end

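# Immediate operands that survive lowering must fit in 0..255, which keeps
# them encodable as Thumb-2 modified immediates; anything larger or negative
# is expected to have been moved into a register by
# armV7LowerMalformedImmediates below.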
class Immediate
    def armV7Operand
        raise "Invalid immediate #{value} at #{codeOriginString}" if value < 0 or value > 255
        "\##{value}"
    end
end

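# Thumb-2 ldr/str immediate offsets cover -255..4095, which is where the
# bounds below come from; armV7LowerMalformedAddresses rewrites anything that
# falls outside this window.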
class Address
    def armV7Operand
        raise "Bad offset at #{codeOriginString}" if offset.value < -0xff or offset.value > 0xfff
        "[#{base.armV7Operand}, \##{offset.value}]"
    end
end

class BaseIndex
    def armV7Operand
        raise "Bad offset at #{codeOriginString}" if offset.value != 0
        "[#{base.armV7Operand}, #{index.armV7Operand}, lsl \##{scaleShift}]"
    end
end

class AbsoluteAddress
    def armV7Operand
        raise "Unconverted absolute address at #{codeOriginString}"
    end
end

#
# Lowering of branch ops. For example:
#
#     baddiz foo, bar, baz
#
# will become:
#
#     addi foo, bar
#     bz baz
#
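# bmulio (branch on signed multiply overflow) gets a slightly longer
# expansion: it becomes a smulli producing the full 64-bit product, and the
# branch is taken when the high 32 bits do not equal the sign-extension of the
# low 32 bits, i.e. when the product does not fit in 32 bits.
#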

def armV7LowerBranchOps(list)
    newList = []
    list.each {
        | node |
        if node.is_a? Instruction
            case node.opcode
            when /^b(addi|subi|ori|addp)/
                op = $1
                branch = "b" + $~.post_match

                case op
                when "addi", "addp"
                    op = "addis"
                when "subi"
                    op = "subis"
                when "ori"
                    op = "oris"
                end

                newList << Instruction.new(node.codeOrigin, op, node.operands[0..-2])
                newList << Instruction.new(node.codeOrigin, branch, [node.operands[-1]])
            when "bmulio"
                tmp1 = Tmp.new(node.codeOrigin, :gpr)
                tmp2 = Tmp.new(node.codeOrigin, :gpr)
                newList << Instruction.new(node.codeOrigin, "smulli", [node.operands[0], node.operands[1], node.operands[1], tmp1])
                newList << Instruction.new(node.codeOrigin, "rshifti", [node.operands[-2], Immediate.new(node.codeOrigin, 31), tmp2])
                newList << Instruction.new(node.codeOrigin, "bineq", [tmp1, tmp2, node.operands[-1]])
            when /^bmuli/
                condition = $~.post_match
                newList << Instruction.new(node.codeOrigin, "muli", node.operands[0..-2])
                newList << Instruction.new(node.codeOrigin, "bti" + condition, [node.operands[-2], node.operands[-1]])
            else
                newList << node
            end
        else
            newList << node
        end
    }
    newList
end

#
# Lowering of shift ops. For example:
#
#     lshifti foo, bar
#
# will become:
#
#     andi foo, 31, tmp
#     lshifti tmp, bar
#
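# The masking matters because ARM shift-by-register instructions use the low
# byte of the shift register, so a shift amount of, say, 32 really shifts by
# 32; the andi presumably keeps the semantics in line with the x86 backends,
# which mask the shift amount to 0..31.
#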

def armV7SanitizeShift(operand, list)
    return operand if operand.immediate?

    tmp = Tmp.new(operand.codeOrigin, :gpr)
    list << Instruction.new(operand.codeOrigin, "andi", [operand, Immediate.new(operand.codeOrigin, 31), tmp])
    tmp
end

def armV7LowerShiftOps(list)
    newList = []
    list.each {
        | node |
        if node.is_a? Instruction
            case node.opcode
            when "lshifti", "rshifti", "urshifti", "lshiftp", "rshiftp", "urshiftp"
                if node.operands.size == 2
                    newList << Instruction.new(node.codeOrigin, node.opcode, [armV7SanitizeShift(node.operands[0], newList), node.operands[1]])
                else
                    newList << Instruction.new(node.codeOrigin, node.opcode, [node.operands[0], armV7SanitizeShift(node.operands[1], newList), node.operands[2]])
                    raise "Wrong number of operands for shift at #{node.codeOriginString}" unless node.operands.size == 3
                end
            else
                newList << node
            end
        else
            newList << node
        end
    }
    newList
end

#
# Lowering of malformed addresses. For example:
#
#     loadp 10000[foo], bar
#
# will become:
#
#     move 10000, tmp
#     addp foo, tmp
#     loadp 0[tmp], bar
#

class Node
    def armV7LowerMalformedAddressesRecurse(list)
        mapChildren {
            | node |
            node.armV7LowerMalformedAddressesRecurse(list)
        }
    end
end

class Address
    def armV7LowerMalformedAddressesRecurse(list)
        if offset.value < -0xff or offset.value > 0xfff
            tmp = Tmp.new(codeOrigin, :gpr)
            list << Instruction.new(codeOrigin, "move", [offset, tmp])
            list << Instruction.new(codeOrigin, "addp", [base, tmp])
            Address.new(codeOrigin, tmp, Immediate.new(codeOrigin, 0))
        else
            self
        end
    end
end

class BaseIndex
    def armV7LowerMalformedAddressesRecurse(list)
        if offset.value != 0
            tmp = Tmp.new(codeOrigin, :gpr)
            list << Instruction.new(codeOrigin, "move", [offset, tmp])
            list << Instruction.new(codeOrigin, "addp", [base, tmp])
            BaseIndex.new(codeOrigin, tmp, index, scale, Immediate.new(codeOrigin, 0))
        else
            self
        end
    end
end

class AbsoluteAddress
    def armV7LowerMalformedAddressesRecurse(list)
        tmp = Tmp.new(codeOrigin, :gpr)
        list << Instruction.new(codeOrigin, "move", [address, tmp])
        Address.new(codeOrigin, tmp, Immediate.new(codeOrigin, 0))
    end
end

def armV7LowerMalformedAddresses(list)
    newList = []
    list.each {
        | node |
        newList << node.armV7LowerMalformedAddressesRecurse(newList)
    }
    newList
end

#
# Lowering of malformed addresses in double loads and stores. For example:
#
#     loadd [foo, bar, 8], baz
#
# becomes:
#
#     leap [foo, bar, 8], tmp
#     loadd [tmp], baz
#
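# This is needed because vldr/vstr only support a [base, #offset] addressing
# mode; there is no base-plus-scaled-index form for the VFP loads and stores.
#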

class Node
    def armV7DoubleAddress(list)
        self
    end
end

class BaseIndex
    def armV7DoubleAddress(list)
        tmp = Tmp.new(codeOrigin, :gpr)
        list << Instruction.new(codeOrigin, "leap", [self, tmp])
        Address.new(codeOrigin, tmp, Immediate.new(codeOrigin, 0))
    end
end

def armV7LowerMalformedAddressesDouble(list)
    newList = []
    list.each {
        | node |
        if node.is_a? Instruction
            case node.opcode
            when "loadd"
                newList << Instruction.new(node.codeOrigin, "loadd", [node.operands[0].armV7DoubleAddress(newList), node.operands[1]])
            when "stored"
                newList << Instruction.new(node.codeOrigin, "stored", [node.operands[0], node.operands[1].armV7DoubleAddress(newList)])
            else
                newList << node
            end
        else
            newList << node
        end
    }
    newList
end

#
# Lowering of misplaced immediates. For example:
#
#     storei 0, [foo]
#
# will become:
#
#     move 0, tmp
#     storei tmp, [foo]
#

def armV7LowerMisplacedImmediates(list)
    newList = []
    list.each {
        | node |
        if node.is_a? Instruction
            case node.opcode
            when "storei", "storep"
                operands = node.operands
                newOperands = []
                operands.each {
                    | operand |
                    if operand.is_a? Immediate
                        tmp = Tmp.new(operand.codeOrigin, :gpr)
                        newList << Instruction.new(operand.codeOrigin, "move", [operand, tmp])
                        newOperands << tmp
                    else
                        newOperands << operand
                    end
                }
                newList << Instruction.new(node.codeOrigin, node.opcode, newOperands)
            else
                newList << node
            end
        else
            newList << node
        end
    }
    newList
end

#
# Lowering of malformed immediates except when used in a "move" instruction.
# For example:
#
#     addp 642641, foo
#
# will become:
#
#     move 642641, tmp
#     addp tmp, foo
#
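# As a special case, a small negative immediate added to (or subtracted from)
# a register is handled by negating the immediate and flipping add to sub (or
# vice versa), so no temporary is needed for it.
#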

class Node
    def armV7LowerMalformedImmediatesRecurse(list)
        mapChildren {
            | node |
            node.armV7LowerMalformedImmediatesRecurse(list)
        }
    end
end

class Address
    def armV7LowerMalformedImmediatesRecurse(list)
        self
    end
end

class BaseIndex
    def armV7LowerMalformedImmediatesRecurse(list)
        self
    end
end

class AbsoluteAddress
    def armV7LowerMalformedImmediatesRecurse(list)
        self
    end
end

class Immediate
    def armV7LowerMalformedImmediatesRecurse(list)
        if value < 0 or value > 255
            tmp = Tmp.new(codeOrigin, :gpr)
            list << Instruction.new(codeOrigin, "move", [self, tmp])
            tmp
        else
            self
        end
    end
end

def armV7LowerMalformedImmediates(list)
    newList = []
    list.each {
        | node |
        if node.is_a? Instruction
            case node.opcode
            when "move"
                newList << node
            when "addi", "addp", "addis", "subi", "subp", "subis"
                if node.operands[0].is_a? Immediate and
                        node.operands[0].value < 0 and
                        node.operands[0].value >= -255 and
                        node.operands.size == 2
                    if node.opcode =~ /add/
                        newOpcode = "sub" + node.opcode[-1..-1]
                    else
                        newOpcode = "add" + node.opcode[-1..-1]
                    end
                    newList << Instruction.new(node.codeOrigin, newOpcode,
                                               [Immediate.new(node.codeOrigin, -node.operands[0].value)] + node.operands[1..-1])
                else
                    newList << node.armV7LowerMalformedImmediatesRecurse(newList)
                end
            when "muli", "mulp"
                if node.operands[0].is_a? Immediate
                    tmp = Tmp.new(node.codeOrigin, :gpr)
                    newList << Instruction.new(node.codeOrigin, "move", [node.operands[0], tmp])
                    newList << Instruction.new(node.codeOrigin, "muli", [tmp] + node.operands[1..-1])
                else
                    newList << node.armV7LowerMalformedImmediatesRecurse(newList)
                end
            else
                newList << node.armV7LowerMalformedImmediatesRecurse(newList)
            end
        else
            newList << node
        end
    }
    newList
end

#
# Lowering of misplaced addresses. For example:
#
#     addi foo, [bar]
#
# will become:
#
#     loadi [bar], tmp
#     addi foo, tmp
#     storei tmp, [bar]
#
# Another example:
#
#     addi [foo], bar
#
# will become:
#
#     loadi [foo], tmp
#     addi tmp, bar
#

def armV7AsRegister(preList, postList, operand, suffix, needStore)
    return operand unless operand.address?

    tmp = Tmp.new(operand.codeOrigin, if suffix == "d" then :fpr else :gpr end)
    preList << Instruction.new(operand.codeOrigin, "load" + suffix, [operand, tmp])
    if needStore
        postList << Instruction.new(operand.codeOrigin, "store" + suffix, [tmp, operand])
    end
    tmp
end

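# Converts every address operand into a register, loading it before the
# instruction; only the last operand, which is the destination by offlineasm
# convention, is stored back afterwards.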
def armV7AsRegisters(preList, postList, operands, suffix)
    newOperands = []
    operands.each_with_index {
        | operand, index |
        newOperands << armV7AsRegister(preList, postList, operand, suffix, index == operands.size - 1)
    }
    newOperands
end

def armV7LowerMisplacedAddresses(list)
    newList = []
    list.each {
        | node |
        if node.is_a? Instruction
            postInstructions = []
            case node.opcode
            when "addi", "addp", "addis", "andi", "andp", "lshifti", "lshiftp", "muli", "mulp", "negi",
                "negp", "noti", "ori", "oris", "orp", "rshifti", "urshifti", "rshiftp", "urshiftp", "subi",
                "subp", "subis", "xori", "xorp", /^bi/, /^bp/, /^bti/, /^btp/, /^ci/, /^cp/, /^ti/
                newList << Instruction.new(node.codeOrigin,
                                           node.opcode,
                                           armV7AsRegisters(newList, postInstructions, node.operands, "i"))
            when "bbeq", "bbneq", "bba", "bbaeq", "bbb", "bbbeq", "btbo", "btbz", "btbnz", "tbz", "tbnz",
                "tbo", "cbeq", "cbneq", "cba", "cbaeq", "cbb", "cbbeq"
                newList << Instruction.new(node.codeOrigin,
                                           node.opcode,
                                           armV7AsRegisters(newList, postInstructions, node.operands, "b"))
            when "bbgt", "bbgteq", "bblt", "bblteq", "btbs", "tbs", "cbgt", "cbgteq", "cblt", "cblteq"
                newList << Instruction.new(node.codeOrigin,
                                           node.opcode,
                                           armV7AsRegisters(newList, postInstructions, node.operands, "bs"))
            when "addd", "divd", "subd", "muld", "sqrtd", /^bd/
                newList << Instruction.new(node.codeOrigin,
                                           node.opcode,
                                           armV7AsRegisters(newList, postInstructions, node.operands, "d"))
            when "jmp", "call"
                newList << Instruction.new(node.codeOrigin,
                                           node.opcode,
                                           [armV7AsRegister(newList, postInstructions, node.operands[0], "p", false)])
            else
                newList << node
            end
            newList += postInstructions
        else
            newList << node
        end
    }
    newList
end

#
# Lowering of register reuse in compare instructions. For example:
#
#     cieq t0, t1, t0
#
# will become:
#
#     move t0, tmp
#     cieq tmp, t1, t0
#
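# The copy is needed because the compare is emitted as a "movs dest, #0"
# followed by a conditional "mov dest, #1", so a destination that aliases a
# source would be clobbered before the cmp reads it.
#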

def armV7LowerRegisterReuse(list)
    newList = []
    list.each {
        | node |
        if node.is_a? Instruction
            case node.opcode
            when "cieq", "cineq", "cia", "ciaeq", "cib", "cibeq", "cigt", "cigteq", "cilt", "cilteq",
                "cpeq", "cpneq", "cpa", "cpaeq", "cpb", "cpbeq", "cpgt", "cpgteq", "cplt", "cplteq",
                "tio", "tis", "tiz", "tinz", "tbo", "tbs", "tbz", "tbnz", "tpo", "tps", "tpz", "tpnz",
                "cbeq", "cbneq", "cba", "cbaeq", "cbb", "cbbeq", "cbgt", "cbgteq", "cblt", "cblteq"
                if node.operands.size == 2
                    if node.operands[0] == node.operands[1]
                        tmp = Tmp.new(node.codeOrigin, :gpr)
                        newList << Instruction.new(node.codeOrigin, "move", [node.operands[0], tmp])
                        newList << Instruction.new(node.codeOrigin, node.opcode, [tmp, node.operands[1]])
                    else
                        newList << node
                    end
                else
                    raise "Wrong number of arguments at #{node.codeOriginString}" unless node.operands.size == 3
                    if node.operands[0] == node.operands[2]
                        tmp = Tmp.new(node.codeOrigin, :gpr)
                        newList << Instruction.new(node.codeOrigin, "move", [node.operands[0], tmp])
                        newList << Instruction.new(node.codeOrigin, node.opcode, [tmp, node.operands[1], node.operands[2]])
                    elsif node.operands[1] == node.operands[2]
                        tmp = Tmp.new(node.codeOrigin, :gpr)
                        newList << Instruction.new(node.codeOrigin, "move", [node.operands[1], tmp])
                        newList << Instruction.new(node.codeOrigin, node.opcode, [node.operands[0], tmp, node.operands[2]])
                    else
                        newList << node
                    end
                end
            else
                newList << node
            end
        else
            newList << node
        end
    }
    newList
end

#
# Lea support.
#

class Address
    def armV7EmitLea(destination)
        if destination == base
            $asm.puts "adds #{destination.armV7Operand}, \##{offset.value}"
        else
            $asm.puts "adds #{destination.armV7Operand}, #{base.armV7Operand}, \##{offset.value}"
        end
    end
end

class BaseIndex
    def armV7EmitLea(destination)
        raise "Malformed BaseIndex, offset should be zero at #{codeOriginString}" unless offset.value == 0
        $asm.puts "add.w #{destination.armV7Operand}, #{base.armV7Operand}, #{index.armV7Operand}, lsl \##{scaleShift}"
    end
end

# FIXME: we could support AbsoluteAddress for lea, but we don't.

#
# Actual lowering code follows.
#

class Sequence
    def getModifiedListARMv7
        myList = @list

        # Verify that we will only see instructions and labels.
        myList.each {
            | node |
            unless node.is_a? Instruction or
                    node.is_a? Label or
                    node.is_a? LocalLabel or
                    node.is_a? Skip
                raise "Unexpected #{node.inspect} at #{node.codeOrigin}"
            end
        }

        myList = armV7LowerBranchOps(myList)
        myList = armV7LowerShiftOps(myList)
        myList = armV7LowerMalformedAddresses(myList)
        myList = armV7LowerMalformedAddressesDouble(myList)
        myList = armV7LowerMisplacedImmediates(myList)
        myList = armV7LowerMalformedImmediates(myList)
        myList = armV7LowerMisplacedAddresses(myList)
        myList = armV7LowerRegisterReuse(myList)
        myList = assignRegistersToTemporaries(myList, :gpr, ARMv7_EXTRA_GPRS)
        myList = assignRegistersToTemporaries(myList, :fpr, ARMv7_EXTRA_FPRS)

        return myList
    end
end

def armV7Operands(operands)
    operands.map{|v| v.armV7Operand}.join(", ")
end

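# offlineasm instructions put the destination last, while ARM assembly puts it
# first, so most emitters below flip the operand order.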
def armV7FlippedOperands(operands)
    armV7Operands([operands[-1]] + operands[0..-2])
end

def emitArmV7Compact(opcode2, opcode3, operands)
    if operands.size == 3
        $asm.puts "#{opcode3} #{armV7FlippedOperands(operands)}"
    else
        raise unless operands.size == 2
        raise unless operands[1].is_a? RegisterID
        if operands[0].is_a? Immediate
            $asm.puts "#{opcode3} #{operands[1].armV7Operand}, #{operands[1].armV7Operand}, #{operands[0].armV7Operand}"
        else
            $asm.puts "#{opcode2} #{armV7FlippedOperands(operands)}"
        end
    end
end

def emitArmV7(opcode, operands)
    if operands.size == 3
        $asm.puts "#{opcode} #{armV7FlippedOperands(operands)}"
    else
        raise unless operands.size == 2
        $asm.puts "#{opcode} #{operands[1].armV7Operand}, #{operands[1].armV7Operand}, #{operands[0].armV7Operand}"
    end
end

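# Double-precision compare-and-branch: vcmpe.f64 sets the FPSCR flags, and
# vmrs copies them into APSR so that an ordinary conditional branch can be
# used on the result.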
def emitArmV7DoubleBranch(branchOpcode, operands)
    $asm.puts "vcmpe.f64 #{armV7Operands(operands[0..1])}"
    $asm.puts "vmrs apsr_nzcv, fpscr"
    $asm.puts "#{branchOpcode} #{operands[2].asmLabel}"
end

def emitArmV7Test(operands)
    value = operands[0]
    case operands.size
    when 2
        mask = Immediate.new(value.codeOrigin, -1)
    when 3
        mask = operands[1]
    else
        raise "Expected 2 or 3 operands but got #{operands.size} at #{value.codeOriginString}"
    end

    if mask.is_a? Immediate and mask.value == -1
        $asm.puts "tst #{value.armV7Operand}, #{value.armV7Operand}"
    elsif mask.is_a? Immediate
        $asm.puts "tst.w #{value.armV7Operand}, #{mask.armV7Operand}"
    else
        $asm.puts "tst #{value.armV7Operand}, #{mask.armV7Operand}"
    end
end

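# Compare-and-set: materialize 0 in the destination, compare, then use a
# Thumb-2 IT (if-then) block so the following conditional mov of 1 is legal in
# Thumb-2 code.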
def emitArmV7Compare(operands, code)
    $asm.puts "movs #{operands[2].armV7Operand}, \#0"
    $asm.puts "cmp #{operands[0].armV7Operand}, #{operands[1].armV7Operand}"
    $asm.puts "it #{code}"
    $asm.puts "mov#{code} #{operands[2].armV7Operand}, \#1"
end

def emitArmV7TestSet(operands, code)
    $asm.puts "movs #{operands[-1].armV7Operand}, \#0"
    emitArmV7Test(operands)
    $asm.puts "it #{code}"
    $asm.puts "mov#{code} #{operands[-1].armV7Operand}, \#1"
end

class Instruction
    def lowerARMv7
        $asm.comment codeOriginString
        case opcode
        when "addi", "addp", "addis"
            if opcode == "addis"
                suffix = "s"
            else
                suffix = ""
            end
            if operands.size == 3 and operands[0].is_a? Immediate
                raise unless operands[1].is_a? RegisterID
                raise unless operands[2].is_a? RegisterID
                if operands[0].value == 0 and suffix.empty?
                    unless operands[1] == operands[2]
                        $asm.puts "mov #{operands[2].armV7Operand}, #{operands[1].armV7Operand}"
                    end
                else
                    $asm.puts "adds #{operands[2].armV7Operand}, #{operands[1].armV7Operand}, #{operands[0].armV7Operand}"
                end
            elsif operands.size == 3 and operands[0].is_a? RegisterID
                raise unless operands[1].is_a? RegisterID
                raise unless operands[2].is_a? RegisterID
                $asm.puts "adds #{armV7FlippedOperands(operands)}"
            else
                if operands[0].is_a? Immediate
                    unless Immediate.new(nil, 0) == operands[0]
                        $asm.puts "adds #{armV7FlippedOperands(operands)}"
                    end
                else
                    $asm.puts "add#{suffix} #{armV7FlippedOperands(operands)}"
                end
            end
        when "andi", "andp"
            emitArmV7Compact("ands", "and", operands)
        when "ori", "orp"
            emitArmV7Compact("orrs", "orr", operands)
        when "oris"
            emitArmV7Compact("orrs", "orrs", operands)
        when "xori", "xorp"
            emitArmV7Compact("eors", "eor", operands)
        when "lshifti", "lshiftp"
            emitArmV7Compact("lsls", "lsls", operands)
        when "rshifti", "rshiftp"
            emitArmV7Compact("asrs", "asrs", operands)
        when "urshifti", "urshiftp"
            emitArmV7Compact("lsrs", "lsrs", operands)
        when "muli", "mulp"
            emitArmV7("mul", operands)
        when "subi", "subp", "subis"
            emitArmV7Compact("subs", "subs", operands)
        when "negi", "negp"
            $asm.puts "rsbs #{operands[0].armV7Operand}, #{operands[0].armV7Operand}, \#0"
        when "noti"
            $asm.puts "mvns #{operands[0].armV7Operand}, #{operands[0].armV7Operand}"
        when "loadi", "loadis", "loadp"
            $asm.puts "ldr #{armV7FlippedOperands(operands)}"
        when "storei", "storep"
            $asm.puts "str #{armV7Operands(operands)}"
        when "loadb"
            $asm.puts "ldrb #{armV7FlippedOperands(operands)}"
        when "loadbs"
            $asm.puts "ldrsb.w #{armV7FlippedOperands(operands)}"
        when "storeb"
            $asm.puts "strb #{armV7Operands(operands)}"
        when "loadh"
            $asm.puts "ldrh #{armV7FlippedOperands(operands)}"
        when "loadhs"
            $asm.puts "ldrsh.w #{armV7FlippedOperands(operands)}"
        when "storeh"
            $asm.puts "strh #{armV7Operands(operands)}"
        when "loadd"
            $asm.puts "vldr.64 #{armV7FlippedOperands(operands)}"
        when "stored"
            $asm.puts "vstr.64 #{armV7Operands(operands)}"
        when "addd"
            emitArmV7("vadd.f64", operands)
        when "divd"
            emitArmV7("vdiv.f64", operands)
        when "subd"
            emitArmV7("vsub.f64", operands)
        when "muld"
            emitArmV7("vmul.f64", operands)
        when "sqrtd"
            $asm.puts "vsqrt.f64 #{armV7FlippedOperands(operands)}"
        when "ci2d"
            $asm.puts "vmov #{operands[1].armV7Single}, #{operands[0].armV7Operand}"
            $asm.puts "vcvt.f64.s32 #{operands[1].armV7Operand}, #{operands[1].armV7Single}"
821 | when "bdeq" | |
822 | emitArmV7DoubleBranch("beq", operands) | |
823 | when "bdneq" | |
824 | $asm.puts "vcmpe.f64 #{armV7Operands(operands[0..1])}" | |
825 | $asm.puts "vmrs apsr_nzcv, fpscr" | |
826 | isUnordered = LocalLabel.unique("bdneq") | |
827 | $asm.puts "bvs #{LabelReference.new(codeOrigin, isUnordered).asmLabel}" | |
828 | $asm.puts "bne #{operands[2].asmLabel}" | |
829 | isUnordered.lower("ARMv7") | |
830 | when "bdgt" | |
831 | emitArmV7DoubleBranch("bgt", operands) | |
832 | when "bdgteq" | |
833 | emitArmV7DoubleBranch("bge", operands) | |
834 | when "bdlt" | |
835 | emitArmV7DoubleBranch("bmi", operands) | |
836 | when "bdlteq" | |
837 | emitArmV7DoubleBranch("bls", operands) | |
838 | when "bdequn" | |
839 | $asm.puts "vcmpe.f64 #{armV7Operands(operands[0..1])}" | |
840 | $asm.puts "vmrs apsr_nzcv, fpscr" | |
841 | $asm.puts "bvs #{operands[2].asmLabel}" | |
842 | $asm.puts "beq #{operands[2].asmLabel}" | |
843 | when "bdnequn" | |
844 | emitArmV7DoubleBranch("bne", operands) | |
845 | when "bdgtun" | |
846 | emitArmV7DoubleBranch("bhi", operands) | |
847 | when "bdgtequn" | |
848 | emitArmV7DoubleBranch("bpl", operands) | |
849 | when "bdltun" | |
850 | emitArmV7DoubleBranch("blt", operands) | |
851 | when "bdltequn" | |
852 | emitArmV7DoubleBranch("ble", operands) | |
853 | when "btd2i" | |
854 | # FIXME: may be a good idea to just get rid of this instruction, since the interpreter | |
855 | # currently does not use it. | |
856 | raise "ARMv7 does not support this opcode yet, #{codeOrigin}" | |
857 | when "td2i" | |
858 | $asm.puts "vcvt.s32.f64 #{ARMv7_SCRATCH_FPR.armV7Single}, #{operands[0].armV7Operand}" | |
859 | $asm.puts "vmov #{operands[1].armV7Operand}, #{ARMv7_SCRATCH_FPR.armV7Single}" | |
860 | when "bcd2i" | |
861 | $asm.puts "vcvt.s32.f64 #{ARMv7_SCRATCH_FPR.armV7Single}, #{operands[0].armV7Operand}" | |
862 | $asm.puts "vmov #{operands[1].armV7Operand}, #{ARMv7_SCRATCH_FPR.armV7Single}" | |
863 | $asm.puts "vcvt.f64.s32 #{ARMv7_SCRATCH_FPR.armV7Operand}, #{ARMv7_SCRATCH_FPR.armV7Single}" | |
864 | emitArmV7DoubleBranch("bne", [ARMv7_SCRATCH_FPR, operands[0], operands[2]]) | |
865 | $asm.puts "tst #{operands[1].armV7Operand}, #{operands[1].armV7Operand}" | |
866 | $asm.puts "beq #{operands[2].asmLabel}" | |
867 | when "movdz" | |
868 | # FIXME: either support this or remove it. | |
869 | raise "ARMv7 does not support this opcode yet, #{codeOrigin}" | |
870 | when "pop" | |
871 | $asm.puts "pop #{operands[0].armV7Operand}" | |
872 | when "push" | |
873 | $asm.puts "push #{operands[0].armV7Operand}" | |
874 | when "move", "sxi2p", "zxi2p" | |
875 | if operands[0].is_a? Immediate | |
876 | armV7MoveImmediate(operands[0].value, operands[1]) | |
877 | else | |
878 | $asm.puts "mov #{armV7FlippedOperands(operands)}" | |
879 | end | |
880 | when "nop" | |
881 | $asm.puts "nop" | |
882 | when "bieq", "bpeq", "bbeq" | |
883 | if Immediate.new(nil, 0) == operands[0] | |
884 | $asm.puts "tst #{operands[1].armV7Operand}, #{operands[1].armV7Operand}" | |
885 | elsif Immediate.new(nil, 0) == operands[1] | |
886 | $asm.puts "tst #{operands[0].armV7Operand}, #{operands[0].armV7Operand}" | |
887 | else | |
888 | $asm.puts "cmp #{armV7Operands(operands[0..1])}" | |
889 | end | |
890 | $asm.puts "beq #{operands[2].asmLabel}" | |
891 | when "bineq", "bpneq", "bbneq" | |
892 | if Immediate.new(nil, 0) == operands[0] | |
893 | $asm.puts "tst #{operands[1].armV7Operand}, #{operands[1].armV7Operand}" | |
894 | elsif Immediate.new(nil, 0) == operands[1] | |
895 | $asm.puts "tst #{operands[0].armV7Operand}, #{operands[0].armV7Operand}" | |
896 | else | |
897 | $asm.puts "cmp #{armV7Operands(operands[0..1])}" | |
898 | end | |
899 | $asm.puts "bne #{operands[2].asmLabel}" | |
900 | when "bia", "bpa", "bba" | |
901 | $asm.puts "cmp #{armV7Operands(operands[0..1])}" | |
902 | $asm.puts "bhi #{operands[2].asmLabel}" | |
903 | when "biaeq", "bpaeq", "bbaeq" | |
904 | $asm.puts "cmp #{armV7Operands(operands[0..1])}" | |
905 | $asm.puts "bhs #{operands[2].asmLabel}" | |
906 | when "bib", "bpb", "bbb" | |
907 | $asm.puts "cmp #{armV7Operands(operands[0..1])}" | |
908 | $asm.puts "blo #{operands[2].asmLabel}" | |
909 | when "bibeq", "bpbeq", "bbbeq" | |
910 | $asm.puts "cmp #{armV7Operands(operands[0..1])}" | |
911 | $asm.puts "bls #{operands[2].asmLabel}" | |
912 | when "bigt", "bpgt", "bbgt" | |
913 | $asm.puts "cmp #{armV7Operands(operands[0..1])}" | |
914 | $asm.puts "bgt #{operands[2].asmLabel}" | |
915 | when "bigteq", "bpgteq", "bbgteq" | |
916 | $asm.puts "cmp #{armV7Operands(operands[0..1])}" | |
917 | $asm.puts "bge #{operands[2].asmLabel}" | |
918 | when "bilt", "bplt", "bblt" | |
919 | $asm.puts "cmp #{armV7Operands(operands[0..1])}" | |
920 | $asm.puts "blt #{operands[2].asmLabel}" | |
921 | when "bilteq", "bplteq", "bblteq" | |
922 | $asm.puts "cmp #{armV7Operands(operands[0..1])}" | |
923 | $asm.puts "ble #{operands[2].asmLabel}" | |
924 | when "btiz", "btpz", "btbz" | |
925 | emitArmV7Test(operands) | |
926 | $asm.puts "beq #{operands[-1].asmLabel}" | |
927 | when "btinz", "btpnz", "btbnz" | |
928 | emitArmV7Test(operands) | |
929 | $asm.puts "bne #{operands[-1].asmLabel}" | |
930 | when "btio", "btpo", "btbo" | |
931 | emitArmV7Test(operands) | |
932 | $asm.puts "bvs #{operands[-1].asmLabel}" | |
933 | when "btis", "btps", "btbs" | |
934 | emitArmV7Test(operands) | |
935 | $asm.puts "bmi #{operands[-1].asmLabel}" | |
936 | when "jmp" | |
937 | if operands[0].label? | |
938 | $asm.puts "b #{operands[0].asmLabel}" | |
939 | else | |
940 | $asm.puts "mov pc, #{operands[0].armV7Operand}" | |
941 | end | |
942 | when "call" | |
943 | if operands[0].label? | |
944 | $asm.puts "blx #{operands[0].asmLabel}" | |
945 | else | |
946 | $asm.puts "blx #{operands[0].armV7Operand}" | |
947 | end | |
948 | when "break" | |
949 | $asm.puts "bkpt #0" | |
950 | when "ret" | |
951 | $asm.puts "bx lr" | |
952 | when "cieq", "cpeq", "cbeq" | |
953 | emitArmV7Compare(operands, "eq") | |
954 | when "cineq", "cpneq", "cbneq" | |
955 | emitArmV7Compare(operands, "ne") | |
956 | when "cia", "cpa", "cba" | |
957 | emitArmV7Compare(operands, "hi") | |
958 | when "ciaeq", "cpaeq", "cbaeq" | |
959 | emitArmV7Compare(operands, "hs") | |
960 | when "cib", "cpb", "cbb" | |
961 | emitArmV7Compare(operands, "lo") | |
962 | when "cibeq", "cpbeq", "cbbeq" | |
963 | emitArmV7Compare(operands, "ls") | |
964 | when "cigt", "cpgt", "cbgt" | |
965 | emitArmV7Compare(operands, "gt") | |
966 | when "cigteq", "cpgteq", "cbgteq" | |
967 | emitArmV7Compare(operands, "ge") | |
968 | when "cilt", "cplt", "cblt" | |
969 | emitArmV7Compare(operands, "lt") | |
970 | when "cilteq", "cplteq", "cblteq" | |
971 | emitArmV7Compare(operands, "le") | |
972 | when "tio", "tbo", "tpo" | |
973 | emitArmV7TestSet(operands, "vs") | |
974 | when "tis", "tbs", "tps" | |
975 | emitArmV7TestSet(operands, "mi") | |
976 | when "tiz", "tbz", "tpz" | |
977 | emitArmV7TestSet(operands, "eq") | |
978 | when "tinz", "tbnz", "tpnz" | |
979 | emitArmV7TestSet(operands, "ne") | |
980 | when "peek" | |
981 | $asm.puts "ldr #{operands[1].armV7Operand}, [sp, \##{operands[0].value * 4}]" | |
982 | when "poke" | |
983 | $asm.puts "str #{operands[1].armV7Operand}, [sp, \##{operands[0].value * 4}]" | |
984 | when "fii2d" | |
985 | $asm.puts "vmov #{operands[2].armV7Operand}, #{operands[0].armV7Operand}, #{operands[1].armV7Operand}" | |
986 | when "fd2ii" | |
987 | $asm.puts "vmov #{operands[1].armV7Operand}, #{operands[2].armV7Operand}, #{operands[0].armV7Operand}" | |
988 | when "bo" | |
989 | $asm.puts "bvs #{operands[0].asmLabel}" | |
990 | when "bs" | |
991 | $asm.puts "bmi #{operands[0].asmLabel}" | |
992 | when "bz" | |
993 | $asm.puts "beq #{operands[0].asmLabel}" | |
994 | when "bnz" | |
995 | $asm.puts "bne #{operands[0].asmLabel}" | |
996 | when "leai", "leap" | |
997 | operands[0].armV7EmitLea(operands[1]) | |
998 | when "smulli" | |
999 | raise "Wrong number of arguments to smull in #{self.inspect} at #{codeOriginString}" unless operands.length == 4 | |
1000 | $asm.puts "smull #{operands[2].armV7Operand}, #{operands[3].armV7Operand}, #{operands[0].armV7Operand}, #{operands[1].armV7Operand}" | |
1001 | else | |
1002 | raise "Unhandled opcode #{opcode} at #{codeOriginString}" | |
1003 | end | |
1004 | end | |
1005 | end | |
1006 |