blob: 413920f2612b601554c8177b5209cfd9b6a05c83 [file] [log] [blame]
/*
 * Optimizations for Tiny Code Generator for QEMU
 *
 * Copyright (c) 2010 Samsung Electronics.
 * Contributed by Kirill Batuzov <batuzovk@ispras.ru>
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
25
26#include "config.h"
27
28#include <stdlib.h>
29#include <stdio.h>
30
31#include "qemu-common.h"
32#include "tcg-op.h"
33
/* Expand to a pair of case labels covering both the 32-bit and the 64-bit
   variant of opcode X, e.g. CASE_OP_32_64(add) yields
   "case INDEX_op_add_i32: case INDEX_op_add_i64".  */
#define CASE_OP_32_64(x)                                \
        glue(glue(case INDEX_op_, x), _i32):            \
        glue(glue(case INDEX_op_, x), _i64)
Kirill Batuzov8f2e8c02011-07-07 16:37:12 +040037
/* What the optimizer currently knows about a temp's value.  */
typedef enum {
    TCG_TEMP_UNDEF = 0,     /* Nothing known about the temp.  */
    TCG_TEMP_CONST,         /* Temp holds the known constant in .val.  */
    TCG_TEMP_COPY,          /* Temp is on a circular list of known copies.  */
} tcg_temp_state;
43
/* Per-temp optimizer state; indexed by temp number in temps[].  */
struct tcg_temp_info {
    tcg_temp_state state;
    uint16_t prev_copy;       /* Previous temp on the circular copy list.  */
    uint16_t next_copy;       /* Next temp on the circular copy list.  */
    tcg_target_ulong val;     /* Constant value, valid when state == TCG_TEMP_CONST.  */
    tcg_target_ulong mask;    /* Bits that may be nonzero; a clear bit is
                                 known to be zero in the temp's value.  */
};
51
/* Optimizer state for every temp of the current translation block.  */
static struct tcg_temp_info temps[TCG_MAX_TEMPS];
/* Bitmap of temps whose temps[] entry has been initialized; entries are
   initialized lazily by init_temp_info().  */
static TCGTempSet temps_used;
Kirill Batuzov22613af2011-07-07 16:37:13 +040054
Aurelien Jarnoe590d4e2012-09-11 12:31:21 +020055/* Reset TEMP's state to TCG_TEMP_UNDEF. If TEMP only had one copy, remove
56 the copy flag from the left temp. */
57static void reset_temp(TCGArg temp)
Kirill Batuzov22613af2011-07-07 16:37:13 +040058{
Aurelien Jarnoe590d4e2012-09-11 12:31:21 +020059 if (temps[temp].state == TCG_TEMP_COPY) {
60 if (temps[temp].prev_copy == temps[temp].next_copy) {
61 temps[temps[temp].next_copy].state = TCG_TEMP_UNDEF;
62 } else {
63 temps[temps[temp].next_copy].prev_copy = temps[temp].prev_copy;
64 temps[temps[temp].prev_copy].next_copy = temps[temp].next_copy;
Kirill Batuzov22613af2011-07-07 16:37:13 +040065 }
Kirill Batuzov22613af2011-07-07 16:37:13 +040066 }
Aurelien Jarno48b56ce2012-09-10 23:51:42 +020067 temps[temp].state = TCG_TEMP_UNDEF;
Paolo Bonzini3a9d8b12013-01-11 15:42:52 -080068 temps[temp].mask = -1;
Kirill Batuzov22613af2011-07-07 16:37:13 +040069}
70
/* Reset all temporaries, given that there are NB_TEMPS of them.  Clearing
   the "used" bitmap is enough: init_temp_info() re-initializes each entry
   the next time the temp is seen.  */
static void reset_all_temps(int nb_temps)
{
    bitmap_zero(temps_used.l, nb_temps);
}
76
77/* Initialize and activate a temporary. */
78static void init_temp_info(TCGArg temp)
79{
80 if (!test_bit(temp, temps_used.l)) {
81 temps[temp].state = TCG_TEMP_UNDEF;
82 temps[temp].mask = -1;
83 set_bit(temp, temps_used.l);
84 }
85}
86
/* Allocate a fresh op with room for NARGS arguments and splice it into
   the doubly linked op list immediately before OLD_OP.  Returns the new
   op; the caller must fill in its argument slots.  The list links are
   buffer indices, with a negative prev meaning "head of list".  */
static TCGOp *insert_op_before(TCGContext *s, TCGOp *old_op,
                               TCGOpcode opc, int nargs)
{
    int oi = s->gen_next_op_idx;
    int pi = s->gen_next_parm_idx;
    int prev = old_op->prev;
    int next = old_op - s->gen_op_buf;
    TCGOp *new_op;

    tcg_debug_assert(oi < OPC_BUF_SIZE);
    tcg_debug_assert(pi + nargs <= OPPARAM_BUF_SIZE);
    s->gen_next_op_idx = oi + 1;
    s->gen_next_parm_idx = pi + nargs;

    new_op = &s->gen_op_buf[oi];
    *new_op = (TCGOp){
        .opc = opc,
        .args = pi,
        .prev = prev,
        .next = next
    };
    /* Point the former predecessor of OLD_OP (or the list head, if
       OLD_OP was first) at the new op.  */
    if (prev >= 0) {
        s->gen_op_buf[prev].next = oi;
    } else {
        s->gen_first_op_idx = oi;
    }
    old_op->prev = oi;

    return new_op;
}
117
Blue Swirlfe0de7a2011-07-30 19:18:32 +0000118static int op_bits(TCGOpcode op)
Kirill Batuzov22613af2011-07-07 16:37:13 +0400119{
Richard Henderson8399ad52011-08-17 14:11:45 -0700120 const TCGOpDef *def = &tcg_op_defs[op];
121 return def->flags & TCG_OPF_64BIT ? 64 : 32;
Kirill Batuzov22613af2011-07-07 16:37:13 +0400122}
123
Richard Hendersona62f6f52014-05-22 10:59:12 -0700124static TCGOpcode op_to_mov(TCGOpcode op)
125{
126 switch (op_bits(op)) {
127 case 32:
128 return INDEX_op_mov_i32;
129 case 64:
130 return INDEX_op_mov_i64;
131 default:
132 fprintf(stderr, "op_to_mov: unexpected return value of "
133 "function op_bits.\n");
134 tcg_abort();
135 }
136}
137
Blue Swirlfe0de7a2011-07-30 19:18:32 +0000138static TCGOpcode op_to_movi(TCGOpcode op)
Kirill Batuzov22613af2011-07-07 16:37:13 +0400139{
140 switch (op_bits(op)) {
141 case 32:
142 return INDEX_op_movi_i32;
Kirill Batuzov22613af2011-07-07 16:37:13 +0400143 case 64:
144 return INDEX_op_movi_i64;
Kirill Batuzov22613af2011-07-07 16:37:13 +0400145 default:
146 fprintf(stderr, "op_to_movi: unexpected return value of "
147 "function op_bits.\n");
148 tcg_abort();
149 }
150}
151
Aurelien Jarnoe590d4e2012-09-11 12:31:21 +0200152static TCGArg find_better_copy(TCGContext *s, TCGArg temp)
153{
154 TCGArg i;
155
156 /* If this is already a global, we can't do better. */
157 if (temp < s->nb_globals) {
158 return temp;
159 }
160
161 /* Search for a global first. */
162 for (i = temps[temp].next_copy ; i != temp ; i = temps[i].next_copy) {
163 if (i < s->nb_globals) {
164 return i;
165 }
166 }
167
168 /* If it is a temp, search for a temp local. */
169 if (!s->temps[temp].temp_local) {
170 for (i = temps[temp].next_copy ; i != temp ; i = temps[i].next_copy) {
171 if (s->temps[i].temp_local) {
172 return i;
173 }
174 }
175 }
176
177 /* Failure to find a better representation, return the same temp. */
178 return temp;
179}
180
181static bool temps_are_copies(TCGArg arg1, TCGArg arg2)
182{
183 TCGArg i;
184
185 if (arg1 == arg2) {
186 return true;
187 }
188
189 if (temps[arg1].state != TCG_TEMP_COPY
190 || temps[arg2].state != TCG_TEMP_COPY) {
191 return false;
192 }
193
194 for (i = temps[arg1].next_copy ; i != arg1 ; i = temps[i].next_copy) {
195 if (i == arg2) {
196 return true;
197 }
198 }
199
200 return false;
201}
202
/* Rewrite OP in place into "movi DST, VAL" and record DST as holding
   the constant VAL, updating its known-bits mask accordingly.  */
static void tcg_opt_gen_movi(TCGContext *s, TCGOp *op, TCGArg *args,
                             TCGArg dst, TCGArg val)
{
    TCGOpcode new_op = op_to_movi(op->opc);
    tcg_target_ulong mask;

    op->opc = new_op;

    /* Invalidate any prior knowledge (copy-list membership) of DST
       before recording the new constant.  */
    reset_temp(dst);
    temps[dst].state = TCG_TEMP_CONST;
    temps[dst].val = val;
    mask = val;
    if (TCG_TARGET_REG_BITS > 32 && new_op == INDEX_op_movi_i32) {
        /* High bits of the destination are now garbage. */
        mask |= ~0xffffffffull;
    }
    temps[dst].mask = mask;

    args[0] = dst;
    args[1] = val;
}
224
/* Rewrite OP in place into "mov DST, SRC".  Degenerates to removing the
   op entirely when DST and SRC are already copies, or to a movi when
   SRC holds a known constant.  Otherwise links DST onto SRC's copy list
   (when their backing types match) and propagates SRC's known-bits mask.  */
static void tcg_opt_gen_mov(TCGContext *s, TCGOp *op, TCGArg *args,
                            TCGArg dst, TCGArg src)
{
    /* Copying a value onto a temp that already holds it is a no-op.  */
    if (temps_are_copies(dst, src)) {
        tcg_op_remove(s, op);
        return;
    }

    /* A mov from a known constant is really a movi.  */
    if (temps[src].state == TCG_TEMP_CONST) {
        tcg_opt_gen_movi(s, op, args, dst, temps[src].val);
        return;
    }

    TCGOpcode new_op = op_to_mov(op->opc);
    tcg_target_ulong mask;

    op->opc = new_op;

    reset_temp(dst);
    mask = temps[src].mask;
    if (TCG_TARGET_REG_BITS > 32 && new_op == INDEX_op_mov_i32) {
        /* High bits of the destination are now garbage. */
        mask |= ~0xffffffffull;
    }
    temps[dst].mask = mask;

    assert(temps[src].state != TCG_TEMP_CONST);

    /* Only temps of the same backing type can be tracked as copies.  */
    if (s->temps[src].type == s->temps[dst].type) {
        if (temps[src].state != TCG_TEMP_COPY) {
            /* Start a new singleton copy list containing just SRC.  */
            temps[src].state = TCG_TEMP_COPY;
            temps[src].next_copy = src;
            temps[src].prev_copy = src;
        }
        /* Insert DST into the circular list right after SRC.  */
        temps[dst].state = TCG_TEMP_COPY;
        temps[dst].next_copy = temps[src].next_copy;
        temps[dst].prev_copy = src;
        temps[temps[dst].next_copy].prev_copy = dst;
        temps[src].next_copy = dst;
    }

    args[0] = dst;
    args[1] = src;
}
269
/* Evaluate opcode OP on constant operands X and Y and return the raw
   result.  Shift/rotate counts are masked to the operand width, matching
   target semantics.  The result of a 32-bit op is NOT truncated here;
   do_constant_folding() takes care of that.  Aborts on opcodes that are
   not constant-foldable.  */
static TCGArg do_constant_folding_2(TCGOpcode op, TCGArg x, TCGArg y)
{
    uint64_t l64, h64;

    switch (op) {
    CASE_OP_32_64(add):
        return x + y;

    CASE_OP_32_64(sub):
        return x - y;

    CASE_OP_32_64(mul):
        return x * y;

    CASE_OP_32_64(and):
        return x & y;

    CASE_OP_32_64(or):
        return x | y;

    CASE_OP_32_64(xor):
        return x ^ y;

    case INDEX_op_shl_i32:
        return (uint32_t)x << (y & 31);

    case INDEX_op_shl_i64:
        return (uint64_t)x << (y & 63);

    case INDEX_op_shr_i32:
        return (uint32_t)x >> (y & 31);

    /* trunc_shr_i32 is a 64-bit shift whose result is then truncated.  */
    case INDEX_op_trunc_shr_i32:
    case INDEX_op_shr_i64:
        return (uint64_t)x >> (y & 63);

    case INDEX_op_sar_i32:
        return (int32_t)x >> (y & 31);

    case INDEX_op_sar_i64:
        return (int64_t)x >> (y & 63);

    case INDEX_op_rotr_i32:
        return ror32(x, y & 31);

    case INDEX_op_rotr_i64:
        return ror64(x, y & 63);

    case INDEX_op_rotl_i32:
        return rol32(x, y & 31);

    case INDEX_op_rotl_i64:
        return rol64(x, y & 63);

    CASE_OP_32_64(not):
        return ~x;

    CASE_OP_32_64(neg):
        return -x;

    CASE_OP_32_64(andc):
        return x & ~y;

    CASE_OP_32_64(orc):
        return x | ~y;

    CASE_OP_32_64(eqv):
        return ~(x ^ y);

    CASE_OP_32_64(nand):
        return ~(x & y);

    CASE_OP_32_64(nor):
        return ~(x | y);

    CASE_OP_32_64(ext8s):
        return (int8_t)x;

    CASE_OP_32_64(ext16s):
        return (int16_t)x;

    CASE_OP_32_64(ext8u):
        return (uint8_t)x;

    CASE_OP_32_64(ext16u):
        return (uint16_t)x;

    case INDEX_op_ext32s_i64:
        return (int32_t)x;

    case INDEX_op_ext32u_i64:
        return (uint32_t)x;

    /* High half of a widening multiply.  */
    case INDEX_op_muluh_i32:
        return ((uint64_t)(uint32_t)x * (uint32_t)y) >> 32;
    case INDEX_op_mulsh_i32:
        return ((int64_t)(int32_t)x * (int32_t)y) >> 32;

    case INDEX_op_muluh_i64:
        mulu64(&l64, &h64, x, y);
        return h64;
    case INDEX_op_mulsh_i64:
        muls64(&l64, &h64, x, y);
        return h64;

    case INDEX_op_div_i32:
        /* Avoid crashing on divide by zero, otherwise undefined. */
        return (int32_t)x / ((int32_t)y ? : 1);
    case INDEX_op_divu_i32:
        return (uint32_t)x / ((uint32_t)y ? : 1);
    case INDEX_op_div_i64:
        return (int64_t)x / ((int64_t)y ? : 1);
    case INDEX_op_divu_i64:
        return (uint64_t)x / ((uint64_t)y ? : 1);

    case INDEX_op_rem_i32:
        return (int32_t)x % ((int32_t)y ? : 1);
    case INDEX_op_remu_i32:
        return (uint32_t)x % ((uint32_t)y ? : 1);
    case INDEX_op_rem_i64:
        return (int64_t)x % ((int64_t)y ? : 1);
    case INDEX_op_remu_i64:
        return (uint64_t)x % ((uint64_t)y ? : 1);

    default:
        fprintf(stderr,
                "Unrecognized operation %d in do_constant_folding.\n", op);
        tcg_abort();
    }
}
400
Blue Swirlfe0de7a2011-07-30 19:18:32 +0000401static TCGArg do_constant_folding(TCGOpcode op, TCGArg x, TCGArg y)
Kirill Batuzov53108fb2011-07-07 16:37:14 +0400402{
403 TCGArg res = do_constant_folding_2(op, x, y);
Kirill Batuzov53108fb2011-07-07 16:37:14 +0400404 if (op_bits(op) == 32) {
Aurelien Jarno29f3ff82015-07-10 18:03:31 +0200405 res = (int32_t)res;
Kirill Batuzov53108fb2011-07-07 16:37:14 +0400406 }
Kirill Batuzov53108fb2011-07-07 16:37:14 +0400407 return res;
408}
409
Richard Henderson9519da72012-10-02 11:32:26 -0700410static bool do_constant_folding_cond_32(uint32_t x, uint32_t y, TCGCond c)
411{
412 switch (c) {
413 case TCG_COND_EQ:
414 return x == y;
415 case TCG_COND_NE:
416 return x != y;
417 case TCG_COND_LT:
418 return (int32_t)x < (int32_t)y;
419 case TCG_COND_GE:
420 return (int32_t)x >= (int32_t)y;
421 case TCG_COND_LE:
422 return (int32_t)x <= (int32_t)y;
423 case TCG_COND_GT:
424 return (int32_t)x > (int32_t)y;
425 case TCG_COND_LTU:
426 return x < y;
427 case TCG_COND_GEU:
428 return x >= y;
429 case TCG_COND_LEU:
430 return x <= y;
431 case TCG_COND_GTU:
432 return x > y;
433 default:
434 tcg_abort();
435 }
436}
437
438static bool do_constant_folding_cond_64(uint64_t x, uint64_t y, TCGCond c)
439{
440 switch (c) {
441 case TCG_COND_EQ:
442 return x == y;
443 case TCG_COND_NE:
444 return x != y;
445 case TCG_COND_LT:
446 return (int64_t)x < (int64_t)y;
447 case TCG_COND_GE:
448 return (int64_t)x >= (int64_t)y;
449 case TCG_COND_LE:
450 return (int64_t)x <= (int64_t)y;
451 case TCG_COND_GT:
452 return (int64_t)x > (int64_t)y;
453 case TCG_COND_LTU:
454 return x < y;
455 case TCG_COND_GEU:
456 return x >= y;
457 case TCG_COND_LEU:
458 return x <= y;
459 case TCG_COND_GTU:
460 return x > y;
461 default:
462 tcg_abort();
463 }
464}
465
466static bool do_constant_folding_cond_eq(TCGCond c)
467{
468 switch (c) {
469 case TCG_COND_GT:
470 case TCG_COND_LTU:
471 case TCG_COND_LT:
472 case TCG_COND_GTU:
473 case TCG_COND_NE:
474 return 0;
475 case TCG_COND_GE:
476 case TCG_COND_GEU:
477 case TCG_COND_LE:
478 case TCG_COND_LEU:
479 case TCG_COND_EQ:
480 return 1;
481 default:
482 tcg_abort();
483 }
484}
485
Aurelien Jarnob336ceb2012-09-18 19:37:00 +0200486/* Return 2 if the condition can't be simplified, and the result
487 of the condition (0 or 1) if it can */
Aurelien Jarnof8dd19e2012-09-06 16:47:14 +0200488static TCGArg do_constant_folding_cond(TCGOpcode op, TCGArg x,
489 TCGArg y, TCGCond c)
490{
Aurelien Jarnob336ceb2012-09-18 19:37:00 +0200491 if (temps[x].state == TCG_TEMP_CONST && temps[y].state == TCG_TEMP_CONST) {
492 switch (op_bits(op)) {
493 case 32:
Richard Henderson9519da72012-10-02 11:32:26 -0700494 return do_constant_folding_cond_32(temps[x].val, temps[y].val, c);
Aurelien Jarnob336ceb2012-09-18 19:37:00 +0200495 case 64:
Richard Henderson9519da72012-10-02 11:32:26 -0700496 return do_constant_folding_cond_64(temps[x].val, temps[y].val, c);
497 default:
498 tcg_abort();
Aurelien Jarnof8dd19e2012-09-06 16:47:14 +0200499 }
Aurelien Jarnob336ceb2012-09-18 19:37:00 +0200500 } else if (temps_are_copies(x, y)) {
Richard Henderson9519da72012-10-02 11:32:26 -0700501 return do_constant_folding_cond_eq(c);
Aurelien Jarnob336ceb2012-09-18 19:37:00 +0200502 } else if (temps[y].state == TCG_TEMP_CONST && temps[y].val == 0) {
503 switch (c) {
504 case TCG_COND_LTU:
505 return 0;
506 case TCG_COND_GEU:
507 return 1;
508 default:
509 return 2;
510 }
511 } else {
512 return 2;
Aurelien Jarnof8dd19e2012-09-06 16:47:14 +0200513 }
Aurelien Jarnof8dd19e2012-09-06 16:47:14 +0200514}
515
/* Return 2 if the condition can't be simplified, and the result
   of the condition (0 or 1) if it can.  P1 and P2 each point to the
   (low, high) half-pair of a double-word comparison operand.  */
static TCGArg do_constant_folding_cond2(TCGArg *p1, TCGArg *p2, TCGCond c)
{
    TCGArg al = p1[0], ah = p1[1];
    TCGArg bl = p2[0], bh = p2[1];

    if (temps[bl].state == TCG_TEMP_CONST
        && temps[bh].state == TCG_TEMP_CONST) {
        uint64_t b = ((uint64_t)temps[bh].val << 32) | (uint32_t)temps[bl].val;

        if (temps[al].state == TCG_TEMP_CONST
            && temps[ah].state == TCG_TEMP_CONST) {
            /* Both sides fully constant: evaluate as 64-bit values.  */
            uint64_t a;
            a = ((uint64_t)temps[ah].val << 32) | (uint32_t)temps[al].val;
            return do_constant_folding_cond_64(a, b, c);
        }
        /* Unsigned comparison against the constant zero folds even
           when the first operand is unknown.  */
        if (b == 0) {
            switch (c) {
            case TCG_COND_LTU:
                return 0;
            case TCG_COND_GEU:
                return 1;
            default:
                break;
            }
        }
    }
    /* Both halves being known copies means the operands are equal.  */
    if (temps_are_copies(al, bl) && temps_are_copies(ah, bh)) {
        return do_constant_folding_cond_eq(c);
    }
    return 2;
}
549
Richard Henderson24c9ae42012-10-02 11:32:21 -0700550static bool swap_commutative(TCGArg dest, TCGArg *p1, TCGArg *p2)
551{
552 TCGArg a1 = *p1, a2 = *p2;
553 int sum = 0;
554 sum += temps[a1].state == TCG_TEMP_CONST;
555 sum -= temps[a2].state == TCG_TEMP_CONST;
556
557 /* Prefer the constant in second argument, and then the form
558 op a, a, b, which is better handled on non-RISC hosts. */
559 if (sum > 0 || (sum == 0 && dest == a2)) {
560 *p1 = a2;
561 *p2 = a1;
562 return true;
563 }
564 return false;
565}
566
Richard Henderson0bfcb862012-10-02 11:32:23 -0700567static bool swap_commutative2(TCGArg *p1, TCGArg *p2)
568{
569 int sum = 0;
570 sum += temps[p1[0]].state == TCG_TEMP_CONST;
571 sum += temps[p1[1]].state == TCG_TEMP_CONST;
572 sum -= temps[p2[0]].state == TCG_TEMP_CONST;
573 sum -= temps[p2[1]].state == TCG_TEMP_CONST;
574 if (sum > 0) {
575 TCGArg t;
576 t = p1[0], p1[0] = p2[0], p2[0] = t;
577 t = p1[1], p1[1] = p2[1], p2[1] = t;
578 return true;
579 }
580 return false;
581}
582
Kirill Batuzov22613af2011-07-07 16:37:13 +0400583/* Propagate constants and copies, fold constant expressions. */
Aurelien Jarno36e60ef2015-06-04 21:53:27 +0200584void tcg_optimize(TCGContext *s)
Kirill Batuzov8f2e8c02011-07-07 16:37:12 +0400585{
Richard Hendersonc45cb8b2014-09-19 13:49:15 -0700586 int oi, oi_next, nb_temps, nb_globals;
Richard Henderson5d8f5362012-09-21 10:13:38 -0700587
Kirill Batuzov22613af2011-07-07 16:37:13 +0400588 /* Array VALS has an element for each temp.
589 If this temp holds a constant then its value is kept in VALS' element.
Aurelien Jarnoe590d4e2012-09-11 12:31:21 +0200590 If this temp is a copy of other ones then the other copies are
591 available through the doubly linked circular list. */
Kirill Batuzov8f2e8c02011-07-07 16:37:12 +0400592
593 nb_temps = s->nb_temps;
594 nb_globals = s->nb_globals;
Paolo Bonzinid193a142013-01-11 15:42:51 -0800595 reset_all_temps(nb_temps);
Kirill Batuzov8f2e8c02011-07-07 16:37:12 +0400596
Richard Hendersonc45cb8b2014-09-19 13:49:15 -0700597 for (oi = s->gen_first_op_idx; oi >= 0; oi = oi_next) {
Richard Henderson24666ba2014-05-22 11:14:10 -0700598 tcg_target_ulong mask, partmask, affected;
Richard Hendersonc45cb8b2014-09-19 13:49:15 -0700599 int nb_oargs, nb_iargs, i;
Richard Hendersoncf066672014-03-22 20:06:52 -0700600 TCGArg tmp;
601
Richard Hendersonc45cb8b2014-09-19 13:49:15 -0700602 TCGOp * const op = &s->gen_op_buf[oi];
603 TCGArg * const args = &s->gen_opparam_buf[op->args];
604 TCGOpcode opc = op->opc;
605 const TCGOpDef *def = &tcg_op_defs[opc];
606
607 oi_next = op->next;
Aurelien Jarno1208d7d2015-07-27 12:41:44 +0200608
609 /* Count the arguments, and initialize the temps that are
610 going to be used */
Richard Hendersonc45cb8b2014-09-19 13:49:15 -0700611 if (opc == INDEX_op_call) {
612 nb_oargs = op->callo;
613 nb_iargs = op->calli;
Aurelien Jarno1208d7d2015-07-27 12:41:44 +0200614 for (i = 0; i < nb_oargs + nb_iargs; i++) {
615 tmp = args[i];
616 if (tmp != TCG_CALL_DUMMY_ARG) {
617 init_temp_info(tmp);
618 }
619 }
Aurelien Jarno1ff8c542012-09-11 16:18:49 +0200620 } else {
Richard Hendersoncf066672014-03-22 20:06:52 -0700621 nb_oargs = def->nb_oargs;
622 nb_iargs = def->nb_iargs;
Aurelien Jarno1208d7d2015-07-27 12:41:44 +0200623 for (i = 0; i < nb_oargs + nb_iargs; i++) {
624 init_temp_info(args[i]);
625 }
Richard Hendersoncf066672014-03-22 20:06:52 -0700626 }
627
628 /* Do copy propagation */
629 for (i = nb_oargs; i < nb_oargs + nb_iargs; i++) {
630 if (temps[args[i]].state == TCG_TEMP_COPY) {
631 args[i] = find_better_copy(s, args[i]);
Kirill Batuzov22613af2011-07-07 16:37:13 +0400632 }
633 }
634
Kirill Batuzov53108fb2011-07-07 16:37:14 +0400635 /* For commutative operations make constant second argument */
Richard Hendersonc45cb8b2014-09-19 13:49:15 -0700636 switch (opc) {
Kirill Batuzov53108fb2011-07-07 16:37:14 +0400637 CASE_OP_32_64(add):
638 CASE_OP_32_64(mul):
Kirill Batuzov9a810902011-07-07 16:37:15 +0400639 CASE_OP_32_64(and):
640 CASE_OP_32_64(or):
641 CASE_OP_32_64(xor):
Richard Hendersoncb25c802011-08-17 14:11:47 -0700642 CASE_OP_32_64(eqv):
643 CASE_OP_32_64(nand):
644 CASE_OP_32_64(nor):
Richard Henderson03271522013-08-14 14:35:56 -0700645 CASE_OP_32_64(muluh):
646 CASE_OP_32_64(mulsh):
Richard Henderson24c9ae42012-10-02 11:32:21 -0700647 swap_commutative(args[0], &args[1], &args[2]);
Kirill Batuzov53108fb2011-07-07 16:37:14 +0400648 break;
Aurelien Jarno65a7cce2012-09-06 16:47:14 +0200649 CASE_OP_32_64(brcond):
Richard Henderson24c9ae42012-10-02 11:32:21 -0700650 if (swap_commutative(-1, &args[0], &args[1])) {
Aurelien Jarno65a7cce2012-09-06 16:47:14 +0200651 args[2] = tcg_swap_cond(args[2]);
652 }
653 break;
654 CASE_OP_32_64(setcond):
Richard Henderson24c9ae42012-10-02 11:32:21 -0700655 if (swap_commutative(args[0], &args[1], &args[2])) {
Aurelien Jarno65a7cce2012-09-06 16:47:14 +0200656 args[3] = tcg_swap_cond(args[3]);
657 }
658 break;
Richard Hendersonfa01a202012-09-21 10:13:37 -0700659 CASE_OP_32_64(movcond):
Richard Henderson24c9ae42012-10-02 11:32:21 -0700660 if (swap_commutative(-1, &args[1], &args[2])) {
661 args[5] = tcg_swap_cond(args[5]);
Richard Hendersonfa01a202012-09-21 10:13:37 -0700662 }
Richard Henderson5d8f5362012-09-21 10:13:38 -0700663 /* For movcond, we canonicalize the "false" input reg to match
664 the destination reg so that the tcg backend can implement
665 a "move if true" operation. */
Richard Henderson24c9ae42012-10-02 11:32:21 -0700666 if (swap_commutative(args[0], &args[4], &args[3])) {
667 args[5] = tcg_invert_cond(args[5]);
Richard Henderson5d8f5362012-09-21 10:13:38 -0700668 }
Richard Henderson1e484e62012-10-02 11:32:22 -0700669 break;
Richard Hendersond7156f72013-02-19 23:51:52 -0800670 CASE_OP_32_64(add2):
Richard Henderson1e484e62012-10-02 11:32:22 -0700671 swap_commutative(args[0], &args[2], &args[4]);
672 swap_commutative(args[1], &args[3], &args[5]);
673 break;
Richard Hendersond7156f72013-02-19 23:51:52 -0800674 CASE_OP_32_64(mulu2):
Richard Henderson4d3203f2013-02-19 23:51:53 -0800675 CASE_OP_32_64(muls2):
Richard Henderson14149682012-10-02 11:32:30 -0700676 swap_commutative(args[0], &args[2], &args[3]);
677 break;
Richard Henderson0bfcb862012-10-02 11:32:23 -0700678 case INDEX_op_brcond2_i32:
679 if (swap_commutative2(&args[0], &args[2])) {
680 args[4] = tcg_swap_cond(args[4]);
681 }
682 break;
683 case INDEX_op_setcond2_i32:
684 if (swap_commutative2(&args[1], &args[3])) {
685 args[5] = tcg_swap_cond(args[5]);
686 }
687 break;
Kirill Batuzov53108fb2011-07-07 16:37:14 +0400688 default:
689 break;
690 }
691
Richard Henderson2d497542013-03-21 09:13:33 -0700692 /* Simplify expressions for "shift/rot r, 0, a => movi r, 0",
693 and "sub r, 0, a => neg r, a" case. */
Richard Hendersonc45cb8b2014-09-19 13:49:15 -0700694 switch (opc) {
Aurelien Jarno01ee5282012-09-06 16:47:14 +0200695 CASE_OP_32_64(shl):
696 CASE_OP_32_64(shr):
697 CASE_OP_32_64(sar):
698 CASE_OP_32_64(rotl):
699 CASE_OP_32_64(rotr):
700 if (temps[args[1]].state == TCG_TEMP_CONST
701 && temps[args[1]].val == 0) {
Aurelien Jarnoebd27392015-06-04 21:53:23 +0200702 tcg_opt_gen_movi(s, op, args, args[0], 0);
Aurelien Jarno01ee5282012-09-06 16:47:14 +0200703 continue;
704 }
705 break;
Richard Henderson2d497542013-03-21 09:13:33 -0700706 CASE_OP_32_64(sub):
707 {
708 TCGOpcode neg_op;
709 bool have_neg;
710
711 if (temps[args[2]].state == TCG_TEMP_CONST) {
712 /* Proceed with possible constant folding. */
713 break;
714 }
Richard Hendersonc45cb8b2014-09-19 13:49:15 -0700715 if (opc == INDEX_op_sub_i32) {
Richard Henderson2d497542013-03-21 09:13:33 -0700716 neg_op = INDEX_op_neg_i32;
717 have_neg = TCG_TARGET_HAS_neg_i32;
718 } else {
719 neg_op = INDEX_op_neg_i64;
720 have_neg = TCG_TARGET_HAS_neg_i64;
721 }
722 if (!have_neg) {
723 break;
724 }
725 if (temps[args[1]].state == TCG_TEMP_CONST
726 && temps[args[1]].val == 0) {
Richard Hendersonc45cb8b2014-09-19 13:49:15 -0700727 op->opc = neg_op;
Richard Henderson2d497542013-03-21 09:13:33 -0700728 reset_temp(args[0]);
Richard Hendersonc45cb8b2014-09-19 13:49:15 -0700729 args[1] = args[2];
Richard Henderson2d497542013-03-21 09:13:33 -0700730 continue;
731 }
732 }
733 break;
Richard Hendersone201b562014-01-28 13:15:38 -0800734 CASE_OP_32_64(xor):
735 CASE_OP_32_64(nand):
736 if (temps[args[1]].state != TCG_TEMP_CONST
737 && temps[args[2]].state == TCG_TEMP_CONST
738 && temps[args[2]].val == -1) {
739 i = 1;
740 goto try_not;
741 }
742 break;
743 CASE_OP_32_64(nor):
744 if (temps[args[1]].state != TCG_TEMP_CONST
745 && temps[args[2]].state == TCG_TEMP_CONST
746 && temps[args[2]].val == 0) {
747 i = 1;
748 goto try_not;
749 }
750 break;
751 CASE_OP_32_64(andc):
752 if (temps[args[2]].state != TCG_TEMP_CONST
753 && temps[args[1]].state == TCG_TEMP_CONST
754 && temps[args[1]].val == -1) {
755 i = 2;
756 goto try_not;
757 }
758 break;
759 CASE_OP_32_64(orc):
760 CASE_OP_32_64(eqv):
761 if (temps[args[2]].state != TCG_TEMP_CONST
762 && temps[args[1]].state == TCG_TEMP_CONST
763 && temps[args[1]].val == 0) {
764 i = 2;
765 goto try_not;
766 }
767 break;
768 try_not:
769 {
770 TCGOpcode not_op;
771 bool have_not;
772
773 if (def->flags & TCG_OPF_64BIT) {
774 not_op = INDEX_op_not_i64;
775 have_not = TCG_TARGET_HAS_not_i64;
776 } else {
777 not_op = INDEX_op_not_i32;
778 have_not = TCG_TARGET_HAS_not_i32;
779 }
780 if (!have_not) {
781 break;
782 }
Richard Hendersonc45cb8b2014-09-19 13:49:15 -0700783 op->opc = not_op;
Richard Hendersone201b562014-01-28 13:15:38 -0800784 reset_temp(args[0]);
Richard Hendersonc45cb8b2014-09-19 13:49:15 -0700785 args[1] = args[i];
Richard Hendersone201b562014-01-28 13:15:38 -0800786 continue;
787 }
Aurelien Jarno01ee5282012-09-06 16:47:14 +0200788 default:
789 break;
790 }
791
Richard Henderson464a1442014-01-31 07:42:11 -0600792 /* Simplify expression for "op r, a, const => mov r, a" cases */
Richard Hendersonc45cb8b2014-09-19 13:49:15 -0700793 switch (opc) {
Kirill Batuzov53108fb2011-07-07 16:37:14 +0400794 CASE_OP_32_64(add):
795 CASE_OP_32_64(sub):
Kirill Batuzov55c09752011-07-07 16:37:16 +0400796 CASE_OP_32_64(shl):
797 CASE_OP_32_64(shr):
798 CASE_OP_32_64(sar):
Richard Henderson25c4d9c2011-08-17 14:11:46 -0700799 CASE_OP_32_64(rotl):
800 CASE_OP_32_64(rotr):
Aurelien Jarno38ee1882012-09-06 16:47:14 +0200801 CASE_OP_32_64(or):
802 CASE_OP_32_64(xor):
Richard Henderson464a1442014-01-31 07:42:11 -0600803 CASE_OP_32_64(andc):
804 if (temps[args[1]].state != TCG_TEMP_CONST
805 && temps[args[2]].state == TCG_TEMP_CONST
Kirill Batuzov53108fb2011-07-07 16:37:14 +0400806 && temps[args[2]].val == 0) {
Aurelien Jarno97a79eb2015-06-05 11:19:18 +0200807 tcg_opt_gen_mov(s, op, args, args[0], args[1]);
808 continue;
Kirill Batuzov53108fb2011-07-07 16:37:14 +0400809 }
810 break;
Richard Henderson464a1442014-01-31 07:42:11 -0600811 CASE_OP_32_64(and):
812 CASE_OP_32_64(orc):
813 CASE_OP_32_64(eqv):
814 if (temps[args[1]].state != TCG_TEMP_CONST
815 && temps[args[2]].state == TCG_TEMP_CONST
816 && temps[args[2]].val == -1) {
Aurelien Jarno97a79eb2015-06-05 11:19:18 +0200817 tcg_opt_gen_mov(s, op, args, args[0], args[1]);
818 continue;
Richard Henderson464a1442014-01-31 07:42:11 -0600819 }
820 break;
Aurelien Jarno56e49432012-09-06 16:47:13 +0200821 default:
822 break;
823 }
824
Aurelien Jarno30312442013-09-03 08:27:38 +0200825 /* Simplify using known-zero bits. Currently only ops with a single
826 output argument is supported. */
Paolo Bonzini3a9d8b12013-01-11 15:42:52 -0800827 mask = -1;
Paolo Bonzini633f6502013-01-11 15:42:53 -0800828 affected = -1;
Richard Hendersonc45cb8b2014-09-19 13:49:15 -0700829 switch (opc) {
Paolo Bonzini3a9d8b12013-01-11 15:42:52 -0800830 CASE_OP_32_64(ext8s):
831 if ((temps[args[1]].mask & 0x80) != 0) {
832 break;
833 }
834 CASE_OP_32_64(ext8u):
835 mask = 0xff;
836 goto and_const;
837 CASE_OP_32_64(ext16s):
838 if ((temps[args[1]].mask & 0x8000) != 0) {
839 break;
840 }
841 CASE_OP_32_64(ext16u):
842 mask = 0xffff;
843 goto and_const;
844 case INDEX_op_ext32s_i64:
845 if ((temps[args[1]].mask & 0x80000000) != 0) {
846 break;
847 }
848 case INDEX_op_ext32u_i64:
849 mask = 0xffffffffU;
850 goto and_const;
851
852 CASE_OP_32_64(and):
853 mask = temps[args[2]].mask;
854 if (temps[args[2]].state == TCG_TEMP_CONST) {
855 and_const:
Paolo Bonzini633f6502013-01-11 15:42:53 -0800856 affected = temps[args[1]].mask & ~mask;
Paolo Bonzini3a9d8b12013-01-11 15:42:52 -0800857 }
858 mask = temps[args[1]].mask & mask;
859 break;
860
Richard Henderson23ec69ed2014-01-28 12:03:24 -0800861 CASE_OP_32_64(andc):
862 /* Known-zeros does not imply known-ones. Therefore unless
863 args[2] is constant, we can't infer anything from it. */
864 if (temps[args[2]].state == TCG_TEMP_CONST) {
865 mask = ~temps[args[2]].mask;
866 goto and_const;
867 }
868 /* But we certainly know nothing outside args[1] may be set. */
869 mask = temps[args[1]].mask;
870 break;
871
Aurelien Jarnoe46b2252013-09-03 08:27:38 +0200872 case INDEX_op_sar_i32:
Paolo Bonzini3a9d8b12013-01-11 15:42:52 -0800873 if (temps[args[2]].state == TCG_TEMP_CONST) {
Richard Henderson50c5c4d2014-03-18 07:45:39 -0700874 tmp = temps[args[2]].val & 31;
875 mask = (int32_t)temps[args[1]].mask >> tmp;
Aurelien Jarnoe46b2252013-09-03 08:27:38 +0200876 }
877 break;
878 case INDEX_op_sar_i64:
879 if (temps[args[2]].state == TCG_TEMP_CONST) {
Richard Henderson50c5c4d2014-03-18 07:45:39 -0700880 tmp = temps[args[2]].val & 63;
881 mask = (int64_t)temps[args[1]].mask >> tmp;
Paolo Bonzini3a9d8b12013-01-11 15:42:52 -0800882 }
883 break;
884
Aurelien Jarnoe46b2252013-09-03 08:27:38 +0200885 case INDEX_op_shr_i32:
Paolo Bonzini3a9d8b12013-01-11 15:42:52 -0800886 if (temps[args[2]].state == TCG_TEMP_CONST) {
Richard Henderson50c5c4d2014-03-18 07:45:39 -0700887 tmp = temps[args[2]].val & 31;
888 mask = (uint32_t)temps[args[1]].mask >> tmp;
Aurelien Jarnoe46b2252013-09-03 08:27:38 +0200889 }
890 break;
891 case INDEX_op_shr_i64:
892 if (temps[args[2]].state == TCG_TEMP_CONST) {
Richard Henderson50c5c4d2014-03-18 07:45:39 -0700893 tmp = temps[args[2]].val & 63;
894 mask = (uint64_t)temps[args[1]].mask >> tmp;
Paolo Bonzini3a9d8b12013-01-11 15:42:52 -0800895 }
896 break;
897
Richard Henderson4bb7a412013-09-09 17:03:24 -0700898 case INDEX_op_trunc_shr_i32:
899 mask = (uint64_t)temps[args[1]].mask >> args[2];
900 break;
901
Paolo Bonzini3a9d8b12013-01-11 15:42:52 -0800902 CASE_OP_32_64(shl):
903 if (temps[args[2]].state == TCG_TEMP_CONST) {
Richard Henderson50c5c4d2014-03-18 07:45:39 -0700904 tmp = temps[args[2]].val & (TCG_TARGET_REG_BITS - 1);
905 mask = temps[args[1]].mask << tmp;
Paolo Bonzini3a9d8b12013-01-11 15:42:52 -0800906 }
907 break;
908
909 CASE_OP_32_64(neg):
910 /* Set to 1 all bits to the left of the rightmost. */
911 mask = -(temps[args[1]].mask & -temps[args[1]].mask);
912 break;
913
914 CASE_OP_32_64(deposit):
Richard Hendersond998e552014-03-18 14:23:52 -0700915 mask = deposit64(temps[args[1]].mask, args[3], args[4],
916 temps[args[2]].mask);
Paolo Bonzini3a9d8b12013-01-11 15:42:52 -0800917 break;
918
919 CASE_OP_32_64(or):
920 CASE_OP_32_64(xor):
921 mask = temps[args[1]].mask | temps[args[2]].mask;
922 break;
923
924 CASE_OP_32_64(setcond):
Richard Hendersona7635512014-04-23 22:18:30 -0700925 case INDEX_op_setcond2_i32:
Paolo Bonzini3a9d8b12013-01-11 15:42:52 -0800926 mask = 1;
927 break;
928
929 CASE_OP_32_64(movcond):
930 mask = temps[args[3]].mask | temps[args[4]].mask;
931 break;
932
Aurelien Jarnoc8d70272013-09-03 08:27:39 +0200933 CASE_OP_32_64(ld8u):
Aurelien Jarnoc8d70272013-09-03 08:27:39 +0200934 mask = 0xff;
935 break;
936 CASE_OP_32_64(ld16u):
Aurelien Jarnoc8d70272013-09-03 08:27:39 +0200937 mask = 0xffff;
938 break;
939 case INDEX_op_ld32u_i64:
Aurelien Jarnoc8d70272013-09-03 08:27:39 +0200940 mask = 0xffffffffu;
941 break;
942
943 CASE_OP_32_64(qemu_ld):
944 {
Richard Henderson59227d52015-05-12 11:51:44 -0700945 TCGMemOpIdx oi = args[nb_oargs + nb_iargs];
946 TCGMemOp mop = get_memop(oi);
Aurelien Jarnoc8d70272013-09-03 08:27:39 +0200947 if (!(mop & MO_SIGN)) {
948 mask = (2ULL << ((8 << (mop & MO_SIZE)) - 1)) - 1;
949 }
950 }
951 break;
952
Paolo Bonzini3a9d8b12013-01-11 15:42:52 -0800953 default:
954 break;
955 }
956
Richard Hendersonbc8d6882014-06-08 18:24:14 -0700957 /* 32-bit ops generate 32-bit results. For the result is zero test
958 below, we can ignore high bits, but for further optimizations we
959 need to record that the high bits contain garbage. */
Richard Henderson24666ba2014-05-22 11:14:10 -0700960 partmask = mask;
Richard Hendersonbc8d6882014-06-08 18:24:14 -0700961 if (!(def->flags & TCG_OPF_64BIT)) {
Richard Henderson24666ba2014-05-22 11:14:10 -0700962 mask |= ~(tcg_target_ulong)0xffffffffu;
963 partmask &= 0xffffffffu;
964 affected &= 0xffffffffu;
Aurelien Jarnof096dc92013-09-03 08:27:38 +0200965 }
966
Richard Henderson24666ba2014-05-22 11:14:10 -0700967 if (partmask == 0) {
Richard Hendersoncf066672014-03-22 20:06:52 -0700968 assert(nb_oargs == 1);
Aurelien Jarnoebd27392015-06-04 21:53:23 +0200969 tcg_opt_gen_movi(s, op, args, args[0], 0);
Paolo Bonzini633f6502013-01-11 15:42:53 -0800970 continue;
971 }
972 if (affected == 0) {
Richard Hendersoncf066672014-03-22 20:06:52 -0700973 assert(nb_oargs == 1);
Aurelien Jarno97a79eb2015-06-05 11:19:18 +0200974 tcg_opt_gen_mov(s, op, args, args[0], args[1]);
Paolo Bonzini633f6502013-01-11 15:42:53 -0800975 continue;
976 }
977
Aurelien Jarno56e49432012-09-06 16:47:13 +0200978 /* Simplify expression for "op r, a, 0 => movi r, 0" cases */
Richard Hendersonc45cb8b2014-09-19 13:49:15 -0700979 switch (opc) {
Aurelien Jarno61251c02012-09-06 16:47:14 +0200980 CASE_OP_32_64(and):
Kirill Batuzov53108fb2011-07-07 16:37:14 +0400981 CASE_OP_32_64(mul):
Richard Henderson03271522013-08-14 14:35:56 -0700982 CASE_OP_32_64(muluh):
983 CASE_OP_32_64(mulsh):
Kirill Batuzov53108fb2011-07-07 16:37:14 +0400984 if ((temps[args[2]].state == TCG_TEMP_CONST
985 && temps[args[2]].val == 0)) {
Aurelien Jarnoebd27392015-06-04 21:53:23 +0200986 tcg_opt_gen_movi(s, op, args, args[0], 0);
Kirill Batuzov53108fb2011-07-07 16:37:14 +0400987 continue;
988 }
989 break;
Aurelien Jarno56e49432012-09-06 16:47:13 +0200990 default:
991 break;
992 }
993
994 /* Simplify expression for "op r, a, a => mov r, a" cases */
Richard Hendersonc45cb8b2014-09-19 13:49:15 -0700995 switch (opc) {
Kirill Batuzov9a810902011-07-07 16:37:15 +0400996 CASE_OP_32_64(or):
997 CASE_OP_32_64(and):
Aurelien Jarno0aba1c72012-09-18 19:11:32 +0200998 if (temps_are_copies(args[1], args[2])) {
Aurelien Jarno97a79eb2015-06-05 11:19:18 +0200999 tcg_opt_gen_mov(s, op, args, args[0], args[1]);
Kirill Batuzov9a810902011-07-07 16:37:15 +04001000 continue;
1001 }
1002 break;
Blue Swirlfe0de7a2011-07-30 19:18:32 +00001003 default:
1004 break;
Kirill Batuzov53108fb2011-07-07 16:37:14 +04001005 }
1006
Aurelien Jarno3c941932012-09-18 19:12:36 +02001007 /* Simplify expression for "op r, a, a => movi r, 0" cases */
Richard Hendersonc45cb8b2014-09-19 13:49:15 -07001008 switch (opc) {
Richard Hendersone64e9582014-01-28 13:26:17 -08001009 CASE_OP_32_64(andc):
Aurelien Jarno3c941932012-09-18 19:12:36 +02001010 CASE_OP_32_64(sub):
1011 CASE_OP_32_64(xor):
1012 if (temps_are_copies(args[1], args[2])) {
Aurelien Jarnoebd27392015-06-04 21:53:23 +02001013 tcg_opt_gen_movi(s, op, args, args[0], 0);
Aurelien Jarno3c941932012-09-18 19:12:36 +02001014 continue;
1015 }
1016 break;
1017 default:
1018 break;
1019 }
1020
Kirill Batuzov22613af2011-07-07 16:37:13 +04001021 /* Propagate constants through copy operations and do constant
1022 folding. Constants will be substituted to arguments by register
1023 allocator where needed and possible. Also detect copies. */
Richard Hendersonc45cb8b2014-09-19 13:49:15 -07001024 switch (opc) {
Kirill Batuzov22613af2011-07-07 16:37:13 +04001025 CASE_OP_32_64(mov):
Aurelien Jarno97a79eb2015-06-05 11:19:18 +02001026 tcg_opt_gen_mov(s, op, args, args[0], args[1]);
1027 break;
Kirill Batuzov22613af2011-07-07 16:37:13 +04001028 CASE_OP_32_64(movi):
Aurelien Jarnoebd27392015-06-04 21:53:23 +02001029 tcg_opt_gen_movi(s, op, args, args[0], args[1]);
Kirill Batuzov22613af2011-07-07 16:37:13 +04001030 break;
Richard Henderson6e14e912012-10-02 11:32:24 -07001031
Kirill Batuzova640f032011-07-07 16:37:17 +04001032 CASE_OP_32_64(not):
Richard Hendersoncb25c802011-08-17 14:11:47 -07001033 CASE_OP_32_64(neg):
Richard Henderson25c4d9c2011-08-17 14:11:46 -07001034 CASE_OP_32_64(ext8s):
1035 CASE_OP_32_64(ext8u):
1036 CASE_OP_32_64(ext16s):
1037 CASE_OP_32_64(ext16u):
Kirill Batuzova640f032011-07-07 16:37:17 +04001038 case INDEX_op_ext32s_i64:
1039 case INDEX_op_ext32u_i64:
Kirill Batuzova640f032011-07-07 16:37:17 +04001040 if (temps[args[1]].state == TCG_TEMP_CONST) {
Richard Hendersonc45cb8b2014-09-19 13:49:15 -07001041 tmp = do_constant_folding(opc, temps[args[1]].val, 0);
Aurelien Jarnoebd27392015-06-04 21:53:23 +02001042 tcg_opt_gen_movi(s, op, args, args[0], tmp);
Richard Henderson6e14e912012-10-02 11:32:24 -07001043 break;
Kirill Batuzova640f032011-07-07 16:37:17 +04001044 }
Richard Henderson6e14e912012-10-02 11:32:24 -07001045 goto do_default;
1046
Richard Henderson4bb7a412013-09-09 17:03:24 -07001047 case INDEX_op_trunc_shr_i32:
1048 if (temps[args[1]].state == TCG_TEMP_CONST) {
Richard Hendersonc45cb8b2014-09-19 13:49:15 -07001049 tmp = do_constant_folding(opc, temps[args[1]].val, args[2]);
Aurelien Jarnoebd27392015-06-04 21:53:23 +02001050 tcg_opt_gen_movi(s, op, args, args[0], tmp);
Richard Henderson4bb7a412013-09-09 17:03:24 -07001051 break;
1052 }
1053 goto do_default;
1054
Kirill Batuzov53108fb2011-07-07 16:37:14 +04001055 CASE_OP_32_64(add):
1056 CASE_OP_32_64(sub):
1057 CASE_OP_32_64(mul):
Kirill Batuzov9a810902011-07-07 16:37:15 +04001058 CASE_OP_32_64(or):
1059 CASE_OP_32_64(and):
1060 CASE_OP_32_64(xor):
Kirill Batuzov55c09752011-07-07 16:37:16 +04001061 CASE_OP_32_64(shl):
1062 CASE_OP_32_64(shr):
1063 CASE_OP_32_64(sar):
Richard Henderson25c4d9c2011-08-17 14:11:46 -07001064 CASE_OP_32_64(rotl):
1065 CASE_OP_32_64(rotr):
Richard Hendersoncb25c802011-08-17 14:11:47 -07001066 CASE_OP_32_64(andc):
1067 CASE_OP_32_64(orc):
1068 CASE_OP_32_64(eqv):
1069 CASE_OP_32_64(nand):
1070 CASE_OP_32_64(nor):
Richard Henderson03271522013-08-14 14:35:56 -07001071 CASE_OP_32_64(muluh):
1072 CASE_OP_32_64(mulsh):
Richard Henderson01547f72013-08-14 15:22:46 -07001073 CASE_OP_32_64(div):
1074 CASE_OP_32_64(divu):
1075 CASE_OP_32_64(rem):
1076 CASE_OP_32_64(remu):
Kirill Batuzov53108fb2011-07-07 16:37:14 +04001077 if (temps[args[1]].state == TCG_TEMP_CONST
1078 && temps[args[2]].state == TCG_TEMP_CONST) {
Richard Hendersonc45cb8b2014-09-19 13:49:15 -07001079 tmp = do_constant_folding(opc, temps[args[1]].val,
Kirill Batuzov53108fb2011-07-07 16:37:14 +04001080 temps[args[2]].val);
Aurelien Jarnoebd27392015-06-04 21:53:23 +02001081 tcg_opt_gen_movi(s, op, args, args[0], tmp);
Richard Henderson6e14e912012-10-02 11:32:24 -07001082 break;
Kirill Batuzov53108fb2011-07-07 16:37:14 +04001083 }
Richard Henderson6e14e912012-10-02 11:32:24 -07001084 goto do_default;
1085
Aurelien Jarno7ef55fc2012-09-21 11:07:29 +02001086 CASE_OP_32_64(deposit):
1087 if (temps[args[1]].state == TCG_TEMP_CONST
1088 && temps[args[2]].state == TCG_TEMP_CONST) {
Richard Hendersond998e552014-03-18 14:23:52 -07001089 tmp = deposit64(temps[args[1]].val, args[3], args[4],
1090 temps[args[2]].val);
Aurelien Jarnoebd27392015-06-04 21:53:23 +02001091 tcg_opt_gen_movi(s, op, args, args[0], tmp);
Richard Henderson6e14e912012-10-02 11:32:24 -07001092 break;
Aurelien Jarno7ef55fc2012-09-21 11:07:29 +02001093 }
Richard Henderson6e14e912012-10-02 11:32:24 -07001094 goto do_default;
1095
Aurelien Jarnof8dd19e2012-09-06 16:47:14 +02001096 CASE_OP_32_64(setcond):
Richard Hendersonc45cb8b2014-09-19 13:49:15 -07001097 tmp = do_constant_folding_cond(opc, args[1], args[2], args[3]);
Aurelien Jarnob336ceb2012-09-18 19:37:00 +02001098 if (tmp != 2) {
Aurelien Jarnoebd27392015-06-04 21:53:23 +02001099 tcg_opt_gen_movi(s, op, args, args[0], tmp);
Richard Henderson6e14e912012-10-02 11:32:24 -07001100 break;
Aurelien Jarnof8dd19e2012-09-06 16:47:14 +02001101 }
Richard Henderson6e14e912012-10-02 11:32:24 -07001102 goto do_default;
1103
Aurelien Jarnofbeaa262012-09-06 16:47:14 +02001104 CASE_OP_32_64(brcond):
Richard Hendersonc45cb8b2014-09-19 13:49:15 -07001105 tmp = do_constant_folding_cond(opc, args[0], args[1], args[2]);
Aurelien Jarnob336ceb2012-09-18 19:37:00 +02001106 if (tmp != 2) {
1107 if (tmp) {
Paolo Bonzinid193a142013-01-11 15:42:51 -08001108 reset_all_temps(nb_temps);
Richard Hendersonc45cb8b2014-09-19 13:49:15 -07001109 op->opc = INDEX_op_br;
1110 args[0] = args[3];
Aurelien Jarnofbeaa262012-09-06 16:47:14 +02001111 } else {
Richard Henderson0c627cd2014-03-30 16:51:54 -07001112 tcg_op_remove(s, op);
Aurelien Jarnofbeaa262012-09-06 16:47:14 +02001113 }
Richard Henderson6e14e912012-10-02 11:32:24 -07001114 break;
Aurelien Jarnofbeaa262012-09-06 16:47:14 +02001115 }
Richard Henderson6e14e912012-10-02 11:32:24 -07001116 goto do_default;
1117
Richard Hendersonfa01a202012-09-21 10:13:37 -07001118 CASE_OP_32_64(movcond):
Richard Hendersonc45cb8b2014-09-19 13:49:15 -07001119 tmp = do_constant_folding_cond(opc, args[1], args[2], args[5]);
Aurelien Jarnob336ceb2012-09-18 19:37:00 +02001120 if (tmp != 2) {
Aurelien Jarno97a79eb2015-06-05 11:19:18 +02001121 tcg_opt_gen_mov(s, op, args, args[0], args[4-tmp]);
Richard Henderson6e14e912012-10-02 11:32:24 -07001122 break;
Richard Hendersonfa01a202012-09-21 10:13:37 -07001123 }
Richard Henderson6e14e912012-10-02 11:32:24 -07001124 goto do_default;
1125
Richard Henderson212c3282012-10-02 11:32:28 -07001126 case INDEX_op_add2_i32:
1127 case INDEX_op_sub2_i32:
1128 if (temps[args[2]].state == TCG_TEMP_CONST
1129 && temps[args[3]].state == TCG_TEMP_CONST
1130 && temps[args[4]].state == TCG_TEMP_CONST
1131 && temps[args[5]].state == TCG_TEMP_CONST) {
1132 uint32_t al = temps[args[2]].val;
1133 uint32_t ah = temps[args[3]].val;
1134 uint32_t bl = temps[args[4]].val;
1135 uint32_t bh = temps[args[5]].val;
1136 uint64_t a = ((uint64_t)ah << 32) | al;
1137 uint64_t b = ((uint64_t)bh << 32) | bl;
1138 TCGArg rl, rh;
Richard Hendersona4ce0992014-03-30 17:14:02 -07001139 TCGOp *op2 = insert_op_before(s, op, INDEX_op_movi_i32, 2);
1140 TCGArg *args2 = &s->gen_opparam_buf[op2->args];
Richard Henderson212c3282012-10-02 11:32:28 -07001141
Richard Hendersonc45cb8b2014-09-19 13:49:15 -07001142 if (opc == INDEX_op_add2_i32) {
Richard Henderson212c3282012-10-02 11:32:28 -07001143 a += b;
1144 } else {
1145 a -= b;
1146 }
1147
Richard Henderson212c3282012-10-02 11:32:28 -07001148 rl = args[0];
1149 rh = args[1];
Aurelien Jarno29f3ff82015-07-10 18:03:31 +02001150 tcg_opt_gen_movi(s, op, args, rl, (int32_t)a);
1151 tcg_opt_gen_movi(s, op2, args2, rh, (int32_t)(a >> 32));
Richard Hendersonc45cb8b2014-09-19 13:49:15 -07001152
1153 /* We've done all we need to do with the movi. Skip it. */
1154 oi_next = op2->next;
Richard Henderson212c3282012-10-02 11:32:28 -07001155 break;
1156 }
1157 goto do_default;
1158
Richard Henderson14149682012-10-02 11:32:30 -07001159 case INDEX_op_mulu2_i32:
1160 if (temps[args[2]].state == TCG_TEMP_CONST
1161 && temps[args[3]].state == TCG_TEMP_CONST) {
1162 uint32_t a = temps[args[2]].val;
1163 uint32_t b = temps[args[3]].val;
1164 uint64_t r = (uint64_t)a * b;
1165 TCGArg rl, rh;
Richard Hendersona4ce0992014-03-30 17:14:02 -07001166 TCGOp *op2 = insert_op_before(s, op, INDEX_op_movi_i32, 2);
1167 TCGArg *args2 = &s->gen_opparam_buf[op2->args];
Richard Henderson14149682012-10-02 11:32:30 -07001168
1169 rl = args[0];
1170 rh = args[1];
Aurelien Jarno29f3ff82015-07-10 18:03:31 +02001171 tcg_opt_gen_movi(s, op, args, rl, (int32_t)r);
1172 tcg_opt_gen_movi(s, op2, args2, rh, (int32_t)(r >> 32));
Richard Hendersonc45cb8b2014-09-19 13:49:15 -07001173
1174 /* We've done all we need to do with the movi. Skip it. */
1175 oi_next = op2->next;
Richard Henderson14149682012-10-02 11:32:30 -07001176 break;
1177 }
1178 goto do_default;
1179
Richard Hendersonbc1473e2012-10-02 11:32:25 -07001180 case INDEX_op_brcond2_i32:
Richard Henderson6c4382f2012-10-02 11:32:27 -07001181 tmp = do_constant_folding_cond2(&args[0], &args[2], args[4]);
1182 if (tmp != 2) {
1183 if (tmp) {
Richard Hendersona7635512014-04-23 22:18:30 -07001184 do_brcond_true:
Paolo Bonzinid193a142013-01-11 15:42:51 -08001185 reset_all_temps(nb_temps);
Richard Hendersonc45cb8b2014-09-19 13:49:15 -07001186 op->opc = INDEX_op_br;
1187 args[0] = args[5];
Richard Henderson6c4382f2012-10-02 11:32:27 -07001188 } else {
Richard Hendersona7635512014-04-23 22:18:30 -07001189 do_brcond_false:
Richard Henderson0c627cd2014-03-30 16:51:54 -07001190 tcg_op_remove(s, op);
Richard Henderson6c4382f2012-10-02 11:32:27 -07001191 }
1192 } else if ((args[4] == TCG_COND_LT || args[4] == TCG_COND_GE)
1193 && temps[args[2]].state == TCG_TEMP_CONST
1194 && temps[args[3]].state == TCG_TEMP_CONST
1195 && temps[args[2]].val == 0
1196 && temps[args[3]].val == 0) {
1197 /* Simplify LT/GE comparisons vs zero to a single compare
1198 vs the high word of the input. */
Richard Hendersona7635512014-04-23 22:18:30 -07001199 do_brcond_high:
Paolo Bonzinid193a142013-01-11 15:42:51 -08001200 reset_all_temps(nb_temps);
Richard Hendersonc45cb8b2014-09-19 13:49:15 -07001201 op->opc = INDEX_op_brcond_i32;
1202 args[0] = args[1];
1203 args[1] = args[3];
1204 args[2] = args[4];
1205 args[3] = args[5];
Richard Hendersona7635512014-04-23 22:18:30 -07001206 } else if (args[4] == TCG_COND_EQ) {
1207 /* Simplify EQ comparisons where one of the pairs
1208 can be simplified. */
1209 tmp = do_constant_folding_cond(INDEX_op_brcond_i32,
1210 args[0], args[2], TCG_COND_EQ);
1211 if (tmp == 0) {
1212 goto do_brcond_false;
1213 } else if (tmp == 1) {
1214 goto do_brcond_high;
1215 }
1216 tmp = do_constant_folding_cond(INDEX_op_brcond_i32,
1217 args[1], args[3], TCG_COND_EQ);
1218 if (tmp == 0) {
1219 goto do_brcond_false;
1220 } else if (tmp != 1) {
1221 goto do_default;
1222 }
1223 do_brcond_low:
1224 reset_all_temps(nb_temps);
Richard Hendersonc45cb8b2014-09-19 13:49:15 -07001225 op->opc = INDEX_op_brcond_i32;
1226 args[1] = args[2];
1227 args[2] = args[4];
1228 args[3] = args[5];
Richard Hendersona7635512014-04-23 22:18:30 -07001229 } else if (args[4] == TCG_COND_NE) {
1230 /* Simplify NE comparisons where one of the pairs
1231 can be simplified. */
1232 tmp = do_constant_folding_cond(INDEX_op_brcond_i32,
1233 args[0], args[2], TCG_COND_NE);
1234 if (tmp == 0) {
1235 goto do_brcond_high;
1236 } else if (tmp == 1) {
1237 goto do_brcond_true;
1238 }
1239 tmp = do_constant_folding_cond(INDEX_op_brcond_i32,
1240 args[1], args[3], TCG_COND_NE);
1241 if (tmp == 0) {
1242 goto do_brcond_low;
1243 } else if (tmp == 1) {
1244 goto do_brcond_true;
1245 }
1246 goto do_default;
Richard Henderson6c4382f2012-10-02 11:32:27 -07001247 } else {
1248 goto do_default;
Richard Hendersonbc1473e2012-10-02 11:32:25 -07001249 }
Richard Henderson6c4382f2012-10-02 11:32:27 -07001250 break;
Richard Hendersonbc1473e2012-10-02 11:32:25 -07001251
1252 case INDEX_op_setcond2_i32:
Richard Henderson6c4382f2012-10-02 11:32:27 -07001253 tmp = do_constant_folding_cond2(&args[1], &args[3], args[5]);
1254 if (tmp != 2) {
Richard Hendersona7635512014-04-23 22:18:30 -07001255 do_setcond_const:
Aurelien Jarnoebd27392015-06-04 21:53:23 +02001256 tcg_opt_gen_movi(s, op, args, args[0], tmp);
Richard Henderson6c4382f2012-10-02 11:32:27 -07001257 } else if ((args[5] == TCG_COND_LT || args[5] == TCG_COND_GE)
1258 && temps[args[3]].state == TCG_TEMP_CONST
1259 && temps[args[4]].state == TCG_TEMP_CONST
1260 && temps[args[3]].val == 0
1261 && temps[args[4]].val == 0) {
1262 /* Simplify LT/GE comparisons vs zero to a single compare
1263 vs the high word of the input. */
Richard Hendersona7635512014-04-23 22:18:30 -07001264 do_setcond_high:
Aurelien Jarno66e61b52013-05-08 22:36:39 +02001265 reset_temp(args[0]);
Richard Hendersona7635512014-04-23 22:18:30 -07001266 temps[args[0]].mask = 1;
Richard Hendersonc45cb8b2014-09-19 13:49:15 -07001267 op->opc = INDEX_op_setcond_i32;
1268 args[1] = args[2];
1269 args[2] = args[4];
1270 args[3] = args[5];
Richard Hendersona7635512014-04-23 22:18:30 -07001271 } else if (args[5] == TCG_COND_EQ) {
1272 /* Simplify EQ comparisons where one of the pairs
1273 can be simplified. */
1274 tmp = do_constant_folding_cond(INDEX_op_setcond_i32,
1275 args[1], args[3], TCG_COND_EQ);
1276 if (tmp == 0) {
1277 goto do_setcond_const;
1278 } else if (tmp == 1) {
1279 goto do_setcond_high;
1280 }
1281 tmp = do_constant_folding_cond(INDEX_op_setcond_i32,
1282 args[2], args[4], TCG_COND_EQ);
1283 if (tmp == 0) {
1284 goto do_setcond_high;
1285 } else if (tmp != 1) {
1286 goto do_default;
1287 }
1288 do_setcond_low:
1289 reset_temp(args[0]);
1290 temps[args[0]].mask = 1;
Richard Hendersonc45cb8b2014-09-19 13:49:15 -07001291 op->opc = INDEX_op_setcond_i32;
1292 args[2] = args[3];
1293 args[3] = args[5];
Richard Hendersona7635512014-04-23 22:18:30 -07001294 } else if (args[5] == TCG_COND_NE) {
1295 /* Simplify NE comparisons where one of the pairs
1296 can be simplified. */
1297 tmp = do_constant_folding_cond(INDEX_op_setcond_i32,
1298 args[1], args[3], TCG_COND_NE);
1299 if (tmp == 0) {
1300 goto do_setcond_high;
1301 } else if (tmp == 1) {
1302 goto do_setcond_const;
1303 }
1304 tmp = do_constant_folding_cond(INDEX_op_setcond_i32,
1305 args[2], args[4], TCG_COND_NE);
1306 if (tmp == 0) {
1307 goto do_setcond_low;
1308 } else if (tmp == 1) {
1309 goto do_setcond_const;
1310 }
1311 goto do_default;
Richard Henderson6c4382f2012-10-02 11:32:27 -07001312 } else {
1313 goto do_default;
Richard Hendersonbc1473e2012-10-02 11:32:25 -07001314 }
Richard Henderson6c4382f2012-10-02 11:32:27 -07001315 break;
Richard Hendersonbc1473e2012-10-02 11:32:25 -07001316
Kirill Batuzov8f2e8c02011-07-07 16:37:12 +04001317 case INDEX_op_call:
Richard Hendersoncf066672014-03-22 20:06:52 -07001318 if (!(args[nb_oargs + nb_iargs + 1]
1319 & (TCG_CALL_NO_READ_GLOBALS | TCG_CALL_NO_WRITE_GLOBALS))) {
Kirill Batuzov22613af2011-07-07 16:37:13 +04001320 for (i = 0; i < nb_globals; i++) {
Aurelien Jarno1208d7d2015-07-27 12:41:44 +02001321 if (test_bit(i, temps_used.l)) {
1322 reset_temp(i);
1323 }
Kirill Batuzov22613af2011-07-07 16:37:13 +04001324 }
1325 }
Richard Hendersoncf066672014-03-22 20:06:52 -07001326 goto do_reset_output;
Richard Henderson6e14e912012-10-02 11:32:24 -07001327
Kirill Batuzov8f2e8c02011-07-07 16:37:12 +04001328 default:
Richard Henderson6e14e912012-10-02 11:32:24 -07001329 do_default:
1330 /* Default case: we know nothing about operation (or were unable
1331 to compute the operation result) so no propagation is done.
1332 We trash everything if the operation is the end of a basic
Paolo Bonzini3a9d8b12013-01-11 15:42:52 -08001333 block, otherwise we only trash the output args. "mask" is
1334 the non-zero bits mask for the first output arg. */
Aurelien Jarnoa2550662012-09-19 21:40:30 +02001335 if (def->flags & TCG_OPF_BB_END) {
Paolo Bonzinid193a142013-01-11 15:42:51 -08001336 reset_all_temps(nb_temps);
Aurelien Jarnoa2550662012-09-19 21:40:30 +02001337 } else {
Richard Hendersoncf066672014-03-22 20:06:52 -07001338 do_reset_output:
1339 for (i = 0; i < nb_oargs; i++) {
Aurelien Jarnoe590d4e2012-09-11 12:31:21 +02001340 reset_temp(args[i]);
Aurelien Jarno30312442013-09-03 08:27:38 +02001341 /* Save the corresponding known-zero bits mask for the
1342 first output argument (only one supported so far). */
1343 if (i == 0) {
1344 temps[args[i]].mask = mask;
1345 }
Aurelien Jarnoa2550662012-09-19 21:40:30 +02001346 }
Kirill Batuzov22613af2011-07-07 16:37:13 +04001347 }
Kirill Batuzov8f2e8c02011-07-07 16:37:12 +04001348 break;
1349 }
1350 }
Kirill Batuzov8f2e8c02011-07-07 16:37:12 +04001351}