blob: d5c0398f4457e95bfd9b12399a087fa0e51e67f2 [file] [log] [blame]
Kirill Batuzov8f2e8c02011-07-07 16:37:12 +04001/*
2 * Optimizations for Tiny Code Generator for QEMU
3 *
4 * Copyright (c) 2010 Samsung Electronics.
5 * Contributed by Kirill Batuzov <batuzovk@ispras.ru>
6 *
7 * Permission is hereby granted, free of charge, to any person obtaining a copy
8 * of this software and associated documentation files (the "Software"), to deal
9 * in the Software without restriction, including without limitation the rights
10 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
11 * copies of the Software, and to permit persons to whom the Software is
12 * furnished to do so, subject to the following conditions:
13 *
14 * The above copyright notice and this permission notice shall be included in
15 * all copies or substantial portions of the Software.
16 *
17 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
18 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
19 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
20 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
21 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
22 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
23 * THE SOFTWARE.
24 */
25
26#include "config.h"
27
28#include <stdlib.h>
29#include <stdio.h>
30
31#include "qemu-common.h"
32#include "tcg-op.h"
33
/* Expand to the pair of case labels for the 32-bit and 64-bit variants
   of opcode X; e.g. CASE_OP_32_64(add) yields
   "case INDEX_op_add_i32: case INDEX_op_add_i64".  */
#define CASE_OP_32_64(x)                                                \
        glue(glue(case INDEX_op_, x), _i32):                            \
        glue(glue(case INDEX_op_, x), _i64)
Kirill Batuzov8f2e8c02011-07-07 16:37:12 +040037
/* Constant/copy-propagation state tracked per TCG temporary.  */
typedef enum {
    TCG_TEMP_UNDEF = 0,         /* Nothing known about the temp's value.  */
    TCG_TEMP_CONST,             /* Temp holds the constant in .val.  */
    TCG_TEMP_COPY,              /* Temp is a copy of the temps linked via
                                   prev_copy/next_copy.  */
} tcg_temp_state;

struct tcg_temp_info {
    tcg_temp_state state;
    uint16_t prev_copy;         /* Previous temp in the circular copy list.  */
    uint16_t next_copy;         /* Next temp in the circular copy list.  */
    tcg_target_ulong val;       /* Constant value when state is CONST.  */
    tcg_target_ulong mask;      /* Bits that may be nonzero; -1 = unknown.  */
};

/* One entry per temp, indexed by temp number.  */
static struct tcg_temp_info temps[TCG_MAX_TEMPS];
53
/* Reset TEMP's state to TCG_TEMP_UNDEF.  If TEMP only had one copy, remove
   the copy flag from the left temp. */
static void reset_temp(TCGArg temp)
{
    if (temps[temp].state == TCG_TEMP_COPY) {
        if (temps[temp].prev_copy == temps[temp].next_copy) {
            /* Only one other element remains in the copy list; it is no
               longer a copy of anything.  */
            temps[temps[temp].next_copy].state = TCG_TEMP_UNDEF;
        } else {
            /* Unlink TEMP from the circular doubly-linked copy list.  */
            temps[temps[temp].next_copy].prev_copy = temps[temp].prev_copy;
            temps[temps[temp].prev_copy].next_copy = temps[temp].next_copy;
        }
    }
    temps[temp].state = TCG_TEMP_UNDEF;
    temps[temp].mask = -1;      /* All bits unknown again.  */
}
69
/* Allocate a fresh op with NARGS argument slots and splice it into the
   gen_op_buf doubly-linked list immediately before OLD_OP.  Returns the
   new op; its arguments are left uninitialized for the caller to fill.  */
static TCGOp *insert_op_before(TCGContext *s, TCGOp *old_op,
                               TCGOpcode opc, int nargs)
{
    int oi = s->gen_next_op_idx;        /* index of the new op */
    int pi = s->gen_next_parm_idx;      /* start of its argument slots */
    int prev = old_op->prev;
    int next = old_op - s->gen_op_buf;  /* index of OLD_OP itself */
    TCGOp *new_op;

    tcg_debug_assert(oi < OPC_BUF_SIZE);
    tcg_debug_assert(pi + nargs <= OPPARAM_BUF_SIZE);
    s->gen_next_op_idx = oi + 1;
    s->gen_next_parm_idx = pi + nargs;

    new_op = &s->gen_op_buf[oi];
    *new_op = (TCGOp){
        .opc = opc,
        .args = pi,
        .prev = prev,
        .next = next
    };
    /* Link the predecessor (or the list head) to the new op.  */
    if (prev >= 0) {
        s->gen_op_buf[prev].next = oi;
    } else {
        /* OLD_OP was first; the new op becomes the head of the list.  */
        s->gen_first_op_idx = oi;
    }
    old_op->prev = oi;

    return new_op;
}
100
Paolo Bonzinid193a142013-01-11 15:42:51 -0800101/* Reset all temporaries, given that there are NB_TEMPS of them. */
102static void reset_all_temps(int nb_temps)
103{
104 int i;
105 for (i = 0; i < nb_temps; i++) {
106 temps[i].state = TCG_TEMP_UNDEF;
Paolo Bonzini3a9d8b12013-01-11 15:42:52 -0800107 temps[i].mask = -1;
Paolo Bonzinid193a142013-01-11 15:42:51 -0800108 }
109}
110
Blue Swirlfe0de7a2011-07-30 19:18:32 +0000111static int op_bits(TCGOpcode op)
Kirill Batuzov22613af2011-07-07 16:37:13 +0400112{
Richard Henderson8399ad52011-08-17 14:11:45 -0700113 const TCGOpDef *def = &tcg_op_defs[op];
114 return def->flags & TCG_OPF_64BIT ? 64 : 32;
Kirill Batuzov22613af2011-07-07 16:37:13 +0400115}
116
Richard Hendersona62f6f52014-05-22 10:59:12 -0700117static TCGOpcode op_to_mov(TCGOpcode op)
118{
119 switch (op_bits(op)) {
120 case 32:
121 return INDEX_op_mov_i32;
122 case 64:
123 return INDEX_op_mov_i64;
124 default:
125 fprintf(stderr, "op_to_mov: unexpected return value of "
126 "function op_bits.\n");
127 tcg_abort();
128 }
129}
130
Blue Swirlfe0de7a2011-07-30 19:18:32 +0000131static TCGOpcode op_to_movi(TCGOpcode op)
Kirill Batuzov22613af2011-07-07 16:37:13 +0400132{
133 switch (op_bits(op)) {
134 case 32:
135 return INDEX_op_movi_i32;
Kirill Batuzov22613af2011-07-07 16:37:13 +0400136 case 64:
137 return INDEX_op_movi_i64;
Kirill Batuzov22613af2011-07-07 16:37:13 +0400138 default:
139 fprintf(stderr, "op_to_movi: unexpected return value of "
140 "function op_bits.\n");
141 tcg_abort();
142 }
143}
144
Aurelien Jarnoe590d4e2012-09-11 12:31:21 +0200145static TCGArg find_better_copy(TCGContext *s, TCGArg temp)
146{
147 TCGArg i;
148
149 /* If this is already a global, we can't do better. */
150 if (temp < s->nb_globals) {
151 return temp;
152 }
153
154 /* Search for a global first. */
155 for (i = temps[temp].next_copy ; i != temp ; i = temps[i].next_copy) {
156 if (i < s->nb_globals) {
157 return i;
158 }
159 }
160
161 /* If it is a temp, search for a temp local. */
162 if (!s->temps[temp].temp_local) {
163 for (i = temps[temp].next_copy ; i != temp ; i = temps[i].next_copy) {
164 if (s->temps[i].temp_local) {
165 return i;
166 }
167 }
168 }
169
170 /* Failure to find a better representation, return the same temp. */
171 return temp;
172}
173
174static bool temps_are_copies(TCGArg arg1, TCGArg arg2)
175{
176 TCGArg i;
177
178 if (arg1 == arg2) {
179 return true;
180 }
181
182 if (temps[arg1].state != TCG_TEMP_COPY
183 || temps[arg2].state != TCG_TEMP_COPY) {
184 return false;
185 }
186
187 for (i = temps[arg1].next_copy ; i != arg1 ; i = temps[i].next_copy) {
188 if (i == arg2) {
189 return true;
190 }
191 }
192
193 return false;
194}
195
/* Rewrite OP as "mov dst, src": drop the op entirely when DST and SRC
   are already copies, otherwise propagate SRC's known-bits mask to DST
   and (when types match) link DST into SRC's copy list.  */
static void tcg_opt_gen_mov(TCGContext *s, TCGOp *op, TCGArg *args,
                            TCGArg dst, TCGArg src)
{
    if (temps_are_copies(dst, src)) {
        /* The move would be a no-op; delete it.  */
        tcg_op_remove(s, op);
        return;
    }

    TCGOpcode new_op = op_to_mov(op->opc);
    tcg_target_ulong mask;

    op->opc = new_op;

    reset_temp(dst);
    mask = temps[src].mask;
    if (TCG_TARGET_REG_BITS > 32 && new_op == INDEX_op_mov_i32) {
        /* High bits of the destination are now garbage. */
        mask |= ~0xffffffffull;
    }
    temps[dst].mask = mask;

    /* Constant sources must go through tcg_opt_gen_movi instead.  */
    assert(temps[src].state != TCG_TEMP_CONST);

    if (s->temps[src].type == s->temps[dst].type) {
        /* If SRC was not yet a copy, make it the head of a new
           single-element circular list before inserting DST.  */
        if (temps[src].state != TCG_TEMP_COPY) {
            temps[src].state = TCG_TEMP_COPY;
            temps[src].next_copy = src;
            temps[src].prev_copy = src;
        }
        /* Insert DST right after SRC in the circular list.  */
        temps[dst].state = TCG_TEMP_COPY;
        temps[dst].next_copy = temps[src].next_copy;
        temps[dst].prev_copy = src;
        temps[temps[dst].next_copy].prev_copy = dst;
        temps[src].next_copy = dst;
    }

    args[0] = dst;
    args[1] = src;
}
235
Richard Hendersonc45cb8b2014-09-19 13:49:15 -0700236static void tcg_opt_gen_movi(TCGContext *s, TCGOp *op, TCGArg *args,
Aurelien Jarnoebd27392015-06-04 21:53:23 +0200237 TCGArg dst, TCGArg val)
Kirill Batuzov22613af2011-07-07 16:37:13 +0400238{
Aurelien Jarnoebd27392015-06-04 21:53:23 +0200239 TCGOpcode new_op = op_to_movi(op->opc);
Richard Henderson24666ba2014-05-22 11:14:10 -0700240 tcg_target_ulong mask;
Richard Hendersona62f6f52014-05-22 10:59:12 -0700241
Richard Hendersonc45cb8b2014-09-19 13:49:15 -0700242 op->opc = new_op;
Richard Hendersona62f6f52014-05-22 10:59:12 -0700243
Paolo Bonzini3a9d8b12013-01-11 15:42:52 -0800244 reset_temp(dst);
245 temps[dst].state = TCG_TEMP_CONST;
246 temps[dst].val = val;
Richard Henderson24666ba2014-05-22 11:14:10 -0700247 mask = val;
248 if (TCG_TARGET_REG_BITS > 32 && new_op == INDEX_op_mov_i32) {
249 /* High bits of the destination are now garbage. */
250 mask |= ~0xffffffffull;
251 }
252 temps[dst].mask = mask;
253
Richard Hendersonc45cb8b2014-09-19 13:49:15 -0700254 args[0] = dst;
255 args[1] = val;
Kirill Batuzov22613af2011-07-07 16:37:13 +0400256}
257
/* Evaluate opcode OP on constant operands X and Y (Y is ignored for
   unary ops).  The result is not truncated to the operation width;
   the caller (do_constant_folding) handles that.  */
static TCGArg do_constant_folding_2(TCGOpcode op, TCGArg x, TCGArg y)
{
    uint64_t l64, h64;

    switch (op) {
    CASE_OP_32_64(add):
        return x + y;

    CASE_OP_32_64(sub):
        return x - y;

    CASE_OP_32_64(mul):
        return x * y;

    CASE_OP_32_64(and):
        return x & y;

    CASE_OP_32_64(or):
        return x | y;

    CASE_OP_32_64(xor):
        return x ^ y;

    /* Shift/rotate counts are masked to the operation width, matching
       TCG's semantics for out-of-range counts and avoiding UB in C.  */
    case INDEX_op_shl_i32:
        return (uint32_t)x << (y & 31);

    case INDEX_op_shl_i64:
        return (uint64_t)x << (y & 63);

    case INDEX_op_shr_i32:
        return (uint32_t)x >> (y & 31);

    case INDEX_op_trunc_shr_i32:
    case INDEX_op_shr_i64:
        return (uint64_t)x >> (y & 63);

    case INDEX_op_sar_i32:
        return (int32_t)x >> (y & 31);

    case INDEX_op_sar_i64:
        return (int64_t)x >> (y & 63);

    case INDEX_op_rotr_i32:
        return ror32(x, y & 31);

    case INDEX_op_rotr_i64:
        return ror64(x, y & 63);

    case INDEX_op_rotl_i32:
        return rol32(x, y & 31);

    case INDEX_op_rotl_i64:
        return rol64(x, y & 63);

    CASE_OP_32_64(not):
        return ~x;

    CASE_OP_32_64(neg):
        return -x;

    CASE_OP_32_64(andc):
        return x & ~y;

    CASE_OP_32_64(orc):
        return x | ~y;

    CASE_OP_32_64(eqv):
        return ~(x ^ y);

    CASE_OP_32_64(nand):
        return ~(x & y);

    CASE_OP_32_64(nor):
        return ~(x | y);

    /* Sign/zero extensions via C integer casts.  */
    CASE_OP_32_64(ext8s):
        return (int8_t)x;

    CASE_OP_32_64(ext16s):
        return (int16_t)x;

    CASE_OP_32_64(ext8u):
        return (uint8_t)x;

    CASE_OP_32_64(ext16u):
        return (uint16_t)x;

    case INDEX_op_ext32s_i64:
        return (int32_t)x;

    case INDEX_op_ext32u_i64:
        return (uint32_t)x;

    /* High half of widening multiplies.  */
    case INDEX_op_muluh_i32:
        return ((uint64_t)(uint32_t)x * (uint32_t)y) >> 32;
    case INDEX_op_mulsh_i32:
        return ((int64_t)(int32_t)x * (int32_t)y) >> 32;

    case INDEX_op_muluh_i64:
        mulu64(&l64, &h64, x, y);
        return h64;
    case INDEX_op_mulsh_i64:
        muls64(&l64, &h64, x, y);
        return h64;

    case INDEX_op_div_i32:
        /* Avoid crashing on divide by zero, otherwise undefined. */
        return (int32_t)x / ((int32_t)y ? : 1);
    case INDEX_op_divu_i32:
        return (uint32_t)x / ((uint32_t)y ? : 1);
    case INDEX_op_div_i64:
        return (int64_t)x / ((int64_t)y ? : 1);
    case INDEX_op_divu_i64:
        return (uint64_t)x / ((uint64_t)y ? : 1);

    case INDEX_op_rem_i32:
        return (int32_t)x % ((int32_t)y ? : 1);
    case INDEX_op_remu_i32:
        return (uint32_t)x % ((uint32_t)y ? : 1);
    case INDEX_op_rem_i64:
        return (int64_t)x % ((int64_t)y ? : 1);
    case INDEX_op_remu_i64:
        return (uint64_t)x % ((uint64_t)y ? : 1);

    default:
        fprintf(stderr,
                "Unrecognized operation %d in do_constant_folding.\n", op);
        tcg_abort();
    }
}
388
Blue Swirlfe0de7a2011-07-30 19:18:32 +0000389static TCGArg do_constant_folding(TCGOpcode op, TCGArg x, TCGArg y)
Kirill Batuzov53108fb2011-07-07 16:37:14 +0400390{
391 TCGArg res = do_constant_folding_2(op, x, y);
Kirill Batuzov53108fb2011-07-07 16:37:14 +0400392 if (op_bits(op) == 32) {
393 res &= 0xffffffff;
394 }
Kirill Batuzov53108fb2011-07-07 16:37:14 +0400395 return res;
396}
397
Richard Henderson9519da72012-10-02 11:32:26 -0700398static bool do_constant_folding_cond_32(uint32_t x, uint32_t y, TCGCond c)
399{
400 switch (c) {
401 case TCG_COND_EQ:
402 return x == y;
403 case TCG_COND_NE:
404 return x != y;
405 case TCG_COND_LT:
406 return (int32_t)x < (int32_t)y;
407 case TCG_COND_GE:
408 return (int32_t)x >= (int32_t)y;
409 case TCG_COND_LE:
410 return (int32_t)x <= (int32_t)y;
411 case TCG_COND_GT:
412 return (int32_t)x > (int32_t)y;
413 case TCG_COND_LTU:
414 return x < y;
415 case TCG_COND_GEU:
416 return x >= y;
417 case TCG_COND_LEU:
418 return x <= y;
419 case TCG_COND_GTU:
420 return x > y;
421 default:
422 tcg_abort();
423 }
424}
425
426static bool do_constant_folding_cond_64(uint64_t x, uint64_t y, TCGCond c)
427{
428 switch (c) {
429 case TCG_COND_EQ:
430 return x == y;
431 case TCG_COND_NE:
432 return x != y;
433 case TCG_COND_LT:
434 return (int64_t)x < (int64_t)y;
435 case TCG_COND_GE:
436 return (int64_t)x >= (int64_t)y;
437 case TCG_COND_LE:
438 return (int64_t)x <= (int64_t)y;
439 case TCG_COND_GT:
440 return (int64_t)x > (int64_t)y;
441 case TCG_COND_LTU:
442 return x < y;
443 case TCG_COND_GEU:
444 return x >= y;
445 case TCG_COND_LEU:
446 return x <= y;
447 case TCG_COND_GTU:
448 return x > y;
449 default:
450 tcg_abort();
451 }
452}
453
454static bool do_constant_folding_cond_eq(TCGCond c)
455{
456 switch (c) {
457 case TCG_COND_GT:
458 case TCG_COND_LTU:
459 case TCG_COND_LT:
460 case TCG_COND_GTU:
461 case TCG_COND_NE:
462 return 0;
463 case TCG_COND_GE:
464 case TCG_COND_GEU:
465 case TCG_COND_LE:
466 case TCG_COND_LEU:
467 case TCG_COND_EQ:
468 return 1;
469 default:
470 tcg_abort();
471 }
472}
473
Aurelien Jarnob336ceb2012-09-18 19:37:00 +0200474/* Return 2 if the condition can't be simplified, and the result
475 of the condition (0 or 1) if it can */
Aurelien Jarnof8dd19e2012-09-06 16:47:14 +0200476static TCGArg do_constant_folding_cond(TCGOpcode op, TCGArg x,
477 TCGArg y, TCGCond c)
478{
Aurelien Jarnob336ceb2012-09-18 19:37:00 +0200479 if (temps[x].state == TCG_TEMP_CONST && temps[y].state == TCG_TEMP_CONST) {
480 switch (op_bits(op)) {
481 case 32:
Richard Henderson9519da72012-10-02 11:32:26 -0700482 return do_constant_folding_cond_32(temps[x].val, temps[y].val, c);
Aurelien Jarnob336ceb2012-09-18 19:37:00 +0200483 case 64:
Richard Henderson9519da72012-10-02 11:32:26 -0700484 return do_constant_folding_cond_64(temps[x].val, temps[y].val, c);
485 default:
486 tcg_abort();
Aurelien Jarnof8dd19e2012-09-06 16:47:14 +0200487 }
Aurelien Jarnob336ceb2012-09-18 19:37:00 +0200488 } else if (temps_are_copies(x, y)) {
Richard Henderson9519da72012-10-02 11:32:26 -0700489 return do_constant_folding_cond_eq(c);
Aurelien Jarnob336ceb2012-09-18 19:37:00 +0200490 } else if (temps[y].state == TCG_TEMP_CONST && temps[y].val == 0) {
491 switch (c) {
492 case TCG_COND_LTU:
493 return 0;
494 case TCG_COND_GEU:
495 return 1;
496 default:
497 return 2;
498 }
499 } else {
500 return 2;
Aurelien Jarnof8dd19e2012-09-06 16:47:14 +0200501 }
Aurelien Jarnof8dd19e2012-09-06 16:47:14 +0200502}
503
/* Return 2 if the condition can't be simplified, and the result
   of the condition (0 or 1) if it can */
static TCGArg do_constant_folding_cond2(TCGArg *p1, TCGArg *p2, TCGCond c)
{
    /* P1 and P2 each name a 64-bit operand split into {low, high}
       32-bit halves.  */
    TCGArg al = p1[0], ah = p1[1];
    TCGArg bl = p2[0], bh = p2[1];

    if (temps[bl].state == TCG_TEMP_CONST
        && temps[bh].state == TCG_TEMP_CONST) {
        /* Reassemble the full 64-bit RHS constant.  */
        uint64_t b = ((uint64_t)temps[bh].val << 32) | (uint32_t)temps[bl].val;

        if (temps[al].state == TCG_TEMP_CONST
            && temps[ah].state == TCG_TEMP_CONST) {
            uint64_t a;
            a = ((uint64_t)temps[ah].val << 32) | (uint32_t)temps[al].val;
            return do_constant_folding_cond_64(a, b, c);
        }
        if (b == 0) {
            /* Unsigned comparison against zero has a fixed result.  */
            switch (c) {
            case TCG_COND_LTU:
                return 0;
            case TCG_COND_GEU:
                return 1;
            default:
                break;
            }
        }
    }
    /* Both halves copies of each other: operands are known equal.  */
    if (temps_are_copies(al, bl) && temps_are_copies(ah, bh)) {
        return do_constant_folding_cond_eq(c);
    }
    return 2;
}
537
Richard Henderson24c9ae42012-10-02 11:32:21 -0700538static bool swap_commutative(TCGArg dest, TCGArg *p1, TCGArg *p2)
539{
540 TCGArg a1 = *p1, a2 = *p2;
541 int sum = 0;
542 sum += temps[a1].state == TCG_TEMP_CONST;
543 sum -= temps[a2].state == TCG_TEMP_CONST;
544
545 /* Prefer the constant in second argument, and then the form
546 op a, a, b, which is better handled on non-RISC hosts. */
547 if (sum > 0 || (sum == 0 && dest == a2)) {
548 *p1 = a2;
549 *p2 = a1;
550 return true;
551 }
552 return false;
553}
554
Richard Henderson0bfcb862012-10-02 11:32:23 -0700555static bool swap_commutative2(TCGArg *p1, TCGArg *p2)
556{
557 int sum = 0;
558 sum += temps[p1[0]].state == TCG_TEMP_CONST;
559 sum += temps[p1[1]].state == TCG_TEMP_CONST;
560 sum -= temps[p2[0]].state == TCG_TEMP_CONST;
561 sum -= temps[p2[1]].state == TCG_TEMP_CONST;
562 if (sum > 0) {
563 TCGArg t;
564 t = p1[0], p1[0] = p2[0], p2[0] = t;
565 t = p1[1], p1[1] = p2[1], p2[1] = t;
566 return true;
567 }
568 return false;
569}
570
Kirill Batuzov22613af2011-07-07 16:37:13 +0400571/* Propagate constants and copies, fold constant expressions. */
Richard Hendersonc45cb8b2014-09-19 13:49:15 -0700572static void tcg_constant_folding(TCGContext *s)
Kirill Batuzov8f2e8c02011-07-07 16:37:12 +0400573{
Richard Hendersonc45cb8b2014-09-19 13:49:15 -0700574 int oi, oi_next, nb_temps, nb_globals;
Richard Henderson5d8f5362012-09-21 10:13:38 -0700575
Kirill Batuzov22613af2011-07-07 16:37:13 +0400576 /* Array VALS has an element for each temp.
577 If this temp holds a constant then its value is kept in VALS' element.
Aurelien Jarnoe590d4e2012-09-11 12:31:21 +0200578 If this temp is a copy of other ones then the other copies are
579 available through the doubly linked circular list. */
Kirill Batuzov8f2e8c02011-07-07 16:37:12 +0400580
581 nb_temps = s->nb_temps;
582 nb_globals = s->nb_globals;
Paolo Bonzinid193a142013-01-11 15:42:51 -0800583 reset_all_temps(nb_temps);
Kirill Batuzov8f2e8c02011-07-07 16:37:12 +0400584
Richard Hendersonc45cb8b2014-09-19 13:49:15 -0700585 for (oi = s->gen_first_op_idx; oi >= 0; oi = oi_next) {
Richard Henderson24666ba2014-05-22 11:14:10 -0700586 tcg_target_ulong mask, partmask, affected;
Richard Hendersonc45cb8b2014-09-19 13:49:15 -0700587 int nb_oargs, nb_iargs, i;
Richard Hendersoncf066672014-03-22 20:06:52 -0700588 TCGArg tmp;
589
Richard Hendersonc45cb8b2014-09-19 13:49:15 -0700590 TCGOp * const op = &s->gen_op_buf[oi];
591 TCGArg * const args = &s->gen_opparam_buf[op->args];
592 TCGOpcode opc = op->opc;
593 const TCGOpDef *def = &tcg_op_defs[opc];
594
595 oi_next = op->next;
596 if (opc == INDEX_op_call) {
597 nb_oargs = op->callo;
598 nb_iargs = op->calli;
Aurelien Jarno1ff8c542012-09-11 16:18:49 +0200599 } else {
Richard Hendersoncf066672014-03-22 20:06:52 -0700600 nb_oargs = def->nb_oargs;
601 nb_iargs = def->nb_iargs;
Richard Hendersoncf066672014-03-22 20:06:52 -0700602 }
603
604 /* Do copy propagation */
605 for (i = nb_oargs; i < nb_oargs + nb_iargs; i++) {
606 if (temps[args[i]].state == TCG_TEMP_COPY) {
607 args[i] = find_better_copy(s, args[i]);
Kirill Batuzov22613af2011-07-07 16:37:13 +0400608 }
609 }
610
Kirill Batuzov53108fb2011-07-07 16:37:14 +0400611 /* For commutative operations make constant second argument */
Richard Hendersonc45cb8b2014-09-19 13:49:15 -0700612 switch (opc) {
Kirill Batuzov53108fb2011-07-07 16:37:14 +0400613 CASE_OP_32_64(add):
614 CASE_OP_32_64(mul):
Kirill Batuzov9a810902011-07-07 16:37:15 +0400615 CASE_OP_32_64(and):
616 CASE_OP_32_64(or):
617 CASE_OP_32_64(xor):
Richard Hendersoncb25c802011-08-17 14:11:47 -0700618 CASE_OP_32_64(eqv):
619 CASE_OP_32_64(nand):
620 CASE_OP_32_64(nor):
Richard Henderson03271522013-08-14 14:35:56 -0700621 CASE_OP_32_64(muluh):
622 CASE_OP_32_64(mulsh):
Richard Henderson24c9ae42012-10-02 11:32:21 -0700623 swap_commutative(args[0], &args[1], &args[2]);
Kirill Batuzov53108fb2011-07-07 16:37:14 +0400624 break;
Aurelien Jarno65a7cce2012-09-06 16:47:14 +0200625 CASE_OP_32_64(brcond):
Richard Henderson24c9ae42012-10-02 11:32:21 -0700626 if (swap_commutative(-1, &args[0], &args[1])) {
Aurelien Jarno65a7cce2012-09-06 16:47:14 +0200627 args[2] = tcg_swap_cond(args[2]);
628 }
629 break;
630 CASE_OP_32_64(setcond):
Richard Henderson24c9ae42012-10-02 11:32:21 -0700631 if (swap_commutative(args[0], &args[1], &args[2])) {
Aurelien Jarno65a7cce2012-09-06 16:47:14 +0200632 args[3] = tcg_swap_cond(args[3]);
633 }
634 break;
Richard Hendersonfa01a202012-09-21 10:13:37 -0700635 CASE_OP_32_64(movcond):
Richard Henderson24c9ae42012-10-02 11:32:21 -0700636 if (swap_commutative(-1, &args[1], &args[2])) {
637 args[5] = tcg_swap_cond(args[5]);
Richard Hendersonfa01a202012-09-21 10:13:37 -0700638 }
Richard Henderson5d8f5362012-09-21 10:13:38 -0700639 /* For movcond, we canonicalize the "false" input reg to match
640 the destination reg so that the tcg backend can implement
641 a "move if true" operation. */
Richard Henderson24c9ae42012-10-02 11:32:21 -0700642 if (swap_commutative(args[0], &args[4], &args[3])) {
643 args[5] = tcg_invert_cond(args[5]);
Richard Henderson5d8f5362012-09-21 10:13:38 -0700644 }
Richard Henderson1e484e62012-10-02 11:32:22 -0700645 break;
Richard Hendersond7156f72013-02-19 23:51:52 -0800646 CASE_OP_32_64(add2):
Richard Henderson1e484e62012-10-02 11:32:22 -0700647 swap_commutative(args[0], &args[2], &args[4]);
648 swap_commutative(args[1], &args[3], &args[5]);
649 break;
Richard Hendersond7156f72013-02-19 23:51:52 -0800650 CASE_OP_32_64(mulu2):
Richard Henderson4d3203f2013-02-19 23:51:53 -0800651 CASE_OP_32_64(muls2):
Richard Henderson14149682012-10-02 11:32:30 -0700652 swap_commutative(args[0], &args[2], &args[3]);
653 break;
Richard Henderson0bfcb862012-10-02 11:32:23 -0700654 case INDEX_op_brcond2_i32:
655 if (swap_commutative2(&args[0], &args[2])) {
656 args[4] = tcg_swap_cond(args[4]);
657 }
658 break;
659 case INDEX_op_setcond2_i32:
660 if (swap_commutative2(&args[1], &args[3])) {
661 args[5] = tcg_swap_cond(args[5]);
662 }
663 break;
Kirill Batuzov53108fb2011-07-07 16:37:14 +0400664 default:
665 break;
666 }
667
Richard Henderson2d497542013-03-21 09:13:33 -0700668 /* Simplify expressions for "shift/rot r, 0, a => movi r, 0",
669 and "sub r, 0, a => neg r, a" case. */
Richard Hendersonc45cb8b2014-09-19 13:49:15 -0700670 switch (opc) {
Aurelien Jarno01ee5282012-09-06 16:47:14 +0200671 CASE_OP_32_64(shl):
672 CASE_OP_32_64(shr):
673 CASE_OP_32_64(sar):
674 CASE_OP_32_64(rotl):
675 CASE_OP_32_64(rotr):
676 if (temps[args[1]].state == TCG_TEMP_CONST
677 && temps[args[1]].val == 0) {
Aurelien Jarnoebd27392015-06-04 21:53:23 +0200678 tcg_opt_gen_movi(s, op, args, args[0], 0);
Aurelien Jarno01ee5282012-09-06 16:47:14 +0200679 continue;
680 }
681 break;
Richard Henderson2d497542013-03-21 09:13:33 -0700682 CASE_OP_32_64(sub):
683 {
684 TCGOpcode neg_op;
685 bool have_neg;
686
687 if (temps[args[2]].state == TCG_TEMP_CONST) {
688 /* Proceed with possible constant folding. */
689 break;
690 }
Richard Hendersonc45cb8b2014-09-19 13:49:15 -0700691 if (opc == INDEX_op_sub_i32) {
Richard Henderson2d497542013-03-21 09:13:33 -0700692 neg_op = INDEX_op_neg_i32;
693 have_neg = TCG_TARGET_HAS_neg_i32;
694 } else {
695 neg_op = INDEX_op_neg_i64;
696 have_neg = TCG_TARGET_HAS_neg_i64;
697 }
698 if (!have_neg) {
699 break;
700 }
701 if (temps[args[1]].state == TCG_TEMP_CONST
702 && temps[args[1]].val == 0) {
Richard Hendersonc45cb8b2014-09-19 13:49:15 -0700703 op->opc = neg_op;
Richard Henderson2d497542013-03-21 09:13:33 -0700704 reset_temp(args[0]);
Richard Hendersonc45cb8b2014-09-19 13:49:15 -0700705 args[1] = args[2];
Richard Henderson2d497542013-03-21 09:13:33 -0700706 continue;
707 }
708 }
709 break;
Richard Hendersone201b562014-01-28 13:15:38 -0800710 CASE_OP_32_64(xor):
711 CASE_OP_32_64(nand):
712 if (temps[args[1]].state != TCG_TEMP_CONST
713 && temps[args[2]].state == TCG_TEMP_CONST
714 && temps[args[2]].val == -1) {
715 i = 1;
716 goto try_not;
717 }
718 break;
719 CASE_OP_32_64(nor):
720 if (temps[args[1]].state != TCG_TEMP_CONST
721 && temps[args[2]].state == TCG_TEMP_CONST
722 && temps[args[2]].val == 0) {
723 i = 1;
724 goto try_not;
725 }
726 break;
727 CASE_OP_32_64(andc):
728 if (temps[args[2]].state != TCG_TEMP_CONST
729 && temps[args[1]].state == TCG_TEMP_CONST
730 && temps[args[1]].val == -1) {
731 i = 2;
732 goto try_not;
733 }
734 break;
735 CASE_OP_32_64(orc):
736 CASE_OP_32_64(eqv):
737 if (temps[args[2]].state != TCG_TEMP_CONST
738 && temps[args[1]].state == TCG_TEMP_CONST
739 && temps[args[1]].val == 0) {
740 i = 2;
741 goto try_not;
742 }
743 break;
744 try_not:
745 {
746 TCGOpcode not_op;
747 bool have_not;
748
749 if (def->flags & TCG_OPF_64BIT) {
750 not_op = INDEX_op_not_i64;
751 have_not = TCG_TARGET_HAS_not_i64;
752 } else {
753 not_op = INDEX_op_not_i32;
754 have_not = TCG_TARGET_HAS_not_i32;
755 }
756 if (!have_not) {
757 break;
758 }
Richard Hendersonc45cb8b2014-09-19 13:49:15 -0700759 op->opc = not_op;
Richard Hendersone201b562014-01-28 13:15:38 -0800760 reset_temp(args[0]);
Richard Hendersonc45cb8b2014-09-19 13:49:15 -0700761 args[1] = args[i];
Richard Hendersone201b562014-01-28 13:15:38 -0800762 continue;
763 }
Aurelien Jarno01ee5282012-09-06 16:47:14 +0200764 default:
765 break;
766 }
767
Richard Henderson464a1442014-01-31 07:42:11 -0600768 /* Simplify expression for "op r, a, const => mov r, a" cases */
Richard Hendersonc45cb8b2014-09-19 13:49:15 -0700769 switch (opc) {
Kirill Batuzov53108fb2011-07-07 16:37:14 +0400770 CASE_OP_32_64(add):
771 CASE_OP_32_64(sub):
Kirill Batuzov55c09752011-07-07 16:37:16 +0400772 CASE_OP_32_64(shl):
773 CASE_OP_32_64(shr):
774 CASE_OP_32_64(sar):
Richard Henderson25c4d9c2011-08-17 14:11:46 -0700775 CASE_OP_32_64(rotl):
776 CASE_OP_32_64(rotr):
Aurelien Jarno38ee1882012-09-06 16:47:14 +0200777 CASE_OP_32_64(or):
778 CASE_OP_32_64(xor):
Richard Henderson464a1442014-01-31 07:42:11 -0600779 CASE_OP_32_64(andc):
780 if (temps[args[1]].state != TCG_TEMP_CONST
781 && temps[args[2]].state == TCG_TEMP_CONST
Kirill Batuzov53108fb2011-07-07 16:37:14 +0400782 && temps[args[2]].val == 0) {
Richard Henderson464a1442014-01-31 07:42:11 -0600783 goto do_mov3;
Kirill Batuzov53108fb2011-07-07 16:37:14 +0400784 }
785 break;
Richard Henderson464a1442014-01-31 07:42:11 -0600786 CASE_OP_32_64(and):
787 CASE_OP_32_64(orc):
788 CASE_OP_32_64(eqv):
789 if (temps[args[1]].state != TCG_TEMP_CONST
790 && temps[args[2]].state == TCG_TEMP_CONST
791 && temps[args[2]].val == -1) {
792 goto do_mov3;
793 }
794 break;
795 do_mov3:
Aurelien Jarno53657182015-06-04 21:53:25 +0200796 tcg_opt_gen_mov(s, op, args, args[0], args[1]);
Richard Henderson464a1442014-01-31 07:42:11 -0600797 continue;
Aurelien Jarno56e49432012-09-06 16:47:13 +0200798 default:
799 break;
800 }
801
Aurelien Jarno30312442013-09-03 08:27:38 +0200802 /* Simplify using known-zero bits. Currently only ops with a single
803 output argument is supported. */
Paolo Bonzini3a9d8b12013-01-11 15:42:52 -0800804 mask = -1;
Paolo Bonzini633f6502013-01-11 15:42:53 -0800805 affected = -1;
Richard Hendersonc45cb8b2014-09-19 13:49:15 -0700806 switch (opc) {
Paolo Bonzini3a9d8b12013-01-11 15:42:52 -0800807 CASE_OP_32_64(ext8s):
808 if ((temps[args[1]].mask & 0x80) != 0) {
809 break;
810 }
811 CASE_OP_32_64(ext8u):
812 mask = 0xff;
813 goto and_const;
814 CASE_OP_32_64(ext16s):
815 if ((temps[args[1]].mask & 0x8000) != 0) {
816 break;
817 }
818 CASE_OP_32_64(ext16u):
819 mask = 0xffff;
820 goto and_const;
821 case INDEX_op_ext32s_i64:
822 if ((temps[args[1]].mask & 0x80000000) != 0) {
823 break;
824 }
825 case INDEX_op_ext32u_i64:
826 mask = 0xffffffffU;
827 goto and_const;
828
829 CASE_OP_32_64(and):
830 mask = temps[args[2]].mask;
831 if (temps[args[2]].state == TCG_TEMP_CONST) {
832 and_const:
Paolo Bonzini633f6502013-01-11 15:42:53 -0800833 affected = temps[args[1]].mask & ~mask;
Paolo Bonzini3a9d8b12013-01-11 15:42:52 -0800834 }
835 mask = temps[args[1]].mask & mask;
836 break;
837
Richard Henderson23ec69ed2014-01-28 12:03:24 -0800838 CASE_OP_32_64(andc):
839 /* Known-zeros does not imply known-ones. Therefore unless
840 args[2] is constant, we can't infer anything from it. */
841 if (temps[args[2]].state == TCG_TEMP_CONST) {
842 mask = ~temps[args[2]].mask;
843 goto and_const;
844 }
845 /* But we certainly know nothing outside args[1] may be set. */
846 mask = temps[args[1]].mask;
847 break;
848
Aurelien Jarnoe46b2252013-09-03 08:27:38 +0200849 case INDEX_op_sar_i32:
Paolo Bonzini3a9d8b12013-01-11 15:42:52 -0800850 if (temps[args[2]].state == TCG_TEMP_CONST) {
Richard Henderson50c5c4d2014-03-18 07:45:39 -0700851 tmp = temps[args[2]].val & 31;
852 mask = (int32_t)temps[args[1]].mask >> tmp;
Aurelien Jarnoe46b2252013-09-03 08:27:38 +0200853 }
854 break;
855 case INDEX_op_sar_i64:
856 if (temps[args[2]].state == TCG_TEMP_CONST) {
Richard Henderson50c5c4d2014-03-18 07:45:39 -0700857 tmp = temps[args[2]].val & 63;
858 mask = (int64_t)temps[args[1]].mask >> tmp;
Paolo Bonzini3a9d8b12013-01-11 15:42:52 -0800859 }
860 break;
861
Aurelien Jarnoe46b2252013-09-03 08:27:38 +0200862 case INDEX_op_shr_i32:
Paolo Bonzini3a9d8b12013-01-11 15:42:52 -0800863 if (temps[args[2]].state == TCG_TEMP_CONST) {
Richard Henderson50c5c4d2014-03-18 07:45:39 -0700864 tmp = temps[args[2]].val & 31;
865 mask = (uint32_t)temps[args[1]].mask >> tmp;
Aurelien Jarnoe46b2252013-09-03 08:27:38 +0200866 }
867 break;
868 case INDEX_op_shr_i64:
869 if (temps[args[2]].state == TCG_TEMP_CONST) {
Richard Henderson50c5c4d2014-03-18 07:45:39 -0700870 tmp = temps[args[2]].val & 63;
871 mask = (uint64_t)temps[args[1]].mask >> tmp;
Paolo Bonzini3a9d8b12013-01-11 15:42:52 -0800872 }
873 break;
874
Richard Henderson4bb7a412013-09-09 17:03:24 -0700875 case INDEX_op_trunc_shr_i32:
876 mask = (uint64_t)temps[args[1]].mask >> args[2];
877 break;
878
Paolo Bonzini3a9d8b12013-01-11 15:42:52 -0800879 CASE_OP_32_64(shl):
880 if (temps[args[2]].state == TCG_TEMP_CONST) {
Richard Henderson50c5c4d2014-03-18 07:45:39 -0700881 tmp = temps[args[2]].val & (TCG_TARGET_REG_BITS - 1);
882 mask = temps[args[1]].mask << tmp;
Paolo Bonzini3a9d8b12013-01-11 15:42:52 -0800883 }
884 break;
885
886 CASE_OP_32_64(neg):
887 /* Set to 1 all bits to the left of the rightmost. */
888 mask = -(temps[args[1]].mask & -temps[args[1]].mask);
889 break;
890
891 CASE_OP_32_64(deposit):
Richard Hendersond998e552014-03-18 14:23:52 -0700892 mask = deposit64(temps[args[1]].mask, args[3], args[4],
893 temps[args[2]].mask);
Paolo Bonzini3a9d8b12013-01-11 15:42:52 -0800894 break;
895
896 CASE_OP_32_64(or):
897 CASE_OP_32_64(xor):
898 mask = temps[args[1]].mask | temps[args[2]].mask;
899 break;
900
901 CASE_OP_32_64(setcond):
Richard Hendersona7635512014-04-23 22:18:30 -0700902 case INDEX_op_setcond2_i32:
Paolo Bonzini3a9d8b12013-01-11 15:42:52 -0800903 mask = 1;
904 break;
905
906 CASE_OP_32_64(movcond):
907 mask = temps[args[3]].mask | temps[args[4]].mask;
908 break;
909
Aurelien Jarnoc8d70272013-09-03 08:27:39 +0200910 CASE_OP_32_64(ld8u):
Aurelien Jarnoc8d70272013-09-03 08:27:39 +0200911 mask = 0xff;
912 break;
913 CASE_OP_32_64(ld16u):
Aurelien Jarnoc8d70272013-09-03 08:27:39 +0200914 mask = 0xffff;
915 break;
916 case INDEX_op_ld32u_i64:
Aurelien Jarnoc8d70272013-09-03 08:27:39 +0200917 mask = 0xffffffffu;
918 break;
919
920 CASE_OP_32_64(qemu_ld):
921 {
Richard Henderson59227d52015-05-12 11:51:44 -0700922 TCGMemOpIdx oi = args[nb_oargs + nb_iargs];
923 TCGMemOp mop = get_memop(oi);
Aurelien Jarnoc8d70272013-09-03 08:27:39 +0200924 if (!(mop & MO_SIGN)) {
925 mask = (2ULL << ((8 << (mop & MO_SIZE)) - 1)) - 1;
926 }
927 }
928 break;
929
Paolo Bonzini3a9d8b12013-01-11 15:42:52 -0800930 default:
931 break;
932 }
933
    /* 32-bit ops generate 32-bit results.  For the result-is-zero test
       below, we can ignore high bits, but for further optimizations we
       need to record that the high bits contain garbage.  */
Richard Henderson24666ba2014-05-22 11:14:10 -0700937 partmask = mask;
Richard Hendersonbc8d6882014-06-08 18:24:14 -0700938 if (!(def->flags & TCG_OPF_64BIT)) {
Richard Henderson24666ba2014-05-22 11:14:10 -0700939 mask |= ~(tcg_target_ulong)0xffffffffu;
940 partmask &= 0xffffffffu;
941 affected &= 0xffffffffu;
Aurelien Jarnof096dc92013-09-03 08:27:38 +0200942 }
943
Richard Henderson24666ba2014-05-22 11:14:10 -0700944 if (partmask == 0) {
Richard Hendersoncf066672014-03-22 20:06:52 -0700945 assert(nb_oargs == 1);
Aurelien Jarnoebd27392015-06-04 21:53:23 +0200946 tcg_opt_gen_movi(s, op, args, args[0], 0);
Paolo Bonzini633f6502013-01-11 15:42:53 -0800947 continue;
948 }
949 if (affected == 0) {
Richard Hendersoncf066672014-03-22 20:06:52 -0700950 assert(nb_oargs == 1);
Aurelien Jarno53657182015-06-04 21:53:25 +0200951 if (temps[args[1]].state != TCG_TEMP_CONST) {
Aurelien Jarno8d6a9162015-06-04 21:53:24 +0200952 tcg_opt_gen_mov(s, op, args, args[0], args[1]);
Paolo Bonzini633f6502013-01-11 15:42:53 -0800953 } else {
Aurelien Jarnoebd27392015-06-04 21:53:23 +0200954 tcg_opt_gen_movi(s, op, args,
Richard Hendersona62f6f52014-05-22 10:59:12 -0700955 args[0], temps[args[1]].val);
Paolo Bonzini633f6502013-01-11 15:42:53 -0800956 }
Paolo Bonzini633f6502013-01-11 15:42:53 -0800957 continue;
958 }
959
Aurelien Jarno56e49432012-09-06 16:47:13 +0200960 /* Simplify expression for "op r, a, 0 => movi r, 0" cases */
Richard Hendersonc45cb8b2014-09-19 13:49:15 -0700961 switch (opc) {
Aurelien Jarno61251c02012-09-06 16:47:14 +0200962 CASE_OP_32_64(and):
Kirill Batuzov53108fb2011-07-07 16:37:14 +0400963 CASE_OP_32_64(mul):
Richard Henderson03271522013-08-14 14:35:56 -0700964 CASE_OP_32_64(muluh):
965 CASE_OP_32_64(mulsh):
Kirill Batuzov53108fb2011-07-07 16:37:14 +0400966 if ((temps[args[2]].state == TCG_TEMP_CONST
967 && temps[args[2]].val == 0)) {
Aurelien Jarnoebd27392015-06-04 21:53:23 +0200968 tcg_opt_gen_movi(s, op, args, args[0], 0);
Kirill Batuzov53108fb2011-07-07 16:37:14 +0400969 continue;
970 }
971 break;
Aurelien Jarno56e49432012-09-06 16:47:13 +0200972 default:
973 break;
974 }
975
976 /* Simplify expression for "op r, a, a => mov r, a" cases */
Richard Hendersonc45cb8b2014-09-19 13:49:15 -0700977 switch (opc) {
Kirill Batuzov9a810902011-07-07 16:37:15 +0400978 CASE_OP_32_64(or):
979 CASE_OP_32_64(and):
Aurelien Jarno0aba1c72012-09-18 19:11:32 +0200980 if (temps_are_copies(args[1], args[2])) {
Aurelien Jarno53657182015-06-04 21:53:25 +0200981 if (temps[args[1]].state != TCG_TEMP_CONST) {
Aurelien Jarno8d6a9162015-06-04 21:53:24 +0200982 tcg_opt_gen_mov(s, op, args, args[0], args[1]);
Richard Henderson2374c4b2015-03-13 12:26:21 -0700983 } else {
Aurelien Jarnoebd27392015-06-04 21:53:23 +0200984 tcg_opt_gen_movi(s, op, args,
Richard Henderson2374c4b2015-03-13 12:26:21 -0700985 args[0], temps[args[1]].val);
Kirill Batuzov9a810902011-07-07 16:37:15 +0400986 }
987 continue;
988 }
989 break;
Blue Swirlfe0de7a2011-07-30 19:18:32 +0000990 default:
991 break;
Kirill Batuzov53108fb2011-07-07 16:37:14 +0400992 }
993
Aurelien Jarno3c941932012-09-18 19:12:36 +0200994 /* Simplify expression for "op r, a, a => movi r, 0" cases */
Richard Hendersonc45cb8b2014-09-19 13:49:15 -0700995 switch (opc) {
Richard Hendersone64e9582014-01-28 13:26:17 -0800996 CASE_OP_32_64(andc):
Aurelien Jarno3c941932012-09-18 19:12:36 +0200997 CASE_OP_32_64(sub):
998 CASE_OP_32_64(xor):
999 if (temps_are_copies(args[1], args[2])) {
Aurelien Jarnoebd27392015-06-04 21:53:23 +02001000 tcg_opt_gen_movi(s, op, args, args[0], 0);
Aurelien Jarno3c941932012-09-18 19:12:36 +02001001 continue;
1002 }
1003 break;
1004 default:
1005 break;
1006 }
1007
Kirill Batuzov22613af2011-07-07 16:37:13 +04001008 /* Propagate constants through copy operations and do constant
1009 folding. Constants will be substituted to arguments by register
1010 allocator where needed and possible. Also detect copies. */
Richard Hendersonc45cb8b2014-09-19 13:49:15 -07001011 switch (opc) {
Kirill Batuzov22613af2011-07-07 16:37:13 +04001012 CASE_OP_32_64(mov):
Kirill Batuzov22613af2011-07-07 16:37:13 +04001013 if (temps[args[1]].state != TCG_TEMP_CONST) {
Aurelien Jarno8d6a9162015-06-04 21:53:24 +02001014 tcg_opt_gen_mov(s, op, args, args[0], args[1]);
Kirill Batuzov22613af2011-07-07 16:37:13 +04001015 break;
1016 }
1017 /* Source argument is constant. Rewrite the operation and
1018 let movi case handle it. */
Kirill Batuzov22613af2011-07-07 16:37:13 +04001019 args[1] = temps[args[1]].val;
1020 /* fallthrough */
1021 CASE_OP_32_64(movi):
Aurelien Jarnoebd27392015-06-04 21:53:23 +02001022 tcg_opt_gen_movi(s, op, args, args[0], args[1]);
Kirill Batuzov22613af2011-07-07 16:37:13 +04001023 break;
Richard Henderson6e14e912012-10-02 11:32:24 -07001024
Kirill Batuzova640f032011-07-07 16:37:17 +04001025 CASE_OP_32_64(not):
Richard Hendersoncb25c802011-08-17 14:11:47 -07001026 CASE_OP_32_64(neg):
Richard Henderson25c4d9c2011-08-17 14:11:46 -07001027 CASE_OP_32_64(ext8s):
1028 CASE_OP_32_64(ext8u):
1029 CASE_OP_32_64(ext16s):
1030 CASE_OP_32_64(ext16u):
Kirill Batuzova640f032011-07-07 16:37:17 +04001031 case INDEX_op_ext32s_i64:
1032 case INDEX_op_ext32u_i64:
Kirill Batuzova640f032011-07-07 16:37:17 +04001033 if (temps[args[1]].state == TCG_TEMP_CONST) {
Richard Hendersonc45cb8b2014-09-19 13:49:15 -07001034 tmp = do_constant_folding(opc, temps[args[1]].val, 0);
Aurelien Jarnoebd27392015-06-04 21:53:23 +02001035 tcg_opt_gen_movi(s, op, args, args[0], tmp);
Richard Henderson6e14e912012-10-02 11:32:24 -07001036 break;
Kirill Batuzova640f032011-07-07 16:37:17 +04001037 }
Richard Henderson6e14e912012-10-02 11:32:24 -07001038 goto do_default;
1039
Richard Henderson4bb7a412013-09-09 17:03:24 -07001040 case INDEX_op_trunc_shr_i32:
1041 if (temps[args[1]].state == TCG_TEMP_CONST) {
Richard Hendersonc45cb8b2014-09-19 13:49:15 -07001042 tmp = do_constant_folding(opc, temps[args[1]].val, args[2]);
Aurelien Jarnoebd27392015-06-04 21:53:23 +02001043 tcg_opt_gen_movi(s, op, args, args[0], tmp);
Richard Henderson4bb7a412013-09-09 17:03:24 -07001044 break;
1045 }
1046 goto do_default;
1047
Kirill Batuzov53108fb2011-07-07 16:37:14 +04001048 CASE_OP_32_64(add):
1049 CASE_OP_32_64(sub):
1050 CASE_OP_32_64(mul):
Kirill Batuzov9a810902011-07-07 16:37:15 +04001051 CASE_OP_32_64(or):
1052 CASE_OP_32_64(and):
1053 CASE_OP_32_64(xor):
Kirill Batuzov55c09752011-07-07 16:37:16 +04001054 CASE_OP_32_64(shl):
1055 CASE_OP_32_64(shr):
1056 CASE_OP_32_64(sar):
Richard Henderson25c4d9c2011-08-17 14:11:46 -07001057 CASE_OP_32_64(rotl):
1058 CASE_OP_32_64(rotr):
Richard Hendersoncb25c802011-08-17 14:11:47 -07001059 CASE_OP_32_64(andc):
1060 CASE_OP_32_64(orc):
1061 CASE_OP_32_64(eqv):
1062 CASE_OP_32_64(nand):
1063 CASE_OP_32_64(nor):
Richard Henderson03271522013-08-14 14:35:56 -07001064 CASE_OP_32_64(muluh):
1065 CASE_OP_32_64(mulsh):
Richard Henderson01547f72013-08-14 15:22:46 -07001066 CASE_OP_32_64(div):
1067 CASE_OP_32_64(divu):
1068 CASE_OP_32_64(rem):
1069 CASE_OP_32_64(remu):
Kirill Batuzov53108fb2011-07-07 16:37:14 +04001070 if (temps[args[1]].state == TCG_TEMP_CONST
1071 && temps[args[2]].state == TCG_TEMP_CONST) {
Richard Hendersonc45cb8b2014-09-19 13:49:15 -07001072 tmp = do_constant_folding(opc, temps[args[1]].val,
Kirill Batuzov53108fb2011-07-07 16:37:14 +04001073 temps[args[2]].val);
Aurelien Jarnoebd27392015-06-04 21:53:23 +02001074 tcg_opt_gen_movi(s, op, args, args[0], tmp);
Richard Henderson6e14e912012-10-02 11:32:24 -07001075 break;
Kirill Batuzov53108fb2011-07-07 16:37:14 +04001076 }
Richard Henderson6e14e912012-10-02 11:32:24 -07001077 goto do_default;
1078
Aurelien Jarno7ef55fc2012-09-21 11:07:29 +02001079 CASE_OP_32_64(deposit):
1080 if (temps[args[1]].state == TCG_TEMP_CONST
1081 && temps[args[2]].state == TCG_TEMP_CONST) {
Richard Hendersond998e552014-03-18 14:23:52 -07001082 tmp = deposit64(temps[args[1]].val, args[3], args[4],
1083 temps[args[2]].val);
Aurelien Jarnoebd27392015-06-04 21:53:23 +02001084 tcg_opt_gen_movi(s, op, args, args[0], tmp);
Richard Henderson6e14e912012-10-02 11:32:24 -07001085 break;
Aurelien Jarno7ef55fc2012-09-21 11:07:29 +02001086 }
Richard Henderson6e14e912012-10-02 11:32:24 -07001087 goto do_default;
1088
Aurelien Jarnof8dd19e2012-09-06 16:47:14 +02001089 CASE_OP_32_64(setcond):
Richard Hendersonc45cb8b2014-09-19 13:49:15 -07001090 tmp = do_constant_folding_cond(opc, args[1], args[2], args[3]);
Aurelien Jarnob336ceb2012-09-18 19:37:00 +02001091 if (tmp != 2) {
Aurelien Jarnoebd27392015-06-04 21:53:23 +02001092 tcg_opt_gen_movi(s, op, args, args[0], tmp);
Richard Henderson6e14e912012-10-02 11:32:24 -07001093 break;
Aurelien Jarnof8dd19e2012-09-06 16:47:14 +02001094 }
Richard Henderson6e14e912012-10-02 11:32:24 -07001095 goto do_default;
1096
Aurelien Jarnofbeaa262012-09-06 16:47:14 +02001097 CASE_OP_32_64(brcond):
Richard Hendersonc45cb8b2014-09-19 13:49:15 -07001098 tmp = do_constant_folding_cond(opc, args[0], args[1], args[2]);
Aurelien Jarnob336ceb2012-09-18 19:37:00 +02001099 if (tmp != 2) {
1100 if (tmp) {
Paolo Bonzinid193a142013-01-11 15:42:51 -08001101 reset_all_temps(nb_temps);
Richard Hendersonc45cb8b2014-09-19 13:49:15 -07001102 op->opc = INDEX_op_br;
1103 args[0] = args[3];
Aurelien Jarnofbeaa262012-09-06 16:47:14 +02001104 } else {
Richard Henderson0c627cd2014-03-30 16:51:54 -07001105 tcg_op_remove(s, op);
Aurelien Jarnofbeaa262012-09-06 16:47:14 +02001106 }
Richard Henderson6e14e912012-10-02 11:32:24 -07001107 break;
Aurelien Jarnofbeaa262012-09-06 16:47:14 +02001108 }
Richard Henderson6e14e912012-10-02 11:32:24 -07001109 goto do_default;
1110
Richard Hendersonfa01a202012-09-21 10:13:37 -07001111 CASE_OP_32_64(movcond):
Richard Hendersonc45cb8b2014-09-19 13:49:15 -07001112 tmp = do_constant_folding_cond(opc, args[1], args[2], args[5]);
Aurelien Jarnob336ceb2012-09-18 19:37:00 +02001113 if (tmp != 2) {
Aurelien Jarno53657182015-06-04 21:53:25 +02001114 if (temps[args[4-tmp]].state == TCG_TEMP_CONST) {
Aurelien Jarnoebd27392015-06-04 21:53:23 +02001115 tcg_opt_gen_movi(s, op, args,
Richard Hendersona62f6f52014-05-22 10:59:12 -07001116 args[0], temps[args[4-tmp]].val);
Richard Hendersonfa01a202012-09-21 10:13:37 -07001117 } else {
Aurelien Jarno8d6a9162015-06-04 21:53:24 +02001118 tcg_opt_gen_mov(s, op, args, args[0], args[4-tmp]);
Richard Hendersonfa01a202012-09-21 10:13:37 -07001119 }
Richard Henderson6e14e912012-10-02 11:32:24 -07001120 break;
Richard Hendersonfa01a202012-09-21 10:13:37 -07001121 }
Richard Henderson6e14e912012-10-02 11:32:24 -07001122 goto do_default;
1123
Richard Henderson212c3282012-10-02 11:32:28 -07001124 case INDEX_op_add2_i32:
1125 case INDEX_op_sub2_i32:
1126 if (temps[args[2]].state == TCG_TEMP_CONST
1127 && temps[args[3]].state == TCG_TEMP_CONST
1128 && temps[args[4]].state == TCG_TEMP_CONST
1129 && temps[args[5]].state == TCG_TEMP_CONST) {
1130 uint32_t al = temps[args[2]].val;
1131 uint32_t ah = temps[args[3]].val;
1132 uint32_t bl = temps[args[4]].val;
1133 uint32_t bh = temps[args[5]].val;
1134 uint64_t a = ((uint64_t)ah << 32) | al;
1135 uint64_t b = ((uint64_t)bh << 32) | bl;
1136 TCGArg rl, rh;
Richard Hendersona4ce0992014-03-30 17:14:02 -07001137 TCGOp *op2 = insert_op_before(s, op, INDEX_op_movi_i32, 2);
1138 TCGArg *args2 = &s->gen_opparam_buf[op2->args];
Richard Henderson212c3282012-10-02 11:32:28 -07001139
Richard Hendersonc45cb8b2014-09-19 13:49:15 -07001140 if (opc == INDEX_op_add2_i32) {
Richard Henderson212c3282012-10-02 11:32:28 -07001141 a += b;
1142 } else {
1143 a -= b;
1144 }
1145
Richard Henderson212c3282012-10-02 11:32:28 -07001146 rl = args[0];
1147 rh = args[1];
Aurelien Jarnoebd27392015-06-04 21:53:23 +02001148 tcg_opt_gen_movi(s, op, args, rl, (uint32_t)a);
1149 tcg_opt_gen_movi(s, op2, args2, rh, (uint32_t)(a >> 32));
Richard Hendersonc45cb8b2014-09-19 13:49:15 -07001150
1151 /* We've done all we need to do with the movi. Skip it. */
1152 oi_next = op2->next;
Richard Henderson212c3282012-10-02 11:32:28 -07001153 break;
1154 }
1155 goto do_default;
1156
Richard Henderson14149682012-10-02 11:32:30 -07001157 case INDEX_op_mulu2_i32:
1158 if (temps[args[2]].state == TCG_TEMP_CONST
1159 && temps[args[3]].state == TCG_TEMP_CONST) {
1160 uint32_t a = temps[args[2]].val;
1161 uint32_t b = temps[args[3]].val;
1162 uint64_t r = (uint64_t)a * b;
1163 TCGArg rl, rh;
Richard Hendersona4ce0992014-03-30 17:14:02 -07001164 TCGOp *op2 = insert_op_before(s, op, INDEX_op_movi_i32, 2);
1165 TCGArg *args2 = &s->gen_opparam_buf[op2->args];
Richard Henderson14149682012-10-02 11:32:30 -07001166
1167 rl = args[0];
1168 rh = args[1];
Aurelien Jarnoebd27392015-06-04 21:53:23 +02001169 tcg_opt_gen_movi(s, op, args, rl, (uint32_t)r);
1170 tcg_opt_gen_movi(s, op2, args2, rh, (uint32_t)(r >> 32));
Richard Hendersonc45cb8b2014-09-19 13:49:15 -07001171
1172 /* We've done all we need to do with the movi. Skip it. */
1173 oi_next = op2->next;
Richard Henderson14149682012-10-02 11:32:30 -07001174 break;
1175 }
1176 goto do_default;
1177
Richard Hendersonbc1473e2012-10-02 11:32:25 -07001178 case INDEX_op_brcond2_i32:
Richard Henderson6c4382f2012-10-02 11:32:27 -07001179 tmp = do_constant_folding_cond2(&args[0], &args[2], args[4]);
1180 if (tmp != 2) {
1181 if (tmp) {
Richard Hendersona7635512014-04-23 22:18:30 -07001182 do_brcond_true:
Paolo Bonzinid193a142013-01-11 15:42:51 -08001183 reset_all_temps(nb_temps);
Richard Hendersonc45cb8b2014-09-19 13:49:15 -07001184 op->opc = INDEX_op_br;
1185 args[0] = args[5];
Richard Henderson6c4382f2012-10-02 11:32:27 -07001186 } else {
Richard Hendersona7635512014-04-23 22:18:30 -07001187 do_brcond_false:
Richard Henderson0c627cd2014-03-30 16:51:54 -07001188 tcg_op_remove(s, op);
Richard Henderson6c4382f2012-10-02 11:32:27 -07001189 }
1190 } else if ((args[4] == TCG_COND_LT || args[4] == TCG_COND_GE)
1191 && temps[args[2]].state == TCG_TEMP_CONST
1192 && temps[args[3]].state == TCG_TEMP_CONST
1193 && temps[args[2]].val == 0
1194 && temps[args[3]].val == 0) {
1195 /* Simplify LT/GE comparisons vs zero to a single compare
1196 vs the high word of the input. */
Richard Hendersona7635512014-04-23 22:18:30 -07001197 do_brcond_high:
Paolo Bonzinid193a142013-01-11 15:42:51 -08001198 reset_all_temps(nb_temps);
Richard Hendersonc45cb8b2014-09-19 13:49:15 -07001199 op->opc = INDEX_op_brcond_i32;
1200 args[0] = args[1];
1201 args[1] = args[3];
1202 args[2] = args[4];
1203 args[3] = args[5];
Richard Hendersona7635512014-04-23 22:18:30 -07001204 } else if (args[4] == TCG_COND_EQ) {
1205 /* Simplify EQ comparisons where one of the pairs
1206 can be simplified. */
1207 tmp = do_constant_folding_cond(INDEX_op_brcond_i32,
1208 args[0], args[2], TCG_COND_EQ);
1209 if (tmp == 0) {
1210 goto do_brcond_false;
1211 } else if (tmp == 1) {
1212 goto do_brcond_high;
1213 }
1214 tmp = do_constant_folding_cond(INDEX_op_brcond_i32,
1215 args[1], args[3], TCG_COND_EQ);
1216 if (tmp == 0) {
1217 goto do_brcond_false;
1218 } else if (tmp != 1) {
1219 goto do_default;
1220 }
1221 do_brcond_low:
1222 reset_all_temps(nb_temps);
Richard Hendersonc45cb8b2014-09-19 13:49:15 -07001223 op->opc = INDEX_op_brcond_i32;
1224 args[1] = args[2];
1225 args[2] = args[4];
1226 args[3] = args[5];
Richard Hendersona7635512014-04-23 22:18:30 -07001227 } else if (args[4] == TCG_COND_NE) {
1228 /* Simplify NE comparisons where one of the pairs
1229 can be simplified. */
1230 tmp = do_constant_folding_cond(INDEX_op_brcond_i32,
1231 args[0], args[2], TCG_COND_NE);
1232 if (tmp == 0) {
1233 goto do_brcond_high;
1234 } else if (tmp == 1) {
1235 goto do_brcond_true;
1236 }
1237 tmp = do_constant_folding_cond(INDEX_op_brcond_i32,
1238 args[1], args[3], TCG_COND_NE);
1239 if (tmp == 0) {
1240 goto do_brcond_low;
1241 } else if (tmp == 1) {
1242 goto do_brcond_true;
1243 }
1244 goto do_default;
Richard Henderson6c4382f2012-10-02 11:32:27 -07001245 } else {
1246 goto do_default;
Richard Hendersonbc1473e2012-10-02 11:32:25 -07001247 }
Richard Henderson6c4382f2012-10-02 11:32:27 -07001248 break;
Richard Hendersonbc1473e2012-10-02 11:32:25 -07001249
1250 case INDEX_op_setcond2_i32:
Richard Henderson6c4382f2012-10-02 11:32:27 -07001251 tmp = do_constant_folding_cond2(&args[1], &args[3], args[5]);
1252 if (tmp != 2) {
Richard Hendersona7635512014-04-23 22:18:30 -07001253 do_setcond_const:
Aurelien Jarnoebd27392015-06-04 21:53:23 +02001254 tcg_opt_gen_movi(s, op, args, args[0], tmp);
Richard Henderson6c4382f2012-10-02 11:32:27 -07001255 } else if ((args[5] == TCG_COND_LT || args[5] == TCG_COND_GE)
1256 && temps[args[3]].state == TCG_TEMP_CONST
1257 && temps[args[4]].state == TCG_TEMP_CONST
1258 && temps[args[3]].val == 0
1259 && temps[args[4]].val == 0) {
1260 /* Simplify LT/GE comparisons vs zero to a single compare
1261 vs the high word of the input. */
Richard Hendersona7635512014-04-23 22:18:30 -07001262 do_setcond_high:
Aurelien Jarno66e61b52013-05-08 22:36:39 +02001263 reset_temp(args[0]);
Richard Hendersona7635512014-04-23 22:18:30 -07001264 temps[args[0]].mask = 1;
Richard Hendersonc45cb8b2014-09-19 13:49:15 -07001265 op->opc = INDEX_op_setcond_i32;
1266 args[1] = args[2];
1267 args[2] = args[4];
1268 args[3] = args[5];
Richard Hendersona7635512014-04-23 22:18:30 -07001269 } else if (args[5] == TCG_COND_EQ) {
1270 /* Simplify EQ comparisons where one of the pairs
1271 can be simplified. */
1272 tmp = do_constant_folding_cond(INDEX_op_setcond_i32,
1273 args[1], args[3], TCG_COND_EQ);
1274 if (tmp == 0) {
1275 goto do_setcond_const;
1276 } else if (tmp == 1) {
1277 goto do_setcond_high;
1278 }
1279 tmp = do_constant_folding_cond(INDEX_op_setcond_i32,
1280 args[2], args[4], TCG_COND_EQ);
1281 if (tmp == 0) {
1282 goto do_setcond_high;
1283 } else if (tmp != 1) {
1284 goto do_default;
1285 }
1286 do_setcond_low:
1287 reset_temp(args[0]);
1288 temps[args[0]].mask = 1;
Richard Hendersonc45cb8b2014-09-19 13:49:15 -07001289 op->opc = INDEX_op_setcond_i32;
1290 args[2] = args[3];
1291 args[3] = args[5];
Richard Hendersona7635512014-04-23 22:18:30 -07001292 } else if (args[5] == TCG_COND_NE) {
1293 /* Simplify NE comparisons where one of the pairs
1294 can be simplified. */
1295 tmp = do_constant_folding_cond(INDEX_op_setcond_i32,
1296 args[1], args[3], TCG_COND_NE);
1297 if (tmp == 0) {
1298 goto do_setcond_high;
1299 } else if (tmp == 1) {
1300 goto do_setcond_const;
1301 }
1302 tmp = do_constant_folding_cond(INDEX_op_setcond_i32,
1303 args[2], args[4], TCG_COND_NE);
1304 if (tmp == 0) {
1305 goto do_setcond_low;
1306 } else if (tmp == 1) {
1307 goto do_setcond_const;
1308 }
1309 goto do_default;
Richard Henderson6c4382f2012-10-02 11:32:27 -07001310 } else {
1311 goto do_default;
Richard Hendersonbc1473e2012-10-02 11:32:25 -07001312 }
Richard Henderson6c4382f2012-10-02 11:32:27 -07001313 break;
Richard Hendersonbc1473e2012-10-02 11:32:25 -07001314
Kirill Batuzov8f2e8c02011-07-07 16:37:12 +04001315 case INDEX_op_call:
Richard Hendersoncf066672014-03-22 20:06:52 -07001316 if (!(args[nb_oargs + nb_iargs + 1]
1317 & (TCG_CALL_NO_READ_GLOBALS | TCG_CALL_NO_WRITE_GLOBALS))) {
Kirill Batuzov22613af2011-07-07 16:37:13 +04001318 for (i = 0; i < nb_globals; i++) {
Aurelien Jarnoe590d4e2012-09-11 12:31:21 +02001319 reset_temp(i);
Kirill Batuzov22613af2011-07-07 16:37:13 +04001320 }
1321 }
Richard Hendersoncf066672014-03-22 20:06:52 -07001322 goto do_reset_output;
Richard Henderson6e14e912012-10-02 11:32:24 -07001323
Kirill Batuzov8f2e8c02011-07-07 16:37:12 +04001324 default:
Richard Henderson6e14e912012-10-02 11:32:24 -07001325 do_default:
1326 /* Default case: we know nothing about operation (or were unable
1327 to compute the operation result) so no propagation is done.
1328 We trash everything if the operation is the end of a basic
Paolo Bonzini3a9d8b12013-01-11 15:42:52 -08001329 block, otherwise we only trash the output args. "mask" is
1330 the non-zero bits mask for the first output arg. */
Aurelien Jarnoa2550662012-09-19 21:40:30 +02001331 if (def->flags & TCG_OPF_BB_END) {
Paolo Bonzinid193a142013-01-11 15:42:51 -08001332 reset_all_temps(nb_temps);
Aurelien Jarnoa2550662012-09-19 21:40:30 +02001333 } else {
Richard Hendersoncf066672014-03-22 20:06:52 -07001334 do_reset_output:
1335 for (i = 0; i < nb_oargs; i++) {
Aurelien Jarnoe590d4e2012-09-11 12:31:21 +02001336 reset_temp(args[i]);
Aurelien Jarno30312442013-09-03 08:27:38 +02001337 /* Save the corresponding known-zero bits mask for the
1338 first output argument (only one supported so far). */
1339 if (i == 0) {
1340 temps[args[i]].mask = mask;
1341 }
Aurelien Jarnoa2550662012-09-19 21:40:30 +02001342 }
Kirill Batuzov22613af2011-07-07 16:37:13 +04001343 }
Kirill Batuzov8f2e8c02011-07-07 16:37:12 +04001344 break;
1345 }
1346 }
Kirill Batuzov8f2e8c02011-07-07 16:37:12 +04001347}
1348
/* Public entry point of the TCG optimization pass.  Currently this
   simply runs the constant-folding pass defined above, which also
   performs copy propagation and known-zero-bit mask tracking over
   the opcode stream held in @s.  */
void tcg_optimize(TCGContext *s)
{
    tcg_constant_folding(s);
}