/*
 * Optimizations for Tiny Code Generator for QEMU
 *
 * Copyright (c) 2010 Samsung Electronics.
 * Contributed by Kirill Batuzov <batuzovk@ispras.ru>
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

#include "qemu/osdep.h"
#include "qemu-common.h"
#include "exec/cpu-common.h"
#include "tcg-op.h"

#define CASE_OP_32_64(x)                        \
        glue(glue(case INDEX_op_, x), _i32):    \
        glue(glue(case INDEX_op_, x), _i64)

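/* For example, CASE_OP_32_64(add) expands to
       case INDEX_op_add_i32:
       case INDEX_op_add_i64:
   so a single case body handles both operand widths. */

/* State tracked per temp by this pass: when IS_CONST is set the known
   value is in VAL; temps that are copies of one another are linked
   through PREV_COPY/NEXT_COPY into a doubly linked circular list; MASK
   records which bits of the value may be nonzero. */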
struct tcg_temp_info {
    bool is_const;
    uint16_t prev_copy;
    uint16_t next_copy;
    tcg_target_ulong val;
    tcg_target_ulong mask;
};

static struct tcg_temp_info temps[TCG_MAX_TEMPS];
static TCGTempSet temps_used;

static inline bool temp_is_const(TCGArg arg)
{
    return temps[arg].is_const;
}

static inline bool temp_is_copy(TCGArg arg)
{
    return temps[arg].next_copy != arg;
}

/* Reset TEMP's state, possibly removing the temp from the list of copies. */
static void reset_temp(TCGArg temp)
{
    temps[temps[temp].next_copy].prev_copy = temps[temp].prev_copy;
    temps[temps[temp].prev_copy].next_copy = temps[temp].next_copy;
    temps[temp].next_copy = temp;
    temps[temp].prev_copy = temp;
    temps[temp].is_const = false;
    temps[temp].mask = -1;
}

/* Reset all temporaries, given that there are NB_TEMPS of them. */
static void reset_all_temps(int nb_temps)
{
    bitmap_zero(temps_used.l, nb_temps);
}

/* Initialize and activate a temporary. */
static void init_temp_info(TCGArg temp)
{
    if (!test_bit(temp, temps_used.l)) {
        temps[temp].next_copy = temp;
        temps[temp].prev_copy = temp;
        temps[temp].is_const = false;
        temps[temp].mask = -1;
        set_bit(temp, temps_used.l);
    }
}

static int op_bits(TCGOpcode op)
{
    const TCGOpDef *def = &tcg_op_defs[op];
    return def->flags & TCG_OPF_64BIT ? 64 : 32;
}

static TCGOpcode op_to_mov(TCGOpcode op)
{
    switch (op_bits(op)) {
    case 32:
        return INDEX_op_mov_i32;
    case 64:
        return INDEX_op_mov_i64;
    default:
        fprintf(stderr, "op_to_mov: unexpected return value of "
                "function op_bits.\n");
        tcg_abort();
    }
}

static TCGOpcode op_to_movi(TCGOpcode op)
{
    switch (op_bits(op)) {
    case 32:
        return INDEX_op_movi_i32;
    case 64:
        return INDEX_op_movi_i64;
    default:
        fprintf(stderr, "op_to_movi: unexpected return value of "
                "function op_bits.\n");
        tcg_abort();
    }
}

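/* Among the copies of TEMP, prefer a global, then a temp local, and
   only then TEMP itself; returns the preferred representative. */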
static TCGArg find_better_copy(TCGContext *s, TCGArg temp)
{
    TCGArg i;

    /* If this is already a global, we can't do better. */
    if (temp < s->nb_globals) {
        return temp;
    }

    /* Search for a global first. */
    for (i = temps[temp].next_copy ; i != temp ; i = temps[i].next_copy) {
        if (i < s->nb_globals) {
            return i;
        }
    }

    /* If it is a temp, search for a temp local. */
    if (!s->temps[temp].temp_local) {
        for (i = temps[temp].next_copy ; i != temp ; i = temps[i].next_copy) {
            if (s->temps[i].temp_local) {
                return i;
            }
        }
    }

    /* Failing to find a better representation, return the same temp. */
    return temp;
}

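/* Whether ARG1 and ARG2 are known to hold the same value, either
   trivially or via membership in the same copy list. */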
static bool temps_are_copies(TCGArg arg1, TCGArg arg2)
{
    TCGArg i;

    if (arg1 == arg2) {
        return true;
    }

    if (!temp_is_copy(arg1) || !temp_is_copy(arg2)) {
        return false;
    }

    for (i = temps[arg1].next_copy ; i != arg1 ; i = temps[i].next_copy) {
        if (i == arg2) {
            return true;
        }
    }

    return false;
}

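/* Rewrite OP in place as a movi of VAL into DST, and record DST as a
   known constant. */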
static void tcg_opt_gen_movi(TCGContext *s, TCGOp *op, TCGArg *args,
                             TCGArg dst, TCGArg val)
{
    TCGOpcode new_op = op_to_movi(op->opc);
    tcg_target_ulong mask;

    op->opc = new_op;

    reset_temp(dst);
    temps[dst].is_const = true;
    temps[dst].val = val;
    mask = val;
    if (TCG_TARGET_REG_BITS > 32 && new_op == INDEX_op_movi_i32) {
        /* High bits of the destination are now garbage. */
        mask |= ~0xffffffffull;
    }
    temps[dst].mask = mask;

    args[0] = dst;
    args[1] = val;
}

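/* Rewrite OP in place as a mov from SRC to DST, or remove it outright
   if the two are already copies of each other; when the types match,
   DST is linked into SRC's copy list. */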
static void tcg_opt_gen_mov(TCGContext *s, TCGOp *op, TCGArg *args,
                            TCGArg dst, TCGArg src)
{
    if (temps_are_copies(dst, src)) {
        tcg_op_remove(s, op);
        return;
    }

    TCGOpcode new_op = op_to_mov(op->opc);
    tcg_target_ulong mask;

    op->opc = new_op;

    reset_temp(dst);
    mask = temps[src].mask;
    if (TCG_TARGET_REG_BITS > 32 && new_op == INDEX_op_mov_i32) {
        /* High bits of the destination are now garbage. */
        mask |= ~0xffffffffull;
    }
    temps[dst].mask = mask;

    if (s->temps[src].type == s->temps[dst].type) {
        temps[dst].next_copy = temps[src].next_copy;
        temps[dst].prev_copy = src;
        temps[temps[dst].next_copy].prev_copy = dst;
        temps[src].next_copy = dst;
        temps[dst].is_const = temps[src].is_const;
        temps[dst].val = temps[src].val;
    }

    args[0] = dst;
    args[1] = src;
}

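/* Fold OP applied to the constant operands X and Y, evaluated at full
   register width; do_constant_folding() below narrows the result of
   32-bit ops. */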
static TCGArg do_constant_folding_2(TCGOpcode op, TCGArg x, TCGArg y)
{
    uint64_t l64, h64;

    switch (op) {
    CASE_OP_32_64(add):
        return x + y;

    CASE_OP_32_64(sub):
        return x - y;

    CASE_OP_32_64(mul):
        return x * y;

    CASE_OP_32_64(and):
        return x & y;

    CASE_OP_32_64(or):
        return x | y;

    CASE_OP_32_64(xor):
        return x ^ y;

    case INDEX_op_shl_i32:
        return (uint32_t)x << (y & 31);

    case INDEX_op_shl_i64:
        return (uint64_t)x << (y & 63);

    case INDEX_op_shr_i32:
        return (uint32_t)x >> (y & 31);

    case INDEX_op_shr_i64:
        return (uint64_t)x >> (y & 63);

    case INDEX_op_sar_i32:
        return (int32_t)x >> (y & 31);

    case INDEX_op_sar_i64:
        return (int64_t)x >> (y & 63);

    case INDEX_op_rotr_i32:
        return ror32(x, y & 31);

    case INDEX_op_rotr_i64:
        return ror64(x, y & 63);

    case INDEX_op_rotl_i32:
        return rol32(x, y & 31);

    case INDEX_op_rotl_i64:
        return rol64(x, y & 63);

    CASE_OP_32_64(not):
        return ~x;

    CASE_OP_32_64(neg):
        return -x;

    CASE_OP_32_64(andc):
        return x & ~y;

    CASE_OP_32_64(orc):
        return x | ~y;

    CASE_OP_32_64(eqv):
        return ~(x ^ y);

    CASE_OP_32_64(nand):
        return ~(x & y);

    CASE_OP_32_64(nor):
        return ~(x | y);

    case INDEX_op_clz_i32:
        return (uint32_t)x ? clz32(x) : y;

    case INDEX_op_clz_i64:
        return x ? clz64(x) : y;

    case INDEX_op_ctz_i32:
        return (uint32_t)x ? ctz32(x) : y;

    case INDEX_op_ctz_i64:
        return x ? ctz64(x) : y;

    CASE_OP_32_64(ext8s):
        return (int8_t)x;

    CASE_OP_32_64(ext16s):
        return (int16_t)x;

    CASE_OP_32_64(ext8u):
        return (uint8_t)x;

    CASE_OP_32_64(ext16u):
        return (uint16_t)x;

    case INDEX_op_ext_i32_i64:
    case INDEX_op_ext32s_i64:
        return (int32_t)x;

    case INDEX_op_extu_i32_i64:
    case INDEX_op_extrl_i64_i32:
    case INDEX_op_ext32u_i64:
        return (uint32_t)x;

    case INDEX_op_extrh_i64_i32:
        return (uint64_t)x >> 32;

    case INDEX_op_muluh_i32:
        return ((uint64_t)(uint32_t)x * (uint32_t)y) >> 32;
    case INDEX_op_mulsh_i32:
        return ((int64_t)(int32_t)x * (int32_t)y) >> 32;

    case INDEX_op_muluh_i64:
        mulu64(&l64, &h64, x, y);
        return h64;
    case INDEX_op_mulsh_i64:
        muls64(&l64, &h64, x, y);
        return h64;

    case INDEX_op_div_i32:
        /* Avoid crashing on divide by zero, otherwise undefined. */
        return (int32_t)x / ((int32_t)y ? : 1);
    case INDEX_op_divu_i32:
        return (uint32_t)x / ((uint32_t)y ? : 1);
    case INDEX_op_div_i64:
        return (int64_t)x / ((int64_t)y ? : 1);
    case INDEX_op_divu_i64:
        return (uint64_t)x / ((uint64_t)y ? : 1);

    case INDEX_op_rem_i32:
        return (int32_t)x % ((int32_t)y ? : 1);
    case INDEX_op_remu_i32:
        return (uint32_t)x % ((uint32_t)y ? : 1);
    case INDEX_op_rem_i64:
        return (int64_t)x % ((int64_t)y ? : 1);
    case INDEX_op_remu_i64:
        return (uint64_t)x % ((uint64_t)y ? : 1);

    default:
        fprintf(stderr,
                "Unrecognized operation %d in do_constant_folding.\n", op);
        tcg_abort();
    }
}

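/* Fold OP on the constants X and Y, keeping 32-bit results in their
   canonical sign-extended form. */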
static TCGArg do_constant_folding(TCGOpcode op, TCGArg x, TCGArg y)
{
    TCGArg res = do_constant_folding_2(op, x, y);
    if (op_bits(op) == 32) {
        res = (int32_t)res;
    }
    return res;
}

static bool do_constant_folding_cond_32(uint32_t x, uint32_t y, TCGCond c)
{
    switch (c) {
    case TCG_COND_EQ:
        return x == y;
    case TCG_COND_NE:
        return x != y;
    case TCG_COND_LT:
        return (int32_t)x < (int32_t)y;
    case TCG_COND_GE:
        return (int32_t)x >= (int32_t)y;
    case TCG_COND_LE:
        return (int32_t)x <= (int32_t)y;
    case TCG_COND_GT:
        return (int32_t)x > (int32_t)y;
    case TCG_COND_LTU:
        return x < y;
    case TCG_COND_GEU:
        return x >= y;
    case TCG_COND_LEU:
        return x <= y;
    case TCG_COND_GTU:
        return x > y;
    default:
        tcg_abort();
    }
}

static bool do_constant_folding_cond_64(uint64_t x, uint64_t y, TCGCond c)
{
    switch (c) {
    case TCG_COND_EQ:
        return x == y;
    case TCG_COND_NE:
        return x != y;
    case TCG_COND_LT:
        return (int64_t)x < (int64_t)y;
    case TCG_COND_GE:
        return (int64_t)x >= (int64_t)y;
    case TCG_COND_LE:
        return (int64_t)x <= (int64_t)y;
    case TCG_COND_GT:
        return (int64_t)x > (int64_t)y;
    case TCG_COND_LTU:
        return x < y;
    case TCG_COND_GEU:
        return x >= y;
    case TCG_COND_LEU:
        return x <= y;
    case TCG_COND_GTU:
        return x > y;
    default:
        tcg_abort();
    }
}

static bool do_constant_folding_cond_eq(TCGCond c)
{
    switch (c) {
    case TCG_COND_GT:
    case TCG_COND_LTU:
    case TCG_COND_LT:
    case TCG_COND_GTU:
    case TCG_COND_NE:
        return 0;
    case TCG_COND_GE:
    case TCG_COND_GEU:
    case TCG_COND_LE:
    case TCG_COND_LEU:
    case TCG_COND_EQ:
        return 1;
    default:
        tcg_abort();
    }
}

/* Return 2 if the condition can't be simplified, and the result
   of the condition (0 or 1) if it can */
static TCGArg do_constant_folding_cond(TCGOpcode op, TCGArg x,
                                       TCGArg y, TCGCond c)
{
    if (temp_is_const(x) && temp_is_const(y)) {
        switch (op_bits(op)) {
        case 32:
            return do_constant_folding_cond_32(temps[x].val, temps[y].val, c);
        case 64:
            return do_constant_folding_cond_64(temps[x].val, temps[y].val, c);
        default:
            tcg_abort();
        }
    } else if (temps_are_copies(x, y)) {
        return do_constant_folding_cond_eq(c);
    } else if (temp_is_const(y) && temps[y].val == 0) {
        switch (c) {
        case TCG_COND_LTU:
            return 0;
        case TCG_COND_GEU:
            return 1;
        default:
            return 2;
        }
    }
    return 2;
}

/* Return 2 if the condition can't be simplified, and the result
   of the condition (0 or 1) if it can */
static TCGArg do_constant_folding_cond2(TCGArg *p1, TCGArg *p2, TCGCond c)
{
    TCGArg al = p1[0], ah = p1[1];
    TCGArg bl = p2[0], bh = p2[1];

    if (temp_is_const(bl) && temp_is_const(bh)) {
        uint64_t b = ((uint64_t)temps[bh].val << 32) | (uint32_t)temps[bl].val;

        if (temp_is_const(al) && temp_is_const(ah)) {
            uint64_t a;
            a = ((uint64_t)temps[ah].val << 32) | (uint32_t)temps[al].val;
            return do_constant_folding_cond_64(a, b, c);
        }
        if (b == 0) {
            switch (c) {
            case TCG_COND_LTU:
                return 0;
            case TCG_COND_GEU:
                return 1;
            default:
                break;
            }
        }
    }
    if (temps_are_copies(al, bl) && temps_are_copies(ah, bh)) {
        return do_constant_folding_cond_eq(c);
    }
    return 2;
}

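/* Canonicalize the operand order of a commutative operation, e.g.
   rewriting "add r0, c, r1" as "add r0, r1, c" so that a constant,
   if any, ends up in the second operand.  Returns true if the
   operands were swapped. */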
static bool swap_commutative(TCGArg dest, TCGArg *p1, TCGArg *p2)
{
    TCGArg a1 = *p1, a2 = *p2;
    int sum = 0;
    sum += temp_is_const(a1);
    sum -= temp_is_const(a2);

    /* Prefer the constant in the second argument, and then the form
       op a, a, b, which is better handled on non-RISC hosts. */
    if (sum > 0 || (sum == 0 && dest == a2)) {
        *p1 = a2;
        *p2 = a1;
        return true;
    }
    return false;
}

static bool swap_commutative2(TCGArg *p1, TCGArg *p2)
{
    int sum = 0;
    sum += temp_is_const(p1[0]);
    sum += temp_is_const(p1[1]);
    sum -= temp_is_const(p2[0]);
    sum -= temp_is_const(p2[1]);
    if (sum > 0) {
        TCGArg t;
        t = p1[0], p1[0] = p2[0], p2[0] = t;
        t = p1[1], p1[1] = p2[1], p2[1] = t;
        return true;
    }
    return false;
}

/* Propagate constants and copies, fold constant expressions. */
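/* The single forward pass below first canonicalizes the operand order
   of each op, then applies algebraic simplifications, then propagates
   known-zero-bit information, and finally folds constants, tracks
   copies, and merges memory barriers. */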
void tcg_optimize(TCGContext *s)
{
    int oi, oi_next, nb_temps, nb_globals;
    TCGArg *prev_mb_args = NULL;

    /* The TEMPS array has an element for each temp.
       If a temp holds a constant then its value is kept in the element's
       VAL field.  If a temp is a copy of other temps then the other
       copies are available through the doubly linked circular list. */

    nb_temps = s->nb_temps;
    nb_globals = s->nb_globals;
    reset_all_temps(nb_temps);

    for (oi = s->gen_op_buf[0].next; oi != 0; oi = oi_next) {
        tcg_target_ulong mask, partmask, affected;
        int nb_oargs, nb_iargs, i;
        TCGArg tmp;

        TCGOp * const op = &s->gen_op_buf[oi];
        TCGArg * const args = &s->gen_opparam_buf[op->args];
        TCGOpcode opc = op->opc;
        const TCGOpDef *def = &tcg_op_defs[opc];

        oi_next = op->next;

        /* Count the arguments, and initialize the temps that are
           going to be used */
        if (opc == INDEX_op_call) {
            nb_oargs = op->callo;
            nb_iargs = op->calli;
            for (i = 0; i < nb_oargs + nb_iargs; i++) {
                tmp = args[i];
                if (tmp != TCG_CALL_DUMMY_ARG) {
                    init_temp_info(tmp);
                }
            }
        } else {
            nb_oargs = def->nb_oargs;
            nb_iargs = def->nb_iargs;
            for (i = 0; i < nb_oargs + nb_iargs; i++) {
                init_temp_info(args[i]);
            }
        }

        /* Do copy propagation */
        for (i = nb_oargs; i < nb_oargs + nb_iargs; i++) {
            if (temp_is_copy(args[i])) {
                args[i] = find_better_copy(s, args[i]);
            }
        }

        /* For commutative operations make the constant the second argument */
        switch (opc) {
        CASE_OP_32_64(add):
        CASE_OP_32_64(mul):
        CASE_OP_32_64(and):
        CASE_OP_32_64(or):
        CASE_OP_32_64(xor):
        CASE_OP_32_64(eqv):
        CASE_OP_32_64(nand):
        CASE_OP_32_64(nor):
        CASE_OP_32_64(muluh):
        CASE_OP_32_64(mulsh):
            swap_commutative(args[0], &args[1], &args[2]);
            break;
        CASE_OP_32_64(brcond):
            if (swap_commutative(-1, &args[0], &args[1])) {
                args[2] = tcg_swap_cond(args[2]);
            }
            break;
        CASE_OP_32_64(setcond):
            if (swap_commutative(args[0], &args[1], &args[2])) {
                args[3] = tcg_swap_cond(args[3]);
            }
            break;
        CASE_OP_32_64(movcond):
            if (swap_commutative(-1, &args[1], &args[2])) {
                args[5] = tcg_swap_cond(args[5]);
            }
            /* For movcond, we canonicalize the "false" input reg to match
               the destination reg so that the tcg backend can implement
               a "move if true" operation. */
            if (swap_commutative(args[0], &args[4], &args[3])) {
                args[5] = tcg_invert_cond(args[5]);
            }
            break;
        CASE_OP_32_64(add2):
            swap_commutative(args[0], &args[2], &args[4]);
            swap_commutative(args[1], &args[3], &args[5]);
            break;
        CASE_OP_32_64(mulu2):
        CASE_OP_32_64(muls2):
            swap_commutative(args[0], &args[2], &args[3]);
            break;
        case INDEX_op_brcond2_i32:
            if (swap_commutative2(&args[0], &args[2])) {
                args[4] = tcg_swap_cond(args[4]);
            }
            break;
        case INDEX_op_setcond2_i32:
            if (swap_commutative2(&args[1], &args[3])) {
                args[5] = tcg_swap_cond(args[5]);
            }
            break;
        default:
            break;
        }

        /* Simplify expressions for "shift/rot r, 0, a => movi r, 0",
           and "sub r, 0, a => neg r, a" cases. */
        switch (opc) {
        CASE_OP_32_64(shl):
        CASE_OP_32_64(shr):
        CASE_OP_32_64(sar):
        CASE_OP_32_64(rotl):
        CASE_OP_32_64(rotr):
            if (temp_is_const(args[1]) && temps[args[1]].val == 0) {
                tcg_opt_gen_movi(s, op, args, args[0], 0);
                continue;
            }
            break;
        CASE_OP_32_64(sub):
            {
                TCGOpcode neg_op;
                bool have_neg;

                if (temp_is_const(args[2])) {
                    /* Proceed with possible constant folding. */
                    break;
                }
                if (opc == INDEX_op_sub_i32) {
                    neg_op = INDEX_op_neg_i32;
                    have_neg = TCG_TARGET_HAS_neg_i32;
                } else {
                    neg_op = INDEX_op_neg_i64;
                    have_neg = TCG_TARGET_HAS_neg_i64;
                }
                if (!have_neg) {
                    break;
                }
                if (temp_is_const(args[1]) && temps[args[1]].val == 0) {
                    op->opc = neg_op;
                    reset_temp(args[0]);
                    args[1] = args[2];
                    continue;
                }
            }
            break;
        CASE_OP_32_64(xor):
        CASE_OP_32_64(nand):
            if (!temp_is_const(args[1])
                && temp_is_const(args[2]) && temps[args[2]].val == -1) {
                i = 1;
                goto try_not;
            }
            break;
        CASE_OP_32_64(nor):
            if (!temp_is_const(args[1])
                && temp_is_const(args[2]) && temps[args[2]].val == 0) {
                i = 1;
                goto try_not;
            }
            break;
        CASE_OP_32_64(andc):
            if (!temp_is_const(args[2])
                && temp_is_const(args[1]) && temps[args[1]].val == -1) {
                i = 2;
                goto try_not;
            }
            break;
        CASE_OP_32_64(orc):
        CASE_OP_32_64(eqv):
            if (!temp_is_const(args[2])
                && temp_is_const(args[1]) && temps[args[1]].val == 0) {
                i = 2;
                goto try_not;
            }
            break;
        try_not:
            {
                TCGOpcode not_op;
                bool have_not;

                if (def->flags & TCG_OPF_64BIT) {
                    not_op = INDEX_op_not_i64;
                    have_not = TCG_TARGET_HAS_not_i64;
                } else {
                    not_op = INDEX_op_not_i32;
                    have_not = TCG_TARGET_HAS_not_i32;
                }
                if (!have_not) {
                    break;
                }
                op->opc = not_op;
                reset_temp(args[0]);
                args[1] = args[i];
                continue;
            }
        default:
            break;
        }

        /* Simplify expressions for "op r, a, const => mov r, a" cases */
        switch (opc) {
        CASE_OP_32_64(add):
        CASE_OP_32_64(sub):
        CASE_OP_32_64(shl):
        CASE_OP_32_64(shr):
        CASE_OP_32_64(sar):
        CASE_OP_32_64(rotl):
        CASE_OP_32_64(rotr):
        CASE_OP_32_64(or):
        CASE_OP_32_64(xor):
        CASE_OP_32_64(andc):
            if (!temp_is_const(args[1])
                && temp_is_const(args[2]) && temps[args[2]].val == 0) {
                tcg_opt_gen_mov(s, op, args, args[0], args[1]);
                continue;
            }
            break;
        CASE_OP_32_64(and):
        CASE_OP_32_64(orc):
        CASE_OP_32_64(eqv):
            if (!temp_is_const(args[1])
                && temp_is_const(args[2]) && temps[args[2]].val == -1) {
                tcg_opt_gen_mov(s, op, args, args[0], args[1]);
                continue;
            }
            break;
        default:
            break;
        }

        /* Simplify using known-zero bits.  Currently only ops with a single
           output argument are supported. */
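        /* E.g. ext8u always produces a value with mask 0xff, and an AND
           cannot set any bit that is zero in either input mask. */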
        mask = -1;
        affected = -1;
        switch (opc) {
        CASE_OP_32_64(ext8s):
            if ((temps[args[1]].mask & 0x80) != 0) {
                break;
            }
        CASE_OP_32_64(ext8u):
            mask = 0xff;
            goto and_const;
        CASE_OP_32_64(ext16s):
            if ((temps[args[1]].mask & 0x8000) != 0) {
                break;
            }
        CASE_OP_32_64(ext16u):
            mask = 0xffff;
            goto and_const;
        case INDEX_op_ext32s_i64:
            if ((temps[args[1]].mask & 0x80000000) != 0) {
                break;
            }
        case INDEX_op_ext32u_i64:
            mask = 0xffffffffU;
            goto and_const;

        CASE_OP_32_64(and):
            mask = temps[args[2]].mask;
            if (temp_is_const(args[2])) {
        and_const:
                affected = temps[args[1]].mask & ~mask;
            }
            mask = temps[args[1]].mask & mask;
            break;

        case INDEX_op_ext_i32_i64:
            if ((temps[args[1]].mask & 0x80000000) != 0) {
                break;
            }
        case INDEX_op_extu_i32_i64:
            /* We do not compute affected as it is a size changing op. */
            mask = (uint32_t)temps[args[1]].mask;
            break;

        CASE_OP_32_64(andc):
            /* Known-zeros does not imply known-ones.  Therefore unless
               args[2] is constant, we can't infer anything from it. */
            if (temp_is_const(args[2])) {
                mask = ~temps[args[2]].mask;
                goto and_const;
            }
            /* But we certainly know nothing outside args[1] may be set. */
            mask = temps[args[1]].mask;
            break;

        case INDEX_op_sar_i32:
            if (temp_is_const(args[2])) {
                tmp = temps[args[2]].val & 31;
                mask = (int32_t)temps[args[1]].mask >> tmp;
            }
            break;
        case INDEX_op_sar_i64:
            if (temp_is_const(args[2])) {
                tmp = temps[args[2]].val & 63;
                mask = (int64_t)temps[args[1]].mask >> tmp;
            }
            break;

        case INDEX_op_shr_i32:
            if (temp_is_const(args[2])) {
                tmp = temps[args[2]].val & 31;
                mask = (uint32_t)temps[args[1]].mask >> tmp;
            }
            break;
        case INDEX_op_shr_i64:
            if (temp_is_const(args[2])) {
                tmp = temps[args[2]].val & 63;
                mask = (uint64_t)temps[args[1]].mask >> tmp;
            }
            break;

        case INDEX_op_extrl_i64_i32:
            mask = (uint32_t)temps[args[1]].mask;
            break;
        case INDEX_op_extrh_i64_i32:
            mask = (uint64_t)temps[args[1]].mask >> 32;
            break;

        CASE_OP_32_64(shl):
            if (temp_is_const(args[2])) {
                tmp = temps[args[2]].val & (TCG_TARGET_REG_BITS - 1);
                mask = temps[args[1]].mask << tmp;
            }
            break;

        CASE_OP_32_64(neg):
            /* Set to 1 all bits to the left of the rightmost. */
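            /* E.g. a mask of 0x6 gives -(0x6 & -0x6) = -0x2 = ~0x1,
               so only bit 0 remains known-zero after the negation. */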
            mask = -(temps[args[1]].mask & -temps[args[1]].mask);
            break;

        CASE_OP_32_64(deposit):
            mask = deposit64(temps[args[1]].mask, args[3], args[4],
                             temps[args[2]].mask);
            break;

        CASE_OP_32_64(extract):
            mask = extract64(temps[args[1]].mask, args[2], args[3]);
            if (args[2] == 0) {
                affected = temps[args[1]].mask & ~mask;
            }
            break;
        CASE_OP_32_64(sextract):
            mask = sextract64(temps[args[1]].mask, args[2], args[3]);
            if (args[2] == 0 && (tcg_target_long)mask >= 0) {
                affected = temps[args[1]].mask & ~mask;
            }
            break;

        CASE_OP_32_64(or):
        CASE_OP_32_64(xor):
            mask = temps[args[1]].mask | temps[args[2]].mask;
            break;

        case INDEX_op_clz_i32:
        case INDEX_op_ctz_i32:
            mask = temps[args[2]].mask | 31;
            break;

        case INDEX_op_clz_i64:
        case INDEX_op_ctz_i64:
            mask = temps[args[2]].mask | 63;
            break;

        CASE_OP_32_64(setcond):
        case INDEX_op_setcond2_i32:
            mask = 1;
            break;

        CASE_OP_32_64(movcond):
            mask = temps[args[3]].mask | temps[args[4]].mask;
            break;

        CASE_OP_32_64(ld8u):
            mask = 0xff;
            break;
        CASE_OP_32_64(ld16u):
            mask = 0xffff;
            break;
        case INDEX_op_ld32u_i64:
            mask = 0xffffffffu;
            break;

        CASE_OP_32_64(qemu_ld):
            {
                TCGMemOpIdx oi = args[nb_oargs + nb_iargs];
                TCGMemOp mop = get_memop(oi);
                if (!(mop & MO_SIGN)) {
                    mask = (2ULL << ((8 << (mop & MO_SIZE)) - 1)) - 1;
                }
            }
            break;

        default:
            break;
        }

        /* 32-bit ops generate 32-bit results.  For the result-is-zero test
           below, we can ignore high bits, but for further optimizations we
           need to record that the high bits contain garbage. */
        partmask = mask;
        if (!(def->flags & TCG_OPF_64BIT)) {
            mask |= ~(tcg_target_ulong)0xffffffffu;
            partmask &= 0xffffffffu;
            affected &= 0xffffffffu;
        }

        if (partmask == 0) {
            tcg_debug_assert(nb_oargs == 1);
            tcg_opt_gen_movi(s, op, args, args[0], 0);
            continue;
        }
        if (affected == 0) {
            tcg_debug_assert(nb_oargs == 1);
            tcg_opt_gen_mov(s, op, args, args[0], args[1]);
            continue;
        }

        /* Simplify expressions for "op r, a, 0 => movi r, 0" cases */
        switch (opc) {
        CASE_OP_32_64(and):
        CASE_OP_32_64(mul):
        CASE_OP_32_64(muluh):
        CASE_OP_32_64(mulsh):
            if ((temp_is_const(args[2]) && temps[args[2]].val == 0)) {
                tcg_opt_gen_movi(s, op, args, args[0], 0);
                continue;
            }
            break;
        default:
            break;
        }

        /* Simplify expressions for "op r, a, a => mov r, a" cases */
        switch (opc) {
        CASE_OP_32_64(or):
        CASE_OP_32_64(and):
            if (temps_are_copies(args[1], args[2])) {
                tcg_opt_gen_mov(s, op, args, args[0], args[1]);
                continue;
            }
            break;
        default:
            break;
        }

        /* Simplify expressions for "op r, a, a => movi r, 0" cases */
        switch (opc) {
        CASE_OP_32_64(andc):
        CASE_OP_32_64(sub):
        CASE_OP_32_64(xor):
            if (temps_are_copies(args[1], args[2])) {
                tcg_opt_gen_movi(s, op, args, args[0], 0);
                continue;
            }
            break;
        default:
            break;
        }

        /* Propagate constants through copy operations and do constant
           folding.  Constants will be substituted for arguments by the
           register allocator where needed and possible.  Also detect
           copies. */
        switch (opc) {
        CASE_OP_32_64(mov):
            tcg_opt_gen_mov(s, op, args, args[0], args[1]);
            break;
        CASE_OP_32_64(movi):
            tcg_opt_gen_movi(s, op, args, args[0], args[1]);
            break;

        CASE_OP_32_64(not):
        CASE_OP_32_64(neg):
        CASE_OP_32_64(ext8s):
        CASE_OP_32_64(ext8u):
        CASE_OP_32_64(ext16s):
        CASE_OP_32_64(ext16u):
        case INDEX_op_ext32s_i64:
        case INDEX_op_ext32u_i64:
        case INDEX_op_ext_i32_i64:
        case INDEX_op_extu_i32_i64:
        case INDEX_op_extrl_i64_i32:
        case INDEX_op_extrh_i64_i32:
            if (temp_is_const(args[1])) {
                tmp = do_constant_folding(opc, temps[args[1]].val, 0);
                tcg_opt_gen_movi(s, op, args, args[0], tmp);
                break;
            }
            goto do_default;

        CASE_OP_32_64(add):
        CASE_OP_32_64(sub):
        CASE_OP_32_64(mul):
        CASE_OP_32_64(or):
        CASE_OP_32_64(and):
        CASE_OP_32_64(xor):
        CASE_OP_32_64(shl):
        CASE_OP_32_64(shr):
        CASE_OP_32_64(sar):
        CASE_OP_32_64(rotl):
        CASE_OP_32_64(rotr):
        CASE_OP_32_64(andc):
        CASE_OP_32_64(orc):
        CASE_OP_32_64(eqv):
        CASE_OP_32_64(nand):
        CASE_OP_32_64(nor):
        CASE_OP_32_64(muluh):
        CASE_OP_32_64(mulsh):
        CASE_OP_32_64(div):
        CASE_OP_32_64(divu):
        CASE_OP_32_64(rem):
        CASE_OP_32_64(remu):
            if (temp_is_const(args[1]) && temp_is_const(args[2])) {
                tmp = do_constant_folding(opc, temps[args[1]].val,
                                          temps[args[2]].val);
                tcg_opt_gen_movi(s, op, args, args[0], tmp);
                break;
            }
            goto do_default;

        CASE_OP_32_64(clz):
        CASE_OP_32_64(ctz):
            if (temp_is_const(args[1])) {
                TCGArg v = temps[args[1]].val;
                if (v != 0) {
                    tmp = do_constant_folding(opc, v, 0);
                    tcg_opt_gen_movi(s, op, args, args[0], tmp);
                } else {
                    tcg_opt_gen_mov(s, op, args, args[0], args[2]);
                }
                break;
            }
            goto do_default;

        CASE_OP_32_64(deposit):
            if (temp_is_const(args[1]) && temp_is_const(args[2])) {
                tmp = deposit64(temps[args[1]].val, args[3], args[4],
                                temps[args[2]].val);
                tcg_opt_gen_movi(s, op, args, args[0], tmp);
                break;
            }
            goto do_default;

        CASE_OP_32_64(extract):
            if (temp_is_const(args[1])) {
                tmp = extract64(temps[args[1]].val, args[2], args[3]);
                tcg_opt_gen_movi(s, op, args, args[0], tmp);
                break;
            }
            goto do_default;

        CASE_OP_32_64(sextract):
            if (temp_is_const(args[1])) {
                tmp = sextract64(temps[args[1]].val, args[2], args[3]);
                tcg_opt_gen_movi(s, op, args, args[0], tmp);
                break;
            }
            goto do_default;

        CASE_OP_32_64(setcond):
            tmp = do_constant_folding_cond(opc, args[1], args[2], args[3]);
            if (tmp != 2) {
                tcg_opt_gen_movi(s, op, args, args[0], tmp);
                break;
            }
            goto do_default;

        CASE_OP_32_64(brcond):
            tmp = do_constant_folding_cond(opc, args[0], args[1], args[2]);
            if (tmp != 2) {
                if (tmp) {
                    reset_all_temps(nb_temps);
                    op->opc = INDEX_op_br;
                    args[0] = args[3];
                } else {
                    tcg_op_remove(s, op);
                }
                break;
            }
            goto do_default;

        CASE_OP_32_64(movcond):
            tmp = do_constant_folding_cond(opc, args[1], args[2], args[5]);
            if (tmp != 2) {
                tcg_opt_gen_mov(s, op, args, args[0], args[4-tmp]);
                break;
            }
            if (temp_is_const(args[3]) && temp_is_const(args[4])) {
                tcg_target_ulong tv = temps[args[3]].val;
                tcg_target_ulong fv = temps[args[4]].val;
                TCGCond cond = args[5];
                if (fv == 1 && tv == 0) {
                    cond = tcg_invert_cond(cond);
                } else if (!(tv == 1 && fv == 0)) {
                    goto do_default;
                }
                args[3] = cond;
                op->opc = opc = (opc == INDEX_op_movcond_i32
                                 ? INDEX_op_setcond_i32
                                 : INDEX_op_setcond_i64);
                nb_iargs = 2;
            }
            goto do_default;

        case INDEX_op_add2_i32:
        case INDEX_op_sub2_i32:
            if (temp_is_const(args[2]) && temp_is_const(args[3])
                && temp_is_const(args[4]) && temp_is_const(args[5])) {
                uint32_t al = temps[args[2]].val;
                uint32_t ah = temps[args[3]].val;
                uint32_t bl = temps[args[4]].val;
                uint32_t bh = temps[args[5]].val;
                uint64_t a = ((uint64_t)ah << 32) | al;
                uint64_t b = ((uint64_t)bh << 32) | bl;
                TCGArg rl, rh;
                TCGOp *op2 = tcg_op_insert_before(s, op, INDEX_op_movi_i32, 2);
                TCGArg *args2 = &s->gen_opparam_buf[op2->args];

                if (opc == INDEX_op_add2_i32) {
                    a += b;
                } else {
                    a -= b;
                }

                rl = args[0];
                rh = args[1];
                tcg_opt_gen_movi(s, op, args, rl, (int32_t)a);
                tcg_opt_gen_movi(s, op2, args2, rh, (int32_t)(a >> 32));

                /* We've done all we need to do with the movi.  Skip it. */
                oi_next = op2->next;
                break;
            }
            goto do_default;

        case INDEX_op_mulu2_i32:
            if (temp_is_const(args[2]) && temp_is_const(args[3])) {
                uint32_t a = temps[args[2]].val;
                uint32_t b = temps[args[3]].val;
                uint64_t r = (uint64_t)a * b;
                TCGArg rl, rh;
                TCGOp *op2 = tcg_op_insert_before(s, op, INDEX_op_movi_i32, 2);
                TCGArg *args2 = &s->gen_opparam_buf[op2->args];

                rl = args[0];
                rh = args[1];
                tcg_opt_gen_movi(s, op, args, rl, (int32_t)r);
                tcg_opt_gen_movi(s, op2, args2, rh, (int32_t)(r >> 32));

                /* We've done all we need to do with the movi.  Skip it. */
                oi_next = op2->next;
                break;
            }
            goto do_default;

        case INDEX_op_brcond2_i32:
            tmp = do_constant_folding_cond2(&args[0], &args[2], args[4]);
            if (tmp != 2) {
                if (tmp) {
            do_brcond_true:
                    reset_all_temps(nb_temps);
                    op->opc = INDEX_op_br;
                    args[0] = args[5];
                } else {
            do_brcond_false:
                    tcg_op_remove(s, op);
                }
            } else if ((args[4] == TCG_COND_LT || args[4] == TCG_COND_GE)
                       && temp_is_const(args[2]) && temps[args[2]].val == 0
                       && temp_is_const(args[3]) && temps[args[3]].val == 0) {
                /* Simplify LT/GE comparisons vs zero to a single compare
                   vs the high word of the input. */
            do_brcond_high:
                reset_all_temps(nb_temps);
                op->opc = INDEX_op_brcond_i32;
                args[0] = args[1];
                args[1] = args[3];
                args[2] = args[4];
                args[3] = args[5];
            } else if (args[4] == TCG_COND_EQ) {
                /* Simplify EQ comparisons where one of the pairs
                   can be simplified. */
                tmp = do_constant_folding_cond(INDEX_op_brcond_i32,
                                               args[0], args[2], TCG_COND_EQ);
                if (tmp == 0) {
                    goto do_brcond_false;
                } else if (tmp == 1) {
                    goto do_brcond_high;
                }
                tmp = do_constant_folding_cond(INDEX_op_brcond_i32,
                                               args[1], args[3], TCG_COND_EQ);
                if (tmp == 0) {
                    goto do_brcond_false;
                } else if (tmp != 1) {
                    goto do_default;
                }
            do_brcond_low:
                reset_all_temps(nb_temps);
                op->opc = INDEX_op_brcond_i32;
                args[1] = args[2];
                args[2] = args[4];
                args[3] = args[5];
            } else if (args[4] == TCG_COND_NE) {
                /* Simplify NE comparisons where one of the pairs
                   can be simplified. */
                tmp = do_constant_folding_cond(INDEX_op_brcond_i32,
                                               args[0], args[2], TCG_COND_NE);
                if (tmp == 0) {
                    goto do_brcond_high;
                } else if (tmp == 1) {
                    goto do_brcond_true;
                }
                tmp = do_constant_folding_cond(INDEX_op_brcond_i32,
                                               args[1], args[3], TCG_COND_NE);
                if (tmp == 0) {
                    goto do_brcond_low;
                } else if (tmp == 1) {
                    goto do_brcond_true;
                }
                goto do_default;
            } else {
                goto do_default;
            }
            break;

        case INDEX_op_setcond2_i32:
            tmp = do_constant_folding_cond2(&args[1], &args[3], args[5]);
            if (tmp != 2) {
            do_setcond_const:
                tcg_opt_gen_movi(s, op, args, args[0], tmp);
            } else if ((args[5] == TCG_COND_LT || args[5] == TCG_COND_GE)
                       && temp_is_const(args[3]) && temps[args[3]].val == 0
                       && temp_is_const(args[4]) && temps[args[4]].val == 0) {
                /* Simplify LT/GE comparisons vs zero to a single compare
                   vs the high word of the input. */
            do_setcond_high:
                reset_temp(args[0]);
                temps[args[0]].mask = 1;
                op->opc = INDEX_op_setcond_i32;
                args[1] = args[2];
                args[2] = args[4];
                args[3] = args[5];
            } else if (args[5] == TCG_COND_EQ) {
                /* Simplify EQ comparisons where one of the pairs
                   can be simplified. */
                tmp = do_constant_folding_cond(INDEX_op_setcond_i32,
                                               args[1], args[3], TCG_COND_EQ);
                if (tmp == 0) {
                    goto do_setcond_const;
                } else if (tmp == 1) {
                    goto do_setcond_high;
                }
                tmp = do_constant_folding_cond(INDEX_op_setcond_i32,
                                               args[2], args[4], TCG_COND_EQ);
                if (tmp == 0) {
                    goto do_setcond_high;
                } else if (tmp != 1) {
                    goto do_default;
                }
            do_setcond_low:
                reset_temp(args[0]);
                temps[args[0]].mask = 1;
                op->opc = INDEX_op_setcond_i32;
                args[2] = args[3];
                args[3] = args[5];
            } else if (args[5] == TCG_COND_NE) {
                /* Simplify NE comparisons where one of the pairs
                   can be simplified. */
                tmp = do_constant_folding_cond(INDEX_op_setcond_i32,
                                               args[1], args[3], TCG_COND_NE);
                if (tmp == 0) {
                    goto do_setcond_high;
                } else if (tmp == 1) {
                    goto do_setcond_const;
                }
                tmp = do_constant_folding_cond(INDEX_op_setcond_i32,
                                               args[2], args[4], TCG_COND_NE);
                if (tmp == 0) {
                    goto do_setcond_low;
                } else if (tmp == 1) {
                    goto do_setcond_const;
                }
                goto do_default;
            } else {
                goto do_default;
            }
            break;

        case INDEX_op_call:
            if (!(args[nb_oargs + nb_iargs + 1]
                  & (TCG_CALL_NO_READ_GLOBALS | TCG_CALL_NO_WRITE_GLOBALS))) {
                for (i = 0; i < nb_globals; i++) {
                    if (test_bit(i, temps_used.l)) {
                        reset_temp(i);
                    }
                }
            }
            goto do_reset_output;

        default:
        do_default:
            /* Default case: we know nothing about the operation (or were
               unable to compute the operation result) so no propagation
               is done.  We trash everything if the operation is the end
               of a basic block, otherwise we only trash the output args.
               "mask" is the non-zero bits mask for the first output arg. */
            if (def->flags & TCG_OPF_BB_END) {
                reset_all_temps(nb_temps);
            } else {
        do_reset_output:
                for (i = 0; i < nb_oargs; i++) {
                    reset_temp(args[i]);
                    /* Save the corresponding known-zero bits mask for the
                       first output argument (only one supported so far). */
                    if (i == 0) {
                        temps[args[i]].mask = mask;
                    }
                }
            }
            break;
        }

        /* Eliminate duplicate and redundant fence instructions. */
        if (prev_mb_args) {
            switch (opc) {
            case INDEX_op_mb:
                /* Merge two barriers of the same type into one,
                 * or a weaker barrier into a stronger one,
                 * or two weaker barriers into a stronger one.
                 *   mb X; mb Y => mb X|Y
                 *   mb; strl => mb; st
                 *   ldaq; mb => ld; mb
                 *   ldaq; strl => ld; mb; st
                 * Other combinations are also merged into a strong
                 * barrier.  This is stricter than specified but for
                 * the purposes of TCG is better than not optimizing.
                 */
                prev_mb_args[0] |= args[0];
                tcg_op_remove(s, op);
                break;

            default:
                /* Opcodes that end the block stop the optimization. */
                if ((def->flags & TCG_OPF_BB_END) == 0) {
                    break;
                }
                /* fallthru */
            case INDEX_op_qemu_ld_i32:
            case INDEX_op_qemu_ld_i64:
            case INDEX_op_qemu_st_i32:
            case INDEX_op_qemu_st_i64:
            case INDEX_op_call:
                /* Opcodes that touch guest memory stop the optimization. */
                prev_mb_args = NULL;
                break;
            }
        } else if (opc == INDEX_op_mb) {
            prev_mb_args = args;
        }
    }
}