/*
 * Optimizations for Tiny Code Generator for QEMU
 *
 * Copyright (c) 2010 Samsung Electronics.
 * Contributed by Kirill Batuzov <batuzovk@ispras.ru>
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

#include "qemu/osdep.h"
#include "qemu-common.h"
#include "exec/cpu-common.h"
#include "tcg-op.h"

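/* Expand to the case labels for both the 32-bit and 64-bit variants of
   opcode X, so a single switch arm can handle e.g. INDEX_op_add_i32 and
   INDEX_op_add_i64 together. */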
#define CASE_OP_32_64(x)                        \
        glue(glue(case INDEX_op_, x), _i32):    \
        glue(glue(case INDEX_op_, x), _i64)

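/* State tracked per TCG temporary.  Temps known to hold the same value are
   linked into a doubly linked circular list through next_copy/prev_copy; a
   temp that is not a copy points back to itself.  When is_const is set,
   val holds the known constant.  mask has a bit set for every bit of the
   temp that may possibly be nonzero (known-zero bits are the complement). */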
struct tcg_temp_info {
    bool is_const;
    uint16_t prev_copy;
    uint16_t next_copy;
    tcg_target_ulong val;
    tcg_target_ulong mask;
};

static struct tcg_temp_info temps[TCG_MAX_TEMPS];
static TCGTempSet temps_used;

static inline bool temp_is_const(TCGArg arg)
{
    return temps[arg].is_const;
}

static inline bool temp_is_copy(TCGArg arg)
{
    return temps[arg].next_copy != arg;
}

/* Reset TEMP's state, possibly removing the temp from the list of copies. */
static void reset_temp(TCGArg temp)
{
    temps[temps[temp].next_copy].prev_copy = temps[temp].prev_copy;
    temps[temps[temp].prev_copy].next_copy = temps[temp].next_copy;
    temps[temp].next_copy = temp;
    temps[temp].prev_copy = temp;
    temps[temp].is_const = false;
    temps[temp].mask = -1;
}

/* Reset all temporaries, given that there are NB_TEMPS of them. */
static void reset_all_temps(int nb_temps)
{
    bitmap_zero(temps_used.l, nb_temps);
}

/* Initialize and activate a temporary. */
static void init_temp_info(TCGArg temp)
{
    if (!test_bit(temp, temps_used.l)) {
        temps[temp].next_copy = temp;
        temps[temp].prev_copy = temp;
        temps[temp].is_const = false;
        temps[temp].mask = -1;
        set_bit(temp, temps_used.l);
    }
}

static int op_bits(TCGOpcode op)
{
    const TCGOpDef *def = &tcg_op_defs[op];
    return def->flags & TCG_OPF_64BIT ? 64 : 32;
}

static TCGOpcode op_to_mov(TCGOpcode op)
{
    switch (op_bits(op)) {
    case 32:
        return INDEX_op_mov_i32;
    case 64:
        return INDEX_op_mov_i64;
    default:
        fprintf(stderr, "op_to_mov: unexpected return value of "
                "function op_bits.\n");
        tcg_abort();
    }
}

static TCGOpcode op_to_movi(TCGOpcode op)
{
    switch (op_bits(op)) {
    case 32:
        return INDEX_op_movi_i32;
    case 64:
        return INDEX_op_movi_i64;
    default:
        fprintf(stderr, "op_to_movi: unexpected return value of "
                "function op_bits.\n");
        tcg_abort();
    }
}

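/* Pick the best representative from TEMP's copy list: a global if one
   exists, then a temp local, and otherwise TEMP itself. */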
static TCGArg find_better_copy(TCGContext *s, TCGArg temp)
{
    TCGArg i;

    /* If this is already a global, we can't do better. */
    if (temp < s->nb_globals) {
        return temp;
    }

    /* Search for a global first. */
    for (i = temps[temp].next_copy ; i != temp ; i = temps[i].next_copy) {
        if (i < s->nb_globals) {
            return i;
        }
    }

    /* If it is a temp, search for a temp local. */
    if (!s->temps[temp].temp_local) {
        for (i = temps[temp].next_copy ; i != temp ; i = temps[i].next_copy) {
            if (s->temps[i].temp_local) {
                return i;
            }
        }
    }

    /* Failing to find a better representation, return the same temp. */
    return temp;
}

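/* Return true if ARG1 and ARG2 are known to hold the same value, i.e.
   they are the same temp or members of the same copy list. */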
static bool temps_are_copies(TCGArg arg1, TCGArg arg2)
{
    TCGArg i;

    if (arg1 == arg2) {
        return true;
    }

    if (!temp_is_copy(arg1) || !temp_is_copy(arg2)) {
        return false;
    }

    for (i = temps[arg1].next_copy ; i != arg1 ; i = temps[i].next_copy) {
        if (i == arg2) {
            return true;
        }
    }

    return false;
}

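/* Rewrite OP in place as a movi that sets DST to the constant VAL, and
   record the new known-constant state and known-bits mask of DST. */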
static void tcg_opt_gen_movi(TCGContext *s, TCGOp *op, TCGArg *args,
                             TCGArg dst, TCGArg val)
{
    TCGOpcode new_op = op_to_movi(op->opc);
    tcg_target_ulong mask;

    op->opc = new_op;

    reset_temp(dst);
    temps[dst].is_const = true;
    temps[dst].val = val;
    mask = val;
    if (TCG_TARGET_REG_BITS > 32 && new_op == INDEX_op_movi_i32) {
        /* High bits of the destination are now garbage. */
        mask |= ~0xffffffffull;
    }
    temps[dst].mask = mask;

    args[0] = dst;
    args[1] = val;
}

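/* Rewrite OP in place as a mov from SRC to DST.  If the two temps are
   already known copies the op is removed entirely; otherwise DST joins
   SRC's copy list when their types match. */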
static void tcg_opt_gen_mov(TCGContext *s, TCGOp *op, TCGArg *args,
                            TCGArg dst, TCGArg src)
{
    if (temps_are_copies(dst, src)) {
        tcg_op_remove(s, op);
        return;
    }

    TCGOpcode new_op = op_to_mov(op->opc);
    tcg_target_ulong mask;

    op->opc = new_op;

    reset_temp(dst);
    mask = temps[src].mask;
    if (TCG_TARGET_REG_BITS > 32 && new_op == INDEX_op_mov_i32) {
        /* High bits of the destination are now garbage. */
        mask |= ~0xffffffffull;
    }
    temps[dst].mask = mask;

    if (s->temps[src].type == s->temps[dst].type) {
        temps[dst].next_copy = temps[src].next_copy;
        temps[dst].prev_copy = src;
        temps[temps[dst].next_copy].prev_copy = dst;
        temps[src].next_copy = dst;
        temps[dst].is_const = temps[src].is_const;
        temps[dst].val = temps[src].val;
    }

    args[0] = dst;
    args[1] = src;
}

static TCGArg do_constant_folding_2(TCGOpcode op, TCGArg x, TCGArg y)
{
    uint64_t l64, h64;

    switch (op) {
    CASE_OP_32_64(add):
        return x + y;

    CASE_OP_32_64(sub):
        return x - y;

    CASE_OP_32_64(mul):
        return x * y;

    CASE_OP_32_64(and):
        return x & y;

    CASE_OP_32_64(or):
        return x | y;

    CASE_OP_32_64(xor):
        return x ^ y;

    case INDEX_op_shl_i32:
        return (uint32_t)x << (y & 31);

    case INDEX_op_shl_i64:
        return (uint64_t)x << (y & 63);

    case INDEX_op_shr_i32:
        return (uint32_t)x >> (y & 31);

    case INDEX_op_shr_i64:
        return (uint64_t)x >> (y & 63);

    case INDEX_op_sar_i32:
        return (int32_t)x >> (y & 31);

    case INDEX_op_sar_i64:
        return (int64_t)x >> (y & 63);

    case INDEX_op_rotr_i32:
        return ror32(x, y & 31);

    case INDEX_op_rotr_i64:
        return ror64(x, y & 63);

    case INDEX_op_rotl_i32:
        return rol32(x, y & 31);

    case INDEX_op_rotl_i64:
        return rol64(x, y & 63);

    CASE_OP_32_64(not):
        return ~x;

    CASE_OP_32_64(neg):
        return -x;

    CASE_OP_32_64(andc):
        return x & ~y;

    CASE_OP_32_64(orc):
        return x | ~y;

    CASE_OP_32_64(eqv):
        return ~(x ^ y);

    CASE_OP_32_64(nand):
        return ~(x & y);

    CASE_OP_32_64(nor):
        return ~(x | y);

    case INDEX_op_clz_i32:
        return (uint32_t)x ? clz32(x) : y;

    case INDEX_op_clz_i64:
        return x ? clz64(x) : y;

    case INDEX_op_ctz_i32:
        return (uint32_t)x ? ctz32(x) : y;

    case INDEX_op_ctz_i64:
        return x ? ctz64(x) : y;

    case INDEX_op_ctpop_i32:
        return ctpop32(x);

    case INDEX_op_ctpop_i64:
        return ctpop64(x);

    CASE_OP_32_64(ext8s):
        return (int8_t)x;

    CASE_OP_32_64(ext16s):
        return (int16_t)x;

    CASE_OP_32_64(ext8u):
        return (uint8_t)x;

    CASE_OP_32_64(ext16u):
        return (uint16_t)x;

    case INDEX_op_ext_i32_i64:
    case INDEX_op_ext32s_i64:
        return (int32_t)x;

    case INDEX_op_extu_i32_i64:
    case INDEX_op_extrl_i64_i32:
    case INDEX_op_ext32u_i64:
        return (uint32_t)x;

    case INDEX_op_extrh_i64_i32:
        return (uint64_t)x >> 32;

    case INDEX_op_muluh_i32:
        return ((uint64_t)(uint32_t)x * (uint32_t)y) >> 32;
    case INDEX_op_mulsh_i32:
        return ((int64_t)(int32_t)x * (int32_t)y) >> 32;

    case INDEX_op_muluh_i64:
        mulu64(&l64, &h64, x, y);
        return h64;
    case INDEX_op_mulsh_i64:
        muls64(&l64, &h64, x, y);
        return h64;

    case INDEX_op_div_i32:
        /* Avoid crashing on divide by zero, otherwise undefined. */
        return (int32_t)x / ((int32_t)y ? : 1);
    case INDEX_op_divu_i32:
        return (uint32_t)x / ((uint32_t)y ? : 1);
    case INDEX_op_div_i64:
        return (int64_t)x / ((int64_t)y ? : 1);
    case INDEX_op_divu_i64:
        return (uint64_t)x / ((uint64_t)y ? : 1);

    case INDEX_op_rem_i32:
        return (int32_t)x % ((int32_t)y ? : 1);
    case INDEX_op_remu_i32:
        return (uint32_t)x % ((uint32_t)y ? : 1);
    case INDEX_op_rem_i64:
        return (int64_t)x % ((int64_t)y ? : 1);
    case INDEX_op_remu_i64:
        return (uint64_t)x % ((uint64_t)y ? : 1);

    default:
        fprintf(stderr,
                "Unrecognized operation %d in do_constant_folding.\n", op);
        tcg_abort();
    }
}

static TCGArg do_constant_folding(TCGOpcode op, TCGArg x, TCGArg y)
{
    TCGArg res = do_constant_folding_2(op, x, y);
    if (op_bits(op) == 32) {
        res = (int32_t)res;
    }
    return res;
}

static bool do_constant_folding_cond_32(uint32_t x, uint32_t y, TCGCond c)
{
    switch (c) {
    case TCG_COND_EQ:
        return x == y;
    case TCG_COND_NE:
        return x != y;
    case TCG_COND_LT:
        return (int32_t)x < (int32_t)y;
    case TCG_COND_GE:
        return (int32_t)x >= (int32_t)y;
    case TCG_COND_LE:
        return (int32_t)x <= (int32_t)y;
    case TCG_COND_GT:
        return (int32_t)x > (int32_t)y;
    case TCG_COND_LTU:
        return x < y;
    case TCG_COND_GEU:
        return x >= y;
    case TCG_COND_LEU:
        return x <= y;
    case TCG_COND_GTU:
        return x > y;
    default:
        tcg_abort();
    }
}

static bool do_constant_folding_cond_64(uint64_t x, uint64_t y, TCGCond c)
{
    switch (c) {
    case TCG_COND_EQ:
        return x == y;
    case TCG_COND_NE:
        return x != y;
    case TCG_COND_LT:
        return (int64_t)x < (int64_t)y;
    case TCG_COND_GE:
        return (int64_t)x >= (int64_t)y;
    case TCG_COND_LE:
        return (int64_t)x <= (int64_t)y;
    case TCG_COND_GT:
        return (int64_t)x > (int64_t)y;
    case TCG_COND_LTU:
        return x < y;
    case TCG_COND_GEU:
        return x >= y;
    case TCG_COND_LEU:
        return x <= y;
    case TCG_COND_GTU:
        return x > y;
    default:
        tcg_abort();
    }
}

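/* Evaluate the condition for the case where both comparison operands are
   known to hold the same value (x REL x). */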
static bool do_constant_folding_cond_eq(TCGCond c)
{
    switch (c) {
    case TCG_COND_GT:
    case TCG_COND_LTU:
    case TCG_COND_LT:
    case TCG_COND_GTU:
    case TCG_COND_NE:
        return 0;
    case TCG_COND_GE:
    case TCG_COND_GEU:
    case TCG_COND_LE:
    case TCG_COND_LEU:
    case TCG_COND_EQ:
        return 1;
    default:
        tcg_abort();
    }
}

/* Return 2 if the condition can't be simplified, and the result
   of the condition (0 or 1) if it can. */
static TCGArg do_constant_folding_cond(TCGOpcode op, TCGArg x,
                                       TCGArg y, TCGCond c)
{
    if (temp_is_const(x) && temp_is_const(y)) {
        switch (op_bits(op)) {
        case 32:
            return do_constant_folding_cond_32(temps[x].val, temps[y].val, c);
        case 64:
            return do_constant_folding_cond_64(temps[x].val, temps[y].val, c);
        default:
            tcg_abort();
        }
    } else if (temps_are_copies(x, y)) {
        return do_constant_folding_cond_eq(c);
    } else if (temp_is_const(y) && temps[y].val == 0) {
        switch (c) {
        case TCG_COND_LTU:
            return 0;
        case TCG_COND_GEU:
            return 1;
        default:
            return 2;
        }
    }
    return 2;
}

/* Return 2 if the condition can't be simplified, and the result
   of the condition (0 or 1) if it can. */
static TCGArg do_constant_folding_cond2(TCGArg *p1, TCGArg *p2, TCGCond c)
{
    TCGArg al = p1[0], ah = p1[1];
    TCGArg bl = p2[0], bh = p2[1];

    if (temp_is_const(bl) && temp_is_const(bh)) {
        uint64_t b = ((uint64_t)temps[bh].val << 32) | (uint32_t)temps[bl].val;

        if (temp_is_const(al) && temp_is_const(ah)) {
            uint64_t a;
            a = ((uint64_t)temps[ah].val << 32) | (uint32_t)temps[al].val;
            return do_constant_folding_cond_64(a, b, c);
        }
        if (b == 0) {
            switch (c) {
            case TCG_COND_LTU:
                return 0;
            case TCG_COND_GEU:
                return 1;
            default:
                break;
            }
        }
    }
    if (temps_are_copies(al, bl) && temps_are_copies(ah, bh)) {
        return do_constant_folding_cond_eq(c);
    }
    return 2;
}

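/* Canonicalize the operand order of a commutative operation: put a
   constant second, and otherwise prefer repeating DEST as the first
   source.  Return true if the operands were swapped, so callers can
   adjust a condition code accordingly. */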
static bool swap_commutative(TCGArg dest, TCGArg *p1, TCGArg *p2)
{
    TCGArg a1 = *p1, a2 = *p2;
    int sum = 0;
    sum += temp_is_const(a1);
    sum -= temp_is_const(a2);

    /* Prefer the constant in the second argument, and then the form
       op a, a, b, which is better handled on non-RISC hosts. */
    if (sum > 0 || (sum == 0 && dest == a2)) {
        *p1 = a2;
        *p2 = a1;
        return true;
    }
    return false;
}

static bool swap_commutative2(TCGArg *p1, TCGArg *p2)
{
    int sum = 0;
    sum += temp_is_const(p1[0]);
    sum += temp_is_const(p1[1]);
    sum -= temp_is_const(p2[0]);
    sum -= temp_is_const(p2[1]);
    if (sum > 0) {
        TCGArg t;
        t = p1[0], p1[0] = p2[0], p2[0] = t;
        t = p1[1], p1[1] = p2[1], p2[1] = t;
        return true;
    }
    return false;
}

/* Propagate constants and copies, fold constant expressions. */
void tcg_optimize(TCGContext *s)
{
    int oi, oi_next, nb_temps, nb_globals;
    TCGArg *prev_mb_args = NULL;

    /* The array TEMPS has an element for each temp.
       If this temp holds a constant then its value is kept in the element.
       If this temp is a copy of other ones then the other copies are
       available through the doubly linked circular list. */

    nb_temps = s->nb_temps;
    nb_globals = s->nb_globals;
    reset_all_temps(nb_temps);

    for (oi = s->gen_op_buf[0].next; oi != 0; oi = oi_next) {
        tcg_target_ulong mask, partmask, affected;
        int nb_oargs, nb_iargs, i;
        TCGArg tmp;

        TCGOp * const op = &s->gen_op_buf[oi];
        TCGArg * const args = &s->gen_opparam_buf[op->args];
        TCGOpcode opc = op->opc;
        const TCGOpDef *def = &tcg_op_defs[opc];

        oi_next = op->next;

        /* Count the arguments, and initialize the temps that are
           going to be used */
        if (opc == INDEX_op_call) {
            nb_oargs = op->callo;
            nb_iargs = op->calli;
            for (i = 0; i < nb_oargs + nb_iargs; i++) {
                tmp = args[i];
                if (tmp != TCG_CALL_DUMMY_ARG) {
                    init_temp_info(tmp);
                }
            }
        } else {
            nb_oargs = def->nb_oargs;
            nb_iargs = def->nb_iargs;
            for (i = 0; i < nb_oargs + nb_iargs; i++) {
                init_temp_info(args[i]);
            }
        }

        /* Do copy propagation */
        for (i = nb_oargs; i < nb_oargs + nb_iargs; i++) {
            if (temp_is_copy(args[i])) {
                args[i] = find_better_copy(s, args[i]);
            }
        }

        /* For commutative operations make constant second argument */
        switch (opc) {
        CASE_OP_32_64(add):
        CASE_OP_32_64(mul):
        CASE_OP_32_64(and):
        CASE_OP_32_64(or):
        CASE_OP_32_64(xor):
        CASE_OP_32_64(eqv):
        CASE_OP_32_64(nand):
        CASE_OP_32_64(nor):
        CASE_OP_32_64(muluh):
        CASE_OP_32_64(mulsh):
            swap_commutative(args[0], &args[1], &args[2]);
            break;
        CASE_OP_32_64(brcond):
            if (swap_commutative(-1, &args[0], &args[1])) {
                args[2] = tcg_swap_cond(args[2]);
            }
            break;
        CASE_OP_32_64(setcond):
            if (swap_commutative(args[0], &args[1], &args[2])) {
                args[3] = tcg_swap_cond(args[3]);
            }
            break;
        CASE_OP_32_64(movcond):
            if (swap_commutative(-1, &args[1], &args[2])) {
                args[5] = tcg_swap_cond(args[5]);
            }
            /* For movcond, we canonicalize the "false" input reg to match
               the destination reg so that the tcg backend can implement
               a "move if true" operation. */
            if (swap_commutative(args[0], &args[4], &args[3])) {
                args[5] = tcg_invert_cond(args[5]);
            }
            break;
        CASE_OP_32_64(add2):
            swap_commutative(args[0], &args[2], &args[4]);
            swap_commutative(args[1], &args[3], &args[5]);
            break;
        CASE_OP_32_64(mulu2):
        CASE_OP_32_64(muls2):
            swap_commutative(args[0], &args[2], &args[3]);
            break;
        case INDEX_op_brcond2_i32:
            if (swap_commutative2(&args[0], &args[2])) {
                args[4] = tcg_swap_cond(args[4]);
            }
            break;
        case INDEX_op_setcond2_i32:
            if (swap_commutative2(&args[1], &args[3])) {
                args[5] = tcg_swap_cond(args[5]);
            }
            break;
        default:
            break;
        }

        /* Simplify expressions for "shift/rot r, 0, a => movi r, 0"
           and "sub r, 0, a => neg r, a" cases. */
        switch (opc) {
        CASE_OP_32_64(shl):
        CASE_OP_32_64(shr):
        CASE_OP_32_64(sar):
        CASE_OP_32_64(rotl):
        CASE_OP_32_64(rotr):
            if (temp_is_const(args[1]) && temps[args[1]].val == 0) {
                tcg_opt_gen_movi(s, op, args, args[0], 0);
                continue;
            }
            break;
        CASE_OP_32_64(sub):
            {
                TCGOpcode neg_op;
                bool have_neg;

                if (temp_is_const(args[2])) {
                    /* Proceed with possible constant folding. */
                    break;
                }
                if (opc == INDEX_op_sub_i32) {
                    neg_op = INDEX_op_neg_i32;
                    have_neg = TCG_TARGET_HAS_neg_i32;
                } else {
                    neg_op = INDEX_op_neg_i64;
                    have_neg = TCG_TARGET_HAS_neg_i64;
                }
                if (!have_neg) {
                    break;
                }
                if (temp_is_const(args[1]) && temps[args[1]].val == 0) {
                    op->opc = neg_op;
                    reset_temp(args[0]);
                    args[1] = args[2];
                    continue;
                }
            }
            break;
        CASE_OP_32_64(xor):
        CASE_OP_32_64(nand):
            if (!temp_is_const(args[1])
                && temp_is_const(args[2]) && temps[args[2]].val == -1) {
                i = 1;
                goto try_not;
            }
            break;
        CASE_OP_32_64(nor):
            if (!temp_is_const(args[1])
                && temp_is_const(args[2]) && temps[args[2]].val == 0) {
                i = 1;
                goto try_not;
            }
            break;
        CASE_OP_32_64(andc):
            if (!temp_is_const(args[2])
                && temp_is_const(args[1]) && temps[args[1]].val == -1) {
                i = 2;
                goto try_not;
            }
            break;
        CASE_OP_32_64(orc):
        CASE_OP_32_64(eqv):
            if (!temp_is_const(args[2])
                && temp_is_const(args[1]) && temps[args[1]].val == 0) {
                i = 2;
                goto try_not;
            }
            break;
        try_not:
            {
                TCGOpcode not_op;
                bool have_not;

                if (def->flags & TCG_OPF_64BIT) {
                    not_op = INDEX_op_not_i64;
                    have_not = TCG_TARGET_HAS_not_i64;
                } else {
                    not_op = INDEX_op_not_i32;
                    have_not = TCG_TARGET_HAS_not_i32;
                }
                if (!have_not) {
                    break;
                }
                op->opc = not_op;
                reset_temp(args[0]);
                args[1] = args[i];
                continue;
            }
        default:
            break;
        }

        /* Simplify expression for "op r, a, const => mov r, a" cases */
        switch (opc) {
        CASE_OP_32_64(add):
        CASE_OP_32_64(sub):
        CASE_OP_32_64(shl):
        CASE_OP_32_64(shr):
        CASE_OP_32_64(sar):
        CASE_OP_32_64(rotl):
        CASE_OP_32_64(rotr):
        CASE_OP_32_64(or):
        CASE_OP_32_64(xor):
        CASE_OP_32_64(andc):
            if (!temp_is_const(args[1])
                && temp_is_const(args[2]) && temps[args[2]].val == 0) {
                tcg_opt_gen_mov(s, op, args, args[0], args[1]);
                continue;
            }
            break;
        CASE_OP_32_64(and):
        CASE_OP_32_64(orc):
        CASE_OP_32_64(eqv):
            if (!temp_is_const(args[1])
                && temp_is_const(args[2]) && temps[args[2]].val == -1) {
                tcg_opt_gen_mov(s, op, args, args[0], args[1]);
                continue;
            }
            break;
        default:
            break;
        }

        /* Simplify using known-zero bits.  Currently only ops with a single
           output argument are supported. */
        mask = -1;
        affected = -1;
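        /* Below, "mask" accumulates the bits of the result that may still
           be nonzero, and "affected" the input bits that can influence the
           result; if "affected" ends up zero, the op is a plain copy of
           args[1]. */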
        switch (opc) {
        CASE_OP_32_64(ext8s):
            if ((temps[args[1]].mask & 0x80) != 0) {
                break;
            }
        CASE_OP_32_64(ext8u):
            mask = 0xff;
            goto and_const;
        CASE_OP_32_64(ext16s):
            if ((temps[args[1]].mask & 0x8000) != 0) {
                break;
            }
        CASE_OP_32_64(ext16u):
            mask = 0xffff;
            goto and_const;
        case INDEX_op_ext32s_i64:
            if ((temps[args[1]].mask & 0x80000000) != 0) {
                break;
            }
        case INDEX_op_ext32u_i64:
            mask = 0xffffffffU;
            goto and_const;

        CASE_OP_32_64(and):
            mask = temps[args[2]].mask;
            if (temp_is_const(args[2])) {
        and_const:
                affected = temps[args[1]].mask & ~mask;
            }
            mask = temps[args[1]].mask & mask;
            break;

        case INDEX_op_ext_i32_i64:
            if ((temps[args[1]].mask & 0x80000000) != 0) {
                break;
            }
        case INDEX_op_extu_i32_i64:
            /* We do not compute affected as it is a size changing op. */
            mask = (uint32_t)temps[args[1]].mask;
            break;

        CASE_OP_32_64(andc):
            /* Known-zeros does not imply known-ones.  Therefore unless
               args[2] is constant, we can't infer anything from it. */
            if (temp_is_const(args[2])) {
                mask = ~temps[args[2]].mask;
                goto and_const;
            }
            /* But we certainly know nothing outside args[1] may be set. */
            mask = temps[args[1]].mask;
            break;

        case INDEX_op_sar_i32:
            if (temp_is_const(args[2])) {
                tmp = temps[args[2]].val & 31;
                mask = (int32_t)temps[args[1]].mask >> tmp;
            }
            break;
        case INDEX_op_sar_i64:
            if (temp_is_const(args[2])) {
                tmp = temps[args[2]].val & 63;
                mask = (int64_t)temps[args[1]].mask >> tmp;
            }
            break;

        case INDEX_op_shr_i32:
            if (temp_is_const(args[2])) {
                tmp = temps[args[2]].val & 31;
                mask = (uint32_t)temps[args[1]].mask >> tmp;
            }
            break;
        case INDEX_op_shr_i64:
            if (temp_is_const(args[2])) {
                tmp = temps[args[2]].val & 63;
                mask = (uint64_t)temps[args[1]].mask >> tmp;
            }
            break;

        case INDEX_op_extrl_i64_i32:
            mask = (uint32_t)temps[args[1]].mask;
            break;
        case INDEX_op_extrh_i64_i32:
            mask = (uint64_t)temps[args[1]].mask >> 32;
            break;

        CASE_OP_32_64(shl):
            if (temp_is_const(args[2])) {
                tmp = temps[args[2]].val & (TCG_TARGET_REG_BITS - 1);
                mask = temps[args[1]].mask << tmp;
            }
            break;

        CASE_OP_32_64(neg):
            /* Set to 1 all bits to the left of the rightmost. */
            mask = -(temps[args[1]].mask & -temps[args[1]].mask);
            break;

        CASE_OP_32_64(deposit):
            mask = deposit64(temps[args[1]].mask, args[3], args[4],
                             temps[args[2]].mask);
            break;

        CASE_OP_32_64(extract):
            mask = extract64(temps[args[1]].mask, args[2], args[3]);
            if (args[2] == 0) {
                affected = temps[args[1]].mask & ~mask;
            }
            break;
        CASE_OP_32_64(sextract):
            mask = sextract64(temps[args[1]].mask, args[2], args[3]);
            if (args[2] == 0 && (tcg_target_long)mask >= 0) {
                affected = temps[args[1]].mask & ~mask;
            }
            break;

        CASE_OP_32_64(or):
        CASE_OP_32_64(xor):
            mask = temps[args[1]].mask | temps[args[2]].mask;
            break;

        case INDEX_op_clz_i32:
        case INDEX_op_ctz_i32:
            mask = temps[args[2]].mask | 31;
            break;

        case INDEX_op_clz_i64:
        case INDEX_op_ctz_i64:
            mask = temps[args[2]].mask | 63;
            break;

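        /* ctpop of an N-bit value yields 0..N, so only the low bits needed
           to represent N may be set: 32|31 == 0x3f and 64|63 == 0x7f. */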
        case INDEX_op_ctpop_i32:
            mask = 32 | 31;
            break;
        case INDEX_op_ctpop_i64:
            mask = 64 | 63;
            break;

        CASE_OP_32_64(setcond):
        case INDEX_op_setcond2_i32:
            mask = 1;
            break;

        CASE_OP_32_64(movcond):
            mask = temps[args[3]].mask | temps[args[4]].mask;
            break;

        CASE_OP_32_64(ld8u):
            mask = 0xff;
            break;
        CASE_OP_32_64(ld16u):
            mask = 0xffff;
            break;
        case INDEX_op_ld32u_i64:
            mask = 0xffffffffu;
            break;

        CASE_OP_32_64(qemu_ld):
            {
                TCGMemOpIdx oi = args[nb_oargs + nb_iargs];
                TCGMemOp mop = get_memop(oi);
                if (!(mop & MO_SIGN)) {
                    mask = (2ULL << ((8 << (mop & MO_SIZE)) - 1)) - 1;
                }
            }
            break;

        default:
            break;
        }

        /* 32-bit ops generate 32-bit results.  For the result-is-zero test
           below, we can ignore high bits, but for further optimizations we
           need to record that the high bits contain garbage. */
        partmask = mask;
        if (!(def->flags & TCG_OPF_64BIT)) {
            mask |= ~(tcg_target_ulong)0xffffffffu;
            partmask &= 0xffffffffu;
            affected &= 0xffffffffu;
        }

        if (partmask == 0) {
            tcg_debug_assert(nb_oargs == 1);
            tcg_opt_gen_movi(s, op, args, args[0], 0);
            continue;
        }
        if (affected == 0) {
            tcg_debug_assert(nb_oargs == 1);
            tcg_opt_gen_mov(s, op, args, args[0], args[1]);
            continue;
        }

        /* Simplify expression for "op r, a, 0 => movi r, 0" cases */
        switch (opc) {
        CASE_OP_32_64(and):
        CASE_OP_32_64(mul):
        CASE_OP_32_64(muluh):
        CASE_OP_32_64(mulsh):
            if (temp_is_const(args[2]) && temps[args[2]].val == 0) {
                tcg_opt_gen_movi(s, op, args, args[0], 0);
                continue;
            }
            break;
        default:
            break;
        }

        /* Simplify expression for "op r, a, a => mov r, a" cases */
        switch (opc) {
        CASE_OP_32_64(or):
        CASE_OP_32_64(and):
            if (temps_are_copies(args[1], args[2])) {
                tcg_opt_gen_mov(s, op, args, args[0], args[1]);
                continue;
            }
            break;
        default:
            break;
        }

        /* Simplify expression for "op r, a, a => movi r, 0" cases */
        switch (opc) {
        CASE_OP_32_64(andc):
        CASE_OP_32_64(sub):
        CASE_OP_32_64(xor):
            if (temps_are_copies(args[1], args[2])) {
                tcg_opt_gen_movi(s, op, args, args[0], 0);
                continue;
            }
            break;
        default:
            break;
        }

        /* Propagate constants through copy operations and do constant
           folding.  Constants will be substituted for arguments by the
           register allocator where needed and possible.  Also detect
           copies. */
        switch (opc) {
        CASE_OP_32_64(mov):
            tcg_opt_gen_mov(s, op, args, args[0], args[1]);
            break;
        CASE_OP_32_64(movi):
            tcg_opt_gen_movi(s, op, args, args[0], args[1]);
            break;

        CASE_OP_32_64(not):
        CASE_OP_32_64(neg):
        CASE_OP_32_64(ext8s):
        CASE_OP_32_64(ext8u):
        CASE_OP_32_64(ext16s):
        CASE_OP_32_64(ext16u):
        CASE_OP_32_64(ctpop):
        case INDEX_op_ext32s_i64:
        case INDEX_op_ext32u_i64:
        case INDEX_op_ext_i32_i64:
        case INDEX_op_extu_i32_i64:
        case INDEX_op_extrl_i64_i32:
        case INDEX_op_extrh_i64_i32:
            if (temp_is_const(args[1])) {
                tmp = do_constant_folding(opc, temps[args[1]].val, 0);
                tcg_opt_gen_movi(s, op, args, args[0], tmp);
                break;
            }
            goto do_default;

        CASE_OP_32_64(add):
        CASE_OP_32_64(sub):
        CASE_OP_32_64(mul):
        CASE_OP_32_64(or):
        CASE_OP_32_64(and):
        CASE_OP_32_64(xor):
        CASE_OP_32_64(shl):
        CASE_OP_32_64(shr):
        CASE_OP_32_64(sar):
        CASE_OP_32_64(rotl):
        CASE_OP_32_64(rotr):
        CASE_OP_32_64(andc):
        CASE_OP_32_64(orc):
        CASE_OP_32_64(eqv):
        CASE_OP_32_64(nand):
        CASE_OP_32_64(nor):
        CASE_OP_32_64(muluh):
        CASE_OP_32_64(mulsh):
        CASE_OP_32_64(div):
        CASE_OP_32_64(divu):
        CASE_OP_32_64(rem):
        CASE_OP_32_64(remu):
            if (temp_is_const(args[1]) && temp_is_const(args[2])) {
                tmp = do_constant_folding(opc, temps[args[1]].val,
                                          temps[args[2]].val);
                tcg_opt_gen_movi(s, op, args, args[0], tmp);
                break;
            }
            goto do_default;

        CASE_OP_32_64(clz):
        CASE_OP_32_64(ctz):
            if (temp_is_const(args[1])) {
                TCGArg v = temps[args[1]].val;
                if (v != 0) {
                    tmp = do_constant_folding(opc, v, 0);
                    tcg_opt_gen_movi(s, op, args, args[0], tmp);
                } else {
                    tcg_opt_gen_mov(s, op, args, args[0], args[2]);
                }
                break;
            }
            goto do_default;

        CASE_OP_32_64(deposit):
            if (temp_is_const(args[1]) && temp_is_const(args[2])) {
                tmp = deposit64(temps[args[1]].val, args[3], args[4],
                                temps[args[2]].val);
                tcg_opt_gen_movi(s, op, args, args[0], tmp);
                break;
            }
            goto do_default;

        CASE_OP_32_64(extract):
            if (temp_is_const(args[1])) {
                tmp = extract64(temps[args[1]].val, args[2], args[3]);
                tcg_opt_gen_movi(s, op, args, args[0], tmp);
                break;
            }
            goto do_default;

        CASE_OP_32_64(sextract):
            if (temp_is_const(args[1])) {
                tmp = sextract64(temps[args[1]].val, args[2], args[3]);
                tcg_opt_gen_movi(s, op, args, args[0], tmp);
                break;
            }
            goto do_default;

        CASE_OP_32_64(setcond):
            tmp = do_constant_folding_cond(opc, args[1], args[2], args[3]);
            if (tmp != 2) {
                tcg_opt_gen_movi(s, op, args, args[0], tmp);
                break;
            }
            goto do_default;

        CASE_OP_32_64(brcond):
            tmp = do_constant_folding_cond(opc, args[0], args[1], args[2]);
            if (tmp != 2) {
                if (tmp) {
                    reset_all_temps(nb_temps);
                    op->opc = INDEX_op_br;
                    args[0] = args[3];
                } else {
                    tcg_op_remove(s, op);
                }
                break;
            }
            goto do_default;

        CASE_OP_32_64(movcond):
            tmp = do_constant_folding_cond(opc, args[1], args[2], args[5]);
            if (tmp != 2) {
                tcg_opt_gen_mov(s, op, args, args[0], args[4-tmp]);
                break;
            }
            if (temp_is_const(args[3]) && temp_is_const(args[4])) {
                tcg_target_ulong tv = temps[args[3]].val;
                tcg_target_ulong fv = temps[args[4]].val;
                TCGCond cond = args[5];
                if (fv == 1 && tv == 0) {
                    cond = tcg_invert_cond(cond);
                } else if (!(tv == 1 && fv == 0)) {
                    goto do_default;
                }
                args[3] = cond;
                op->opc = opc = (opc == INDEX_op_movcond_i32
                                 ? INDEX_op_setcond_i32
                                 : INDEX_op_setcond_i64);
                nb_iargs = 2;
            }
            goto do_default;

        case INDEX_op_add2_i32:
        case INDEX_op_sub2_i32:
            if (temp_is_const(args[2]) && temp_is_const(args[3])
                && temp_is_const(args[4]) && temp_is_const(args[5])) {
                uint32_t al = temps[args[2]].val;
                uint32_t ah = temps[args[3]].val;
                uint32_t bl = temps[args[4]].val;
                uint32_t bh = temps[args[5]].val;
                uint64_t a = ((uint64_t)ah << 32) | al;
                uint64_t b = ((uint64_t)bh << 32) | bl;
                TCGArg rl, rh;
                TCGOp *op2 = tcg_op_insert_before(s, op, INDEX_op_movi_i32, 2);
                TCGArg *args2 = &s->gen_opparam_buf[op2->args];

                if (opc == INDEX_op_add2_i32) {
                    a += b;
                } else {
                    a -= b;
                }

                rl = args[0];
                rh = args[1];
                tcg_opt_gen_movi(s, op, args, rl, (int32_t)a);
                tcg_opt_gen_movi(s, op2, args2, rh, (int32_t)(a >> 32));

                /* We've done all we need to do with the movi.  Skip it. */
                oi_next = op2->next;
                break;
            }
            goto do_default;

        case INDEX_op_mulu2_i32:
            if (temp_is_const(args[2]) && temp_is_const(args[3])) {
                uint32_t a = temps[args[2]].val;
                uint32_t b = temps[args[3]].val;
                uint64_t r = (uint64_t)a * b;
                TCGArg rl, rh;
                TCGOp *op2 = tcg_op_insert_before(s, op, INDEX_op_movi_i32, 2);
                TCGArg *args2 = &s->gen_opparam_buf[op2->args];

                rl = args[0];
                rh = args[1];
                tcg_opt_gen_movi(s, op, args, rl, (int32_t)r);
                tcg_opt_gen_movi(s, op2, args2, rh, (int32_t)(r >> 32));

                /* We've done all we need to do with the movi.  Skip it. */
                oi_next = op2->next;
                break;
            }
            goto do_default;

        case INDEX_op_brcond2_i32:
            tmp = do_constant_folding_cond2(&args[0], &args[2], args[4]);
            if (tmp != 2) {
                if (tmp) {
                do_brcond_true:
                    reset_all_temps(nb_temps);
                    op->opc = INDEX_op_br;
                    args[0] = args[5];
                } else {
                do_brcond_false:
                    tcg_op_remove(s, op);
                }
            } else if ((args[4] == TCG_COND_LT || args[4] == TCG_COND_GE)
                       && temp_is_const(args[2]) && temps[args[2]].val == 0
                       && temp_is_const(args[3]) && temps[args[3]].val == 0) {
                /* Simplify LT/GE comparisons vs zero to a single compare
                   vs the high word of the input. */
            do_brcond_high:
                reset_all_temps(nb_temps);
                op->opc = INDEX_op_brcond_i32;
                args[0] = args[1];
                args[1] = args[3];
                args[2] = args[4];
                args[3] = args[5];
            } else if (args[4] == TCG_COND_EQ) {
                /* Simplify EQ comparisons where one of the pairs
                   can be simplified. */
                tmp = do_constant_folding_cond(INDEX_op_brcond_i32,
                                               args[0], args[2], TCG_COND_EQ);
                if (tmp == 0) {
                    goto do_brcond_false;
                } else if (tmp == 1) {
                    goto do_brcond_high;
                }
                tmp = do_constant_folding_cond(INDEX_op_brcond_i32,
                                               args[1], args[3], TCG_COND_EQ);
                if (tmp == 0) {
                    goto do_brcond_false;
                } else if (tmp != 1) {
                    goto do_default;
                }
            do_brcond_low:
                reset_all_temps(nb_temps);
                op->opc = INDEX_op_brcond_i32;
                args[1] = args[2];
                args[2] = args[4];
                args[3] = args[5];
            } else if (args[4] == TCG_COND_NE) {
                /* Simplify NE comparisons where one of the pairs
                   can be simplified. */
                tmp = do_constant_folding_cond(INDEX_op_brcond_i32,
                                               args[0], args[2], TCG_COND_NE);
                if (tmp == 0) {
                    goto do_brcond_high;
                } else if (tmp == 1) {
                    goto do_brcond_true;
                }
                tmp = do_constant_folding_cond(INDEX_op_brcond_i32,
                                               args[1], args[3], TCG_COND_NE);
                if (tmp == 0) {
                    goto do_brcond_low;
                } else if (tmp == 1) {
                    goto do_brcond_true;
                }
                goto do_default;
            } else {
                goto do_default;
            }
            break;

        case INDEX_op_setcond2_i32:
            tmp = do_constant_folding_cond2(&args[1], &args[3], args[5]);
            if (tmp != 2) {
            do_setcond_const:
                tcg_opt_gen_movi(s, op, args, args[0], tmp);
            } else if ((args[5] == TCG_COND_LT || args[5] == TCG_COND_GE)
                       && temp_is_const(args[3]) && temps[args[3]].val == 0
                       && temp_is_const(args[4]) && temps[args[4]].val == 0) {
                /* Simplify LT/GE comparisons vs zero to a single compare
                   vs the high word of the input. */
            do_setcond_high:
                reset_temp(args[0]);
                temps[args[0]].mask = 1;
                op->opc = INDEX_op_setcond_i32;
                args[1] = args[2];
                args[2] = args[4];
                args[3] = args[5];
            } else if (args[5] == TCG_COND_EQ) {
                /* Simplify EQ comparisons where one of the pairs
                   can be simplified. */
                tmp = do_constant_folding_cond(INDEX_op_setcond_i32,
                                               args[1], args[3], TCG_COND_EQ);
                if (tmp == 0) {
                    goto do_setcond_const;
                } else if (tmp == 1) {
                    goto do_setcond_high;
                }
                tmp = do_constant_folding_cond(INDEX_op_setcond_i32,
                                               args[2], args[4], TCG_COND_EQ);
                if (tmp == 0) {
                    goto do_setcond_high;
                } else if (tmp != 1) {
                    goto do_default;
                }
            do_setcond_low:
                reset_temp(args[0]);
                temps[args[0]].mask = 1;
                op->opc = INDEX_op_setcond_i32;
                args[2] = args[3];
                args[3] = args[5];
            } else if (args[5] == TCG_COND_NE) {
                /* Simplify NE comparisons where one of the pairs
                   can be simplified. */
                tmp = do_constant_folding_cond(INDEX_op_setcond_i32,
                                               args[1], args[3], TCG_COND_NE);
                if (tmp == 0) {
                    goto do_setcond_high;
                } else if (tmp == 1) {
                    goto do_setcond_const;
                }
                tmp = do_constant_folding_cond(INDEX_op_setcond_i32,
                                               args[2], args[4], TCG_COND_NE);
                if (tmp == 0) {
                    goto do_setcond_low;
                } else if (tmp == 1) {
                    goto do_setcond_const;
                }
                goto do_default;
            } else {
                goto do_default;
            }
            break;

        case INDEX_op_call:
            if (!(args[nb_oargs + nb_iargs + 1]
                  & (TCG_CALL_NO_READ_GLOBALS | TCG_CALL_NO_WRITE_GLOBALS))) {
                for (i = 0; i < nb_globals; i++) {
                    if (test_bit(i, temps_used.l)) {
                        reset_temp(i);
                    }
                }
            }
            goto do_reset_output;

        default:
        do_default:
            /* Default case: we know nothing about the operation (or were
               unable to compute the operation result) so no propagation
               is done.  We trash everything if the operation is the end
               of a basic block, otherwise we only trash the output args.
               "mask" is the non-zero bits mask for the first output arg. */
            if (def->flags & TCG_OPF_BB_END) {
                reset_all_temps(nb_temps);
            } else {
        do_reset_output:
                for (i = 0; i < nb_oargs; i++) {
                    reset_temp(args[i]);
                    /* Save the corresponding known-zero bits mask for the
                       first output argument (only one supported so far). */
                    if (i == 0) {
                        temps[args[i]].mask = mask;
                    }
                }
            }
            break;
        }

        /* Eliminate duplicate and redundant fence instructions. */
        if (prev_mb_args) {
            switch (opc) {
            case INDEX_op_mb:
                /* Merge two barriers of the same type into one,
                 * or a weaker barrier into a stronger one,
                 * or two weaker barriers into a stronger one.
                 *   mb X; mb Y => mb X|Y
                 *   mb; strl => mb; st
                 *   ldaq; mb => ld; mb
                 *   ldaq; strl => ld; mb; st
                 * Other combinations are also merged into a strong
                 * barrier.  This is stricter than specified but for
                 * the purposes of TCG is better than not optimizing.
                 */
                prev_mb_args[0] |= args[0];
                tcg_op_remove(s, op);
                break;

            default:
                /* Opcodes that end the block stop the optimization. */
                if ((def->flags & TCG_OPF_BB_END) == 0) {
                    break;
                }
                /* fallthru */
            case INDEX_op_qemu_ld_i32:
            case INDEX_op_qemu_ld_i64:
            case INDEX_op_qemu_st_i32:
            case INDEX_op_qemu_st_i64:
            case INDEX_op_call:
                /* Opcodes that touch guest memory stop the optimization. */
                prev_mb_args = NULL;
                break;
            }
        } else if (opc == INDEX_op_mb) {
            prev_mb_args = args;
        }
    }
}