/*
 * Optimizations for Tiny Code Generator for QEMU
 *
 * Copyright (c) 2010 Samsung Electronics.
 * Contributed by Kirill Batuzov <batuzovk@ispras.ru>
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

#include "qemu/osdep.h"
#include "qemu-common.h"
#include "exec/cpu-common.h"
#include "tcg-op.h"

#define CASE_OP_32_64(x)                                        \
        glue(glue(case INDEX_op_, x), _i32):                    \
        glue(glue(case INDEX_op_, x), _i64)

struct tcg_temp_info {
    bool is_const;
    uint16_t prev_copy;
    uint16_t next_copy;
    tcg_target_ulong val;
    tcg_target_ulong mask;
};

static struct tcg_temp_info temps[TCG_MAX_TEMPS];
static TCGTempSet temps_used;

static inline bool temp_is_const(TCGArg arg)
{
    return temps[arg].is_const;
}

static inline bool temp_is_copy(TCGArg arg)
{
    return temps[arg].next_copy != arg;
}

/* Reset TEMP's state, possibly removing the temp from the list of copies.  */
static void reset_temp(TCGArg temp)
{
    temps[temps[temp].next_copy].prev_copy = temps[temp].prev_copy;
    temps[temps[temp].prev_copy].next_copy = temps[temp].next_copy;
    temps[temp].next_copy = temp;
    temps[temp].prev_copy = temp;
    temps[temp].is_const = false;
    temps[temp].mask = -1;
}

/* Reset all temporaries, given that there are NB_TEMPS of them.  */
static void reset_all_temps(int nb_temps)
{
    bitmap_zero(temps_used.l, nb_temps);
}

/* Initialize and activate a temporary.  */
static void init_temp_info(TCGArg temp)
{
    if (!test_bit(temp, temps_used.l)) {
        temps[temp].next_copy = temp;
        temps[temp].prev_copy = temp;
        temps[temp].is_const = false;
        temps[temp].mask = -1;
        set_bit(temp, temps_used.l);
    }
}

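/* Return the number of bits (32 or 64) on which the opcode OP operates. */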
static int op_bits(TCGOpcode op)
{
    const TCGOpDef *def = &tcg_op_defs[op];
    return def->flags & TCG_OPF_64BIT ? 64 : 32;
}

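/* Return the mov opcode of the same width as OP. */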
static TCGOpcode op_to_mov(TCGOpcode op)
{
    switch (op_bits(op)) {
    case 32:
        return INDEX_op_mov_i32;
    case 64:
        return INDEX_op_mov_i64;
    default:
        fprintf(stderr, "op_to_mov: unexpected return value of "
                "function op_bits.\n");
        tcg_abort();
    }
}

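/* Return the movi opcode of the same width as OP. */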
static TCGOpcode op_to_movi(TCGOpcode op)
{
    switch (op_bits(op)) {
    case 32:
        return INDEX_op_movi_i32;
    case 64:
        return INDEX_op_movi_i64;
    default:
        fprintf(stderr, "op_to_movi: unexpected return value of "
                "function op_bits.\n");
        tcg_abort();
    }
}

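/* Among the copies of TEMP, return the most durable representative:
   a global if one exists, then a temp local, otherwise TEMP itself. */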
static TCGArg find_better_copy(TCGContext *s, TCGArg temp)
{
    TCGArg i;

    /* If this is already a global, we can't do better. */
    if (temp < s->nb_globals) {
        return temp;
    }

    /* Search for a global first. */
    for (i = temps[temp].next_copy ; i != temp ; i = temps[i].next_copy) {
        if (i < s->nb_globals) {
            return i;
        }
    }

    /* If it is a temp, search for a temp local. */
    if (!s->temps[temp].temp_local) {
        for (i = temps[temp].next_copy ; i != temp ; i = temps[i].next_copy) {
            if (s->temps[i].temp_local) {
                return i;
            }
        }
    }

    /* Failure to find a better representation, return the same temp. */
    return temp;
}

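/* Return true if ARG1 and ARG2 are known to hold the same value, i.e. they
   belong to the same copy list. */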
static bool temps_are_copies(TCGArg arg1, TCGArg arg2)
{
    TCGArg i;

    if (arg1 == arg2) {
        return true;
    }

    if (!temp_is_copy(arg1) || !temp_is_copy(arg2)) {
        return false;
    }

    for (i = temps[arg1].next_copy ; i != arg1 ; i = temps[i].next_copy) {
        if (i == arg2) {
            return true;
        }
    }

    return false;
}

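/* Rewrite OP into a movi DST, VAL and record DST as holding that constant. */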
static void tcg_opt_gen_movi(TCGContext *s, TCGOp *op, TCGArg *args,
                             TCGArg dst, TCGArg val)
{
    TCGOpcode new_op = op_to_movi(op->opc);
    tcg_target_ulong mask;

    op->opc = new_op;

    reset_temp(dst);
    temps[dst].is_const = true;
    temps[dst].val = val;
    mask = val;
    if (TCG_TARGET_REG_BITS > 32 && new_op == INDEX_op_movi_i32) {
        /* High bits of the destination are now garbage.  */
        mask |= ~0xffffffffull;
    }
    temps[dst].mask = mask;

    args[0] = dst;
    args[1] = val;
}

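/* Rewrite OP into a mov DST, SRC (dropping it entirely if DST and SRC are
   already copies) and, when the types match, link DST into SRC's copy list. */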
static void tcg_opt_gen_mov(TCGContext *s, TCGOp *op, TCGArg *args,
                            TCGArg dst, TCGArg src)
{
    if (temps_are_copies(dst, src)) {
        tcg_op_remove(s, op);
        return;
    }

    TCGOpcode new_op = op_to_mov(op->opc);
    tcg_target_ulong mask;

    op->opc = new_op;

    reset_temp(dst);
    mask = temps[src].mask;
    if (TCG_TARGET_REG_BITS > 32 && new_op == INDEX_op_mov_i32) {
        /* High bits of the destination are now garbage.  */
        mask |= ~0xffffffffull;
    }
    temps[dst].mask = mask;

    if (s->temps[src].type == s->temps[dst].type) {
        temps[dst].next_copy = temps[src].next_copy;
        temps[dst].prev_copy = src;
        temps[temps[dst].next_copy].prev_copy = dst;
        temps[src].next_copy = dst;
        temps[dst].is_const = temps[src].is_const;
        temps[dst].val = temps[src].val;
    }

    args[0] = dst;
    args[1] = src;
}

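/* Evaluate OP on the constant operands X and Y, without truncating the
   result to the width of the opcode. */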
static TCGArg do_constant_folding_2(TCGOpcode op, TCGArg x, TCGArg y)
{
    uint64_t l64, h64;

    switch (op) {
    CASE_OP_32_64(add):
        return x + y;

    CASE_OP_32_64(sub):
        return x - y;

    CASE_OP_32_64(mul):
        return x * y;

    CASE_OP_32_64(and):
        return x & y;

    CASE_OP_32_64(or):
        return x | y;

    CASE_OP_32_64(xor):
        return x ^ y;

    case INDEX_op_shl_i32:
        return (uint32_t)x << (y & 31);

    case INDEX_op_shl_i64:
        return (uint64_t)x << (y & 63);

    case INDEX_op_shr_i32:
        return (uint32_t)x >> (y & 31);

    case INDEX_op_shr_i64:
        return (uint64_t)x >> (y & 63);

    case INDEX_op_sar_i32:
        return (int32_t)x >> (y & 31);

    case INDEX_op_sar_i64:
        return (int64_t)x >> (y & 63);

    case INDEX_op_rotr_i32:
        return ror32(x, y & 31);

    case INDEX_op_rotr_i64:
        return ror64(x, y & 63);

    case INDEX_op_rotl_i32:
        return rol32(x, y & 31);

    case INDEX_op_rotl_i64:
        return rol64(x, y & 63);

    CASE_OP_32_64(not):
        return ~x;

    CASE_OP_32_64(neg):
        return -x;

    CASE_OP_32_64(andc):
        return x & ~y;

    CASE_OP_32_64(orc):
        return x | ~y;

    CASE_OP_32_64(eqv):
        return ~(x ^ y);

    CASE_OP_32_64(nand):
        return ~(x & y);

    CASE_OP_32_64(nor):
        return ~(x | y);

    CASE_OP_32_64(ext8s):
        return (int8_t)x;

    CASE_OP_32_64(ext16s):
        return (int16_t)x;

    CASE_OP_32_64(ext8u):
        return (uint8_t)x;

    CASE_OP_32_64(ext16u):
        return (uint16_t)x;

    case INDEX_op_ext_i32_i64:
    case INDEX_op_ext32s_i64:
        return (int32_t)x;

    case INDEX_op_extu_i32_i64:
    case INDEX_op_extrl_i64_i32:
    case INDEX_op_ext32u_i64:
        return (uint32_t)x;

    case INDEX_op_extrh_i64_i32:
        return (uint64_t)x >> 32;

    case INDEX_op_muluh_i32:
        return ((uint64_t)(uint32_t)x * (uint32_t)y) >> 32;
    case INDEX_op_mulsh_i32:
        return ((int64_t)(int32_t)x * (int32_t)y) >> 32;

    case INDEX_op_muluh_i64:
        mulu64(&l64, &h64, x, y);
        return h64;
    case INDEX_op_mulsh_i64:
        muls64(&l64, &h64, x, y);
        return h64;

    case INDEX_op_div_i32:
        /* Avoid crashing on divide by zero, otherwise undefined. */
        return (int32_t)x / ((int32_t)y ? : 1);
    case INDEX_op_divu_i32:
        return (uint32_t)x / ((uint32_t)y ? : 1);
    case INDEX_op_div_i64:
        return (int64_t)x / ((int64_t)y ? : 1);
    case INDEX_op_divu_i64:
        return (uint64_t)x / ((uint64_t)y ? : 1);

    case INDEX_op_rem_i32:
        return (int32_t)x % ((int32_t)y ? : 1);
    case INDEX_op_remu_i32:
        return (uint32_t)x % ((uint32_t)y ? : 1);
    case INDEX_op_rem_i64:
        return (int64_t)x % ((int64_t)y ? : 1);
    case INDEX_op_remu_i64:
        return (uint64_t)x % ((uint64_t)y ? : 1);

    default:
        fprintf(stderr,
                "Unrecognized operation %d in do_constant_folding.\n", op);
        tcg_abort();
    }
}

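/* Evaluate OP on the constant operands X and Y, truncating the result to
   32 bits for 32-bit opcodes. */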
static TCGArg do_constant_folding(TCGOpcode op, TCGArg x, TCGArg y)
{
    TCGArg res = do_constant_folding_2(op, x, y);
    if (op_bits(op) == 32) {
        res = (int32_t)res;
    }
    return res;
}

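/* Evaluate the 32-bit comparison "X c Y" on constant operands. */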
static bool do_constant_folding_cond_32(uint32_t x, uint32_t y, TCGCond c)
{
    switch (c) {
    case TCG_COND_EQ:
        return x == y;
    case TCG_COND_NE:
        return x != y;
    case TCG_COND_LT:
        return (int32_t)x < (int32_t)y;
    case TCG_COND_GE:
        return (int32_t)x >= (int32_t)y;
    case TCG_COND_LE:
        return (int32_t)x <= (int32_t)y;
    case TCG_COND_GT:
        return (int32_t)x > (int32_t)y;
    case TCG_COND_LTU:
        return x < y;
    case TCG_COND_GEU:
        return x >= y;
    case TCG_COND_LEU:
        return x <= y;
    case TCG_COND_GTU:
        return x > y;
    default:
        tcg_abort();
    }
}

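/* Evaluate the 64-bit comparison "X c Y" on constant operands. */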
static bool do_constant_folding_cond_64(uint64_t x, uint64_t y, TCGCond c)
{
    switch (c) {
    case TCG_COND_EQ:
        return x == y;
    case TCG_COND_NE:
        return x != y;
    case TCG_COND_LT:
        return (int64_t)x < (int64_t)y;
    case TCG_COND_GE:
        return (int64_t)x >= (int64_t)y;
    case TCG_COND_LE:
        return (int64_t)x <= (int64_t)y;
    case TCG_COND_GT:
        return (int64_t)x > (int64_t)y;
    case TCG_COND_LTU:
        return x < y;
    case TCG_COND_GEU:
        return x >= y;
    case TCG_COND_LEU:
        return x <= y;
    case TCG_COND_GTU:
        return x > y;
    default:
        tcg_abort();
    }
}

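/* Evaluate the comparison C given that its two operands are known to be
   equal. */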
static bool do_constant_folding_cond_eq(TCGCond c)
{
    switch (c) {
    case TCG_COND_GT:
    case TCG_COND_LTU:
    case TCG_COND_LT:
    case TCG_COND_GTU:
    case TCG_COND_NE:
        return 0;
    case TCG_COND_GE:
    case TCG_COND_GEU:
    case TCG_COND_LE:
    case TCG_COND_LEU:
    case TCG_COND_EQ:
        return 1;
    default:
        tcg_abort();
    }
}

/* Return 2 if the condition can't be simplified, and the result
   of the condition (0 or 1) if it can */
static TCGArg do_constant_folding_cond(TCGOpcode op, TCGArg x,
                                       TCGArg y, TCGCond c)
{
    if (temp_is_const(x) && temp_is_const(y)) {
        switch (op_bits(op)) {
        case 32:
            return do_constant_folding_cond_32(temps[x].val, temps[y].val, c);
        case 64:
            return do_constant_folding_cond_64(temps[x].val, temps[y].val, c);
        default:
            tcg_abort();
        }
    } else if (temps_are_copies(x, y)) {
        return do_constant_folding_cond_eq(c);
    } else if (temp_is_const(y) && temps[y].val == 0) {
        switch (c) {
        case TCG_COND_LTU:
            return 0;
        case TCG_COND_GEU:
            return 1;
        default:
            return 2;
        }
    } else {
        return 2;
    }
}

/* Return 2 if the condition can't be simplified, and the result
   of the condition (0 or 1) if it can */
static TCGArg do_constant_folding_cond2(TCGArg *p1, TCGArg *p2, TCGCond c)
{
    TCGArg al = p1[0], ah = p1[1];
    TCGArg bl = p2[0], bh = p2[1];

    if (temp_is_const(bl) && temp_is_const(bh)) {
        uint64_t b = ((uint64_t)temps[bh].val << 32) | (uint32_t)temps[bl].val;

        if (temp_is_const(al) && temp_is_const(ah)) {
            uint64_t a;
            a = ((uint64_t)temps[ah].val << 32) | (uint32_t)temps[al].val;
            return do_constant_folding_cond_64(a, b, c);
        }
        if (b == 0) {
            switch (c) {
            case TCG_COND_LTU:
                return 0;
            case TCG_COND_GEU:
                return 1;
            default:
                break;
            }
        }
    }
    if (temps_are_copies(al, bl) && temps_are_copies(ah, bh)) {
        return do_constant_folding_cond_eq(c);
    }
    return 2;
}

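/* Canonicalize the operands of a commutative operation: put a constant
   second and prefer the form "op a, a, b".  Return true if swapped. */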
static bool swap_commutative(TCGArg dest, TCGArg *p1, TCGArg *p2)
{
    TCGArg a1 = *p1, a2 = *p2;
    int sum = 0;
    sum += temp_is_const(a1);
    sum -= temp_is_const(a2);

    /* Prefer the constant in second argument, and then the form
       op a, a, b, which is better handled on non-RISC hosts. */
    if (sum > 0 || (sum == 0 && dest == a2)) {
        *p1 = a2;
        *p2 = a1;
        return true;
    }
    return false;
}

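/* Same as swap_commutative, but for the double-word operand pairs P1 and P2
   of a double-word operation. */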
static bool swap_commutative2(TCGArg *p1, TCGArg *p2)
{
    int sum = 0;
    sum += temp_is_const(p1[0]);
    sum += temp_is_const(p1[1]);
    sum -= temp_is_const(p2[0]);
    sum -= temp_is_const(p2[1]);
    if (sum > 0) {
        TCGArg t;
        t = p1[0], p1[0] = p2[0], p2[0] = t;
        t = p1[1], p1[1] = p2[1], p2[1] = t;
        return true;
    }
    return false;
}

/* Propagate constants and copies, fold constant expressions. */
void tcg_optimize(TCGContext *s)
{
    int oi, oi_next, nb_temps, nb_globals;
    TCGArg *prev_mb_args = NULL;

    /* Array VALS has an element for each temp.
       If this temp holds a constant then its value is kept in VALS' element.
       If this temp is a copy of other ones then the other copies are
       available through the doubly linked circular list. */

    nb_temps = s->nb_temps;
    nb_globals = s->nb_globals;
    reset_all_temps(nb_temps);

    for (oi = s->gen_op_buf[0].next; oi != 0; oi = oi_next) {
        tcg_target_ulong mask, partmask, affected;
        int nb_oargs, nb_iargs, i;
        TCGArg tmp;

        TCGOp * const op = &s->gen_op_buf[oi];
        TCGArg * const args = &s->gen_opparam_buf[op->args];
        TCGOpcode opc = op->opc;
        const TCGOpDef *def = &tcg_op_defs[opc];

        oi_next = op->next;

        /* Count the arguments, and initialize the temps that are
           going to be used */
        if (opc == INDEX_op_call) {
            nb_oargs = op->callo;
            nb_iargs = op->calli;
            for (i = 0; i < nb_oargs + nb_iargs; i++) {
                tmp = args[i];
                if (tmp != TCG_CALL_DUMMY_ARG) {
                    init_temp_info(tmp);
                }
            }
        } else {
            nb_oargs = def->nb_oargs;
            nb_iargs = def->nb_iargs;
            for (i = 0; i < nb_oargs + nb_iargs; i++) {
                init_temp_info(args[i]);
            }
        }

        /* Do copy propagation */
        for (i = nb_oargs; i < nb_oargs + nb_iargs; i++) {
            if (temp_is_copy(args[i])) {
                args[i] = find_better_copy(s, args[i]);
            }
        }

        /* For commutative operations make constant second argument */
        switch (opc) {
        CASE_OP_32_64(add):
        CASE_OP_32_64(mul):
        CASE_OP_32_64(and):
        CASE_OP_32_64(or):
        CASE_OP_32_64(xor):
        CASE_OP_32_64(eqv):
        CASE_OP_32_64(nand):
        CASE_OP_32_64(nor):
        CASE_OP_32_64(muluh):
        CASE_OP_32_64(mulsh):
            swap_commutative(args[0], &args[1], &args[2]);
            break;
        CASE_OP_32_64(brcond):
            if (swap_commutative(-1, &args[0], &args[1])) {
                args[2] = tcg_swap_cond(args[2]);
            }
            break;
        CASE_OP_32_64(setcond):
            if (swap_commutative(args[0], &args[1], &args[2])) {
                args[3] = tcg_swap_cond(args[3]);
            }
            break;
        CASE_OP_32_64(movcond):
            if (swap_commutative(-1, &args[1], &args[2])) {
                args[5] = tcg_swap_cond(args[5]);
            }
            /* For movcond, we canonicalize the "false" input reg to match
               the destination reg so that the tcg backend can implement
               a "move if true" operation.  */
            if (swap_commutative(args[0], &args[4], &args[3])) {
                args[5] = tcg_invert_cond(args[5]);
            }
            break;
        CASE_OP_32_64(add2):
            swap_commutative(args[0], &args[2], &args[4]);
            swap_commutative(args[1], &args[3], &args[5]);
            break;
        CASE_OP_32_64(mulu2):
        CASE_OP_32_64(muls2):
            swap_commutative(args[0], &args[2], &args[3]);
            break;
        case INDEX_op_brcond2_i32:
            if (swap_commutative2(&args[0], &args[2])) {
                args[4] = tcg_swap_cond(args[4]);
            }
            break;
        case INDEX_op_setcond2_i32:
            if (swap_commutative2(&args[1], &args[3])) {
                args[5] = tcg_swap_cond(args[5]);
            }
            break;
        default:
            break;
        }

        /* Simplify expressions for "shift/rot r, 0, a => movi r, 0",
           and "sub r, 0, a => neg r, a" case.  */
        switch (opc) {
        CASE_OP_32_64(shl):
        CASE_OP_32_64(shr):
        CASE_OP_32_64(sar):
        CASE_OP_32_64(rotl):
        CASE_OP_32_64(rotr):
            if (temp_is_const(args[1]) && temps[args[1]].val == 0) {
                tcg_opt_gen_movi(s, op, args, args[0], 0);
                continue;
            }
            break;
        CASE_OP_32_64(sub):
            {
                TCGOpcode neg_op;
                bool have_neg;

                if (temp_is_const(args[2])) {
                    /* Proceed with possible constant folding. */
                    break;
                }
                if (opc == INDEX_op_sub_i32) {
                    neg_op = INDEX_op_neg_i32;
                    have_neg = TCG_TARGET_HAS_neg_i32;
                } else {
                    neg_op = INDEX_op_neg_i64;
                    have_neg = TCG_TARGET_HAS_neg_i64;
                }
                if (!have_neg) {
                    break;
                }
                if (temp_is_const(args[1]) && temps[args[1]].val == 0) {
                    op->opc = neg_op;
                    reset_temp(args[0]);
                    args[1] = args[2];
                    continue;
                }
            }
            break;
        CASE_OP_32_64(xor):
        CASE_OP_32_64(nand):
            if (!temp_is_const(args[1])
                && temp_is_const(args[2]) && temps[args[2]].val == -1) {
                i = 1;
                goto try_not;
            }
            break;
        CASE_OP_32_64(nor):
            if (!temp_is_const(args[1])
                && temp_is_const(args[2]) && temps[args[2]].val == 0) {
                i = 1;
                goto try_not;
            }
            break;
        CASE_OP_32_64(andc):
            if (!temp_is_const(args[2])
                && temp_is_const(args[1]) && temps[args[1]].val == -1) {
                i = 2;
                goto try_not;
            }
            break;
        CASE_OP_32_64(orc):
        CASE_OP_32_64(eqv):
            if (!temp_is_const(args[2])
                && temp_is_const(args[1]) && temps[args[1]].val == 0) {
                i = 2;
                goto try_not;
            }
            break;
        try_not:
            {
                TCGOpcode not_op;
                bool have_not;

                if (def->flags & TCG_OPF_64BIT) {
                    not_op = INDEX_op_not_i64;
                    have_not = TCG_TARGET_HAS_not_i64;
                } else {
                    not_op = INDEX_op_not_i32;
                    have_not = TCG_TARGET_HAS_not_i32;
                }
                if (!have_not) {
                    break;
                }
                op->opc = not_op;
                reset_temp(args[0]);
                args[1] = args[i];
                continue;
            }
        default:
            break;
        }

        /* Simplify expression for "op r, a, const => mov r, a" cases */
        switch (opc) {
        CASE_OP_32_64(add):
        CASE_OP_32_64(sub):
        CASE_OP_32_64(shl):
        CASE_OP_32_64(shr):
        CASE_OP_32_64(sar):
        CASE_OP_32_64(rotl):
        CASE_OP_32_64(rotr):
        CASE_OP_32_64(or):
        CASE_OP_32_64(xor):
        CASE_OP_32_64(andc):
            if (!temp_is_const(args[1])
                && temp_is_const(args[2]) && temps[args[2]].val == 0) {
                tcg_opt_gen_mov(s, op, args, args[0], args[1]);
                continue;
            }
            break;
        CASE_OP_32_64(and):
        CASE_OP_32_64(orc):
        CASE_OP_32_64(eqv):
            if (!temp_is_const(args[1])
                && temp_is_const(args[2]) && temps[args[2]].val == -1) {
                tcg_opt_gen_mov(s, op, args, args[0], args[1]);
                continue;
            }
            break;
        default:
            break;
        }

        /* Simplify using known-zero bits.  Currently only ops with a single
           output argument are supported. */
        mask = -1;
        affected = -1;
        switch (opc) {
        CASE_OP_32_64(ext8s):
            if ((temps[args[1]].mask & 0x80) != 0) {
                break;
            }
        CASE_OP_32_64(ext8u):
            mask = 0xff;
            goto and_const;
        CASE_OP_32_64(ext16s):
            if ((temps[args[1]].mask & 0x8000) != 0) {
                break;
            }
        CASE_OP_32_64(ext16u):
            mask = 0xffff;
            goto and_const;
        case INDEX_op_ext32s_i64:
            if ((temps[args[1]].mask & 0x80000000) != 0) {
                break;
            }
        case INDEX_op_ext32u_i64:
            mask = 0xffffffffU;
            goto and_const;

        CASE_OP_32_64(and):
            mask = temps[args[2]].mask;
            if (temp_is_const(args[2])) {
        and_const:
                affected = temps[args[1]].mask & ~mask;
            }
            mask = temps[args[1]].mask & mask;
            break;

        case INDEX_op_ext_i32_i64:
            if ((temps[args[1]].mask & 0x80000000) != 0) {
                break;
            }
        case INDEX_op_extu_i32_i64:
            /* We do not compute affected as it is a size changing op.  */
            mask = (uint32_t)temps[args[1]].mask;
            break;

        CASE_OP_32_64(andc):
            /* Known-zeros does not imply known-ones.  Therefore unless
               args[2] is constant, we can't infer anything from it.  */
            if (temp_is_const(args[2])) {
                mask = ~temps[args[2]].mask;
                goto and_const;
            }
            /* But we certainly know nothing outside args[1] may be set. */
            mask = temps[args[1]].mask;
            break;

        case INDEX_op_sar_i32:
            if (temp_is_const(args[2])) {
                tmp = temps[args[2]].val & 31;
                mask = (int32_t)temps[args[1]].mask >> tmp;
            }
            break;
        case INDEX_op_sar_i64:
            if (temp_is_const(args[2])) {
                tmp = temps[args[2]].val & 63;
                mask = (int64_t)temps[args[1]].mask >> tmp;
            }
            break;

        case INDEX_op_shr_i32:
            if (temp_is_const(args[2])) {
                tmp = temps[args[2]].val & 31;
                mask = (uint32_t)temps[args[1]].mask >> tmp;
            }
            break;
        case INDEX_op_shr_i64:
            if (temp_is_const(args[2])) {
                tmp = temps[args[2]].val & 63;
                mask = (uint64_t)temps[args[1]].mask >> tmp;
            }
            break;

        case INDEX_op_extrl_i64_i32:
            mask = (uint32_t)temps[args[1]].mask;
            break;
        case INDEX_op_extrh_i64_i32:
            mask = (uint64_t)temps[args[1]].mask >> 32;
            break;

        CASE_OP_32_64(shl):
            if (temp_is_const(args[2])) {
                tmp = temps[args[2]].val & (TCG_TARGET_REG_BITS - 1);
                mask = temps[args[1]].mask << tmp;
            }
            break;

        CASE_OP_32_64(neg):
            /* Set to 1 all bits to the left of the rightmost.  */
            mask = -(temps[args[1]].mask & -temps[args[1]].mask);
            break;

        CASE_OP_32_64(deposit):
            mask = deposit64(temps[args[1]].mask, args[3], args[4],
                             temps[args[2]].mask);
            break;

        CASE_OP_32_64(or):
        CASE_OP_32_64(xor):
            mask = temps[args[1]].mask | temps[args[2]].mask;
            break;

        CASE_OP_32_64(setcond):
        case INDEX_op_setcond2_i32:
            mask = 1;
            break;

        CASE_OP_32_64(movcond):
            mask = temps[args[3]].mask | temps[args[4]].mask;
            break;

        CASE_OP_32_64(ld8u):
            mask = 0xff;
            break;
        CASE_OP_32_64(ld16u):
            mask = 0xffff;
            break;
        case INDEX_op_ld32u_i64:
            mask = 0xffffffffu;
            break;

        CASE_OP_32_64(qemu_ld):
            {
                TCGMemOpIdx oi = args[nb_oargs + nb_iargs];
                TCGMemOp mop = get_memop(oi);
                if (!(mop & MO_SIGN)) {
                    mask = (2ULL << ((8 << (mop & MO_SIZE)) - 1)) - 1;
                }
            }
            break;

        default:
            break;
        }

        /* 32-bit ops generate 32-bit results.  For the result-is-zero test
           below, we can ignore high bits, but for further optimizations we
           need to record that the high bits contain garbage.  */
        partmask = mask;
        if (!(def->flags & TCG_OPF_64BIT)) {
            mask |= ~(tcg_target_ulong)0xffffffffu;
            partmask &= 0xffffffffu;
            affected &= 0xffffffffu;
        }

        if (partmask == 0) {
            tcg_debug_assert(nb_oargs == 1);
            tcg_opt_gen_movi(s, op, args, args[0], 0);
            continue;
        }
        if (affected == 0) {
            tcg_debug_assert(nb_oargs == 1);
            tcg_opt_gen_mov(s, op, args, args[0], args[1]);
            continue;
        }

        /* Simplify expression for "op r, a, 0 => movi r, 0" cases */
        switch (opc) {
        CASE_OP_32_64(and):
        CASE_OP_32_64(mul):
        CASE_OP_32_64(muluh):
        CASE_OP_32_64(mulsh):
            if ((temp_is_const(args[2]) && temps[args[2]].val == 0)) {
                tcg_opt_gen_movi(s, op, args, args[0], 0);
                continue;
            }
            break;
        default:
            break;
        }

        /* Simplify expression for "op r, a, a => mov r, a" cases */
        switch (opc) {
        CASE_OP_32_64(or):
        CASE_OP_32_64(and):
            if (temps_are_copies(args[1], args[2])) {
                tcg_opt_gen_mov(s, op, args, args[0], args[1]);
                continue;
            }
            break;
        default:
            break;
        }

        /* Simplify expression for "op r, a, a => movi r, 0" cases */
        switch (opc) {
        CASE_OP_32_64(andc):
        CASE_OP_32_64(sub):
        CASE_OP_32_64(xor):
            if (temps_are_copies(args[1], args[2])) {
                tcg_opt_gen_movi(s, op, args, args[0], 0);
                continue;
            }
            break;
        default:
            break;
        }

        /* Propagate constants through copy operations and do constant
           folding.  Constants will be substituted to arguments by register
           allocator where needed and possible.  Also detect copies. */
        switch (opc) {
        CASE_OP_32_64(mov):
            tcg_opt_gen_mov(s, op, args, args[0], args[1]);
            break;
        CASE_OP_32_64(movi):
            tcg_opt_gen_movi(s, op, args, args[0], args[1]);
            break;

        CASE_OP_32_64(not):
        CASE_OP_32_64(neg):
        CASE_OP_32_64(ext8s):
        CASE_OP_32_64(ext8u):
        CASE_OP_32_64(ext16s):
        CASE_OP_32_64(ext16u):
        case INDEX_op_ext32s_i64:
        case INDEX_op_ext32u_i64:
        case INDEX_op_ext_i32_i64:
        case INDEX_op_extu_i32_i64:
        case INDEX_op_extrl_i64_i32:
        case INDEX_op_extrh_i64_i32:
            if (temp_is_const(args[1])) {
                tmp = do_constant_folding(opc, temps[args[1]].val, 0);
                tcg_opt_gen_movi(s, op, args, args[0], tmp);
                break;
            }
            goto do_default;

        CASE_OP_32_64(add):
        CASE_OP_32_64(sub):
        CASE_OP_32_64(mul):
        CASE_OP_32_64(or):
        CASE_OP_32_64(and):
        CASE_OP_32_64(xor):
        CASE_OP_32_64(shl):
        CASE_OP_32_64(shr):
        CASE_OP_32_64(sar):
        CASE_OP_32_64(rotl):
        CASE_OP_32_64(rotr):
        CASE_OP_32_64(andc):
        CASE_OP_32_64(orc):
        CASE_OP_32_64(eqv):
        CASE_OP_32_64(nand):
        CASE_OP_32_64(nor):
        CASE_OP_32_64(muluh):
        CASE_OP_32_64(mulsh):
        CASE_OP_32_64(div):
        CASE_OP_32_64(divu):
        CASE_OP_32_64(rem):
        CASE_OP_32_64(remu):
            if (temp_is_const(args[1]) && temp_is_const(args[2])) {
                tmp = do_constant_folding(opc, temps[args[1]].val,
                                          temps[args[2]].val);
                tcg_opt_gen_movi(s, op, args, args[0], tmp);
                break;
            }
            goto do_default;

        CASE_OP_32_64(deposit):
            if (temp_is_const(args[1]) && temp_is_const(args[2])) {
                tmp = deposit64(temps[args[1]].val, args[3], args[4],
                                temps[args[2]].val);
                tcg_opt_gen_movi(s, op, args, args[0], tmp);
                break;
            }
            goto do_default;

        CASE_OP_32_64(setcond):
            tmp = do_constant_folding_cond(opc, args[1], args[2], args[3]);
            if (tmp != 2) {
                tcg_opt_gen_movi(s, op, args, args[0], tmp);
                break;
            }
            goto do_default;

        CASE_OP_32_64(brcond):
            tmp = do_constant_folding_cond(opc, args[0], args[1], args[2]);
            if (tmp != 2) {
                if (tmp) {
                    reset_all_temps(nb_temps);
                    op->opc = INDEX_op_br;
                    args[0] = args[3];
                } else {
                    tcg_op_remove(s, op);
                }
                break;
            }
            goto do_default;

        CASE_OP_32_64(movcond):
            tmp = do_constant_folding_cond(opc, args[1], args[2], args[5]);
            if (tmp != 2) {
                tcg_opt_gen_mov(s, op, args, args[0], args[4-tmp]);
                break;
            }
            goto do_default;

        case INDEX_op_add2_i32:
        case INDEX_op_sub2_i32:
            if (temp_is_const(args[2]) && temp_is_const(args[3])
                && temp_is_const(args[4]) && temp_is_const(args[5])) {
                uint32_t al = temps[args[2]].val;
                uint32_t ah = temps[args[3]].val;
                uint32_t bl = temps[args[4]].val;
                uint32_t bh = temps[args[5]].val;
                uint64_t a = ((uint64_t)ah << 32) | al;
                uint64_t b = ((uint64_t)bh << 32) | bl;
                TCGArg rl, rh;
                TCGOp *op2 = tcg_op_insert_before(s, op, INDEX_op_movi_i32, 2);
                TCGArg *args2 = &s->gen_opparam_buf[op2->args];

                if (opc == INDEX_op_add2_i32) {
                    a += b;
                } else {
                    a -= b;
                }

                rl = args[0];
                rh = args[1];
                tcg_opt_gen_movi(s, op, args, rl, (int32_t)a);
                tcg_opt_gen_movi(s, op2, args2, rh, (int32_t)(a >> 32));

                /* We've done all we need to do with the movi.  Skip it.  */
                oi_next = op2->next;
                break;
            }
            goto do_default;

        case INDEX_op_mulu2_i32:
            if (temp_is_const(args[2]) && temp_is_const(args[3])) {
                uint32_t a = temps[args[2]].val;
                uint32_t b = temps[args[3]].val;
                uint64_t r = (uint64_t)a * b;
                TCGArg rl, rh;
                TCGOp *op2 = tcg_op_insert_before(s, op, INDEX_op_movi_i32, 2);
                TCGArg *args2 = &s->gen_opparam_buf[op2->args];

                rl = args[0];
                rh = args[1];
                tcg_opt_gen_movi(s, op, args, rl, (int32_t)r);
                tcg_opt_gen_movi(s, op2, args2, rh, (int32_t)(r >> 32));

                /* We've done all we need to do with the movi.  Skip it.  */
                oi_next = op2->next;
                break;
            }
            goto do_default;

Richard Henderson6c4382f2012-10-02 11:32:27 -07001134 tmp = do_constant_folding_cond2(&args[0], &args[2], args[4]);
1135 if (tmp != 2) {
1136 if (tmp) {
Richard Hendersona7635512014-04-23 22:18:30 -07001137 do_brcond_true:
Paolo Bonzinid193a142013-01-11 15:42:51 -08001138 reset_all_temps(nb_temps);
Richard Hendersonc45cb8b2014-09-19 13:49:15 -07001139 op->opc = INDEX_op_br;
1140 args[0] = args[5];
Richard Henderson6c4382f2012-10-02 11:32:27 -07001141 } else {
Richard Hendersona7635512014-04-23 22:18:30 -07001142 do_brcond_false:
Richard Henderson0c627cd2014-03-30 16:51:54 -07001143 tcg_op_remove(s, op);
Richard Henderson6c4382f2012-10-02 11:32:27 -07001144 }
1145 } else if ((args[4] == TCG_COND_LT || args[4] == TCG_COND_GE)
Aurelien Jarnod9c769c2015-07-27 12:41:44 +02001146 && temp_is_const(args[2]) && temps[args[2]].val == 0
1147 && temp_is_const(args[3]) && temps[args[3]].val == 0) {
Richard Henderson6c4382f2012-10-02 11:32:27 -07001148 /* Simplify LT/GE comparisons vs zero to a single compare
1149 vs the high word of the input. */
Richard Hendersona7635512014-04-23 22:18:30 -07001150 do_brcond_high:
Paolo Bonzinid193a142013-01-11 15:42:51 -08001151 reset_all_temps(nb_temps);
Richard Hendersonc45cb8b2014-09-19 13:49:15 -07001152 op->opc = INDEX_op_brcond_i32;
1153 args[0] = args[1];
1154 args[1] = args[3];
1155 args[2] = args[4];
1156 args[3] = args[5];
Richard Hendersona7635512014-04-23 22:18:30 -07001157 } else if (args[4] == TCG_COND_EQ) {
1158 /* Simplify EQ comparisons where one of the pairs
1159 can be simplified. */
1160 tmp = do_constant_folding_cond(INDEX_op_brcond_i32,
1161 args[0], args[2], TCG_COND_EQ);
1162 if (tmp == 0) {
1163 goto do_brcond_false;
1164 } else if (tmp == 1) {
1165 goto do_brcond_high;
1166 }
1167 tmp = do_constant_folding_cond(INDEX_op_brcond_i32,
1168 args[1], args[3], TCG_COND_EQ);
1169 if (tmp == 0) {
1170 goto do_brcond_false;
1171 } else if (tmp != 1) {
1172 goto do_default;
1173 }
1174 do_brcond_low:
1175 reset_all_temps(nb_temps);
Richard Hendersonc45cb8b2014-09-19 13:49:15 -07001176 op->opc = INDEX_op_brcond_i32;
1177 args[1] = args[2];
1178 args[2] = args[4];
1179 args[3] = args[5];
Richard Hendersona7635512014-04-23 22:18:30 -07001180 } else if (args[4] == TCG_COND_NE) {
1181 /* Simplify NE comparisons where one of the pairs
1182 can be simplified. */
1183 tmp = do_constant_folding_cond(INDEX_op_brcond_i32,
1184 args[0], args[2], TCG_COND_NE);
1185 if (tmp == 0) {
1186 goto do_brcond_high;
1187 } else if (tmp == 1) {
1188 goto do_brcond_true;
1189 }
1190 tmp = do_constant_folding_cond(INDEX_op_brcond_i32,
1191 args[1], args[3], TCG_COND_NE);
1192 if (tmp == 0) {
1193 goto do_brcond_low;
1194 } else if (tmp == 1) {
1195 goto do_brcond_true;
1196 }
1197 goto do_default;
Richard Henderson6c4382f2012-10-02 11:32:27 -07001198 } else {
1199 goto do_default;
Richard Hendersonbc1473e2012-10-02 11:32:25 -07001200 }
Richard Henderson6c4382f2012-10-02 11:32:27 -07001201 break;
Richard Hendersonbc1473e2012-10-02 11:32:25 -07001202
        case INDEX_op_setcond2_i32:
            tmp = do_constant_folding_cond2(&args[1], &args[3], args[5]);
            if (tmp != 2) {
            do_setcond_const:
                tcg_opt_gen_movi(s, op, args, args[0], tmp);
            } else if ((args[5] == TCG_COND_LT || args[5] == TCG_COND_GE)
                       && temp_is_const(args[3]) && temps[args[3]].val == 0
                       && temp_is_const(args[4]) && temps[args[4]].val == 0) {
                /* Simplify LT/GE comparisons vs zero to a single compare
                   vs the high word of the input.  */
            do_setcond_high:
                reset_temp(args[0]);
                temps[args[0]].mask = 1;
                op->opc = INDEX_op_setcond_i32;
                args[1] = args[2];
                args[2] = args[4];
                args[3] = args[5];
            } else if (args[5] == TCG_COND_EQ) {
                /* Simplify EQ comparisons where one of the pairs
                   can be simplified.  */
                tmp = do_constant_folding_cond(INDEX_op_setcond_i32,
                                               args[1], args[3], TCG_COND_EQ);
                if (tmp == 0) {
                    goto do_setcond_const;
                } else if (tmp == 1) {
                    goto do_setcond_high;
                }
                tmp = do_constant_folding_cond(INDEX_op_setcond_i32,
                                               args[2], args[4], TCG_COND_EQ);
                if (tmp == 0) {
                    goto do_setcond_high;
                } else if (tmp != 1) {
                    goto do_default;
                }
            do_setcond_low:
                reset_temp(args[0]);
                temps[args[0]].mask = 1;
                op->opc = INDEX_op_setcond_i32;
                args[2] = args[3];
                args[3] = args[5];
            } else if (args[5] == TCG_COND_NE) {
                /* Simplify NE comparisons where one of the pairs
                   can be simplified.  */
                tmp = do_constant_folding_cond(INDEX_op_setcond_i32,
                                               args[1], args[3], TCG_COND_NE);
                if (tmp == 0) {
                    goto do_setcond_high;
                } else if (tmp == 1) {
                    goto do_setcond_const;
                }
                tmp = do_constant_folding_cond(INDEX_op_setcond_i32,
                                               args[2], args[4], TCG_COND_NE);
                if (tmp == 0) {
                    goto do_setcond_low;
                } else if (tmp == 1) {
                    goto do_setcond_const;
                }
                goto do_default;
            } else {
                goto do_default;
            }
            break;

        case INDEX_op_call:
            if (!(args[nb_oargs + nb_iargs + 1]
                  & (TCG_CALL_NO_READ_GLOBALS | TCG_CALL_NO_WRITE_GLOBALS))) {
                for (i = 0; i < nb_globals; i++) {
                    if (test_bit(i, temps_used.l)) {
                        reset_temp(i);
                    }
                }
            }
            goto do_reset_output;

        default:
        do_default:
            /* Default case: we know nothing about operation (or were unable
               to compute the operation result) so no propagation is done.
               We trash everything if the operation is the end of a basic
               block, otherwise we only trash the output args.  "mask" is
               the non-zero bits mask for the first output arg.  */
            if (def->flags & TCG_OPF_BB_END) {
                reset_all_temps(nb_temps);
            } else {
        do_reset_output:
                for (i = 0; i < nb_oargs; i++) {
                    reset_temp(args[i]);
                    /* Save the corresponding known-zero bits mask for the
                       first output argument (only one supported so far). */
                    if (i == 0) {
                        temps[args[i]].mask = mask;
                    }
                }
            }
            break;
        }

        /* Eliminate duplicate and redundant fence instructions.  */
        if (prev_mb_args) {
            switch (opc) {
            case INDEX_op_mb:
                /* Merge two barriers of the same type into one,
                 * or a weaker barrier into a stronger one,
                 * or two weaker barriers into a stronger one.
                 *   mb X; mb Y => mb X|Y
                 *   mb; strl => mb; st
                 *   ldaq; mb => ld; mb
                 *   ldaq; strl => ld; mb; st
                 * Other combinations are also merged into a strong
                 * barrier.  This is stricter than specified but for
                 * the purposes of TCG is better than not optimizing.
                 */
                prev_mb_args[0] |= args[0];
                tcg_op_remove(s, op);
                break;

            default:
                /* Opcodes that end the block stop the optimization.  */
                if ((def->flags & TCG_OPF_BB_END) == 0) {
                    break;
                }
                /* fallthru */
            case INDEX_op_qemu_ld_i32:
            case INDEX_op_qemu_ld_i64:
            case INDEX_op_qemu_st_i32:
            case INDEX_op_qemu_st_i64:
            case INDEX_op_call:
                /* Opcodes that touch guest memory stop the optimization.  */
                prev_mb_args = NULL;
                break;
            }
        } else if (opc == INDEX_op_mb) {
            prev_mb_args = args;
        }
    }
}