blob: 26038a695dd8ce01f82393722afe46948926343b [file] [log] [blame]
Kirill Batuzov8f2e8c02011-07-07 16:37:12 +04001/*
2 * Optimizations for Tiny Code Generator for QEMU
3 *
4 * Copyright (c) 2010 Samsung Electronics.
5 * Contributed by Kirill Batuzov <batuzovk@ispras.ru>
6 *
7 * Permission is hereby granted, free of charge, to any person obtaining a copy
8 * of this software and associated documentation files (the "Software"), to deal
9 * in the Software without restriction, including without limitation the rights
10 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
11 * copies of the Software, and to permit persons to whom the Software is
12 * furnished to do so, subject to the following conditions:
13 *
14 * The above copyright notice and this permission notice shall be included in
15 * all copies or substantial portions of the Software.
16 *
17 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
18 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
19 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
20 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
21 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
22 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
23 * THE SOFTWARE.
24 */
25
26#include "config.h"
27
28#include <stdlib.h>
29#include <stdio.h>
30
31#include "qemu-common.h"
32#include "tcg-op.h"
33
/* Expand to the two case labels for the 32-bit and 64-bit variants of
   opcode X; e.g. CASE_OP_32_64(add) becomes
   "case INDEX_op_add_i32: case INDEX_op_add_i64".  Built with glue() so
   the opcode name is pasted together at preprocessing time. */
#define CASE_OP_32_64(x)                        \
        glue(glue(case INDEX_op_, x), _i32):    \
        glue(glue(case INDEX_op_, x), _i64)
/* Dataflow state tracked per temp while the optimizer scans the op
   stream.  See struct tcg_temp_info below for the associated data. */
typedef enum {
    TCG_TEMP_UNDEF = 0,   /* Nothing known yet (zero so memset() resets). */
    TCG_TEMP_CONST,       /* Temp holds the constant in 'val'. */
    TCG_TEMP_COPY,        /* Temp is a copy; 'val' is the class representative. */
    TCG_TEMP_HAS_COPY,    /* Temp is the representative of a copy class. */
    TCG_TEMP_ANY          /* Value unknown at translation time. */
} tcg_temp_state;
45
/* Per-temp optimizer bookkeeping.  All temps known to be copies of one
   another are linked into a circular doubly-linked list through
   prev_copy/next_copy (see tcg_opt_gen_mov/reset_temp). */
struct tcg_temp_info {
    tcg_temp_state state;
    uint16_t prev_copy;   /* Previous temp index in the circular copy list. */
    uint16_t next_copy;   /* Next temp index in the circular copy list. */
    tcg_target_ulong val; /* Constant value (CONST) or representative (COPY). */
};

/* One entry per temp; reset at the start of each optimization run. */
static struct tcg_temp_info temps[TCG_MAX_TEMPS];
54
/* Reset TEMP's state to TCG_TEMP_ANY.  If TEMP was a representative of some
   class of equivalent temp's, a new representative should be chosen in this
   class. */
static void reset_temp(TCGArg temp, int nb_temps, int nb_globals)
{
    int i;
    TCGArg new_base = (TCGArg)-1;   /* -1 == "no replacement representative" */
    if (temps[temp].state == TCG_TEMP_HAS_COPY) {
        /* TEMP led a copy class: pick the first non-global member as the
           new representative (globals may be clobbered across ops, so a
           global cannot lead the class). */
        for (i = temps[temp].next_copy; i != temp; i = temps[i].next_copy) {
            if (i >= nb_globals) {
                temps[i].state = TCG_TEMP_HAS_COPY;
                new_base = i;
                break;
            }
        }
        /* Second pass: either dissolve the class (no candidate found) or
           repoint every member's 'val' at the new representative. */
        for (i = temps[temp].next_copy; i != temp; i = temps[i].next_copy) {
            if (new_base == (TCGArg)-1) {
                temps[i].state = TCG_TEMP_ANY;
            } else {
                temps[i].val = new_base;
            }
        }
        /* Unlink TEMP from the circular list. */
        temps[temps[temp].next_copy].prev_copy = temps[temp].prev_copy;
        temps[temps[temp].prev_copy].next_copy = temps[temp].next_copy;
    } else if (temps[temp].state == TCG_TEMP_COPY) {
        /* TEMP was an ordinary member: just unlink it and remember the
           representative so we can check for a now-singleton class. */
        temps[temps[temp].next_copy].prev_copy = temps[temp].prev_copy;
        temps[temps[temp].prev_copy].next_copy = temps[temp].next_copy;
        new_base = temps[temp].val;
    }
    temps[temp].state = TCG_TEMP_ANY;
    /* If the (new or surviving) representative is now alone in its list,
       it no longer leads a class of copies. */
    if (new_base != (TCGArg)-1 && temps[new_base].next_copy == new_base) {
        temps[new_base].state = TCG_TEMP_ANY;
    }
}
89
Blue Swirlfe0de7a2011-07-30 19:18:32 +000090static int op_bits(TCGOpcode op)
Kirill Batuzov22613af2011-07-07 16:37:13 +040091{
Richard Henderson8399ad52011-08-17 14:11:45 -070092 const TCGOpDef *def = &tcg_op_defs[op];
93 return def->flags & TCG_OPF_64BIT ? 64 : 32;
Kirill Batuzov22613af2011-07-07 16:37:13 +040094}
95
Blue Swirlfe0de7a2011-07-30 19:18:32 +000096static TCGOpcode op_to_movi(TCGOpcode op)
Kirill Batuzov22613af2011-07-07 16:37:13 +040097{
98 switch (op_bits(op)) {
99 case 32:
100 return INDEX_op_movi_i32;
Kirill Batuzov22613af2011-07-07 16:37:13 +0400101 case 64:
102 return INDEX_op_movi_i64;
Kirill Batuzov22613af2011-07-07 16:37:13 +0400103 default:
104 fprintf(stderr, "op_to_movi: unexpected return value of "
105 "function op_bits.\n");
106 tcg_abort();
107 }
108}
109
/* Record DST as a copy of SRC and emit the mov's two arguments into
   GEN_ARGS.  SRC must already be a class representative or untracked
   (never a plain COPY: copy propagation has run, so args point at
   representatives). */
static void tcg_opt_gen_mov(TCGArg *gen_args, TCGArg dst, TCGArg src,
                            int nb_temps, int nb_globals)
{
    reset_temp(dst, nb_temps, nb_globals);
    assert(temps[src].state != TCG_TEMP_COPY);
    /* Only non-global SRCs can lead a copy class; global values may be
       clobbered behind our back, so copies of them are not tracked. */
    if (src >= nb_globals) {
        assert(temps[src].state != TCG_TEMP_CONST);
        if (temps[src].state != TCG_TEMP_HAS_COPY) {
            /* Start a new singleton class with SRC as representative. */
            temps[src].state = TCG_TEMP_HAS_COPY;
            temps[src].next_copy = src;
            temps[src].prev_copy = src;
        }
        /* Splice DST into the circular list right after SRC.
           NB: the assignment order matters — temps[dst].next_copy must be
           read back before temps[src].next_copy is overwritten. */
        temps[dst].state = TCG_TEMP_COPY;
        temps[dst].val = src;
        temps[dst].next_copy = temps[src].next_copy;
        temps[dst].prev_copy = src;
        temps[temps[dst].next_copy].prev_copy = dst;
        temps[src].next_copy = dst;
    }
    gen_args[0] = dst;
    gen_args[1] = src;
}
132
133static void tcg_opt_gen_movi(TCGArg *gen_args, TCGArg dst, TCGArg val,
134 int nb_temps, int nb_globals)
135{
136 reset_temp(dst, nb_temps, nb_globals);
137 temps[dst].state = TCG_TEMP_CONST;
138 temps[dst].val = val;
139 gen_args[0] = dst;
140 gen_args[1] = val;
141}
142
Blue Swirlfe0de7a2011-07-30 19:18:32 +0000143static TCGOpcode op_to_mov(TCGOpcode op)
Kirill Batuzov53108fb2011-07-07 16:37:14 +0400144{
145 switch (op_bits(op)) {
146 case 32:
147 return INDEX_op_mov_i32;
Kirill Batuzov53108fb2011-07-07 16:37:14 +0400148 case 64:
149 return INDEX_op_mov_i64;
Kirill Batuzov53108fb2011-07-07 16:37:14 +0400150 default:
151 fprintf(stderr, "op_to_mov: unexpected return value of "
152 "function op_bits.\n");
153 tcg_abort();
154 }
155}
156
Blue Swirlfe0de7a2011-07-30 19:18:32 +0000157static TCGArg do_constant_folding_2(TCGOpcode op, TCGArg x, TCGArg y)
Kirill Batuzov53108fb2011-07-07 16:37:14 +0400158{
159 switch (op) {
160 CASE_OP_32_64(add):
161 return x + y;
162
163 CASE_OP_32_64(sub):
164 return x - y;
165
166 CASE_OP_32_64(mul):
167 return x * y;
168
Kirill Batuzov9a810902011-07-07 16:37:15 +0400169 CASE_OP_32_64(and):
170 return x & y;
171
172 CASE_OP_32_64(or):
173 return x | y;
174
175 CASE_OP_32_64(xor):
176 return x ^ y;
177
Kirill Batuzov55c09752011-07-07 16:37:16 +0400178 case INDEX_op_shl_i32:
179 return (uint32_t)x << (uint32_t)y;
180
Kirill Batuzov55c09752011-07-07 16:37:16 +0400181 case INDEX_op_shl_i64:
182 return (uint64_t)x << (uint64_t)y;
Kirill Batuzov55c09752011-07-07 16:37:16 +0400183
184 case INDEX_op_shr_i32:
185 return (uint32_t)x >> (uint32_t)y;
186
Kirill Batuzov55c09752011-07-07 16:37:16 +0400187 case INDEX_op_shr_i64:
188 return (uint64_t)x >> (uint64_t)y;
Kirill Batuzov55c09752011-07-07 16:37:16 +0400189
190 case INDEX_op_sar_i32:
191 return (int32_t)x >> (int32_t)y;
192
Kirill Batuzov55c09752011-07-07 16:37:16 +0400193 case INDEX_op_sar_i64:
194 return (int64_t)x >> (int64_t)y;
Kirill Batuzov55c09752011-07-07 16:37:16 +0400195
196 case INDEX_op_rotr_i32:
Richard Henderson25c4d9c2011-08-17 14:11:46 -0700197 x = ((uint32_t)x << (32 - y)) | ((uint32_t)x >> y);
Kirill Batuzov55c09752011-07-07 16:37:16 +0400198 return x;
199
Kirill Batuzov55c09752011-07-07 16:37:16 +0400200 case INDEX_op_rotr_i64:
Richard Henderson25c4d9c2011-08-17 14:11:46 -0700201 x = ((uint64_t)x << (64 - y)) | ((uint64_t)x >> y);
Kirill Batuzov55c09752011-07-07 16:37:16 +0400202 return x;
Kirill Batuzov55c09752011-07-07 16:37:16 +0400203
204 case INDEX_op_rotl_i32:
Richard Henderson25c4d9c2011-08-17 14:11:46 -0700205 x = ((uint32_t)x << y) | ((uint32_t)x >> (32 - y));
Kirill Batuzov55c09752011-07-07 16:37:16 +0400206 return x;
207
Kirill Batuzov55c09752011-07-07 16:37:16 +0400208 case INDEX_op_rotl_i64:
Richard Henderson25c4d9c2011-08-17 14:11:46 -0700209 x = ((uint64_t)x << y) | ((uint64_t)x >> (64 - y));
Kirill Batuzov55c09752011-07-07 16:37:16 +0400210 return x;
Kirill Batuzov55c09752011-07-07 16:37:16 +0400211
Richard Henderson25c4d9c2011-08-17 14:11:46 -0700212 CASE_OP_32_64(not):
Kirill Batuzova640f032011-07-07 16:37:17 +0400213 return ~x;
214
Richard Hendersoncb25c802011-08-17 14:11:47 -0700215 CASE_OP_32_64(neg):
216 return -x;
217
218 CASE_OP_32_64(andc):
219 return x & ~y;
220
221 CASE_OP_32_64(orc):
222 return x | ~y;
223
224 CASE_OP_32_64(eqv):
225 return ~(x ^ y);
226
227 CASE_OP_32_64(nand):
228 return ~(x & y);
229
230 CASE_OP_32_64(nor):
231 return ~(x | y);
232
Richard Henderson25c4d9c2011-08-17 14:11:46 -0700233 CASE_OP_32_64(ext8s):
Kirill Batuzova640f032011-07-07 16:37:17 +0400234 return (int8_t)x;
235
Richard Henderson25c4d9c2011-08-17 14:11:46 -0700236 CASE_OP_32_64(ext16s):
Kirill Batuzova640f032011-07-07 16:37:17 +0400237 return (int16_t)x;
238
Richard Henderson25c4d9c2011-08-17 14:11:46 -0700239 CASE_OP_32_64(ext8u):
Kirill Batuzova640f032011-07-07 16:37:17 +0400240 return (uint8_t)x;
241
Richard Henderson25c4d9c2011-08-17 14:11:46 -0700242 CASE_OP_32_64(ext16u):
Kirill Batuzova640f032011-07-07 16:37:17 +0400243 return (uint16_t)x;
244
Kirill Batuzova640f032011-07-07 16:37:17 +0400245 case INDEX_op_ext32s_i64:
246 return (int32_t)x;
247
248 case INDEX_op_ext32u_i64:
249 return (uint32_t)x;
Kirill Batuzova640f032011-07-07 16:37:17 +0400250
Kirill Batuzov53108fb2011-07-07 16:37:14 +0400251 default:
252 fprintf(stderr,
253 "Unrecognized operation %d in do_constant_folding.\n", op);
254 tcg_abort();
255 }
256}
257
Blue Swirlfe0de7a2011-07-30 19:18:32 +0000258static TCGArg do_constant_folding(TCGOpcode op, TCGArg x, TCGArg y)
Kirill Batuzov53108fb2011-07-07 16:37:14 +0400259{
260 TCGArg res = do_constant_folding_2(op, x, y);
Kirill Batuzov53108fb2011-07-07 16:37:14 +0400261 if (op_bits(op) == 32) {
262 res &= 0xffffffff;
263 }
Kirill Batuzov53108fb2011-07-07 16:37:14 +0400264 return res;
265}
266
/* Evaluate condition C over constant operands X and Y at the operand
   width of OP; returns 1 if the condition holds, 0 otherwise.  Aborts
   on an unexpected width or condition code (the inner switches have no
   default so any unhandled TCGCond falls through to the abort). */
static TCGArg do_constant_folding_cond(TCGOpcode op, TCGArg x,
                                       TCGArg y, TCGCond c)
{
    switch (op_bits(op)) {
    case 32:
        /* Truncate both operands to 32 bits, sign- or zero-extending
           per the signedness of the condition. */
        switch (c) {
        case TCG_COND_EQ:
            return (uint32_t)x == (uint32_t)y;
        case TCG_COND_NE:
            return (uint32_t)x != (uint32_t)y;
        case TCG_COND_LT:
            return (int32_t)x < (int32_t)y;
        case TCG_COND_GE:
            return (int32_t)x >= (int32_t)y;
        case TCG_COND_LE:
            return (int32_t)x <= (int32_t)y;
        case TCG_COND_GT:
            return (int32_t)x > (int32_t)y;
        case TCG_COND_LTU:
            return (uint32_t)x < (uint32_t)y;
        case TCG_COND_GEU:
            return (uint32_t)x >= (uint32_t)y;
        case TCG_COND_LEU:
            return (uint32_t)x <= (uint32_t)y;
        case TCG_COND_GTU:
            return (uint32_t)x > (uint32_t)y;
        }
        break;
    case 64:
        switch (c) {
        case TCG_COND_EQ:
            return (uint64_t)x == (uint64_t)y;
        case TCG_COND_NE:
            return (uint64_t)x != (uint64_t)y;
        case TCG_COND_LT:
            return (int64_t)x < (int64_t)y;
        case TCG_COND_GE:
            return (int64_t)x >= (int64_t)y;
        case TCG_COND_LE:
            return (int64_t)x <= (int64_t)y;
        case TCG_COND_GT:
            return (int64_t)x > (int64_t)y;
        case TCG_COND_LTU:
            return (uint64_t)x < (uint64_t)y;
        case TCG_COND_GEU:
            return (uint64_t)x >= (uint64_t)y;
        case TCG_COND_LEU:
            return (uint64_t)x <= (uint64_t)y;
        case TCG_COND_GTU:
            return (uint64_t)x > (uint64_t)y;
        }
        break;
    }

    fprintf(stderr,
            "Unrecognized bitness %d or condition %d in "
            "do_constant_folding_cond.\n", op_bits(op), c);
    tcg_abort();
}
326
327
/* Propagate constants and copies, fold constant expressions. */
static TCGArg *tcg_constant_folding(TCGContext *s, uint16_t *tcg_opc_ptr,
                                    TCGArg *args, TCGOpDef *tcg_op_defs)
{
    int i, nb_ops, op_index, nb_temps, nb_globals, nb_call_args;
    TCGOpcode op;
    const TCGOpDef *def;
    TCGArg *gen_args;
    TCGArg tmp;
    /* Array VALS has an element for each temp.
       If this temp holds a constant then its value is kept in VALS' element.
       If this temp is a copy of other ones then this equivalence class'
       representative is kept in VALS' element.
       If this temp is neither copy nor constant then corresponding VALS'
       element is unused. */

    nb_temps = s->nb_temps;
    nb_globals = s->nb_globals;
    memset(temps, 0, nb_temps * sizeof(struct tcg_temp_info));

    /* The pass rewrites the op stream in place: ARGS walks the input
       argument words, GEN_ARGS the (never longer) output ones; opcodes
       are rewritten directly in gen_opc_buf. */
    nb_ops = tcg_opc_ptr - gen_opc_buf;
    gen_args = args;
    for (op_index = 0; op_index < nb_ops; op_index++) {
        op = gen_opc_buf[op_index];
        def = &tcg_op_defs[op];
        /* Do copy propagation */
        if (!(def->flags & (TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS))) {
            assert(op != INDEX_op_call);
            /* Replace each input arg that is a tracked copy by its class
               representative (input args follow the output args). */
            for (i = def->nb_oargs; i < def->nb_oargs + def->nb_iargs; i++) {
                if (temps[args[i]].state == TCG_TEMP_COPY) {
                    args[i] = temps[args[i]].val;
                }
            }
        }

        /* For commutative operations make constant second argument */
        switch (op) {
        CASE_OP_32_64(add):
        CASE_OP_32_64(mul):
        CASE_OP_32_64(and):
        CASE_OP_32_64(or):
        CASE_OP_32_64(xor):
        CASE_OP_32_64(eqv):
        CASE_OP_32_64(nand):
        CASE_OP_32_64(nor):
            if (temps[args[1]].state == TCG_TEMP_CONST) {
                tmp = args[1];
                args[1] = args[2];
                args[2] = tmp;
            }
            break;
        CASE_OP_32_64(brcond):
            /* brcond compares args[0]/args[1]; swapping the operands
               requires swapping the condition code as well. */
            if (temps[args[0]].state == TCG_TEMP_CONST
                && temps[args[1]].state != TCG_TEMP_CONST) {
                tmp = args[0];
                args[0] = args[1];
                args[1] = tmp;
                args[2] = tcg_swap_cond(args[2]);
            }
            break;
        CASE_OP_32_64(setcond):
            if (temps[args[1]].state == TCG_TEMP_CONST
                && temps[args[2]].state != TCG_TEMP_CONST) {
                tmp = args[1];
                args[1] = args[2];
                args[2] = tmp;
                args[3] = tcg_swap_cond(args[3]);
            }
            break;
        CASE_OP_32_64(movcond):
            if (temps[args[1]].state == TCG_TEMP_CONST
                && temps[args[2]].state != TCG_TEMP_CONST) {
                tmp = args[1];
                args[1] = args[2];
                args[2] = tmp;
                args[5] = tcg_swap_cond(args[5]);
            }
            /* fallthrough into the empty default (harmless; a break here
               would be clearer). */
        default:
            break;
        }

        /* Simplify expressions for "shift/rot r, 0, a => movi r, 0" */
        switch (op) {
        CASE_OP_32_64(shl):
        CASE_OP_32_64(shr):
        CASE_OP_32_64(sar):
        CASE_OP_32_64(rotl):
        CASE_OP_32_64(rotr):
            if (temps[args[1]].state == TCG_TEMP_CONST
                && temps[args[1]].val == 0) {
                gen_opc_buf[op_index] = op_to_movi(op);
                tcg_opt_gen_movi(gen_args, args[0], 0, nb_temps, nb_globals);
                args += 3;
                gen_args += 2;
                continue;
            }
            break;
        default:
            break;
        }

        /* Simplify expression for "op r, a, 0 => mov r, a" cases */
        switch (op) {
        CASE_OP_32_64(add):
        CASE_OP_32_64(sub):
        CASE_OP_32_64(shl):
        CASE_OP_32_64(shr):
        CASE_OP_32_64(sar):
        CASE_OP_32_64(rotl):
        CASE_OP_32_64(rotr):
        CASE_OP_32_64(or):
        CASE_OP_32_64(xor):
            if (temps[args[1]].state == TCG_TEMP_CONST) {
                /* Proceed with possible constant folding. */
                break;
            }
            if (temps[args[2]].state == TCG_TEMP_CONST
                && temps[args[2]].val == 0) {
                /* If dest already equals (or is a copy of) the source the
                   op is a no-op; otherwise it degenerates to a mov. */
                if ((temps[args[0]].state == TCG_TEMP_COPY
                    && temps[args[0]].val == args[1])
                    || args[0] == args[1]) {
                    gen_opc_buf[op_index] = INDEX_op_nop;
                } else {
                    gen_opc_buf[op_index] = op_to_mov(op);
                    tcg_opt_gen_mov(gen_args, args[0], args[1],
                                    nb_temps, nb_globals);
                    gen_args += 2;
                }
                args += 3;
                continue;
            }
            break;
        default:
            break;
        }

        /* Simplify expression for "op r, a, 0 => movi r, 0" cases */
        switch (op) {
        CASE_OP_32_64(and):
        CASE_OP_32_64(mul):
            if ((temps[args[2]].state == TCG_TEMP_CONST
                && temps[args[2]].val == 0)) {
                gen_opc_buf[op_index] = op_to_movi(op);
                tcg_opt_gen_movi(gen_args, args[0], 0, nb_temps, nb_globals);
                args += 3;
                gen_args += 2;
                continue;
            }
            break;
        default:
            break;
        }

        /* Simplify expression for "op r, a, a => mov r, a" cases */
        switch (op) {
        CASE_OP_32_64(or):
        CASE_OP_32_64(and):
            if (args[1] == args[2]) {
                if (args[1] == args[0]) {
                    gen_opc_buf[op_index] = INDEX_op_nop;
                } else {
                    gen_opc_buf[op_index] = op_to_mov(op);
                    tcg_opt_gen_mov(gen_args, args[0], args[1], nb_temps,
                                    nb_globals);
                    gen_args += 2;
                }
                args += 3;
                continue;
            }
            break;
        default:
            break;
        }

        /* Propagate constants through copy operations and do constant
           folding.  Constants will be substituted to arguments by register
           allocator where needed and possible.  Also detect copies. */
        switch (op) {
        CASE_OP_32_64(mov):
            /* mov to self (directly or via copy class) is a nop. */
            if ((temps[args[1]].state == TCG_TEMP_COPY
                && temps[args[1]].val == args[0])
                || args[0] == args[1]) {
                args += 2;
                gen_opc_buf[op_index] = INDEX_op_nop;
                break;
            }
            if (temps[args[1]].state != TCG_TEMP_CONST) {
                tcg_opt_gen_mov(gen_args, args[0], args[1],
                                nb_temps, nb_globals);
                gen_args += 2;
                args += 2;
                break;
            }
            /* Source argument is constant.  Rewrite the operation and
               let movi case handle it. */
            op = op_to_movi(op);
            gen_opc_buf[op_index] = op;
            args[1] = temps[args[1]].val;
            /* fallthrough */
        CASE_OP_32_64(movi):
            tcg_opt_gen_movi(gen_args, args[0], args[1], nb_temps, nb_globals);
            gen_args += 2;
            args += 2;
            break;
        /* Unary ops: fold to movi when the operand is constant. */
        CASE_OP_32_64(not):
        CASE_OP_32_64(neg):
        CASE_OP_32_64(ext8s):
        CASE_OP_32_64(ext8u):
        CASE_OP_32_64(ext16s):
        CASE_OP_32_64(ext16u):
        case INDEX_op_ext32s_i64:
        case INDEX_op_ext32u_i64:
            if (temps[args[1]].state == TCG_TEMP_CONST) {
                gen_opc_buf[op_index] = op_to_movi(op);
                tmp = do_constant_folding(op, temps[args[1]].val, 0);
                tcg_opt_gen_movi(gen_args, args[0], tmp, nb_temps, nb_globals);
            } else {
                reset_temp(args[0], nb_temps, nb_globals);
                gen_args[0] = args[0];
                gen_args[1] = args[1];
            }
            gen_args += 2;
            args += 2;
            break;
        /* Binary ops: fold to movi when both operands are constant. */
        CASE_OP_32_64(add):
        CASE_OP_32_64(sub):
        CASE_OP_32_64(mul):
        CASE_OP_32_64(or):
        CASE_OP_32_64(and):
        CASE_OP_32_64(xor):
        CASE_OP_32_64(shl):
        CASE_OP_32_64(shr):
        CASE_OP_32_64(sar):
        CASE_OP_32_64(rotl):
        CASE_OP_32_64(rotr):
        CASE_OP_32_64(andc):
        CASE_OP_32_64(orc):
        CASE_OP_32_64(eqv):
        CASE_OP_32_64(nand):
        CASE_OP_32_64(nor):
            if (temps[args[1]].state == TCG_TEMP_CONST
                && temps[args[2]].state == TCG_TEMP_CONST) {
                gen_opc_buf[op_index] = op_to_movi(op);
                tmp = do_constant_folding(op, temps[args[1]].val,
                                          temps[args[2]].val);
                tcg_opt_gen_movi(gen_args, args[0], tmp, nb_temps, nb_globals);
                gen_args += 2;
            } else {
                reset_temp(args[0], nb_temps, nb_globals);
                gen_args[0] = args[0];
                gen_args[1] = args[1];
                gen_args[2] = args[2];
                gen_args += 3;
            }
            args += 3;
            break;
        CASE_OP_32_64(setcond):
            if (temps[args[1]].state == TCG_TEMP_CONST
                && temps[args[2]].state == TCG_TEMP_CONST) {
                gen_opc_buf[op_index] = op_to_movi(op);
                tmp = do_constant_folding_cond(op, temps[args[1]].val,
                                               temps[args[2]].val, args[3]);
                tcg_opt_gen_movi(gen_args, args[0], tmp, nb_temps, nb_globals);
                gen_args += 2;
            } else {
                reset_temp(args[0], nb_temps, nb_globals);
                gen_args[0] = args[0];
                gen_args[1] = args[1];
                gen_args[2] = args[2];
                gen_args[3] = args[3];
                gen_args += 4;
            }
            args += 4;
            break;
        CASE_OP_32_64(brcond):
            if (temps[args[0]].state == TCG_TEMP_CONST
                && temps[args[1]].state == TCG_TEMP_CONST) {
                if (do_constant_folding_cond(op, temps[args[0]].val,
                                             temps[args[1]].val, args[2])) {
                    /* Branch always taken: becomes an unconditional br.
                       The branch ends the basic block, so all tracked
                       state is invalidated. */
                    memset(temps, 0, nb_temps * sizeof(struct tcg_temp_info));
                    gen_opc_buf[op_index] = INDEX_op_br;
                    gen_args[0] = args[3];
                    gen_args += 1;
                } else {
                    /* Branch never taken: drop it. */
                    gen_opc_buf[op_index] = INDEX_op_nop;
                }
            } else {
                /* Conditional branch kept: end of basic block, forget
                   everything. */
                memset(temps, 0, nb_temps * sizeof(struct tcg_temp_info));
                reset_temp(args[0], nb_temps, nb_globals);
                gen_args[0] = args[0];
                gen_args[1] = args[1];
                gen_args[2] = args[2];
                gen_args[3] = args[3];
                gen_args += 4;
            }
            args += 4;
            break;
        CASE_OP_32_64(movcond):
            if (temps[args[1]].state == TCG_TEMP_CONST
                && temps[args[2]].state == TCG_TEMP_CONST) {
                /* Condition decided at translation time: tmp is 1 (take
                   args[3]) or 0 (take args[4]), hence args[4-tmp]. */
                tmp = do_constant_folding_cond(op, temps[args[1]].val,
                                               temps[args[2]].val, args[5]);
                if (args[0] == args[4-tmp]
                    || (temps[args[4-tmp]].state == TCG_TEMP_COPY
                        && temps[args[4-tmp]].val == args[0])) {
                    gen_opc_buf[op_index] = INDEX_op_nop;
                } else if (temps[args[4-tmp]].state == TCG_TEMP_CONST) {
                    gen_opc_buf[op_index] = op_to_movi(op);
                    tcg_opt_gen_movi(gen_args, args[0], temps[args[4-tmp]].val,
                                     nb_temps, nb_globals);
                    gen_args += 2;
                } else {
                    gen_opc_buf[op_index] = op_to_mov(op);
                    tcg_opt_gen_mov(gen_args, args[0], args[4-tmp],
                                    nb_temps, nb_globals);
                    gen_args += 2;
                }
            } else {
                reset_temp(args[0], nb_temps, nb_globals);
                gen_args[0] = args[0];
                gen_args[1] = args[1];
                gen_args[2] = args[2];
                gen_args[3] = args[3];
                gen_args[4] = args[4];
                gen_args[5] = args[5];
                gen_args += 6;
            }
            args += 6;
            break;
        case INDEX_op_call:
            /* args[0] packs the argument counts; the high 16 bits count
               the output args that are reset below — TODO confirm exact
               encoding against tcg.c. */
            nb_call_args = (args[0] >> 16) + (args[0] & 0xffff);
            if (!(args[nb_call_args + 1] & (TCG_CALL_CONST | TCG_CALL_PURE))) {
                /* A call with side effects may clobber any global. */
                for (i = 0; i < nb_globals; i++) {
                    reset_temp(i, nb_temps, nb_globals);
                }
            }
            /* Output args of the call get unknown values. */
            for (i = 0; i < (args[0] >> 16); i++) {
                reset_temp(args[i + 1], nb_temps, nb_globals);
            }
            /* Copy the whole op through unchanged (arg-count word, all
               args, plus trailing words). */
            i = nb_call_args + 3;
            while (i) {
                *gen_args = *args;
                args++;
                gen_args++;
                i--;
            }
            break;
        default:
            /* Default case: we do know nothing about operation so no
               propagation is done.  We trash everything if the operation
               is the end of a basic block, otherwise we only trash the
               output args. */
            if (def->flags & TCG_OPF_BB_END) {
                memset(temps, 0, nb_temps * sizeof(struct tcg_temp_info));
            } else {
                for (i = 0; i < def->nb_oargs; i++) {
                    reset_temp(args[i], nb_temps, nb_globals);
                }
            }
            for (i = 0; i < def->nb_args; i++) {
                gen_args[i] = args[i];
            }
            args += def->nb_args;
            gen_args += def->nb_args;
            break;
        }
    }

    return gen_args;
}
698
699TCGArg *tcg_optimize(TCGContext *s, uint16_t *tcg_opc_ptr,
700 TCGArg *args, TCGOpDef *tcg_op_defs)
701{
702 TCGArg *res;
703 res = tcg_constant_folding(s, tcg_opc_ptr, args, tcg_op_defs);
704 return res;
705}