/*
 * Optimizations for Tiny Code Generator for QEMU
 *
 * Copyright (c) 2010 Samsung Electronics.
 * Contributed by Kirill Batuzov <batuzovk@ispras.ru>
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

#include "config.h"

#include <stdlib.h>
#include <stdio.h>

#include "qemu-common.h"
#include "tcg-op.h"

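/* Expand to the pair of case labels for the 32-bit and 64-bit variants of a
   TCG opcode, e.g. CASE_OP_32_64(add) covers INDEX_op_add_i32 and
   INDEX_op_add_i64. */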
#define CASE_OP_32_64(x)                        \
        glue(glue(case INDEX_op_, x), _i32):    \
        glue(glue(case INDEX_op_, x), _i64)

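/* What this pass knows about a temp:
   TCG_TEMP_UNDEF    - initial state, nothing known yet,
   TCG_TEMP_CONST    - the temp holds the constant kept in VAL,
   TCG_TEMP_COPY     - the temp is a copy; VAL names the class representative,
   TCG_TEMP_HAS_COPY - the temp is the representative of a copy class,
   TCG_TEMP_ANY      - the temp holds a run-time value we no longer track. */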
typedef enum {
    TCG_TEMP_UNDEF = 0,
    TCG_TEMP_CONST,
    TCG_TEMP_COPY,
    TCG_TEMP_HAS_COPY,
    TCG_TEMP_ANY
} tcg_temp_state;

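/* NEXT_COPY and PREV_COPY link the temps of one equivalence class into a
   circular doubly-linked list; VAL holds either the constant value or the
   index of the class representative, depending on STATE. */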
struct tcg_temp_info {
    tcg_temp_state state;
    uint16_t prev_copy;
    uint16_t next_copy;
    tcg_target_ulong val;
};

static struct tcg_temp_info temps[TCG_MAX_TEMPS];

/* Reset TEMP's state to TCG_TEMP_ANY.  If TEMP was the representative of a
   class of equivalent temps, a new representative is chosen from that
   class. */
static void reset_temp(TCGArg temp, int nb_temps, int nb_globals)
{
    int i;
    TCGArg new_base = (TCGArg)-1;
    if (temps[temp].state == TCG_TEMP_HAS_COPY) {
        for (i = temps[temp].next_copy; i != temp; i = temps[i].next_copy) {
            if (i >= nb_globals) {
                temps[i].state = TCG_TEMP_HAS_COPY;
                new_base = i;
                break;
            }
        }
        for (i = temps[temp].next_copy; i != temp; i = temps[i].next_copy) {
            if (new_base == (TCGArg)-1) {
                temps[i].state = TCG_TEMP_ANY;
            } else {
                temps[i].val = new_base;
            }
        }
        temps[temps[temp].next_copy].prev_copy = temps[temp].prev_copy;
        temps[temps[temp].prev_copy].next_copy = temps[temp].next_copy;
    } else if (temps[temp].state == TCG_TEMP_COPY) {
        temps[temps[temp].next_copy].prev_copy = temps[temp].prev_copy;
        temps[temps[temp].prev_copy].next_copy = temps[temp].next_copy;
        new_base = temps[temp].val;
    }
    temps[temp].state = TCG_TEMP_ANY;
    if (new_base != (TCGArg)-1 && temps[new_base].next_copy == new_base) {
        temps[new_base].state = TCG_TEMP_ANY;
    }
}

static int op_bits(TCGOpcode op)
{
    const TCGOpDef *def = &tcg_op_defs[op];
    return def->flags & TCG_OPF_64BIT ? 64 : 32;
}

static TCGOpcode op_to_movi(TCGOpcode op)
{
    switch (op_bits(op)) {
    case 32:
        return INDEX_op_movi_i32;
    case 64:
        return INDEX_op_movi_i64;
    default:
        fprintf(stderr, "op_to_movi: unexpected return value of "
                "function op_bits.\n");
        tcg_abort();
    }
}

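/* Generate "mov DST, SRC" into GEN_ARGS and, when the pair can be tracked,
   record DST as a copy of SRC so later uses of DST can be replaced. */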
static void tcg_opt_gen_mov(TCGContext *s, TCGArg *gen_args, TCGArg dst,
                            TCGArg src, int nb_temps, int nb_globals)
{
    reset_temp(dst, nb_temps, nb_globals);
    assert(temps[src].state != TCG_TEMP_COPY);
    /* Don't record the copy if one of the temps is a global, or if one is
       a local and the other a plain register. */
    if (src >= nb_globals && dst >= nb_globals &&
        tcg_arg_is_local(s, src) == tcg_arg_is_local(s, dst)) {
        assert(temps[src].state != TCG_TEMP_CONST);
        if (temps[src].state != TCG_TEMP_HAS_COPY) {
            temps[src].state = TCG_TEMP_HAS_COPY;
            temps[src].next_copy = src;
            temps[src].prev_copy = src;
        }
        temps[dst].state = TCG_TEMP_COPY;
        temps[dst].val = src;
        temps[dst].next_copy = temps[src].next_copy;
        temps[dst].prev_copy = src;
        temps[temps[dst].next_copy].prev_copy = dst;
        temps[src].next_copy = dst;
    }
    gen_args[0] = dst;
    gen_args[1] = src;
}

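/* Generate "movi DST, VAL" into GEN_ARGS and remember that DST now holds
   the constant VAL. */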
static void tcg_opt_gen_movi(TCGArg *gen_args, TCGArg dst, TCGArg val,
                             int nb_temps, int nb_globals)
{
    reset_temp(dst, nb_temps, nb_globals);
    temps[dst].state = TCG_TEMP_CONST;
    temps[dst].val = val;
    gen_args[0] = dst;
    gen_args[1] = val;
}

static TCGOpcode op_to_mov(TCGOpcode op)
{
    switch (op_bits(op)) {
    case 32:
        return INDEX_op_mov_i32;
    case 64:
        return INDEX_op_mov_i64;
    default:
        fprintf(stderr, "op_to_mov: unexpected return value of "
                "function op_bits.\n");
        tcg_abort();
    }
}

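/* Compute the raw result of OP applied to the constant operands X and Y.
   The caller is responsible for truncating the result of 32-bit ops. */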
static TCGArg do_constant_folding_2(TCGOpcode op, TCGArg x, TCGArg y)
{
    switch (op) {
    CASE_OP_32_64(add):
        return x + y;

    CASE_OP_32_64(sub):
        return x - y;

    CASE_OP_32_64(mul):
        return x * y;

    CASE_OP_32_64(and):
        return x & y;

    CASE_OP_32_64(or):
        return x | y;

    CASE_OP_32_64(xor):
        return x ^ y;

    case INDEX_op_shl_i32:
        return (uint32_t)x << (uint32_t)y;

    case INDEX_op_shl_i64:
        return (uint64_t)x << (uint64_t)y;

    case INDEX_op_shr_i32:
        return (uint32_t)x >> (uint32_t)y;

    case INDEX_op_shr_i64:
        return (uint64_t)x >> (uint64_t)y;

    case INDEX_op_sar_i32:
        return (int32_t)x >> (int32_t)y;

    case INDEX_op_sar_i64:
        return (int64_t)x >> (int64_t)y;

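    /* Note: the rotate cases below assume a nonzero rotation count; a count
       of zero would shift by the full word width, which is undefined
       behaviour in C. */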
    case INDEX_op_rotr_i32:
        x = ((uint32_t)x << (32 - y)) | ((uint32_t)x >> y);
        return x;

    case INDEX_op_rotr_i64:
        x = ((uint64_t)x << (64 - y)) | ((uint64_t)x >> y);
        return x;

    case INDEX_op_rotl_i32:
        x = ((uint32_t)x << y) | ((uint32_t)x >> (32 - y));
        return x;

    case INDEX_op_rotl_i64:
        x = ((uint64_t)x << y) | ((uint64_t)x >> (64 - y));
        return x;

    CASE_OP_32_64(not):
        return ~x;

    CASE_OP_32_64(neg):
        return -x;

    CASE_OP_32_64(andc):
        return x & ~y;

    CASE_OP_32_64(orc):
        return x | ~y;

    CASE_OP_32_64(eqv):
        return ~(x ^ y);

    CASE_OP_32_64(nand):
        return ~(x & y);

    CASE_OP_32_64(nor):
        return ~(x | y);

    CASE_OP_32_64(ext8s):
        return (int8_t)x;

    CASE_OP_32_64(ext16s):
        return (int16_t)x;

    CASE_OP_32_64(ext8u):
        return (uint8_t)x;

    CASE_OP_32_64(ext16u):
        return (uint16_t)x;

    case INDEX_op_ext32s_i64:
        return (int32_t)x;

    case INDEX_op_ext32u_i64:
        return (uint32_t)x;

    default:
        fprintf(stderr,
                "Unrecognized operation %d in do_constant_folding.\n", op);
        tcg_abort();
    }
}

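/* Fold OP(X, Y) and truncate the result to 32 bits when OP is a 32-bit
   opcode. */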
static TCGArg do_constant_folding(TCGOpcode op, TCGArg x, TCGArg y)
{
    TCGArg res = do_constant_folding_2(op, x, y);
    if (op_bits(op) == 32) {
        res &= 0xffffffff;
    }
    return res;
}

/* Propagate constants and copies, fold constant expressions. */
static TCGArg *tcg_constant_folding(TCGContext *s, uint16_t *tcg_opc_ptr,
                                    TCGArg *args, TCGOpDef *tcg_op_defs)
{
    int i, nb_ops, op_index, nb_temps, nb_globals, nb_call_args;
    TCGOpcode op;
    const TCGOpDef *def;
    TCGArg *gen_args;
    TCGArg tmp;
    /* The TEMPS array has an element for each temp.
       If a temp holds a constant, its value is kept in that element's VAL
       field.
       If a temp is a copy of other temps, the representative of its
       equivalence class is kept in VAL.
       If a temp is neither a copy nor a constant, the corresponding element
       is unused. */

    nb_temps = s->nb_temps;
    nb_globals = s->nb_globals;
    memset(temps, 0, nb_temps * sizeof(struct tcg_temp_info));

    nb_ops = tcg_opc_ptr - gen_opc_buf;
    gen_args = args;
    for (op_index = 0; op_index < nb_ops; op_index++) {
        op = gen_opc_buf[op_index];
        def = &tcg_op_defs[op];
        /* Do copy propagation */
        if (!(def->flags & (TCG_OPF_CALL_CLOBBER | TCG_OPF_SIDE_EFFECTS))) {
            assert(op != INDEX_op_call);
            for (i = def->nb_oargs; i < def->nb_oargs + def->nb_iargs; i++) {
                if (temps[args[i]].state == TCG_TEMP_COPY) {
                    args[i] = temps[args[i]].val;
                }
            }
        }

        /* For commutative operations, make the constant the second argument. */
        switch (op) {
        CASE_OP_32_64(add):
        CASE_OP_32_64(mul):
        CASE_OP_32_64(and):
        CASE_OP_32_64(or):
        CASE_OP_32_64(xor):
        CASE_OP_32_64(eqv):
        CASE_OP_32_64(nand):
        CASE_OP_32_64(nor):
            if (temps[args[1]].state == TCG_TEMP_CONST) {
                tmp = args[1];
                args[1] = args[2];
                args[2] = tmp;
            }
            break;
        default:
            break;
        }

        /* Simplify expression for "op r, a, 0 => mov r, a" cases */
        switch (op) {
        CASE_OP_32_64(add):
        CASE_OP_32_64(sub):
        CASE_OP_32_64(shl):
        CASE_OP_32_64(shr):
        CASE_OP_32_64(sar):
        CASE_OP_32_64(rotl):
        CASE_OP_32_64(rotr):
        CASE_OP_32_64(or):
        CASE_OP_32_64(xor):
            if (temps[args[1]].state == TCG_TEMP_CONST) {
                /* Proceed with possible constant folding. */
                break;
            }
            if (temps[args[2]].state == TCG_TEMP_CONST
                && temps[args[2]].val == 0) {
                if ((temps[args[0]].state == TCG_TEMP_COPY
                    && temps[args[0]].val == args[1])
                    || args[0] == args[1]) {
                    args += 3;
                    gen_opc_buf[op_index] = INDEX_op_nop;
                } else {
                    gen_opc_buf[op_index] = op_to_mov(op);
                    tcg_opt_gen_mov(s, gen_args, args[0], args[1],
                                    nb_temps, nb_globals);
                    gen_args += 2;
                    args += 3;
                }
                continue;
            }
            break;
        default:
            break;
        }

        /* Simplify expression for "op r, a, 0 => movi r, 0" cases */
        switch (op) {
        CASE_OP_32_64(and):
        CASE_OP_32_64(mul):
            if ((temps[args[2]].state == TCG_TEMP_CONST
                && temps[args[2]].val == 0)) {
                gen_opc_buf[op_index] = op_to_movi(op);
                tcg_opt_gen_movi(gen_args, args[0], 0, nb_temps, nb_globals);
                args += 3;
                gen_args += 2;
                continue;
            }
            break;
        default:
            break;
        }

        /* Simplify expression for "op r, a, a => mov r, a" cases */
        switch (op) {
        CASE_OP_32_64(or):
        CASE_OP_32_64(and):
            if (args[1] == args[2]) {
                if (args[1] == args[0]) {
                    args += 3;
                    gen_opc_buf[op_index] = INDEX_op_nop;
                } else {
                    gen_opc_buf[op_index] = op_to_mov(op);
                    tcg_opt_gen_mov(s, gen_args, args[0], args[1], nb_temps,
                                    nb_globals);
                    gen_args += 2;
                    args += 3;
                }
                continue;
            }
            break;
        default:
            break;
        }

        /* Propagate constants through copy operations and do constant
           folding.  Constants will be substituted for arguments by the
           register allocator where needed and possible.  Also detect
           copies. */
        switch (op) {
        CASE_OP_32_64(mov):
            if ((temps[args[1]].state == TCG_TEMP_COPY
                && temps[args[1]].val == args[0])
                || args[0] == args[1]) {
                args += 2;
                gen_opc_buf[op_index] = INDEX_op_nop;
                break;
            }
            if (temps[args[1]].state != TCG_TEMP_CONST) {
                tcg_opt_gen_mov(s, gen_args, args[0], args[1],
                                nb_temps, nb_globals);
                gen_args += 2;
                args += 2;
                break;
            }
            /* Source argument is constant.  Rewrite the operation and
               let movi case handle it. */
            op = op_to_movi(op);
            gen_opc_buf[op_index] = op;
            args[1] = temps[args[1]].val;
            /* fallthrough */
        CASE_OP_32_64(movi):
            tcg_opt_gen_movi(gen_args, args[0], args[1], nb_temps, nb_globals);
            gen_args += 2;
            args += 2;
            break;
        CASE_OP_32_64(not):
        CASE_OP_32_64(neg):
        CASE_OP_32_64(ext8s):
        CASE_OP_32_64(ext8u):
        CASE_OP_32_64(ext16s):
        CASE_OP_32_64(ext16u):
        case INDEX_op_ext32s_i64:
        case INDEX_op_ext32u_i64:
            if (temps[args[1]].state == TCG_TEMP_CONST) {
                gen_opc_buf[op_index] = op_to_movi(op);
                tmp = do_constant_folding(op, temps[args[1]].val, 0);
                tcg_opt_gen_movi(gen_args, args[0], tmp, nb_temps, nb_globals);
                gen_args += 2;
                args += 2;
                break;
            } else {
                reset_temp(args[0], nb_temps, nb_globals);
                gen_args[0] = args[0];
                gen_args[1] = args[1];
                gen_args += 2;
                args += 2;
                break;
            }
        CASE_OP_32_64(add):
        CASE_OP_32_64(sub):
        CASE_OP_32_64(mul):
        CASE_OP_32_64(or):
        CASE_OP_32_64(and):
        CASE_OP_32_64(xor):
        CASE_OP_32_64(shl):
        CASE_OP_32_64(shr):
        CASE_OP_32_64(sar):
        CASE_OP_32_64(rotl):
        CASE_OP_32_64(rotr):
        CASE_OP_32_64(andc):
        CASE_OP_32_64(orc):
        CASE_OP_32_64(eqv):
        CASE_OP_32_64(nand):
        CASE_OP_32_64(nor):
            if (temps[args[1]].state == TCG_TEMP_CONST
                && temps[args[2]].state == TCG_TEMP_CONST) {
                gen_opc_buf[op_index] = op_to_movi(op);
                tmp = do_constant_folding(op, temps[args[1]].val,
                                          temps[args[2]].val);
                tcg_opt_gen_movi(gen_args, args[0], tmp, nb_temps, nb_globals);
                gen_args += 2;
                args += 3;
                break;
            } else {
                reset_temp(args[0], nb_temps, nb_globals);
                gen_args[0] = args[0];
                gen_args[1] = args[1];
                gen_args[2] = args[2];
                gen_args += 3;
                args += 3;
                break;
            }
        case INDEX_op_call:
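            /* ARGS[0] packs the call's argument counts: number of output
               arguments in the high 16 bits, number of input arguments in
               the low 16 bits. */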
            nb_call_args = (args[0] >> 16) + (args[0] & 0xffff);
            if (!(args[nb_call_args + 1] & (TCG_CALL_CONST | TCG_CALL_PURE))) {
                for (i = 0; i < nb_globals; i++) {
                    reset_temp(i, nb_temps, nb_globals);
                }
            }
            for (i = 0; i < (args[0] >> 16); i++) {
                reset_temp(args[i + 1], nb_temps, nb_globals);
            }
            i = nb_call_args + 3;
            while (i) {
                *gen_args = *args;
                args++;
                gen_args++;
                i--;
            }
            break;
        case INDEX_op_set_label:
        case INDEX_op_jmp:
        case INDEX_op_br:
        CASE_OP_32_64(brcond):
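            /* Control flow may leave or re-enter the block here, so drop
               everything we know about the temps. */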
            memset(temps, 0, nb_temps * sizeof(struct tcg_temp_info));
            for (i = 0; i < def->nb_args; i++) {
                *gen_args = *args;
                args++;
                gen_args++;
            }
            break;
        default:
            /* Default case: we know nothing about the operation, so no
               propagation is done.  We only trash the output args. */
            for (i = 0; i < def->nb_oargs; i++) {
                reset_temp(args[i], nb_temps, nb_globals);
            }
            for (i = 0; i < def->nb_args; i++) {
                gen_args[i] = args[i];
            }
            args += def->nb_args;
            gen_args += def->nb_args;
            break;
        }
    }

    return gen_args;
}

TCGArg *tcg_optimize(TCGContext *s, uint16_t *tcg_opc_ptr,
                     TCGArg *args, TCGOpDef *tcg_op_defs)
{
    TCGArg *res;
    res = tcg_constant_folding(s, tcg_opc_ptr, args, tcg_op_defs);
    return res;
}