/* gcc/tree-ssa-loop-im.c -- annotated source from the CbC_gcc repository
   (Mercurial changeset 47:3bfb6c00c1e0, "update it from 4.4.2 to 4.4.3.",
   kent <kent@cr.ie.u-ryukyu.ac.jp>, Sun, 07 Feb 2010 17:44:34 +0900;
   parent a06113de4d67, child 77e2b8dfacca).  */
/* Loop invariant motion.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008 Free Software
   Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "timevar.h"
#include "cfgloop.h"
#include "domwalk.h"
#include "params.h"
#include "tree-pass.h"
#include "flags.h"
#include "real.h"
#include "hashtab.h"
#include "tree-affine.h"
#include "pointer-set.h"
#include "tree-ssa-propagate.h"

/* TODO:  Support for predicated code motion.  I.e.

   while (1)
     {
       if (cond)
         {
           a = inv;
           something;
         }
     }

   where COND and INV are invariants, but evaluating INV may trap or be
   invalid for some other reason if !COND.  This may be transformed to

   if (cond)
     a = inv;
   while (1)
     {
       if (cond)
         something;
     }  */

/* A type for the list of statements that have to be moved in order to be able
   to hoist an invariant computation.  */

struct depend
{
  gimple stmt;
  struct depend *next;
};

/* The auxiliary data kept for each statement.  */

struct lim_aux_data
{
  struct loop *max_loop;        /* The outermost loop in which the statement
                                   is invariant.  */

  struct loop *tgt_loop;        /* The loop out of which we want to move the
                                   invariant.  */

  struct loop *always_executed_in;
                                /* The outermost loop for which we are sure
                                   the statement is executed if the loop
                                   is entered.  */

  unsigned cost;                /* Cost of the computation performed by the
                                   statement.  */

  struct depend *depends;       /* List of statements that must also be hoisted
                                   out of the loop when this statement is
                                   hoisted; i.e. those that define the operands
                                   of the statement and are inside the
                                   MAX_LOOP loop.  */
};

/* Maps statements to their lim_aux_data.  */

static struct pointer_map_t *lim_aux_data_map;

/* Description of a memory reference location.  */

typedef struct mem_ref_loc
{
  tree *ref;                    /* The reference itself.  */
  gimple stmt;                  /* The statement in which it occurs.  */
} *mem_ref_loc_p;

DEF_VEC_P(mem_ref_loc_p);
DEF_VEC_ALLOC_P(mem_ref_loc_p, heap);

/* The list of memory reference locations in a loop.  */

typedef struct mem_ref_locs
{
  VEC (mem_ref_loc_p, heap) *locs;
} *mem_ref_locs_p;

DEF_VEC_P(mem_ref_locs_p);
DEF_VEC_ALLOC_P(mem_ref_locs_p, heap);

/* Description of a memory reference.  */

typedef struct mem_ref
{
  tree mem;                     /* The memory itself.  */
  unsigned id;                  /* ID assigned to the memory reference
                                   (its index in memory_accesses.refs_list).  */
  hashval_t hash;               /* Its hash value.  */
  bitmap stored;                /* The set of loops in which this memory
                                   location is stored to.  */
  VEC (mem_ref_locs_p, heap) *accesses_in_loop;
                                /* The locations of the accesses.  Vector
                                   indexed by the loop number.  */
  bitmap vops;                  /* Vops corresponding to this memory
                                   location.  */

  /* The following sets are computed on demand.  We keep both the set and
     its complement, so that we know whether the information was
     already computed or not.  */
  bitmap indep_loop;            /* The set of loops in which the memory
                                   reference is independent, meaning:
                                   If it is stored in the loop, this store
                                   is independent of all other loads and
                                   stores.
                                   If it is only loaded, then it is independent
                                   of all stores in the loop.  */
  bitmap dep_loop;              /* The complement of INDEP_LOOP.  */

  bitmap indep_ref;             /* The set of memory references that this
                                   reference is independent of.  */
  bitmap dep_ref;               /* The complement of INDEP_REF.  */
} *mem_ref_p;

DEF_VEC_P(mem_ref_p);
DEF_VEC_ALLOC_P(mem_ref_p, heap);

DEF_VEC_P(bitmap);
DEF_VEC_ALLOC_P(bitmap, heap);

DEF_VEC_P(htab_t);
DEF_VEC_ALLOC_P(htab_t, heap);

/* Description of memory accesses in loops.  */

static struct
{
  /* The hash table of memory references accessed in loops.  */
  htab_t refs;

  /* The list of memory references.  */
  VEC (mem_ref_p, heap) *refs_list;

  /* The set of memory references accessed in each loop.  */
  VEC (bitmap, heap) *refs_in_loop;

  /* The set of memory references accessed in each loop, including
     subloops.  */
  VEC (bitmap, heap) *all_refs_in_loop;

  /* The set of virtual operands clobbered in a given loop.  */
  VEC (bitmap, heap) *clobbered_vops;

  /* Map from the pair (loop, virtual operand) to the set of refs that
     touch the virtual operand in the loop.  */
  VEC (htab_t, heap) *vop_ref_map;

  /* Cache for expanding memory addresses.  */
  struct pointer_map_t *ttae_cache;
} memory_accesses;

static bool ref_indep_loop_p (struct loop *, mem_ref_p);

/* Minimum cost of an expensive expression.  */
#define LIM_EXPENSIVE ((unsigned) PARAM_VALUE (PARAM_LIM_EXPENSIVE))

/* The outermost loop for which execution of the header guarantees that the
   block will be executed.  */
#define ALWAYS_EXECUTED_IN(BB) ((struct loop *) (BB)->aux)

static struct lim_aux_data *
init_lim_data (gimple stmt)
{
  void **p = pointer_map_insert (lim_aux_data_map, stmt);

  *p = XCNEW (struct lim_aux_data);
  return (struct lim_aux_data *) *p;
}

static struct lim_aux_data *
get_lim_data (gimple stmt)
{
  void **p = pointer_map_contains (lim_aux_data_map, stmt);
  if (!p)
    return NULL;

  return (struct lim_aux_data *) *p;
}

/* Releases the memory occupied by DATA.  */

static void
free_lim_aux_data (struct lim_aux_data *data)
{
  struct depend *dep, *next;

  for (dep = data->depends; dep; dep = next)
    {
      next = dep->next;
      free (dep);
    }
  free (data);
}

static void
clear_lim_data (gimple stmt)
{
  void **p = pointer_map_contains (lim_aux_data_map, stmt);
  if (!p)
    return;

  free_lim_aux_data ((struct lim_aux_data *) *p);
  *p = NULL;
}

/* Calls CBCK for each index in memory reference ADDR_P.  There are two
   kinds of situations handled; in each of these cases, the memory reference
   and DATA are passed to the callback:

   Access to an array: ARRAY_{RANGE_}REF (base, index).  In this case we also
   pass the pointer to the index to the callback.

   Pointer dereference: INDIRECT_REF (addr).  In this case we also pass the
   pointer to addr to the callback.

   If the callback returns false, the whole search stops and false is returned.
   Otherwise the function returns true after traversing through the whole
   reference *ADDR_P.  */

bool
for_each_index (tree *addr_p, bool (*cbck) (tree, tree *, void *), void *data)
{
  tree *nxt, *idx;

  for (; ; addr_p = nxt)
    {
      switch (TREE_CODE (*addr_p))
        {
        case SSA_NAME:
          return cbck (*addr_p, addr_p, data);

        case MISALIGNED_INDIRECT_REF:
        case ALIGN_INDIRECT_REF:
        case INDIRECT_REF:
          nxt = &TREE_OPERAND (*addr_p, 0);
          return cbck (*addr_p, nxt, data);

        case BIT_FIELD_REF:
        case VIEW_CONVERT_EXPR:
        case REALPART_EXPR:
        case IMAGPART_EXPR:
          nxt = &TREE_OPERAND (*addr_p, 0);
          break;

        case COMPONENT_REF:
          /* If the component has varying offset, it behaves like an index
             as well.  */
          idx = &TREE_OPERAND (*addr_p, 2);
          if (*idx
              && !cbck (*addr_p, idx, data))
            return false;

          nxt = &TREE_OPERAND (*addr_p, 0);
          break;

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          nxt = &TREE_OPERAND (*addr_p, 0);
          if (!cbck (*addr_p, &TREE_OPERAND (*addr_p, 1), data))
            return false;
          break;

        case VAR_DECL:
        case PARM_DECL:
        case STRING_CST:
        case RESULT_DECL:
        case VECTOR_CST:
        case COMPLEX_CST:
        case INTEGER_CST:
        case REAL_CST:
        case FIXED_CST:
        case CONSTRUCTOR:
          return true;

        case ADDR_EXPR:
          gcc_assert (is_gimple_min_invariant (*addr_p));
          return true;

        case TARGET_MEM_REF:
          idx = &TMR_BASE (*addr_p);
          if (*idx
              && !cbck (*addr_p, idx, data))
            return false;
          idx = &TMR_INDEX (*addr_p);
          if (*idx
              && !cbck (*addr_p, idx, data))
            return false;
          return true;

        default:
          gcc_unreachable ();
        }
    }
}
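
/* Illustrative sketch (not from the original sources): a callback that
   collects every index of a reference could be written as

     static bool
     collect_index (tree base ATTRIBUTE_UNUSED, tree *idx, void *data)
     {
       VEC_safe_push (tree, heap, *(VEC (tree, heap) **) data, *idx);
       return true;
     }

   and invoked as for_each_index (&ref, collect_index, &indices), with
   INDICES declared as VEC (tree, heap) *.  COLLECT_INDEX is a hypothetical
   name used only for this sketch; may_move_till and force_move_till below
   are the real callbacks used by this pass.  */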

/* If it is possible to hoist the statement STMT unconditionally,
   returns MOVE_POSSIBLE.
   If it is possible to hoist the statement STMT, but we must avoid making
   it executed if it would not be executed in the original program (e.g.
   because it may trap), return MOVE_PRESERVE_EXECUTION.
   Otherwise return MOVE_IMPOSSIBLE.  */

enum move_pos
movement_possibility (gimple stmt)
{
  tree lhs;
  enum move_pos ret = MOVE_POSSIBLE;

  if (flag_unswitch_loops
      && gimple_code (stmt) == GIMPLE_COND)
    {
      /* If we perform unswitching, force the operands of the invariant
         condition to be moved out of the loop.  */
      return MOVE_POSSIBLE;
    }

  if (gimple_get_lhs (stmt) == NULL_TREE)
    return MOVE_IMPOSSIBLE;

  if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_VIRTUAL_DEFS))
    return MOVE_IMPOSSIBLE;

  if (stmt_ends_bb_p (stmt)
      || gimple_has_volatile_ops (stmt)
      || gimple_has_side_effects (stmt)
      || stmt_could_throw_p (stmt))
    return MOVE_IMPOSSIBLE;

  if (is_gimple_call (stmt))
    {
      /* While a pure or const call is guaranteed to have no side effects, we
         cannot move it arbitrarily.  Consider code like

         char *s = something ();

         while (1)
           {
             if (s)
               t = strlen (s);
             else
               t = 0;
           }

         Here the strlen call cannot be moved out of the loop, even though
         s is invariant.  In addition to possibly creating a call with
         invalid arguments, moving out a function call that is not executed
         may cause performance regressions in case the call is costly and
         not executed at all.  */
      ret = MOVE_PRESERVE_EXECUTION;
      lhs = gimple_call_lhs (stmt);
    }
  else if (is_gimple_assign (stmt))
    lhs = gimple_assign_lhs (stmt);
  else
    return MOVE_IMPOSSIBLE;

  if (TREE_CODE (lhs) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
    return MOVE_IMPOSSIBLE;

  if (TREE_CODE (lhs) != SSA_NAME
      || gimple_could_trap_p (stmt))
    return MOVE_PRESERVE_EXECUTION;

  return ret;
}
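
/* Illustrative classification (not from the original sources): in a loop
   body containing

     t_1 = a_2 * b_3;   MOVE_POSSIBLE -- pure computation into an SSA name
     t_4 = c_5 / d_6;   MOVE_PRESERVE_EXECUTION -- the division could trap
                        (gimple_could_trap_p), so execution must be preserved
     *p_7 = t_1;        MOVE_IMPOSSIBLE -- the store has a virtual definition

   a MOVE_PRESERVE_EXECUTION statement may still be hoisted, but only to a
   loop whose entry guarantees its execution (see ALWAYS_EXECUTED_IN).  */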

/* Suppose that operand DEF is used inside the LOOP.  Returns the outermost
   loop to which we could move the expression using DEF if it did not have
   other operands, i.e. the outermost loop enclosing LOOP in which the value
   of DEF is invariant.  */

static struct loop *
outermost_invariant_loop (tree def, struct loop *loop)
{
  gimple def_stmt;
  basic_block def_bb;
  struct loop *max_loop;
  struct lim_aux_data *lim_data;

  if (!def)
    return superloop_at_depth (loop, 1);

  if (TREE_CODE (def) != SSA_NAME)
    {
      gcc_assert (is_gimple_min_invariant (def));
      return superloop_at_depth (loop, 1);
    }

  def_stmt = SSA_NAME_DEF_STMT (def);
  def_bb = gimple_bb (def_stmt);
  if (!def_bb)
    return superloop_at_depth (loop, 1);

  max_loop = find_common_loop (loop, def_bb->loop_father);

  lim_data = get_lim_data (def_stmt);
  if (lim_data != NULL && lim_data->max_loop != NULL)
    max_loop = find_common_loop (max_loop,
                                 loop_outer (lim_data->max_loop));
  if (max_loop == loop)
    return NULL;
  max_loop = superloop_at_depth (loop, loop_depth (max_loop) + 1);

  return max_loop;
}

/* DATA is a structure containing information associated with a statement
   inside LOOP.  DEF is one of the operands of this statement.

   Find the outermost loop enclosing LOOP in which the value of DEF is
   invariant and record this in the DATA->max_loop field.  If DEF itself is
   defined inside this loop as well (i.e. we need to hoist it out of the loop
   if we want to hoist the statement represented by DATA), record the
   statement that defines DEF in the DATA->depends list.  Additionally, if
   ADD_COST is true, add the cost of the computation of DEF to DATA->cost.

   If DEF is not invariant in LOOP, return false.  Otherwise return true.  */

static bool
add_dependency (tree def, struct lim_aux_data *data, struct loop *loop,
                bool add_cost)
{
  gimple def_stmt = SSA_NAME_DEF_STMT (def);
  basic_block def_bb = gimple_bb (def_stmt);
  struct loop *max_loop;
  struct depend *dep;
  struct lim_aux_data *def_data;

  if (!def_bb)
    return true;

  max_loop = outermost_invariant_loop (def, loop);
  if (!max_loop)
    return false;

  if (flow_loop_nested_p (data->max_loop, max_loop))
    data->max_loop = max_loop;

  def_data = get_lim_data (def_stmt);
  if (!def_data)
    return true;

  if (add_cost
      /* Only add the cost if the statement defining DEF is inside LOOP,
         i.e. if it is likely that by moving the invariants dependent
         on it, we will be able to avoid creating a new register for
         it (since it will be only used in these dependent invariants).  */
      && def_bb->loop_father == loop)
    data->cost += def_data->cost;

  dep = XNEW (struct depend);
  dep->stmt = def_stmt;
  dep->next = data->depends;
  data->depends = dep;

  return true;
}
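
/* Illustrative example (not from the original sources): if LOOP contains

     t_1 = n_2 + 1;
     a_3 = t_1 * 4;

   with n_2 defined outside of LOOP, then processing a_3 calls
   add_dependency on t_1; the definition of t_1 is recorded in
   DATA->depends and, being in the same loop, its cost is added to
   DATA->cost, so set_level later hoists both statements together.  */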

/* Returns an estimate of the cost of statement STMT.  TODO -- the values here
   are just ad-hoc constants.  The estimates should be based on target-specific
   values.  */

static unsigned
stmt_cost (gimple stmt)
{
  tree fndecl;
  unsigned cost = 1;

  /* Always try to create possibilities for unswitching.  */
  if (gimple_code (stmt) == GIMPLE_COND)
    return LIM_EXPENSIVE;

  /* Hoisting memory references out should almost surely be a win.  */
  if (gimple_references_memory_p (stmt))
    cost += 20;

  if (is_gimple_call (stmt))
    {
      /* We should be hoisting calls if possible.  */

      /* Unless the call is a builtin_constant_p; this always folds to a
         constant, so moving it is useless.  */
      fndecl = gimple_call_fndecl (stmt);
      if (fndecl
          && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
          && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CONSTANT_P)
        return 0;

      return cost + 20;
    }

  if (gimple_code (stmt) != GIMPLE_ASSIGN)
    return cost;

  switch (gimple_assign_rhs_code (stmt))
    {
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
    case RDIV_EXPR:
      /* Division and multiplication are usually expensive.  */
      cost += 20;
      break;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
      cost += 20;
      break;

    default:
      break;
    }

  return cost;
}
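
/* Illustrative example (not from the original sources): for
   "a_1 = b_2 * c_3" the estimate is 1 + 20 = 21, which reaches
   LIM_EXPENSIVE (whose default is assumed to be 20 here), so
   determine_invariantness_stmt considers the statement profitable to
   hoist on its own; a plain copy "a_1 = b_2" costs just 1 and is only
   moved when an expensive statement depending on it is hoisted.  */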

/* Finds the outermost loop between OUTER and LOOP in which the memory
   reference REF is independent.  If REF is not independent in LOOP, NULL is
   returned instead.  */

static struct loop *
outermost_indep_loop (struct loop *outer, struct loop *loop, mem_ref_p ref)
{
  struct loop *aloop;

  if (bitmap_bit_p (ref->stored, loop->num))
    return NULL;

  for (aloop = outer;
       aloop != loop;
       aloop = superloop_at_depth (loop, loop_depth (aloop) + 1))
    if (!bitmap_bit_p (ref->stored, aloop->num)
        && ref_indep_loop_p (aloop, ref))
      return aloop;

  if (ref_indep_loop_p (loop, ref))
    return loop;
  else
    return NULL;
}

/* If there is a simple load or store to a memory reference in STMT, returns
   the location of the memory reference, and sets IS_STORE according to whether
   it is a store or load.  Otherwise, returns NULL.  */

static tree *
simple_mem_ref_in_stmt (gimple stmt, bool *is_store)
{
  tree *lhs;
  enum tree_code code;

  /* Recognize MEM = (SSA_NAME | invariant) and SSA_NAME = MEM patterns.  */
  if (gimple_code (stmt) != GIMPLE_ASSIGN)
    return NULL;

  code = gimple_assign_rhs_code (stmt);

  lhs = gimple_assign_lhs_ptr (stmt);

  if (TREE_CODE (*lhs) == SSA_NAME)
    {
      if (get_gimple_rhs_class (code) != GIMPLE_SINGLE_RHS
          || !is_gimple_addressable (gimple_assign_rhs1 (stmt)))
        return NULL;

      *is_store = false;
      return gimple_assign_rhs1_ptr (stmt);
    }
  else if (code == SSA_NAME
           || (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS
               && is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
    {
      *is_store = true;
      return lhs;
    }
  else
    return NULL;
}
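
/* Illustrative example (not from the original sources): for the GIMPLE
   assignment "x_1 = a.f" this returns the address of the rhs with
   *IS_STORE = false; for "a.f = x_1" or "a.f = 42" it returns the address
   of the lhs with *IS_STORE = true; for "a.f = b.g", which contains two
   memory references, it returns NULL.  */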

/* Returns the memory reference contained in STMT.  */

static mem_ref_p
mem_ref_in_stmt (gimple stmt)
{
  bool store;
  tree *mem = simple_mem_ref_in_stmt (stmt, &store);
  hashval_t hash;
  mem_ref_p ref;

  if (!mem)
    return NULL;
  gcc_assert (!store);

  hash = iterative_hash_expr (*mem, 0);
  ref = (mem_ref_p) htab_find_with_hash (memory_accesses.refs, *mem, hash);

  gcc_assert (ref != NULL);
  return ref;
}

/* Determine the outermost loop to which it is possible to hoist a statement
   STMT and store it to LIM_DATA (STMT)->max_loop.  To do this we determine
   the outermost loop in which the value computed by STMT is invariant.
   If MUST_PRESERVE_EXEC is true, additionally choose the loop so that
   whether STMT is executed is preserved.  The function also fills in other
   related information in LIM_DATA (STMT).

   The function returns false if STMT cannot be hoisted outside of the loop it
   is defined in, and true otherwise.  */

static bool
determine_max_movement (gimple stmt, bool must_preserve_exec)
{
  basic_block bb = gimple_bb (stmt);
  struct loop *loop = bb->loop_father;
  struct loop *level;
  struct lim_aux_data *lim_data = get_lim_data (stmt);
  tree val;
  ssa_op_iter iter;

  if (must_preserve_exec)
    level = ALWAYS_EXECUTED_IN (bb);
  else
    level = superloop_at_depth (loop, 1);
  lim_data->max_loop = level;

  FOR_EACH_SSA_TREE_OPERAND (val, stmt, iter, SSA_OP_USE)
    if (!add_dependency (val, lim_data, loop, true))
      return false;

  if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_VIRTUAL_USES))
    {
      mem_ref_p ref = mem_ref_in_stmt (stmt);

      if (ref)
        {
          lim_data->max_loop
            = outermost_indep_loop (lim_data->max_loop, loop, ref);
          if (!lim_data->max_loop)
            return false;
        }
      else
        {
          FOR_EACH_SSA_TREE_OPERAND (val, stmt, iter, SSA_OP_VIRTUAL_USES)
            {
              if (!add_dependency (val, lim_data, loop, false))
                return false;
            }
        }
    }

  lim_data->cost += stmt_cost (stmt);

  return true;
}

/* Suppose that some statement in ORIG_LOOP is hoisted to the loop LEVEL,
   and that one of the operands of this statement is computed by STMT.
   Ensure that STMT (together with all the statements that define its
   operands) is hoisted at least out of the loop LEVEL.  */

static void
set_level (gimple stmt, struct loop *orig_loop, struct loop *level)
{
  struct loop *stmt_loop = gimple_bb (stmt)->loop_father;
  struct depend *dep;
  struct lim_aux_data *lim_data;

  stmt_loop = find_common_loop (orig_loop, stmt_loop);
  lim_data = get_lim_data (stmt);
  if (lim_data != NULL && lim_data->tgt_loop != NULL)
    stmt_loop = find_common_loop (stmt_loop,
                                  loop_outer (lim_data->tgt_loop));
  if (flow_loop_nested_p (stmt_loop, level))
    return;

  gcc_assert (level == lim_data->max_loop
              || flow_loop_nested_p (lim_data->max_loop, level));

  lim_data->tgt_loop = level;
  for (dep = lim_data->depends; dep; dep = dep->next)
    set_level (dep->stmt, orig_loop, level);
}

/* Determines an outermost loop from which we want to hoist the statement
   STMT.  For now we choose the outermost possible loop.  TODO -- use
   profiling information to set it more sanely.  */

static void
set_profitable_level (gimple stmt)
{
  set_level (stmt, gimple_bb (stmt)->loop_father, get_lim_data (stmt)->max_loop);
}

/* Returns true if STMT is a call that has side effects.  */

static bool
nonpure_call_p (gimple stmt)
{
  if (gimple_code (stmt) != GIMPLE_CALL)
    return false;

  return gimple_has_side_effects (stmt);
}

/* Rewrite a/b to a*(1/b).  Return the invariant stmt to process.  */

static gimple
rewrite_reciprocal (gimple_stmt_iterator *bsi)
{
  gimple stmt, stmt1, stmt2;
  tree var, name, lhs, type;
  tree real_one;

  stmt = gsi_stmt (*bsi);
  lhs = gimple_assign_lhs (stmt);
  type = TREE_TYPE (lhs);

  var = create_tmp_var (type, "reciptmp");
  add_referenced_var (var);
  DECL_GIMPLE_REG_P (var) = 1;

  /* For vectors, create a VECTOR_CST full of 1's.  */
  if (TREE_CODE (type) == VECTOR_TYPE)
    {
      int i, len;
      tree list = NULL_TREE;
      real_one = build_real (TREE_TYPE (type), dconst1);
      len = TYPE_VECTOR_SUBPARTS (type);
      for (i = 0; i < len; i++)
        list = tree_cons (NULL, real_one, list);
      real_one = build_vector (type, list);
    }
  else
    real_one = build_real (type, dconst1);

  stmt1 = gimple_build_assign_with_ops (RDIV_EXPR,
                                        var, real_one, gimple_assign_rhs2 (stmt));
  name = make_ssa_name (var, stmt1);
  gimple_assign_set_lhs (stmt1, name);

  stmt2 = gimple_build_assign_with_ops (MULT_EXPR, lhs, name,
                                        gimple_assign_rhs1 (stmt));

  /* Replace division stmt with reciprocal and multiply stmts.
     The multiply stmt is not invariant, so update iterator
     and avoid rescanning.  */
  gsi_replace (bsi, stmt1, true);
  gsi_insert_after (bsi, stmt2, GSI_NEW_STMT);

  /* Continue processing with invariant reciprocal statement.  */
  return stmt1;
}
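
/* Illustrative example (not from the original sources): inside a loop in
   which b_2 is invariant,

     x_1 = a_3 / b_2;

   becomes

     reciptmp_4 = 1.0 / b_2;
     x_1 = a_3 * reciptmp_4;

   where the reciprocal statement is invariant and can be hoisted, so the
   expensive division executes once rather than on every iteration.  This
   is only done under -funsafe-math-optimizations (see the caller).  */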

/* Check if the pattern at *BSI is a bittest of the form
   (A >> B) & 1 != 0 and in this case rewrite it to A & (1 << B) != 0.  */

static gimple
rewrite_bittest (gimple_stmt_iterator *bsi)
{
  gimple stmt, use_stmt, stmt1, stmt2;
  tree lhs, var, name, t, a, b;
  use_operand_p use;

  stmt = gsi_stmt (*bsi);
  lhs = gimple_assign_lhs (stmt);

  /* Verify that the single use of lhs is a comparison against zero.  */
  if (TREE_CODE (lhs) != SSA_NAME
      || !single_imm_use (lhs, &use, &use_stmt)
      || gimple_code (use_stmt) != GIMPLE_COND)
    return stmt;
  if (gimple_cond_lhs (use_stmt) != lhs
      || (gimple_cond_code (use_stmt) != NE_EXPR
          && gimple_cond_code (use_stmt) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (use_stmt)))
    return stmt;

  /* Get at the operands of the shift.  The rhs is TMP1 & 1.  */
  stmt1 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));
  if (gimple_code (stmt1) != GIMPLE_ASSIGN)
    return stmt;

  /* There may be a conversion in between, possibly inserted by fold.  */
  if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt1)))
    {
      t = gimple_assign_rhs1 (stmt1);
      if (TREE_CODE (t) != SSA_NAME
          || !has_single_use (t))
        return stmt;
      stmt1 = SSA_NAME_DEF_STMT (t);
      if (gimple_code (stmt1) != GIMPLE_ASSIGN)
        return stmt;
    }

  /* Verify that B is loop invariant but A is not.  Verify that with
     all the stmt walking we are still in the same loop.  */
  if (gimple_assign_rhs_code (stmt1) != RSHIFT_EXPR
      || loop_containing_stmt (stmt1) != loop_containing_stmt (stmt))
    return stmt;

  a = gimple_assign_rhs1 (stmt1);
  b = gimple_assign_rhs2 (stmt1);

  if (outermost_invariant_loop (b, loop_containing_stmt (stmt1)) != NULL
      && outermost_invariant_loop (a, loop_containing_stmt (stmt1)) == NULL)
    {
      /* 1 << B */
      var = create_tmp_var (TREE_TYPE (a), "shifttmp");
      add_referenced_var (var);
      t = fold_build2 (LSHIFT_EXPR, TREE_TYPE (a),
                       build_int_cst (TREE_TYPE (a), 1), b);
      stmt1 = gimple_build_assign (var, t);
      name = make_ssa_name (var, stmt1);
      gimple_assign_set_lhs (stmt1, name);

      /* A & (1 << B) */
      t = fold_build2 (BIT_AND_EXPR, TREE_TYPE (a), a, name);
      stmt2 = gimple_build_assign (var, t);
      name = make_ssa_name (var, stmt2);
      gimple_assign_set_lhs (stmt2, name);

      /* Replace the SSA_NAME we compare against zero.  Adjust
         the type of zero accordingly.  */
      SET_USE (use, name);
      gimple_cond_set_rhs (use_stmt, build_int_cst_type (TREE_TYPE (name), 0));

      gsi_insert_before (bsi, stmt1, GSI_SAME_STMT);
      gsi_replace (bsi, stmt2, true);

      return stmt1;
    }

  return stmt;
}
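
/* Illustrative example (not from the original sources): with invariant B_3,

     t_1 = A_2 >> B_3;            shifttmp_5 = 1 << B_3;
     t_4 = t_1 & 1;       ==>     shifttmp_6 = A_2 & shifttmp_5;
     if (t_4 != 0) ...            if (shifttmp_6 != 0) ...

   The statement computing 1 << B_3 depends only on the invariant operand
   and is the one returned for further hoisting.  */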

/* Determine the outermost loops in which statements in basic block BB are
   invariant, and record them to the LIM_DATA associated with the statements.
   Callback for walk_dominator_tree.  */

static void
determine_invariantness_stmt (struct dom_walk_data *dw_data ATTRIBUTE_UNUSED,
                              basic_block bb)
{
  enum move_pos pos;
  gimple_stmt_iterator bsi;
  gimple stmt;
  bool maybe_never = ALWAYS_EXECUTED_IN (bb) == NULL;
  struct loop *outermost = ALWAYS_EXECUTED_IN (bb);
  struct lim_aux_data *lim_data;

  if (!loop_outer (bb->loop_father))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Basic block %d (loop %d -- depth %d):\n\n",
             bb->index, bb->loop_father->num, loop_depth (bb->loop_father));

  for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
    {
      stmt = gsi_stmt (bsi);

      pos = movement_possibility (stmt);
      if (pos == MOVE_IMPOSSIBLE)
        {
          if (nonpure_call_p (stmt))
            {
              maybe_never = true;
              outermost = NULL;
            }
          /* Make sure to note always_executed_in for stores to make
             store-motion work.  */
          else if (stmt_makes_single_store (stmt))
            {
              struct lim_aux_data *lim_data = init_lim_data (stmt);
              lim_data->always_executed_in = outermost;
            }
          continue;
        }

      if (is_gimple_assign (stmt)
          && (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
              == GIMPLE_BINARY_RHS))
        {
          tree op0 = gimple_assign_rhs1 (stmt);
          tree op1 = gimple_assign_rhs2 (stmt);
          struct loop *ol1 = outermost_invariant_loop (op1,
                                 loop_containing_stmt (stmt));

          /* If the divisor is invariant, convert a/b to a*(1/b), allowing the
             reciprocal to be hoisted out of the loop, saving an expensive
             divide.  */
          if (pos == MOVE_POSSIBLE
              && gimple_assign_rhs_code (stmt) == RDIV_EXPR
              && flag_unsafe_math_optimizations
              && !flag_trapping_math
              && ol1 != NULL
              && outermost_invariant_loop (op0, ol1) == NULL)
            stmt = rewrite_reciprocal (&bsi);

          /* If the shift count is invariant, convert (A >> B) & 1 to
             A & (1 << B), allowing the bit mask to be hoisted out of the loop
             and saving an expensive shift.  */
          if (pos == MOVE_POSSIBLE
              && gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
              && integer_onep (op1)
              && TREE_CODE (op0) == SSA_NAME
              && has_single_use (op0))
            stmt = rewrite_bittest (&bsi);
        }

      lim_data = init_lim_data (stmt);
      lim_data->always_executed_in = outermost;

      if (maybe_never && pos == MOVE_PRESERVE_EXECUTION)
        continue;

      if (!determine_max_movement (stmt, pos == MOVE_PRESERVE_EXECUTION))
        {
          lim_data->max_loop = NULL;
          continue;
        }

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          print_gimple_stmt (dump_file, stmt, 2, 0);
          fprintf (dump_file, "  invariant up to level %d, cost %d.\n\n",
                   loop_depth (lim_data->max_loop),
                   lim_data->cost);
        }

      if (lim_data->cost >= LIM_EXPENSIVE)
        set_profitable_level (stmt);
    }
}

/* For each statement, determine the outermost loop in which it is invariant,
   the statements on whose motion it depends, and the cost of the computation.
   This information is stored in the LIM_DATA structure associated with
   each statement.  */

static void
determine_invariantness (void)
{
  struct dom_walk_data walk_data;

  memset (&walk_data, 0, sizeof (struct dom_walk_data));
  walk_data.dom_direction = CDI_DOMINATORS;
  walk_data.before_dom_children_before_stmts = determine_invariantness_stmt;

  init_walk_dominator_tree (&walk_data);
  walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);
  fini_walk_dominator_tree (&walk_data);
}

/* Hoist the statements in basic block BB out of the loops prescribed by
   data stored in LIM_DATA structures associated with each statement.  Callback
   for walk_dominator_tree.  */

static void
move_computations_stmt (struct dom_walk_data *dw_data ATTRIBUTE_UNUSED,
                        basic_block bb)
{
  struct loop *level;
  gimple_stmt_iterator bsi;
  gimple stmt;
  unsigned cost = 0;
  struct lim_aux_data *lim_data;

  if (!loop_outer (bb->loop_father))
    return;

  for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); )
    {
      stmt = gsi_stmt (bsi);

      lim_data = get_lim_data (stmt);
      if (lim_data == NULL)
        {
          gsi_next (&bsi);
          continue;
        }

      cost = lim_data->cost;
      level = lim_data->tgt_loop;
      clear_lim_data (stmt);

      if (!level)
        {
          gsi_next (&bsi);
          continue;
        }

      /* We do not really want to move conditionals out of the loop; we just
         placed them here to force their operands to be moved if necessary.  */
      if (gimple_code (stmt) == GIMPLE_COND)
        continue;

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Moving statement\n");
          print_gimple_stmt (dump_file, stmt, 0, 0);
          fprintf (dump_file, "(cost %u) out of loop %d.\n\n",
                   cost, level->num);
        }

      mark_virtual_ops_for_renaming (stmt);
      gsi_insert_on_edge (loop_preheader_edge (level), stmt);
      gsi_remove (&bsi, false);
    }
}

/* Hoist the statements out of the loops prescribed by data stored in
   LIM_DATA structures associated with each statement.  */

static void
move_computations (void)
{
  struct dom_walk_data walk_data;

  memset (&walk_data, 0, sizeof (struct dom_walk_data));
  walk_data.dom_direction = CDI_DOMINATORS;
  walk_data.before_dom_children_before_stmts = move_computations_stmt;

  init_walk_dominator_tree (&walk_data);
  walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);
  fini_walk_dominator_tree (&walk_data);

  gsi_commit_edge_inserts ();
  if (need_ssa_update_p ())
    rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
}

/* Checks whether the statement defining variable *INDEX can be hoisted
   out of the loop passed in DATA.  Callback for for_each_index.  */

static bool
may_move_till (tree ref, tree *index, void *data)
{
  struct loop *loop = (struct loop *) data, *max_loop;

  /* If REF is an array reference, check also that the step and the lower
     bound are invariant in LOOP.  */
  if (TREE_CODE (ref) == ARRAY_REF)
    {
      tree step = TREE_OPERAND (ref, 3);
      tree lbound = TREE_OPERAND (ref, 2);

      max_loop = outermost_invariant_loop (step, loop);
      if (!max_loop)
        return false;

      max_loop = outermost_invariant_loop (lbound, loop);
      if (!max_loop)
        return false;
    }

  max_loop = outermost_invariant_loop (*index, loop);
  if (!max_loop)
    return false;

  return true;
}

/* If OP is an SSA_NAME, force the statement that defines it to be
   moved out of the LOOP.  ORIG_LOOP is the loop in which OP is used.  */

static void
force_move_till_op (tree op, struct loop *orig_loop, struct loop *loop)
{
  gimple stmt;

  if (!op
      || is_gimple_min_invariant (op))
    return;

  gcc_assert (TREE_CODE (op) == SSA_NAME);

  stmt = SSA_NAME_DEF_STMT (op);
  if (gimple_nop_p (stmt))
    return;

  set_level (stmt, orig_loop, loop);
}

/* Forces the statements defining invariants in REF (and *INDEX) to be moved
   out of LOOP.  The reference REF is used in the loop ORIG_LOOP.  Callback
   for for_each_index.  */

struct fmt_data
{
  struct loop *loop;
  struct loop *orig_loop;
};

static bool
force_move_till (tree ref, tree *index, void *data)
{
  struct fmt_data *fmt_data = (struct fmt_data *) data;

  if (TREE_CODE (ref) == ARRAY_REF)
    {
      tree step = TREE_OPERAND (ref, 3);
      tree lbound = TREE_OPERAND (ref, 2);

      force_move_till_op (step, fmt_data->orig_loop, fmt_data->loop);
      force_move_till_op (lbound, fmt_data->orig_loop, fmt_data->loop);
    }

  force_move_till_op (*index, fmt_data->orig_loop, fmt_data->loop);

  return true;
}

/* A hash function for struct mem_ref object OBJ.  */

static hashval_t
memref_hash (const void *obj)
{
  const struct mem_ref *const mem = (const struct mem_ref *) obj;

  return mem->hash;
}

/* An equality function for struct mem_ref object OBJ1 with
   memory reference OBJ2.  */

static int
memref_eq (const void *obj1, const void *obj2)
{
  const struct mem_ref *const mem1 = (const struct mem_ref *) obj1;

  return operand_equal_p (mem1->mem, (const_tree) obj2, 0);
}

/* Releases list of memory reference locations ACCS.  */

static void
free_mem_ref_locs (mem_ref_locs_p accs)
{
  unsigned i;
  mem_ref_loc_p loc;

  if (!accs)
    return;

  for (i = 0; VEC_iterate (mem_ref_loc_p, accs->locs, i, loc); i++)
    free (loc);
  VEC_free (mem_ref_loc_p, heap, accs->locs);
  free (accs);
}

/* A function to free the mem_ref object OBJ.  */

static void
memref_free (void *obj)
{
  struct mem_ref *const mem = (struct mem_ref *) obj;
  unsigned i;
  mem_ref_locs_p accs;

  BITMAP_FREE (mem->stored);
  BITMAP_FREE (mem->indep_loop);
  BITMAP_FREE (mem->dep_loop);
  BITMAP_FREE (mem->indep_ref);
  BITMAP_FREE (mem->dep_ref);

  for (i = 0; VEC_iterate (mem_ref_locs_p, mem->accesses_in_loop, i, accs); i++)
    free_mem_ref_locs (accs);
  VEC_free (mem_ref_locs_p, heap, mem->accesses_in_loop);

  BITMAP_FREE (mem->vops);
  free (mem);
}

/* Allocates and returns a memory reference description for MEM whose hash
   value is HASH and id is ID.  */

static mem_ref_p
mem_ref_alloc (tree mem, unsigned hash, unsigned id)
{
  mem_ref_p ref = XNEW (struct mem_ref);
  ref->mem = mem;
  ref->id = id;
  ref->hash = hash;
  ref->stored = BITMAP_ALLOC (NULL);
  ref->indep_loop = BITMAP_ALLOC (NULL);
  ref->dep_loop = BITMAP_ALLOC (NULL);
  ref->indep_ref = BITMAP_ALLOC (NULL);
  ref->dep_ref = BITMAP_ALLOC (NULL);
  ref->accesses_in_loop = NULL;
  ref->vops = BITMAP_ALLOC (NULL);

  return ref;
}

/* Allocates and returns the new list of locations.  */

static mem_ref_locs_p
mem_ref_locs_alloc (void)
{
  mem_ref_locs_p accs = XNEW (struct mem_ref_locs);
  accs->locs = NULL;
  return accs;
}

/* Records memory reference location *LOC in LOOP to the memory reference
   description REF.  The reference occurs in statement STMT.  */

static void
record_mem_ref_loc (mem_ref_p ref, struct loop *loop, gimple stmt, tree *loc)
{
  mem_ref_loc_p aref = XNEW (struct mem_ref_loc);
  mem_ref_locs_p accs;
  bitmap ril = VEC_index (bitmap, memory_accesses.refs_in_loop, loop->num);

  if (VEC_length (mem_ref_locs_p, ref->accesses_in_loop)
      <= (unsigned) loop->num)
    VEC_safe_grow_cleared (mem_ref_locs_p, heap, ref->accesses_in_loop,
                           loop->num + 1);
  accs = VEC_index (mem_ref_locs_p, ref->accesses_in_loop, loop->num);
  if (!accs)
    {
      accs = mem_ref_locs_alloc ();
      VEC_replace (mem_ref_locs_p, ref->accesses_in_loop, loop->num, accs);
    }

  aref->stmt = stmt;
  aref->ref = loc;

  VEC_safe_push (mem_ref_loc_p, heap, accs->locs, aref);
  bitmap_set_bit (ril, ref->id);
}

/* Marks reference REF as stored in LOOP.  */

static void
mark_ref_stored (mem_ref_p ref, struct loop *loop)
{
  for (;
       loop != current_loops->tree_root
       && !bitmap_bit_p (ref->stored, loop->num);
       loop = loop_outer (loop))
    bitmap_set_bit (ref->stored, loop->num);
}

/* Gathers memory references in statement STMT in LOOP, storing the
   information about them in the memory_accesses structure.  Marks
   the vops accessed through unrecognized statements there as
   well.  */

static void
gather_mem_refs_stmt (struct loop *loop, gimple stmt)
{
  tree *mem = NULL;
  hashval_t hash;
  PTR *slot;
  mem_ref_p ref;
  ssa_op_iter oi;
  tree vname;
  bool is_stored;
  bitmap clvops;
  unsigned id;

  if (ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
    return;

  mem = simple_mem_ref_in_stmt (stmt, &is_stored);
  if (!mem)
    goto fail;

  hash = iterative_hash_expr (*mem, 0);
  slot = htab_find_slot_with_hash (memory_accesses.refs, *mem, hash, INSERT);

  if (*slot)
    {
      ref = (mem_ref_p) *slot;
      id = ref->id;
    }
  else
    {
      id = VEC_length (mem_ref_p, memory_accesses.refs_list);
      ref = mem_ref_alloc (*mem, hash, id);
      VEC_safe_push (mem_ref_p, heap, memory_accesses.refs_list, ref);
      *slot = ref;

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Memory reference %u: ", id);
          print_generic_expr (dump_file, ref->mem, TDF_SLIM);
          fprintf (dump_file, "\n");
        }
    }
  if (is_stored)
    mark_ref_stored (ref, loop);

  FOR_EACH_SSA_TREE_OPERAND (vname, stmt, oi, SSA_OP_VIRTUAL_USES)
    bitmap_set_bit (ref->vops, DECL_UID (SSA_NAME_VAR (vname)));
  record_mem_ref_loc (ref, loop, stmt, mem);
  return;

fail:
  clvops = VEC_index (bitmap, memory_accesses.clobbered_vops, loop->num);
  FOR_EACH_SSA_TREE_OPERAND (vname, stmt, oi, SSA_OP_VIRTUAL_USES)
    bitmap_set_bit (clvops, DECL_UID (SSA_NAME_VAR (vname)));
}

/* Gathers memory references in loops.  */

static void
gather_mem_refs_in_loops (void)
{
  gimple_stmt_iterator bsi;
  basic_block bb;
  struct loop *loop;
  loop_iterator li;
  bitmap clvo, clvi;
  bitmap lrefs, alrefs, alrefso;

  FOR_EACH_BB (bb)
    {
      loop = bb->loop_father;
      if (loop == current_loops->tree_root)
        continue;

      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
        gather_mem_refs_stmt (loop, gsi_stmt (bsi));
    }

  /* Propagate the information about clobbered vops and accessed memory
     references up the loop hierarchy.  */
  FOR_EACH_LOOP (li, loop, LI_FROM_INNERMOST)
    {
      lrefs = VEC_index (bitmap, memory_accesses.refs_in_loop, loop->num);
      alrefs = VEC_index (bitmap, memory_accesses.all_refs_in_loop, loop->num);
      bitmap_ior_into (alrefs, lrefs);

      if (loop_outer (loop) == current_loops->tree_root)
        continue;

      clvi = VEC_index (bitmap, memory_accesses.clobbered_vops, loop->num);
      clvo = VEC_index (bitmap, memory_accesses.clobbered_vops,
                        loop_outer (loop)->num);
      bitmap_ior_into (clvo, clvi);

      alrefso = VEC_index (bitmap, memory_accesses.all_refs_in_loop,
                           loop_outer (loop)->num);
      bitmap_ior_into (alrefso, alrefs);
    }
}

/* Element of the hash table that maps vops to memory references.  */

struct vop_to_refs_elt
{
  /* DECL_UID of the vop.  */
  unsigned uid;

  /* List of all the references.  */
  bitmap refs_all;

  /* List of stored references.  */
  bitmap refs_stored;
};

/* A hash function for struct vop_to_refs_elt object OBJ.  */

static hashval_t
vtoe_hash (const void *obj)
{
  const struct vop_to_refs_elt *const vtoe =
    (const struct vop_to_refs_elt *) obj;

  return vtoe->uid;
}

/* An equality function for struct vop_to_refs_elt object OBJ1 with
   uid of a vop OBJ2.  */

static int
vtoe_eq (const void *obj1, const void *obj2)
{
  const struct vop_to_refs_elt *const vtoe =
    (const struct vop_to_refs_elt *) obj1;
  const unsigned *const uid = (const unsigned *) obj2;

  return vtoe->uid == *uid;
}

/* A function to free the struct vop_to_refs_elt object.  */

static void
vtoe_free (void *obj)
{
  struct vop_to_refs_elt *const vtoe =
    (struct vop_to_refs_elt *) obj;

  BITMAP_FREE (vtoe->refs_all);
  BITMAP_FREE (vtoe->refs_stored);
  free (vtoe);
}

/* Records REF to hashtable VOP_TO_REFS for the index VOP.  STORED is true
   if the reference REF is stored.  */

static void
record_vop_access (htab_t vop_to_refs, unsigned vop, unsigned ref, bool stored)
{
  void **slot = htab_find_slot_with_hash (vop_to_refs, &vop, vop, INSERT);
  struct vop_to_refs_elt *vtoe;

  if (!*slot)
    {
      vtoe = XNEW (struct vop_to_refs_elt);
      vtoe->uid = vop;
      vtoe->refs_all = BITMAP_ALLOC (NULL);
      vtoe->refs_stored = BITMAP_ALLOC (NULL);
      *slot = vtoe;
    }
  else
    vtoe = (struct vop_to_refs_elt *) *slot;

  bitmap_set_bit (vtoe->refs_all, ref);
  if (stored)
    bitmap_set_bit (vtoe->refs_stored, ref);
}

/* Returns the set of references that access VOP according to the table
   VOP_TO_REFS.  */

static bitmap
get_vop_accesses (htab_t vop_to_refs, unsigned vop)
{
  struct vop_to_refs_elt *const vtoe =
    (struct vop_to_refs_elt *) htab_find_with_hash (vop_to_refs, &vop, vop);
  return vtoe->refs_all;
}

/* Returns the set of stores that access VOP according to the table
   VOP_TO_REFS.  */

static bitmap
get_vop_stores (htab_t vop_to_refs, unsigned vop)
{
  struct vop_to_refs_elt *const vtoe =
    (struct vop_to_refs_elt *) htab_find_with_hash (vop_to_refs, &vop, vop);
  return vtoe->refs_stored;
}

/* Adds REF to mapping from virtual operands to references in LOOP.  */

static void
add_vop_ref_mapping (struct loop *loop, mem_ref_p ref)
{
  htab_t map = VEC_index (htab_t, memory_accesses.vop_ref_map, loop->num);
  bool stored = bitmap_bit_p (ref->stored, loop->num);
  bitmap clobbers = VEC_index (bitmap, memory_accesses.clobbered_vops,
                               loop->num);
  bitmap_iterator bi;
  unsigned vop;

  EXECUTE_IF_AND_COMPL_IN_BITMAP (ref->vops, clobbers, 0, vop, bi)
    {
      record_vop_access (map, vop, ref->id, stored);
    }
}

/* Create a mapping from virtual operands to references that touch them
   in LOOP.  */

static void
create_vop_ref_mapping_loop (struct loop *loop)
{
  bitmap refs = VEC_index (bitmap, memory_accesses.refs_in_loop, loop->num);
  struct loop *sloop;
  bitmap_iterator bi;
  unsigned i;
  mem_ref_p ref;

  EXECUTE_IF_SET_IN_BITMAP (refs, 0, i, bi)
    {
      ref = VEC_index (mem_ref_p, memory_accesses.refs_list, i);
      for (sloop = loop; sloop != current_loops->tree_root;
           sloop = loop_outer (sloop))
        add_vop_ref_mapping (sloop, ref);
    }
}

/* For each non-clobbered virtual operand and each loop, record the memory
   references in this loop that touch the operand.  */

static void
create_vop_ref_mapping (void)
{
  loop_iterator li;
  struct loop *loop;

  FOR_EACH_LOOP (li, loop, 0)
    {
      create_vop_ref_mapping_loop (loop);
    }
}

/* Gathers information about memory accesses in the loops.  */

static void
analyze_memory_references (void)
{
  unsigned i;
  bitmap empty;
  htab_t hempty;

  memory_accesses.refs
    = htab_create (100, memref_hash, memref_eq, memref_free);
  memory_accesses.refs_list = NULL;
  memory_accesses.refs_in_loop = VEC_alloc (bitmap, heap,
                                            number_of_loops ());
  memory_accesses.all_refs_in_loop = VEC_alloc (bitmap, heap,
                                                number_of_loops ());
  memory_accesses.clobbered_vops = VEC_alloc (bitmap, heap,
                                              number_of_loops ());
  memory_accesses.vop_ref_map = VEC_alloc (htab_t, heap,
                                           number_of_loops ());

  for (i = 0; i < number_of_loops (); i++)
    {
      empty = BITMAP_ALLOC (NULL);
      VEC_quick_push (bitmap, memory_accesses.refs_in_loop, empty);
      empty = BITMAP_ALLOC (NULL);
      VEC_quick_push (bitmap, memory_accesses.all_refs_in_loop, empty);
      empty = BITMAP_ALLOC (NULL);
      VEC_quick_push (bitmap, memory_accesses.clobbered_vops, empty);
      hempty = htab_create (10, vtoe_hash, vtoe_eq, vtoe_free);
      VEC_quick_push (htab_t, memory_accesses.vop_ref_map, hempty);
    }

  memory_accesses.ttae_cache = NULL;

  gather_mem_refs_in_loops ();
  create_vop_ref_mapping ();
}

/* Returns true if a region of size SIZE1 at position 0 and a region of
   size SIZE2 at position DIFF cannot overlap.  */

static bool
cannot_overlap_p (aff_tree *diff, double_int size1, double_int size2)
{
  double_int d, bound;

  /* Unless the difference is a constant, we fail.  */
  if (diff->n != 0)
    return false;

  d = diff->offset;
  if (double_int_negative_p (d))
    {
      /* The second object is before the first one, we succeed if the last
         element of the second object is before the start of the first one.  */
      bound = double_int_add (d, double_int_add (size2, double_int_minus_one));
      return double_int_negative_p (bound);
    }
  else
    {
      /* We succeed if the second object starts after the first one ends.  */
      return double_int_scmp (size1, d) <= 0;
    }
}
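
/* Illustrative example (not from the original sources): with SIZE1 = 4,
   SIZE2 = 4 and a constant difference of +4, the regions are [0, 4) and
   [4, 8) and the function returns true; with a difference of +2 the
   regions [0, 4) and [2, 6) overlap and it returns false.  */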

/* Returns true if MEM1 and MEM2 may alias.  TTAE_CACHE is used as a cache in
   tree_to_aff_combination_expand.  */

static bool
mem_refs_may_alias_p (tree mem1, tree mem2, struct pointer_map_t **ttae_cache)
{
  /* Perform BASE + OFFSET analysis -- if MEM1 and MEM2 are based on the same
     object and their offsets differ in such a way that the locations cannot
     overlap, then they cannot alias.  */
  double_int size1, size2;
  aff_tree off1, off2;

  /* Perform basic offset and type-based disambiguation.  */
  if (!refs_may_alias_p (mem1, mem2))
    return false;

  /* The expansion of addresses may be a bit expensive, thus we only do
     the check at -O2 and higher optimization levels.  */
  if (optimize < 2)
    return true;

  get_inner_reference_aff (mem1, &off1, &size1);
  get_inner_reference_aff (mem2, &off2, &size2);
  aff_combination_expand (&off1, ttae_cache);
  aff_combination_expand (&off2, ttae_cache);
  aff_combination_scale (&off1, double_int_minus_one);
  aff_combination_add (&off2, &off1);

  if (cannot_overlap_p (&off2, size1, size2))
    return false;

  return true;
}
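
/* Illustrative example (not from the original sources): for a[i_1] and
   a[i_1 + 1] with 4-byte elements, the expanded affine offsets differ by
   the constant 4, so cannot_overlap_p proves the two 4-byte regions
   disjoint and the references do not alias despite sharing a base.  */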
1655 | |
1656 /* Rewrites location LOC by TMP_VAR. */ | |
1657 | |
1658 static void | |
1659 rewrite_mem_ref_loc (mem_ref_loc_p loc, tree tmp_var) | |
1660 { | |
1661 mark_virtual_ops_for_renaming (loc->stmt); | |
1662 *loc->ref = tmp_var; | |
1663 update_stmt (loc->stmt); | |
1664 } | |
1665 | |
1666 /* Adds all locations of REF in LOOP and its subloops to LOCS. */ | |
1667 | |
1668 static void | |
1669 get_all_locs_in_loop (struct loop *loop, mem_ref_p ref, | |
1670 VEC (mem_ref_loc_p, heap) **locs) | |
1671 { | |
1672 mem_ref_locs_p accs; | |
1673 unsigned i; | |
1674 mem_ref_loc_p loc; | |
1675 bitmap refs = VEC_index (bitmap, memory_accesses.all_refs_in_loop, | |
1676 loop->num); | |
1677 struct loop *subloop; | |
1678 | |
1679 if (!bitmap_bit_p (refs, ref->id)) | |
1680 return; | |
1681 | |
1682 if (VEC_length (mem_ref_locs_p, ref->accesses_in_loop) | |
1683 > (unsigned) loop->num) | |
1684 { | |
1685 accs = VEC_index (mem_ref_locs_p, ref->accesses_in_loop, loop->num); | |
1686 if (accs) | |
1687 { | |
1688 for (i = 0; VEC_iterate (mem_ref_loc_p, accs->locs, i, loc); i++) | |
1689 VEC_safe_push (mem_ref_loc_p, heap, *locs, loc); | |
1690 } | |
1691 } | |
1692 | |
1693 for (subloop = loop->inner; subloop != NULL; subloop = subloop->next) | |
1694 get_all_locs_in_loop (subloop, ref, locs); | |
1695 } | |
1696 | |
1697 /* Rewrites all references to REF in LOOP by variable TMP_VAR. */ | |
1698 | |
1699 static void | |
1700 rewrite_mem_refs (struct loop *loop, mem_ref_p ref, tree tmp_var) | |
1701 { | |
1702 unsigned i; | |
1703 mem_ref_loc_p loc; | |
1704 VEC (mem_ref_loc_p, heap) *locs = NULL; | |
1705 | |
1706 get_all_locs_in_loop (loop, ref, &locs); | |
1707 for (i = 0; VEC_iterate (mem_ref_loc_p, locs, i, loc); i++) | |
1708 rewrite_mem_ref_loc (loc, tmp_var); | |
1709 VEC_free (mem_ref_loc_p, heap, locs); | |
1710 } | |
1711 | |
1712 /* The name and the length of the currently generated variable | |
1713 for lsm. */ | |
1714 #define MAX_LSM_NAME_LENGTH 40 | |
1715 static char lsm_tmp_name[MAX_LSM_NAME_LENGTH + 1]; | |
1716 static int lsm_tmp_name_length; | |
1717 | |
1718 /* Adds S to lsm_tmp_name, unless the result would be too long. */ | |
1719 | |
1720 static void | |
1721 lsm_tmp_name_add (const char *s) | |
1722 { | |
1723 int l = strlen (s) + lsm_tmp_name_length; | |
1724 if (l > MAX_LSM_NAME_LENGTH) | |
1725 return; | |
1726 | |
1727 strcpy (lsm_tmp_name + lsm_tmp_name_length, s); | |
1728 lsm_tmp_name_length = l; | |
1729 } | |
1730 | |
1731 /* Stores the name for the temporary variable that replaces REF into | |
1732 lsm_tmp_name. */ | |
1733 | |
1734 static void | |
1735 gen_lsm_tmp_name (tree ref) | |
1736 { | |
1737 const char *name; | |
1738 | |
1739 switch (TREE_CODE (ref)) | |
1740 { | |
1741 case MISALIGNED_INDIRECT_REF: | |
1742 case ALIGN_INDIRECT_REF: | |
1743 case INDIRECT_REF: | |
1744 gen_lsm_tmp_name (TREE_OPERAND (ref, 0)); | |
1745 lsm_tmp_name_add ("_"); | |
1746 break; | |
1747 | |
1748 case BIT_FIELD_REF: | |
1749 case VIEW_CONVERT_EXPR: | |
1750 case ARRAY_RANGE_REF: | |
1751 gen_lsm_tmp_name (TREE_OPERAND (ref, 0)); | |
1752 break; | |
1753 | |
1754 case REALPART_EXPR: | |
1755 gen_lsm_tmp_name (TREE_OPERAND (ref, 0)); | |
1756 lsm_tmp_name_add ("_RE"); | |
1757 break; | |
1758 | |
1759 case IMAGPART_EXPR: | |
1760 gen_lsm_tmp_name (TREE_OPERAND (ref, 0)); | |
1761 lsm_tmp_name_add ("_IM"); | |
1762 break; | |
1763 | |
1764 case COMPONENT_REF: | |
1765 gen_lsm_tmp_name (TREE_OPERAND (ref, 0)); | |
1766 lsm_tmp_name_add ("_"); | |
1767 name = get_name (TREE_OPERAND (ref, 1)); | |
1768 if (!name) | |
1769 name = "F"; | |
1770 lsm_tmp_name_add (name); | |
1771 break; | |
1772 | |
1773 case ARRAY_REF: | |
1774 gen_lsm_tmp_name (TREE_OPERAND (ref, 0)); | |
1775 lsm_tmp_name_add ("_I"); | |
1776 break; | |
1777 | |
1778 case SSA_NAME: | |
1779 ref = SSA_NAME_VAR (ref); | |
1780 /* Fallthru. */ | |
1781 | |
1782 case VAR_DECL: | |
1783 case PARM_DECL: | |
1784 name = get_name (ref); | |
1785 if (!name) | |
1786 name = "D"; | |
1787 lsm_tmp_name_add (name); | |
1788 break; | |
1789 | |
1790 case STRING_CST: | |
1791 lsm_tmp_name_add ("S"); | |
1792 break; | |
1793 | |
1794 case RESULT_DECL: | |
1795 lsm_tmp_name_add ("R"); | |
1796 break; | |
1797 | |
1798 default: | |
1799 gcc_unreachable (); | |
1800 } | |
1801 } | |
1802 | |
1803 /* Determines the name for the temporary variable that replaces REF. | |
1804 The name is accumulated into the lsm_tmp_name variable. | |
1805 N is appended to the name of the temporary. */ | |
1806 | |
1807 char * | |
1808 get_lsm_tmp_name (tree ref, unsigned n) | |
1809 { | |
1810 char ns[2]; | |
1811 | |
1812 lsm_tmp_name_length = 0; | |
1813 gen_lsm_tmp_name (ref); | |
1814 lsm_tmp_name_add ("_lsm"); | |
1815 if (n < 10) | |
1816 { | |
1817 ns[0] = '0' + n; | |
1818 ns[1] = 0; | |
1819 lsm_tmp_name_add (ns); | |
1820 } | |
1821 return lsm_tmp_name; | |
1822 } | |
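
/* For example (an illustration, not part of the original sources): for
   the reference a[i].x and N = 2, gen_lsm_tmp_name accumulates "a"
   (the base), "_I" (the array index) and "_x" (the field), and
   get_lsm_tmp_name returns "a_I_x_lsm2".  If N does not fit in a
   single decimal digit (such as the ~0 passed from execute_sm below),
   the digit suffix is simply omitted.  */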
1823 | |
1824 /* Executes store motion of memory reference REF from LOOP. | |
1825 Exits from the LOOP are stored in EXITS. The initialization of the | |
1826 temporary variable is put into the preheader of the loop, and assignments | |
1827 to the reference from the temporary variable are emitted to exits. */ | |
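
/* Schematically (an illustration, not taken from the original sources):

     while (cond)
       {
         ...
         *p = x;
       }

   becomes

     p_lsm = *p;
     while (cond)
       {
         ...
         p_lsm = x;
       }
     *p = p_lsm;

   with the final store emitted on every exit edge of the loop.  */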
1828 | |
1829 static void | |
1830 execute_sm (struct loop *loop, VEC (edge, heap) *exits, mem_ref_p ref) | |
1831 { | |
1832 tree tmp_var; | |
1833 unsigned i; | |
1834 gimple load, store; | |
1835 struct fmt_data fmt_data; | |
1836 edge ex; | |
1837 struct lim_aux_data *lim_data; | |
1838 | |
1839 if (dump_file && (dump_flags & TDF_DETAILS)) | |
1840 { | |
1841 fprintf (dump_file, "Executing store motion of "); | |
1842 print_generic_expr (dump_file, ref->mem, 0); | |
1843 fprintf (dump_file, " from loop %d\n", loop->num); | |
1844 } | |
1845 | |
1846 tmp_var = make_rename_temp (TREE_TYPE (ref->mem), | |
1847 get_lsm_tmp_name (ref->mem, ~0)); | |
1848 | |
1849 fmt_data.loop = loop; | |
1850 fmt_data.orig_loop = loop; | |
1851 for_each_index (&ref->mem, force_move_till, &fmt_data); | |
1852 | |
1853 rewrite_mem_refs (loop, ref, tmp_var); | |
1854 | |
1855 /* Emit the load & stores. */ | |
1856 load = gimple_build_assign (tmp_var, unshare_expr (ref->mem)); | |
1857 lim_data = init_lim_data (load); | |
1858 lim_data->max_loop = loop; | |
1859 lim_data->tgt_loop = loop; | |
1860 | |
1861 /* Put this into the latch, so that we are sure it will be processed after | |
1862 all dependencies. */ | |
1863 gsi_insert_on_edge (loop_latch_edge (loop), load); | |
1864 | |
1865 for (i = 0; VEC_iterate (edge, exits, i, ex); i++) | |
1866 { | |
1867 store = gimple_build_assign (unshare_expr (ref->mem), tmp_var); | |
1868 gsi_insert_on_edge (ex, store); | |
1869 } | |
1870 } | |
1871 | |
1872 /* Hoists memory references MEM_REFS out of LOOP. EXITS is the list of exit | |
1873 edges of the LOOP. */ | |
1874 | |
1875 static void | |
1876 hoist_memory_references (struct loop *loop, bitmap mem_refs, | |
1877 VEC (edge, heap) *exits) | |
1878 { | |
1879 mem_ref_p ref; | |
1880 unsigned i; | |
1881 bitmap_iterator bi; | |
1882 | |
1883 EXECUTE_IF_SET_IN_BITMAP (mem_refs, 0, i, bi) | |
1884 { | |
1885 ref = VEC_index (mem_ref_p, memory_accesses.refs_list, i); | |
1886 execute_sm (loop, exits, ref); | |
1887 } | |
1888 } | |
1889 | |
1890 /* Returns true if REF is always accessed in LOOP. */ | |
1891 | |
1892 static bool | |
1893 ref_always_accessed_p (struct loop *loop, mem_ref_p ref) | |
1894 { | |
1895 VEC (mem_ref_loc_p, heap) *locs = NULL; | |
1896 unsigned i; | |
1897 mem_ref_loc_p loc; | |
1898 bool ret = false; | |
1899 struct loop *must_exec; | |
1900 | |
1901 get_all_locs_in_loop (loop, ref, &locs); | |
1902 for (i = 0; VEC_iterate (mem_ref_loc_p, locs, i, loc); i++) | |
1903 { | |
1904 if (!get_lim_data (loc->stmt)) | |
1905 continue; | |
1906 | |
1907 must_exec = get_lim_data (loc->stmt)->always_executed_in; | |
1908 if (!must_exec) | |
1909 continue; | |
1910 | |
1911 if (must_exec == loop | |
1912 || flow_loop_nested_p (must_exec, loop)) | |
1913 { | |
1914 ret = true; | |
1915 break; | |
1916 } | |
1917 } | |
1918 VEC_free (mem_ref_loc_p, heap, locs); | |
1919 | |
1920 return ret; | |
1921 } | |
1922 | |
1923 /* Returns true if REF1 and REF2 are independent. */ | |
1924 | |
1925 static bool | |
1926 refs_independent_p (mem_ref_p ref1, mem_ref_p ref2) | |
1927 { | |
1928 if (ref1 == ref2 | |
1929 || bitmap_bit_p (ref1->indep_ref, ref2->id)) | |
1930 return true; | |
1931 if (bitmap_bit_p (ref1->dep_ref, ref2->id)) | |
1932 return false; | |
1933 | |
1934 if (dump_file && (dump_flags & TDF_DETAILS)) | |
1935 fprintf (dump_file, "Querying dependency of refs %u and %u: ", | |
1936 ref1->id, ref2->id); | |
1937 | |
1938 if (mem_refs_may_alias_p (ref1->mem, ref2->mem, | |
1939 &memory_accesses.ttae_cache)) | |
1940 { | |
1941 bitmap_set_bit (ref1->dep_ref, ref2->id); | |
1942 bitmap_set_bit (ref2->dep_ref, ref1->id); | |
1943 if (dump_file && (dump_flags & TDF_DETAILS)) | |
1944 fprintf (dump_file, "dependent.\n"); | |
1945 return false; | |
1946 } | |
1947 else | |
1948 { | |
1949 bitmap_set_bit (ref1->indep_ref, ref2->id); | |
1950 bitmap_set_bit (ref2->indep_ref, ref1->id); | |
1951 if (dump_file && (dump_flags & TDF_DETAILS)) | |
1952 fprintf (dump_file, "independent.\n"); | |
1953 return true; | |
1954 } | |
1955 } | |
1956 | |
1957 /* Records whether REF is independent in LOOP (according | |
1958 to INDEP). */ | |
1959 | |
1960 static void | |
1961 record_indep_loop (struct loop *loop, mem_ref_p ref, bool indep) | |
1962 { | |
1963 if (indep) | |
1964 bitmap_set_bit (ref->indep_loop, loop->num); | |
1965 else | |
1966 bitmap_set_bit (ref->dep_loop, loop->num); | |
1967 } | |
1968 | |
1969 /* Returns true if REF is independent of all other memory references in | |
1970 LOOP. */ | |
1971 | |
1972 static bool | |
1973 ref_indep_loop_p_1 (struct loop *loop, mem_ref_p ref) | |
1974 { | |
1975 bitmap clobbers, refs_to_check, refs; | |
1976 unsigned i; | |
1977 bitmap_iterator bi; | |
1978 bool ret = true, stored = bitmap_bit_p (ref->stored, loop->num); | |
1979 htab_t map; | |
1980 mem_ref_p aref; | |
1981 | |
1982 /* If the reference is clobbered, it is not independent. */ | |
1983 clobbers = VEC_index (bitmap, memory_accesses.clobbered_vops, loop->num); | |
1984 if (bitmap_intersect_p (ref->vops, clobbers)) | |
1985 return false; | |
1986 | |
1987 refs_to_check = BITMAP_ALLOC (NULL); | |
1988 | |
1989 map = VEC_index (htab_t, memory_accesses.vop_ref_map, loop->num); | |
1990 EXECUTE_IF_AND_COMPL_IN_BITMAP (ref->vops, clobbers, 0, i, bi) | |
1991 { | |
1992 if (stored) | |
1993 refs = get_vop_accesses (map, i); | |
1994 else | |
1995 refs = get_vop_stores (map, i); | |
1996 | |
1997 bitmap_ior_into (refs_to_check, refs); | |
1998 } | |
1999 | |
2000 EXECUTE_IF_SET_IN_BITMAP (refs_to_check, 0, i, bi) | |
2001 { | |
2002 aref = VEC_index (mem_ref_p, memory_accesses.refs_list, i); | |
2003 if (!refs_independent_p (ref, aref)) | |
2004 { | |
2005 ret = false; | |
2006 record_indep_loop (loop, aref, false); | |
2007 break; | |
2008 } | |
2009 } | |
2010 | |
2011 BITMAP_FREE (refs_to_check); | |
2012 return ret; | |
2013 } | |
2014 | |
2015 /* Returns true if REF is independent of all other memory references in | |
2016 LOOP. Wrapper over ref_indep_loop_p_1, caching its results. */ | |
2017 | |
2018 static bool | |
2019 ref_indep_loop_p (struct loop *loop, mem_ref_p ref) | |
2020 { | |
2021 bool ret; | |
2022 | |
2023 if (bitmap_bit_p (ref->indep_loop, loop->num)) | |
2024 return true; | |
2025 if (bitmap_bit_p (ref->dep_loop, loop->num)) | |
2026 return false; | |
2027 | |
2028 ret = ref_indep_loop_p_1 (loop, ref); | |
2029 | |
2030 if (dump_file && (dump_flags & TDF_DETAILS)) | |
2031 fprintf (dump_file, "Querying dependencies of ref %u in loop %d: %s\n", | |
2032 ref->id, loop->num, ret ? "independent" : "dependent"); | |
2033 | |
2034 record_indep_loop (loop, ref, ret); | |
2035 | |
2036 return ret; | |
2037 } | |
2038 | |
2039 /* Returns true if we can perform store motion of REF from LOOP. */ | |
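
/* For instance (an illustration, not part of the original sources): in

     while (cond)
       if (p != NULL)
         *p = x;

   the store *p = x could trap if p is NULL, and it is not always
   executed when the loop is entered, so the trap check below rejects
   store motion of *p.  */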
2040 | |
2041 static bool | |
2042 can_sm_ref_p (struct loop *loop, mem_ref_p ref) | |
2043 { | |
2044 /* Unless the reference is stored in the loop, there is nothing to do. */ | |
2045 if (!bitmap_bit_p (ref->stored, loop->num)) | |
2046 return false; | |
2047 | |
2048 /* It should be movable. */ | |
2049 if (!is_gimple_reg_type (TREE_TYPE (ref->mem)) | |
2050 || TREE_THIS_VOLATILE (ref->mem) | |
2051 || !for_each_index (&ref->mem, may_move_till, loop)) | |
2052 return false; | |
2053 | |
2054 /* If it can trap, it must always be executed in LOOP. */ | |
2055 if (tree_could_trap_p (ref->mem) | |
2056 && !ref_always_accessed_p (loop, ref)) | |
2057 return false; | |
2058 | |
2059 /* And it must be independent of all other memory references | |
2060 in LOOP. */ | |
2061 if (!ref_indep_loop_p (loop, ref)) | |
2062 return false; | |
2063 | |
2064 return true; | |
2065 } | |
2066 | |
2067 /* Marks in REFS_TO_SM the references in LOOP for which store motion | |
2068 should be performed. SM_EXECUTED is the set of references for which | |
2069 store motion was performed in one of the outer loops. */ | |
2070 | |
2071 static void | |
2072 find_refs_for_sm (struct loop *loop, bitmap sm_executed, bitmap refs_to_sm) | |
2073 { | |
2074 bitmap refs = VEC_index (bitmap, memory_accesses.all_refs_in_loop, | |
2075 loop->num); | |
2076 unsigned i; | |
2077 bitmap_iterator bi; | |
2078 mem_ref_p ref; | |
2079 | |
2080 EXECUTE_IF_AND_COMPL_IN_BITMAP (refs, sm_executed, 0, i, bi) | |
2081 { | |
2082 ref = VEC_index (mem_ref_p, memory_accesses.refs_list, i); | |
2083 if (can_sm_ref_p (loop, ref)) | |
2084 bitmap_set_bit (refs_to_sm, i); | |
2085 } | |
2086 } | |
2087 | |
2088 /* Checks whether LOOP (with exits stored in the EXITS array) is suitable | |
2089 for a store motion optimization (i.e. whether we can insert statements | |
2090 on its exits). */ | |
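
/* Store motion inserts the copies back to memory on the exit edges, and
   edges marked EDGE_ABNORMAL (e.g. those originating from a setjmp
   receiver or a nonlocal goto) cannot be split to hold new statements,
   hence such loops are rejected.  */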
2091 | |
2092 static bool | |
2093 loop_suitable_for_sm (struct loop *loop ATTRIBUTE_UNUSED, | |
2094 VEC (edge, heap) *exits) | |
2095 { | |
2096 unsigned i; | |
2097 edge ex; | |
2098 | |
2099 for (i = 0; VEC_iterate (edge, exits, i, ex); i++) | |
2100 if (ex->flags & EDGE_ABNORMAL) | |
2101 return false; | |
2102 | |
2103 return true; | |
2104 } | |
2105 | |
2106 /* Try to perform store motion for all memory references modified inside | |
2107 LOOP. SM_EXECUTED is the bitmap of the memory references for which | |
2108 store motion was executed in one of the outer loops. */ | |
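
/* A sketch of the bookkeeping below (not from the original sources): the
   refs moved out of this loop are added to SM_EXECUTED while the
   subloops are processed, so that a subloop does not redundantly apply
   store motion to a reference already handled here; they are removed
   again afterwards, since sibling loops are not affected.  */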
2109 | |
2110 static void | |
2111 store_motion_loop (struct loop *loop, bitmap sm_executed) | |
2112 { | |
2113 VEC (edge, heap) *exits = get_loop_exit_edges (loop); | |
2114 struct loop *subloop; | |
2115 bitmap sm_in_loop = BITMAP_ALLOC (NULL); | |
2116 | |
2117 if (loop_suitable_for_sm (loop, exits)) | |
2118 { | |
2119 find_refs_for_sm (loop, sm_executed, sm_in_loop); | |
2120 hoist_memory_references (loop, sm_in_loop, exits); | |
2121 } | |
2122 VEC_free (edge, heap, exits); | |
2123 | |
2124 bitmap_ior_into (sm_executed, sm_in_loop); | |
2125 for (subloop = loop->inner; subloop != NULL; subloop = subloop->next) | |
2126 store_motion_loop (subloop, sm_executed); | |
2127 bitmap_and_compl_into (sm_executed, sm_in_loop); | |
2128 BITMAP_FREE (sm_in_loop); | |
2129 } | |
2130 | |
2131 /* Try to perform store motion for all memory references modified inside | |
2132 loops. */ | |
2133 | |
2134 static void | |
2135 store_motion (void) | |
2136 { | |
2137 struct loop *loop; | |
2138 bitmap sm_executed = BITMAP_ALLOC (NULL); | |
2139 | |
2140 for (loop = current_loops->tree_root->inner; loop != NULL; loop = loop->next) | |
2141 store_motion_loop (loop, sm_executed); | |
2142 | |
2143 BITMAP_FREE (sm_executed); | |
2144 gsi_commit_edge_inserts (); | |
2145 } | |
2146 | |
2147 /* Fills in the ALWAYS_EXECUTED_IN information for basic blocks of LOOP, i.e. | |
2148 for each such basic block bb records the outermost loop for which execution | |
2149 of its header implies execution of bb. CONTAINS_CALL is the bitmap of | |
2150 blocks that contain a nonpure call. */ | |
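
/* As an example (an illustration, not part of the original sources), in

     while (cond1)
       {
         if (cond2)
           x = 1;
         y = 2;
       }

   the loop header and the block containing y = 2 are executed whenever
   the loop is entered, so they get the loop recorded in their aux field,
   while the block containing x = 1 does not.  */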
2151 | |
2152 static void | |
2153 fill_always_executed_in (struct loop *loop, sbitmap contains_call) | |
2154 { | |
2155 basic_block bb = NULL, *bbs, last = NULL; | |
2156 unsigned i; | |
2157 edge e; | |
2158 struct loop *inn_loop = loop; | |
2159 | |
2160 if (!loop->header->aux) | |
2161 { | |
2162 bbs = get_loop_body_in_dom_order (loop); | |
2163 | |
2164 for (i = 0; i < loop->num_nodes; i++) | |
2165 { | |
2166 edge_iterator ei; | |
2167 bb = bbs[i]; | |
2168 | |
2169 if (dominated_by_p (CDI_DOMINATORS, loop->latch, bb)) | |
2170 last = bb; | |
2171 | |
2172 if (TEST_BIT (contains_call, bb->index)) | |
2173 break; | |
2174 | |
2175 FOR_EACH_EDGE (e, ei, bb->succs) | |
2176 if (!flow_bb_inside_loop_p (loop, e->dest)) | |
2177 break; | |
2178 if (e) | |
2179 break; | |
2180 | |
2181 /* A loop might be infinite (TODO use simple loop analysis | |
2182 to disprove this if possible). */ | |
2183 if (bb->flags & BB_IRREDUCIBLE_LOOP) | |
2184 break; | |
2185 | |
2186 if (!flow_bb_inside_loop_p (inn_loop, bb)) | |
2187 break; | |
2188 | |
2189 if (bb->loop_father->header == bb) | |
2190 { | |
2191 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, bb)) | |
2192 break; | |
2193 | |
2194 /* In a loop that is always entered we may proceed anyway. | |
2195 But record that we entered it and stop once we leave it. */ | |
2196 inn_loop = bb->loop_father; | |
2197 } | |
2198 } | |
2199 | |
2200 while (1) | |
2201 { | |
2202 last->aux = loop; | |
2203 if (last == loop->header) | |
2204 break; | |
2205 last = get_immediate_dominator (CDI_DOMINATORS, last); | |
2206 } | |
2207 | |
2208 free (bbs); | |
2209 } | |
2210 | |
2211 for (loop = loop->inner; loop; loop = loop->next) | |
2212 fill_always_executed_in (loop, contains_call); | |
2213 } | |
2214 | |
2215 /* Compute the global information needed by the loop invariant motion pass. */ | |
2216 | |
2217 static void | |
2218 tree_ssa_lim_initialize (void) | |
2219 { | |
2220 sbitmap contains_call = sbitmap_alloc (last_basic_block); | |
2221 gimple_stmt_iterator bsi; | |
2222 struct loop *loop; | |
2223 basic_block bb; | |
2224 | |
2225 sbitmap_zero (contains_call); | |
2226 FOR_EACH_BB (bb) | |
2227 { | |
2228 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi)) | |
2229 { | |
2230 if (nonpure_call_p (gsi_stmt (bsi))) | |
2231 break; | |
2232 } | |
2233 | |
2234 if (!gsi_end_p (bsi)) | |
2235 SET_BIT (contains_call, bb->index); | |
2236 } | |
2237 | |
2238 for (loop = current_loops->tree_root->inner; loop; loop = loop->next) | |
2239 fill_always_executed_in (loop, contains_call); | |
2240 | |
2241 sbitmap_free (contains_call); | |
2242 | |
2243 lim_aux_data_map = pointer_map_create (); | |
2244 } | |
2245 | |
2246 /* Cleans up after the invariant motion pass. */ | |
2247 | |
2248 static void | |
2249 tree_ssa_lim_finalize (void) | |
2250 { | |
2251 basic_block bb; | |
2252 unsigned i; | |
2253 bitmap b; | |
2254 htab_t h; | |
2255 | |
2256 FOR_EACH_BB (bb) | |
2257 { | |
2258 bb->aux = NULL; | |
2259 } | |
2260 | |
2261 pointer_map_destroy (lim_aux_data_map); | |
2262 | |
2263 VEC_free (mem_ref_p, heap, memory_accesses.refs_list); | |
2264 htab_delete (memory_accesses.refs); | |
2265 | |
2266 for (i = 0; VEC_iterate (bitmap, memory_accesses.refs_in_loop, i, b); i++) | |
2267 BITMAP_FREE (b); | |
2268 VEC_free (bitmap, heap, memory_accesses.refs_in_loop); | |
2269 | |
2270 for (i = 0; VEC_iterate (bitmap, memory_accesses.all_refs_in_loop, i, b); i++) | |
2271 BITMAP_FREE (b); | |
2272 VEC_free (bitmap, heap, memory_accesses.all_refs_in_loop); | |
2273 | |
2274 for (i = 0; VEC_iterate (bitmap, memory_accesses.clobbered_vops, i, b); i++) | |
2275 BITMAP_FREE (b); | |
2276 VEC_free (bitmap, heap, memory_accesses.clobbered_vops); | |
2277 | |
2278 for (i = 0; VEC_iterate (htab_t, memory_accesses.vop_ref_map, i, h); i++) | |
2279 htab_delete (h); | |
2280 VEC_free (htab_t, heap, memory_accesses.vop_ref_map); | |
2281 | |
2282 if (memory_accesses.ttae_cache) | |
2283 pointer_map_destroy (memory_accesses.ttae_cache); | |
2284 } | |
2285 | |
2286 /* Moves invariants from loops. Only "expensive" invariants are moved out -- | |
2287 i.e. those that are likely to be a win regardless of the register pressure. */ | |
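
/* E.g. (an illustration, not part of the original sources):

     for (i = 0; i < n; i++)
       a[i] = x * y;

   becomes

     t = x * y;
     for (i = 0; i < n; i++)
       a[i] = t;

   provided the multiplication is judged expensive enough to deserve a
   register for the whole duration of the loop.  */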
2288 | |
2289 void | |
2290 tree_ssa_lim (void) | |
2291 { | |
2292 tree_ssa_lim_initialize (); | |
2293 | |
2294 /* Gather information about memory accesses in the loops. */ | |
2295 analyze_memory_references (); | |
2296 | |
2297 /* For each statement determine the outermost loop in which it is | |
2298 invariant and the cost of computing the invariant. */ | |
2299 determine_invariantness (); | |
2300 | |
2301 /* Execute store motion. Force the necessary invariants to be moved | |
2302 out of the loops as well. */ | |
2303 store_motion (); | |
2304 | |
2305 /* Move the expressions that are expensive enough. */ | |
2306 move_computations (); | |
2307 | |
2308 tree_ssa_lim_finalize (); | |
2309 } |