comparison gcc/tree-ssa-loop-im.c @ 111:04ced10e8804 (CbC_gcc repository)
gcc 7
author | kono |
---|---|
date | Fri, 27 Oct 2017 22:46:09 +0900 |
parents | f6334be47118 |
children | 84e7813d76e9 |
68:561a7518be6b (old) | 111:04ced10e8804 (new) |
---|---|
1 /* Loop invariant motion. | 1 /* Loop invariant motion. |
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2010 | 2 Copyright (C) 2003-2017 Free Software Foundation, Inc. |
3 Free Software Foundation, Inc. | |
4 | 3 |
5 This file is part of GCC. | 4 This file is part of GCC. |
6 | 5 |
7 GCC is free software; you can redistribute it and/or modify it | 6 GCC is free software; you can redistribute it and/or modify it |
8 under the terms of the GNU General Public License as published by the | 7 under the terms of the GNU General Public License as published by the |
19 <http://www.gnu.org/licenses/>. */ | 18 <http://www.gnu.org/licenses/>. */ |
20 | 19 |
21 #include "config.h" | 20 #include "config.h" |
22 #include "system.h" | 21 #include "system.h" |
23 #include "coretypes.h" | 22 #include "coretypes.h" |
24 #include "tm.h" | 23 #include "backend.h" |
25 #include "tree.h" | 24 #include "tree.h" |
26 #include "tm_p.h" | 25 #include "gimple.h" |
27 #include "basic-block.h" | 26 #include "cfghooks.h" |
28 #include "output.h" | 27 #include "tree-pass.h" |
29 #include "tree-pretty-print.h" | 28 #include "ssa.h" |
30 #include "gimple-pretty-print.h" | 29 #include "gimple-pretty-print.h" |
31 #include "tree-flow.h" | 30 #include "fold-const.h" |
32 #include "tree-dump.h" | 31 #include "cfganal.h" |
33 #include "timevar.h" | 32 #include "tree-eh.h" |
33 #include "gimplify.h" | |
34 #include "gimple-iterator.h" | |
35 #include "tree-cfg.h" | |
36 #include "tree-ssa-loop-manip.h" | |
37 #include "tree-ssa-loop.h" | |
38 #include "tree-into-ssa.h" | |
34 #include "cfgloop.h" | 39 #include "cfgloop.h" |
35 #include "domwalk.h" | 40 #include "domwalk.h" |
36 #include "params.h" | 41 #include "params.h" |
37 #include "tree-pass.h" | |
38 #include "flags.h" | |
39 #include "hashtab.h" | |
40 #include "tree-affine.h" | 42 #include "tree-affine.h" |
41 #include "pointer-set.h" | |
42 #include "tree-ssa-propagate.h" | 43 #include "tree-ssa-propagate.h" |
44 #include "trans-mem.h" | |
45 #include "gimple-fold.h" | |
46 #include "tree-scalar-evolution.h" | |
47 #include "tree-ssa-loop-niter.h" | |
43 | 48 |
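Note on the include churn above: between these revisions GCC flattened its monolithic headers, so tree-flow.h, tm.h and friends disappear and backend.h now aggregates the core CFG/IL headers. A minimal sketch of the include prologue a GIMPLE pass of this era needs, distilled from the new column above (the exact tail varies per pass):

```c
/* Sketch: include prologue for a GIMPLE pass circa GCC 7.
   config.h/system.h/coretypes.h must come first, in this order;
   backend.h replaces the old piecemeal tm.h/basic-block.h/tree-flow.h.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
```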
44 /* TODO: Support for predicated code motion. I.e. | 49 /* TODO: Support for predicated code motion. I.e. |
45 | 50 |
46 while (1) | 51 while (1) |
47 { | 52 { |
50 a = inv; | 55 a = inv; |
51 something; | 56 something; |
52 } | 57 } |
53 } | 58 } |
54 | 59 |
55 Where COND and INV are is invariants, but evaluating INV may trap or be | 60 Where COND and INV are invariants, but evaluating INV may trap or be |
56 invalid from some other reason if !COND. This may be transformed to | 61 invalid from some other reason if !COND. This may be transformed to |
57 | 62 |
58 if (cond) | 63 if (cond) |
59 a = inv; | 64 a = inv; |
60 while (1) | 65 while (1) |
61 { | 66 { |
62 if (cond) | 67 if (cond) |
63 something; | 68 something; |
64 } */ | 69 } */ |
65 | 70 |
66 /* A type for the list of statements that have to be moved in order to be able | |
67 to hoist an invariant computation. */ | |
68 | |
69 struct depend | |
70 { | |
71 gimple stmt; | |
72 struct depend *next; | |
73 }; | |
74 | |
75 /* The auxiliary data kept for each statement. */ | 71 /* The auxiliary data kept for each statement. */ |
76 | 72 |
77 struct lim_aux_data | 73 struct lim_aux_data |
78 { | 74 { |
79 struct loop *max_loop; /* The outermost loop in that the statement | 75 struct loop *max_loop; /* The outermost loop in that the statement |
88 is entered. */ | 84 is entered. */ |
89 | 85 |
90 unsigned cost; /* Cost of the computation performed by the | 86 unsigned cost; /* Cost of the computation performed by the |
91 statement. */ | 87 statement. */ |
92 | 88 |
93 struct depend *depends; /* List of statements that must be also hoisted | 89 unsigned ref; /* The simple_mem_ref in this stmt or 0. */ |
94 out of the loop when this statement is | 90 |
95 hoisted; i.e. those that define the operands | 91 vec<gimple *> depends; /* Vector of statements that must be also |
96 of the statement and are inside of the | 92 hoisted out of the loop when this statement |
97 MAX_LOOP loop. */ | 93 is hoisted; i.e. those that define the |
94 operands of the statement and are inside of | |
95 the MAX_LOOP loop. */ | |
98 }; | 96 }; |
99 | 97 |
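The hand-rolled `struct depend` linked list above is replaced by GCC's `vec<gimple *>`. A minimal sketch of the vec idioms the rewritten pass relies on (`handle` is a hypothetical consumer, not from the patch):

```c
/* Sketch: vec<> replacing the depend list (vec.h, GCC 7).  */
vec<gimple *> depends = vNULL;     /* empty; allocates lazily */
depends.safe_push (def_stmt);      /* append, growing as needed */

unsigned i;
gimple *dep_stmt;
FOR_EACH_VEC_ELT (depends, i, dep_stmt)   /* iterate in push order */
  handle (dep_stmt);                      /* hypothetical consumer */

depends.release ();                /* free the backing storage */
```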
100 /* Maps statements to their lim_aux_data. */ | 98 /* Maps statements to their lim_aux_data. */ |
101 | 99 |
102 static struct pointer_map_t *lim_aux_data_map; | 100 static hash_map<gimple *, lim_aux_data *> *lim_aux_data_map; |
103 | 101 |
104 /* Description of a memory reference location. */ | 102 /* Description of a memory reference location. */ |
105 | 103 |
106 typedef struct mem_ref_loc | 104 struct mem_ref_loc |
107 { | 105 { |
108 tree *ref; /* The reference itself. */ | 106 tree *ref; /* The reference itself. */ |
109 gimple stmt; /* The statement in that it occurs. */ | 107 gimple *stmt; /* The statement in that it occurs. */ |
110 } *mem_ref_loc_p; | 108 }; |
111 | 109 |
112 DEF_VEC_P(mem_ref_loc_p); | |
113 DEF_VEC_ALLOC_P(mem_ref_loc_p, heap); | |
114 | |
115 /* The list of memory reference locations in a loop. */ | |
116 | |
117 typedef struct mem_ref_locs | |
118 { | |
119 VEC (mem_ref_loc_p, heap) *locs; | |
120 } *mem_ref_locs_p; | |
121 | |
122 DEF_VEC_P(mem_ref_locs_p); | |
123 DEF_VEC_ALLOC_P(mem_ref_locs_p, heap); | |
124 | 110 |
125 /* Description of a memory reference. */ | 111 /* Description of a memory reference. */ |
126 | 112 |
127 typedef struct mem_ref | 113 struct im_mem_ref |
128 { | 114 { |
129 tree mem; /* The memory itself. */ | |
130 unsigned id; /* ID assigned to the memory reference | 115 unsigned id; /* ID assigned to the memory reference |
131 (its index in memory_accesses.refs_list) */ | 116 (its index in memory_accesses.refs_list) */ |
132 hashval_t hash; /* Its hash value. */ | 117 hashval_t hash; /* Its hash value. */ |
118 | |
119 /* The memory access itself and associated caching of alias-oracle | |
120 query meta-data. */ | |
121 ao_ref mem; | |
122 | |
133 bitmap stored; /* The set of loops in that this memory location | 123 bitmap stored; /* The set of loops in that this memory location |
134 is stored to. */ | 124 is stored to. */ |
135 VEC (mem_ref_locs_p, heap) *accesses_in_loop; | 125 vec<mem_ref_loc> accesses_in_loop; |
136 /* The locations of the accesses. Vector | 126 /* The locations of the accesses. Vector |
137 indexed by the loop number. */ | 127 indexed by the loop number. */ |
138 bitmap vops; /* Vops corresponding to this memory | |
139 location. */ | |
140 | 128 |
141 /* The following sets are computed on demand. We keep both set and | 129 /* The following sets are computed on demand. We keep both set and |
142 its complement, so that we know whether the information was | 130 its complement, so that we know whether the information was |
143 already computed or not. */ | 131 already computed or not. */ |
144 bitmap indep_loop; /* The set of loops in that the memory | 132 bitmap_head indep_loop; /* The set of loops in that the memory |
145 reference is independent, meaning: | 133 reference is independent, meaning: |
146 If it is stored in the loop, this store | 134 If it is stored in the loop, this store |
147 is independent on all other loads and | 135 is independent on all other loads and |
148 stores. | 136 stores. |
149 If it is only loaded, then it is independent | 137 If it is only loaded, then it is independent |
150 on all stores in the loop. */ | 138 on all stores in the loop. */ |
151 bitmap dep_loop; /* The complement of INDEP_LOOP. */ | 139 bitmap_head dep_loop; /* The complement of INDEP_LOOP. */ |
152 | 140 }; |
153 bitmap indep_ref; /* The set of memory references on that | 141 |
154 this reference is independent. */ | 142 /* We use two bits per loop in the ref->{in,}dep_loop bitmaps, the first |
155 bitmap dep_ref; /* The complement of DEP_REF. */ | 143 to record (in)dependence against stores in the loop and its subloops, the |
156 } *mem_ref_p; | 144 second to record (in)dependence against all references in the loop |
157 | 145 and its subloops. */ |
158 DEF_VEC_P(mem_ref_p); | 146 #define LOOP_DEP_BIT(loopnum, storedp) (2 * (loopnum) + (storedp ? 1 : 0)) |
159 DEF_VEC_ALLOC_P(mem_ref_p, heap); | 147 |
160 | 148 /* Mem_ref hashtable helpers. */ |
161 DEF_VEC_P(bitmap); | 149 |
162 DEF_VEC_ALLOC_P(bitmap, heap); | 150 struct mem_ref_hasher : nofree_ptr_hash <im_mem_ref> |
163 | 151 { |
164 DEF_VEC_P(htab_t); | 152 typedef tree_node *compare_type; |
165 DEF_VEC_ALLOC_P(htab_t, heap); | 153 static inline hashval_t hash (const im_mem_ref *); |
154 static inline bool equal (const im_mem_ref *, const tree_node *); | |
155 }; | |
156 | |
157 /* A hash function for struct im_mem_ref object OBJ. */ | |
158 | |
159 inline hashval_t | |
160 mem_ref_hasher::hash (const im_mem_ref *mem) | |
161 { | |
162 return mem->hash; | |
163 } | |
164 | |
165 /* An equality function for struct im_mem_ref object MEM1 with | |
166 memory reference OBJ2. */ | |
167 | |
168 inline bool | |
169 mem_ref_hasher::equal (const im_mem_ref *mem1, const tree_node *obj2) | |
170 { | |
171 return operand_equal_p (mem1->mem.ref, (const_tree) obj2, 0); | |
172 } | |
173 | |
166 | 174 |
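The runtime-callback `htab_t` becomes a `hash_table` parameterized by the static trait class above; `nofree_ptr_hash` supplies pointer hashing and storage handling, while the overridden `hash`/`equal` let lookups be keyed by a `tree` instead of an `im_mem_ref *` (note `compare_type`). A sketch of a lookup against such a table (`lookup_mem_ref` is a hypothetical helper mirroring uses further down in the pass):

```c
/* Sketch: querying a trait-based hash_table keyed by the ref tree.  */
static im_mem_ref *
lookup_mem_ref (hash_table<mem_ref_hasher> *refs, tree mem)
{
  hashval_t hash = iterative_hash_expr (mem, 0);
  /* find_with_hash only looks up; find_slot_with_hash (mem, hash,
     INSERT) would additionally create a slot on a miss.  */
  return refs->find_with_hash (mem, hash);
}
```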
167 /* Description of memory accesses in loops. */ | 175 /* Description of memory accesses in loops. */ |
168 | 176 |
169 static struct | 177 static struct |
170 { | 178 { |
171 /* The hash table of memory references accessed in loops. */ | 179 /* The hash table of memory references accessed in loops. */ |
172 htab_t refs; | 180 hash_table<mem_ref_hasher> *refs; |
173 | 181 |
174 /* The list of memory references. */ | 182 /* The list of memory references. */ |
175 VEC (mem_ref_p, heap) *refs_list; | 183 vec<im_mem_ref *> refs_list; |
176 | 184 |
177 /* The set of memory references accessed in each loop. */ | 185 /* The set of memory references accessed in each loop. */ |
178 VEC (bitmap, heap) *refs_in_loop; | 186 vec<bitmap_head> refs_in_loop; |
179 | 187 |
180 /* The set of memory references accessed in each loop, including | 188 /* The set of memory references stored in each loop. */ |
181 subloops. */ | 189 vec<bitmap_head> refs_stored_in_loop; |
182 VEC (bitmap, heap) *all_refs_in_loop; | 190 |
183 | 191 /* The set of memory references stored in each loop, including subloops . */ |
184 /* The set of virtual operands clobbered in a given loop. */ | 192 vec<bitmap_head> all_refs_stored_in_loop; |
185 VEC (bitmap, heap) *clobbered_vops; | |
186 | |
187 /* Map from the pair (loop, virtual operand) to the set of refs that | |
188 touch the virtual operand in the loop. */ | |
189 VEC (htab_t, heap) *vop_ref_map; | |
190 | 193 |
191 /* Cache for expanding memory addresses. */ | 194 /* Cache for expanding memory addresses. */ |
192 struct pointer_map_t *ttae_cache; | 195 hash_map<tree, name_expansion *> *ttae_cache; |
193 } memory_accesses; | 196 } memory_accesses; |
194 | 197 |
195 static bool ref_indep_loop_p (struct loop *, mem_ref_p); | 198 /* Obstack for the bitmaps in the above data structures. */ |
199 static bitmap_obstack lim_bitmap_obstack; | |
200 static obstack mem_ref_obstack; | |
201 | |
202 static bool ref_indep_loop_p (struct loop *, im_mem_ref *, struct loop *); | |
203 static bool ref_always_accessed_p (struct loop *, im_mem_ref *, bool); | |
196 | 204 |
197 /* Minimum cost of an expensive expression. */ | 205 /* Minimum cost of an expensive expression. */ |
198 #define LIM_EXPENSIVE ((unsigned) PARAM_VALUE (PARAM_LIM_EXPENSIVE)) | 206 #define LIM_EXPENSIVE ((unsigned) PARAM_VALUE (PARAM_LIM_EXPENSIVE)) |
199 | 207 |
200 /* The outermost loop for that execution of the header guarantees that the | 208 /* The outermost loop for which execution of the header guarantees that the |
201 block will be executed. */ | 209 block will be executed. */ |
202 #define ALWAYS_EXECUTED_IN(BB) ((struct loop *) (BB)->aux) | 210 #define ALWAYS_EXECUTED_IN(BB) ((struct loop *) (BB)->aux) |
211 #define SET_ALWAYS_EXECUTED_IN(BB, VAL) ((BB)->aux = (void *) (VAL)) | |
212 | |
213 /* ID of the shared unanalyzable mem. */ | |
214 #define UNANALYZABLE_MEM_ID 0 | |
215 | |
216 /* Whether the reference was analyzable. */ | |
217 #define MEM_ANALYZABLE(REF) ((REF)->id != UNANALYZABLE_MEM_ID) | |
203 | 218 |
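The on-demand dependence sets also changed representation: `bitmap` pointers became embedded `bitmap_head`s indexed through `LOOP_DEP_BIT`, which packs two bits per loop, and ID 0 is reserved as the shared sentinel for unanalyzable references. A worked example of the encoding (loop number 3 is illustrative):

```c
/* LOOP_DEP_BIT (N, storedp) == 2*N + storedp, so loop 3 owns bits 6/7.  */
unsigned b_all    = LOOP_DEP_BIT (3, false);   /* == 6 */
unsigned b_stores = LOOP_DEP_BIT (3, true);    /* == 7 */

if (MEM_ANALYZABLE (ref)    /* i.e. ref->id != UNANALYZABLE_MEM_ID */
    && bitmap_bit_p (&ref->indep_loop, b_stores))
  ;  /* the cached "independent of stores in loop 3" bit is set */
```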
204 static struct lim_aux_data * | 219 static struct lim_aux_data * |
205 init_lim_data (gimple stmt) | 220 init_lim_data (gimple *stmt) |
206 { | 221 { |
207 void **p = pointer_map_insert (lim_aux_data_map, stmt); | 222 lim_aux_data *p = XCNEW (struct lim_aux_data); |
208 | 223 lim_aux_data_map->put (stmt, p); |
209 *p = XCNEW (struct lim_aux_data); | 224 |
210 return (struct lim_aux_data *) *p; | 225 return p; |
211 } | 226 } |
212 | 227 |
213 static struct lim_aux_data * | 228 static struct lim_aux_data * |
214 get_lim_data (gimple stmt) | 229 get_lim_data (gimple *stmt) |
215 { | 230 { |
216 void **p = pointer_map_contains (lim_aux_data_map, stmt); | 231 lim_aux_data **p = lim_aux_data_map->get (stmt); |
217 if (!p) | 232 if (!p) |
218 return NULL; | 233 return NULL; |
219 | 234 |
220 return (struct lim_aux_data *) *p; | 235 return *p; |
221 } | 236 } |
222 | 237 |
223 /* Releases the memory occupied by DATA. */ | 238 /* Releases the memory occupied by DATA. */ |
224 | 239 |
225 static void | 240 static void |
226 free_lim_aux_data (struct lim_aux_data *data) | 241 free_lim_aux_data (struct lim_aux_data *data) |
227 { | 242 { |
228 struct depend *dep, *next; | 243 data->depends.release (); |
229 | |
230 for (dep = data->depends; dep; dep = next) | |
231 { | |
232 next = dep->next; | |
233 free (dep); | |
234 } | |
235 free (data); | 244 free (data); |
236 } | 245 } |
237 | 246 |
238 static void | 247 static void |
239 clear_lim_data (gimple stmt) | 248 clear_lim_data (gimple *stmt) |
240 { | 249 { |
241 void **p = pointer_map_contains (lim_aux_data_map, stmt); | 250 lim_aux_data **p = lim_aux_data_map->get (stmt); |
242 if (!p) | 251 if (!p) |
243 return; | 252 return; |
244 | 253 |
245 free_lim_aux_data ((struct lim_aux_data *) *p); | 254 free_lim_aux_data (*p); |
246 *p = NULL; | 255 *p = NULL; |
247 } | 256 } |
248 | 257 |
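The `void **` slot dance of `pointer_map_insert`/`pointer_map_contains` becomes a typed `hash_map` whose `get` returns a pointer to the stored value (or NULL), so the casts disappear. A sketch of the full lifecycle, including the allocation and teardown that happen elsewhere in the pass (assumed here, not shown in this hunk):

```c
/* Sketch: typed hash_map replacing pointer_map (hash-map.h, GCC 7).  */
hash_map<gimple *, lim_aux_data *> *map
  = new hash_map<gimple *, lim_aux_data *>;

map->put (stmt, data);                  /* insert or overwrite */
lim_aux_data **slot = map->get (stmt);  /* NULL if STMT is absent */
if (slot)
  free_lim_aux_data (*slot);

delete map;   /* frees the table, not the pointed-to values */
```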
249 /* Calls CBCK for each index in memory reference ADDR_P. There are two | 258 |
250 kinds situations handled; in each of these cases, the memory reference | 259 /* The possibilities of statement movement. */ |
251 and DATA are passed to the callback: | 260 enum move_pos |
252 | 261 { |
253 Access to an array: ARRAY_{RANGE_}REF (base, index). In this case we also | 262 MOVE_IMPOSSIBLE, /* No movement -- side effect expression. */ |
254 pass the pointer to the index to the callback. | 263 MOVE_PRESERVE_EXECUTION, /* Must not cause the non-executed statement |
255 | 264 become executed -- memory accesses, ... */ |
256 Pointer dereference: INDIRECT_REF (addr). In this case we also pass the | 265 MOVE_POSSIBLE /* Unlimited movement. */ |
257 pointer to addr to the callback. | 266 }; |
258 | 267 |
259 If the callback returns false, the whole search stops and false is returned. | |
260 Otherwise the function returns true after traversing through the whole | |
261 reference *ADDR_P. */ | |
262 | |
263 bool | |
264 for_each_index (tree *addr_p, bool (*cbck) (tree, tree *, void *), void *data) | |
265 { | |
266 tree *nxt, *idx; | |
267 | |
268 for (; ; addr_p = nxt) | |
269 { | |
270 switch (TREE_CODE (*addr_p)) | |
271 { | |
272 case SSA_NAME: | |
273 return cbck (*addr_p, addr_p, data); | |
274 | |
275 case MEM_REF: | |
276 nxt = &TREE_OPERAND (*addr_p, 0); | |
277 return cbck (*addr_p, nxt, data); | |
278 | |
279 case BIT_FIELD_REF: | |
280 case VIEW_CONVERT_EXPR: | |
281 case REALPART_EXPR: | |
282 case IMAGPART_EXPR: | |
283 nxt = &TREE_OPERAND (*addr_p, 0); | |
284 break; | |
285 | |
286 case COMPONENT_REF: | |
287 /* If the component has varying offset, it behaves like index | |
288 as well. */ | |
289 idx = &TREE_OPERAND (*addr_p, 2); | |
290 if (*idx | |
291 && !cbck (*addr_p, idx, data)) | |
292 return false; | |
293 | |
294 nxt = &TREE_OPERAND (*addr_p, 0); | |
295 break; | |
296 | |
297 case ARRAY_REF: | |
298 case ARRAY_RANGE_REF: | |
299 nxt = &TREE_OPERAND (*addr_p, 0); | |
300 if (!cbck (*addr_p, &TREE_OPERAND (*addr_p, 1), data)) | |
301 return false; | |
302 break; | |
303 | |
304 case VAR_DECL: | |
305 case PARM_DECL: | |
306 case STRING_CST: | |
307 case RESULT_DECL: | |
308 case VECTOR_CST: | |
309 case COMPLEX_CST: | |
310 case INTEGER_CST: | |
311 case REAL_CST: | |
312 case FIXED_CST: | |
313 case CONSTRUCTOR: | |
314 return true; | |
315 | |
316 case ADDR_EXPR: | |
317 gcc_assert (is_gimple_min_invariant (*addr_p)); | |
318 return true; | |
319 | |
320 case TARGET_MEM_REF: | |
321 idx = &TMR_BASE (*addr_p); | |
322 if (*idx | |
323 && !cbck (*addr_p, idx, data)) | |
324 return false; | |
325 idx = &TMR_INDEX (*addr_p); | |
326 if (*idx | |
327 && !cbck (*addr_p, idx, data)) | |
328 return false; | |
329 idx = &TMR_INDEX2 (*addr_p); | |
330 if (*idx | |
331 && !cbck (*addr_p, idx, data)) | |
332 return false; | |
333 return true; | |
334 | |
335 default: | |
336 gcc_unreachable (); | |
337 } | |
338 } | |
339 } | |
340 | 268 |
341 /* If it is possible to hoist the statement STMT unconditionally, | 269 /* If it is possible to hoist the statement STMT unconditionally, |
342 returns MOVE_POSSIBLE. | 270 returns MOVE_POSSIBLE. |
343 If it is possible to hoist the statement STMT, but we must avoid making | 271 If it is possible to hoist the statement STMT, but we must avoid making |
344 it executed if it would not be executed in the original program (e.g. | 272 it executed if it would not be executed in the original program (e.g. |
345 because it may trap), return MOVE_PRESERVE_EXECUTION. | 273 because it may trap), return MOVE_PRESERVE_EXECUTION. |
346 Otherwise return MOVE_IMPOSSIBLE. */ | 274 Otherwise return MOVE_IMPOSSIBLE. */ |
347 | 275 |
348 enum move_pos | 276 enum move_pos |
349 movement_possibility (gimple stmt) | 277 movement_possibility (gimple *stmt) |
350 { | 278 { |
351 tree lhs; | 279 tree lhs; |
352 enum move_pos ret = MOVE_POSSIBLE; | 280 enum move_pos ret = MOVE_POSSIBLE; |
353 | 281 |
354 if (flag_unswitch_loops | 282 if (flag_unswitch_loops |
359 return MOVE_POSSIBLE; | 287 return MOVE_POSSIBLE; |
360 } | 288 } |
361 | 289 |
362 if (gimple_code (stmt) == GIMPLE_PHI | 290 if (gimple_code (stmt) == GIMPLE_PHI |
363 && gimple_phi_num_args (stmt) <= 2 | 291 && gimple_phi_num_args (stmt) <= 2 |
364 && is_gimple_reg (gimple_phi_result (stmt)) | 292 && !virtual_operand_p (gimple_phi_result (stmt)) |
365 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_phi_result (stmt))) | 293 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_phi_result (stmt))) |
366 return MOVE_POSSIBLE; | 294 return MOVE_POSSIBLE; |
367 | 295 |
368 if (gimple_get_lhs (stmt) == NULL_TREE) | 296 if (gimple_get_lhs (stmt) == NULL_TREE) |
369 return MOVE_IMPOSSIBLE; | 297 return MOVE_IMPOSSIBLE; |
411 | 339 |
412 if (TREE_CODE (lhs) != SSA_NAME | 340 if (TREE_CODE (lhs) != SSA_NAME |
413 || gimple_could_trap_p (stmt)) | 341 || gimple_could_trap_p (stmt)) |
414 return MOVE_PRESERVE_EXECUTION; | 342 return MOVE_PRESERVE_EXECUTION; |
415 | 343 |
344 /* Non local loads in a transaction cannot be hoisted out. Well, | |
345 unless the load happens on every path out of the loop, but we | |
346 don't take this into account yet. */ | |
347 if (flag_tm | |
348 && gimple_in_transaction (stmt) | |
349 && gimple_assign_single_p (stmt)) | |
350 { | |
351 tree rhs = gimple_assign_rhs1 (stmt); | |
352 if (DECL_P (rhs) && is_global_var (rhs)) | |
353 { | |
354 if (dump_file) | |
355 { | |
356 fprintf (dump_file, "Cannot hoist conditional load of "); | |
357 print_generic_expr (dump_file, rhs, TDF_SLIM); | |
358 fprintf (dump_file, " because it is in a transaction.\n"); | |
359 } | |
360 return MOVE_IMPOSSIBLE; | |
361 } | |
362 } | |
363 | |
416 return ret; | 364 return ret; |
417 } | 365 } |
418 | 366 |
419 /* Suppose that operand DEF is used inside the LOOP. Returns the outermost | 367 /* Suppose that operand DEF is used inside the LOOP. Returns the outermost |
420 loop to that we could move the expression using DEF if it did not have | 368 loop to that we could move the expression using DEF if it did not have |
422 of DEF is invariant. */ | 370 of DEF is invariant. */ |
423 | 371 |
424 static struct loop * | 372 static struct loop * |
425 outermost_invariant_loop (tree def, struct loop *loop) | 373 outermost_invariant_loop (tree def, struct loop *loop) |
426 { | 374 { |
427 gimple def_stmt; | 375 gimple *def_stmt; |
428 basic_block def_bb; | 376 basic_block def_bb; |
429 struct loop *max_loop; | 377 struct loop *max_loop; |
430 struct lim_aux_data *lim_data; | 378 struct lim_aux_data *lim_data; |
431 | 379 |
432 if (!def) | 380 if (!def) |
470 | 418 |
471 static bool | 419 static bool |
472 add_dependency (tree def, struct lim_aux_data *data, struct loop *loop, | 420 add_dependency (tree def, struct lim_aux_data *data, struct loop *loop, |
473 bool add_cost) | 421 bool add_cost) |
474 { | 422 { |
475 gimple def_stmt = SSA_NAME_DEF_STMT (def); | 423 gimple *def_stmt = SSA_NAME_DEF_STMT (def); |
476 basic_block def_bb = gimple_bb (def_stmt); | 424 basic_block def_bb = gimple_bb (def_stmt); |
477 struct loop *max_loop; | 425 struct loop *max_loop; |
478 struct depend *dep; | |
479 struct lim_aux_data *def_data; | 426 struct lim_aux_data *def_data; |
480 | 427 |
481 if (!def_bb) | 428 if (!def_bb) |
482 return true; | 429 return true; |
483 | 430 |
498 on it, we will be able to avoid creating a new register for | 445 on it, we will be able to avoid creating a new register for |
499 it (since it will be only used in these dependent invariants). */ | 446 it (since it will be only used in these dependent invariants). */ |
500 && def_bb->loop_father == loop) | 447 && def_bb->loop_father == loop) |
501 data->cost += def_data->cost; | 448 data->cost += def_data->cost; |
502 | 449 |
503 dep = XNEW (struct depend); | 450 data->depends.safe_push (def_stmt); |
504 dep->stmt = def_stmt; | |
505 dep->next = data->depends; | |
506 data->depends = dep; | |
507 | 451 |
508 return true; | 452 return true; |
509 } | 453 } |
510 | 454 |
511 /* Returns an estimate for a cost of statement STMT. TODO -- the values here | 455 /* Returns an estimate for a cost of statement STMT. The values here |
512 are just ad-hoc constants. The estimates should be based on target-specific | 456 are just ad-hoc constants, similar to costs for inlining. */ |
513 values. */ | |
514 | 457 |
515 static unsigned | 458 static unsigned |
516 stmt_cost (gimple stmt) | 459 stmt_cost (gimple *stmt) |
517 { | 460 { |
518 tree fndecl; | |
519 unsigned cost = 1; | |
520 | |
521 /* Always try to create possibilities for unswitching. */ | 461 /* Always try to create possibilities for unswitching. */ |
522 if (gimple_code (stmt) == GIMPLE_COND | 462 if (gimple_code (stmt) == GIMPLE_COND |
523 || gimple_code (stmt) == GIMPLE_PHI) | 463 || gimple_code (stmt) == GIMPLE_PHI) |
524 return LIM_EXPENSIVE; | 464 return LIM_EXPENSIVE; |
525 | 465 |
526 /* Hoisting memory references out should almost surely be a win. */ | 466 /* We should be hoisting calls if possible. */ |
527 if (gimple_references_memory_p (stmt)) | |
528 cost += 20; | |
529 | |
530 if (is_gimple_call (stmt)) | 467 if (is_gimple_call (stmt)) |
531 { | 468 { |
532 /* We should be hoisting calls if possible. */ | 469 tree fndecl; |
533 | 470 |
534 /* Unless the call is a builtin_constant_p; this always folds to a | 471 /* Unless the call is a builtin_constant_p; this always folds to a |
535 constant, so moving it is useless. */ | 472 constant, so moving it is useless. */ |
536 fndecl = gimple_call_fndecl (stmt); | 473 fndecl = gimple_call_fndecl (stmt); |
537 if (fndecl | 474 if (fndecl |
538 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL | 475 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL |
539 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CONSTANT_P) | 476 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CONSTANT_P) |
540 return 0; | 477 return 0; |
541 | 478 |
542 return cost + 20; | 479 return LIM_EXPENSIVE; |
543 } | 480 } |
481 | |
482 /* Hoisting memory references out should almost surely be a win. */ | |
483 if (gimple_references_memory_p (stmt)) | |
484 return LIM_EXPENSIVE; | |
544 | 485 |
545 if (gimple_code (stmt) != GIMPLE_ASSIGN) | 486 if (gimple_code (stmt) != GIMPLE_ASSIGN) |
546 return cost; | 487 return 1; |
547 | 488 |
548 switch (gimple_assign_rhs_code (stmt)) | 489 switch (gimple_assign_rhs_code (stmt)) |
549 { | 490 { |
550 case MULT_EXPR: | 491 case MULT_EXPR: |
492 case WIDEN_MULT_EXPR: | |
493 case WIDEN_MULT_PLUS_EXPR: | |
494 case WIDEN_MULT_MINUS_EXPR: | |
495 case DOT_PROD_EXPR: | |
496 case FMA_EXPR: | |
551 case TRUNC_DIV_EXPR: | 497 case TRUNC_DIV_EXPR: |
552 case CEIL_DIV_EXPR: | 498 case CEIL_DIV_EXPR: |
553 case FLOOR_DIV_EXPR: | 499 case FLOOR_DIV_EXPR: |
554 case ROUND_DIV_EXPR: | 500 case ROUND_DIV_EXPR: |
555 case EXACT_DIV_EXPR: | 501 case EXACT_DIV_EXPR: |
557 case FLOOR_MOD_EXPR: | 503 case FLOOR_MOD_EXPR: |
558 case ROUND_MOD_EXPR: | 504 case ROUND_MOD_EXPR: |
559 case TRUNC_MOD_EXPR: | 505 case TRUNC_MOD_EXPR: |
560 case RDIV_EXPR: | 506 case RDIV_EXPR: |
561 /* Division and multiplication are usually expensive. */ | 507 /* Division and multiplication are usually expensive. */ |
562 cost += 20; | 508 return LIM_EXPENSIVE; |
563 break; | |
564 | 509 |
565 case LSHIFT_EXPR: | 510 case LSHIFT_EXPR: |
566 case RSHIFT_EXPR: | 511 case RSHIFT_EXPR: |
567 cost += 20; | 512 case WIDEN_LSHIFT_EXPR: |
568 break; | 513 case LROTATE_EXPR: |
514 case RROTATE_EXPR: | |
515 /* Shifts and rotates are usually expensive. */ | |
516 return LIM_EXPENSIVE; | |
517 | |
518 case CONSTRUCTOR: | |
519 /* Make vector construction cost proportional to the number | |
520 of elements. */ | |
521 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt)); | |
522 | |
523 case SSA_NAME: | |
524 case PAREN_EXPR: | |
525 /* Whether or not something is wrapped inside a PAREN_EXPR | |
526 should not change move cost. Nor should an intermediate | |
527 unpropagated SSA name copy. */ | |
528 return 0; | |
569 | 529 |
570 default: | 530 default: |
571 break; | 531 return 1; |
572 } | 532 } |
573 | |
574 return cost; | |
575 } | 533 } |
576 | 534 |
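The cost model is also simplified: rather than accumulating ad-hoc `+= 20` increments, whole statement classes now return `LIM_EXPENSIVE` outright (the `lim-expensive` param; its default has historically been 20). Worked examples of what the rewritten `stmt_cost` yields, assuming that default:

```c
/* stmt_cost results under --param lim-expensive=20 (sketch):
     a = b * c;         -> 20  MULT_EXPR hits LIM_EXPENSIVE
     a = b + c;         -> 1   default case
     v = {x, y, z, w};  -> 4   CONSTRUCTOR_NELTS of the rhs
     a = b;             -> 0   plain SSA_NAME copy is free to move
     a = *p;            -> 20  references memory  */
```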
577 /* Finds the outermost loop between OUTER and LOOP in that the memory reference | 535 /* Finds the outermost loop between OUTER and LOOP in that the memory reference |
578 REF is independent. If REF is not independent in LOOP, NULL is returned | 536 REF is independent. If REF is not independent in LOOP, NULL is returned |
579 instead. */ | 537 instead. */ |
580 | 538 |
581 static struct loop * | 539 static struct loop * |
582 outermost_indep_loop (struct loop *outer, struct loop *loop, mem_ref_p ref) | 540 outermost_indep_loop (struct loop *outer, struct loop *loop, im_mem_ref *ref) |
583 { | 541 { |
584 struct loop *aloop; | 542 struct loop *aloop; |
585 | 543 |
586 if (bitmap_bit_p (ref->stored, loop->num)) | 544 if (ref->stored && bitmap_bit_p (ref->stored, loop->num)) |
587 return NULL; | 545 return NULL; |
588 | 546 |
589 for (aloop = outer; | 547 for (aloop = outer; |
590 aloop != loop; | 548 aloop != loop; |
591 aloop = superloop_at_depth (loop, loop_depth (aloop) + 1)) | 549 aloop = superloop_at_depth (loop, loop_depth (aloop) + 1)) |
592 if (!bitmap_bit_p (ref->stored, aloop->num) | 550 if ((!ref->stored || !bitmap_bit_p (ref->stored, aloop->num)) |
593 && ref_indep_loop_p (aloop, ref)) | 551 && ref_indep_loop_p (aloop, ref, loop)) |
594 return aloop; | 552 return aloop; |
595 | 553 |
596 if (ref_indep_loop_p (loop, ref)) | 554 if (ref_indep_loop_p (loop, ref, loop)) |
597 return loop; | 555 return loop; |
598 else | 556 else |
599 return NULL; | 557 return NULL; |
600 } | 558 } |
601 | 559 |
602 /* If there is a simple load or store to a memory reference in STMT, returns | 560 /* If there is a simple load or store to a memory reference in STMT, returns |
603 the location of the memory reference, and sets IS_STORE according to whether | 561 the location of the memory reference, and sets IS_STORE according to whether |
604 it is a store or load. Otherwise, returns NULL. */ | 562 it is a store or load. Otherwise, returns NULL. */ |
605 | 563 |
606 static tree * | 564 static tree * |
607 simple_mem_ref_in_stmt (gimple stmt, bool *is_store) | 565 simple_mem_ref_in_stmt (gimple *stmt, bool *is_store) |
608 { | 566 { |
609 tree *lhs; | 567 tree *lhs, *rhs; |
610 enum tree_code code; | 568 |
611 | 569 /* Recognize SSA_NAME = MEM and MEM = (SSA_NAME | invariant) patterns. */ |
612 /* Recognize MEM = (SSA_NAME | invariant) and SSA_NAME = MEM patterns. */ | 570 if (!gimple_assign_single_p (stmt)) |
613 if (gimple_code (stmt) != GIMPLE_ASSIGN) | |
614 return NULL; | 571 return NULL; |
615 | 572 |
616 code = gimple_assign_rhs_code (stmt); | |
617 | |
618 lhs = gimple_assign_lhs_ptr (stmt); | 573 lhs = gimple_assign_lhs_ptr (stmt); |
619 | 574 rhs = gimple_assign_rhs1_ptr (stmt); |
620 if (TREE_CODE (*lhs) == SSA_NAME) | 575 |
621 { | 576 if (TREE_CODE (*lhs) == SSA_NAME && gimple_vuse (stmt)) |
622 if (get_gimple_rhs_class (code) != GIMPLE_SINGLE_RHS | 577 { |
623 || !is_gimple_addressable (gimple_assign_rhs1 (stmt))) | |
624 return NULL; | |
625 | |
626 *is_store = false; | 578 *is_store = false; |
627 return gimple_assign_rhs1_ptr (stmt); | 579 return rhs; |
628 } | 580 } |
629 else if (code == SSA_NAME | 581 else if (gimple_vdef (stmt) |
630 || (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS | 582 && (TREE_CODE (*rhs) == SSA_NAME || is_gimple_min_invariant (*rhs))) |
631 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))) | |
632 { | 583 { |
633 *is_store = true; | 584 *is_store = true; |
634 return lhs; | 585 return lhs; |
635 } | 586 } |
636 else | 587 else |
637 return NULL; | 588 return NULL; |
638 } | |
639 | |
640 /* Returns the memory reference contained in STMT. */ | |
641 | |
642 static mem_ref_p | |
643 mem_ref_in_stmt (gimple stmt) | |
644 { | |
645 bool store; | |
646 tree *mem = simple_mem_ref_in_stmt (stmt, &store); | |
647 hashval_t hash; | |
648 mem_ref_p ref; | |
649 | |
650 if (!mem) | |
651 return NULL; | |
652 gcc_assert (!store); | |
653 | |
654 hash = iterative_hash_expr (*mem, 0); | |
655 ref = (mem_ref_p) htab_find_with_hash (memory_accesses.refs, *mem, hash); | |
656 | |
657 gcc_assert (ref != NULL); | |
658 return ref; | |
659 } | 589 } |
660 | 590 |
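`simple_mem_ref_in_stmt` now classifies via virtual operands instead of rhs-class checks: a `vuse` on a single assignment with an SSA lhs means a load, a `vdef` means a store. `mem_ref_in_stmt` disappears because the ref's id is now cached in `lim_aux_data::ref`. A condensed sketch of the new classification (`classify_ref` is hypothetical; assumes `gimple_assign_single_p (stmt)` already holds):

```c
static bool
classify_ref (gimple *stmt, bool *is_store)
{
  if (TREE_CODE (gimple_assign_lhs (stmt)) == SSA_NAME
      && gimple_vuse (stmt))
    {
      *is_store = false;            /* SSA_NAME = MEM: a load */
      return true;
    }
  if (gimple_vdef (stmt))
    {
      *is_store = true;             /* MEM = value: a store */
      return true;
    }
  return false;                     /* no memory access involved */
}
```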
661 /* From a controlling predicate in DOM determine the arguments from | 591 /* From a controlling predicate in DOM determine the arguments from |
662 the PHI node PHI that are chosen if the predicate evaluates to | 592 the PHI node PHI that are chosen if the predicate evaluates to |
663 true and false and store them to *TRUE_ARG_P and *FALSE_ARG_P if | 593 true and false and store them to *TRUE_ARG_P and *FALSE_ARG_P if |
664 they are non-NULL. Returns true if the arguments can be determined, | 594 they are non-NULL. Returns true if the arguments can be determined, |
665 else return false. */ | 595 else return false. */ |
666 | 596 |
667 static bool | 597 static bool |
668 extract_true_false_args_from_phi (basic_block dom, gimple phi, | 598 extract_true_false_args_from_phi (basic_block dom, gphi *phi, |
669 tree *true_arg_p, tree *false_arg_p) | 599 tree *true_arg_p, tree *false_arg_p) |
670 { | 600 { |
671 basic_block bb = gimple_bb (phi); | 601 edge te, fe; |
672 edge true_edge, false_edge, tem; | 602 if (! extract_true_false_controlled_edges (dom, gimple_bb (phi), |
673 tree arg0 = NULL_TREE, arg1 = NULL_TREE; | 603 &te, &fe)) |
674 | |
675 /* We have to verify that one edge into the PHI node is dominated | |
676 by the true edge of the predicate block and the other edge | |
677 dominated by the false edge. This ensures that the PHI argument | |
678 we are going to take is completely determined by the path we | |
679 take from the predicate block. | |
680 We can only use BB dominance checks below if the destination of | |
681 the true/false edges are dominated by their edge, thus only | |
682 have a single predecessor. */ | |
683 extract_true_false_edges_from_block (dom, &true_edge, &false_edge); | |
684 tem = EDGE_PRED (bb, 0); | |
685 if (tem == true_edge | |
686 || (single_pred_p (true_edge->dest) | |
687 && (tem->src == true_edge->dest | |
688 || dominated_by_p (CDI_DOMINATORS, | |
689 tem->src, true_edge->dest)))) | |
690 arg0 = PHI_ARG_DEF (phi, tem->dest_idx); | |
691 else if (tem == false_edge | |
692 || (single_pred_p (false_edge->dest) | |
693 && (tem->src == false_edge->dest | |
694 || dominated_by_p (CDI_DOMINATORS, | |
695 tem->src, false_edge->dest)))) | |
696 arg1 = PHI_ARG_DEF (phi, tem->dest_idx); | |
697 else | |
698 return false; | 604 return false; |
699 tem = EDGE_PRED (bb, 1); | |
700 if (tem == true_edge | |
701 || (single_pred_p (true_edge->dest) | |
702 && (tem->src == true_edge->dest | |
703 || dominated_by_p (CDI_DOMINATORS, | |
704 tem->src, true_edge->dest)))) | |
705 arg0 = PHI_ARG_DEF (phi, tem->dest_idx); | |
706 else if (tem == false_edge | |
707 || (single_pred_p (false_edge->dest) | |
708 && (tem->src == false_edge->dest | |
709 || dominated_by_p (CDI_DOMINATORS, | |
710 tem->src, false_edge->dest)))) | |
711 arg1 = PHI_ARG_DEF (phi, tem->dest_idx); | |
712 else | |
713 return false; | |
714 if (!arg0 || !arg1) | |
715 return false; | |
716 | 605 |
717 if (true_arg_p) | 606 if (true_arg_p) |
718 *true_arg_p = arg0; | 607 *true_arg_p = PHI_ARG_DEF (phi, te->dest_idx); |
719 if (false_arg_p) | 608 if (false_arg_p) |
720 *false_arg_p = arg1; | 609 *false_arg_p = PHI_ARG_DEF (phi, fe->dest_idx); |
721 | 610 |
722 return true; | 611 return true; |
723 } | 612 } |
724 | 613 |
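The roughly fifty deleted lines of hand-rolled dominance checking were factored into `extract_true_false_controlled_edges` (tree-cfg.c), which verifies that each PHI edge is controlled by the matching outcome of the predicate ending DOM. Usage sketch, mirroring the new body above:

```c
/* Sketch: diamond verification via the tree-cfg.c helper.  */
edge te, fe;
if (extract_true_false_controlled_edges (dom, gimple_bb (phi), &te, &fe))
  {
    tree true_arg  = PHI_ARG_DEF (phi, te->dest_idx);
    tree false_arg = PHI_ARG_DEF (phi, fe->dest_idx);
    /* TRUE_ARG/FALSE_ARG are the values the PHI takes on each edge.  */
  }
```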
725 /* Determine the outermost loop to that it is possible to hoist a statement | 614 /* Determine the outermost loop to that it is possible to hoist a statement |
731 | 620 |
732 The function returns false if STMT cannot be hoisted outside of the loop it | 621 The function returns false if STMT cannot be hoisted outside of the loop it |
733 is defined in, and true otherwise. */ | 622 is defined in, and true otherwise. */ |
734 | 623 |
735 static bool | 624 static bool |
736 determine_max_movement (gimple stmt, bool must_preserve_exec) | 625 determine_max_movement (gimple *stmt, bool must_preserve_exec) |
737 { | 626 { |
738 basic_block bb = gimple_bb (stmt); | 627 basic_block bb = gimple_bb (stmt); |
739 struct loop *loop = bb->loop_father; | 628 struct loop *loop = bb->loop_father; |
740 struct loop *level; | 629 struct loop *level; |
741 struct lim_aux_data *lim_data = get_lim_data (stmt); | 630 struct lim_aux_data *lim_data = get_lim_data (stmt); |
746 level = ALWAYS_EXECUTED_IN (bb); | 635 level = ALWAYS_EXECUTED_IN (bb); |
747 else | 636 else |
748 level = superloop_at_depth (loop, 1); | 637 level = superloop_at_depth (loop, 1); |
749 lim_data->max_loop = level; | 638 lim_data->max_loop = level; |
750 | 639 |
751 if (gimple_code (stmt) == GIMPLE_PHI) | 640 if (gphi *phi = dyn_cast <gphi *> (stmt)) |
752 { | 641 { |
753 use_operand_p use_p; | 642 use_operand_p use_p; |
754 unsigned min_cost = UINT_MAX; | 643 unsigned min_cost = UINT_MAX; |
755 unsigned total_cost = 0; | 644 unsigned total_cost = 0; |
756 struct lim_aux_data *def_data; | 645 struct lim_aux_data *def_data; |
757 | 646 |
758 /* We will end up promoting dependencies to be unconditionally | 647 /* We will end up promoting dependencies to be unconditionally |
759 evaluated. For this reason the PHI cost (and thus the | 648 evaluated. For this reason the PHI cost (and thus the |
760 cost we remove from the loop by doing the invariant motion) | 649 cost we remove from the loop by doing the invariant motion) |
761 is that of the cheapest PHI argument dependency chain. */ | 650 is that of the cheapest PHI argument dependency chain. */ |
762 FOR_EACH_PHI_ARG (use_p, stmt, iter, SSA_OP_USE) | 651 FOR_EACH_PHI_ARG (use_p, phi, iter, SSA_OP_USE) |
763 { | 652 { |
764 val = USE_FROM_PTR (use_p); | 653 val = USE_FROM_PTR (use_p); |
654 | |
765 if (TREE_CODE (val) != SSA_NAME) | 655 if (TREE_CODE (val) != SSA_NAME) |
766 continue; | 656 { |
657 /* Assign const 1 to constants. */ | |
658 min_cost = MIN (min_cost, 1); | |
659 total_cost += 1; | |
660 continue; | |
661 } | |
767 if (!add_dependency (val, lim_data, loop, false)) | 662 if (!add_dependency (val, lim_data, loop, false)) |
768 return false; | 663 return false; |
769 def_data = get_lim_data (SSA_NAME_DEF_STMT (val)); | 664 |
770 if (def_data) | 665 gimple *def_stmt = SSA_NAME_DEF_STMT (val); |
666 if (gimple_bb (def_stmt) | |
667 && gimple_bb (def_stmt)->loop_father == loop) | |
771 { | 668 { |
772 min_cost = MIN (min_cost, def_data->cost); | 669 def_data = get_lim_data (def_stmt); |
773 total_cost += def_data->cost; | 670 if (def_data) |
671 { | |
672 min_cost = MIN (min_cost, def_data->cost); | |
673 total_cost += def_data->cost; | |
674 } | |
774 } | 675 } |
775 } | 676 } |
776 | 677 |
678 min_cost = MIN (min_cost, total_cost); | |
777 lim_data->cost += min_cost; | 679 lim_data->cost += min_cost; |
778 | 680 |
779 if (gimple_phi_num_args (stmt) > 1) | 681 if (gimple_phi_num_args (phi) > 1) |
780 { | 682 { |
781 basic_block dom = get_immediate_dominator (CDI_DOMINATORS, bb); | 683 basic_block dom = get_immediate_dominator (CDI_DOMINATORS, bb); |
782 gimple cond; | 684 gimple *cond; |
783 if (gsi_end_p (gsi_last_bb (dom))) | 685 if (gsi_end_p (gsi_last_bb (dom))) |
784 return false; | 686 return false; |
785 cond = gsi_stmt (gsi_last_bb (dom)); | 687 cond = gsi_stmt (gsi_last_bb (dom)); |
786 if (gimple_code (cond) != GIMPLE_COND) | 688 if (gimple_code (cond) != GIMPLE_COND) |
787 return false; | 689 return false; |
788 /* Verify that this is an extended form of a diamond and | 690 /* Verify that this is an extended form of a diamond and |
789 the PHI arguments are completely controlled by the | 691 the PHI arguments are completely controlled by the |
790 predicate in DOM. */ | 692 predicate in DOM. */ |
791 if (!extract_true_false_args_from_phi (dom, stmt, NULL, NULL)) | 693 if (!extract_true_false_args_from_phi (dom, phi, NULL, NULL)) |
792 return false; | 694 return false; |
793 | 695 |
794 /* Fold in dependencies and cost of the condition. */ | 696 /* Fold in dependencies and cost of the condition. */ |
795 FOR_EACH_SSA_TREE_OPERAND (val, cond, iter, SSA_OP_USE) | 697 FOR_EACH_SSA_TREE_OPERAND (val, cond, iter, SSA_OP_USE) |
796 { | 698 { |
797 if (!add_dependency (val, lim_data, loop, false)) | 699 if (!add_dependency (val, lim_data, loop, false)) |
798 return false; | 700 return false; |
799 def_data = get_lim_data (SSA_NAME_DEF_STMT (val)); | 701 def_data = get_lim_data (SSA_NAME_DEF_STMT (val)); |
800 if (def_data) | 702 if (def_data) |
801 total_cost += def_data->cost; | 703 lim_data->cost += def_data->cost; |
802 } | 704 } |
803 | 705 |
804 /* We want to avoid unconditionally executing very expensive | 706 /* We want to avoid unconditionally executing very expensive |
805 operations. As costs for our dependencies cannot be | 707 operations. As costs for our dependencies cannot be |
806 negative just claim we are not invariand for this case. | 708 negative just claim we are not invariand for this case. |
824 if (!add_dependency (val, lim_data, loop, true)) | 726 if (!add_dependency (val, lim_data, loop, true)) |
825 return false; | 727 return false; |
826 | 728 |
827 if (gimple_vuse (stmt)) | 729 if (gimple_vuse (stmt)) |
828 { | 730 { |
829 mem_ref_p ref = mem_ref_in_stmt (stmt); | 731 im_mem_ref *ref |
830 | 732 = lim_data ? memory_accesses.refs_list[lim_data->ref] : NULL; |
831 if (ref) | 733 if (ref |
832 { | 734 && MEM_ANALYZABLE (ref)) |
833 lim_data->max_loop | 735 { |
834 = outermost_indep_loop (lim_data->max_loop, loop, ref); | 736 lim_data->max_loop = outermost_indep_loop (lim_data->max_loop, |
737 loop, ref); | |
835 if (!lim_data->max_loop) | 738 if (!lim_data->max_loop) |
836 return false; | 739 return false; |
837 } | 740 } |
838 else | 741 else if (! add_dependency (gimple_vuse (stmt), lim_data, loop, false)) |
839 { | 742 return false; |
840 if ((val = gimple_vuse (stmt)) != NULL_TREE) | |
841 { | |
842 if (!add_dependency (val, lim_data, loop, false)) | |
843 return false; | |
844 } | |
845 } | |
846 } | 743 } |
847 | 744 |
848 lim_data->cost += stmt_cost (stmt); | 745 lim_data->cost += stmt_cost (stmt); |
849 | 746 |
850 return true; | 747 return true; |
854 and that one of the operands of this statement is computed by STMT. | 751 and that one of the operands of this statement is computed by STMT. |
855 Ensure that STMT (together with all the statements that define its | 752 Ensure that STMT (together with all the statements that define its |
856 operands) is hoisted at least out of the loop LEVEL. */ | 753 operands) is hoisted at least out of the loop LEVEL. */ |
857 | 754 |
858 static void | 755 static void |
859 set_level (gimple stmt, struct loop *orig_loop, struct loop *level) | 756 set_level (gimple *stmt, struct loop *orig_loop, struct loop *level) |
860 { | 757 { |
861 struct loop *stmt_loop = gimple_bb (stmt)->loop_father; | 758 struct loop *stmt_loop = gimple_bb (stmt)->loop_father; |
862 struct depend *dep; | |
863 struct lim_aux_data *lim_data; | 759 struct lim_aux_data *lim_data; |
760 gimple *dep_stmt; | |
761 unsigned i; | |
864 | 762 |
865 stmt_loop = find_common_loop (orig_loop, stmt_loop); | 763 stmt_loop = find_common_loop (orig_loop, stmt_loop); |
866 lim_data = get_lim_data (stmt); | 764 lim_data = get_lim_data (stmt); |
867 if (lim_data != NULL && lim_data->tgt_loop != NULL) | 765 if (lim_data != NULL && lim_data->tgt_loop != NULL) |
868 stmt_loop = find_common_loop (stmt_loop, | 766 stmt_loop = find_common_loop (stmt_loop, |
872 | 770 |
873 gcc_assert (level == lim_data->max_loop | 771 gcc_assert (level == lim_data->max_loop |
874 || flow_loop_nested_p (lim_data->max_loop, level)); | 772 || flow_loop_nested_p (lim_data->max_loop, level)); |
875 | 773 |
876 lim_data->tgt_loop = level; | 774 lim_data->tgt_loop = level; |
877 for (dep = lim_data->depends; dep; dep = dep->next) | 775 FOR_EACH_VEC_ELT (lim_data->depends, i, dep_stmt) |
878 set_level (dep->stmt, orig_loop, level); | 776 set_level (dep_stmt, orig_loop, level); |
879 } | 777 } |
880 | 778 |
881 /* Determines an outermost loop from that we want to hoist the statement STMT. | 779 /* Determines an outermost loop from that we want to hoist the statement STMT. |
882 For now we chose the outermost possible loop. TODO -- use profiling | 780 For now we chose the outermost possible loop. TODO -- use profiling |
883 information to set it more sanely. */ | 781 information to set it more sanely. */ |
884 | 782 |
885 static void | 783 static void |
886 set_profitable_level (gimple stmt) | 784 set_profitable_level (gimple *stmt) |
887 { | 785 { |
888 set_level (stmt, gimple_bb (stmt)->loop_father, get_lim_data (stmt)->max_loop); | 786 set_level (stmt, gimple_bb (stmt)->loop_father, get_lim_data (stmt)->max_loop); |
889 } | 787 } |
890 | 788 |
891 /* Returns true if STMT is a call that has side effects. */ | 789 /* Returns true if STMT is a call that has side effects. */ |
892 | 790 |
893 static bool | 791 static bool |
894 nonpure_call_p (gimple stmt) | 792 nonpure_call_p (gimple *stmt) |
895 { | 793 { |
896 if (gimple_code (stmt) != GIMPLE_CALL) | 794 if (gimple_code (stmt) != GIMPLE_CALL) |
897 return false; | 795 return false; |
898 | 796 |
899 return gimple_has_side_effects (stmt); | 797 return gimple_has_side_effects (stmt); |
900 } | 798 } |
901 | 799 |
902 /* Rewrite a/b to a*(1/b). Return the invariant stmt to process. */ | 800 /* Rewrite a/b to a*(1/b). Return the invariant stmt to process. */ |
903 | 801 |
904 static gimple | 802 static gimple * |
905 rewrite_reciprocal (gimple_stmt_iterator *bsi) | 803 rewrite_reciprocal (gimple_stmt_iterator *bsi) |
906 { | 804 { |
907 gimple stmt, stmt1, stmt2; | 805 gassign *stmt, *stmt1, *stmt2; |
908 tree var, name, lhs, type; | 806 tree name, lhs, type; |
909 tree real_one; | 807 tree real_one; |
910 gimple_stmt_iterator gsi; | 808 gimple_stmt_iterator gsi; |
911 | 809 |
912 stmt = gsi_stmt (*bsi); | 810 stmt = as_a <gassign *> (gsi_stmt (*bsi)); |
913 lhs = gimple_assign_lhs (stmt); | 811 lhs = gimple_assign_lhs (stmt); |
914 type = TREE_TYPE (lhs); | 812 type = TREE_TYPE (lhs); |
915 | 813 |
916 var = create_tmp_var (type, "reciptmp"); | |
917 add_referenced_var (var); | |
918 DECL_GIMPLE_REG_P (var) = 1; | |
919 | |
920 real_one = build_one_cst (type); | 814 real_one = build_one_cst (type); |
921 | 815 |
922 stmt1 = gimple_build_assign_with_ops (RDIV_EXPR, | 816 name = make_temp_ssa_name (type, NULL, "reciptmp"); |
923 var, real_one, gimple_assign_rhs2 (stmt)); | 817 stmt1 = gimple_build_assign (name, RDIV_EXPR, real_one, |
924 name = make_ssa_name (var, stmt1); | 818 gimple_assign_rhs2 (stmt)); |
925 gimple_assign_set_lhs (stmt1, name); | 819 stmt2 = gimple_build_assign (lhs, MULT_EXPR, name, |
926 | 820 gimple_assign_rhs1 (stmt)); |
927 stmt2 = gimple_build_assign_with_ops (MULT_EXPR, lhs, name, | |
928 gimple_assign_rhs1 (stmt)); | |
929 | 821 |
930 /* Replace division stmt with reciprocal and multiply stmts. | 822 /* Replace division stmt with reciprocal and multiply stmts. |
931 The multiply stmt is not invariant, so update iterator | 823 The multiply stmt is not invariant, so update iterator |
932 and avoid rescanning. */ | 824 and avoid rescanning. */ |
933 gsi = *bsi; | 825 gsi = *bsi; |
939 } | 831 } |
940 | 832 |
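Worth noting in the rewrite above: the `create_tmp_var` + `add_referenced_var` + `make_ssa_name` + `gimple_assign_set_lhs` dance collapses into `make_temp_ssa_name`, which mints an anonymous SSA name with no VAR_DECL behind it, and `gimple_build_assign` grew overloads taking the tree code and operands directly. The new idiom in isolation (sketch; `type` and `rhs2` as in the function above):

```c
/* Sketch: GCC 7 idiom for a fresh temporary SSA definition.  */
tree name = make_temp_ssa_name (type, NULL, "reciptmp");
gassign *stmt1
  = gimple_build_assign (name, RDIV_EXPR, build_one_cst (type), rhs2);
/* NAME's SSA_NAME_DEF_STMT is set implicitly; no decl bookkeeping.  */
```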
941 /* Check if the pattern at *BSI is a bittest of the form | 833 /* Check if the pattern at *BSI is a bittest of the form |
942 (A >> B) & 1 != 0 and in this case rewrite it to A & (1 << B) != 0. */ | 834 (A >> B) & 1 != 0 and in this case rewrite it to A & (1 << B) != 0. */ |
943 | 835 |
944 static gimple | 836 static gimple * |
945 rewrite_bittest (gimple_stmt_iterator *bsi) | 837 rewrite_bittest (gimple_stmt_iterator *bsi) |
946 { | 838 { |
947 gimple stmt, use_stmt, stmt1, stmt2; | 839 gassign *stmt; |
948 tree lhs, var, name, t, a, b; | 840 gimple *stmt1; |
841 gassign *stmt2; | |
842 gimple *use_stmt; | |
843 gcond *cond_stmt; | |
844 tree lhs, name, t, a, b; | |
949 use_operand_p use; | 845 use_operand_p use; |
950 | 846 |
951 stmt = gsi_stmt (*bsi); | 847 stmt = as_a <gassign *> (gsi_stmt (*bsi)); |
952 lhs = gimple_assign_lhs (stmt); | 848 lhs = gimple_assign_lhs (stmt); |
953 | 849 |
954 /* Verify that the single use of lhs is a comparison against zero. */ | 850 /* Verify that the single use of lhs is a comparison against zero. */ |
955 if (TREE_CODE (lhs) != SSA_NAME | 851 if (TREE_CODE (lhs) != SSA_NAME |
956 || !single_imm_use (lhs, &use, &use_stmt) | 852 || !single_imm_use (lhs, &use, &use_stmt)) |
957 || gimple_code (use_stmt) != GIMPLE_COND) | |
958 return stmt; | 853 return stmt; |
959 if (gimple_cond_lhs (use_stmt) != lhs | 854 cond_stmt = dyn_cast <gcond *> (use_stmt); |
960 || (gimple_cond_code (use_stmt) != NE_EXPR | 855 if (!cond_stmt) |
961 && gimple_cond_code (use_stmt) != EQ_EXPR) | 856 return stmt; |
962 || !integer_zerop (gimple_cond_rhs (use_stmt))) | 857 if (gimple_cond_lhs (cond_stmt) != lhs |
858 || (gimple_cond_code (cond_stmt) != NE_EXPR | |
859 && gimple_cond_code (cond_stmt) != EQ_EXPR) | |
860 || !integer_zerop (gimple_cond_rhs (cond_stmt))) | |
963 return stmt; | 861 return stmt; |
964 | 862 |
965 /* Get at the operands of the shift. The rhs is TMP1 & 1. */ | 863 /* Get at the operands of the shift. The rhs is TMP1 & 1. */ |
966 stmt1 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt)); | 864 stmt1 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt)); |
967 if (gimple_code (stmt1) != GIMPLE_ASSIGN) | 865 if (gimple_code (stmt1) != GIMPLE_ASSIGN) |
992 && outermost_invariant_loop (a, loop_containing_stmt (stmt1)) == NULL) | 890 && outermost_invariant_loop (a, loop_containing_stmt (stmt1)) == NULL) |
993 { | 891 { |
994 gimple_stmt_iterator rsi; | 892 gimple_stmt_iterator rsi; |
995 | 893 |
996 /* 1 << B */ | 894 /* 1 << B */ |
997 var = create_tmp_var (TREE_TYPE (a), "shifttmp"); | |
998 add_referenced_var (var); | |
999 t = fold_build2 (LSHIFT_EXPR, TREE_TYPE (a), | 895 t = fold_build2 (LSHIFT_EXPR, TREE_TYPE (a), |
1000 build_int_cst (TREE_TYPE (a), 1), b); | 896 build_int_cst (TREE_TYPE (a), 1), b); |
1001 stmt1 = gimple_build_assign (var, t); | 897 name = make_temp_ssa_name (TREE_TYPE (a), NULL, "shifttmp"); |
1002 name = make_ssa_name (var, stmt1); | 898 stmt1 = gimple_build_assign (name, t); |
1003 gimple_assign_set_lhs (stmt1, name); | |
1004 | 899 |
1005 /* A & (1 << B) */ | 900 /* A & (1 << B) */ |
1006 t = fold_build2 (BIT_AND_EXPR, TREE_TYPE (a), a, name); | 901 t = fold_build2 (BIT_AND_EXPR, TREE_TYPE (a), a, name); |
1007 stmt2 = gimple_build_assign (var, t); | 902 name = make_temp_ssa_name (TREE_TYPE (a), NULL, "shifttmp"); |
1008 name = make_ssa_name (var, stmt2); | 903 stmt2 = gimple_build_assign (name, t); |
1009 gimple_assign_set_lhs (stmt2, name); | |
1010 | 904 |
1011 /* Replace the SSA_NAME we compare against zero. Adjust | 905 /* Replace the SSA_NAME we compare against zero. Adjust |
1012 the type of zero accordingly. */ | 906 the type of zero accordingly. */ |
1013 SET_USE (use, name); | 907 SET_USE (use, name); |
1014 gimple_cond_set_rhs (use_stmt, build_int_cst_type (TREE_TYPE (name), 0)); | 908 gimple_cond_set_rhs (cond_stmt, |
909 build_int_cst_type (TREE_TYPE (name), | |
910 0)); | |
1015 | 911 |
1016 /* Don't use gsi_replace here, none of the new assignments sets | 912 /* Don't use gsi_replace here, none of the new assignments sets |
1017 the variable originally set in stmt. Move bsi to stmt1, and | 913 the variable originally set in stmt. Move bsi to stmt1, and |
1018 then remove the original stmt, so that we get a chance to | 914 then remove the original stmt, so that we get a chance to |
1019 retain debug info for it. */ | 915 retain debug info for it. */ |
1020 rsi = *bsi; | 916 rsi = *bsi; |
1021 gsi_insert_before (bsi, stmt1, GSI_NEW_STMT); | 917 gsi_insert_before (bsi, stmt1, GSI_NEW_STMT); |
1022 gsi_insert_before (&rsi, stmt2, GSI_SAME_STMT); | 918 gsi_insert_before (&rsi, stmt2, GSI_SAME_STMT); |
919 gimple *to_release = gsi_stmt (rsi); | |
1023 gsi_remove (&rsi, true); | 920 gsi_remove (&rsi, true); |
921 release_defs (to_release); | |
1024 | 922 |
1025 return stmt1; | 923 return stmt1; |
1026 } | 924 } |
1027 | 925 |
1028 return stmt; | 926 return stmt; |
1029 } | 927 } |
1030 | 928 |
929 /* For each statement determines the outermost loop in that it is invariant, | |
930 statements on whose motion it depends and the cost of the computation. |
931 This information is stored to the LIM_DATA structure associated with |
932 each statement. */ |
933 class invariantness_dom_walker : public dom_walker | |
934 { | |
935 public: | |
936 invariantness_dom_walker (cdi_direction direction) | |
937 : dom_walker (direction) {} | |
938 | |
939 virtual edge before_dom_children (basic_block); | |
940 }; | |
1031 | 941 |
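The old `dom_walk_data` table of callbacks (its driver, `determine_invariantness`, is deleted further down) gives way to `dom_walker` subclasses from domwalk.h: override `before_dom_children` and drive everything with one `walk` call. A minimal sketch of the invocation (the entry-block spelling is the conventional one, assumed rather than taken from this hunk):

```c
/* Sketch: running a dom_walker over the dominator tree (GCC 7).  */
invariantness_dom_walker walker (CDI_DOMINATORS);
walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
/* before_dom_children may return an edge to let the walker skip
   children it proves unreachable; returning NULL, as this pass
   does, simply visits all dominated children.  */
```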
1032 /* Determine the outermost loops in that statements in basic block BB are | 942 /* Determine the outermost loops in that statements in basic block BB are |
1033 invariant, and record them to the LIM_DATA associated with the statements. | 943 invariant, and record them to the LIM_DATA associated with the statements. |
1034 Callback for walk_dominator_tree. */ | 944 Callback for dom_walker. */ |
1035 | 945 |
1036 static void | 946 edge |
1037 determine_invariantness_stmt (struct dom_walk_data *dw_data ATTRIBUTE_UNUSED, | 947 invariantness_dom_walker::before_dom_children (basic_block bb) |
1038 basic_block bb) | |
1039 { | 948 { |
1040 enum move_pos pos; | 949 enum move_pos pos; |
1041 gimple_stmt_iterator bsi; | 950 gimple_stmt_iterator bsi; |
1042 gimple stmt; | 951 gimple *stmt; |
1043 bool maybe_never = ALWAYS_EXECUTED_IN (bb) == NULL; | 952 bool maybe_never = ALWAYS_EXECUTED_IN (bb) == NULL; |
1044 struct loop *outermost = ALWAYS_EXECUTED_IN (bb); | 953 struct loop *outermost = ALWAYS_EXECUTED_IN (bb); |
1045 struct lim_aux_data *lim_data; | 954 struct lim_aux_data *lim_data; |
1046 | 955 |
1047 if (!loop_outer (bb->loop_father)) | 956 if (!loop_outer (bb->loop_father)) |
1048 return; | 957 return NULL; |
1049 | 958 |
1050 if (dump_file && (dump_flags & TDF_DETAILS)) | 959 if (dump_file && (dump_flags & TDF_DETAILS)) |
1051 fprintf (dump_file, "Basic block %d (loop %d -- depth %d):\n\n", | 960 fprintf (dump_file, "Basic block %d (loop %d -- depth %d):\n\n", |
1052 bb->index, bb->loop_father->num, loop_depth (bb->loop_father)); | 961 bb->index, bb->loop_father->num, loop_depth (bb->loop_father)); |
1053 | 962 |
1065 | 974 |
1066 pos = movement_possibility (stmt); | 975 pos = movement_possibility (stmt); |
1067 if (pos == MOVE_IMPOSSIBLE) | 976 if (pos == MOVE_IMPOSSIBLE) |
1068 continue; | 977 continue; |
1069 | 978 |
1070 lim_data = init_lim_data (stmt); | 979 lim_data = get_lim_data (stmt); |
980 if (! lim_data) | |
981 lim_data = init_lim_data (stmt); | |
1071 lim_data->always_executed_in = outermost; | 982 lim_data->always_executed_in = outermost; |
1072 | 983 |
1073 if (!determine_max_movement (stmt, false)) | 984 if (!determine_max_movement (stmt, false)) |
1074 { | 985 { |
1075 lim_data->max_loop = NULL; | 986 lim_data->max_loop = NULL; |
1076 continue; | 987 continue; |
1077 } | 988 } |
1078 | 989 |
1079 if (dump_file && (dump_flags & TDF_DETAILS)) | 990 if (dump_file && (dump_flags & TDF_DETAILS)) |
1080 { | 991 { |
1081 print_gimple_stmt (dump_file, stmt, 2, 0); | 992 print_gimple_stmt (dump_file, stmt, 2); |
1082 fprintf (dump_file, " invariant up to level %d, cost %d.\n\n", | 993 fprintf (dump_file, " invariant up to level %d, cost %d.\n\n", |
1083 loop_depth (lim_data->max_loop), | 994 loop_depth (lim_data->max_loop), |
1084 lim_data->cost); | 995 lim_data->cost); |
1085 } | 996 } |
1086 | 997 |
1102 } | 1013 } |
1103 /* Make sure to note always_executed_in for stores to make | 1014 /* Make sure to note always_executed_in for stores to make |
1104 store-motion work. */ | 1015 store-motion work. */ |
1105 else if (stmt_makes_single_store (stmt)) | 1016 else if (stmt_makes_single_store (stmt)) |
1106 { | 1017 { |
1107 struct lim_aux_data *lim_data = init_lim_data (stmt); | 1018 struct lim_aux_data *lim_data = get_lim_data (stmt); |
1019 if (! lim_data) | |
1020 lim_data = init_lim_data (stmt); | |
1108 lim_data->always_executed_in = outermost; | 1021 lim_data->always_executed_in = outermost; |
1109 } | 1022 } |
1110 continue; | 1023 continue; |
1111 } | 1024 } |
1112 | 1025 |
1138 && TREE_CODE (op0) == SSA_NAME | 1051 && TREE_CODE (op0) == SSA_NAME |
1139 && has_single_use (op0)) | 1052 && has_single_use (op0)) |
1140 stmt = rewrite_bittest (&bsi); | 1053 stmt = rewrite_bittest (&bsi); |
1141 } | 1054 } |
1142 | 1055 |
1143 lim_data = init_lim_data (stmt); | 1056 lim_data = get_lim_data (stmt); |
1057 if (! lim_data) | |
1058 lim_data = init_lim_data (stmt); | |
1144 lim_data->always_executed_in = outermost; | 1059 lim_data->always_executed_in = outermost; |
1145 | 1060 |
1146 if (maybe_never && pos == MOVE_PRESERVE_EXECUTION) | 1061 if (maybe_never && pos == MOVE_PRESERVE_EXECUTION) |
1147 continue; | 1062 continue; |
1148 | 1063 |
1152 continue; | 1067 continue; |
1153 } | 1068 } |
1154 | 1069 |
1155 if (dump_file && (dump_flags & TDF_DETAILS)) | 1070 if (dump_file && (dump_flags & TDF_DETAILS)) |
1156 { | 1071 { |
1157 print_gimple_stmt (dump_file, stmt, 2, 0); | 1072 print_gimple_stmt (dump_file, stmt, 2); |
1158 fprintf (dump_file, " invariant up to level %d, cost %d.\n\n", | 1073 fprintf (dump_file, " invariant up to level %d, cost %d.\n\n", |
1159 loop_depth (lim_data->max_loop), | 1074 loop_depth (lim_data->max_loop), |
1160 lim_data->cost); | 1075 lim_data->cost); |
1161 } | 1076 } |
1162 | 1077 |
1163 if (lim_data->cost >= LIM_EXPENSIVE) | 1078 if (lim_data->cost >= LIM_EXPENSIVE) |
1164 set_profitable_level (stmt); | 1079 set_profitable_level (stmt); |
1165 } | 1080 } |
1166 } | 1081 return NULL; |
1167 | 1082 } |
1168 /* For each statement determines the outermost loop in that it is invariant, | 1083 |
1169 statements on whose motion it depends and the cost of the computation. | 1084 class move_computations_dom_walker : public dom_walker |
1170 This information is stored to the LIM_DATA structure associated with | 1085 { |
1171 each statement. */ | 1086 public: |
1172 | 1087 move_computations_dom_walker (cdi_direction direction) |
1173 static void | 1088 : dom_walker (direction), todo_ (0) {} |
1174 determine_invariantness (void) | 1089 |
1175 { | 1090 virtual edge before_dom_children (basic_block); |
1176 struct dom_walk_data walk_data; | 1091 |
1177 | 1092 unsigned int todo_; |
1178 memset (&walk_data, 0, sizeof (struct dom_walk_data)); | 1093 }; |
1179 walk_data.dom_direction = CDI_DOMINATORS; | |
1180 walk_data.before_dom_children = determine_invariantness_stmt; | |
1181 | |
1182 init_walk_dominator_tree (&walk_data); | |
1183 walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR); | |
1184 fini_walk_dominator_tree (&walk_data); | |
1185 } | |
1186 | 1094 |
1187 /* Hoist the statements in basic block BB out of the loops prescribed by | 1095 /* Hoist the statements in basic block BB out of the loops prescribed by |
1188 data stored in LIM_DATA structures associated with each statement. Callback | 1096 data stored in LIM_DATA structures associated with each statement. Worker 
1189 for walk_dominator_tree. */ | 1097 for move_computations, called on each block in reverse post-order. */ 
1190 | 1098 |
1191 static void | 1099 unsigned int |
1192 move_computations_stmt (struct dom_walk_data *dw_data, | 1100 move_computations_worker (basic_block bb) |
1193 basic_block bb) | |
1194 { | 1101 { |
1195 struct loop *level; | 1102 struct loop *level; |
1196 gimple_stmt_iterator bsi; | |
1197 gimple stmt; | |
1198 unsigned cost = 0; | 1103 unsigned cost = 0; |
1199 struct lim_aux_data *lim_data; | 1104 struct lim_aux_data *lim_data; |
1105 unsigned int todo = 0; | |
1200 | 1106 |
1201 if (!loop_outer (bb->loop_father)) | 1107 if (!loop_outer (bb->loop_father)) |
1202 return; | 1108 return todo; |
1203 | 1109 |
1204 for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); ) | 1110 for (gphi_iterator bsi = gsi_start_phis (bb); !gsi_end_p (bsi); ) |
1205 { | 1111 { |
1206 gimple new_stmt; | 1112 gassign *new_stmt; |
1207 stmt = gsi_stmt (bsi); | 1113 gphi *stmt = bsi.phi (); |
1208 | 1114 |
1209 lim_data = get_lim_data (stmt); | 1115 lim_data = get_lim_data (stmt); |
1210 if (lim_data == NULL) | 1116 if (lim_data == NULL) |
1211 { | 1117 { |
1212 gsi_next (&bsi); | 1118 gsi_next (&bsi); |
1224 } | 1130 } |
1225 | 1131 |
1226 if (dump_file && (dump_flags & TDF_DETAILS)) | 1132 if (dump_file && (dump_flags & TDF_DETAILS)) |
1227 { | 1133 { |
1228 fprintf (dump_file, "Moving PHI node\n"); | 1134 fprintf (dump_file, "Moving PHI node\n"); |
1229 print_gimple_stmt (dump_file, stmt, 0, 0); | 1135 print_gimple_stmt (dump_file, stmt, 0); |
1230 fprintf (dump_file, "(cost %u) out of loop %d.\n\n", | 1136 fprintf (dump_file, "(cost %u) out of loop %d.\n\n", |
1231 cost, level->num); | 1137 cost, level->num); |
1232 } | 1138 } |
1233 | 1139 |
1234 if (gimple_phi_num_args (stmt) == 1) | 1140 if (gimple_phi_num_args (stmt) == 1) |
1235 { | 1141 { |
1236 tree arg = PHI_ARG_DEF (stmt, 0); | 1142 tree arg = PHI_ARG_DEF (stmt, 0); |
1237 new_stmt = gimple_build_assign_with_ops (TREE_CODE (arg), | 1143 new_stmt = gimple_build_assign (gimple_phi_result (stmt), |
1238 gimple_phi_result (stmt), | 1144 TREE_CODE (arg), arg); |
1239 arg, NULL_TREE); | |
1240 SSA_NAME_DEF_STMT (gimple_phi_result (stmt)) = new_stmt; | |
1241 } | 1145 } |
1242 else | 1146 else |
1243 { | 1147 { |
1244 basic_block dom = get_immediate_dominator (CDI_DOMINATORS, bb); | 1148 basic_block dom = get_immediate_dominator (CDI_DOMINATORS, bb); |
1245 gimple cond = gsi_stmt (gsi_last_bb (dom)); | 1149 gimple *cond = gsi_stmt (gsi_last_bb (dom)); |
1246 tree arg0 = NULL_TREE, arg1 = NULL_TREE, t; | 1150 tree arg0 = NULL_TREE, arg1 = NULL_TREE, t; |
1247 /* Get the PHI arguments corresponding to the true and false | 1151 /* Get the PHI arguments corresponding to the true and false |
1248 edges of COND. */ | 1152 edges of COND. */ |
1249 extract_true_false_args_from_phi (dom, stmt, &arg0, &arg1); | 1153 extract_true_false_args_from_phi (dom, stmt, &arg0, &arg1); |
1250 gcc_assert (arg0 && arg1); | 1154 gcc_assert (arg0 && arg1); |
1251 t = build2 (gimple_cond_code (cond), boolean_type_node, | 1155 t = build2 (gimple_cond_code (cond), boolean_type_node, |
1252 gimple_cond_lhs (cond), gimple_cond_rhs (cond)); | 1156 gimple_cond_lhs (cond), gimple_cond_rhs (cond)); |
1253 t = build3 (COND_EXPR, TREE_TYPE (gimple_phi_result (stmt)), | 1157 new_stmt = gimple_build_assign (gimple_phi_result (stmt), |
1254 t, arg0, arg1); | 1158 COND_EXPR, t, arg0, arg1); |
1255 new_stmt = gimple_build_assign_with_ops (COND_EXPR, | 1159 todo |= TODO_cleanup_cfg; |
1256 gimple_phi_result (stmt), | 1160 } |
1257 t, NULL_TREE); | 1161 if (INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_lhs (new_stmt))) |
1258 SSA_NAME_DEF_STMT (gimple_phi_result (stmt)) = new_stmt; | 1162 && (!ALWAYS_EXECUTED_IN (bb) |
1259 *((unsigned int *)(dw_data->global_data)) |= TODO_cleanup_cfg; | 1163 || (ALWAYS_EXECUTED_IN (bb) != level |
1164 && !flow_loop_nested_p (ALWAYS_EXECUTED_IN (bb), level)))) | |
1165 { | |
1166 tree lhs = gimple_assign_lhs (new_stmt); | |
1167 SSA_NAME_RANGE_INFO (lhs) = NULL; | |
1260 } | 1168 } |
1261 gsi_insert_on_edge (loop_preheader_edge (level), new_stmt); | 1169 gsi_insert_on_edge (loop_preheader_edge (level), new_stmt); |
1262 remove_phi_node (&bsi, false); | 1170 remove_phi_node (&bsi, false); |
1263 } | 1171 } |
1264 | 1172 |
1265 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); ) | 1173 for (gimple_stmt_iterator bsi = gsi_start_bb (bb); !gsi_end_p (bsi); ) |
1266 { | 1174 { |
1267 stmt = gsi_stmt (bsi); | 1175 edge e; |
1176 | |
1177 gimple *stmt = gsi_stmt (bsi); | |
1268 | 1178 |
1269 lim_data = get_lim_data (stmt); | 1179 lim_data = get_lim_data (stmt); |
1270 if (lim_data == NULL) | 1180 if (lim_data == NULL) |
1271 { | 1181 { |
1272 gsi_next (&bsi); | 1182 gsi_next (&bsi); |
1289 continue; | 1199 continue; |
1290 | 1200 |
1291 if (dump_file && (dump_flags & TDF_DETAILS)) | 1201 if (dump_file && (dump_flags & TDF_DETAILS)) |
1292 { | 1202 { |
1293 fprintf (dump_file, "Moving statement\n"); | 1203 fprintf (dump_file, "Moving statement\n"); |
1294 print_gimple_stmt (dump_file, stmt, 0, 0); | 1204 print_gimple_stmt (dump_file, stmt, 0); |
1295 fprintf (dump_file, "(cost %u) out of loop %d.\n\n", | 1205 fprintf (dump_file, "(cost %u) out of loop %d.\n\n", |
1296 cost, level->num); | 1206 cost, level->num); |
1297 } | 1207 } |
1298 | 1208 |
1299 mark_virtual_ops_for_renaming (stmt); | 1209 e = loop_preheader_edge (level); |
1300 gsi_insert_on_edge (loop_preheader_edge (level), stmt); | 1210 gcc_assert (!gimple_vdef (stmt)); |
1211 if (gimple_vuse (stmt)) | |
1212 { | |
1213 /* The new VUSE is the one from the virtual PHI in the loop | |
1214 header or the one already present. */ | |
1215 gphi_iterator gsi2; | |
1216 for (gsi2 = gsi_start_phis (e->dest); | |
1217 !gsi_end_p (gsi2); gsi_next (&gsi2)) | |
1218 { | |
1219 gphi *phi = gsi2.phi (); | |
1220 if (virtual_operand_p (gimple_phi_result (phi))) | |
1221 { | |
1222 gimple_set_vuse (stmt, PHI_ARG_DEF_FROM_EDGE (phi, e)); | |
1223 break; | |
1224 } | |
1225 } | |
1226 } | |
1301 gsi_remove (&bsi, false); | 1227 gsi_remove (&bsi, false); |
1302 } | 1228 if (gimple_has_lhs (stmt) |
1229 && TREE_CODE (gimple_get_lhs (stmt)) == SSA_NAME | |
1230 && INTEGRAL_TYPE_P (TREE_TYPE (gimple_get_lhs (stmt))) | |
1231 && (!ALWAYS_EXECUTED_IN (bb) | |
1232 || !(ALWAYS_EXECUTED_IN (bb) == level | |
1233 || flow_loop_nested_p (ALWAYS_EXECUTED_IN (bb), level)))) | |
1234 { | |
1235 tree lhs = gimple_get_lhs (stmt); | |
1236 SSA_NAME_RANGE_INFO (lhs) = NULL; | |
1237 } | |
1238 /* In case this is a stmt that is not unconditionally executed | |
1239 when the target loop header is executed and the stmt may | |
1240 invoke undefined integer or pointer overflow rewrite it to | |
1241 unsigned arithmetic. */ | |
1242 if (is_gimple_assign (stmt) | |
1243 && INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_lhs (stmt))) | |
1244 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (gimple_assign_lhs (stmt))) | |
1245 && arith_code_with_undefined_signed_overflow | |
1246 (gimple_assign_rhs_code (stmt)) | |
1247 && (!ALWAYS_EXECUTED_IN (bb) | |
1248 || !(ALWAYS_EXECUTED_IN (bb) == level | |
1249 || flow_loop_nested_p (ALWAYS_EXECUTED_IN (bb), level)))) | |
1250 gsi_insert_seq_on_edge (e, rewrite_to_defined_overflow (stmt)); | |
1251 else | |
1252 gsi_insert_on_edge (e, stmt); | |
1253 } | |
1254 | |
1255 return todo; | |
1303 } | 1256 } |
1304 | 1257 |
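At the source level, the PHI-hoisting path above amounts to materializing an invariant select once in the preheader: a one-argument PHI becomes a plain copy, and a two-argument PHI whose controlling condition is invariant becomes a COND_EXPR. A minimal standalone sketch (toy names, hand-written analogue rather than pass output):

  #include <stdio.h>

  static int cond = 1, a = 10, b = 20;    /* loop-invariant inputs */

  /* Before: the select corresponding to the PHI runs every iteration.  */
  static int sum_before (int n)
  {
    int s = 0;
    for (int i = 0; i < n; ++i)
      s += cond ? a : b;                  /* PHI (a, b) guarded by COND */
    return s;
  }

  /* After: the COND_EXPR is evaluated once in the "preheader".  */
  static int sum_after (int n)
  {
    int x = cond ? a : b;
    int s = 0;
    for (int i = 0; i < n; ++i)
      s += x;
    return s;
  }

  int main (void)
  {
    printf ("%d %d\n", sum_before (100), sum_after (100));  /* 1000 1000 */
    return 0;
  }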
1305 /* Hoist the statements out of the loops prescribed by data stored in | 1258 /* Hoist the statements out of the loops prescribed by data stored in |
1306 LIM_DATA structures associated with each statement. */ | 1259 LIM_DATA structures associated with each statement. */ 
1307 | 1260 |
1308 static unsigned int | 1261 static unsigned int |
1309 move_computations (void) | 1262 move_computations (void) |
1310 { | 1263 { |
1311 struct dom_walk_data walk_data; | 1264 int *rpo = XNEWVEC (int, last_basic_block_for_fn (cfun)); |
1312 unsigned int todo = 0; | 1265 int n = pre_and_rev_post_order_compute_fn (cfun, NULL, rpo, false); |
1313 | 1266 unsigned todo = 0; |
1314 memset (&walk_data, 0, sizeof (struct dom_walk_data)); | 1267 |
1315 walk_data.global_data = &todo; | 1268 for (int i = 0; i < n; ++i) |
1316 walk_data.dom_direction = CDI_DOMINATORS; | 1269 todo |= move_computations_worker (BASIC_BLOCK_FOR_FN (cfun, rpo[i])); |
1317 walk_data.before_dom_children = move_computations_stmt; | 1270 |
1318 | 1271 free (rpo); |
1319 init_walk_dominator_tree (&walk_data); | |
1320 walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR); | |
1321 fini_walk_dominator_tree (&walk_data); | |
1322 | 1272 |
1323 gsi_commit_edge_inserts (); | 1273 gsi_commit_edge_inserts (); |
1324 if (need_ssa_update_p (cfun)) | 1274 if (need_ssa_update_p (cfun)) |
1325 rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa); | 1275 rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa); |
1326 | 1276 |
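The arith_code_with_undefined_signed_overflow / rewrite_to_defined_overflow hunk above addresses speculation: a statement hoisted out of a conditionally executed block may now run on executions where it previously did not, so signed arithmetic whose overflow is undefined is performed in the corresponding unsigned type. A hedged sketch of the emitted shape (hand-written, assuming GCC's defined unsigned-to-signed conversion):

  #include <limits.h>
  #include <stdio.h>

  /* The moved copy of "a + b" is computed in unsigned arithmetic and
     cast back, so speculative evaluation cannot invoke signed-overflow
     undefined behavior.  */
  static int hoisted_add (int a, int b)
  {
    return (int) ((unsigned int) a + (unsigned int) b);
  }

  int main (void)
  {
    printf ("%d\n", hoisted_add (INT_MAX, 1));  /* wraps, no UB */
    return 0;
  }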
1362 moved out of the LOOP. ORIG_LOOP is the loop in which EXPR is used. */ | 1312 moved out of the LOOP. ORIG_LOOP is the loop in which EXPR is used. */ 
1363 | 1313 |
1364 static void | 1314 static void |
1365 force_move_till_op (tree op, struct loop *orig_loop, struct loop *loop) | 1315 force_move_till_op (tree op, struct loop *orig_loop, struct loop *loop) |
1366 { | 1316 { |
1367 gimple stmt; | 1317 gimple *stmt; |
1368 | 1318 |
1369 if (!op | 1319 if (!op |
1370 || is_gimple_min_invariant (op)) | 1320 || is_gimple_min_invariant (op)) |
1371 return; | 1321 return; |
1372 | 1322 |
1406 force_move_till_op (*index, fmt_data->orig_loop, fmt_data->loop); | 1356 force_move_till_op (*index, fmt_data->orig_loop, fmt_data->loop); |
1407 | 1357 |
1408 return true; | 1358 return true; |
1409 } | 1359 } |
1410 | 1360 |
1411 /* A hash function for struct mem_ref object OBJ. */ | 1361 /* A function to free the mem_ref object OBJ. */ |
1412 | |
1413 static hashval_t | |
1414 memref_hash (const void *obj) | |
1415 { | |
1416 const struct mem_ref *const mem = (const struct mem_ref *) obj; | |
1417 | |
1418 return mem->hash; | |
1419 } | |
1420 | |
1421 /* An equality function for struct mem_ref object OBJ1 with | |
1422 memory reference OBJ2. */ | |
1423 | |
1424 static int | |
1425 memref_eq (const void *obj1, const void *obj2) | |
1426 { | |
1427 const struct mem_ref *const mem1 = (const struct mem_ref *) obj1; | |
1428 | |
1429 return operand_equal_p (mem1->mem, (const_tree) obj2, 0); | |
1430 } | |
1431 | |
1432 /* Releases list of memory reference locations ACCS. */ | |
1433 | 1362 |
1434 static void | 1363 static void |
1435 free_mem_ref_locs (mem_ref_locs_p accs) | 1364 memref_free (struct im_mem_ref *mem) |
1436 { | 1365 { |
1437 unsigned i; | 1366 mem->accesses_in_loop.release (); |
1438 mem_ref_loc_p loc; | |
1439 | |
1440 if (!accs) | |
1441 return; | |
1442 | |
1443 FOR_EACH_VEC_ELT (mem_ref_loc_p, accs->locs, i, loc) | |
1444 free (loc); | |
1445 VEC_free (mem_ref_loc_p, heap, accs->locs); | |
1446 free (accs); | |
1447 } | |
1448 | |
1449 /* A function to free the mem_ref object OBJ. */ | |
1450 | |
1451 static void | |
1452 memref_free (void *obj) | |
1453 { | |
1454 struct mem_ref *const mem = (struct mem_ref *) obj; | |
1455 unsigned i; | |
1456 mem_ref_locs_p accs; | |
1457 | |
1458 BITMAP_FREE (mem->stored); | |
1459 BITMAP_FREE (mem->indep_loop); | |
1460 BITMAP_FREE (mem->dep_loop); | |
1461 BITMAP_FREE (mem->indep_ref); | |
1462 BITMAP_FREE (mem->dep_ref); | |
1463 | |
1464 FOR_EACH_VEC_ELT (mem_ref_locs_p, mem->accesses_in_loop, i, accs) | |
1465 free_mem_ref_locs (accs); | |
1466 VEC_free (mem_ref_locs_p, heap, mem->accesses_in_loop); | |
1467 | |
1468 BITMAP_FREE (mem->vops); | |
1469 free (mem); | |
1470 } | 1367 } |
1471 | 1368 |
1472 /* Allocates and returns a memory reference description for MEM whose hash | 1369 /* Allocates and returns a memory reference description for MEM whose hash |
1473 value is HASH and id is ID. */ | 1370 value is HASH and id is ID. */ |
1474 | 1371 |
1475 static mem_ref_p | 1372 static im_mem_ref * |
1476 mem_ref_alloc (tree mem, unsigned hash, unsigned id) | 1373 mem_ref_alloc (tree mem, unsigned hash, unsigned id) |
1477 { | 1374 { |
1478 mem_ref_p ref = XNEW (struct mem_ref); | 1375 im_mem_ref *ref = XOBNEW (&mem_ref_obstack, struct im_mem_ref); |
1479 ref->mem = mem; | 1376 ao_ref_init (&ref->mem, mem); |
1480 ref->id = id; | 1377 ref->id = id; |
1481 ref->hash = hash; | 1378 ref->hash = hash; |
1482 ref->stored = BITMAP_ALLOC (NULL); | 1379 ref->stored = NULL; |
1483 ref->indep_loop = BITMAP_ALLOC (NULL); | 1380 bitmap_initialize (&ref->indep_loop, &lim_bitmap_obstack); |
1484 ref->dep_loop = BITMAP_ALLOC (NULL); | 1381 bitmap_initialize (&ref->dep_loop, &lim_bitmap_obstack); |
1485 ref->indep_ref = BITMAP_ALLOC (NULL); | 1382 ref->accesses_in_loop.create (1); |
1486 ref->dep_ref = BITMAP_ALLOC (NULL); | |
1487 ref->accesses_in_loop = NULL; | |
1488 ref->vops = BITMAP_ALLOC (NULL); | |
1489 | 1383 |
1490 return ref; | 1384 return ref; |
1491 } | |
1492 | |
1493 /* Allocates and returns the new list of locations. */ | |
1494 | |
1495 static mem_ref_locs_p | |
1496 mem_ref_locs_alloc (void) | |
1497 { | |
1498 mem_ref_locs_p accs = XNEW (struct mem_ref_locs); | |
1499 accs->locs = NULL; | |
1500 return accs; | |
1501 } | 1385 } |
1502 | 1386 |
1503 /* Records memory reference location *LOC in LOOP to the memory reference | 1387 /* Records memory reference location *LOC in LOOP to the memory reference |
1504 description REF. The reference occurs in statement STMT. */ | 1388 description REF. The reference occurs in statement STMT. */ |
1505 | 1389 |
1506 static void | 1390 static void |
1507 record_mem_ref_loc (mem_ref_p ref, struct loop *loop, gimple stmt, tree *loc) | 1391 record_mem_ref_loc (im_mem_ref *ref, gimple *stmt, tree *loc) |
1508 { | 1392 { |
1509 mem_ref_loc_p aref = XNEW (struct mem_ref_loc); | 1393 mem_ref_loc aref; |
1510 mem_ref_locs_p accs; | 1394 aref.stmt = stmt; |
1511 bitmap ril = VEC_index (bitmap, memory_accesses.refs_in_loop, loop->num); | 1395 aref.ref = loc; |
1512 | 1396 ref->accesses_in_loop.safe_push (aref); |
1513 if (VEC_length (mem_ref_locs_p, ref->accesses_in_loop) | 1397 } |
1514 <= (unsigned) loop->num) | 1398 |
1515 VEC_safe_grow_cleared (mem_ref_locs_p, heap, ref->accesses_in_loop, | 1399 /* Set the LOOP bit in REF stored bitmap and allocate that if |
1516 loop->num + 1); | 1400 necessary. Return whether a bit was changed. */ |
1517 accs = VEC_index (mem_ref_locs_p, ref->accesses_in_loop, loop->num); | 1401 |
1518 if (!accs) | 1402 static bool |
1519 { | 1403 set_ref_stored_in_loop (im_mem_ref *ref, struct loop *loop) |
1520 accs = mem_ref_locs_alloc (); | 1404 { |
1521 VEC_replace (mem_ref_locs_p, ref->accesses_in_loop, loop->num, accs); | 1405 if (!ref->stored) |
1522 } | 1406 ref->stored = BITMAP_ALLOC (&lim_bitmap_obstack); |
1523 | 1407 return bitmap_set_bit (ref->stored, loop->num); |
1524 aref->stmt = stmt; | |
1525 aref->ref = loc; | |
1526 | |
1527 VEC_safe_push (mem_ref_loc_p, heap, accs->locs, aref); | |
1528 bitmap_set_bit (ril, ref->id); | |
1529 } | 1408 } |
1530 | 1409 |
1531 /* Marks reference REF as stored in LOOP. */ | 1410 /* Marks reference REF as stored in LOOP. */ |
1532 | 1411 |
1533 static void | 1412 static void |
1534 mark_ref_stored (mem_ref_p ref, struct loop *loop) | 1413 mark_ref_stored (im_mem_ref *ref, struct loop *loop) |
1535 { | 1414 { |
1536 for (; | 1415 while (loop != current_loops->tree_root |
1537 loop != current_loops->tree_root | 1416 && set_ref_stored_in_loop (ref, loop)) |
1538 && !bitmap_bit_p (ref->stored, loop->num); | 1417 loop = loop_outer (loop); |
1539 loop = loop_outer (loop)) | |
1540 bitmap_set_bit (ref->stored, loop->num); | |
1541 } | 1418 } |
1542 | 1419 |
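The rewritten mark_ref_stored keys its early exit on the return value of set_ref_stored_in_loop: once an ancestor loop already has the bit set, every loop above it was marked by an earlier call, so the climb can stop. The same idiom in a self-contained toy (hypothetical parent table standing in for loop_outer):

  #include <stdbool.h>
  #include <stdio.h>

  #define N 8

  static int parent[N] = { -1, 0, 1, 1, 0, 4, 4, 6 };  /* -1 = tree root */
  static bool stored[N];

  /* Returns whether the bit changed, like set_ref_stored_in_loop.  */
  static bool set_stored (int n)
  {
    if (stored[n])
      return false;
    stored[n] = true;
    return true;
  }

  /* Mirrors mark_ref_stored: walk upward, stopping once an ancestor
     already has the property.  */
  static void mark_stored (int n)
  {
    while (n != -1 && set_stored (n))
      n = parent[n];
  }

  int main (void)
  {
    mark_stored (7);                     /* sets 7, 6, 4, 0 */
    mark_stored (5);                     /* sets 5, then stops at 4 */
    for (int i = 0; i < N; ++i)
      printf ("%d:%d ", i, (int) stored[i]);
    printf ("\n");
    return 0;
  }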
1543 /* Gathers memory references in statement STMT in LOOP, storing the | 1420 /* Gathers memory references in statement STMT in LOOP, storing the |
1544 information about them in the memory_accesses structure. Marks | 1421 information about them in the memory_accesses structure. Marks |
1545 the vops accessed through unrecognized statements there as | 1422 the vops accessed through unrecognized statements there as |
1546 well. */ | 1423 well. */ |
1547 | 1424 |
1548 static void | 1425 static void |
1549 gather_mem_refs_stmt (struct loop *loop, gimple stmt) | 1426 gather_mem_refs_stmt (struct loop *loop, gimple *stmt) |
1550 { | 1427 { |
1551 tree *mem = NULL; | 1428 tree *mem = NULL; |
1552 hashval_t hash; | 1429 hashval_t hash; |
1553 PTR *slot; | 1430 im_mem_ref **slot; |
1554 mem_ref_p ref; | 1431 im_mem_ref *ref; |
1555 tree vname; | |
1556 bool is_stored; | 1432 bool is_stored; |
1557 bitmap clvops; | |
1558 unsigned id; | 1433 unsigned id; |
1559 | 1434 |
1560 if (!gimple_vuse (stmt)) | 1435 if (!gimple_vuse (stmt)) |
1561 return; | 1436 return; |
1562 | 1437 |
1563 mem = simple_mem_ref_in_stmt (stmt, &is_stored); | 1438 mem = simple_mem_ref_in_stmt (stmt, &is_stored); |
1564 if (!mem) | 1439 if (!mem) |
1565 goto fail; | 1440 { |
1566 | 1441 /* We use the shared mem_ref for all unanalyzable refs. */ |
1567 hash = iterative_hash_expr (*mem, 0); | 1442 id = UNANALYZABLE_MEM_ID; |
1568 slot = htab_find_slot_with_hash (memory_accesses.refs, *mem, hash, INSERT); | 1443 ref = memory_accesses.refs_list[id]; |
1569 | 1444 if (dump_file && (dump_flags & TDF_DETAILS)) |
1570 if (*slot) | 1445 { |
1571 { | 1446 fprintf (dump_file, "Unanalyzed memory reference %u: ", id); |
1572 ref = (mem_ref_p) *slot; | 1447 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM); |
1573 id = ref->id; | 1448 } |
1449 is_stored = gimple_vdef (stmt); | |
1574 } | 1450 } |
1575 else | 1451 else |
1576 { | 1452 { |
1577 id = VEC_length (mem_ref_p, memory_accesses.refs_list); | 1453 hash = iterative_hash_expr (*mem, 0); |
1578 ref = mem_ref_alloc (*mem, hash, id); | 1454 slot = memory_accesses.refs->find_slot_with_hash (*mem, hash, INSERT); |
1579 VEC_safe_push (mem_ref_p, heap, memory_accesses.refs_list, ref); | 1455 if (*slot) |
1580 *slot = ref; | 1456 { |
1581 | 1457 ref = *slot; |
1582 if (dump_file && (dump_flags & TDF_DETAILS)) | 1458 id = ref->id; |
1583 { | 1459 } |
1584 fprintf (dump_file, "Memory reference %u: ", id); | 1460 else |
1585 print_generic_expr (dump_file, ref->mem, TDF_SLIM); | 1461 { |
1586 fprintf (dump_file, "\n"); | 1462 id = memory_accesses.refs_list.length (); |
1587 } | 1463 ref = mem_ref_alloc (*mem, hash, id); |
1588 } | 1464 memory_accesses.refs_list.safe_push (ref); |
1465 *slot = ref; | |
1466 | |
1467 if (dump_file && (dump_flags & TDF_DETAILS)) | |
1468 { | |
1469 fprintf (dump_file, "Memory reference %u: ", id); | |
1470 print_generic_expr (dump_file, ref->mem.ref, TDF_SLIM); | |
1471 fprintf (dump_file, "\n"); | |
1472 } | |
1473 } | |
1474 | |
1475 record_mem_ref_loc (ref, stmt, mem); | |
1476 } | |
1477 bitmap_set_bit (&memory_accesses.refs_in_loop[loop->num], ref->id); | |
1589 if (is_stored) | 1478 if (is_stored) |
1590 mark_ref_stored (ref, loop); | 1479 { |
1591 | 1480 bitmap_set_bit (&memory_accesses.refs_stored_in_loop[loop->num], ref->id); |
1592 if ((vname = gimple_vuse (stmt)) != NULL_TREE) | 1481 mark_ref_stored (ref, loop); |
1593 bitmap_set_bit (ref->vops, DECL_UID (SSA_NAME_VAR (vname))); | 1482 } |
1594 record_mem_ref_loc (ref, loop, stmt, mem); | 1483 init_lim_data (stmt)->ref = ref->id; |
1595 return; | 1484 return; |
1596 | 1485 } |
1597 fail: | 1486 |
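gather_mem_refs_stmt now canonicalizes every analyzable reference through the hash table, so syntactically equal *MEM trees share one im_mem_ref and one ID, while all unanalyzable statements share the reserved UNANALYZABLE_MEM_ID entry. A rough standalone analogue of that ID-assignment scheme (illustrative container types, not the GCC ones):

  #include <cstdio>
  #include <string>
  #include <unordered_map>
  #include <vector>

  static std::vector<std::string> refs_list = { "<unanalyzable>" };  /* id 0 */
  static std::unordered_map<std::string, unsigned> refs;

  static unsigned lookup_or_add (const char *expr)
  {
    if (!expr)                  /* stands in for "no simple mem ref" */
      return 0;                 /* shared UNANALYZABLE_MEM_ID slot */
    auto it = refs.find (expr);
    if (it != refs.end ())
      return it->second;
    unsigned id = refs_list.size ();
    refs_list.push_back (expr);
    refs.emplace (expr, id);
    return id;
  }

  int main ()
  {
    unsigned id1 = lookup_or_add ("a[i]");
    unsigned id2 = lookup_or_add ("*p");
    unsigned id3 = lookup_or_add ("a[i]");   /* reuses id1 */
    unsigned id4 = lookup_or_add (nullptr);  /* 0 */
    std::printf ("%u %u %u %u\n", id1, id2, id3, id4);  /* 1 2 1 0 */
    return 0;
  }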
1598 clvops = VEC_index (bitmap, memory_accesses.clobbered_vops, loop->num); | 1487 static unsigned *bb_loop_postorder; |
1599 if ((vname = gimple_vuse (stmt)) != NULL_TREE) | 1488 |
1600 bitmap_set_bit (clvops, DECL_UID (SSA_NAME_VAR (vname))); | 1489 /* qsort comparison function to sort blocks by their loop fathers' postorder. */ 
1490 | |
1491 static int | |
1492 sort_bbs_in_loop_postorder_cmp (const void *bb1_, const void *bb2_) | |
1493 { | |
1494 basic_block bb1 = *(basic_block *)const_cast<void *>(bb1_); | |
1495 basic_block bb2 = *(basic_block *)const_cast<void *>(bb2_); | |
1496 struct loop *loop1 = bb1->loop_father; | |
1497 struct loop *loop2 = bb2->loop_father; | |
1498 if (loop1->num == loop2->num) | |
1499 return 0; | |
1500 return bb_loop_postorder[loop1->num] < bb_loop_postorder[loop2->num] ? -1 : 1; | |
1501 } | |
1502 | |
1503 /* qsort comparison function to sort ref locs by their loop fathers' postorder. */ | 
1504 | |
1505 static int | |
1506 sort_locs_in_loop_postorder_cmp (const void *loc1_, const void *loc2_) | |
1507 { | |
1508 mem_ref_loc *loc1 = (mem_ref_loc *)const_cast<void *>(loc1_); | |
1509 mem_ref_loc *loc2 = (mem_ref_loc *)const_cast<void *>(loc2_); | |
1510 struct loop *loop1 = gimple_bb (loc1->stmt)->loop_father; | |
1511 struct loop *loop2 = gimple_bb (loc2->stmt)->loop_father; | |
1512 if (loop1->num == loop2->num) | |
1513 return 0; | |
1514 return bb_loop_postorder[loop1->num] < bb_loop_postorder[loop2->num] ? -1 : 1; | |
1601 } | 1515 } |
1602 | 1516 |
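Both comparators sort by a side table indexed with the loop number, so the effective key is the precomputed loop postorder rather than anything stored in the element itself. A compilable miniature of the pattern (made-up postorder values):

  #include <stdio.h>
  #include <stdlib.h>

  static unsigned postorder[4] = { 3, 0, 2, 1 };  /* index = loop number */

  struct item { int loop_num; const char *name; };

  static int cmp (const void *a_, const void *b_)
  {
    const struct item *a = (const struct item *) a_;
    const struct item *b = (const struct item *) b_;
    if (a->loop_num == b->loop_num)
      return 0;
    return postorder[a->loop_num] < postorder[b->loop_num] ? -1 : 1;
  }

  int main (void)
  {
    struct item v[] = { {0,"bb0"}, {2,"bb2"}, {1,"bb1"}, {3,"bb3"} };
    qsort (v, 4, sizeof v[0], cmp);
    for (int i = 0; i < 4; ++i)
      printf ("%s ", v[i].name);                  /* bb1 bb3 bb2 bb0 */
    printf ("\n");
    return 0;
  }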
1603 /* Gathers memory references in loops. */ | 1517 /* Gathers memory references in loops. */ |
1604 | |
1605 static void | |
1606 gather_mem_refs_in_loops (void) | |
1607 { | |
1608 gimple_stmt_iterator bsi; | |
1609 basic_block bb; | |
1610 struct loop *loop; | |
1611 loop_iterator li; | |
1612 bitmap clvo, clvi; | |
1613 bitmap lrefs, alrefs, alrefso; | |
1614 | |
1615 FOR_EACH_BB (bb) | |
1616 { | |
1617 loop = bb->loop_father; | |
1618 if (loop == current_loops->tree_root) | |
1619 continue; | |
1620 | |
1621 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi)) | |
1622 gather_mem_refs_stmt (loop, gsi_stmt (bsi)); | |
1623 } | |
1624 | |
1625 /* Propagate the information about clobbered vops and accessed memory | |
1626 references up the loop hierarchy. */ | |
1627 FOR_EACH_LOOP (li, loop, LI_FROM_INNERMOST) | |
1628 { | |
1629 lrefs = VEC_index (bitmap, memory_accesses.refs_in_loop, loop->num); | |
1630 alrefs = VEC_index (bitmap, memory_accesses.all_refs_in_loop, loop->num); | |
1631 bitmap_ior_into (alrefs, lrefs); | |
1632 | |
1633 if (loop_outer (loop) == current_loops->tree_root) | |
1634 continue; | |
1635 | |
1636 clvi = VEC_index (bitmap, memory_accesses.clobbered_vops, loop->num); | |
1637 clvo = VEC_index (bitmap, memory_accesses.clobbered_vops, | |
1638 loop_outer (loop)->num); | |
1639 bitmap_ior_into (clvo, clvi); | |
1640 | |
1641 alrefso = VEC_index (bitmap, memory_accesses.all_refs_in_loop, | |
1642 loop_outer (loop)->num); | |
1643 bitmap_ior_into (alrefso, alrefs); | |
1644 } | |
1645 } | |
1646 | |
1647 /* Element of the hash table that maps vops to memory references. */ | |
1648 | |
1649 struct vop_to_refs_elt | |
1650 { | |
1651 /* DECL_UID of the vop. */ | |
1652 unsigned uid; | |
1653 | |
1654 /* List of the all references. */ | |
1655 bitmap refs_all; | |
1656 | |
1657 /* List of stored references. */ | |
1658 bitmap refs_stored; | |
1659 }; | |
1660 | |
1661 /* A hash function for struct vop_to_refs_elt object OBJ. */ | |
1662 | |
1663 static hashval_t | |
1664 vtoe_hash (const void *obj) | |
1665 { | |
1666 const struct vop_to_refs_elt *const vtoe = | |
1667 (const struct vop_to_refs_elt *) obj; | |
1668 | |
1669 return vtoe->uid; | |
1670 } | |
1671 | |
1672 /* An equality function for struct vop_to_refs_elt object OBJ1 with | |
1673 uid of a vop OBJ2. */ | |
1674 | |
1675 static int | |
1676 vtoe_eq (const void *obj1, const void *obj2) | |
1677 { | |
1678 const struct vop_to_refs_elt *const vtoe = | |
1679 (const struct vop_to_refs_elt *) obj1; | |
1680 const unsigned *const uid = (const unsigned *) obj2; | |
1681 | |
1682 return vtoe->uid == *uid; | |
1683 } | |
1684 | |
1685 /* A function to free the struct vop_to_refs_elt object. */ | |
1686 | |
1687 static void | |
1688 vtoe_free (void *obj) | |
1689 { | |
1690 struct vop_to_refs_elt *const vtoe = | |
1691 (struct vop_to_refs_elt *) obj; | |
1692 | |
1693 BITMAP_FREE (vtoe->refs_all); | |
1694 BITMAP_FREE (vtoe->refs_stored); | |
1695 free (vtoe); | |
1696 } | |
1697 | |
1698 /* Records REF to hashtable VOP_TO_REFS for the index VOP. STORED is true | |
1699 if the reference REF is stored. */ | |
1700 | |
1701 static void | |
1702 record_vop_access (htab_t vop_to_refs, unsigned vop, unsigned ref, bool stored) | |
1703 { | |
1704 void **slot = htab_find_slot_with_hash (vop_to_refs, &vop, vop, INSERT); | |
1705 struct vop_to_refs_elt *vtoe; | |
1706 | |
1707 if (!*slot) | |
1708 { | |
1709 vtoe = XNEW (struct vop_to_refs_elt); | |
1710 vtoe->uid = vop; | |
1711 vtoe->refs_all = BITMAP_ALLOC (NULL); | |
1712 vtoe->refs_stored = BITMAP_ALLOC (NULL); | |
1713 *slot = vtoe; | |
1714 } | |
1715 else | |
1716 vtoe = (struct vop_to_refs_elt *) *slot; | |
1717 | |
1718 bitmap_set_bit (vtoe->refs_all, ref); | |
1719 if (stored) | |
1720 bitmap_set_bit (vtoe->refs_stored, ref); | |
1721 } | |
1722 | |
1723 /* Returns the set of references that access VOP according to the table | |
1724 VOP_TO_REFS. */ | |
1725 | |
1726 static bitmap | |
1727 get_vop_accesses (htab_t vop_to_refs, unsigned vop) | |
1728 { | |
1729 struct vop_to_refs_elt *const vtoe = | |
1730 (struct vop_to_refs_elt *) htab_find_with_hash (vop_to_refs, &vop, vop); | |
1731 return vtoe->refs_all; | |
1732 } | |
1733 | |
1734 /* Returns the set of stores that access VOP according to the table | |
1735 VOP_TO_REFS. */ | |
1736 | |
1737 static bitmap | |
1738 get_vop_stores (htab_t vop_to_refs, unsigned vop) | |
1739 { | |
1740 struct vop_to_refs_elt *const vtoe = | |
1741 (struct vop_to_refs_elt *) htab_find_with_hash (vop_to_refs, &vop, vop); | |
1742 return vtoe->refs_stored; | |
1743 } | |
1744 | |
1745 /* Adds REF to mapping from virtual operands to references in LOOP. */ | |
1746 | |
1747 static void | |
1748 add_vop_ref_mapping (struct loop *loop, mem_ref_p ref) | |
1749 { | |
1750 htab_t map = VEC_index (htab_t, memory_accesses.vop_ref_map, loop->num); | |
1751 bool stored = bitmap_bit_p (ref->stored, loop->num); | |
1752 bitmap clobbers = VEC_index (bitmap, memory_accesses.clobbered_vops, | |
1753 loop->num); | |
1754 bitmap_iterator bi; | |
1755 unsigned vop; | |
1756 | |
1757 EXECUTE_IF_AND_COMPL_IN_BITMAP (ref->vops, clobbers, 0, vop, bi) | |
1758 { | |
1759 record_vop_access (map, vop, ref->id, stored); | |
1760 } | |
1761 } | |
1762 | |
1763 /* Create a mapping from virtual operands to references that touch them | |
1764 in LOOP. */ | |
1765 | |
1766 static void | |
1767 create_vop_ref_mapping_loop (struct loop *loop) | |
1768 { | |
1769 bitmap refs = VEC_index (bitmap, memory_accesses.refs_in_loop, loop->num); | |
1770 struct loop *sloop; | |
1771 bitmap_iterator bi; | |
1772 unsigned i; | |
1773 mem_ref_p ref; | |
1774 | |
1775 EXECUTE_IF_SET_IN_BITMAP (refs, 0, i, bi) | |
1776 { | |
1777 ref = VEC_index (mem_ref_p, memory_accesses.refs_list, i); | |
1778 for (sloop = loop; sloop != current_loops->tree_root; sloop = loop_outer (sloop)) | |
1779 add_vop_ref_mapping (sloop, ref); | |
1780 } | |
1781 } | |
1782 | |
1783 /* For each non-clobbered virtual operand and each loop, record the memory | |
1784 references in this loop that touch the operand. */ | |
1785 | |
1786 static void | |
1787 create_vop_ref_mapping (void) | |
1788 { | |
1789 loop_iterator li; | |
1790 struct loop *loop; | |
1791 | |
1792 FOR_EACH_LOOP (li, loop, 0) | |
1793 { | |
1794 create_vop_ref_mapping_loop (loop); | |
1795 } | |
1796 } | |
1797 | |
1798 /* Gathers information about memory accesses in the loops. */ | |
1799 | 1518 |
1800 static void | 1519 static void |
1801 analyze_memory_references (void) | 1520 analyze_memory_references (void) |
1802 { | 1521 { |
1803 unsigned i; | 1522 gimple_stmt_iterator bsi; |
1804 bitmap empty; | 1523 basic_block bb, *bbs; |
1805 htab_t hempty; | 1524 struct loop *loop, *outer; |
1806 | 1525 unsigned i, n; |
1807 memory_accesses.refs | 1526 |
1808 = htab_create (100, memref_hash, memref_eq, memref_free); | 1527 /* Collect all basic-blocks in loops and sort them after their |
1809 memory_accesses.refs_list = NULL; | 1528 loops postorder. */ |
1810 memory_accesses.refs_in_loop = VEC_alloc (bitmap, heap, | 1529 i = 0; |
1811 number_of_loops ()); | 1530 bbs = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS); |
1812 memory_accesses.all_refs_in_loop = VEC_alloc (bitmap, heap, | 1531 FOR_EACH_BB_FN (bb, cfun) |
1813 number_of_loops ()); | 1532 if (bb->loop_father != current_loops->tree_root) |
1814 memory_accesses.clobbered_vops = VEC_alloc (bitmap, heap, | 1533 bbs[i++] = bb; |
1815 number_of_loops ()); | 1534 n = i; |
1816 memory_accesses.vop_ref_map = VEC_alloc (htab_t, heap, | 1535 qsort (bbs, n, sizeof (basic_block), sort_bbs_in_loop_postorder_cmp); |
1817 number_of_loops ()); | 1536 |
1818 | 1537 /* Visit blocks in loop postorder and assign mem-ref IDs in that order. |
1819 for (i = 0; i < number_of_loops (); i++) | 1538 That results in better locality for all the bitmaps. */ |
1820 { | 1539 for (i = 0; i < n; ++i) |
1821 empty = BITMAP_ALLOC (NULL); | 1540 { |
1822 VEC_quick_push (bitmap, memory_accesses.refs_in_loop, empty); | 1541 basic_block bb = bbs[i]; |
1823 empty = BITMAP_ALLOC (NULL); | 1542 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi)) |
1824 VEC_quick_push (bitmap, memory_accesses.all_refs_in_loop, empty); | 1543 gather_mem_refs_stmt (bb->loop_father, gsi_stmt (bsi)); |
1825 empty = BITMAP_ALLOC (NULL); | 1544 } |
1826 VEC_quick_push (bitmap, memory_accesses.clobbered_vops, empty); | 1545 |
1827 hempty = htab_create (10, vtoe_hash, vtoe_eq, vtoe_free); | 1546 /* Sort the location list of gathered memory references by their 
1828 VEC_quick_push (htab_t, memory_accesses.vop_ref_map, hempty); | 1547 loop postorder number. */ |
1829 } | 1548 im_mem_ref *ref; |
1830 | 1549 FOR_EACH_VEC_ELT (memory_accesses.refs_list, i, ref) |
1831 memory_accesses.ttae_cache = NULL; | 1550 ref->accesses_in_loop.qsort (sort_locs_in_loop_postorder_cmp); |
1832 | 1551 |
1833 gather_mem_refs_in_loops (); | 1552 free (bbs); |
1834 create_vop_ref_mapping (); | 1553 // free (bb_loop_postorder); |
1835 } | 1554 |
1836 | 1555 /* Propagate the information about accessed memory references up |
1837 /* Returns true if a region of size SIZE1 at position 0 and a region of | 1556 the loop hierarchy. */ |
1838 size SIZE2 at position DIFF cannot overlap. */ | 1557 FOR_EACH_LOOP (loop, LI_FROM_INNERMOST) |
1839 | 1558 { |
1840 static bool | 1559 /* Finalize the overall touched references (including subloops). */ |
1841 cannot_overlap_p (aff_tree *diff, double_int size1, double_int size2) | 1560 bitmap_ior_into (&memory_accesses.all_refs_stored_in_loop[loop->num], |
1842 { | 1561 &memory_accesses.refs_stored_in_loop[loop->num]); |
1843 double_int d, bound; | 1562 |
1844 | 1563 /* Propagate the information about accessed memory references up |
1845 /* Unless the difference is a constant, we fail. */ | 1564 the loop hierarchy. */ |
1846 if (diff->n != 0) | 1565 outer = loop_outer (loop); |
1847 return false; | 1566 if (outer == current_loops->tree_root) |
1848 | 1567 continue; |
1849 d = diff->offset; | 1568 |
1850 if (double_int_negative_p (d)) | 1569 bitmap_ior_into (&memory_accesses.all_refs_stored_in_loop[outer->num], |
1851 { | 1570 &memory_accesses.all_refs_stored_in_loop[loop->num]); |
1852 /* The second object is before the first one, we succeed if the last | |
1853 element of the second object is before the start of the first one. */ | |
1854 bound = double_int_add (d, double_int_add (size2, double_int_minus_one)); | |
1855 return double_int_negative_p (bound); | |
1856 } | |
1857 else | |
1858 { | |
1859 /* We succeed if the second object starts after the first one ends. */ | |
1860 return double_int_scmp (size1, d) <= 0; | |
1861 } | 1571 } |
1862 } | 1572 } |
1863 | 1573 |
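The disjointness test that the old cannot_overlap_p implemented, and that aff_comb_cannot_overlap_p now provides, is plain interval arithmetic on a constant offset difference: [0, size1) and [diff, diff + size2) cannot overlap iff the second region ends before position 0 or starts at or beyond size1. A standalone rendering of the test:

  #include <stdbool.h>
  #include <stdio.h>

  /* Region A = [0, size1), region B = [diff, diff + size2).  */
  static bool cannot_overlap (long diff, long size1, long size2)
  {
    if (diff < 0)
      /* B starts before A: B must also end before A starts.  */
      return diff + size2 - 1 < 0;
    /* B starts at or after A's start: A must end first.  */
    return size1 <= diff;
  }

  int main (void)
  {
    printf ("%d\n", (int) cannot_overlap (8, 4, 8));   /* 1: [0,4) vs [8,16) */
    printf ("%d\n", (int) cannot_overlap (2, 4, 4));   /* 0: [0,4) vs [2,6) */
    printf ("%d\n", (int) cannot_overlap (-4, 4, 4));  /* 1: [0,4) vs [-4,0) */
    return 0;
  }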
1864 /* Returns true if MEM1 and MEM2 may alias. TTAE_CACHE is used as a cache in | 1574 /* Returns true if MEM1 and MEM2 may alias. TTAE_CACHE is used as a cache in |
1865 tree_to_aff_combination_expand. */ | 1575 tree_to_aff_combination_expand. */ |
1866 | 1576 |
1867 static bool | 1577 static bool |
1868 mem_refs_may_alias_p (tree mem1, tree mem2, struct pointer_map_t **ttae_cache) | 1578 mem_refs_may_alias_p (im_mem_ref *mem1, im_mem_ref *mem2, |
1579 hash_map<tree, name_expansion *> **ttae_cache) | |
1869 { | 1580 { |
1870 /* Perform BASE + OFFSET analysis -- if MEM1 and MEM2 are based on the same | 1581 /* Perform BASE + OFFSET analysis -- if MEM1 and MEM2 are based on the same |
1871 object and their offset differ in such a way that the locations cannot | 1582 object and their offset differ in such a way that the locations cannot |
1872 overlap, then they cannot alias. */ | 1583 overlap, then they cannot alias. */ |
1873 double_int size1, size2; | 1584 widest_int size1, size2; |
1874 aff_tree off1, off2; | 1585 aff_tree off1, off2; |
1875 | 1586 |
1876 /* Perform basic offset and type-based disambiguation. */ | 1587 /* Perform basic offset and type-based disambiguation. */ |
1877 if (!refs_may_alias_p (mem1, mem2)) | 1588 if (!refs_may_alias_p_1 (&mem1->mem, &mem2->mem, true)) |
1878 return false; | 1589 return false; |
1879 | 1590 |
1880 /* The expansion of addresses may be a bit expensive, thus we only do | 1591 /* The expansion of addresses may be a bit expensive, thus we only do |
1881 the check at -O2 and higher optimization levels. */ | 1592 the check at -O2 and higher optimization levels. */ |
1882 if (optimize < 2) | 1593 if (optimize < 2) |
1883 return true; | 1594 return true; |
1884 | 1595 |
1885 get_inner_reference_aff (mem1, &off1, &size1); | 1596 get_inner_reference_aff (mem1->mem.ref, &off1, &size1); |
1886 get_inner_reference_aff (mem2, &off2, &size2); | 1597 get_inner_reference_aff (mem2->mem.ref, &off2, &size2); |
1887 aff_combination_expand (&off1, ttae_cache); | 1598 aff_combination_expand (&off1, ttae_cache); |
1888 aff_combination_expand (&off2, ttae_cache); | 1599 aff_combination_expand (&off2, ttae_cache); |
1889 aff_combination_scale (&off1, double_int_minus_one); | 1600 aff_combination_scale (&off1, -1); |
1890 aff_combination_add (&off2, &off1); | 1601 aff_combination_add (&off2, &off1); |
1891 | 1602 |
1892 if (cannot_overlap_p (&off2, size1, size2)) | 1603 if (aff_comb_cannot_overlap_p (&off2, size1, size2)) |
1893 return false; | 1604 return false; |
1894 | 1605 |
1895 return true; | 1606 return true; |
1896 } | 1607 } |
1897 | 1608 |
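As a worked example of the offset check above: suppose after expansion MEM1 is p + 8 with size1 = 4 and MEM2 is p + 16 with size2 = 8. Scaling off1 by -1 and adding leaves off2 holding the constant difference 16 - 8 = 8; since size1 = 4 <= 8, the regions [8, 12) and [16, 24) cannot overlap and the references are disambiguated. If the difference still contains a symbolic term after aff_combination_expand, the test falls through and the references are conservatively treated as possibly aliasing.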
1609 /* Compare function for bsearch searching for reference locations | |
1610 in a loop. */ | |
1611 | |
1612 static int | |
1613 find_ref_loc_in_loop_cmp (const void *loop_, const void *loc_) | |
1614 { | |
1615 struct loop *loop = (struct loop *)const_cast<void *>(loop_); | |
1616 mem_ref_loc *loc = (mem_ref_loc *)const_cast<void *>(loc_); | |
1617 struct loop *loc_loop = gimple_bb (loc->stmt)->loop_father; | |
1618 if (loop->num == loc_loop->num | |
1619 || flow_loop_nested_p (loop, loc_loop)) | |
1620 return 0; | |
1621 return (bb_loop_postorder[loop->num] < bb_loop_postorder[loc_loop->num] | |
1622 ? -1 : 1); | |
1623 } | |
1624 | |
1625 /* Iterates over all locations of REF in LOOP and its subloops calling | |
1626 fn.operator() with the location as argument. When that operator | |
1627 returns true the iteration is stopped and true is returned. | |
1628 Otherwise false is returned. */ | |
1629 | |
1630 template <typename FN> | |
1631 static bool | |
1632 for_all_locs_in_loop (struct loop *loop, im_mem_ref *ref, FN fn) | |
1633 { | |
1634 unsigned i; | |
1635 mem_ref_loc *loc; | |
1636 | |
1637 /* Search for the cluster of locs in the accesses_in_loop vector | |
1638 which is sorted after postorder index of the loop father. */ | |
1639 loc = ref->accesses_in_loop.bsearch (loop, find_ref_loc_in_loop_cmp); | |
1640 if (!loc) | |
1641 return false; | |
1642 | |
1643 /* We have found one location inside loop or its sub-loops. Iterate | |
1644 both forward and backward to cover the whole cluster. */ | |
1645 i = loc - ref->accesses_in_loop.address (); | |
1646 while (i > 0) | |
1647 { | |
1648 --i; | |
1649 mem_ref_loc *l = &ref->accesses_in_loop[i]; | |
1650 if (!flow_bb_inside_loop_p (loop, gimple_bb (l->stmt))) | |
1651 break; | |
1652 if (fn (l)) | |
1653 return true; | |
1654 } | |
1655 for (i = loc - ref->accesses_in_loop.address (); | |
1656 i < ref->accesses_in_loop.length (); ++i) | |
1657 { | |
1658 mem_ref_loc *l = &ref->accesses_in_loop[i]; | |
1659 if (!flow_bb_inside_loop_p (loop, gimple_bb (l->stmt))) | |
1660 break; | |
1661 if (fn (l)) | |
1662 return true; | |
1663 } | |
1664 | |
1665 return false; | |
1666 } | |
1667 | |
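Because accesses_in_loop is sorted by the enclosing loop's postorder index, all locations belonging to LOOP and its subloops form one contiguous run, and bsearch may land anywhere inside it; hence the expansion in both directions above. The same trick on a plain sorted array (self-contained sketch):

  #include <stdio.h>
  #include <stdlib.h>

  /* Sorted array; the 5s form one contiguous cluster, and bsearch may
     return any element that compares equal.  */
  static int v[] = { 1, 2, 5, 5, 5, 7, 9 };
  #define N (sizeof (v) / sizeof (v[0]))

  static int cmp (const void *k_, const void *e_)
  {
    int k = *(const int *) k_, e = *(const int *) e_;
    return k == e ? 0 : (k < e ? -1 : 1);
  }

  int main (void)
  {
    int key = 5;
    int *hit = (int *) bsearch (&key, v, N, sizeof (int), cmp);
    if (!hit)
      return 0;
    size_t i = hit - v;
    while (i > 0 && v[i - 1] == key)    /* expand left ...  */
      --i;
    for (; i < N && v[i] == key; ++i)   /* ... then sweep right */
      printf ("v[%d] = %d\n", (int) i, v[i]);
    return 0;
  }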
1898 /* Rewrites location LOC by TMP_VAR. */ | 1668 /* Rewrites location LOC by TMP_VAR. */ |
1899 | 1669 |
1900 static void | 1670 struct rewrite_mem_ref_loc |
1901 rewrite_mem_ref_loc (mem_ref_loc_p loc, tree tmp_var) | 1671 { |
1902 { | 1672 rewrite_mem_ref_loc (tree tmp_var_) : tmp_var (tmp_var_) {} |
1903 mark_virtual_ops_for_renaming (loc->stmt); | 1673 bool operator () (mem_ref_loc *loc); |
1674 tree tmp_var; | |
1675 }; | |
1676 | |
1677 bool | |
1678 rewrite_mem_ref_loc::operator () (mem_ref_loc *loc) | |
1679 { | |
1904 *loc->ref = tmp_var; | 1680 *loc->ref = tmp_var; |
1905 update_stmt (loc->stmt); | 1681 update_stmt (loc->stmt); |
1906 } | 1682 return false; |
1907 | 1683 } |
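rewrite_mem_ref_loc is the first of several small functor classes introduced by this hunk: state lives in data members, operator () is called once per location, and returning true stops the walk early (an unconditional rewrite therefore returns false so every location is visited). A minimal analogue of the template-plus-functor protocol:

  #include <cstdio>

  /* FN::operator () returns true to stop the iteration early.  */
  template <typename FN>
  static bool for_each_int (int *v, int n, FN fn)
  {
    for (int i = 0; i < n; ++i)
      if (fn (v[i]))
        return true;
    return false;
  }

  /* Analogue of first_mem_ref_loc_1: capture the first element seen.  */
  struct first_elt
  {
    first_elt (int *out_) : out (out_) {}
    bool operator () (int x) { *out = x; return true; }
    int *out;
  };

  int main ()
  {
    int v[] = { 3, 1, 4 };
    int first = 0;
    bool found = for_each_int (v, 3, first_elt (&first));
    std::printf ("%d %d\n", (int) found, first);  /* 1 3 */
    return 0;
  }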
1908 /* Adds all locations of REF in LOOP and its subloops to LOCS. */ | 1684 |
1685 /* Rewrites all references to REF in LOOP by variable TMP_VAR. */ | |
1909 | 1686 |
1910 static void | 1687 static void |
1911 get_all_locs_in_loop (struct loop *loop, mem_ref_p ref, | 1688 rewrite_mem_refs (struct loop *loop, im_mem_ref *ref, tree tmp_var) |
1912 VEC (mem_ref_loc_p, heap) **locs) | 1689 { |
1913 { | 1690 for_all_locs_in_loop (loop, ref, rewrite_mem_ref_loc (tmp_var)); |
1914 mem_ref_locs_p accs; | 1691 } |
1915 unsigned i; | 1692 |
1916 mem_ref_loc_p loc; | 1693 /* Stores the first reference location in LOCP. */ |
1917 bitmap refs = VEC_index (bitmap, memory_accesses.all_refs_in_loop, | 1694 |
1918 loop->num); | 1695 struct first_mem_ref_loc_1 |
1919 struct loop *subloop; | 1696 { |
1920 | 1697 first_mem_ref_loc_1 (mem_ref_loc **locp_) : locp (locp_) {} |
1921 if (!bitmap_bit_p (refs, ref->id)) | 1698 bool operator () (mem_ref_loc *loc); |
1922 return; | 1699 mem_ref_loc **locp; |
1923 | 1700 }; |
1924 if (VEC_length (mem_ref_locs_p, ref->accesses_in_loop) | 1701 |
1925 > (unsigned) loop->num) | 1702 bool |
1926 { | 1703 first_mem_ref_loc_1::operator () (mem_ref_loc *loc) |
1927 accs = VEC_index (mem_ref_locs_p, ref->accesses_in_loop, loop->num); | 1704 { |
1928 if (accs) | 1705 *locp = loc; |
1929 { | 1706 return true; |
1930 FOR_EACH_VEC_ELT (mem_ref_loc_p, accs->locs, i, loc) | 1707 } |
1931 VEC_safe_push (mem_ref_loc_p, heap, *locs, loc); | 1708 |
1932 } | 1709 /* Returns the first reference location to REF in LOOP. */ |
1933 } | 1710 |
1934 | 1711 static mem_ref_loc * |
1935 for (subloop = loop->inner; subloop != NULL; subloop = subloop->next) | 1712 first_mem_ref_loc (struct loop *loop, im_mem_ref *ref) |
1936 get_all_locs_in_loop (subloop, ref, locs); | 1713 { |
1937 } | 1714 mem_ref_loc *locp = NULL; |
1938 | 1715 for_all_locs_in_loop (loop, ref, first_mem_ref_loc_1 (&locp)); |
1939 /* Rewrites all references to REF in LOOP by variable TMP_VAR. */ | 1716 return locp; |
1717 } | |
1718 | |
1719 struct prev_flag_edges { | |
1720 /* Edge to insert new flag comparison code. */ | |
1721 edge append_cond_position; | |
1722 | |
1723 /* Edge for fall through from previous flag comparison. */ | |
1724 edge last_cond_fallthru; | |
1725 }; | |
1726 | |
1727 /* Helper function for execute_sm. Emit code to store TMP_VAR into | |
1728 MEM along edge EX. | |
1729 | |
1730 The store is only done if MEM has changed. We do this so no | |
1731 changes to MEM occur on code paths that did not originally store | |
1732 into it. | |
1733 | |
1734 The common case for execute_sm will transform: | |
1735 | |
1736 for (...) { | |
1737 if (foo) | |
1738 stuff; | |
1739 else | |
1740 MEM = TMP_VAR; | |
1741 } | |
1742 | |
1743 into: | |
1744 | |
1745 lsm = MEM; | |
1746 for (...) { | |
1747 if (foo) | |
1748 stuff; | |
1749 else | |
1750 lsm = TMP_VAR; | |
1751 } | |
1752 MEM = lsm; | |
1753 | |
1754 This function will generate: | |
1755 | |
1756 lsm = MEM; | |
1757 | |
1758 lsm_flag = false; | |
1759 ... | |
1760 for (...) { | |
1761 if (foo) | |
1762 stuff; | |
1763 else { | |
1764 lsm = TMP_VAR; | |
1765 lsm_flag = true; | |
1766 } | |
1767 } | |
1768 if (lsm_flag) <-- | |
1769 MEM = lsm; <-- | |
1770 */ | |
1940 | 1771 |
1941 static void | 1772 static void |
1942 rewrite_mem_refs (struct loop *loop, mem_ref_p ref, tree tmp_var) | 1773 execute_sm_if_changed (edge ex, tree mem, tree tmp_var, tree flag, |
1943 { | 1774 edge preheader, hash_set <basic_block> *flag_bbs) |
1944 unsigned i; | 1775 { |
1945 mem_ref_loc_p loc; | 1776 basic_block new_bb, then_bb, old_dest; |
1946 VEC (mem_ref_loc_p, heap) *locs = NULL; | 1777 bool loop_has_only_one_exit; |
1947 | 1778 edge then_old_edge, orig_ex = ex; |
1948 get_all_locs_in_loop (loop, ref, &locs); | 1779 gimple_stmt_iterator gsi; |
1949 FOR_EACH_VEC_ELT (mem_ref_loc_p, locs, i, loc) | 1780 gimple *stmt; |
1950 rewrite_mem_ref_loc (loc, tmp_var); | 1781 struct prev_flag_edges *prev_edges = (struct prev_flag_edges *) ex->aux; |
1951 VEC_free (mem_ref_loc_p, heap, locs); | 1782 bool irr = ex->flags & EDGE_IRREDUCIBLE_LOOP; |
1952 } | 1783 |
1953 | 1784 int freq_sum = 0; |
1954 /* The name and the length of the currently generated variable | 1785 profile_count count_sum = profile_count::zero (); |
1955 for lsm. */ | 1786 int nbbs = 0, ncount = 0; |
1956 #define MAX_LSM_NAME_LENGTH 40 | 1787 profile_probability flag_probability = profile_probability::uninitialized (); |
1957 static char lsm_tmp_name[MAX_LSM_NAME_LENGTH + 1]; | 1788 |
1958 static int lsm_tmp_name_length; | 1789 /* Flag is set in FLAG_BBS. Determine probability that flag will be true |
1959 | 1790 at loop exit. |
1960 /* Adds S to lsm_tmp_name. */ | 1791 |
1961 | 1792 This code may look fancy, but it can not update profile very realistically |
1962 static void | 1793 This code may look fancy, but it cannot update the profile very realistically 
1963 lsm_tmp_name_add (const char *s) | 1794 because we do not know the probability that flag will be true at a given 
1964 { | 1795 |
1965 int l = strlen (s) + lsm_tmp_name_length; | 1796 We look for two interesting extremes |
1966 if (l > MAX_LSM_NAME_LENGTH) | 1797 - when exit is dominated by block setting the flag, we know it will |
1967 return; | 1798 always be true. This is a common case. |
1968 | 1799 - when all blocks setting the flag have very low frequency we know |
1969 strcpy (lsm_tmp_name + lsm_tmp_name_length, s); | 1800 it will likely be false. |
1970 lsm_tmp_name_length = l; | 1801 In all other cases we default to 2/3 for flag being true. */ |
1971 } | 1802 |
1972 | 1803 for (hash_set<basic_block>::iterator it = flag_bbs->begin (); |
1973 /* Stores the name for temporary variable that replaces REF to | 1804 it != flag_bbs->end (); ++it) |
1974 lsm_tmp_name. */ | 1805 { |
1975 | 1806 freq_sum += (*it)->frequency; |
1976 static void | 1807 if ((*it)->count.initialized_p ()) |
1977 gen_lsm_tmp_name (tree ref) | 1808 count_sum += (*it)->count, ncount ++; |
1978 { | 1809 if (dominated_by_p (CDI_DOMINATORS, ex->src, *it)) |
1979 const char *name; | 1810 flag_probability = profile_probability::always (); |
1980 | 1811 nbbs++; |
1981 switch (TREE_CODE (ref)) | 1812 } |
1982 { | 1813 |
1983 case MEM_REF: | 1814 profile_probability cap = profile_probability::always ().apply_scale (2, 3); |
1984 gen_lsm_tmp_name (TREE_OPERAND (ref, 0)); | 1815 |
1985 lsm_tmp_name_add ("_"); | 1816 if (flag_probability.initialized_p ()) |
1986 break; | 1817 ; |
1987 | 1818 else if (ncount == nbbs && count_sum > 0 && preheader->count () >= count_sum) |
1988 case ADDR_EXPR: | 1819 { |
1989 gen_lsm_tmp_name (TREE_OPERAND (ref, 0)); | 1820 flag_probability = count_sum.probability_in (preheader->count ()); |
1990 break; | 1821 if (flag_probability > cap) |
1991 | 1822 flag_probability = cap; |
1992 case BIT_FIELD_REF: | 1823 } |
1993 case VIEW_CONVERT_EXPR: | 1824 else if (freq_sum > 0 && EDGE_FREQUENCY (preheader) >= freq_sum) |
1994 case ARRAY_RANGE_REF: | 1825 { |
1995 gen_lsm_tmp_name (TREE_OPERAND (ref, 0)); | 1826 flag_probability = profile_probability::from_reg_br_prob_base |
1996 break; | 1827 (GCOV_COMPUTE_SCALE (freq_sum, EDGE_FREQUENCY (preheader))); |
1997 | 1828 if (flag_probability > cap) |
1998 case REALPART_EXPR: | 1829 flag_probability = cap; |
1999 gen_lsm_tmp_name (TREE_OPERAND (ref, 0)); | 1830 } |
2000 lsm_tmp_name_add ("_RE"); | 1831 else |
2001 break; | 1832 flag_probability = cap; |
2002 | 1833 |
2003 case IMAGPART_EXPR: | 1834 /* ?? Insert store after previous store if applicable. See note |
2004 gen_lsm_tmp_name (TREE_OPERAND (ref, 0)); | 1835 below. */ |
2005 lsm_tmp_name_add ("_IM"); | 1836 if (prev_edges) |
2006 break; | 1837 ex = prev_edges->append_cond_position; |
2007 | 1838 |
2008 case COMPONENT_REF: | 1839 loop_has_only_one_exit = single_pred_p (ex->dest); |
2009 gen_lsm_tmp_name (TREE_OPERAND (ref, 0)); | 1840 |
2010 lsm_tmp_name_add ("_"); | 1841 if (loop_has_only_one_exit) |
2011 name = get_name (TREE_OPERAND (ref, 1)); | 1842 ex = split_block_after_labels (ex->dest); |
2012 if (!name) | 1843 else |
2013 name = "F"; | 1844 { |
2014 lsm_tmp_name_add (name); | 1845 for (gphi_iterator gpi = gsi_start_phis (ex->dest); |
2015 break; | 1846 !gsi_end_p (gpi); gsi_next (&gpi)) |
2016 | 1847 { |
2017 case ARRAY_REF: | 1848 gphi *phi = gpi.phi (); |
2018 gen_lsm_tmp_name (TREE_OPERAND (ref, 0)); | 1849 if (virtual_operand_p (gimple_phi_result (phi))) |
2019 lsm_tmp_name_add ("_I"); | 1850 continue; |
2020 break; | 1851 |
2021 | 1852 /* When the destination has a non-virtual PHI node with multiple |
2022 case SSA_NAME: | 1853 predecessors make sure we preserve the PHI structure by |
2023 ref = SSA_NAME_VAR (ref); | 1854 forcing a forwarder block so that hoisting of that PHI will |
2024 /* Fallthru. */ | 1855 still work. */ |
2025 | 1856 split_edge (ex); |
2026 case VAR_DECL: | 1857 break; |
2027 case PARM_DECL: | 1858 } |
2028 name = get_name (ref); | 1859 } |
2029 if (!name) | 1860 |
2030 name = "D"; | 1861 old_dest = ex->dest; |
2031 lsm_tmp_name_add (name); | 1862 new_bb = split_edge (ex); |
2032 break; | 1863 then_bb = create_empty_bb (new_bb); |
2033 | 1864 then_bb->frequency = flag_probability.apply (new_bb->frequency); |
2034 case STRING_CST: | 1865 then_bb->count = new_bb->count.apply_probability (flag_probability); |
2035 lsm_tmp_name_add ("S"); | 1866 if (irr) |
2036 break; | 1867 then_bb->flags = BB_IRREDUCIBLE_LOOP; |
2037 | 1868 add_bb_to_loop (then_bb, new_bb->loop_father); |
2038 case RESULT_DECL: | 1869 |
2039 lsm_tmp_name_add ("R"); | 1870 gsi = gsi_start_bb (new_bb); |
2040 break; | 1871 stmt = gimple_build_cond (NE_EXPR, flag, boolean_false_node, |
2041 | 1872 NULL_TREE, NULL_TREE); |
2042 case INTEGER_CST: | 1873 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING); |
2043 /* Nothing. */ | 1874 |
2044 break; | 1875 gsi = gsi_start_bb (then_bb); |
2045 | 1876 /* Insert actual store. */ |
2046 default: | 1877 stmt = gimple_build_assign (unshare_expr (mem), tmp_var); |
2047 gcc_unreachable (); | 1878 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING); |
2048 } | 1879 |
2049 } | 1880 edge e1 = single_succ_edge (new_bb); |
2050 | 1881 edge e2 = make_edge (new_bb, then_bb, |
2051 /* Determines name for temporary variable that replaces REF. | 1882 EDGE_TRUE_VALUE | (irr ? EDGE_IRREDUCIBLE_LOOP : 0)); |
2052 The name is accumulated into the lsm_tmp_name variable. | 1883 e2->probability = flag_probability; |
2053 N is added to the name of the temporary. */ | 1884 |
2054 | 1885 e1->flags |= EDGE_FALSE_VALUE | (irr ? EDGE_IRREDUCIBLE_LOOP : 0); |
2055 char * | 1886 e1->flags &= ~EDGE_FALLTHRU; |
2056 get_lsm_tmp_name (tree ref, unsigned n) | 1887 |
2057 { | 1888 e1->probability = flag_probability.invert (); |
2058 char ns[2]; | 1889 |
2059 | 1890 then_old_edge = make_single_succ_edge (then_bb, old_dest, |
2060 lsm_tmp_name_length = 0; | 1891 EDGE_FALLTHRU | (irr ? EDGE_IRREDUCIBLE_LOOP : 0)); |
2061 gen_lsm_tmp_name (ref); | 1892 |
2062 lsm_tmp_name_add ("_lsm"); | 1893 set_immediate_dominator (CDI_DOMINATORS, then_bb, new_bb); |
2063 if (n < 10) | 1894 |
2064 { | 1895 if (prev_edges) |
2065 ns[0] = '0' + n; | 1896 { |
2066 ns[1] = 0; | 1897 basic_block prevbb = prev_edges->last_cond_fallthru->src; |
2067 lsm_tmp_name_add (ns); | 1898 redirect_edge_succ (prev_edges->last_cond_fallthru, new_bb); |
2068 } | 1899 set_immediate_dominator (CDI_DOMINATORS, new_bb, prevbb); |
2069 return lsm_tmp_name; | 1900 set_immediate_dominator (CDI_DOMINATORS, old_dest, |
1901 recompute_dominator (CDI_DOMINATORS, old_dest)); | |
1902 } | |
1903 | |
1904 /* ?? Because stores may alias, they must happen in the exact | |
1905 sequence in which they originally happened. Save the position right after | 
1906 the (_lsm) store we just created so we can continue appending after | |
1907 it and maintain the original order. */ | |
1908 { | |
1909 struct prev_flag_edges *p; | |
1910 | |
1911 if (orig_ex->aux) | |
1912 orig_ex->aux = NULL; | |
1913 alloc_aux_for_edge (orig_ex, sizeof (struct prev_flag_edges)); | |
1914 p = (struct prev_flag_edges *) orig_ex->aux; | |
1915 p->append_cond_position = then_old_edge; | |
1916 p->last_cond_fallthru = find_edge (new_bb, old_dest); | |
1917 orig_ex->aux = (void *) p; | |
1918 } | |
1919 | |
1920 if (!loop_has_only_one_exit) | |
1921 for (gphi_iterator gpi = gsi_start_phis (old_dest); | |
1922 !gsi_end_p (gpi); gsi_next (&gpi)) | |
1923 { | |
1924 gphi *phi = gpi.phi (); | |
1925 unsigned i; | |
1926 | |
1927 for (i = 0; i < gimple_phi_num_args (phi); i++) | |
1928 if (gimple_phi_arg_edge (phi, i)->src == new_bb) | |
1929 { | |
1930 tree arg = gimple_phi_arg_def (phi, i); | |
1931 add_phi_arg (phi, arg, then_old_edge, UNKNOWN_LOCATION); | |
1932 update_stmt (phi); | |
1933 } | |
1934 } | |
1935 } | |
1936 | |
1937 /* When REF is set on the location, set flag indicating the store. */ | |
1938 | |
1939 struct sm_set_flag_if_changed | |
1940 { | |
1941 sm_set_flag_if_changed (tree flag_, hash_set <basic_block> *bbs_) | |
1942 : flag (flag_), bbs (bbs_) {} | |
1943 bool operator () (mem_ref_loc *loc); | |
1944 tree flag; | |
1945 hash_set <basic_block> *bbs; | |
1946 }; | |
1947 | |
1948 bool | |
1949 sm_set_flag_if_changed::operator () (mem_ref_loc *loc) | |
1950 { | |
1951 /* Only set the flag for writes. */ | |
1952 if (is_gimple_assign (loc->stmt) | |
1953 && gimple_assign_lhs_ptr (loc->stmt) == loc->ref) | |
1954 { | |
1955 gimple_stmt_iterator gsi = gsi_for_stmt (loc->stmt); | |
1956 gimple *stmt = gimple_build_assign (flag, boolean_true_node); | |
1957 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING); | |
1958 bbs->add (gimple_bb (stmt)); | |
1959 } | |
1960 return false; | |
1961 } | |
1962 | |
1963 /* Helper function for execute_sm. On every location where REF is | |
1964 set, set an appropriate flag indicating the store. */ | |
1965 | |
1966 static tree | |
1967 execute_sm_if_changed_flag_set (struct loop *loop, im_mem_ref *ref, | |
1968 hash_set <basic_block> *bbs) | |
1969 { | |
1970 tree flag; | |
1971 char *str = get_lsm_tmp_name (ref->mem.ref, ~0, "_flag"); | |
1972 flag = create_tmp_reg (boolean_type_node, str); | |
1973 for_all_locs_in_loop (loop, ref, sm_set_flag_if_changed (flag, bbs)); | |
1974 return flag; | |
2070 } | 1975 } |
2071 | 1976 |
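The transformation sketched in the execute_sm_if_changed comment can be written out as ordinary C to see the equivalence: the guarded store fires exactly when some iteration would have stored, and never otherwise. A toy rendering with a made-up loop body:

  #include <stdbool.h>
  #include <stdio.h>

  static int MEM;

  /* Original loop: MEM is stored only on even iterations.  */
  static void orig (int n)
  {
    for (int i = 0; i < n; ++i)
      if (i % 2 == 0)
        MEM = i;
  }

  /* After flag-based store motion: at most one store, and only if
     some iteration would have stored.  */
  static void transformed (int n)
  {
    int lsm = MEM;
    bool lsm_flag = false;
    for (int i = 0; i < n; ++i)
      if (i % 2 == 0)
        {
          lsm = i;
          lsm_flag = true;
        }
    if (lsm_flag)
      MEM = lsm;
  }

  int main (void)
  {
    MEM = -1; orig (5);        printf ("%d\n", MEM);  /* 4 */
    MEM = -1; transformed (5); printf ("%d\n", MEM);  /* 4 */
    MEM = -1; orig (0);        printf ("%d\n", MEM);  /* -1 */
    MEM = -1; transformed (0); printf ("%d\n", MEM);  /* -1: no new store */
    return 0;
  }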
2072 /* Executes store motion of memory reference REF from LOOP. | 1977 /* Executes store motion of memory reference REF from LOOP. |
2073 Exits from the LOOP are stored in EXITS. The initialization of the | 1978 Exits from the LOOP are stored in EXITS. The initialization of the |
2074 temporary variable is put to the preheader of the loop, and assignments | 1979 temporary variable is put to the preheader of the loop, and assignments |
2075 to the reference from the temporary variable are emitted to exits. */ | 1980 to the reference from the temporary variable are emitted to exits. */ |
2076 | 1981 |
2077 static void | 1982 static void |
2078 execute_sm (struct loop *loop, VEC (edge, heap) *exits, mem_ref_p ref) | 1983 execute_sm (struct loop *loop, vec<edge> exits, im_mem_ref *ref) |
2079 { | 1984 { |
2080 tree tmp_var; | 1985 tree tmp_var, store_flag = NULL_TREE; |
2081 unsigned i; | 1986 unsigned i; |
2082 gimple load, store; | 1987 gassign *load; |
2083 struct fmt_data fmt_data; | 1988 struct fmt_data fmt_data; |
2084 edge ex; | 1989 edge ex; |
2085 struct lim_aux_data *lim_data; | 1990 struct lim_aux_data *lim_data; |
1991 bool multi_threaded_model_p = false; | |
1992 gimple_stmt_iterator gsi; | |
1993 hash_set<basic_block> flag_bbs; | |
2086 | 1994 |
2087 if (dump_file && (dump_flags & TDF_DETAILS)) | 1995 if (dump_file && (dump_flags & TDF_DETAILS)) |
2088 { | 1996 { |
2089 fprintf (dump_file, "Executing store motion of "); | 1997 fprintf (dump_file, "Executing store motion of "); |
2090 print_generic_expr (dump_file, ref->mem, 0); | 1998 print_generic_expr (dump_file, ref->mem.ref); |
2091 fprintf (dump_file, " from loop %d\n", loop->num); | 1999 fprintf (dump_file, " from loop %d\n", loop->num); |
2092 } | 2000 } |
2093 | 2001 |
2094 tmp_var = make_rename_temp (TREE_TYPE (ref->mem), | 2002 tmp_var = create_tmp_reg (TREE_TYPE (ref->mem.ref), |
2095 get_lsm_tmp_name (ref->mem, ~0)); | 2003 get_lsm_tmp_name (ref->mem.ref, ~0)); |
2096 | 2004 |
2097 fmt_data.loop = loop; | 2005 fmt_data.loop = loop; |
2098 fmt_data.orig_loop = loop; | 2006 fmt_data.orig_loop = loop; |
2099 for_each_index (&ref->mem, force_move_till, &fmt_data); | 2007 for_each_index (&ref->mem.ref, force_move_till, &fmt_data); |
2008 | |
2009 if (bb_in_transaction (loop_preheader_edge (loop)->src) | |
2010 || (! PARAM_VALUE (PARAM_ALLOW_STORE_DATA_RACES) | |
2011 && ! ref_always_accessed_p (loop, ref, true))) | |
2012 multi_threaded_model_p = true; | |
2013 | |
2014 if (multi_threaded_model_p) | |
2015 store_flag = execute_sm_if_changed_flag_set (loop, ref, &flag_bbs); | |
2100 | 2016 |
2101 rewrite_mem_refs (loop, ref, tmp_var); | 2017 rewrite_mem_refs (loop, ref, tmp_var); |
2102 | 2018 |
2103 /* Emit the load & stores. */ | 2019 /* Emit the load code on a random exit edge or into the latch if |
2104 load = gimple_build_assign (tmp_var, unshare_expr (ref->mem)); | 2020 the loop does not exit, so that we are sure it will be processed |
2021 by move_computations after all dependencies. */ | |
2022 gsi = gsi_for_stmt (first_mem_ref_loc (loop, ref)->stmt); | |
2023 | |
2024 /* FIXME/TODO: For the multi-threaded variant, we could avoid this | |
2025 load altogether, since the store is predicated by a flag. We | |
2026 could do the load only if it was originally in the loop. */ | 
2027 load = gimple_build_assign (tmp_var, unshare_expr (ref->mem.ref)); | |
2105 lim_data = init_lim_data (load); | 2028 lim_data = init_lim_data (load); |
2106 lim_data->max_loop = loop; | 2029 lim_data->max_loop = loop; |
2107 lim_data->tgt_loop = loop; | 2030 lim_data->tgt_loop = loop; |
2108 | 2031 gsi_insert_before (&gsi, load, GSI_SAME_STMT); |
2109 /* Put this into the latch, so that we are sure it will be processed after | 2032 |
2110 all dependencies. */ | 2033 if (multi_threaded_model_p) |
2111 gsi_insert_on_edge (loop_latch_edge (loop), load); | 2034 { |
2112 | 2035 load = gimple_build_assign (store_flag, boolean_false_node); |
2113 FOR_EACH_VEC_ELT (edge, exits, i, ex) | 2036 lim_data = init_lim_data (load); |
2114 { | 2037 lim_data->max_loop = loop; |
2115 store = gimple_build_assign (unshare_expr (ref->mem), tmp_var); | 2038 lim_data->tgt_loop = loop; |
2116 gsi_insert_on_edge (ex, store); | 2039 gsi_insert_before (&gsi, load, GSI_SAME_STMT); |
2117 } | 2040 } |
2041 | |
2042 /* Sink the store to every exit from the loop. */ | |
2043 FOR_EACH_VEC_ELT (exits, i, ex) | |
2044 if (!multi_threaded_model_p) | |
2045 { | |
2046 gassign *store; | |
2047 store = gimple_build_assign (unshare_expr (ref->mem.ref), tmp_var); | |
2048 gsi_insert_on_edge (ex, store); | |
2049 } | |
2050 else | |
2051 execute_sm_if_changed (ex, ref->mem.ref, tmp_var, store_flag, | |
2052 loop_preheader_edge (loop), &flag_bbs); | |
2118 } | 2053 } |
2119 | 2054 |
2120 /* Hoists memory references MEM_REFS out of LOOP. EXITS is the list of exit | 2055 /* Hoists memory references MEM_REFS out of LOOP. EXITS is the list of exit |
2121 edges of the LOOP. */ | 2056 edges of the LOOP. */ |
2122 | 2057 |
2123 static void | 2058 static void |
2124 hoist_memory_references (struct loop *loop, bitmap mem_refs, | 2059 hoist_memory_references (struct loop *loop, bitmap mem_refs, |
2125 VEC (edge, heap) *exits) | 2060 vec<edge> exits) |
2126 { | 2061 { |
2127 mem_ref_p ref; | 2062 im_mem_ref *ref; |
2128 unsigned i; | 2063 unsigned i; |
2129 bitmap_iterator bi; | 2064 bitmap_iterator bi; |
2130 | 2065 |
2131 EXECUTE_IF_SET_IN_BITMAP (mem_refs, 0, i, bi) | 2066 EXECUTE_IF_SET_IN_BITMAP (mem_refs, 0, i, bi) |
2132 { | 2067 { |
2133 ref = VEC_index (mem_ref_p, memory_accesses.refs_list, i); | 2068 ref = memory_accesses.refs_list[i]; |
2134 execute_sm (loop, exits, ref); | 2069 execute_sm (loop, exits, ref); |
2135 } | 2070 } |
2071 } | |
2072 | |
2073 struct ref_always_accessed | |
2074 { | |
2075 ref_always_accessed (struct loop *loop_, bool stored_p_) | |
2076 : loop (loop_), stored_p (stored_p_) {} | |
2077 bool operator () (mem_ref_loc *loc); | |
2078 struct loop *loop; | |
2079 bool stored_p; | |
2080 }; | |
2081 | |
2082 bool | |
2083 ref_always_accessed::operator () (mem_ref_loc *loc) | |
2084 { | |
2085 struct loop *must_exec; | |
2086 | |
2087 if (!get_lim_data (loc->stmt)) | |
2088 return false; | |
2089 | |
2090 /* If we require an always executed store, make sure the statement | |
2091 stores to the reference. */ | |
2092 if (stored_p) | |
2093 { | |
2094 tree lhs = gimple_get_lhs (loc->stmt); | |
2095 if (!lhs | |
2096 || lhs != *loc->ref) | |
2097 return false; | |
2098 } | |
2099 | |
2100 must_exec = get_lim_data (loc->stmt)->always_executed_in; | |
2101 if (!must_exec) | |
2102 return false; | |
2103 | |
2104 if (must_exec == loop | |
2105 || flow_loop_nested_p (must_exec, loop)) | |
2106 return true; | |
2107 | |
2108 return false; | |
2136 } | 2109 } |
2137 | 2110 |
2138 /* Returns true if REF is always accessed in LOOP. If STORED_P is true, | 2111 /* Returns true if REF is always accessed in LOOP. If STORED_P is true,
2139 make sure REF is always stored to in LOOP. */ | 2112 make sure REF is always stored to in LOOP. */ |
2140 | 2113 |
2141 static bool | 2114 static bool |
2142 ref_always_accessed_p (struct loop *loop, mem_ref_p ref, bool stored_p) | 2115 ref_always_accessed_p (struct loop *loop, im_mem_ref *ref, bool stored_p) |
2143 { | 2116 { |
2144 VEC (mem_ref_loc_p, heap) *locs = NULL; | 2117 return for_all_locs_in_loop (loop, ref, |
2145 unsigned i; | 2118 ref_always_accessed (loop, stored_p)); |
2146 mem_ref_loc_p loc; | |
2147 bool ret = false; | |
2148 struct loop *must_exec; | |
2149 tree base; | |
2150 | |
2151 base = get_base_address (ref->mem); | |
2152 if (INDIRECT_REF_P (base) | |
2153 || TREE_CODE (base) == MEM_REF) | |
2154 base = TREE_OPERAND (base, 0); | |
2155 | |
2156 get_all_locs_in_loop (loop, ref, &locs); | |
2157 FOR_EACH_VEC_ELT (mem_ref_loc_p, locs, i, loc) | |
2158 { | |
2159 if (!get_lim_data (loc->stmt)) | |
2160 continue; | |
2161 | |
2162 /* If we require an always executed store, make sure the statement | |
2163 stores to the reference. */ | |
2164 if (stored_p) | |
2165 { | |
2166 tree lhs; | |
2167 if (!gimple_get_lhs (loc->stmt)) | |
2168 continue; | |
2169 lhs = get_base_address (gimple_get_lhs (loc->stmt)); | |
2170 if (!lhs) | |
2171 continue; | |
2172 if (INDIRECT_REF_P (lhs) | |
2173 || TREE_CODE (lhs) == MEM_REF) | |
2174 lhs = TREE_OPERAND (lhs, 0); | |
2175 if (lhs != base) | |
2176 continue; | |
2177 } | |
2178 | |
2179 must_exec = get_lim_data (loc->stmt)->always_executed_in; | |
2180 if (!must_exec) | |
2181 continue; | |
2182 | |
2183 if (must_exec == loop | |
2184 || flow_loop_nested_p (must_exec, loop)) | |
2185 { | |
2186 ret = true; | |
2187 break; | |
2188 } | |
2189 } | |
2190 VEC_free (mem_ref_loc_p, heap, locs); | |
2191 | |
2192 return ret; | |
2193 } | 2119 } |
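Note that for_all_locs_in_loop (the template defined earlier in this file that the new ref_always_accessed_p delegates to) stops and returns true as soon as the functor accepts a location, exactly like the deleted loop on the left, which set ret = true and broke out at the first always-executed access; one such access is enough to make REF always accessed. A stand-alone sketch of the idiom, with hypothetical simplified types:

struct loc;   /* stand-in for mem_ref_loc */

/* Apply FN to each of the N locations; report whether FN accepted one.  */
template <typename FN>
static bool
for_all_locs_sketch (loc **locs, unsigned n, FN fn)
{
  for (unsigned i = 0; i < n; i++)
    if (fn (locs[i]))
      return true;
  return false;
}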
2194 | 2120 |
2195 /* Returns true if REF1 and REF2 are independent. */ | 2121 /* Returns true if REF1 and REF2 are independent. */ |
2196 | 2122 |
2197 static bool | 2123 static bool |
2198 refs_independent_p (mem_ref_p ref1, mem_ref_p ref2) | 2124 refs_independent_p (im_mem_ref *ref1, im_mem_ref *ref2) |
2199 { | 2125 { |
2200 if (ref1 == ref2 | 2126 if (ref1 == ref2) |
2201 || bitmap_bit_p (ref1->indep_ref, ref2->id)) | |
2202 return true; | 2127 return true; |
2203 if (bitmap_bit_p (ref1->dep_ref, ref2->id)) | |
2204 return false; | |
2205 | 2128 |
2206 if (dump_file && (dump_flags & TDF_DETAILS)) | 2129 if (dump_file && (dump_flags & TDF_DETAILS)) |
2207 fprintf (dump_file, "Querying dependency of refs %u and %u: ", | 2130 fprintf (dump_file, "Querying dependency of refs %u and %u: ", |
2208 ref1->id, ref2->id); | 2131 ref1->id, ref2->id); |
2209 | 2132 |
2210 if (mem_refs_may_alias_p (ref1->mem, ref2->mem, | 2133 if (mem_refs_may_alias_p (ref1, ref2, &memory_accesses.ttae_cache)) |
2211 &memory_accesses.ttae_cache)) | 2134 { |
2212 { | |
2213 bitmap_set_bit (ref1->dep_ref, ref2->id); | |
2214 bitmap_set_bit (ref2->dep_ref, ref1->id); | |
2215 if (dump_file && (dump_flags & TDF_DETAILS)) | 2135 if (dump_file && (dump_flags & TDF_DETAILS)) |
2216 fprintf (dump_file, "dependent.\n"); | 2136 fprintf (dump_file, "dependent.\n"); |
2217 return false; | 2137 return false; |
2218 } | 2138 } |
2219 else | 2139 else |
2220 { | 2140 { |
2221 bitmap_set_bit (ref1->indep_ref, ref2->id); | |
2222 bitmap_set_bit (ref2->indep_ref, ref1->id); | |
2223 if (dump_file && (dump_flags & TDF_DETAILS)) | 2141 if (dump_file && (dump_flags & TDF_DETAILS)) |
2224 fprintf (dump_file, "independent.\n"); | 2142 fprintf (dump_file, "independent.\n"); |
2225 return true; | 2143 return true; |
2226 } | 2144 } |
2227 } | 2145 } |
2228 | 2146 |
2229 /* Records the information whether REF is independent in LOOP (according | 2147 /* Mark REF dependent on stores or loads (according to STORED_P) in LOOP |
2230 to INDEP). */ | 2148 and its super-loops. */ |
2231 | 2149 |
2232 static void | 2150 static void |
2233 record_indep_loop (struct loop *loop, mem_ref_p ref, bool indep) | 2151 record_dep_loop (struct loop *loop, im_mem_ref *ref, bool stored_p) |
2234 { | 2152 { |
2235 if (indep) | 2153 /* We can propagate dependent-in-loop bits up the loop |
2236 bitmap_set_bit (ref->indep_loop, loop->num); | 2154 hierarchy to all outer loops. */ |
2155 while (loop != current_loops->tree_root | |
2156 && bitmap_set_bit (&ref->dep_loop, LOOP_DEP_BIT (loop->num, stored_p))) | |
2157 loop = loop_outer (loop); | |
2158 } | |
2159 | |
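The while loop above walks outward through the loop tree, setting one cached bit per (loop, stored_p) pair. For orientation, a restatement of the key layout (an assumption: this mirrors the LOOP_DEP_BIT macro defined near the top of this file, which gives each loop two adjacent bits in the dep_loop/indep_loop bitmaps, one for the all-refs query and one for the stores-only query):

/* Hypothetical restatement; the real macro lives earlier in this file.  */
#define LOOP_DEP_BIT(loopnum, storedp) (2 * (loopnum) + (storedp))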
2160 /* Returns true if REF is independent of all other memory | |
2161 references in LOOP. REF_LOOP is where REF is accessed, SAFELEN is the | |
2162 safelen to apply. */ | |
2163 | |
2164 static bool | |
2165 ref_indep_loop_p_1 (int safelen, struct loop *loop, im_mem_ref *ref, | |
2166 bool stored_p, struct loop *ref_loop) | |
2167 { | |
2168 stored_p |= (ref->stored && bitmap_bit_p (ref->stored, loop->num)); | |
2169 | |
2170 if (loop->safelen > safelen | |
2171 /* Check that REF is accessed inside LOOP. */ | |
2172 && (loop == ref_loop || flow_loop_nested_p (loop, ref_loop))) | |
2173 safelen = loop->safelen; | |
2174 | |
2175 bool indep_p = true; | |
2176 bitmap refs_to_check; | |
2177 | |
2178 if (stored_p) | |
2179 refs_to_check = &memory_accesses.refs_in_loop[loop->num]; | |
2237 else | 2180 else |
2238 bitmap_set_bit (ref->dep_loop, loop->num); | 2181 refs_to_check = &memory_accesses.refs_stored_in_loop[loop->num]; |
2239 } | 2182 |
2240 | 2183 if (bitmap_bit_p (refs_to_check, UNANALYZABLE_MEM_ID)) |
2241 /* Returns true if REF is independent of all other memory references in | 2184 indep_p = false;
2242 LOOP. */ | 2185 else if (safelen > 1) |
2243 | 2186 { |
2244 static bool | 2187 if (dump_file && (dump_flags & TDF_DETAILS)) |
2245 ref_indep_loop_p_1 (struct loop *loop, mem_ref_p ref) | 2188 { |
2246 { | 2189 fprintf (dump_file,"REF is independent due to safelen %d\n", |
2248 bitmap clobbers, refs_to_check, refs; | 2189 fprintf (dump_file, "REF is independent due to safelen %d\n",
2248 unsigned i; | 2191 print_generic_expr (dump_file, ref->mem.ref, TDF_SLIM); |
2249 bitmap_iterator bi; | 2192 fprintf (dump_file, "\n"); |
2250 bool ret = true, stored = bitmap_bit_p (ref->stored, loop->num); | 2193 } |
2251 htab_t map; | 2194 |
2252 mem_ref_p aref; | 2195 /* We need to recurse to properly handle UNANALYZABLE_MEM_ID. */ |
2253 | 2196 struct loop *inner = loop->inner; |
2254 /* If the reference is clobbered, it is not independent. */ | 2197 while (inner) |
2255 clobbers = VEC_index (bitmap, memory_accesses.clobbered_vops, loop->num); | 2198 { |
2256 if (bitmap_intersect_p (ref->vops, clobbers)) | 2199 if (!ref_indep_loop_p_1 (safelen, inner, ref, stored_p, ref_loop)) |
2257 return false; | 2200 { |
2258 | 2201 indep_p = false; |
2259 refs_to_check = BITMAP_ALLOC (NULL); | 2202 break; |
2260 | 2203 } |
2261 map = VEC_index (htab_t, memory_accesses.vop_ref_map, loop->num); | 2204 inner = inner->next; |
2262 EXECUTE_IF_AND_COMPL_IN_BITMAP (ref->vops, clobbers, 0, i, bi) | 2205 } |
2263 { | 2206 |
2264 if (stored) | 2207 /* Avoid caching here as safelen depends on context and refs |
2265 refs = get_vop_accesses (map, i); | 2208 are shared between different contexts. */ |
2266 else | 2209 return indep_p; |
2267 refs = get_vop_stores (map, i); | 2210 } |
2268 | 2211 else |
2269 bitmap_ior_into (refs_to_check, refs); | 2212 { |
2270 } | 2213 if (bitmap_bit_p (&ref->indep_loop, LOOP_DEP_BIT (loop->num, stored_p))) |
2271 | 2214 return true; |
2272 EXECUTE_IF_SET_IN_BITMAP (refs_to_check, 0, i, bi) | 2215 if (bitmap_bit_p (&ref->dep_loop, LOOP_DEP_BIT (loop->num, stored_p))) |
2273 { | 2216 return false; |
2274 aref = VEC_index (mem_ref_p, memory_accesses.refs_list, i); | 2217 |
2275 if (!refs_independent_p (ref, aref)) | 2218 struct loop *inner = loop->inner; |
2276 { | 2219 while (inner) |
2277 ret = false; | 2220 { |
2278 record_indep_loop (loop, aref, false); | 2221 if (!ref_indep_loop_p_1 (safelen, inner, ref, stored_p, ref_loop)) |
2279 break; | 2222 { |
2280 } | 2223 indep_p = false; |
2281 } | 2224 break; |
2282 | 2225 } |
2283 BITMAP_FREE (refs_to_check); | 2226 inner = inner->next; |
2284 return ret; | 2227 } |
2285 } | 2228 |
2286 | 2229 if (indep_p) |
2287 /* Returns true if REF is independent of all other memory references in | 2230 {
2288 LOOP. Wrapper over ref_indep_loop_p_1, caching its results. */ | 2231 unsigned i; |
2289 | 2232 bitmap_iterator bi; |
2290 static bool | 2233 EXECUTE_IF_SET_IN_BITMAP (refs_to_check, 0, i, bi) |
2291 ref_indep_loop_p (struct loop *loop, mem_ref_p ref) | 2234 { |
2292 { | 2235 im_mem_ref *aref = memory_accesses.refs_list[i]; |
2293 bool ret; | 2236 if (!refs_independent_p (ref, aref)) |
2294 | 2237 { |
2295 if (bitmap_bit_p (ref->indep_loop, loop->num)) | 2238 indep_p = false; |
2296 return true; | 2239 break; |
2297 if (bitmap_bit_p (ref->dep_loop, loop->num)) | 2240 } |
2298 return false; | 2241 } |
2299 | 2242 } |
2300 ret = ref_indep_loop_p_1 (loop, ref); | 2243 } |
2301 | 2244 |
2302 if (dump_file && (dump_flags & TDF_DETAILS)) | 2245 if (dump_file && (dump_flags & TDF_DETAILS)) |
2303 fprintf (dump_file, "Querying dependencies of ref %u in loop %d: %s\n", | 2246 fprintf (dump_file, "Querying dependencies of ref %u in loop %d: %s\n", |
2304 ref->id, loop->num, ret ? "independent" : "dependent"); | 2247 ref->id, loop->num, indep_p ? "independent" : "dependent"); |
2305 | 2248 |
2306 record_indep_loop (loop, ref, ret); | 2249 /* Record the computed result in the cache. */ |
2307 | 2250 if (indep_p) |
2308 return ret; | 2251 { |
2252 if (bitmap_set_bit (&ref->indep_loop, LOOP_DEP_BIT (loop->num, stored_p)) | |
2253 && stored_p) | |
2254 { | |
2255 /* If it's independent of all refs then it's independent | |
2256 of stores, too. */ | |
2257 bitmap_set_bit (&ref->indep_loop, LOOP_DEP_BIT (loop->num, false)); | |
2258 } | |
2259 } | |
2260 else | |
2261 { | |
2262 record_dep_loop (loop, ref, stored_p); | |
2263 if (!stored_p) | |
2264 { | |
2265 /* If it's dependent on stores, it's dependent on | |
2266 all refs, too. */ | |
2267 record_dep_loop (loop, ref, true); | |
2268 } | |
2269 } | |
2270 | |
2271 return indep_p; | |
2272 } | |
2273 | |
2274 /* Returns true if REF is independent of all other memory references in | |
2275 LOOP. REF_LOOP is the loop where REF is accessed. */ | |
2276 | |
2277 static bool | |
2278 ref_indep_loop_p (struct loop *loop, im_mem_ref *ref, struct loop *ref_loop) | |
2279 { | |
2280 gcc_checking_assert (MEM_ANALYZABLE (ref)); | |
2281 | |
2282 return ref_indep_loop_p_1 (0, loop, ref, false, ref_loop); | |
2309 } | 2283 } |
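The safelen > 1 shortcut in ref_indep_loop_p_1 lets user annotations substitute for alias queries: #pragma omp simd safelen(N) sets loop->safelen to N, and #pragma GCC ivdep sets it to INT_MAX, so references in such a loop are reported independent without consulting mem_refs_may_alias_p. An illustrative user-level input (a hypothetical example, compiled with -fopenmp-simd):

void
saxpy (float *a, const float *b, int n)
{
#pragma omp simd safelen(8)   /* user asserts no dependence at distance < 8 */
  for (int i = 0; i < n; i++)
    a[i] = a[i] + 2.0f * b[i];
}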
2310 | 2284 |
2311 /* Returns true if we can perform store motion of REF from LOOP. */ | 2285 /* Returns true if we can perform store motion of REF from LOOP. */ |
2312 | 2286 |
2313 static bool | 2287 static bool |
2314 can_sm_ref_p (struct loop *loop, mem_ref_p ref) | 2288 can_sm_ref_p (struct loop *loop, im_mem_ref *ref) |
2315 { | 2289 { |
2316 tree base; | 2290 tree base; |
2317 | 2291 |
2318 /* Unless the reference is stored in the loop, there is nothing to do. */ | 2292 /* Can't hoist unanalyzable refs. */ |
2319 if (!bitmap_bit_p (ref->stored, loop->num)) | 2293 if (!MEM_ANALYZABLE (ref)) |
2320 return false; | 2294 return false; |
2321 | 2295 |
2322 /* It should be movable. */ | 2296 /* It should be movable. */ |
2323 if (!is_gimple_reg_type (TREE_TYPE (ref->mem)) | 2297 if (!is_gimple_reg_type (TREE_TYPE (ref->mem.ref)) |
2324 || TREE_THIS_VOLATILE (ref->mem) | 2298 || TREE_THIS_VOLATILE (ref->mem.ref) |
2325 || !for_each_index (&ref->mem, may_move_till, loop)) | 2299 || !for_each_index (&ref->mem.ref, may_move_till, loop)) |
2326 return false; | 2300 return false; |
2327 | 2301 |
2328 /* If it can throw, fail; we do not properly update EH info. */ | 2302 /* If it can throw, fail; we do not properly update EH info. */
2329 if (tree_could_throw_p (ref->mem)) | 2303 if (tree_could_throw_p (ref->mem.ref)) |
2330 return false; | 2304 return false; |
2331 | 2305 |
2332 /* If it can trap, it must be always executed in LOOP. | 2306 /* If it can trap, it must be always executed in LOOP. |
2333 Readonly memory locations may trap when storing to them, but | 2307 Readonly memory locations may trap when storing to them, but |
2334 tree_could_trap_p is a predicate for rvalues, so check that | 2308 tree_could_trap_p is a predicate for rvalues, so check that |
2335 explicitly. */ | 2309 explicitly. */ |
2336 base = get_base_address (ref->mem); | 2310 base = get_base_address (ref->mem.ref); |
2337 if ((tree_could_trap_p (ref->mem) | 2311 if ((tree_could_trap_p (ref->mem.ref) |
2338 || (DECL_P (base) && TREE_READONLY (base))) | 2312 || (DECL_P (base) && TREE_READONLY (base))) |
2339 && !ref_always_accessed_p (loop, ref, true)) | 2313 && !ref_always_accessed_p (loop, ref, true)) |
2340 return false; | 2314 return false; |
2341 | 2315 |
2342 /* And it must be independent of all other memory references | 2316 /* And it must be independent of all other memory references
2343 in LOOP. */ | 2317 in LOOP. */ |
2344 if (!ref_indep_loop_p (loop, ref)) | 2318 if (!ref_indep_loop_p (loop, ref, loop)) |
2345 return false; | 2319 return false; |
2346 | 2320 |
2347 return true; | 2321 return true; |
2348 } | 2322 } |
2349 | 2323 |
2352 motion was performed in one of the outer loops. */ | 2326 motion was performed in one of the outer loops. */ |
2353 | 2327 |
2354 static void | 2328 static void |
2355 find_refs_for_sm (struct loop *loop, bitmap sm_executed, bitmap refs_to_sm) | 2329 find_refs_for_sm (struct loop *loop, bitmap sm_executed, bitmap refs_to_sm) |
2356 { | 2330 { |
2357 bitmap refs = VEC_index (bitmap, memory_accesses.all_refs_in_loop, | 2331 bitmap refs = &memory_accesses.all_refs_stored_in_loop[loop->num]; |
2358 loop->num); | |
2359 unsigned i; | 2332 unsigned i; |
2360 bitmap_iterator bi; | 2333 bitmap_iterator bi; |
2361 mem_ref_p ref; | 2334 im_mem_ref *ref; |
2362 | 2335 |
2363 EXECUTE_IF_AND_COMPL_IN_BITMAP (refs, sm_executed, 0, i, bi) | 2336 EXECUTE_IF_AND_COMPL_IN_BITMAP (refs, sm_executed, 0, i, bi) |
2364 { | 2337 { |
2365 ref = VEC_index (mem_ref_p, memory_accesses.refs_list, i); | 2338 ref = memory_accesses.refs_list[i]; |
2366 if (can_sm_ref_p (loop, ref)) | 2339 if (can_sm_ref_p (loop, ref)) |
2367 bitmap_set_bit (refs_to_sm, i); | 2340 bitmap_set_bit (refs_to_sm, i); |
2368 } | 2341 } |
2369 } | 2342 } |
2370 | 2343 |
2372 for a store motion optimization (i.e. whether we can insert statements | 2345 for a store motion optimization (i.e. whether we can insert statements
2373 on its exits). */ | 2346 on its exits). */ |
2374 | 2347 |
2375 static bool | 2348 static bool |
2376 loop_suitable_for_sm (struct loop *loop ATTRIBUTE_UNUSED, | 2349 loop_suitable_for_sm (struct loop *loop ATTRIBUTE_UNUSED, |
2377 VEC (edge, heap) *exits) | 2350 vec<edge> exits) |
2378 { | 2351 { |
2379 unsigned i; | 2352 unsigned i; |
2380 edge ex; | 2353 edge ex; |
2381 | 2354 |
2382 FOR_EACH_VEC_ELT (edge, exits, i, ex) | 2355 FOR_EACH_VEC_ELT (exits, i, ex) |
2383 if (ex->flags & (EDGE_ABNORMAL | EDGE_EH)) | 2356 if (ex->flags & (EDGE_ABNORMAL | EDGE_EH)) |
2384 return false; | 2357 return false; |
2385 | 2358 |
2386 return true; | 2359 return true; |
2387 } | 2360 } |
2391 store motion was executed in one of the outer loops. */ | 2364 store motion was executed in one of the outer loops. */ |
2392 | 2365 |
2393 static void | 2366 static void |
2394 store_motion_loop (struct loop *loop, bitmap sm_executed) | 2367 store_motion_loop (struct loop *loop, bitmap sm_executed) |
2395 { | 2368 { |
2396 VEC (edge, heap) *exits = get_loop_exit_edges (loop); | 2369 vec<edge> exits = get_loop_exit_edges (loop); |
2397 struct loop *subloop; | 2370 struct loop *subloop; |
2398 bitmap sm_in_loop = BITMAP_ALLOC (NULL); | 2371 bitmap sm_in_loop = BITMAP_ALLOC (&lim_bitmap_obstack); |
2399 | 2372 |
2400 if (loop_suitable_for_sm (loop, exits)) | 2373 if (loop_suitable_for_sm (loop, exits)) |
2401 { | 2374 { |
2402 find_refs_for_sm (loop, sm_executed, sm_in_loop); | 2375 find_refs_for_sm (loop, sm_executed, sm_in_loop); |
2403 hoist_memory_references (loop, sm_in_loop, exits); | 2376 hoist_memory_references (loop, sm_in_loop, exits); |
2404 } | 2377 } |
2405 VEC_free (edge, heap, exits); | 2378 exits.release (); |
2406 | 2379 |
2407 bitmap_ior_into (sm_executed, sm_in_loop); | 2380 bitmap_ior_into (sm_executed, sm_in_loop); |
2408 for (subloop = loop->inner; subloop != NULL; subloop = subloop->next) | 2381 for (subloop = loop->inner; subloop != NULL; subloop = subloop->next) |
2409 store_motion_loop (subloop, sm_executed); | 2382 store_motion_loop (subloop, sm_executed); |
2410 bitmap_and_compl_into (sm_executed, sm_in_loop); | 2383 bitmap_and_compl_into (sm_executed, sm_in_loop); |
2416 | 2389 |
2417 static void | 2390 static void |
2418 store_motion (void) | 2391 store_motion (void) |
2419 { | 2392 { |
2420 struct loop *loop; | 2393 struct loop *loop; |
2421 bitmap sm_executed = BITMAP_ALLOC (NULL); | 2394 bitmap sm_executed = BITMAP_ALLOC (&lim_bitmap_obstack); |
2422 | 2395 |
2423 for (loop = current_loops->tree_root->inner; loop != NULL; loop = loop->next) | 2396 for (loop = current_loops->tree_root->inner; loop != NULL; loop = loop->next) |
2424 store_motion_loop (loop, sm_executed); | 2397 store_motion_loop (loop, sm_executed); |
2425 | 2398 |
2426 BITMAP_FREE (sm_executed); | 2399 BITMAP_FREE (sm_executed); |
2431 for each such basic block bb records the outermost loop for which execution | 2404 for each such basic block bb records the outermost loop for which execution
2432 of its header implies execution of bb. CONTAINS_CALL is the bitmap of | 2405 of its header implies execution of bb. CONTAINS_CALL is the bitmap of |
2433 blocks that contain a nonpure call. */ | 2406 blocks that contain a nonpure call. */ |
2434 | 2407 |
2435 static void | 2408 static void |
2436 fill_always_executed_in (struct loop *loop, sbitmap contains_call) | 2409 fill_always_executed_in_1 (struct loop *loop, sbitmap contains_call) |
2437 { | 2410 { |
2438 basic_block bb = NULL, *bbs, last = NULL; | 2411 basic_block bb = NULL, *bbs, last = NULL; |
2439 unsigned i; | 2412 unsigned i; |
2440 edge e; | 2413 edge e; |
2441 struct loop *inn_loop = loop; | 2414 struct loop *inn_loop = loop; |
2442 | 2415 |
2443 if (!loop->header->aux) | 2416 if (ALWAYS_EXECUTED_IN (loop->header) == NULL) |
2444 { | 2417 { |
2445 bbs = get_loop_body_in_dom_order (loop); | 2418 bbs = get_loop_body_in_dom_order (loop); |
2446 | 2419 |
2447 for (i = 0; i < loop->num_nodes; i++) | 2420 for (i = 0; i < loop->num_nodes; i++) |
2448 { | 2421 { |
2450 bb = bbs[i]; | 2423 bb = bbs[i]; |
2451 | 2424 |
2452 if (dominated_by_p (CDI_DOMINATORS, loop->latch, bb)) | 2425 if (dominated_by_p (CDI_DOMINATORS, loop->latch, bb)) |
2453 last = bb; | 2426 last = bb; |
2454 | 2427 |
2455 if (TEST_BIT (contains_call, bb->index)) | 2428 if (bitmap_bit_p (contains_call, bb->index)) |
2456 break; | 2429 break; |
2457 | 2430 |
2458 FOR_EACH_EDGE (e, ei, bb->succs) | 2431 FOR_EACH_EDGE (e, ei, bb->succs) |
2459 if (!flow_bb_inside_loop_p (loop, e->dest)) | 2432 { |
2460 break; | 2433 /* If there is an exit from this BB. */ |
2434 if (!flow_bb_inside_loop_p (loop, e->dest)) | |
2435 break; | |
2436 /* Or we enter a possibly non-finite loop. */ | |
2437 if (flow_loop_nested_p (bb->loop_father, | |
2438 e->dest->loop_father) | |
2439 && ! finite_loop_p (e->dest->loop_father)) | |
2440 break; | |
2441 } | |
2461 if (e) | 2442 if (e) |
2462 break; | 2443 break; |
2463 | 2444 |
2464 /* A loop might be infinite (TODO use simple loop analysis | 2445 /* A loop might be infinite (TODO use simple loop analysis |
2465 to disprove this if possible). */ | 2446 to disprove this if possible). */ |
2480 } | 2461 } |
2481 } | 2462 } |
2482 | 2463 |
2483 while (1) | 2464 while (1) |
2484 { | 2465 { |
2485 last->aux = loop; | 2466 SET_ALWAYS_EXECUTED_IN (last, loop); |
2486 if (last == loop->header) | 2467 if (last == loop->header) |
2487 break; | 2468 break; |
2488 last = get_immediate_dominator (CDI_DOMINATORS, last); | 2469 last = get_immediate_dominator (CDI_DOMINATORS, last); |
2489 } | 2470 } |
2490 | 2471 |
2491 free (bbs); | 2472 free (bbs); |
2492 } | 2473 } |
2493 | 2474 |
2494 for (loop = loop->inner; loop; loop = loop->next) | 2475 for (loop = loop->inner; loop; loop = loop->next) |
2495 fill_always_executed_in (loop, contains_call); | 2476 fill_always_executed_in_1 (loop, contains_call); |
2496 } | 2477 } |
2478 | |
2479 /* Fills ALWAYS_EXECUTED_IN information for basic blocks, i.e. | |
2480 for each such basic block bb records the outermost loop for which execution | |
2481 of its header implies execution of bb. */ | |
2482 | |
2483 static void | |
2484 fill_always_executed_in (void) | |
2485 { | |
2486 basic_block bb; | |
2487 struct loop *loop; | |
2488 | |
2489 auto_sbitmap contains_call (last_basic_block_for_fn (cfun)); | |
2490 bitmap_clear (contains_call); | |
2491 FOR_EACH_BB_FN (bb, cfun) | |
2492 { | |
2493 gimple_stmt_iterator gsi; | |
2494 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) | |
2495 { | |
2496 if (nonpure_call_p (gsi_stmt (gsi))) | |
2497 break; | |
2498 } | |
2499 | |
2500 if (!gsi_end_p (gsi)) | |
2501 bitmap_set_bit (contains_call, bb->index); | |
2502 } | |
2503 | |
2504 for (loop = current_loops->tree_root->inner; loop; loop = loop->next) | |
2505 fill_always_executed_in_1 (loop, contains_call); | |
2506 } | |
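In concrete terms, a block is ALWAYS_EXECUTED_IN a loop only if every entry into the loop must reach it: the walk above stops at the first block with an exit edge, at an edge into a possibly non-finite inner loop, and at any block containing a nonpure call, since such a call may never return. A hypothetical illustration (basic-block boundaries simplified):

extern void maybe_exit (void);   /* nonpure call: ends the walk */

void
example (int *a, int n, int flag)
{
  int i = 0;
  do
    {
      a[i] = i;         /* in the header block: always executed in the loop */
      if (flag)
        maybe_exit ();  /* conditional, so not always executed; also a
                           nonpure call, which stops the walk above */
      i++;
    }
  while (i < n);        /* the exit test sits at the bottom */
}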
2507 | |
2497 | 2508 |
2498 /* Compute the global information needed by the loop invariant motion pass. */ | 2509 /* Compute the global information needed by the loop invariant motion pass. */ |
2499 | 2510 |
2500 static void | 2511 static void |
2501 tree_ssa_lim_initialize (void) | 2512 tree_ssa_lim_initialize (void) |
2502 { | 2513 { |
2503 sbitmap contains_call = sbitmap_alloc (last_basic_block); | |
2504 gimple_stmt_iterator bsi; | |
2505 struct loop *loop; | 2514 struct loop *loop; |
2506 basic_block bb; | 2515 unsigned i; |
2507 | 2516 |
2508 sbitmap_zero (contains_call); | 2517 bitmap_obstack_initialize (&lim_bitmap_obstack); |
2509 FOR_EACH_BB (bb) | 2518 gcc_obstack_init (&mem_ref_obstack); |
2510 { | 2519 lim_aux_data_map = new hash_map<gimple *, lim_aux_data *>; |
2511 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi)) | 2520 |
2512 { | 2521 if (flag_tm) |
2513 if (nonpure_call_p (gsi_stmt (bsi))) | 2522 compute_transaction_bits (); |
2514 break; | 2523 |
2515 } | 2524 alloc_aux_for_edges (0); |
2516 | 2525 |
2517 if (!gsi_end_p (bsi)) | 2526 memory_accesses.refs = new hash_table<mem_ref_hasher> (100); |
2518 SET_BIT (contains_call, bb->index); | 2527 memory_accesses.refs_list.create (100); |
2519 } | 2528 /* Allocate a special, unanalyzable mem-ref with ID zero. */ |
2520 | 2529 memory_accesses.refs_list.quick_push |
2521 for (loop = current_loops->tree_root->inner; loop; loop = loop->next) | 2530 (mem_ref_alloc (error_mark_node, 0, UNANALYZABLE_MEM_ID)); |
2522 fill_always_executed_in (loop, contains_call); | 2531 |
2523 | 2532 memory_accesses.refs_in_loop.create (number_of_loops (cfun)); |
2524 sbitmap_free (contains_call); | 2533 memory_accesses.refs_in_loop.quick_grow (number_of_loops (cfun)); |
2525 | 2534 memory_accesses.refs_stored_in_loop.create (number_of_loops (cfun)); |
2526 lim_aux_data_map = pointer_map_create (); | 2535 memory_accesses.refs_stored_in_loop.quick_grow (number_of_loops (cfun)); |
2536 memory_accesses.all_refs_stored_in_loop.create (number_of_loops (cfun)); | |
2537 memory_accesses.all_refs_stored_in_loop.quick_grow (number_of_loops (cfun)); | |
2538 | |
2539 for (i = 0; i < number_of_loops (cfun); i++) | |
2540 { | |
2541 bitmap_initialize (&memory_accesses.refs_in_loop[i], | |
2542 &lim_bitmap_obstack); | |
2543 bitmap_initialize (&memory_accesses.refs_stored_in_loop[i], | |
2544 &lim_bitmap_obstack); | |
2545 bitmap_initialize (&memory_accesses.all_refs_stored_in_loop[i], | |
2546 &lim_bitmap_obstack); | |
2547 } | |
2548 | |
2549 memory_accesses.ttae_cache = NULL; | |
2550 | |
2551 /* Initialize bb_loop_postorder with a mapping from loop->num to | |
2552 its postorder index. */ | |
2553 i = 0; | |
2554 bb_loop_postorder = XNEWVEC (unsigned, number_of_loops (cfun)); | |
2555 FOR_EACH_LOOP (loop, LI_FROM_INNERMOST) | |
2556 bb_loop_postorder[loop->num] = i++; | |
2527 } | 2557 } |
2528 | 2558 |
2529 /* Cleans up after the invariant motion pass. */ | 2559 /* Cleans up after the invariant motion pass. */ |
2530 | 2560 |
2531 static void | 2561 static void |
2532 tree_ssa_lim_finalize (void) | 2562 tree_ssa_lim_finalize (void) |
2533 { | 2563 { |
2534 basic_block bb; | 2564 basic_block bb; |
2535 unsigned i; | 2565 unsigned i; |
2536 bitmap b; | 2566 im_mem_ref *ref; |
2537 htab_t h; | 2567 |
2538 | 2568 free_aux_for_edges (); |
2539 FOR_EACH_BB (bb) | 2569 |
2540 { | 2570 FOR_EACH_BB_FN (bb, cfun) |
2541 bb->aux = NULL; | 2571 SET_ALWAYS_EXECUTED_IN (bb, NULL); |
2542 } | 2572 |
2543 | 2573 bitmap_obstack_release (&lim_bitmap_obstack); |
2544 pointer_map_destroy (lim_aux_data_map); | 2574 delete lim_aux_data_map; |
2545 | 2575 |
2546 VEC_free (mem_ref_p, heap, memory_accesses.refs_list); | 2576 delete memory_accesses.refs; |
2547 htab_delete (memory_accesses.refs); | 2577 memory_accesses.refs = NULL; |
2548 | 2578 |
2549 FOR_EACH_VEC_ELT (bitmap, memory_accesses.refs_in_loop, i, b) | 2579 FOR_EACH_VEC_ELT (memory_accesses.refs_list, i, ref) |
2550 BITMAP_FREE (b); | 2580 memref_free (ref); |
2551 VEC_free (bitmap, heap, memory_accesses.refs_in_loop); | 2581 memory_accesses.refs_list.release (); |
2552 | 2582 obstack_free (&mem_ref_obstack, NULL); |
2553 FOR_EACH_VEC_ELT (bitmap, memory_accesses.all_refs_in_loop, i, b) | 2583 |
2554 BITMAP_FREE (b); | 2584 memory_accesses.refs_in_loop.release (); |
2555 VEC_free (bitmap, heap, memory_accesses.all_refs_in_loop); | 2585 memory_accesses.refs_stored_in_loop.release (); |
2556 | 2586 memory_accesses.all_refs_stored_in_loop.release (); |
2557 FOR_EACH_VEC_ELT (bitmap, memory_accesses.clobbered_vops, i, b) | |
2558 BITMAP_FREE (b); | |
2559 VEC_free (bitmap, heap, memory_accesses.clobbered_vops); | |
2560 | |
2561 FOR_EACH_VEC_ELT (htab_t, memory_accesses.vop_ref_map, i, h) | |
2562 htab_delete (h); | |
2563 VEC_free (htab_t, heap, memory_accesses.vop_ref_map); | |
2564 | 2587 |
2565 if (memory_accesses.ttae_cache) | 2588 if (memory_accesses.ttae_cache) |
2566 pointer_map_destroy (memory_accesses.ttae_cache); | 2589 free_affine_expand_cache (&memory_accesses.ttae_cache); |
2590 | |
2591 free (bb_loop_postorder); | |
2567 } | 2592 } |
2568 | 2593 |
2569 /* Moves invariants from loops. Only "expensive" invariants are moved out -- | 2594 /* Moves invariants from loops. Only "expensive" invariants are moved out -- |
2570 i.e. those that are likely to be a win regardless of the register pressure. */ | 2595 i.e. those that are likely to be a win regardless of the register pressure. */
2571 | 2596 |
2572 unsigned int | 2597 static unsigned int |
2573 tree_ssa_lim (void) | 2598 tree_ssa_lim (void) |
2574 { | 2599 { |
2575 unsigned int todo; | 2600 unsigned int todo; |
2576 | 2601 |
2577 tree_ssa_lim_initialize (); | 2602 tree_ssa_lim_initialize (); |
2578 | 2603 |
2579 /* Gathers information about memory accesses in the loops. */ | 2604 /* Gathers information about memory accesses in the loops. */ |
2580 analyze_memory_references (); | 2605 analyze_memory_references (); |
2581 | 2606 |
2607 /* Fills ALWAYS_EXECUTED_IN information for basic blocks. */ | |
2608 fill_always_executed_in (); | |
2609 | |
2582 /* For each statement determine the outermost loop in which it is | 2610 /* For each statement determine the outermost loop in which it is
2583 invariant and the cost of computing the invariant. */ | 2611 invariant and the cost of computing the invariant. */
2584 determine_invariantness (); | 2612 invariantness_dom_walker (CDI_DOMINATORS) |
2613 .walk (cfun->cfg->x_entry_block_ptr); | |
2585 | 2614 |
2586 /* Execute store motion. Force the necessary invariants to be moved | 2615 /* Execute store motion. Force the necessary invariants to be moved |
2587 out of the loops as well. */ | 2616 out of the loops as well. */ |
2588 store_motion (); | 2617 store_motion (); |
2589 | 2618 |
2592 | 2621 |
2593 tree_ssa_lim_finalize (); | 2622 tree_ssa_lim_finalize (); |
2594 | 2623 |
2595 return todo; | 2624 return todo; |
2596 } | 2625 } |
2626 | |
2627 /* Loop invariant motion pass. */ | |
2628 | |
2629 namespace { | |
2630 | |
2631 const pass_data pass_data_lim = | |
2632 { | |
2633 GIMPLE_PASS, /* type */ | |
2634 "lim", /* name */ | |
2635 OPTGROUP_LOOP, /* optinfo_flags */ | |
2636 TV_LIM, /* tv_id */ | |
2637 PROP_cfg, /* properties_required */ | |
2638 0, /* properties_provided */ | |
2639 0, /* properties_destroyed */ | |
2640 0, /* todo_flags_start */ | |
2641 0, /* todo_flags_finish */ | |
2642 }; | |
2643 | |
2644 class pass_lim : public gimple_opt_pass | |
2645 { | |
2646 public: | |
2647 pass_lim (gcc::context *ctxt) | |
2648 : gimple_opt_pass (pass_data_lim, ctxt) | |
2649 {} | |
2650 | |
2651 /* opt_pass methods: */ | |
2652 opt_pass * clone () { return new pass_lim (m_ctxt); } | |
2653 virtual bool gate (function *) { return flag_tree_loop_im != 0; } | |
2654 virtual unsigned int execute (function *); | |
2655 | |
2656 }; // class pass_lim | |
2657 | |
2658 unsigned int | |
2659 pass_lim::execute (function *fun) | |
2660 { | |
2661 bool in_loop_pipeline = scev_initialized_p (); | |
2662 if (!in_loop_pipeline) | |
2663 loop_optimizer_init (LOOPS_NORMAL | LOOPS_HAVE_RECORDED_EXITS); | |
2664 | |
2665 if (number_of_loops (fun) <= 1) | |
2666 return 0; | |
2667 unsigned int todo = tree_ssa_lim (); | |
2668 | |
2669 if (!in_loop_pipeline) | |
2670 loop_optimizer_finalize (); | |
2671 return todo; | |
2672 } | |
2673 | |
2674 } // anon namespace | |
2675 | |
2676 gimple_opt_pass * | |
2677 make_pass_lim (gcc::context *ctxt) | |
2678 { | |
2679 return new pass_lim (ctxt); | |
2680 } | |
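For completeness: the pass is registered under the name "lim" and gated on flag_tree_loop_im (-ftree-loop-im); since the gimple pipeline schedules it several times, its dump files are numbered lim1, lim2, and so on. A toy input for eyeballing store motion (a hypothetical test case, not from the GCC testsuite; the exact dump instance and outcome depend on the rest of the pipeline):

/* Compile with something like
     gcc -O2 -fdump-tree-lim2-details t.c
   and look for "Executing store motion of g" in the lim2 dump.  */
int g;

void
f (int n, int flag)
{
  for (int i = 0; i < n; i++)
    if (flag)
      g = i;   /* conditional store: a candidate for the if-changed flag */
}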
2681 | |
2682 |