111
|
1 /* Header file for SSA iterators.
|
145
|
2 Copyright (C) 2013-2020 Free Software Foundation, Inc.
|
111
|
3
|
|
4 This file is part of GCC.
|
|
5
|
|
6 GCC is free software; you can redistribute it and/or modify it under
|
|
7 the terms of the GNU General Public License as published by the Free
|
|
8 Software Foundation; either version 3, or (at your option) any later
|
|
9 version.
|
|
10
|
|
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
|
|
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
|
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
|
|
14 for more details.
|
|
15
|
|
16 You should have received a copy of the GNU General Public License
|
|
17 along with GCC; see the file COPYING3. If not see
|
|
18 <http://www.gnu.org/licenses/>. */
|
|
19
|
|
20 #ifndef GCC_SSA_ITERATORS_H
|
|
21 #define GCC_SSA_ITERATORS_H
|
|
22
|
|
23 /* Immediate use lists are used to directly access all uses for an SSA
|
|
24 name and get pointers to the statement for each use.
|
|
25
|
|
26 The structure ssa_use_operand_t consists of PREV and NEXT pointers
|
|
27 to maintain the list. A USE pointer, which points to address where
|
|
28 the use is located and a LOC pointer which can point to the
|
|
29 statement where the use is located, or, in the case of the root
|
|
30 node, it points to the SSA name itself.
|
|
31
|
|
32 The list is anchored by an occurrence of ssa_operand_d *in* the
|
|
33 ssa_name node itself (named 'imm_uses'). This node is uniquely
|
|
identified by having a NULL USE pointer, and the LOC pointer
|
|
35 pointing back to the ssa_name node itself. This node forms the
|
|
36 base for a circular list, and initially this is the only node in
|
|
37 the list.
|
|
38
|
|
39 Fast iteration allows each use to be examined, but does not allow
|
|
40 any modifications to the uses or stmts.
|
|
41
|
|
42 Normal iteration allows insertion, deletion, and modification. the
|
|
43 iterator manages this by inserting a marker node into the list
|
|
44 immediately before the node currently being examined in the list.
|
|
45 this marker node is uniquely identified by having null stmt *and* a
|
|
46 null use pointer.
|
|
47
|
|
48 When iterating to the next use, the iteration routines check to see
|
|
49 if the node after the marker has changed. if it has, then the node
|
|
50 following the marker is now the next one to be visited. if not, the
|
|
51 marker node is moved past that node in the list (visualize it as
|
|
52 bumping the marker node through the list). this continues until
|
|
53 the marker node is moved to the original anchor position. the
|
|
54 marker node is then removed from the list.
|
|
55
|
|
56 If iteration is halted early, the marker node must be removed from
|
|
57 the list before continuing. */
|
|
/* Iterator state for walking the immediate uses of an SSA name.  See the
   overview comment above for the marker-node protocol used by the
   modification-safe traversal.  */
struct imm_use_iterator
{
  /* This is the current use the iterator is processing.  */
  ssa_use_operand_t *imm_use;
  /* This marks the last use in the list (the root use node stored in the
     SSA_NAME itself); reaching it terminates the walk.  */
  ssa_use_operand_t *end_p;
  /* This node is inserted and used to mark the end of the uses for a stmt.  */
  ssa_use_operand_t iter_node;
  /* This is the next ssa_name to visit.  IMM_USE may get removed before
     the next one is traversed to, so it must be cached early.  */
  ssa_use_operand_t *next_imm_name;
};
|
|
70
|
|
71
|
|
/* Use this iterator when simply looking at stmts.  Adding, deleting or
   modifying stmts will cause this iterator to malfunction (use
   FOR_EACH_IMM_USE_STMT for that).  DEST receives each use_operand_p.  */

#define FOR_EACH_IMM_USE_FAST(DEST, ITER, SSAVAR)		\
  for ((DEST) = first_readonly_imm_use (&(ITER), (SSAVAR));	\
       !end_readonly_imm_use_p (&(ITER));			\
       (void) ((DEST) = next_readonly_imm_use (&(ITER))))
|
|
79
|
|
/* Use this iterator to visit each stmt which has a use of SSAVAR.  This
   form tolerates modification of the uses; leave it early only via
   BREAK_FROM_IMM_USE_STMT or RETURN_FROM_IMM_USE_STMT below.  */

#define FOR_EACH_IMM_USE_STMT(STMT, ITER, SSAVAR)		\
  for ((STMT) = first_imm_use_stmt (&(ITER), (SSAVAR));	\
       !end_imm_use_stmt_p (&(ITER));			\
       (void) ((STMT) = next_imm_use_stmt (&(ITER))))
|
|
86
|
|
/* Use this to terminate the FOR_EACH_IMM_USE_STMT loop early.  Failure to
   do so will result in leaving an iterator marker node in the immediate
   use list, and nothing good will come from that.  */
#define BREAK_FROM_IMM_USE_STMT(ITER)				\
   {								\
     end_imm_use_stmt_traverse (&(ITER));			\
     break;							\
   }
|
|
95
|
131
|
/* Similarly for return: unlinks the iterator marker node before returning
   VAL from the enclosing function.  */
#define RETURN_FROM_IMM_USE_STMT(ITER, VAL)			\
   {								\
     end_imm_use_stmt_traverse (&(ITER));			\
     return (VAL);						\
   }
|
111
|
102
|
|
/* Use this iterator in combination with FOR_EACH_IMM_USE_STMT to
   get access to each occurrence of ssavar on the stmt returned by
   that iterator.  For instance:

     FOR_EACH_IMM_USE_STMT (stmt, iter, ssavar)
       {
         FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
           {
             SET_USE (use_p, blah);
           }
         update_stmt (stmt);
       }  */

#define FOR_EACH_IMM_USE_ON_STMT(DEST, ITER)			\
  for ((DEST) = first_imm_use_on_stmt (&(ITER));		\
       !end_imm_use_on_stmt_p (&(ITER));			\
       (void) ((DEST) = next_imm_use_on_stmt (&(ITER))))
|
|
120
|
|
121
|
|
122
|
|
123 extern bool single_imm_use_1 (const ssa_use_operand_t *head,
|
|
124 use_operand_p *use_p, gimple **stmt);
|
|
125
|
|
126
|
|
/* Kind of value an ssa_op_iter is currently producing: nothing (done or
   uninitialized), trees, use_operand_p's, or def_operand_p's.  */
enum ssa_op_iter_type {
  ssa_op_iter_none = 0,
  ssa_op_iter_tree,
  ssa_op_iter_use,
  ssa_op_iter_def
};
|
|
133
|
|
/* This structure is used in the operand iterator loops.  It contains the
   items required to determine which operand is retrieved next.  During
   optimization, this structure is scalarized, and any unused fields are
   optimized away, resulting in little overhead.  */

struct ssa_op_iter
{
  /* What kind of operands this iterator yields.  */
  enum ssa_op_iter_type iter_type;
  /* True once all requested operands have been produced.  */
  bool done;
  /* Remaining SSA_OP_* flags still to be processed.  */
  int flags;
  /* Current index into the stmt's operands (defs or PHI args).  */
  unsigned i;
  /* Number of operands indexed by I.  */
  unsigned numops;
  /* Remaining entries of the stmt's use-operand list.  */
  use_optype_p uses;
  /* The statement being iterated over.  */
  gimple *stmt;
};
|
|
149
|
|
/* NOTE: Keep these in sync with doc/tree-ssa.texi.  */
/* These flags are used to determine which operands are returned during
   execution of the loop.  */
#define SSA_OP_USE		0x01	/* Real USE operands.  */
#define SSA_OP_DEF		0x02	/* Real DEF operands.  */
#define SSA_OP_VUSE		0x04	/* VUSE operands.  */
#define SSA_OP_VDEF		0x08	/* VDEF operands.  */
/* These are commonly grouped operand flags.  */
#define SSA_OP_VIRTUAL_USES	(SSA_OP_VUSE)
#define SSA_OP_VIRTUAL_DEFS	(SSA_OP_VDEF)
#define SSA_OP_ALL_VIRTUALS     (SSA_OP_VIRTUAL_USES | SSA_OP_VIRTUAL_DEFS)
#define SSA_OP_ALL_USES		(SSA_OP_VIRTUAL_USES | SSA_OP_USE)
#define SSA_OP_ALL_DEFS		(SSA_OP_VIRTUAL_DEFS | SSA_OP_DEF)
#define SSA_OP_ALL_OPERANDS	(SSA_OP_ALL_USES | SSA_OP_ALL_DEFS)
|
|
164
|
|
/* This macro executes a loop over the operands of STMT specified in FLAGS
   (a mask of the SSA_OP_* bits above), returning each operand as a 'tree'
   in the variable TREEVAR.  ITER is an ssa_op_iter structure used to
   control the loop.  */
#define FOR_EACH_SSA_TREE_OPERAND(TREEVAR, STMT, ITER, FLAGS)	\
  for (TREEVAR = op_iter_init_tree (&(ITER), STMT, FLAGS);	\
       !op_iter_done (&(ITER));					\
       (void) (TREEVAR = op_iter_next_tree (&(ITER))))
|
|
172
|
|
/* This macro executes a loop over the operands of STMT specified in FLAGS,
   returning each operand as a 'use_operand_p' in the variable USEVAR.
   ITER is an ssa_op_iter structure used to control the loop.  */
#define FOR_EACH_SSA_USE_OPERAND(USEVAR, STMT, ITER, FLAGS)	\
  for (USEVAR = op_iter_init_use (&(ITER), STMT, FLAGS);	\
       !op_iter_done (&(ITER));					\
       USEVAR = op_iter_next_use (&(ITER)))
|
|
180
|
|
/* This macro executes a loop over the operands of STMT specified in FLAGS,
   returning each operand as a 'def_operand_p' in the variable DEFVAR.
   ITER is an ssa_op_iter structure used to control the loop.  */
#define FOR_EACH_SSA_DEF_OPERAND(DEFVAR, STMT, ITER, FLAGS)	\
  for (DEFVAR = op_iter_init_def (&(ITER), STMT, FLAGS);	\
       !op_iter_done (&(ITER));					\
       DEFVAR = op_iter_next_def (&(ITER)))
|
|
188
|
|
/* This macro will execute a loop over all the arguments of a PHI which
   match FLAGS.  A use_operand_p is always returned via USEVAR.  FLAGS
   can be either SSA_OP_USE or SSA_OP_VIRTUAL_USES or SSA_OP_ALL_USES.  */
#define FOR_EACH_PHI_ARG(USEVAR, STMT, ITER, FLAGS)		\
  for ((USEVAR) = op_iter_init_phiuse (&(ITER), STMT, FLAGS);	\
       !op_iter_done (&(ITER));					\
       (USEVAR) = op_iter_next_use (&(ITER)))
|
|
196
|
|
197
|
|
/* This macro will execute a loop over a stmt, regardless of whether it is
   a real stmt or a PHI node, looking at the USE nodes matching FLAGS.  */
#define FOR_EACH_PHI_OR_STMT_USE(USEVAR, STMT, ITER, FLAGS)	\
  for ((USEVAR) = (gimple_code (STMT) == GIMPLE_PHI		\
		   ? op_iter_init_phiuse (&(ITER),		\
					  as_a <gphi *> (STMT), \
					  FLAGS)		\
		   : op_iter_init_use (&(ITER), STMT, FLAGS));	\
       !op_iter_done (&(ITER));					\
       (USEVAR) = op_iter_next_use (&(ITER)))
|
|
208
|
|
/* This macro will execute a loop over a stmt, regardless of whether it is
   a real stmt or a PHI node, looking at the DEF nodes matching FLAGS.  */
#define FOR_EACH_PHI_OR_STMT_DEF(DEFVAR, STMT, ITER, FLAGS)	\
  for ((DEFVAR) = (gimple_code (STMT) == GIMPLE_PHI		\
		   ? op_iter_init_phidef (&(ITER),		\
					  as_a <gphi *> (STMT), \
					  FLAGS)		\
		   : op_iter_init_def (&(ITER), STMT, FLAGS));	\
       !op_iter_done (&(ITER));					\
       (DEFVAR) = op_iter_next_def (&(ITER)))
|
|
219
|
|
/* This macro returns an operand in STMT as a tree if it is the ONLY
   operand matching FLAGS.  If there are 0 or more than 1 operand matching
   FLAGS, then NULL_TREE is returned.  */
#define SINGLE_SSA_TREE_OPERAND(STMT, FLAGS)			\
  single_ssa_tree_operand (STMT, FLAGS)
|
|
225
|
|
/* This macro returns an operand in STMT as a use_operand_p if it is the ONLY
   operand matching FLAGS.  If there are 0 or more than 1 operand matching
   FLAGS, then NULL_USE_OPERAND_P is returned.  */
#define SINGLE_SSA_USE_OPERAND(STMT, FLAGS)			\
  single_ssa_use_operand (STMT, FLAGS)
|
|
231
|
|
/* This macro returns an operand in STMT as a def_operand_p if it is the ONLY
   operand matching FLAGS.  If there are 0 or more than 1 operand matching
   FLAGS, then NULL_DEF_OPERAND_P is returned.  */
#define SINGLE_SSA_DEF_OPERAND(STMT, FLAGS)			\
  single_ssa_def_operand (STMT, FLAGS)
|
|
237
|
|
/* This macro returns TRUE if there are no operands matching FLAGS in STMT.  */
#define ZERO_SSA_OPERANDS(STMT, FLAGS)	zero_ssa_operands (STMT, FLAGS)
|
|
240
|
|
/* This macro counts the number of operands in STMT matching FLAGS.  */
#define NUM_SSA_OPERANDS(STMT, FLAGS)	num_ssa_operands (STMT, FLAGS)
|
|
243
|
|
244
|
|
/* Delink an immediate_uses node LINKNODE from its chain.  Safe to call on
   a node that is not currently linked (identified by a NULL PREV).  */
static inline void
delink_imm_use (ssa_use_operand_t *linknode)
{
  /* Return if this node is not in a list.  */
  if (linknode->prev == NULL)
    return;

  /* Splice the node out of the circular list, then mark it unlinked.  */
  linknode->prev->next = linknode->next;
  linknode->next->prev = linknode->prev;
  linknode->prev = NULL;
  linknode->next = NULL;
}
|
|
258
|
|
/* Link ssa_imm_use node LINKNODE into the chain immediately after LIST.  */
static inline void
link_imm_use_to_list (ssa_use_operand_t *linknode, ssa_use_operand_t *list)
{
  /* Link the new node at the head of the list.  If we are in the process of
     traversing the list, we won't visit any new nodes added to it.  */
  linknode->prev = list;
  linknode->next = list->next;
  list->next->prev = linknode;
  list->next = linknode;
}
|
|
270
|
|
/* Link ssa_imm_use node LINKNODE into the chain for DEF.  If DEF is not
   an SSA name, LINKNODE is merely marked as unlinked (NULL PREV).  */
static inline void
link_imm_use (ssa_use_operand_t *linknode, tree def)
{
  ssa_use_operand_t *root;

  if (!def || TREE_CODE (def) != SSA_NAME)
    linknode->prev = NULL;
  else
    {
      root = &(SSA_NAME_IMM_USE_NODE (def));
      /* A non-NULL USE pointer must already refer to DEF.  */
      if (linknode->use)
        gcc_checking_assert (*(linknode->use) == def);
      link_imm_use_to_list (linknode, root);
    }
}
|
|
287
|
|
/* Set the value of a use pointed to by USE to VAL, moving USE off the old
   value's immediate-use list and onto VAL's (if VAL is an SSA name).  */
static inline void
set_ssa_use_from_ptr (use_operand_p use, tree val)
{
  delink_imm_use (use);
  *(use->use) = val;
  link_imm_use (use, val);
}
|
|
296
|
|
/* Link ssa_imm_use node LINKNODE into the chain for DEF, with use occurring
   in STMT.  A NULL STMT leaves LINKNODE unlinked from any list.  */
static inline void
link_imm_use_stmt (ssa_use_operand_t *linknode, tree def, gimple *stmt)
{
  if (stmt)
    link_imm_use (linknode, def);
  else
    link_imm_use (linknode, NULL);
  linknode->loc.stmt = stmt;
}
|
|
308
|
|
/* Relink NODE in place of OLD in the list, delinking OLD.  If OLD was not
   linked (NULL PREV), NODE merely copies its unlinked state.  */
static inline void
relink_imm_use (ssa_use_operand_t *node, ssa_use_operand_t *old)
{
  /* The node one had better be in the same list.  */
  gcc_checking_assert (*(old->use) == *(node->use));
  node->prev = old->prev;
  node->next = old->next;
  if (old->prev)
    {
      old->prev->next = node;
      old->next->prev = node;
      /* Remove the old node from the list.  */
      old->prev = NULL;
    }
}
|
|
325
|
|
/* Relink ssa_imm_use node LINKNODE into the chain in place of OLD, with use
   occurring in STMT.  A NULL STMT leaves LINKNODE unlinked.  */
static inline void
relink_imm_use_stmt (ssa_use_operand_t *linknode, ssa_use_operand_t *old,
		     gimple *stmt)
{
  if (stmt)
    relink_imm_use (linknode, old);
  else
    link_imm_use (linknode, NULL);
  linknode->loc.stmt = stmt;
}
|
|
338
|
|
339
|
|
/* Return true if IMM has reached the end of the immediate use list
   (i.e. is back at the root node stored in the SSA name).  */
static inline bool
end_readonly_imm_use_p (const imm_use_iterator *imm)
{
  return (imm->imm_use == imm->end_p);
}
|
|
346
|
|
/* Initialize iterator IMM to process the read-only use list for VAR and
   return the first use, or NULL_USE_OPERAND_P if the list is empty.  */
static inline use_operand_p
first_readonly_imm_use (imm_use_iterator *imm, tree var)
{
  imm->end_p = &(SSA_NAME_IMM_USE_NODE (var));
  imm->imm_use = imm->end_p->next;
  /* Cache the following node so next_readonly_imm_use can detect list
     modification during the walk.  */
  imm->iter_node.next = imm->imm_use->next;
  if (end_readonly_imm_use_p (imm))
    return NULL_USE_OPERAND_P;
  return imm->imm_use;
}
|
|
358
|
|
/* Bump IMM to the next use in the list; return it, or NULL_USE_OPERAND_P
   when the end of the list is reached.  */
static inline use_operand_p
next_readonly_imm_use (imm_use_iterator *imm)
{
  use_operand_p old = imm->imm_use;

  /* If this assertion fails, it indicates the 'next' pointer has changed
     since the last bump.  This indicates that the list is being modified
     via stmt changes, or SET_USE, or somesuch thing, and you need to be
     using the SAFE version of the iterator.  */
  if (flag_checking)
    {
      gcc_assert (imm->iter_node.next == old->next);
      imm->iter_node.next = old->next->next;
    }

  imm->imm_use = old->next;
  if (end_readonly_imm_use_p (imm))
    return NULL_USE_OPERAND_P;
  return imm->imm_use;
}
|
|
380
|
|
381
|
|
382 /* Return true if VAR has no nondebug uses. */
|
|
383 static inline bool
|
|
384 has_zero_uses (const_tree var)
|
|
385 {
|
|
386 const ssa_use_operand_t *const head = &(SSA_NAME_IMM_USE_NODE (var));
|
|
387 const ssa_use_operand_t *ptr;
|
|
388
|
|
389 for (ptr = head->next; ptr != head; ptr = ptr->next)
|
|
390 if (USE_STMT (ptr) && !is_gimple_debug (USE_STMT (ptr)))
|
|
391 return false;
|
|
392
|
|
393 return true;
|
|
394 }
|
|
395
|
|
396 /* Return true if VAR has a single nondebug use. */
|
|
397 static inline bool
|
|
398 has_single_use (const_tree var)
|
|
399 {
|
|
400 const ssa_use_operand_t *const head = &(SSA_NAME_IMM_USE_NODE (var));
|
|
401 const ssa_use_operand_t *ptr;
|
|
402 bool single = false;
|
|
403
|
|
404 for (ptr = head->next; ptr != head; ptr = ptr->next)
|
|
405 if (USE_STMT(ptr) && !is_gimple_debug (USE_STMT (ptr)))
|
|
406 {
|
|
407 if (single)
|
|
408 return false;
|
|
409 else
|
|
410 single = true;
|
|
411 }
|
|
412
|
|
413 return single;
|
|
414 }
|
|
415
|
|
/* If VAR has only a single immediate nondebug use, return true, and
   set USE_P and STMT to the use pointer and stmt of occurrence.
   Otherwise set both to NULL/NULL_USE_OPERAND_P and return false.  */
static inline bool
single_imm_use (const_tree var, use_operand_p *use_p, gimple **stmt)
{
  const ssa_use_operand_t *const ptr = &(SSA_NAME_IMM_USE_NODE (var));

  /* If there aren't any uses whatsoever, we're done.  */
  if (ptr == ptr->next)
    {
 return_false:
      *use_p = NULL_USE_OPERAND_P;
      *stmt = NULL;
      return false;
    }

  /* If there's a single use, check that it's not a debug stmt.  */
  if (ptr == ptr->next->next)
    {
      if (USE_STMT (ptr->next) && !is_gimple_debug (USE_STMT (ptr->next)))
	{
	  *use_p = ptr->next;
	  *stmt = ptr->next->loc.stmt;
	  return true;
	}
      else
	goto return_false;
    }

  /* Multiple list entries: the out-of-line helper filters debug uses.  */
  return single_imm_use_1 (ptr, use_p, stmt);
}
|
|
447
|
|
/* Return the number of nondebug immediate uses of VAR.  */
static inline unsigned int
num_imm_uses (const_tree var)
{
  const ssa_use_operand_t *const start = &(SSA_NAME_IMM_USE_NODE (var));
  const ssa_use_operand_t *ptr;
  unsigned int num = 0;

  /* When no debug bind stmts can exist, the is_gimple_debug test is
     unnecessary and the cheaper loop suffices.  */
  if (!MAY_HAVE_DEBUG_BIND_STMTS)
    {
      for (ptr = start->next; ptr != start; ptr = ptr->next)
	if (USE_STMT (ptr))
	  num++;
    }
  else
    for (ptr = start->next; ptr != start; ptr = ptr->next)
      if (USE_STMT (ptr) && !is_gimple_debug (USE_STMT (ptr)))
	num++;

  return num;
}
|
|
469
|
|
470 /* ----------------------------------------------------------------------- */
|
|
471
|
|
/* The following set of routines are used to iterate over the various
   types of SSA operands.  */
|
|
474
|
|
/* Return true if PTR is finished iterating.  */
static inline bool
op_iter_done (const ssa_op_iter *ptr)
{
  return ptr->done;
}
|
|
481
|
|
/* Get the next iterator use value for PTR, or NULL_USE_OPERAND_P (and mark
   PTR done) when exhausted.  Regular stmt uses come from the use-operand
   list; PHI arguments are indexed by I/NUMOPS.  */
static inline use_operand_p
op_iter_next_use (ssa_op_iter *ptr)
{
  use_operand_p use_p;
  gcc_checking_assert (ptr->iter_type == ssa_op_iter_use);
  if (ptr->uses)
    {
      use_p = USE_OP_PTR (ptr->uses);
      ptr->uses = ptr->uses->next;
      return use_p;
    }
  if (ptr->i < ptr->numops)
    {
      return PHI_ARG_DEF_PTR (ptr->stmt, (ptr->i)++);
    }
  ptr->done = true;
  return NULL_USE_OPERAND_P;
}
|
|
501
|
|
/* Get the next iterator def value for PTR, or NULL_DEF_OPERAND_P (and mark
   PTR done) when exhausted.  The VDEF, if requested, is produced first;
   each flag bit is cleared from PTR->flags as its operands run out.  */
static inline def_operand_p
op_iter_next_def (ssa_op_iter *ptr)
{
  gcc_checking_assert (ptr->iter_type == ssa_op_iter_def);
  if (ptr->flags & SSA_OP_VDEF)
    {
      tree *p;
      ptr->flags &= ~SSA_OP_VDEF;
      p = gimple_vdef_ptr (ptr->stmt);
      if (p && *p)
	return p;
    }
  if (ptr->flags & SSA_OP_DEF)
    {
      while (ptr->i < ptr->numops)
	{
	  tree *val = gimple_op_ptr (ptr->stmt, ptr->i);
	  ptr->i++;
	  if (*val)
	    {
	      /* An asm output is wrapped in a TREE_LIST; look through it.  */
	      if (TREE_CODE (*val) == TREE_LIST)
		val = &TREE_VALUE (*val);
	      if (TREE_CODE (*val) == SSA_NAME
		  || is_gimple_reg (*val))
		return val;
	    }
	}
      ptr->flags &= ~SSA_OP_DEF;
    }

  ptr->done = true;
  return NULL_DEF_OPERAND_P;
}
|
|
536
|
|
/* Get the next iterator tree value for PTR, or NULL_TREE (and mark PTR
   done) when exhausted.  Uses are produced first, then the VDEF, then
   the real defs.  */
static inline tree
op_iter_next_tree (ssa_op_iter *ptr)
{
  tree val;
  gcc_checking_assert (ptr->iter_type == ssa_op_iter_tree);
  if (ptr->uses)
    {
      val = USE_OP (ptr->uses);
      ptr->uses = ptr->uses->next;
      return val;
    }
  if (ptr->flags & SSA_OP_VDEF)
    {
      ptr->flags &= ~SSA_OP_VDEF;
      if ((val = gimple_vdef (ptr->stmt)))
	return val;
    }
  if (ptr->flags & SSA_OP_DEF)
    {
      while (ptr->i < ptr->numops)
	{
	  val = gimple_op (ptr->stmt, ptr->i);
	  ptr->i++;
	  if (val)
	    {
	      /* An asm output is wrapped in a TREE_LIST; look through it.  */
	      if (TREE_CODE (val) == TREE_LIST)
		val = TREE_VALUE (val);
	      if (TREE_CODE (val) == SSA_NAME
		  || is_gimple_reg (val))
		return val;
	    }
	}
      ptr->flags &= ~SSA_OP_DEF;
    }

  ptr->done = true;
  return NULL_TREE;
}
|
|
576
|
|
577
|
|
/* This function clears the iterator PTR, and marks it done.  This is
   normally used to prevent warnings in the compiler about possibly
   uninitialized components.  */

static inline void
clear_and_done_ssa_iter (ssa_op_iter *ptr)
{
  ptr->i = 0;
  ptr->numops = 0;
  ptr->uses = NULL;
  ptr->iter_type = ssa_op_iter_none;
  ptr->stmt = NULL;
  ptr->done = true;
  ptr->flags = 0;
}
|
|
593
|
|
/* Initialize the iterator PTR for the operands in STMT requested by FLAGS.
   Callers must then set PTR->iter_type (the op_iter_init_* wrappers do).  */
static inline void
op_iter_init (ssa_op_iter *ptr, gimple *stmt, int flags)
{
  /* PHI nodes require a different iterator initialization path.  We
     do not support iterating over virtual defs or uses without
     iterating over defs or uses at the same time.  */
  gcc_checking_assert (gimple_code (stmt) != GIMPLE_PHI
		       && (!(flags & SSA_OP_VDEF) || (flags & SSA_OP_DEF))
		       && (!(flags & SSA_OP_VUSE) || (flags & SSA_OP_USE)));
  ptr->numops = 0;
  if (flags & (SSA_OP_DEF | SSA_OP_VDEF))
    {
      /* Determine how many potential def slots STMT has; stmt kinds with
	 none get the def flags stripped so iteration skips them.  */
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	case GIMPLE_CALL:
	  ptr->numops = 1;
	  break;
	case GIMPLE_ASM:
	  ptr->numops = gimple_asm_noutputs (as_a <gasm *> (stmt));
	  break;
	case GIMPLE_TRANSACTION:
	  ptr->numops = 0;
	  flags &= ~SSA_OP_DEF;
	  break;
	default:
	  ptr->numops = 0;
	  flags &= ~(SSA_OP_DEF | SSA_OP_VDEF);
	  break;
	}
    }
  ptr->uses = (flags & (SSA_OP_USE|SSA_OP_VUSE)) ? gimple_use_ops (stmt) : NULL;
  /* The VUSE, when present, is the first entry of the use list; skip it
     when virtual uses were not requested.  */
  if (!(flags & SSA_OP_VUSE)
      && ptr->uses
      && gimple_vuse (stmt) != NULL_TREE)
    ptr->uses = ptr->uses->next;
  ptr->done = false;
  ptr->i = 0;

  ptr->stmt = stmt;
  ptr->flags = flags;
}
|
|
637
|
|
/* Initialize iterator PTR to the use operands in STMT based on FLAGS.  Return
   the first use.  FLAGS must request uses only (no def bits).  */
static inline use_operand_p
op_iter_init_use (ssa_op_iter *ptr, gimple *stmt, int flags)
{
  gcc_checking_assert ((flags & SSA_OP_ALL_DEFS) == 0
		       && (flags & SSA_OP_USE));
  op_iter_init (ptr, stmt, flags);
  ptr->iter_type = ssa_op_iter_use;
  return op_iter_next_use (ptr);
}
|
|
649
|
|
/* Initialize iterator PTR to the def operands in STMT based on FLAGS.  Return
   the first def.  FLAGS must request defs only (no use bits).  */
static inline def_operand_p
op_iter_init_def (ssa_op_iter *ptr, gimple *stmt, int flags)
{
  gcc_checking_assert ((flags & SSA_OP_ALL_USES) == 0
		       && (flags & SSA_OP_DEF));
  op_iter_init (ptr, stmt, flags);
  ptr->iter_type = ssa_op_iter_def;
  return op_iter_next_def (ptr);
}
|
|
661
|
|
/* Initialize iterator PTR to the operands in STMT based on FLAGS.  Return
   the first operand as a tree.  */
static inline tree
op_iter_init_tree (ssa_op_iter *ptr, gimple *stmt, int flags)
{
  op_iter_init (ptr, stmt, flags);
  ptr->iter_type = ssa_op_iter_tree;
  return op_iter_next_tree (ptr);
}
|
|
671
|
|
672
|
|
673 /* If there is a single operand in STMT matching FLAGS, return it. Otherwise
|
|
674 return NULL. */
|
|
675 static inline tree
|
|
676 single_ssa_tree_operand (gimple *stmt, int flags)
|
|
677 {
|
|
678 tree var;
|
|
679 ssa_op_iter iter;
|
|
680
|
|
681 var = op_iter_init_tree (&iter, stmt, flags);
|
|
682 if (op_iter_done (&iter))
|
|
683 return NULL_TREE;
|
|
684 op_iter_next_tree (&iter);
|
|
685 if (op_iter_done (&iter))
|
|
686 return var;
|
|
687 return NULL_TREE;
|
|
688 }
|
|
689
|
|
690
|
|
691 /* If there is a single operand in STMT matching FLAGS, return it. Otherwise
|
|
692 return NULL. */
|
|
693 static inline use_operand_p
|
|
694 single_ssa_use_operand (gimple *stmt, int flags)
|
|
695 {
|
|
696 use_operand_p var;
|
|
697 ssa_op_iter iter;
|
|
698
|
|
699 var = op_iter_init_use (&iter, stmt, flags);
|
|
700 if (op_iter_done (&iter))
|
|
701 return NULL_USE_OPERAND_P;
|
|
702 op_iter_next_use (&iter);
|
|
703 if (op_iter_done (&iter))
|
|
704 return var;
|
|
705 return NULL_USE_OPERAND_P;
|
|
706 }
|
|
707
|
|
/* Return the single virtual use operand in STMT if present.  Otherwise
   return NULL_USE_OPERAND_P.  The VUSE, when it exists, is the first
   entry of the stmt's use-operand list.  */
static inline use_operand_p
ssa_vuse_operand (gimple *stmt)
{
  if (! gimple_vuse (stmt))
    return NULL_USE_OPERAND_P;
  return USE_OP_PTR (gimple_use_ops (stmt));
}
|
|
717
|
|
718
|
|
719 /* If there is a single operand in STMT matching FLAGS, return it. Otherwise
|
|
720 return NULL. */
|
|
721 static inline def_operand_p
|
|
722 single_ssa_def_operand (gimple *stmt, int flags)
|
|
723 {
|
|
724 def_operand_p var;
|
|
725 ssa_op_iter iter;
|
|
726
|
|
727 var = op_iter_init_def (&iter, stmt, flags);
|
|
728 if (op_iter_done (&iter))
|
|
729 return NULL_DEF_OPERAND_P;
|
|
730 op_iter_next_def (&iter);
|
|
731 if (op_iter_done (&iter))
|
|
732 return var;
|
|
733 return NULL_DEF_OPERAND_P;
|
|
734 }
|
|
735
|
|
736
|
|
737 /* Return true if there are zero operands in STMT matching the type
|
|
738 given in FLAGS. */
|
|
739 static inline bool
|
|
740 zero_ssa_operands (gimple *stmt, int flags)
|
|
741 {
|
|
742 ssa_op_iter iter;
|
|
743
|
|
744 op_iter_init_tree (&iter, stmt, flags);
|
|
745 return op_iter_done (&iter);
|
|
746 }
|
|
747
|
|
748
|
|
/* Return the number of operands matching FLAGS in STMT.  STMT must not be
   a PHI node.  */
static inline int
num_ssa_operands (gimple *stmt, int flags)
{
  ssa_op_iter iter;
  tree t;
  int num = 0;

  gcc_checking_assert (gimple_code (stmt) != GIMPLE_PHI);
  FOR_EACH_SSA_TREE_OPERAND (t, stmt, iter, flags)
    num++;
  return num;
}
|
|
762
|
|
/* If the single DEF in the PHI node STMT matches FLAG, return it.
   Otherwise return NULL_TREE.  */
static inline tree
single_phi_def (gphi *stmt, int flags)
{
  tree def = PHI_RESULT (stmt);
  if ((flags & SSA_OP_DEF) && is_gimple_reg (def))
    return def;
  if ((flags & SSA_OP_VIRTUAL_DEFS) && !is_gimple_reg (def))
    return def;
  return NULL_TREE;
}
|
|
775
|
|
/* Initialize the iterator PTR for uses matching FLAGS in PHI.  FLAGS should
   be either SSA_OP_USE or SSA_OP_VIRTUAL_USES.  Return the first matching
   use, or NULL_USE_OPERAND_P if none.  */
static inline use_operand_p
op_iter_init_phiuse (ssa_op_iter *ptr, gphi *phi, int flags)
{
  tree phi_def = gimple_phi_result (phi);
  int comp;

  clear_and_done_ssa_iter (ptr);
  ptr->done = false;

  gcc_checking_assert ((flags & (SSA_OP_USE | SSA_OP_VIRTUAL_USES)) != 0);

  /* All of a PHI's arguments are of one kind, determined by its result.  */
  comp = (is_gimple_reg (phi_def) ? SSA_OP_USE : SSA_OP_VIRTUAL_USES);

  /* If the PHI node doesn't have the operand type we care about,
     we're done.  */
  if ((flags & comp) == 0)
    {
      ptr->done = true;
      return NULL_USE_OPERAND_P;
    }

  ptr->stmt = phi;
  ptr->numops = gimple_phi_num_args (phi);
  ptr->iter_type = ssa_op_iter_use;
  ptr->flags = flags;
  return op_iter_next_use (ptr);
}
|
|
804
|
|
805
|
|
/* Start an iterator for a PHI definition matching FLAGS.  Return a pointer
   to the PHI result, or NULL_DEF_OPERAND_P if its kind doesn't match.  */

static inline def_operand_p
op_iter_init_phidef (ssa_op_iter *ptr, gphi *phi, int flags)
{
  tree phi_def = PHI_RESULT (phi);
  int comp;

  clear_and_done_ssa_iter (ptr);
  ptr->done = false;

  gcc_checking_assert ((flags & (SSA_OP_DEF | SSA_OP_VIRTUAL_DEFS)) != 0);

  comp = (is_gimple_reg (phi_def) ? SSA_OP_DEF : SSA_OP_VIRTUAL_DEFS);

  /* If the PHI node doesn't have the operand type we care about,
     we're done.  */
  if ((flags & comp) == 0)
    {
      ptr->done = true;
      return NULL_DEF_OPERAND_P;
    }

  ptr->iter_type = ssa_op_iter_def;
  /* The first call to op_iter_next_def will terminate the iterator since
     all the fields are NULL.  Simply return the result here as the first and
     therefore only result.  */
  return PHI_RESULT_PTR (phi);
}
|
|
835
|
|
/* Return true if IMM has reached the end of the immediate use stmt list.  */

static inline bool
end_imm_use_stmt_p (const imm_use_iterator *imm)
{
  return (imm->imm_use == imm->end_p);
}
|
|
843
|
|
/* Finish the traversal of an immediate use stmt list IMM by removing the
   placeholder (marker) node from the list.  */

static inline void
end_imm_use_stmt_traverse (imm_use_iterator *imm)
{
  delink_imm_use (&(imm->iter_node));
}
|
|
852
|
|
/* Immediate use traversal of uses within a stmt require that all the
   uses on a stmt be sequentially listed.  This routine is used to build up
   this sequential list by adding USE_P to the end of the current list
   currently delimited by HEAD and LAST_P.  The new LAST_P value is
   returned.  */

static inline use_operand_p
move_use_after_head (use_operand_p use_p, use_operand_p head,
		      use_operand_p last_p)
{
  gcc_checking_assert (USE_FROM_PTR (use_p) == USE_FROM_PTR (head));
  /* Skip head when we find it.  */
  if (use_p != head)
    {
      /* If use_p is already linked in after last_p, continue.  */
      if (last_p->next == use_p)
	last_p = use_p;
      else
	{
	  /* Delink from current location, and link in at last_p.  */
	  delink_imm_use (use_p);
	  link_imm_use_to_list (use_p, last_p);
	  last_p = use_p;
	}
    }
  return last_p;
}
|
|
880
|
|
881
|
|
/* This routine will relink all uses with the same stmt as HEAD into the list
   immediately following HEAD for iterator IMM, then place IMM's marker node
   after them.  */

static inline void
link_use_stmts_after (use_operand_p head, imm_use_iterator *imm)
{
  use_operand_p use_p;
  use_operand_p last_p = head;
  gimple *head_stmt = USE_STMT (head);
  tree use = USE_FROM_PTR (head);
  ssa_op_iter op_iter;
  int flag;

  /* Only look at virtual or real uses, depending on the type of HEAD.  */
  flag = (is_gimple_reg (use) ? SSA_OP_USE : SSA_OP_VIRTUAL_USES);

  if (gphi *phi = dyn_cast <gphi *> (head_stmt))
    {
      FOR_EACH_PHI_ARG (use_p, phi, op_iter, flag)
	if (USE_FROM_PTR (use_p) == use)
	  last_p = move_use_after_head (use_p, head, last_p);
    }
  else
    {
      if (flag == SSA_OP_USE)
	{
	  FOR_EACH_SSA_USE_OPERAND (use_p, head_stmt, op_iter, flag)
	    if (USE_FROM_PTR (use_p) == use)
	      last_p = move_use_after_head (use_p, head, last_p);
	}
      else if ((use_p = gimple_vuse_op (head_stmt)) != NULL_USE_OPERAND_P)
	{
	  if (USE_FROM_PTR (use_p) == use)
	    last_p = move_use_after_head (use_p, head, last_p);
	}
    }
  /* Link iter node in after last_p.  */
  if (imm->iter_node.prev != NULL)
    delink_imm_use (&imm->iter_node);
  link_imm_use_to_list (&(imm->iter_node), last_p);
}
|
|
923
|
|
/* Initialize IMM to traverse over uses of VAR.  Return the first statement,
   or NULL if VAR has no uses.  */
static inline gimple *
first_imm_use_stmt (imm_use_iterator *imm, tree var)
{
  imm->end_p = &(SSA_NAME_IMM_USE_NODE (var));
  imm->imm_use = imm->end_p->next;
  imm->next_imm_name = NULL_USE_OPERAND_P;

  /* iter_node is used as a marker within the immediate use list to indicate
     where the end of the current stmt's uses are.  Initialize it to NULL
     stmt and use, which indicates a marker node.  */
  imm->iter_node.prev = NULL_USE_OPERAND_P;
  imm->iter_node.next = NULL_USE_OPERAND_P;
  imm->iter_node.loc.stmt = NULL;
  imm->iter_node.use = NULL;

  if (end_imm_use_stmt_p (imm))
    return NULL;

  /* Gather this stmt's uses contiguously and drop the marker after them.  */
  link_use_stmts_after (imm->imm_use, imm);

  return USE_STMT (imm->imm_use);
}
|
|
947
|
|
/* Bump IMM to the next stmt which has a use of var; return it, or NULL
   when the walk is complete (the marker node is then removed).  */

static inline gimple *
next_imm_use_stmt (imm_use_iterator *imm)
{
  /* The marker node delimits the end of the previous stmt's uses; the node
     after it belongs to the next stmt to visit.  */
  imm->imm_use = imm->iter_node.next;
  if (end_imm_use_stmt_p (imm))
    {
      if (imm->iter_node.prev != NULL)
	delink_imm_use (&imm->iter_node);
      return NULL;
    }

  link_use_stmts_after (imm->imm_use, imm);
  return USE_STMT (imm->imm_use);
}
|
|
964
|
|
/* This routine will return the first use on the stmt IMM currently refers
   to.  The following use is cached since visiting this one may unlink it.  */

static inline use_operand_p
first_imm_use_on_stmt (imm_use_iterator *imm)
{
  imm->next_imm_name = imm->imm_use->next;
  return imm->imm_use;
}
|
|
974
|
|
/* Return TRUE if the last use on the stmt IMM refers to has been visited
   (i.e. the iterator has reached its own marker node).  */

static inline bool
end_imm_use_on_stmt_p (const imm_use_iterator *imm)
{
  return (imm->imm_use == &(imm->iter_node));
}
|
|
982
|
|
/* Bump to the next use on the stmt IMM refers to, return NULL if done.  */

static inline use_operand_p
next_imm_use_on_stmt (imm_use_iterator *imm)
{
  imm->imm_use = imm->next_imm_name;
  if (end_imm_use_on_stmt_p (imm))
    return NULL_USE_OPERAND_P;
  else
    {
      /* Cache the following use before the current one can be unlinked.  */
      imm->next_imm_name = imm->imm_use->next;
      return imm->imm_use;
    }
}
|
|
997
|
|
/* Delink all immediate_use information for STMT (real stmt or PHI),
   removing each of its uses from the corresponding SSA names' lists.  */
static inline void
delink_stmt_imm_use (gimple *stmt)
{
  ssa_op_iter iter;
  use_operand_p use_p;

  if (ssa_operands_active (cfun))
    FOR_EACH_PHI_OR_STMT_USE (use_p, stmt, iter, SSA_OP_ALL_USES)
      delink_imm_use (use_p);
}
|
|
1009
|
|
#endif /* GCC_SSA_ITERATORS_H */
|