comparison gcc/tree-object-size.c @ 0:a06113de4d67

first commit
author kent <kent@cr.ie.u-ryukyu.ac.jp>
date Fri, 17 Jul 2009 14:47:48 +0900
parents
children 77e2b8dfacca
1 /* __builtin_object_size (ptr, object_size_type) computation
2 Copyright (C) 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "toplev.h"
27 #include "diagnostic.h"
28 #include "tree-flow.h"
29 #include "tree-pass.h"
30 #include "tree-ssa-propagate.h"
31
32 struct object_size_info
33 {
34 int object_size_type;
35 bitmap visited, reexamine;
36 int pass;
37 bool changed;
38 unsigned int *depths;
39 unsigned int *stack, *tos;
40 };
41
42 static unsigned HOST_WIDE_INT unknown[4] = { -1, -1, 0, 0 };
43
44 static tree compute_object_offset (const_tree, const_tree);
45 static unsigned HOST_WIDE_INT addr_object_size (const_tree, int);
46 static unsigned HOST_WIDE_INT alloc_object_size (const_gimple, int);
47 static tree pass_through_call (const_gimple);
48 static void collect_object_sizes_for (struct object_size_info *, tree);
49 static void expr_object_size (struct object_size_info *, tree, tree);
50 static bool merge_object_sizes (struct object_size_info *, tree, tree,
51 unsigned HOST_WIDE_INT);
52 static bool plus_stmt_object_size (struct object_size_info *, tree, gimple);
53 static bool cond_expr_object_size (struct object_size_info *, tree, tree);
54 static unsigned int compute_object_sizes (void);
55 static void init_offset_limit (void);
56 static void check_for_plus_in_loops (struct object_size_info *, tree);
57 static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
58 unsigned int);
59
60 /* object_sizes[0] is the upper bound for the number of bytes till the end of
61 the object.
62 object_sizes[1] is the upper bound for the number of bytes till the end of
63 the subobject (innermost array or field with address taken).
64 object_sizes[2] is the lower bound for the number of bytes till the end of
65 the object and object_sizes[3] the lower bound for the subobject. */
66 static unsigned HOST_WIDE_INT *object_sizes[4];
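/* A minimal illustration of what the four object_size_type values mean,
   assuming the struct layout below (an example, not part of this file):

     struct S { char buf[16]; char post[8]; } s;
     char *p = &s.buf[4];

     __builtin_object_size (p, 0) == 20   maximum, whole object
     __builtin_object_size (p, 1) == 12   maximum, enclosing subobject
     __builtin_object_size (p, 2) == 20   minimum, whole object
     __builtin_object_size (p, 3) == 12   minimum, enclosing subobject  */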
67
68 /* Bitmaps recording which object sizes have been computed already. */
69 static bitmap computed[4];
70
71 /* Maximum offset value we still consider to be an addition. */
72 static unsigned HOST_WIDE_INT offset_limit;
73
74
75 /* Initialize OFFSET_LIMIT variable. */
76 static void
77 init_offset_limit (void)
78 {
79 if (host_integerp (TYPE_MAX_VALUE (sizetype), 1))
80 offset_limit = tree_low_cst (TYPE_MAX_VALUE (sizetype), 1);
81 else
82 offset_limit = -1;
83 offset_limit /= 2;
84 }
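/* A small sketch of the resulting limit, assuming common sizetype widths
   (example values only):

     32-bit sizetype: TYPE_MAX_VALUE == 0xffffffff,
                      offset_limit  == 0x7fffffff
     64-bit sizetype: offset_limit  == 0x7fffffffffffffff

   Offsets at or above offset_limit make the size unknown; this is how
   additions that really encode a pointer subtraction are filtered out
   (see merge_object_sizes and the comment before
   collect_object_sizes_for).  */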
85
86
87 /* Compute offset of EXPR within VAR. Return error_mark_node
88 if unknown. */
89
90 static tree
91 compute_object_offset (const_tree expr, const_tree var)
92 {
93 enum tree_code code = PLUS_EXPR;
94 tree base, off, t;
95
96 if (expr == var)
97 return size_zero_node;
98
99 switch (TREE_CODE (expr))
100 {
101 case COMPONENT_REF:
102 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
103 if (base == error_mark_node)
104 return base;
105
106 t = TREE_OPERAND (expr, 1);
107 off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
108 size_int (tree_low_cst (DECL_FIELD_BIT_OFFSET (t), 1)
109 / BITS_PER_UNIT));
110 break;
111
112 case REALPART_EXPR:
113 CASE_CONVERT:
114 case VIEW_CONVERT_EXPR:
115 case NON_LVALUE_EXPR:
116 return compute_object_offset (TREE_OPERAND (expr, 0), var);
117
118 case IMAGPART_EXPR:
119 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
120 if (base == error_mark_node)
121 return base;
122
123 off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
124 break;
125
126 case ARRAY_REF:
127 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
128 if (base == error_mark_node)
129 return base;
130
131 t = TREE_OPERAND (expr, 1);
132 if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
133 {
134 code = MINUS_EXPR;
135 t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
136 }
137 t = fold_convert (sizetype, t);
138 off = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (TREE_TYPE (expr)), t);
139 break;
140
141 default:
142 return error_mark_node;
143 }
144
145 return size_binop (code, base, off);
146 }
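/* A worked example of compute_object_offset, assuming the type below
   (illustration only):

     struct T { int a; int b[8]; } t;

   For EXPR == t.b[3] and VAR == t the COMPONENT_REF case contributes
     DECL_FIELD_OFFSET (b) + DECL_FIELD_BIT_OFFSET (b) / BITS_PER_UNIT
   and the ARRAY_REF case adds 3 * sizeof (int), so the result equals
   offsetof (struct T, b[3]).  A negative array index is folded with
   MINUS_EXPR instead, and any reference the switch does not handle
   yields error_mark_node.  */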
147
148
149 /* Compute __builtin_object_size for PTR, which is an ADDR_EXPR.
150 OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
151 If unknown, return unknown[object_size_type]. */
152
153 static unsigned HOST_WIDE_INT
154 addr_object_size (const_tree ptr, int object_size_type)
155 {
156 tree pt_var;
157
158 gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);
159
160 pt_var = TREE_OPERAND (ptr, 0);
161 if (REFERENCE_CLASS_P (pt_var))
162 pt_var = get_base_address (pt_var);
163
164 if (pt_var
165 && (SSA_VAR_P (pt_var) || TREE_CODE (pt_var) == STRING_CST)
166 && TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
167 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)), 1)
168 && (unsigned HOST_WIDE_INT)
169 tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)), 1) < offset_limit)
170 {
171 tree bytes;
172
173 if (pt_var != TREE_OPERAND (ptr, 0))
174 {
175 tree var;
176
177 if (object_size_type & 1)
178 {
179 var = TREE_OPERAND (ptr, 0);
180
181 while (var != pt_var
182 && TREE_CODE (var) != BIT_FIELD_REF
183 && TREE_CODE (var) != COMPONENT_REF
184 && TREE_CODE (var) != ARRAY_REF
185 && TREE_CODE (var) != ARRAY_RANGE_REF
186 && TREE_CODE (var) != REALPART_EXPR
187 && TREE_CODE (var) != IMAGPART_EXPR)
188 var = TREE_OPERAND (var, 0);
189 if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
190 var = TREE_OPERAND (var, 0);
191 if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
192 || ! host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (var)), 1)
193 || tree_int_cst_lt (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)),
194 TYPE_SIZE_UNIT (TREE_TYPE (var))))
195 var = pt_var;
196 }
197 else
198 var = pt_var;
199
200 bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
201 if (bytes != error_mark_node)
202 {
203 if (TREE_CODE (bytes) == INTEGER_CST
204 && tree_int_cst_lt (TYPE_SIZE_UNIT (TREE_TYPE (var)), bytes))
205 bytes = size_zero_node;
206 else
207 bytes = size_binop (MINUS_EXPR,
208 TYPE_SIZE_UNIT (TREE_TYPE (var)), bytes);
209 }
210 }
211 else
212 bytes = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
213
214 if (host_integerp (bytes, 1))
215 return tree_low_cst (bytes, 1);
216 }
217
218 return unknown[object_size_type];
219 }
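/* A sketch of the two cases handled above, assuming the layout below
   (example only):

     struct S { char a[8]; char b[8]; } s;

   For PTR == &s.a[2]:
     object_size_type 0 (whole object)  -> sizeof (s) - 2   == 14
     object_size_type 1 (subobject)     -> sizeof (s.a) - 2 == 6
   For PTR == &s, where pt_var is already the whole decl:
     both                               -> sizeof (s)       == 16  */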
220
221
222 /* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
223 Handles various allocation calls. OBJECT_SIZE_TYPE is the second
224 argument from __builtin_object_size. If unknown, return
225 unknown[object_size_type]. */
226
227 static unsigned HOST_WIDE_INT
228 alloc_object_size (const_gimple call, int object_size_type)
229 {
230 tree callee, bytes = NULL_TREE;
231 tree alloc_size;
232 int arg1 = -1, arg2 = -1;
233
234 gcc_assert (is_gimple_call (call));
235
236 callee = gimple_call_fndecl (call);
237 if (!callee)
238 return unknown[object_size_type];
239
240 alloc_size = lookup_attribute ("alloc_size", TYPE_ATTRIBUTES (TREE_TYPE (callee)));
241 if (alloc_size && TREE_VALUE (alloc_size))
242 {
243 tree p = TREE_VALUE (alloc_size);
244
245 arg1 = TREE_INT_CST_LOW (TREE_VALUE (p)) - 1;
246 if (TREE_CHAIN (p))
247 arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p))) - 1;
248 }
248 }
249
250 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
251 switch (DECL_FUNCTION_CODE (callee))
252 {
253 case BUILT_IN_CALLOC:
254 arg2 = 1;
255 /* fall through */
256 case BUILT_IN_MALLOC:
257 case BUILT_IN_ALLOCA:
258 arg1 = 0;
259 default:
260 break;
261 }
262
263 if (arg1 < 0 || arg1 >= (int)gimple_call_num_args (call)
264 || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
265 || (arg2 >= 0
266 && (arg2 >= (int)gimple_call_num_args (call)
267 || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
268 return unknown[object_size_type];
269
270 if (arg2 >= 0)
271 bytes = size_binop (MULT_EXPR,
272 fold_convert (sizetype, gimple_call_arg (call, arg1)),
273 fold_convert (sizetype, gimple_call_arg (call, arg2)));
274 else if (arg1 >= 0)
275 bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));
276
277 if (bytes && host_integerp (bytes, 1))
278 return tree_low_cst (bytes, 1);
279
280 return unknown[object_size_type];
281 }
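/* A user-level sketch of what alloc_object_size recognizes.  my_alloc is
   an assumed example declaration; malloc and calloc are the real builtins:

     void *my_alloc (int n, int sz) __attribute__ ((alloc_size (1, 2)));

     __builtin_object_size (malloc (32), 0)      == 32
     __builtin_object_size (calloc (4, 16), 0)   == 64
     __builtin_object_size (my_alloc (10, 8), 0) == 80
     __builtin_object_size (malloc (n), 0)       == (size_t) -1

   The last case is unknown because the argument is not an INTEGER_CST.  */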
282
283
284 /* If the object size is propagated from one of the function's arguments
285 directly to its return value, return that argument for GIMPLE_CALL
286 statement CALL. Otherwise return NULL. */
287
288 static tree
289 pass_through_call (const_gimple call)
290 {
291 tree callee = gimple_call_fndecl (call);
292
293 if (callee
294 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
295 switch (DECL_FUNCTION_CODE (callee))
296 {
297 case BUILT_IN_MEMCPY:
298 case BUILT_IN_MEMMOVE:
299 case BUILT_IN_MEMSET:
300 case BUILT_IN_STRCPY:
301 case BUILT_IN_STRNCPY:
302 case BUILT_IN_STRCAT:
303 case BUILT_IN_STRNCAT:
304 case BUILT_IN_MEMCPY_CHK:
305 case BUILT_IN_MEMMOVE_CHK:
306 case BUILT_IN_MEMSET_CHK:
307 case BUILT_IN_STRCPY_CHK:
308 case BUILT_IN_STRNCPY_CHK:
309 case BUILT_IN_STRCAT_CHK:
310 case BUILT_IN_STRNCAT_CHK:
311 if (gimple_call_num_args (call) >= 1)
312 return gimple_call_arg (call, 0);
313 break;
314 default:
315 break;
316 }
317
318 return NULL_TREE;
319 }
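/* A short sketch of the pass-through behaviour, assuming the usual
   <string.h> declarations (example only):

     char buf[10];
     char *p = memcpy (buf, src, 4);

   memcpy returns its first argument, so the object size of p is simply
   the object size of buf: __builtin_object_size (p, 0) == 10.  */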
320
321
322 /* Compute __builtin_object_size value for PTR. OBJECT_SIZE_TYPE is the
323 second argument from __builtin_object_size. */
324
325 unsigned HOST_WIDE_INT
326 compute_builtin_object_size (tree ptr, int object_size_type)
327 {
328 gcc_assert (object_size_type >= 0 && object_size_type <= 3);
329
330 if (! offset_limit)
331 init_offset_limit ();
332
333 if (TREE_CODE (ptr) == ADDR_EXPR)
334 return addr_object_size (ptr, object_size_type);
335
336 if (TREE_CODE (ptr) == SSA_NAME
337 && POINTER_TYPE_P (TREE_TYPE (ptr))
338 && object_sizes[object_size_type] != NULL)
339 {
340 if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
341 {
342 struct object_size_info osi;
343 bitmap_iterator bi;
344 unsigned int i;
345
346 if (dump_file)
347 {
348 fprintf (dump_file, "Computing %s %sobject size for ",
349 (object_size_type & 2) ? "minimum" : "maximum",
350 (object_size_type & 1) ? "sub" : "");
351 print_generic_expr (dump_file, ptr, dump_flags);
352 fprintf (dump_file, ":\n");
353 }
354
355 osi.visited = BITMAP_ALLOC (NULL);
356 osi.reexamine = BITMAP_ALLOC (NULL);
357 osi.object_size_type = object_size_type;
358 osi.depths = NULL;
359 osi.stack = NULL;
360 osi.tos = NULL;
361
362 /* First pass: walk UD chains, compute the object sizes that
363 can be computed. At the end the osi.reexamine bitmap will
364 contain the variables that were found in dependency cycles
365 and therefore need to be reexamined. */
366 osi.pass = 0;
367 osi.changed = false;
368 collect_object_sizes_for (&osi, ptr);
369
370 /* Second pass: keep recomputing object sizes of variables
371 that need reexamination, until no object sizes are
372 increased or all object sizes are computed. */
373 if (! bitmap_empty_p (osi.reexamine))
374 {
375 bitmap reexamine = BITMAP_ALLOC (NULL);
376
377 /* If looking for minimum instead of maximum object size,
378 detect cases where a pointer is increased in a loop.
379 Although even without this detection pass 2 would eventually
380 terminate, it could take a long time. If a pointer is
381 increasing this way, we need to assume 0 object size.
382 E.g. p = &buf[0]; while (cond) p = p + 4; */
383 if (object_size_type & 2)
384 {
385 osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
386 osi.stack = XNEWVEC (unsigned int, num_ssa_names);
387 osi.tos = osi.stack;
388 osi.pass = 1;
389 /* collect_object_sizes_for is changing
390 osi.reexamine bitmap, so iterate over a copy. */
391 bitmap_copy (reexamine, osi.reexamine);
392 EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
393 if (bitmap_bit_p (osi.reexamine, i))
394 check_for_plus_in_loops (&osi, ssa_name (i));
395
396 free (osi.depths);
397 osi.depths = NULL;
398 free (osi.stack);
399 osi.stack = NULL;
400 osi.tos = NULL;
401 }
402
403 do
404 {
405 osi.pass = 2;
406 osi.changed = false;
407 /* collect_object_sizes_for is changing
408 osi.reexamine bitmap, so iterate over a copy. */
409 bitmap_copy (reexamine, osi.reexamine);
410 EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
411 if (bitmap_bit_p (osi.reexamine, i))
412 {
413 collect_object_sizes_for (&osi, ssa_name (i));
414 if (dump_file && (dump_flags & TDF_DETAILS))
415 {
416 fprintf (dump_file, "Reexamining ");
417 print_generic_expr (dump_file, ssa_name (i),
418 dump_flags);
419 fprintf (dump_file, "\n");
420 }
421 }
422 }
423 while (osi.changed);
424
425 BITMAP_FREE (reexamine);
426 }
427 EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
428 bitmap_set_bit (computed[object_size_type], i);
429
430 /* Debugging dumps. */
431 if (dump_file)
432 {
433 EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
434 if (object_sizes[object_size_type][i]
435 != unknown[object_size_type])
436 {
437 print_generic_expr (dump_file, ssa_name (i),
438 dump_flags);
439 fprintf (dump_file,
440 ": %s %sobject size "
441 HOST_WIDE_INT_PRINT_UNSIGNED "\n",
442 (object_size_type & 2) ? "minimum" : "maximum",
443 (object_size_type & 1) ? "sub" : "",
444 object_sizes[object_size_type][i]);
445 }
446 }
447
448 BITMAP_FREE (osi.reexamine);
449 BITMAP_FREE (osi.visited);
450 }
451
452 return object_sizes[object_size_type][SSA_NAME_VERSION (ptr)];
453 }
454
455 return unknown[object_size_type];
456 }
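/* A minimal sketch, not a function of this file, of how a caller can use
   compute_builtin_object_size and detect the "unknown" result, which is
   (unsigned HOST_WIDE_INT) -1 for the maximum types 0 and 1 and 0 for the
   minimum types 2 and 3.  */

static inline bool
object_size_known_p_sketch (tree ptr, int object_size_type,
                            unsigned HOST_WIDE_INT *sizep)
{
  unsigned HOST_WIDE_INT size
    = compute_builtin_object_size (ptr, object_size_type);
  unsigned HOST_WIDE_INT unknown_size
    = (object_size_type < 2) ? (unsigned HOST_WIDE_INT) -1 : 0;

  if (size == unknown_size)
    return false;
  *sizep = size;
  return true;
}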
457
458 /* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME. */
459
460 static void
461 expr_object_size (struct object_size_info *osi, tree ptr, tree value)
462 {
463 int object_size_type = osi->object_size_type;
464 unsigned int varno = SSA_NAME_VERSION (ptr);
465 unsigned HOST_WIDE_INT bytes;
466
467 gcc_assert (object_sizes[object_size_type][varno]
468 != unknown[object_size_type]);
469 gcc_assert (osi->pass == 0);
470
471 if (TREE_CODE (value) == WITH_SIZE_EXPR)
472 value = TREE_OPERAND (value, 0);
473
474 /* Pointer variables should have been handled by merge_object_sizes. */
475 gcc_assert (TREE_CODE (value) != SSA_NAME
476 || !POINTER_TYPE_P (TREE_TYPE (value)));
477
478 if (TREE_CODE (value) == ADDR_EXPR)
479 bytes = addr_object_size (value, object_size_type);
480 else
481 bytes = unknown[object_size_type];
482
483 if ((object_size_type & 2) == 0)
484 {
485 if (object_sizes[object_size_type][varno] < bytes)
486 object_sizes[object_size_type][varno] = bytes;
487 }
488 else
489 {
490 if (object_sizes[object_size_type][varno] > bytes)
491 object_sizes[object_size_type][varno] = bytes;
492 }
493 }
494
495
496 /* Compute object_sizes for PTR, defined to the result of a call. */
497
498 static void
499 call_object_size (struct object_size_info *osi, tree ptr, gimple call)
500 {
501 int object_size_type = osi->object_size_type;
502 unsigned int varno = SSA_NAME_VERSION (ptr);
503 unsigned HOST_WIDE_INT bytes;
504
505 gcc_assert (is_gimple_call (call));
506
507 gcc_assert (object_sizes[object_size_type][varno]
508 != unknown[object_size_type]);
509 gcc_assert (osi->pass == 0);
510
511 bytes = alloc_object_size (call, object_size_type);
512
513 if ((object_size_type & 2) == 0)
514 {
515 if (object_sizes[object_size_type][varno] < bytes)
516 object_sizes[object_size_type][varno] = bytes;
517 }
518 else
519 {
520 if (object_sizes[object_size_type][varno] > bytes)
521 object_sizes[object_size_type][varno] = bytes;
522 }
523 }
524
525
526 /* Compute object_sizes for PTR, defined to an unknown value. */
527
528 static void
529 unknown_object_size (struct object_size_info *osi, tree ptr)
530 {
531 int object_size_type = osi->object_size_type;
532 unsigned int varno = SSA_NAME_VERSION (ptr);
533 unsigned HOST_WIDE_INT bytes;
534
535 gcc_assert (object_sizes[object_size_type][varno]
536 != unknown[object_size_type]);
537 gcc_assert (osi->pass == 0);
538
539 bytes = unknown[object_size_type];
540
541 if ((object_size_type & 2) == 0)
542 {
543 if (object_sizes[object_size_type][varno] < bytes)
544 object_sizes[object_size_type][varno] = bytes;
545 }
546 else
547 {
548 if (object_sizes[object_size_type][varno] > bytes)
549 object_sizes[object_size_type][varno] = bytes;
550 }
551 }
552
553
554 /* Merge object sizes of ORIG + OFFSET into DEST. Return true if
555 the object size might need reexamination later. */
556
557 static bool
558 merge_object_sizes (struct object_size_info *osi, tree dest, tree orig,
559 unsigned HOST_WIDE_INT offset)
560 {
561 int object_size_type = osi->object_size_type;
562 unsigned int varno = SSA_NAME_VERSION (dest);
563 unsigned HOST_WIDE_INT orig_bytes;
564
565 if (object_sizes[object_size_type][varno] == unknown[object_size_type])
566 return false;
567 if (offset >= offset_limit)
568 {
569 object_sizes[object_size_type][varno] = unknown[object_size_type];
570 return false;
571 }
572
573 if (osi->pass == 0)
574 collect_object_sizes_for (osi, orig);
575
576 orig_bytes = object_sizes[object_size_type][SSA_NAME_VERSION (orig)];
577 if (orig_bytes != unknown[object_size_type])
578 orig_bytes = (offset > orig_bytes)
579 ? (unsigned HOST_WIDE_INT) 0 : orig_bytes - offset;
580
581 if ((object_size_type & 2) == 0)
582 {
583 if (object_sizes[object_size_type][varno] < orig_bytes)
584 {
585 object_sizes[object_size_type][varno] = orig_bytes;
586 osi->changed = true;
587 }
588 }
589 else
590 {
591 if (object_sizes[object_size_type][varno] > orig_bytes)
592 {
593 object_sizes[object_size_type][varno] = orig_bytes;
594 osi->changed = true;
595 }
596 }
597 return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
598 }
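/* A tiny worked example of the merge for the maximum object size
   ((object_size_type & 2) == 0), assuming char buf[16]:

     p_1 = &buf[0];     object size of p_1 == 16
     q_2 = p_1 + 4;     merge_object_sizes (osi, q_2, p_1, 4)  -> 12
     r_3 = p_1 + 32;    offset exceeds the remaining bytes     -> 0

   An offset of offset_limit or more makes the result unknown instead,
   since it is assumed to encode a pointer subtraction.  */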
599
600
601 /* Compute object_sizes for VAR, defined to the result of an assignment
602 with operator POINTER_PLUS_EXPR. Return true if the object size might
603 need reexamination later. */
604
605 static bool
606 plus_stmt_object_size (struct object_size_info *osi, tree var, gimple stmt)
607 {
608 int object_size_type = osi->object_size_type;
609 unsigned int varno = SSA_NAME_VERSION (var);
610 unsigned HOST_WIDE_INT bytes;
611 tree op0, op1;
612
613 gcc_assert (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR);
614
615 op0 = gimple_assign_rhs1 (stmt);
616 op1 = gimple_assign_rhs2 (stmt);
617
618 if (object_sizes[object_size_type][varno] == unknown[object_size_type])
619 return false;
620
621 /* Handle PTR + OFFSET here. */
622 if (TREE_CODE (op1) == INTEGER_CST
623 && (TREE_CODE (op0) == SSA_NAME
624 || TREE_CODE (op0) == ADDR_EXPR))
625 {
626 if (! host_integerp (op1, 1))
627 bytes = unknown[object_size_type];
628 else if (TREE_CODE (op0) == SSA_NAME)
629 return merge_object_sizes (osi, var, op0, tree_low_cst (op1, 1));
630 else
631 {
632 unsigned HOST_WIDE_INT off = tree_low_cst (op1, 1);
633
634 /* op0 will be ADDR_EXPR here. */
635 bytes = compute_builtin_object_size (op0, object_size_type);
636 if (bytes == unknown[object_size_type])
637 ;
638 else if (off > offset_limit)
639 bytes = unknown[object_size_type];
640 else if (off > bytes)
641 bytes = 0;
642 else
643 bytes -= off;
644 }
645 }
646 else
647 bytes = unknown[object_size_type];
648
649 if ((object_size_type & 2) == 0)
650 {
651 if (object_sizes[object_size_type][varno] < bytes)
652 object_sizes[object_size_type][varno] = bytes;
653 }
654 else
655 {
656 if (object_sizes[object_size_type][varno] > bytes)
657 object_sizes[object_size_type][varno] = bytes;
658 }
659 return false;
660 }
661
662
663 /* Compute object_sizes for VAR, defined to VALUE, which is
664 a COND_EXPR. Return true if the object size might need reexamination
665 later. */
666
667 static bool
668 cond_expr_object_size (struct object_size_info *osi, tree var, tree value)
669 {
670 tree then_, else_;
671 int object_size_type = osi->object_size_type;
672 unsigned int varno = SSA_NAME_VERSION (var);
673 bool reexamine = false;
674
675 gcc_assert (TREE_CODE (value) == COND_EXPR);
676
677 if (object_sizes[object_size_type][varno] == unknown[object_size_type])
678 return false;
679
680 then_ = COND_EXPR_THEN (value);
681 else_ = COND_EXPR_ELSE (value);
682
683 if (TREE_CODE (then_) == SSA_NAME)
684 reexamine |= merge_object_sizes (osi, var, then_, 0);
685 else
686 expr_object_size (osi, var, then_);
687
688 if (TREE_CODE (else_) == SSA_NAME)
689 reexamine |= merge_object_sizes (osi, var, else_, 0);
690 else
691 expr_object_size (osi, var, else_);
692
693 return reexamine;
694 }
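/* A small example of the COND_EXPR merge, assuming char a[8] and
   char b[32]:

     p = cond ? &a[0] : &b[0];

   For the maximum object size (type 0) the larger candidate wins, giving
   32; for the minimum object size (type 2) the smaller one wins,
   giving 8.  */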
695
696 /* Compute object sizes for VAR.
697 For ADDR_EXPR an object size is the number of remaining bytes
698 to the end of the object (where what is considered an object depends on
699 OSI->object_size_type).
700 For an allocation GIMPLE_CALL like malloc or calloc, the object size is
701 the size of the allocation.
702 For POINTER_PLUS_EXPR where the second operand is a constant integer,
703 the object size is the object size of the first operand minus the constant.
704 If the constant is bigger than the number of remaining bytes until the
705 end of the object, the object size is 0; if the addition instead encodes
706 a pointer subtraction, the object size is unknown[object_size_type].
707 To differentiate addition from subtraction, ADDR_EXPR returns
708 unknown[object_size_type] for all objects bigger than half of the address
709 space; constants less than half of the address space are treated as
710 additions, bigger constants as subtractions.
711 For a memcpy-like GIMPLE_CALL that always returns one of its arguments,
712 the object size is the object size of that argument.
713 Otherwise, object size is the maximum of object sizes of variables
714 that it might be set to. */
715
716 static void
717 collect_object_sizes_for (struct object_size_info *osi, tree var)
718 {
719 int object_size_type = osi->object_size_type;
720 unsigned int varno = SSA_NAME_VERSION (var);
721 gimple stmt;
722 bool reexamine;
723
724 if (bitmap_bit_p (computed[object_size_type], varno))
725 return;
726
727 if (osi->pass == 0)
728 {
729 if (! bitmap_bit_p (osi->visited, varno))
730 {
731 bitmap_set_bit (osi->visited, varno);
732 object_sizes[object_size_type][varno]
733 = (object_size_type & 2) ? -1 : 0;
734 }
735 else
736 {
737 /* Found a dependency loop. Mark the variable for later
738 re-examination. */
739 bitmap_set_bit (osi->reexamine, varno);
740 if (dump_file && (dump_flags & TDF_DETAILS))
741 {
742 fprintf (dump_file, "Found a dependency loop at ");
743 print_generic_expr (dump_file, var, dump_flags);
744 fprintf (dump_file, "\n");
745 }
746 return;
747 }
748 }
749
750 if (dump_file && (dump_flags & TDF_DETAILS))
751 {
752 fprintf (dump_file, "Visiting use-def links for ");
753 print_generic_expr (dump_file, var, dump_flags);
754 fprintf (dump_file, "\n");
755 }
756
757 stmt = SSA_NAME_DEF_STMT (var);
758 reexamine = false;
759
760 switch (gimple_code (stmt))
761 {
762 case GIMPLE_ASSIGN:
763 {
764 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
765 reexamine = plus_stmt_object_size (osi, var, stmt);
766 else if (gimple_assign_single_p (stmt)
767 || gimple_assign_unary_nop_p (stmt))
768 {
769 tree rhs = gimple_assign_rhs1 (stmt);
770
771 if (TREE_CODE (rhs) == SSA_NAME
772 && POINTER_TYPE_P (TREE_TYPE (rhs)))
773 reexamine = merge_object_sizes (osi, var, rhs, 0);
774 else if (TREE_CODE (rhs) == COND_EXPR)
775 reexamine = cond_expr_object_size (osi, var, rhs);
776 else
777 expr_object_size (osi, var, rhs);
778 }
779 else
780 unknown_object_size (osi, var);
781 break;
782 }
783
784 case GIMPLE_CALL:
785 {
786 tree arg = pass_through_call (stmt);
787 if (arg)
788 {
789 if (TREE_CODE (arg) == SSA_NAME
790 && POINTER_TYPE_P (TREE_TYPE (arg)))
791 reexamine = merge_object_sizes (osi, var, arg, 0);
792 else if (TREE_CODE (arg) == COND_EXPR)
793 reexamine = cond_expr_object_size (osi, var, arg);
794 else
795 expr_object_size (osi, var, arg);
796 }
797 else
798 call_object_size (osi, var, stmt);
799 break;
800 }
801
802 case GIMPLE_ASM:
803 /* Pointers defined by __asm__ statements can point anywhere. */
804 object_sizes[object_size_type][varno] = unknown[object_size_type];
805 break;
806
807 case GIMPLE_NOP:
808 {
809 tree decl = SSA_NAME_VAR (var);
810
811 if (TREE_CODE (decl) != PARM_DECL && DECL_INITIAL (decl))
812 expr_object_size (osi, var, DECL_INITIAL (decl));
813 else
814 expr_object_size (osi, var, decl);
815 }
816 break;
817
818 case GIMPLE_PHI:
819 {
820 unsigned i;
821
822 for (i = 0; i < gimple_phi_num_args (stmt); i++)
823 {
824 tree rhs = gimple_phi_arg (stmt, i)->def;
825
826 if (object_sizes[object_size_type][varno]
827 == unknown[object_size_type])
828 break;
829
830 if (TREE_CODE (rhs) == SSA_NAME)
831 reexamine |= merge_object_sizes (osi, var, rhs, 0);
832 else if (osi->pass == 0)
833 expr_object_size (osi, var, rhs);
834 }
835 break;
836 }
837
838 default:
839 gcc_unreachable ();
840 }
841
842 if (! reexamine
843 || object_sizes[object_size_type][varno] == unknown[object_size_type])
844 {
845 bitmap_set_bit (computed[object_size_type], varno);
846 bitmap_clear_bit (osi->reexamine, varno);
847 }
848 else
849 {
850 bitmap_set_bit (osi->reexamine, varno);
851 if (dump_file && (dump_flags & TDF_DETAILS))
852 {
853 fprintf (dump_file, "Need to reexamine ");
854 print_generic_expr (dump_file, var, dump_flags);
855 fprintf (dump_file, "\n");
856 }
857 }
858 }
859
860
861 /* Helper function for check_for_plus_in_loops. Called recursively
862 to detect loops. */
863
864 static void
865 check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
866 unsigned int depth)
867 {
868 gimple stmt = SSA_NAME_DEF_STMT (var);
869 unsigned int varno = SSA_NAME_VERSION (var);
870
871 if (osi->depths[varno])
872 {
873 if (osi->depths[varno] != depth)
874 {
875 unsigned int *sp;
876
877 /* Found a loop involving pointer addition. */
878 for (sp = osi->tos; sp > osi->stack; )
879 {
880 --sp;
881 bitmap_clear_bit (osi->reexamine, *sp);
882 bitmap_set_bit (computed[osi->object_size_type], *sp);
883 object_sizes[osi->object_size_type][*sp] = 0;
884 if (*sp == varno)
885 break;
886 }
887 }
888 return;
889 }
890 else if (! bitmap_bit_p (osi->reexamine, varno))
891 return;
892
893 osi->depths[varno] = depth;
894 *osi->tos++ = varno;
895
896 switch (gimple_code (stmt))
897 {
898
899 case GIMPLE_ASSIGN:
900 {
901 if ((gimple_assign_single_p (stmt)
902 || gimple_assign_unary_nop_p (stmt))
903 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
904 {
905 tree rhs = gimple_assign_rhs1 (stmt);
906
907 check_for_plus_in_loops_1 (osi, rhs, depth);
908 }
909 else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
910 {
911 tree basevar = gimple_assign_rhs1 (stmt);
912 tree cst = gimple_assign_rhs2 (stmt);
913
914 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
915
916 check_for_plus_in_loops_1 (osi, basevar,
917 depth + !integer_zerop (cst));
918 }
919 else
920 gcc_unreachable ();
921 break;
922 }
923
924 case GIMPLE_CALL:
925 {
926 tree arg = pass_through_call (stmt);
927 if (arg)
928 {
929 if (TREE_CODE (arg) == SSA_NAME)
930 check_for_plus_in_loops_1 (osi, arg, depth);
931 else
932 gcc_unreachable ();
933 }
934 break;
935 }
936
937 case GIMPLE_PHI:
938 {
939 unsigned i;
940
941 for (i = 0; i < gimple_phi_num_args (stmt); i++)
942 {
943 tree rhs = gimple_phi_arg (stmt, i)->def;
944
945 if (TREE_CODE (rhs) == SSA_NAME)
946 check_for_plus_in_loops_1 (osi, rhs, depth);
947 }
948 break;
949 }
950
951 default:
952 gcc_unreachable ();
953 }
954
955 osi->depths[varno] = 0;
956 osi->tos--;
957 }
958
959
960 /* Check whether some pointer whose object size we are computing is being
961 increased within a loop. If so, assume all the SSA variables participating
962 in that loop have minimum object size 0. */
963
964 static void
965 check_for_plus_in_loops (struct object_size_info *osi, tree var)
966 {
967 gimple stmt = SSA_NAME_DEF_STMT (var);
968
969 /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
970 and looked for a POINTER_PLUS_EXPR in the pass-through
971 argument, if any. In GIMPLE, however, such an expression
972 is not a valid call operand. */
973
974 if (is_gimple_assign (stmt)
975 && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
976 {
977 tree basevar = gimple_assign_rhs1 (stmt);
978 tree cst = gimple_assign_rhs2 (stmt);
979
980 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
981
982 if (integer_zerop (cst))
983 return;
984
985 osi->depths[SSA_NAME_VERSION (basevar)] = 1;
986 *osi->tos++ = SSA_NAME_VERSION (basevar);
987 check_for_plus_in_loops_1 (osi, var, 2);
988 osi->depths[SSA_NAME_VERSION (basevar)] = 0;
989 osi->tos--;
990 }
991 }
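/* The loop shape this detection is after, in GIMPLE-like form (sketch):

     p_1 = &buf[0];
     <loop>:
     p_2 = PHI <p_1, p_3>;
     ...
     p_3 = p_2 + 4;

   Every SSA name on the cycle through a POINTER_PLUS_EXPR with a nonzero
   constant gets minimum object size 0, instead of letting pass 2 iterate
   until the sizes converge.  */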
992
993
994 /* Initialize data structures for the object size computation. */
995
996 void
997 init_object_sizes (void)
998 {
999 int object_size_type;
1000
1001 if (object_sizes[0])
1002 return;
1003
1004 for (object_size_type = 0; object_size_type <= 3; object_size_type++)
1005 {
1006 object_sizes[object_size_type] = XNEWVEC (unsigned HOST_WIDE_INT, num_ssa_names);
1007 computed[object_size_type] = BITMAP_ALLOC (NULL);
1008 }
1009
1010 init_offset_limit ();
1011 }
1012
1013
1014 /* Destroy data structures after the object size computation. */
1015
1016 void
1017 fini_object_sizes (void)
1018 {
1019 int object_size_type;
1020
1021 for (object_size_type = 0; object_size_type <= 3; object_size_type++)
1022 {
1023 free (object_sizes[object_size_type]);
1024 BITMAP_FREE (computed[object_size_type]);
1025 object_sizes[object_size_type] = NULL;
1026 }
1027 }
1028
1029
1030 /* Simple pass to optimize all __builtin_object_size () builtins. */
1031
1032 static unsigned int
1033 compute_object_sizes (void)
1034 {
1035 basic_block bb;
1036 FOR_EACH_BB (bb)
1037 {
1038 gimple_stmt_iterator i;
1039 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
1040 {
1041 tree callee, result;
1042 gimple call = gsi_stmt (i);
1043
1044 if (gimple_code (call) != GIMPLE_CALL)
1045 continue;
1046
1047 callee = gimple_call_fndecl (call);
1048 if (!callee
1049 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
1050 || DECL_FUNCTION_CODE (callee) != BUILT_IN_OBJECT_SIZE)
1051 continue;
1052
1053 init_object_sizes ();
1054 result = fold_call_stmt (call, false);
1055 if (!result)
1056 {
1057 if (gimple_call_num_args (call) == 2
1058 && POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
1059 {
1060 tree ost = gimple_call_arg (call, 1);
1061
1062 if (host_integerp (ost, 1))
1063 {
1064 unsigned HOST_WIDE_INT object_size_type
1065 = tree_low_cst (ost, 1);
1066
1067 if (object_size_type < 2)
1068 result = fold_convert (size_type_node,
1069 integer_minus_one_node);
1070 else if (object_size_type < 4)
1071 result = size_zero_node;
1072 }
1073 }
1074
1075 if (!result)
1076 continue;
1077 }
1078
1079 if (dump_file && (dump_flags & TDF_DETAILS))
1080 {
1081 fprintf (dump_file, "Simplified\n ");
1082 print_gimple_stmt (dump_file, call, 0, dump_flags);
1083 }
1084
1085 if (!update_call_from_tree (&i, result))
1086 gcc_unreachable ();
1087
1088 /* NOTE: In the pre-tuples code, we called update_stmt here. This is
1089 now handled by gsi_replace, called from update_call_from_tree. */
1090
1091 if (dump_file && (dump_flags & TDF_DETAILS))
1092 {
1093 fprintf (dump_file, "to\n ");
1094 print_gimple_stmt (dump_file, call, 0, dump_flags);
1095 fprintf (dump_file, "\n");
1096 }
1097 }
1098 }
1099
1100 fini_object_sizes ();
1101 return 0;
1102 }
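/* What the fallback above means at the source level when the size cannot
   be determined (sketch, assuming an unknown pointer p):

     __builtin_object_size (p, 0) -> (size_t) -1
     __builtin_object_size (p, 1) -> (size_t) -1
     __builtin_object_size (p, 2) -> 0
     __builtin_object_size (p, 3) -> 0

   so checks built on top of it (such as the *_chk builtins handled above)
   degrade to unchecked behaviour rather than producing false positives.  */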
1103
1104 struct gimple_opt_pass pass_object_sizes =
1105 {
1106 {
1107 GIMPLE_PASS,
1108 "objsz", /* name */
1109 NULL, /* gate */
1110 compute_object_sizes, /* execute */
1111 NULL, /* sub */
1112 NULL, /* next */
1113 0, /* static_pass_number */
1114 0, /* tv_id */
1115 PROP_cfg | PROP_ssa | PROP_alias, /* properties_required */
1116 0, /* properties_provided */
1117 0, /* properties_destroyed */
1118 0, /* todo_flags_start */
1119 TODO_dump_func | TODO_verify_ssa /* todo_flags_finish */
1120 }
1121 };