111
|
1 /* Pointer Bounds Checker instrumentation pass.
|
|
2 Copyright (C) 2014-2017 Free Software Foundation, Inc.
|
|
3 Contributed by Ilya Enkovich (ilya.enkovich@intel.com)
|
|
4
|
|
5 This file is part of GCC.
|
|
6
|
|
7 GCC is free software; you can redistribute it and/or modify it under
|
|
8 the terms of the GNU General Public License as published by the Free
|
|
9 Software Foundation; either version 3, or (at your option) any later
|
|
10 version.
|
|
11
|
|
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
|
|
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
|
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
|
|
15 for more details.
|
|
16
|
|
17 You should have received a copy of the GNU General Public License
|
|
18 along with GCC; see the file COPYING3. If not see
|
|
19 <http://www.gnu.org/licenses/>. */
|
|
20
|
|
21 #include "config.h"
|
|
22 #include "system.h"
|
|
23 #include "coretypes.h"
|
|
24 #include "backend.h"
|
|
25 #include "target.h"
|
|
26 #include "rtl.h"
|
|
27 #include "tree.h"
|
|
28 #include "gimple.h"
|
|
29 #include "cfghooks.h"
|
|
30 #include "tree-pass.h"
|
|
31 #include "ssa.h"
|
|
32 #include "cgraph.h"
|
|
33 #include "diagnostic.h"
|
|
34 #include "fold-const.h"
|
|
35 #include "stor-layout.h"
|
|
36 #include "varasm.h"
|
|
37 #include "tree-iterator.h"
|
|
38 #include "tree-cfg.h"
|
|
39 #include "langhooks.h"
|
|
40 #include "tree-ssa-address.h"
|
|
41 #include "tree-ssa-loop-niter.h"
|
|
42 #include "gimple-pretty-print.h"
|
|
43 #include "gimple-iterator.h"
|
|
44 #include "gimplify.h"
|
|
45 #include "gimplify-me.h"
|
|
46 #include "print-tree.h"
|
|
47 #include "calls.h"
|
|
48 #include "expr.h"
|
|
49 #include "tree-ssa-propagate.h"
|
|
50 #include "tree-chkp.h"
|
|
51 #include "gimple-walk.h"
|
|
52 #include "tree-dfa.h"
|
|
53 #include "ipa-chkp.h"
|
|
54 #include "params.h"
|
|
55 #include "stringpool.h"
|
|
56 #include "attribs.h"
|
|
57
|
|
58 /* Pointer Bounds Checker instruments code with memory checks to find
|
|
59 out-of-bounds memory accesses. Checks are performed by computing
|
|
60 bounds for each pointer and then comparing address of accessed
|
|
61 memory before pointer dereferencing.
|
|
62
|
|
63 1. Function clones.
|
|
64
|
|
65 See ipa-chkp.c.
|
|
66
|
|
67 2. Instrumentation.
|
|
68
|
|
69 There are few things to instrument:
|
|
70
|
|
71 a) Memory accesses - add checker calls to check address of accessed memory
|
|
72 against bounds of dereferenced pointer. Obviously safe memory
|
|
73 accesses like static variable access does not have to be instrumented
|
|
74 with checks.
|
|
75
|
|
76 Example:
|
|
77
|
|
78 val_2 = *p_1;
|
|
79
|
|
80 with 4 bytes access is transformed into:
|
|
81
|
|
82 __builtin___chkp_bndcl (__bound_tmp.1_3, p_1);
|
|
83 D.1_4 = p_1 + 3;
|
|
84 __builtin___chkp_bndcu (__bound_tmp.1_3, D.1_4);
|
|
85 val_2 = *p_1;
|
|
86
|
|
87 where __bound_tmp.1_3 are bounds computed for pointer p_1,
|
|
88 __builtin___chkp_bndcl is a lower bound check and
|
|
89 __builtin___chkp_bndcu is an upper bound check.
|
|
90
|
|
91 b) Pointer stores.
|
|
92
|
|
93 When pointer is stored in memory we need to store its bounds. To
|
|
94 achieve compatibility of instrumented code with regular codes
|
|
95 we have to keep data layout and store bounds in special bound tables
|
|
96 via special checker call. Implementation of bounds table may vary for
|
|
97 different platforms. It has to associate pointer value and its
|
|
98 location (it is required because we may have two equal pointers
|
|
99 with different bounds stored in different places) with bounds.
|
|
100 Another checker builtin allows to get bounds for specified pointer
|
|
101 loaded from specified location.
|
|
102
|
|
103 Example:
|
|
104
|
|
105 buf1[i_1] = &buf2;
|
|
106
|
|
107 is transformed into:
|
|
108
|
|
109 buf1[i_1] = &buf2;
|
|
110 D.1_2 = &buf1[i_1];
|
|
111 __builtin___chkp_bndstx (D.1_2, &buf2, __bound_tmp.1_2);
|
|
112
|
|
113 where __bound_tmp.1_2 are bounds of &buf2.
|
|
114
|
|
115 c) Static initialization.
|
|
116
|
|
117 The special case of pointer store is static pointer initialization.
|
|
118 Bounds initialization is performed in a few steps:
|
|
119 - register all static initializations in front-end using
|
|
120 chkp_register_var_initializer
|
|
121 - when file compilation finishes we create functions with special
|
|
122 attribute 'chkp ctor' and put explicit initialization code
|
|
123 (assignments) for all statically initialized pointers.
|
|
124 - when checker constructor is compiled checker pass adds required
|
|
125 bounds initialization for all statically initialized pointers
|
|
126 - since we do not actually need excess pointers initialization
|
|
127 in checker constructor we remove such assignments from them
|
|
128
|
|
129 d) Calls.
|
|
130
|
|
131 For each call in the code we add additional arguments to pass
|
|
132 bounds for pointer arguments. We determine type of call arguments
|
|
133 using arguments list from function declaration; if function
|
|
134 declaration is not available we use function type; otherwise
|
|
135 (e.g. for unnamed arguments) we use type of passed value. Function
|
|
136 declaration/type is replaced with the instrumented one.
|
|
137
|
|
138 Example:
|
|
139
|
|
140 val_1 = foo (&buf1, &buf2, &buf1, 0);
|
|
141
|
|
142 is translated into:
|
|
143
|
|
144 val_1 = foo.chkp (&buf1, __bound_tmp.1_2, &buf2, __bound_tmp.1_3,
|
|
145 &buf1, __bound_tmp.1_2, 0);
|
|
146
|
|
147 e) Returns.
|
|
148
|
|
149 If function returns a pointer value we have to return bounds also.
|
|
150 A new operand was added for return statement to hold returned bounds.
|
|
151
|
|
152 Example:
|
|
153
|
|
154 return &_buf1;
|
|
155
|
|
156 is transformed into
|
|
157
|
|
158 return &_buf1, __bound_tmp.1_1;
|
|
159
|
|
160 3. Bounds computation.
|
|
161
|
|
162 Compiler is fully responsible for computing bounds to be used for each
|
|
163 memory access. The first step for bounds computation is to find the
|
|
164 origin of pointer dereferenced for memory access. Basing on pointer
|
|
165 origin we define a way to compute its bounds. There are just few
|
|
166 possible cases:
|
|
167
|
|
168 a) Pointer is returned by call.
|
|
169
|
|
170 In this case we use corresponding checker builtin method to obtain returned
|
|
171 bounds.
|
|
172
|
|
173 Example:
|
|
174
|
|
175 buf_1 = malloc (size_2);
|
|
176 foo (buf_1);
|
|
177
|
|
178 is translated into:
|
|
179
|
|
180 buf_1 = malloc (size_2);
|
|
181 __bound_tmp.1_3 = __builtin___chkp_bndret (buf_1);
|
|
182 foo (buf_1, __bound_tmp.1_3);
|
|
183
|
|
184 b) Pointer is an address of an object.
|
|
185
|
|
186 In this case compiler tries to compute objects size and create corresponding
|
|
187 bounds. If object has incomplete type then special checker builtin is used to
|
|
188 obtain its size at runtime.
|
|
189
|
|
190 Example:
|
|
191
|
|
192 foo ()
|
|
193 {
|
|
194 <unnamed type> __bound_tmp.3;
|
|
195 static int buf[100];
|
|
196
|
|
197 <bb 3>:
|
|
198 __bound_tmp.3_2 = __builtin___chkp_bndmk (&buf, 400);
|
|
199
|
|
200 <bb 2>:
|
|
201 return &buf, __bound_tmp.3_2;
|
|
202 }
|
|
203
|
|
204 Example:
|
|
205
|
|
206 Address of an object 'extern int buf[]' with incomplete type is
|
|
207 returned.
|
|
208
|
|
209 foo ()
|
|
210 {
|
|
211 <unnamed type> __bound_tmp.4;
|
|
212 long unsigned int __size_tmp.3;
|
|
213
|
|
214 <bb 3>:
|
|
215 __size_tmp.3_4 = __builtin_ia32_sizeof (buf);
|
|
216 __bound_tmp.4_3 = __builtin_ia32_bndmk (&buf, __size_tmp.3_4);
|
|
217
|
|
218 <bb 2>:
|
|
219 return &buf, __bound_tmp.4_3;
|
|
220 }
|
|
221
|
|
222 c) Pointer is the result of object narrowing.
|
|
223
|
|
224 It happens when we use pointer to an object to compute pointer to a part
|
|
225 of an object. E.g. we take pointer to a field of a structure. In this
|
|
226 case we perform bounds intersection using bounds of original object and
|
|
227 bounds of object's part (which are computed basing on its type).
|
|
228
|
|
229 There may be some debatable questions about when narrowing should occur
|
|
230 and when it should not. To avoid false bound violations in correct
|
|
231 programs we do not perform narrowing when address of an array element is
|
|
232 obtained (it has address of the whole array) and when address of the first
|
|
233 structure field is obtained (because it is guaranteed to be equal to
|
|
234 address of the whole structure and it is legal to cast it back to structure).
|
|
235
|
|
236 Default narrowing behavior may be changed using compiler flags.
|
|
237
|
|
238 Example:
|
|
239
|
|
240 In this example address of the second structure field is returned.
|
|
241
|
|
242 foo (struct A * p, __bounds_type __bounds_of_p)
|
|
243 {
|
|
244 <unnamed type> __bound_tmp.3;
|
|
245 int * _2;
|
|
246 int * _5;
|
|
247
|
|
248 <bb 2>:
|
|
249 _5 = &p_1(D)->second_field;
|
|
250 __bound_tmp.3_6 = __builtin___chkp_bndmk (_5, 4);
|
|
251 __bound_tmp.3_8 = __builtin___chkp_intersect (__bound_tmp.3_6,
|
|
252 __bounds_of_p_3(D));
|
|
253 _2 = &p_1(D)->second_field;
|
|
254 return _2, __bound_tmp.3_8;
|
|
255 }
|
|
256
|
|
257 Example:
|
|
258
|
|
259 In this example address of the first field of array element is returned.
|
|
260
|
|
261 foo (struct A * p, __bounds_type __bounds_of_p, int i)
|
|
262 {
|
|
263 long unsigned int _3;
|
|
264 long unsigned int _4;
|
|
265 struct A * _6;
|
|
266 int * _7;
|
|
267
|
|
268 <bb 2>:
|
|
269 _3 = (long unsigned int) i_1(D);
|
|
270 _4 = _3 * 8;
|
|
271 _6 = p_5(D) + _4;
|
|
272 _7 = &_6->first_field;
|
|
273 return _7, __bounds_of_p_2(D);
|
|
274 }
|
|
275
|
|
276
|
|
277 d) Pointer is the result of pointer arithmetic or type cast.
|
|
278
|
|
279 In this case bounds of the base pointer are used. In case of binary
|
|
280 operation producing a pointer we are analyzing data flow further
|
|
281 looking for operand's bounds. One operand is considered as a base
|
|
282 if it has some valid bounds. If we fall into a case when none of
|
|
283 operands (or both of them) has valid bounds, a default bounds value
|
|
284 is used.
|
|
285
|
|
286 Trying to find out bounds for binary operations we may fall into
|
|
287 cyclic dependencies for pointers. To avoid infinite recursion all
|
|
288 walked phi nodes instantly obtain corresponding bounds but created
|
|
289 bounds are marked as incomplete. It helps us to stop DF walk during
|
|
290 bounds search.
|
|
291
|
|
292 When we reach pointer source, some args of incomplete bounds phi obtain
|
|
293 valid bounds and those values are propagated further through phi nodes.
|
|
294 If no valid bounds were found for phi node then we mark its result as
|
|
295 invalid bounds. Process stops when all incomplete bounds become either
|
|
296 valid or invalid and we are able to choose a pointer base.
|
|
297
|
|
298 e) Pointer is loaded from the memory.
|
|
299
|
|
300 In this case we just need to load bounds from the bounds table.
|
|
301
|
|
302 Example:
|
|
303
|
|
304 foo ()
|
|
305 {
|
|
306 <unnamed type> __bound_tmp.3;
|
|
307 static int * buf;
|
|
308 int * _2;
|
|
309
|
|
310 <bb 2>:
|
|
311 _2 = buf;
|
|
312 __bound_tmp.3_4 = __builtin___chkp_bndldx (&buf, _2);
|
|
313 return _2, __bound_tmp.3_4;
|
|
314 }
|
|
315
|
|
316 */
|
|
317
|
|
318 typedef void (*assign_handler)(tree, tree, void *);
|
|
319
|
|
320 static tree chkp_get_zero_bounds ();
|
|
321 static tree chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter);
|
|
322 static tree chkp_find_bounds_loaded (tree ptr, tree ptr_src,
|
|
323 gimple_stmt_iterator *iter);
|
|
324 static void chkp_parse_array_and_component_ref (tree node, tree *ptr,
|
|
325 tree *elt, bool *safe,
|
|
326 bool *bitfield,
|
|
327 tree *bounds,
|
|
328 gimple_stmt_iterator *iter,
|
|
329 bool innermost_bounds);
|
|
330 static void chkp_parse_bit_field_ref (tree node, location_t loc,
|
|
331 tree *offset, tree *size);
|
|
332 static tree
|
|
333 chkp_make_addressed_object_bounds (tree obj, gimple_stmt_iterator *iter);
|
|
334
|
|
335 #define chkp_bndldx_fndecl \
|
|
336 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDLDX))
|
|
337 #define chkp_bndstx_fndecl \
|
|
338 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDSTX))
|
|
339 #define chkp_checkl_fndecl \
|
|
340 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCL))
|
|
341 #define chkp_checku_fndecl \
|
|
342 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCU))
|
|
343 #define chkp_bndmk_fndecl \
|
|
344 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDMK))
|
|
345 #define chkp_ret_bnd_fndecl \
|
|
346 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDRET))
|
|
347 #define chkp_intersect_fndecl \
|
|
348 (targetm.builtin_chkp_function (BUILT_IN_CHKP_INTERSECT))
|
|
349 #define chkp_narrow_bounds_fndecl \
|
|
350 (targetm.builtin_chkp_function (BUILT_IN_CHKP_NARROW))
|
|
351 #define chkp_sizeof_fndecl \
|
|
352 (targetm.builtin_chkp_function (BUILT_IN_CHKP_SIZEOF))
|
|
353 #define chkp_extract_lower_fndecl \
|
|
354 (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_LOWER))
|
|
355 #define chkp_extract_upper_fndecl \
|
|
356 (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_UPPER))
|
|
357
|
|
358 static GTY (()) tree chkp_uintptr_type;
|
|
359
|
|
360 static GTY (()) tree chkp_zero_bounds_var;
|
|
361 static GTY (()) tree chkp_none_bounds_var;
|
|
362
|
|
363 static GTY (()) basic_block entry_block;
|
|
364 static GTY (()) tree zero_bounds;
|
|
365 static GTY (()) tree none_bounds;
|
|
366 static GTY (()) tree incomplete_bounds;
|
|
367 static GTY (()) tree tmp_var;
|
|
368 static GTY (()) tree size_tmp_var;
|
|
369 static GTY (()) bitmap chkp_abnormal_copies;
|
|
370
|
|
371 struct hash_set<tree> *chkp_invalid_bounds;
|
|
372 struct hash_set<tree> *chkp_completed_bounds_set;
|
|
373 struct hash_map<tree, tree> *chkp_reg_bounds;
|
|
374 struct hash_map<tree, tree> *chkp_bound_vars;
|
|
375 struct hash_map<tree, tree> *chkp_reg_addr_bounds;
|
|
376 struct hash_map<tree, tree> *chkp_incomplete_bounds_map;
|
|
377 struct hash_map<tree, tree> *chkp_bounds_map;
|
|
378 struct hash_map<tree, tree> *chkp_static_var_bounds;
|
|
379
|
|
380 static bool in_chkp_pass;
|
|
381
|
|
382 #define CHKP_BOUND_TMP_NAME "__bound_tmp"
|
|
383 #define CHKP_SIZE_TMP_NAME "__size_tmp"
|
|
384 #define CHKP_BOUNDS_OF_SYMBOL_PREFIX "__chkp_bounds_of_"
|
|
385 #define CHKP_STRING_BOUNDS_PREFIX "__chkp_string_bounds_"
|
|
386 #define CHKP_VAR_BOUNDS_PREFIX "__chkp_var_bounds_"
|
|
387 #define CHKP_ZERO_BOUNDS_VAR_NAME "__chkp_zero_bounds"
|
|
388 #define CHKP_NONE_BOUNDS_VAR_NAME "__chkp_none_bounds"
|
|
389
|
|
390 /* Static checker constructors may become very large and their
|
|
391 compilation with optimization may take too much time.
|
|
392 Therefore we put a limit to number of statements in one
|
|
393 constructor. Tests with 100 000 statically initialized
|
|
394 pointers showed following compilation times on Sandy Bridge
|
|
395 server (used -O2):
|
|
396 limit 100 => ~18 sec.
|
|
397 limit 300 => ~22 sec.
|
|
398 limit 1000 => ~30 sec.
|
|
399 limit 3000 => ~49 sec.
|
|
400 limit 5000 => ~55 sec.
|
|
401 limit 10000 => ~76 sec.
|
|
402 limit 100000 => ~532 sec. */
|
|
403 #define MAX_STMTS_IN_STATIC_CHKP_CTOR (PARAM_VALUE (PARAM_CHKP_MAX_CTOR_SIZE))
|
|
404
|
|
/* Statement list being accumulated for a static bounds-initialization
   constructor.  AVAIL counts down toward the per-constructor statement
   limit (presumably MAX_STMTS_IN_STATIC_CHKP_CTOR — see comment above;
   confirm against users of this struct).  */
struct chkp_ctor_stmt_list
{
  tree stmts;   /* Accumulated statements.  */
  int avail;    /* Remaining statement budget for this constructor.  */
};
|
|
410
|
|
411 /* Return 1 if function FNDECL is instrumented by Pointer
|
|
412 Bounds Checker. */
|
|
413 bool
|
|
414 chkp_function_instrumented_p (tree fndecl)
|
|
415 {
|
|
416 return fndecl
|
|
417 && lookup_attribute ("chkp instrumented", DECL_ATTRIBUTES (fndecl));
|
|
418 }
|
|
419
|
|
/* Mark function FNDECL as instrumented by prepending the
   "chkp instrumented" attribute to its attribute list.
   No-op if the attribute is already present.  */
void
chkp_function_mark_instrumented (tree fndecl)
{
  /* Avoid attaching the attribute twice.  */
  if (chkp_function_instrumented_p (fndecl))
    return;

  DECL_ATTRIBUTES (fndecl)
    = tree_cons (get_identifier ("chkp instrumented"), NULL,
		 DECL_ATTRIBUTES (fndecl));
}
|
|
431
|
|
432 /* Return true when STMT is builtin call to instrumentation function
|
|
433 corresponding to CODE. */
|
|
434
|
|
435 bool
|
|
436 chkp_gimple_call_builtin_p (gimple *call,
|
|
437 enum built_in_function code)
|
|
438 {
|
|
439 tree fndecl;
|
|
440 /* We are skipping the check for address-spaces, that's
|
|
441 why we don't use gimple_call_builtin_p directly here. */
|
|
442 if (is_gimple_call (call)
|
|
443 && (fndecl = gimple_call_fndecl (call)) != NULL
|
|
444 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD
|
|
445 && (fndecl = targetm.builtin_chkp_function (code))
|
|
446 && (DECL_FUNCTION_CODE (gimple_call_fndecl (call))
|
|
447 == DECL_FUNCTION_CODE (fndecl)))
|
|
448 return true;
|
|
449 return false;
|
|
450 }
|
|
451
|
|
452 /* Emit code to build zero bounds and return RTL holding
|
|
453 the result. */
|
|
454 rtx
|
|
455 chkp_expand_zero_bounds ()
|
|
456 {
|
|
457 tree zero_bnd;
|
|
458
|
|
459 if (flag_chkp_use_static_const_bounds)
|
|
460 zero_bnd = chkp_get_zero_bounds_var ();
|
|
461 else
|
|
462 zero_bnd = chkp_build_make_bounds_call (integer_zero_node,
|
|
463 integer_zero_node);
|
|
464 return expand_normal (zero_bnd);
|
|
465 }
|
|
466
|
|
/* Emit code to store zero bounds for PTR located at MEM.  */
void
chkp_expand_bounds_reset_for_mem (tree mem, tree ptr)
{
  tree zero_bnd, bnd, addr, bndstx;

  /* Zero bounds either come from a statically allocated constant
     variable or from an explicit bndmk call with zero base/size.  */
  if (flag_chkp_use_static_const_bounds)
    zero_bnd = chkp_get_zero_bounds_var ();
  else
    zero_bnd = chkp_build_make_bounds_call (integer_zero_node,
					    integer_zero_node);
  /* Temporary holding the zero bounds value to be stored.  */
  bnd = make_tree (pointer_bounds_type_node,
		   assign_temp (pointer_bounds_type_node, 0, 1));
  /* Address of MEM is used as the bounds-table slot for PTR.  */
  addr = build1 (ADDR_EXPR,
		 build_pointer_type (TREE_TYPE (mem)), mem);
  bndstx = chkp_build_bndstx_call (addr, ptr, bnd);

  /* First materialize zero bounds into the temporary, then expand
     the bndstx call that records them in the bounds table.  */
  expand_assignment (bnd, zero_bnd, false);
  expand_normal (bndstx);
}
|
|
487
|
|
/* Build retbnd call for returned value RETVAL.

   If BNDVAL is not NULL then result is stored
   in it.  Otherwise a temporary is created to
   hold returned value.

   GSI points to a position for a retbnd call;
   the created stmt is inserted right after it.

   Obtained bounds are returned.  */
tree
chkp_insert_retbnd_call (tree bndval, tree retval,
			 gimple_stmt_iterator *gsi)
{
  gimple *call;

  /* Create a destination temporary if the caller did not supply one.  */
  if (!bndval)
    bndval = create_tmp_reg (pointer_bounds_type_node, "retbnd");

  /* __chkp_bndret (retval) obtains the bounds returned together
     with RETVAL.  */
  call = gimple_build_call (chkp_ret_bnd_fndecl, 1, retval);
  gimple_call_set_lhs (call, bndval);
  gsi_insert_after (gsi, call, GSI_CONTINUE_LINKING);

  return bndval;
}
|
|
516
|
|
/* Build a GIMPLE_CALL identical to CALL but skipping bounds
   arguments.  */

gcall *
chkp_copy_call_skip_bounds (gcall *call)
{
  bitmap bounds;
  unsigned i;

  bitmap_obstack_initialize (NULL);
  bounds = BITMAP_ALLOC (NULL);

  /* Collect positions of all bounds arguments of CALL.  */
  for (i = 0; i < gimple_call_num_args (call); i++)
    if (POINTER_BOUNDS_P (gimple_call_arg (call, i)))
      bitmap_set_bit (bounds, i);

  /* Copy the call without the collected arguments (only when there
     is something to skip) and clear the with-bounds flag either way.  */
  if (!bitmap_empty_p (bounds))
    call = gimple_call_copy_skip_args (call, bounds);
  gimple_call_set_with_bounds (call, false);

  BITMAP_FREE (bounds);
  bitmap_obstack_release (NULL);

  return call;
}
|
|
542
|
|
/* Redirect edge E to the correct node according to call_stmt.
   Return 1 if bounds removal from call_stmt should be done
   instead of redirection.  */

bool
chkp_redirect_edge (cgraph_edge *e)
{
  bool instrumented = false;
  tree decl = e->callee->decl;

  if (e->callee->instrumentation_clone
      || chkp_function_instrumented_p (decl))
    instrumented = true;

  /* Instrumented callee but a call without bounds: redirect back
     to the original (non-instrumented) declaration.  */
  if (instrumented
      && !gimple_call_with_bounds_p (e->call_stmt))
    e->redirect_callee (cgraph_node::get_create (e->callee->orig_decl));
  /* Non-instrumented callee but a call carrying bounds (and not one
     of the checker builtins which legitimately take bounds args):
     redirect to the instrumented version if one exists.  */
  else if (!instrumented
	   && gimple_call_with_bounds_p (e->call_stmt)
	   && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDCL)
	   && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDCU)
	   && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDSTX))
    {
      if (e->callee->instrumented_version)
	e->redirect_callee (e->callee->instrumented_version);
      else
	{
	  tree args = TYPE_ARG_TYPES (TREE_TYPE (decl));
	  /* Avoid bounds removal if all args will be removed.  */
	  if (!args || TREE_VALUE (args) != void_type_node)
	    return true;
	  else
	    gimple_call_set_with_bounds (e->call_stmt, false);
	}
    }

  return false;
}
|
|
581
|
|
/* Mark statement S to not be instrumented.  Uses the GF_PLF_1
   pass-local flag; see chkp_marked_stmt_p.  */
static void
chkp_mark_stmt (gimple *s)
{
  gimple_set_plf (s, GF_PLF_1, true);
}
|
|
588
|
|
/* Mark statement S to be instrumented, i.e. clear the GF_PLF_1
   flag set by chkp_mark_stmt.  */
static void
chkp_unmark_stmt (gimple *s)
{
  gimple_set_plf (s, GF_PLF_1, false);
}
|
|
595
|
|
/* Return 1 if statement S should not be instrumented
   (its GF_PLF_1 flag was set by chkp_mark_stmt).  */
static bool
chkp_marked_stmt_p (gimple *s)
{
  return gimple_plf (s, GF_PLF_1);
}
|
|
602
|
|
/* Get var to be used for bound temps.  The variable is created
   lazily on first use and cached in the file-scope TMP_VAR.  */
static tree
chkp_get_tmp_var (void)
{
  if (!tmp_var)
    tmp_var = create_tmp_reg (pointer_bounds_type_node, CHKP_BOUND_TMP_NAME);

  return tmp_var;
}
|
|
612
|
|
613 /* Get SSA_NAME to be used as temp. */
|
|
614 static tree
|
|
615 chkp_get_tmp_reg (gimple *stmt)
|
|
616 {
|
|
617 if (in_chkp_pass)
|
|
618 return make_ssa_name (chkp_get_tmp_var (), stmt);
|
|
619
|
|
620 return make_temp_ssa_name (pointer_bounds_type_node, stmt,
|
|
621 CHKP_BOUND_TMP_NAME);
|
|
622 }
|
|
623
|
|
/* Get var to be used for size temps.  Created lazily on first use
   and cached in the file-scope SIZE_TMP_VAR.  */
static tree
chkp_get_size_tmp_var (void)
{
  if (!size_tmp_var)
    size_tmp_var = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);

  return size_tmp_var;
}
|
|
633
|
|
634 /* Register bounds BND for address of OBJ. */
|
|
635 static void
|
|
636 chkp_register_addr_bounds (tree obj, tree bnd)
|
|
637 {
|
|
638 if (bnd == incomplete_bounds)
|
|
639 return;
|
|
640
|
|
641 chkp_reg_addr_bounds->put (obj, bnd);
|
|
642
|
|
643 if (dump_file && (dump_flags & TDF_DETAILS))
|
|
644 {
|
|
645 fprintf (dump_file, "Regsitered bound ");
|
|
646 print_generic_expr (dump_file, bnd);
|
|
647 fprintf (dump_file, " for address of ");
|
|
648 print_generic_expr (dump_file, obj);
|
|
649 fprintf (dump_file, "\n");
|
|
650 }
|
|
651 }
|
|
652
|
|
653 /* Return bounds registered for address of OBJ. */
|
|
654 static tree
|
|
655 chkp_get_registered_addr_bounds (tree obj)
|
|
656 {
|
|
657 tree *slot = chkp_reg_addr_bounds->get (obj);
|
|
658 return slot ? *slot : NULL_TREE;
|
|
659 }
|
|
660
|
|
/* Mark BOUNDS as completed by adding them to
   CHKP_COMPLETED_BOUNDS_SET.  */
static void
chkp_mark_completed_bounds (tree bounds)
{
  chkp_completed_bounds_set->add (bounds);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Marked bounds ");
      print_generic_expr (dump_file, bounds);
      fprintf (dump_file, " as completed\n");
    }
}
|
|
674
|
|
/* Return 1 if BOUNDS were marked as completed and 0 otherwise.  */
static bool
chkp_completed_bounds (tree bounds)
{
  return chkp_completed_bounds_set->contains (bounds);
}
|
|
681
|
|
/* Clear completed bound marks by replacing the whole set
   with a fresh empty one.  */
static void
chkp_erase_completed_bounds (void)
{
  delete chkp_completed_bounds_set;
  chkp_completed_bounds_set = new hash_set<tree>;
}
|
|
689
|
|
/* This function is used to provide a base address for
   chkp_get_hard_register_fake_addr_expr.  The value is the most
   negative pointer-sized integer, which cannot clash with any
   real object address.  */
static tree
chkp_get_hard_register_var_fake_base_address ()
{
  int prec = TYPE_PRECISION (ptr_type_node);
  return wide_int_to_tree (ptr_type_node, wi::min_value (prec, SIGNED));
}
|
|
698
|
|
699 /* If we check bounds for a hard register variable, we cannot
|
|
700 use its address - it is illegal, so instead of that we use
|
|
701 this fake value. */
|
|
702 static tree
|
|
703 chkp_get_hard_register_fake_addr_expr (tree obj)
|
|
704 {
|
|
705 tree addr = chkp_get_hard_register_var_fake_base_address ();
|
|
706 tree outer = obj;
|
|
707 while (TREE_CODE (outer) == COMPONENT_REF || TREE_CODE (outer) == ARRAY_REF)
|
|
708 {
|
|
709 if (TREE_CODE (outer) == COMPONENT_REF)
|
|
710 {
|
|
711 addr = fold_build_pointer_plus (addr,
|
|
712 component_ref_field_offset (outer));
|
|
713 outer = TREE_OPERAND (outer, 0);
|
|
714 }
|
|
715 else if (TREE_CODE (outer) == ARRAY_REF)
|
|
716 {
|
|
717 tree indx = fold_convert(size_type_node, TREE_OPERAND(outer, 1));
|
|
718 tree offset = size_binop (MULT_EXPR,
|
|
719 array_ref_element_size (outer), indx);
|
|
720 addr = fold_build_pointer_plus (addr, offset);
|
|
721 outer = TREE_OPERAND (outer, 0);
|
|
722 }
|
|
723 }
|
|
724
|
|
725 return addr;
|
|
726 }
|
|
727
|
|
728 /* Mark BOUNDS associated with PTR as incomplete. */
|
|
729 static void
|
|
730 chkp_register_incomplete_bounds (tree bounds, tree ptr)
|
|
731 {
|
|
732 chkp_incomplete_bounds_map->put (bounds, ptr);
|
|
733
|
|
734 if (dump_file && (dump_flags & TDF_DETAILS))
|
|
735 {
|
|
736 fprintf (dump_file, "Regsitered incomplete bounds ");
|
|
737 print_generic_expr (dump_file, bounds);
|
|
738 fprintf (dump_file, " for ");
|
|
739 print_generic_expr (dump_file, ptr);
|
|
740 fprintf (dump_file, "\n");
|
|
741 }
|
|
742 }
|
|
743
|
|
/* Return 1 if BOUNDS are incomplete and 0 otherwise.  Bounds that
   were explicitly marked as completed are never incomplete, even
   if they still appear in CHKP_INCOMPLETE_BOUNDS_MAP.  */
static bool
chkp_incomplete_bounds (tree bounds)
{
  if (bounds == incomplete_bounds)
    return true;

  if (chkp_completed_bounds (bounds))
    return false;

  return chkp_incomplete_bounds_map->get (bounds) != NULL;
}
|
|
756
|
|
/* Clear incomplete bound marks by replacing the whole map
   with a fresh empty one.  */
static void
chkp_erase_incomplete_bounds (void)
{
  delete chkp_incomplete_bounds_map;
  chkp_incomplete_bounds_map = new hash_map<tree, tree>;
}
|
|
764
|
|
/* Build and return bndmk call which creates bounds for structure
   pointed by PTR.  Structure should have complete type (its
   TYPE_SIZE must be known).  */
tree
chkp_make_bounds_for_struct_addr (tree ptr)
{
  tree type = TREE_TYPE (ptr);
  tree size;

  gcc_assert (POINTER_TYPE_P (type));

  /* Size of the pointed-to structure becomes the bounds length.  */
  size = TYPE_SIZE (TREE_TYPE (type));

  gcc_assert (size);

  return build_call_nary (pointer_bounds_type_node,
			  build_fold_addr_expr (chkp_bndmk_fndecl),
			  2, ptr, size);
}
|
|
783
|
|
/* Traversal function for chkp_may_finish_incomplete_bounds.
   Set RES to 0 if at least one argument of phi statement
   defining bounds (passed in KEY arg) is unknown.
   Traversal stops when first unknown phi argument is found.  */
bool
chkp_may_complete_phi_bounds (tree const &bounds, tree *slot ATTRIBUTE_UNUSED,
			      bool *res)
{
  gimple *phi;
  unsigned i;

  gcc_assert (TREE_CODE (bounds) == SSA_NAME);

  phi = SSA_NAME_DEF_STMT (bounds);

  gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree phi_arg = gimple_phi_arg_def (phi, i);
      if (!phi_arg)
	{
	  *res = false;
	  /* Do not need to traverse further.  Returning false stops
	     the hash_map traversal.  */
	  return false;
	}
    }

  return true;
}
|
|
814
|
|
/* Return 1 if all phi nodes created for bounds have their
   arguments computed.  Delegates the per-node check to
   chkp_may_complete_phi_bounds, which clears RES on the first
   unknown argument.  */
static bool
chkp_may_finish_incomplete_bounds (void)
{
  bool res = true;

  chkp_incomplete_bounds_map
    ->traverse<bool *, chkp_may_complete_phi_bounds> (&res);

  return res;
}
|
|
827
|
|
/* Helper function for chkp_finish_incomplete_bounds.
   Recompute args for bounds phi node: for each argument of the
   pointer's phi (stored in *SLOT), find its bounds and add them
   as the corresponding argument of the bounds phi (BOUNDS).  */
bool
chkp_recompute_phi_bounds (tree const &bounds, tree *slot,
			   void *res ATTRIBUTE_UNUSED)
{
  tree ptr = *slot;
  gphi *bounds_phi;
  gphi *ptr_phi;
  unsigned i;

  gcc_assert (TREE_CODE (bounds) == SSA_NAME);
  gcc_assert (TREE_CODE (ptr) == SSA_NAME);

  bounds_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (bounds));
  ptr_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (ptr));

  /* Bounds phi args mirror the pointer phi args edge-for-edge.  */
  for (i = 0; i < gimple_phi_num_args (bounds_phi); i++)
    {
      tree ptr_arg = gimple_phi_arg_def (ptr_phi, i);
      tree bound_arg = chkp_find_bounds (ptr_arg, NULL);

      add_phi_arg (bounds_phi, bound_arg,
		   gimple_phi_arg_edge (ptr_phi, i),
		   UNKNOWN_LOCATION);
    }

  return true;
}
|
|
857
|
|
/* Mark BOUNDS as invalid by adding them to CHKP_INVALID_BOUNDS.  */
static void
chkp_mark_invalid_bounds (tree bounds)
{
  chkp_invalid_bounds->add (bounds);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Marked bounds ");
      print_generic_expr (dump_file, bounds);
      fprintf (dump_file, " as invalid\n");
    }
}
|
|
871
|
|
872 /* Return 1 if BOUNDS were marked as invalid and 0 otherwise. */
|
|
873 static bool
|
|
874 chkp_valid_bounds (tree bounds)
|
|
875 {
|
|
876 if (bounds == zero_bounds || bounds == none_bounds)
|
|
877 return false;
|
|
878
|
|
879 return !chkp_invalid_bounds->contains (bounds);
|
|
880 }
|
|
881
|
|
/* Helper function for chkp_finish_incomplete_bounds.
   Check all arguments of phi nodes trying to find
   valid completed bounds.  If there is at least one
   such arg then bounds produced by phi node are marked
   as valid completed bounds and all phi args are
   recomputed.  Sets *RES to true when progress was made.  */
bool
chkp_find_valid_phi_bounds (tree const &bounds, tree *slot, bool *res)
{
  gimple *phi;
  unsigned i;

  gcc_assert (TREE_CODE (bounds) == SSA_NAME);

  /* Already settled in a previous iteration.  */
  if (chkp_completed_bounds (bounds))
    return true;

  phi = SSA_NAME_DEF_STMT (bounds);

  gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree phi_arg = gimple_phi_arg_def (phi, i);

      gcc_assert (phi_arg);

      /* One valid, fully-computed arg suffices to validate the
	 whole phi result; its args are then recomputed.  */
      if (chkp_valid_bounds (phi_arg) && !chkp_incomplete_bounds (phi_arg))
	{
	  *res = true;
	  chkp_mark_completed_bounds (bounds);
	  chkp_recompute_phi_bounds (bounds, slot, NULL);
	  return true;
	}
    }

  return true;
}
|
|
920
|
|
/* Helper function for chkp_finish_incomplete_bounds.
   Marks all bounds still not completed as invalid (and then as
   completed, so they are not visited again).  */
bool
chkp_mark_invalid_bounds_walker (tree const &bounds,
				 tree *slot ATTRIBUTE_UNUSED,
				 void *res ATTRIBUTE_UNUSED)
{
  if (!chkp_completed_bounds (bounds))
    {
      chkp_mark_invalid_bounds (bounds);
      chkp_mark_completed_bounds (bounds);
    }
  return true;
}
|
|
935
|
|
/* When all bound phi nodes have all their args computed
   we have enough info to find valid bounds.  We iterate
   through all incompleted bounds searching for valid
   bounds.  Found valid bounds are marked as completed
   and all remaining incompleted bounds are recomputed.
   Process continues until no new valid bounds may be
   found.  All remained incompleted bounds are marked as
   invalid (i.e. have no valid source of bounds).  */
static void
chkp_finish_incomplete_bounds (void)
{
  bool found_valid = true;

  /* Fixed-point loop: keep validating/recomputing until a full
     traversal discovers no new valid bounds.  */
  while (found_valid)
    {
      found_valid = false;

      chkp_incomplete_bounds_map->
	traverse<bool *, chkp_find_valid_phi_bounds> (&found_valid);

      if (found_valid)
	chkp_incomplete_bounds_map->
	  traverse<void *, chkp_recompute_phi_bounds> (NULL);
    }

  /* Whatever is still incomplete has no valid bounds source.  */
  chkp_incomplete_bounds_map->
    traverse<void *, chkp_mark_invalid_bounds_walker> (NULL);
  chkp_incomplete_bounds_map->
    traverse<void *, chkp_recompute_phi_bounds> (NULL);

  chkp_erase_completed_bounds ();
  chkp_erase_incomplete_bounds ();
}
|
|
969
|
|
970 /* Return 1 if type TYPE is a pointer type or a
|
|
971 structure having a pointer type as one of its fields.
|
|
972 Otherwise return 0. */
|
|
973 bool
|
|
974 chkp_type_has_pointer (const_tree type)
|
|
975 {
|
|
976 bool res = false;
|
|
977
|
|
978 if (BOUNDED_TYPE_P (type))
|
|
979 res = true;
|
|
980 else if (RECORD_OR_UNION_TYPE_P (type))
|
|
981 {
|
|
982 tree field;
|
|
983
|
|
984 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
|
|
985 if (TREE_CODE (field) == FIELD_DECL)
|
|
986 res = res || chkp_type_has_pointer (TREE_TYPE (field));
|
|
987 }
|
|
988 else if (TREE_CODE (type) == ARRAY_TYPE)
|
|
989 res = chkp_type_has_pointer (TREE_TYPE (type));
|
|
990
|
|
991 return res;
|
|
992 }
|
|
993
|
|
994 unsigned
|
|
995 chkp_type_bounds_count (const_tree type)
|
|
996 {
|
|
997 unsigned res = 0;
|
|
998
|
|
999 if (!type)
|
|
1000 res = 0;
|
|
1001 else if (BOUNDED_TYPE_P (type))
|
|
1002 res = 1;
|
|
1003 else if (RECORD_OR_UNION_TYPE_P (type))
|
|
1004 {
|
|
1005 bitmap have_bound;
|
|
1006
|
|
1007 bitmap_obstack_initialize (NULL);
|
|
1008 have_bound = BITMAP_ALLOC (NULL);
|
|
1009 chkp_find_bound_slots (type, have_bound);
|
|
1010 res = bitmap_count_bits (have_bound);
|
|
1011 BITMAP_FREE (have_bound);
|
|
1012 bitmap_obstack_release (NULL);
|
|
1013 }
|
|
1014
|
|
1015 return res;
|
|
1016 }
|
|
1017
|
|
1018 /* Get bounds associated with NODE via
|
|
1019 chkp_set_bounds call. */
|
|
1020 tree
|
|
1021 chkp_get_bounds (tree node)
|
|
1022 {
|
|
1023 tree *slot;
|
|
1024
|
|
1025 if (!chkp_bounds_map)
|
|
1026 return NULL_TREE;
|
|
1027
|
|
1028 slot = chkp_bounds_map->get (node);
|
|
1029 return slot ? *slot : NULL_TREE;
|
|
1030 }
|
|
1031
|
|
/* Associate bounds VAL with NODE in the global bounds map,
   creating the map lazily on first use.  */
void
chkp_set_bounds (tree node, tree val)
{
  if (!chkp_bounds_map)
    chkp_bounds_map = new hash_map<tree, tree>;

  chkp_bounds_map->put (node, val);
}
|
|
1041
|
|
/* Check if statically initialized variable VAR requires
   static bounds initialization.  If VAR is added into the
   bounds initialization list then true is returned.
   Otherwise return false.  */
extern bool
chkp_register_var_initializer (tree var)
{
  /* Nothing to do when the checker is disabled or the
     initializer is erroneous.  */
  if (!flag_check_pointer_bounds
      || DECL_INITIAL (var) == error_mark_node)
    return false;

  gcc_assert (VAR_P (var));
  gcc_assert (DECL_INITIAL (var));

  if (TREE_STATIC (var)
      && chkp_type_has_pointer (TREE_TYPE (var)))
    {
      /* Flag the varpool node; presumably consumed when static
	 bounds constructors are emitted — see chkp_finish_file.  */
      varpool_node::get_create (var)->need_bounds_init = 1;
      return true;
    }

  return false;
}
|
|
1065
|
|
/* Helper function for chkp_finish_file.

   Add a new modification statement (RHS is assigned to LHS)
   into the list of static initializer statements (ARG points
   to a struct chkp_ctor_stmt_list).  This only appends and
   decrements the list's available-statement counter; emitting
   a checker constructor and starting a new list when the
   counter runs out is the caller's responsibility.  */
static void
chkp_add_modification_to_stmt_list (tree lhs,
				    tree rhs,
				    void *arg)
{
  struct chkp_ctor_stmt_list *stmts = (struct chkp_ctor_stmt_list *)arg;
  tree modify;

  /* Insert an explicit conversion when LHS and RHS types are not
     trivially compatible.  */
  if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
    rhs = build1 (CONVERT_EXPR, TREE_TYPE (lhs), rhs);

  modify = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
  append_to_statement_list (modify, &stmts->stmts);

  stmts->avail--;
}
|
|
1088
|
|
1089 /* Build and return ADDR_EXPR for specified object OBJ. */
|
|
1090 static tree
|
|
1091 chkp_build_addr_expr (tree obj)
|
|
1092 {
|
|
1093 /* We first check whether it is a "hard reg case". */
|
|
1094 tree base = get_base_address (obj);
|
|
1095 if (VAR_P (base) && DECL_HARD_REGISTER (base))
|
|
1096 return chkp_get_hard_register_fake_addr_expr (obj);
|
|
1097
|
|
1098 /* If not - return regular ADDR_EXPR. */
|
|
1099 return TREE_CODE (obj) == TARGET_MEM_REF
|
|
1100 ? tree_mem_ref_addr (ptr_type_node, obj)
|
|
1101 : build_fold_addr_expr (obj);
|
|
1102 }
|
|
1103
|
|
/* Helper function for chkp_finish_file.
   Emit initialization of bound variable BND_VAR with the bounds of
   variable VAR into statements list STMTS.  If the statements list
   becomes too big, emit a checker constructor and start a new one.

   Bounds are computed as [lb, lb + size - 1] where the size comes
   from one of three sources depending on what is known about VAR.  */
static void
chkp_output_static_bounds (tree bnd_var, tree var,
			   struct chkp_ctor_stmt_list *stmts)
{
  tree lb, ub, size;

  if (TREE_CODE (var) == STRING_CST)
    {
      /* String constants: size is the literal's length (the -1
	 accounts for inclusive upper bound).  */
      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      size = build_int_cst (size_type_node, TREE_STRING_LENGTH (var) - 1);
    }
  else if (DECL_SIZE (var)
	   && !chkp_variable_size_type (TREE_TYPE (var)))
    {
      /* Compute bounds using statically known size.  */
      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      size = size_binop (MINUS_EXPR, DECL_SIZE_UNIT (var), size_one_node);
    }
  else
    {
      /* Compute bounds using dynamic size: emit a runtime call to
	 the checker's sizeof function.  */
      tree call;

      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      call = build1 (ADDR_EXPR,
		     build_pointer_type (TREE_TYPE (chkp_sizeof_fndecl)),
		     chkp_sizeof_fndecl);
      size = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_sizeof_fndecl)),
			      call, 1, var);

      if (flag_chkp_zero_dynamic_size_as_infinite)
	{
	  /* When the runtime reports zero size, substitute the
	     maximum size reachable from lb instead.  */
	  tree max_size, cond;

	  max_size = build2 (MINUS_EXPR, size_type_node, size_zero_node, lb);
	  cond = build2 (NE_EXPR, boolean_type_node, size, size_zero_node);
	  size = build3 (COND_EXPR, size_type_node, cond, size, max_size);
	}

      size = size_binop (MINUS_EXPR, size, size_one_node);
    }

  ub = size_binop (PLUS_EXPR, lb, size);
  /* The target hook reports how many statements it appended.  */
  stmts->avail -= targetm.chkp_initialize_bounds (bnd_var, lb, ub,
						  &stmts->stmts);
  if (stmts->avail <= 0)
    {
      /* Statement budget exhausted: flush into a static constructor
	 and start a fresh list.  */
      cgraph_build_static_cdtor ('B', stmts->stmts,
				 MAX_RESERVED_INIT_PRIORITY + 2);
      stmts->avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
      stmts->stmts = NULL;
    }
}
|
|
1161
|
|
1162 /* Return entry block to be used for checker initilization code.
|
|
1163 Create new block if required. */
|
|
1164 static basic_block
|
|
1165 chkp_get_entry_block (void)
|
|
1166 {
|
|
1167 if (!entry_block)
|
|
1168 entry_block
|
|
1169 = split_block_after_labels (ENTRY_BLOCK_PTR_FOR_FN (cfun))->dest;
|
|
1170
|
|
1171 return entry_block;
|
|
1172 }
|
|
1173
|
|
1174 /* Return a bounds var to be used for pointer var PTR_VAR. */
|
|
1175 static tree
|
|
1176 chkp_get_bounds_var (tree ptr_var)
|
|
1177 {
|
|
1178 tree bnd_var;
|
|
1179 tree *slot;
|
|
1180
|
|
1181 slot = chkp_bound_vars->get (ptr_var);
|
|
1182 if (slot)
|
|
1183 bnd_var = *slot;
|
|
1184 else
|
|
1185 {
|
|
1186 bnd_var = create_tmp_reg (pointer_bounds_type_node,
|
|
1187 CHKP_BOUND_TMP_NAME);
|
|
1188 chkp_bound_vars->put (ptr_var, bnd_var);
|
|
1189 }
|
|
1190
|
|
1191 return bnd_var;
|
|
1192 }
|
|
1193
|
|
1194 /* If BND is an abnormal bounds copy, return a copied value.
|
|
1195 Otherwise return BND. */
|
|
1196 static tree
|
|
1197 chkp_get_orginal_bounds_for_abnormal_copy (tree bnd)
|
|
1198 {
|
|
1199 if (bitmap_bit_p (chkp_abnormal_copies, SSA_NAME_VERSION (bnd)))
|
|
1200 {
|
|
1201 gimple *bnd_def = SSA_NAME_DEF_STMT (bnd);
|
|
1202 gcc_checking_assert (gimple_code (bnd_def) == GIMPLE_ASSIGN);
|
|
1203 bnd = gimple_assign_rhs1 (bnd_def);
|
|
1204 }
|
|
1205
|
|
1206 return bnd;
|
|
1207 }
|
|
1208
|
|
1209 /* Register bounds BND for object PTR in global bounds table.
|
|
1210 A copy of bounds may be created for abnormal ssa names.
|
|
1211 Returns bounds to use for PTR. */
|
|
1212 static tree
|
|
1213 chkp_maybe_copy_and_register_bounds (tree ptr, tree bnd)
|
|
1214 {
|
|
1215 bool abnormal_ptr;
|
|
1216
|
|
1217 if (!chkp_reg_bounds)
|
|
1218 return bnd;
|
|
1219
|
|
1220 /* Do nothing if bounds are incomplete_bounds
|
|
1221 because it means bounds will be recomputed. */
|
|
1222 if (bnd == incomplete_bounds)
|
|
1223 return bnd;
|
|
1224
|
|
1225 abnormal_ptr = (TREE_CODE (ptr) == SSA_NAME
|
|
1226 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
|
|
1227 && gimple_code (SSA_NAME_DEF_STMT (ptr)) != GIMPLE_PHI);
|
|
1228
|
|
1229 /* A single bounds value may be reused multiple times for
|
|
1230 different pointer values. It may cause coalescing issues
|
|
1231 for abnormal SSA names. To avoid it we create a bounds
|
|
1232 copy in case it is computed for abnormal SSA name.
|
|
1233
|
|
1234 We also cannot reuse such created copies for other pointers */
|
|
1235 if (abnormal_ptr
|
|
1236 || bitmap_bit_p (chkp_abnormal_copies, SSA_NAME_VERSION (bnd)))
|
|
1237 {
|
|
1238 tree bnd_var = NULL_TREE;
|
|
1239
|
|
1240 if (abnormal_ptr)
|
|
1241 {
|
|
1242 if (SSA_NAME_VAR (ptr))
|
|
1243 bnd_var = chkp_get_bounds_var (SSA_NAME_VAR (ptr));
|
|
1244 }
|
|
1245 else
|
|
1246 bnd_var = chkp_get_tmp_var ();
|
|
1247
|
|
1248 /* For abnormal copies we may just find original
|
|
1249 bounds and use them. */
|
|
1250 if (!abnormal_ptr && !SSA_NAME_IS_DEFAULT_DEF (bnd))
|
|
1251 bnd = chkp_get_orginal_bounds_for_abnormal_copy (bnd);
|
|
1252 /* For undefined values we usually use none bounds
|
|
1253 value but in case of abnormal edge it may cause
|
|
1254 coalescing failures. Use default definition of
|
|
1255 bounds variable instead to avoid it. */
|
|
1256 else if (SSA_NAME_IS_DEFAULT_DEF (ptr)
|
|
1257 && TREE_CODE (SSA_NAME_VAR (ptr)) != PARM_DECL)
|
|
1258 {
|
|
1259 bnd = get_or_create_ssa_default_def (cfun, bnd_var);
|
|
1260
|
|
1261 if (dump_file && (dump_flags & TDF_DETAILS))
|
|
1262 {
|
|
1263 fprintf (dump_file, "Using default def bounds ");
|
|
1264 print_generic_expr (dump_file, bnd);
|
|
1265 fprintf (dump_file, " for abnormal default def SSA name ");
|
|
1266 print_generic_expr (dump_file, ptr);
|
|
1267 fprintf (dump_file, "\n");
|
|
1268 }
|
|
1269 }
|
|
1270 else
|
|
1271 {
|
|
1272 tree copy;
|
|
1273 gimple *def = SSA_NAME_DEF_STMT (ptr);
|
|
1274 gimple *assign;
|
|
1275 gimple_stmt_iterator gsi;
|
|
1276
|
|
1277 if (bnd_var)
|
|
1278 copy = make_ssa_name (bnd_var);
|
|
1279 else
|
|
1280 copy = make_temp_ssa_name (pointer_bounds_type_node,
|
|
1281 NULL,
|
|
1282 CHKP_BOUND_TMP_NAME);
|
|
1283 bnd = chkp_get_orginal_bounds_for_abnormal_copy (bnd);
|
|
1284 assign = gimple_build_assign (copy, bnd);
|
|
1285
|
|
1286 if (dump_file && (dump_flags & TDF_DETAILS))
|
|
1287 {
|
|
1288 fprintf (dump_file, "Creating a copy of bounds ");
|
|
1289 print_generic_expr (dump_file, bnd);
|
|
1290 fprintf (dump_file, " for abnormal SSA name ");
|
|
1291 print_generic_expr (dump_file, ptr);
|
|
1292 fprintf (dump_file, "\n");
|
|
1293 }
|
|
1294
|
|
1295 if (gimple_code (def) == GIMPLE_NOP)
|
|
1296 {
|
|
1297 gsi = gsi_last_bb (chkp_get_entry_block ());
|
|
1298 if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
|
|
1299 gsi_insert_before (&gsi, assign, GSI_CONTINUE_LINKING);
|
|
1300 else
|
|
1301 gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
|
|
1302 }
|
|
1303 else
|
|
1304 {
|
|
1305 gimple *bnd_def = SSA_NAME_DEF_STMT (bnd);
|
|
1306 /* Sometimes (e.g. when we load a pointer from a
|
|
1307 memory) bounds are produced later than a pointer.
|
|
1308 We need to insert bounds copy appropriately. */
|
|
1309 if (gimple_code (bnd_def) != GIMPLE_NOP
|
|
1310 && stmt_dominates_stmt_p (def, bnd_def))
|
|
1311 gsi = gsi_for_stmt (bnd_def);
|
|
1312 else
|
|
1313 gsi = gsi_for_stmt (def);
|
|
1314 gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
|
|
1315 }
|
|
1316
|
|
1317 bnd = copy;
|
|
1318 }
|
|
1319
|
|
1320 if (abnormal_ptr)
|
|
1321 bitmap_set_bit (chkp_abnormal_copies, SSA_NAME_VERSION (bnd));
|
|
1322 }
|
|
1323
|
|
1324 chkp_reg_bounds->put (ptr, bnd);
|
|
1325
|
|
1326 if (dump_file && (dump_flags & TDF_DETAILS))
|
|
1327 {
|
|
1328 fprintf (dump_file, "Regsitered bound ");
|
|
1329 print_generic_expr (dump_file, bnd);
|
|
1330 fprintf (dump_file, " for pointer ");
|
|
1331 print_generic_expr (dump_file, ptr);
|
|
1332 fprintf (dump_file, "\n");
|
|
1333 }
|
|
1334
|
|
1335 return bnd;
|
|
1336 }
|
|
1337
|
|
1338 /* Get bounds registered for object PTR in global bounds table. */
|
|
1339 static tree
|
|
1340 chkp_get_registered_bounds (tree ptr)
|
|
1341 {
|
|
1342 tree *slot;
|
|
1343
|
|
1344 if (!chkp_reg_bounds)
|
|
1345 return NULL_TREE;
|
|
1346
|
|
1347 slot = chkp_reg_bounds->get (ptr);
|
|
1348 return slot ? *slot : NULL_TREE;
|
|
1349 }
|
|
1350
|
|
/* Add bound retvals to the return statement pointed by GSI.  */

static void
chkp_add_bounds_to_ret_stmt (gimple_stmt_iterator *gsi)
{
  greturn *ret = as_a <greturn *> (gsi_stmt (*gsi));
  tree retval = gimple_return_retval (ret);
  tree ret_decl = DECL_RESULT (cfun->decl);
  tree bounds;

  /* Nothing to do for a return without a value.  */
  if (!retval)
    return;

  /* Only pointer-carrying results need return bounds.  */
  if (BOUNDED_P (ret_decl))
    {
      bounds = chkp_find_bounds (retval, gsi);
      bounds = chkp_maybe_copy_and_register_bounds (ret_decl, bounds);
      gimple_return_set_retbnd (ret, bounds);
    }

  update_stmt (ret);
}
|
|
1373
|
|
/* Force OP to be suitable for using as an argument for call.
   New statements (if any) go to SEQ.  Returns the (possibly
   replaced) operand.  */
static tree
chkp_force_gimple_call_op (tree op, gimple_seq *seq)
{
  gimple_seq stmts;
  gimple_stmt_iterator si;

  /* Unshare first: OP may be shared with other trees.  */
  op = force_gimple_operand (unshare_expr (op), &stmts, true, NULL_TREE);

  /* Mark all helper statements as checker-generated — presumably
     so later instrumentation skips them; see chkp_mark_stmt.  */
  for (si = gsi_start (stmts); !gsi_end_p (si); gsi_next (&si))
    chkp_mark_stmt (gsi_stmt (si));

  gimple_seq_add_seq (seq, stmts);

  return op;
}
|
|
1391
|
|
/* Generate lower bound check for memory access by ADDR against
   BOUNDS.  The check is inserted before the position pointed by
   ITER.  DIRFLAG indicates whether memory access is a load
   (integer_zero_node) or a store (integer_one_node).  */
static void
chkp_check_lower (tree addr, tree bounds,
		  gimple_stmt_iterator iter,
		  location_t location,
		  tree dirflag)
{
  gimple_seq seq;
  gimple *check;
  tree node;

  /* Skip checks against zero bounds in not-instrumented functions.  */
  if (!chkp_function_instrumented_p (current_function_decl)
      && bounds == chkp_get_zero_bounds ())
    return;

  /* Honor the read/write check flags.  */
  if (dirflag == integer_zero_node
      && !flag_chkp_check_read)
    return;

  if (dirflag == integer_one_node
      && !flag_chkp_check_write)
    return;

  seq = NULL;

  /* Make ADDR a valid gimple call operand first.  */
  node = chkp_force_gimple_call_op (addr, &seq);

  check = gimple_build_call (chkp_checkl_fndecl, 2, node, bounds);
  chkp_mark_stmt (check);
  gimple_call_set_with_bounds (check, true);
  gimple_set_location (check, location);
  gimple_seq_add_stmt (&seq, check);

  gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      gimple *before = gsi_stmt (iter);
      fprintf (dump_file, "Generated lower bound check for statement ");
      print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
      fprintf (dump_file, " ");
      print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
    }
}
|
|
1438
|
|
/* Generate upper bound check for memory access by ADDR against
   BOUNDS.  The check is inserted before the position pointed by
   ITER.  DIRFLAG indicates whether memory access is a load
   (integer_zero_node) or a store (integer_one_node).
   NOTE(review): intentionally parallel to chkp_check_lower except
   for the check fndecl and dump wording.  */
static void
chkp_check_upper (tree addr, tree bounds,
		  gimple_stmt_iterator iter,
		  location_t location,
		  tree dirflag)
{
  gimple_seq seq;
  gimple *check;
  tree node;

  /* Skip checks against zero bounds in not-instrumented functions.  */
  if (!chkp_function_instrumented_p (current_function_decl)
      && bounds == chkp_get_zero_bounds ())
    return;

  /* Honor the read/write check flags.  */
  if (dirflag == integer_zero_node
      && !flag_chkp_check_read)
    return;

  if (dirflag == integer_one_node
      && !flag_chkp_check_write)
    return;

  seq = NULL;

  /* Make ADDR a valid gimple call operand first.  */
  node = chkp_force_gimple_call_op (addr, &seq);

  check = gimple_build_call (chkp_checku_fndecl, 2, node, bounds);
  chkp_mark_stmt (check);
  gimple_call_set_with_bounds (check, true);
  gimple_set_location (check, location);
  gimple_seq_add_stmt (&seq, check);

  gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      gimple *before = gsi_stmt (iter);
      fprintf (dump_file, "Generated upper bound check for statement ");
      print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
      fprintf (dump_file, " ");
      print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
    }
}
|
|
1485
|
|
/* Generate lower and upper bound checks for memory access
   to memory slot [FIRST, LAST] against BOUNDS.  Checks
   are inserted before the position pointed by ITER.
   DIRFLAG indicates whether memory access is load or store.  */
void
chkp_check_mem_access (tree first, tree last, tree bounds,
		       gimple_stmt_iterator iter,
		       location_t location,
		       tree dirflag)
{
  chkp_check_lower (first, bounds, iter, location, dirflag);
  chkp_check_upper (last, bounds, iter, location, dirflag);
}
|
|
1499
|
|
/* Replace call to _bnd_chk_* pointed by GSI with
   bndcu and bndcl calls.  DIRFLAG determines whether
   check is for read or write.  */

void
chkp_replace_address_check_builtin (gimple_stmt_iterator *gsi,
				    tree dirflag)
{
  gimple_stmt_iterator call_iter = *gsi;
  gimple *call = gsi_stmt (*gsi);
  tree fndecl = gimple_call_fndecl (call);
  tree addr = gimple_call_arg (call, 0);
  tree bounds = chkp_find_bounds (addr, gsi);

  /* Lower bound check for the lbounds and combined builtins.  */
  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
      || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
    chkp_check_lower (addr, bounds, *gsi, gimple_location (call), dirflag);

  /* Upper bound check for the ubounds builtin.  */
  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS)
    chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);

  /* The combined builtin checks the last byte of the accessed
     region: ADDR + SIZE - 1.  */
  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
    {
      tree size = gimple_call_arg (call, 1);
      addr = fold_build_pointer_plus (addr, size);
      addr = fold_build_pointer_plus_hwi (addr, -1);
      chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);
    }

  /* The user builtin itself is no longer needed.  */
  gsi_remove (&call_iter, true);
}
|
|
1531
|
|
1532 /* Replace call to _bnd_get_ptr_* pointed by GSI with
|
|
1533 corresponding bounds extract call. */
|
|
1534
|
|
1535 void
|
|
1536 chkp_replace_extract_builtin (gimple_stmt_iterator *gsi)
|
|
1537 {
|
|
1538 gimple *call = gsi_stmt (*gsi);
|
|
1539 tree fndecl = gimple_call_fndecl (call);
|
|
1540 tree addr = gimple_call_arg (call, 0);
|
|
1541 tree bounds = chkp_find_bounds (addr, gsi);
|
|
1542 gimple *extract;
|
|
1543
|
|
1544 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND)
|
|
1545 fndecl = chkp_extract_lower_fndecl;
|
|
1546 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND)
|
|
1547 fndecl = chkp_extract_upper_fndecl;
|
|
1548 else
|
|
1549 gcc_unreachable ();
|
|
1550
|
|
1551 extract = gimple_build_call (fndecl, 1, bounds);
|
|
1552 gimple_call_set_lhs (extract, gimple_call_lhs (call));
|
|
1553 chkp_mark_stmt (extract);
|
|
1554
|
|
1555 gsi_replace (gsi, extract, false);
|
|
1556 }
|
|
1557
|
|
/* Return COMPONENT_REF accessing FIELD in OBJ.  */
static tree
chkp_build_component_ref (tree obj, tree field)
{
  tree res;

  /* If object is TMR then we do not use component_ref but
     add offset instead.  We need it to be able to get the
     address of the result later.  */
  if (TREE_CODE (obj) == TARGET_MEM_REF)
    {
      tree offs = TMR_OFFSET (obj);
      offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
				      offs, DECL_FIELD_OFFSET (field));

      /* Both operands are constants, so folding must succeed.  */
      gcc_assert (offs);

      res = copy_node (obj);
      TREE_TYPE (res) = TREE_TYPE (field);
      TMR_OFFSET (res) = offs;
    }
  else
    res = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL_TREE);

  return res;
}
|
|
1584
|
|
/* Return ARRAY_REF for array ARR and index IDX with
   specified element type ETYPE and element size ESIZE
   (size in bits, as a tree).  */
static tree
chkp_build_array_ref (tree arr, tree etype, tree esize,
		      unsigned HOST_WIDE_INT idx)
{
  tree index = build_int_cst (size_type_node, idx);
  tree res;

  /* If object is TMR then we do not use array_ref but
     add offset instead.  We need it to be able to get the
     address of the result later.  */
  if (TREE_CODE (arr) == TARGET_MEM_REF)
    {
      tree offs = TMR_OFFSET (arr);

      /* Byte offset of the element: ESIZE * IDX.  */
      esize = fold_binary_to_constant (MULT_EXPR, TREE_TYPE (esize),
				       esize, index);
      gcc_assert(esize);

      offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
				      offs, esize);
      gcc_assert (offs);

      res = copy_node (arr);
      TREE_TYPE (res) = etype;
      TMR_OFFSET (res) = offs;
    }
  else
    res = build4 (ARRAY_REF, etype, arr, index, NULL_TREE, NULL_TREE);

  return res;
}
|
|
1618
|
|
/* Helper function for chkp_add_bounds_to_call_stmt.
   Fill ALL_BOUNDS output array with created bounds for ELEM.

   OFFS is used for recursive calls and holds basic
   offset of ELEM's type in the outer structure in bits.

   ITER points a position where bounds are searched.

   ALL_BOUNDS[i] is filled with elem bounds if there
   is a field in the type which has pointer type and offset
   equal to i * POINTER_SIZE in bits.  */
static void
chkp_find_bounds_for_elem (tree elem, tree *all_bounds,
			   HOST_WIDE_INT offs,
			   gimple_stmt_iterator *iter)
{
  tree type = TREE_TYPE (elem);

  if (BOUNDED_TYPE_P (type))
    {
      /* A pointer: compute its bounds once per slot.  */
      if (!all_bounds[offs / POINTER_SIZE])
	{
	  /* Load the pointer into a temporary SSA name so bounds
	     can be looked up for an SSA value.  */
	  tree temp = make_temp_ssa_name (type, NULL, "");
	  gimple *assign = gimple_build_assign (temp, elem);
	  gimple_stmt_iterator gsi;

	  gsi_insert_before (iter, assign, GSI_SAME_STMT);
	  gsi = gsi_for_stmt (assign);

	  all_bounds[offs / POINTER_SIZE] = chkp_find_bounds (temp, &gsi);
	}
    }
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree field;

      /* Recurse into every field at its bit offset.  */
      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    tree base = unshare_expr (elem);
	    tree field_ref = chkp_build_component_ref (base, field);
	    HOST_WIDE_INT field_offs
	      = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	    if (DECL_FIELD_OFFSET (field))
	      field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;

	    chkp_find_bounds_for_elem (field_ref, all_bounds,
				       offs + field_offs, iter);
	  }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
      unsigned HOST_WIDE_INT cur;

      /* Skip arrays of unknown or zero length.  */
      if (!maxval || integer_minus_onep (maxval))
	return;

      /* Recurse into every element at its bit offset.  */
      for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
	{
	  tree base = unshare_expr (elem);
	  tree arr_elem = chkp_build_array_ref (base, etype,
						TYPE_SIZE (etype),
						cur);
	  chkp_find_bounds_for_elem (arr_elem, all_bounds, offs + cur * esize,
				     iter);
	}
    }
}
|
|
1690
|
|
/* Fill HAVE_BOUND output bitmap with information about
   bounds required for an object of type TYPE.

   OFFS is used for recursive calls and holds basic
   offset of TYPE in the outer structure in bits.

   HAVE_BOUND[i] is set to 1 if there is a field
   in TYPE which has pointer type and offset
   equal to i * POINTER_SIZE - OFFS in bits.  */
void
chkp_find_bound_slots_1 (const_tree type, bitmap have_bound,
			 HOST_WIDE_INT offs)
{
  if (BOUNDED_TYPE_P (type))
    bitmap_set_bit (have_bound, offs / POINTER_SIZE);
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree field;

      /* Recurse into every field at its bit offset.  */
      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    HOST_WIDE_INT field_offs = 0;
	    if (DECL_FIELD_BIT_OFFSET (field))
	      field_offs += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	    if (DECL_FIELD_OFFSET (field))
	      field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;
	    chkp_find_bound_slots_1 (TREE_TYPE (field), have_bound,
				     offs + field_offs);
	  }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
    {
      /* The object type is an array of complete type, i.e., other
	 than a flexible array.  */
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
      unsigned HOST_WIDE_INT cur;

      /* Skip arrays with non-constant or empty domains.  */
      if (!maxval
	  || TREE_CODE (maxval) != INTEGER_CST
	  || integer_minus_onep (maxval))
	return;

      for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
	chkp_find_bound_slots_1 (etype, have_bound, offs + cur * esize);
    }
}
|
|
1740
|
|
/* Fill bitmap RES with information about bounds for
   type TYPE, starting from a cleared bitmap.  See
   chkp_find_bound_slots_1 for more details.  */
void
chkp_find_bound_slots (const_tree type, bitmap res)
{
  bitmap_clear (res);
  chkp_find_bound_slots_1 (type, res, 0);
}
|
|
1750
|
|
1751 /* Return 1 if call to FNDECL should be instrumented
|
|
1752 and 0 otherwise. */
|
|
1753
|
|
1754 static bool
|
|
1755 chkp_instrument_normal_builtin (tree fndecl)
|
|
1756 {
|
|
1757 switch (DECL_FUNCTION_CODE (fndecl))
|
|
1758 {
|
|
1759 case BUILT_IN_STRLEN:
|
|
1760 case BUILT_IN_STRCPY:
|
|
1761 case BUILT_IN_STRNCPY:
|
|
1762 case BUILT_IN_STPCPY:
|
|
1763 case BUILT_IN_STPNCPY:
|
|
1764 case BUILT_IN_STRCAT:
|
|
1765 case BUILT_IN_STRNCAT:
|
|
1766 case BUILT_IN_MEMCPY:
|
|
1767 case BUILT_IN_MEMPCPY:
|
|
1768 case BUILT_IN_MEMSET:
|
|
1769 case BUILT_IN_MEMMOVE:
|
|
1770 case BUILT_IN_BZERO:
|
|
1771 case BUILT_IN_STRCMP:
|
|
1772 case BUILT_IN_STRNCMP:
|
|
1773 case BUILT_IN_BCMP:
|
|
1774 case BUILT_IN_MEMCMP:
|
|
1775 case BUILT_IN_MEMCPY_CHK:
|
|
1776 case BUILT_IN_MEMPCPY_CHK:
|
|
1777 case BUILT_IN_MEMMOVE_CHK:
|
|
1778 case BUILT_IN_MEMSET_CHK:
|
|
1779 case BUILT_IN_STRCPY_CHK:
|
|
1780 case BUILT_IN_STRNCPY_CHK:
|
|
1781 case BUILT_IN_STPCPY_CHK:
|
|
1782 case BUILT_IN_STPNCPY_CHK:
|
|
1783 case BUILT_IN_STRCAT_CHK:
|
|
1784 case BUILT_IN_STRNCAT_CHK:
|
|
1785 case BUILT_IN_MALLOC:
|
|
1786 case BUILT_IN_CALLOC:
|
|
1787 case BUILT_IN_REALLOC:
|
|
1788 return 1;
|
|
1789
|
|
1790 default:
|
|
1791 return 0;
|
|
1792 }
|
|
1793 }
|
|
1794
|
|
1795 /* Add bound arguments to call statement pointed by GSI.
|
|
1796 Also performs a replacement of user checker builtins calls
|
|
1797 with internal ones. */
|
|
1798
|
|
1799 static void
|
|
1800 chkp_add_bounds_to_call_stmt (gimple_stmt_iterator *gsi)
|
|
1801 {
|
|
1802 gcall *call = as_a <gcall *> (gsi_stmt (*gsi));
|
|
1803 unsigned arg_no = 0;
|
|
1804 tree fndecl = gimple_call_fndecl (call);
|
|
1805 tree fntype;
|
|
1806 tree first_formal_arg;
|
|
1807 tree arg;
|
|
1808 bool use_fntype = false;
|
|
1809 tree op;
|
|
1810 ssa_op_iter iter;
|
|
1811 gcall *new_call;
|
|
1812
|
|
1813 /* Do nothing for internal functions. */
|
|
1814 if (gimple_call_internal_p (call))
|
|
1815 return;
|
|
1816
|
|
1817 fntype = TREE_TYPE (TREE_TYPE (gimple_call_fn (call)));
|
|
1818
|
|
1819 /* Do nothing if back-end builtin is called. */
|
|
1820 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
|
|
1821 return;
|
|
1822
|
|
1823 /* Do nothing for some middle-end builtins. */
|
|
1824 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
|
|
1825 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_OBJECT_SIZE)
|
|
1826 return;
|
|
1827
|
|
1828 /* Do nothing for calls to not instrumentable functions. */
|
|
1829 if (fndecl && !chkp_instrumentable_p (fndecl))
|
|
1830 return;
|
|
1831
|
|
1832 /* Ignore CHKP_INIT_PTR_BOUNDS, CHKP_NULL_PTR_BOUNDS
|
|
1833 and CHKP_COPY_PTR_BOUNDS. */
|
|
1834 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
|
|
1835 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS
|
|
1836 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS
|
|
1837 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS
|
|
1838 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS))
|
|
1839 return;
|
|
1840
|
|
1841 /* Check user builtins are replaced with checks. */
|
|
1842 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
|
|
1843 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
|
|
1844 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
|
|
1845 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS))
|
|
1846 {
|
|
1847 chkp_replace_address_check_builtin (gsi, integer_minus_one_node);
|
|
1848 return;
|
|
1849 }
|
|
1850
|
|
1851 /* Check user builtins are replaced with bound extract. */
|
|
1852 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
|
|
1853 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND
|
|
1854 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND))
|
|
1855 {
|
|
1856 chkp_replace_extract_builtin (gsi);
|
|
1857 return;
|
|
1858 }
|
|
1859
|
|
1860 /* BUILT_IN_CHKP_NARROW_PTR_BOUNDS call is replaced with
|
|
1861 target narrow bounds call. */
|
|
1862 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
|
|
1863 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
|
|
1864 {
|
|
1865 tree arg = gimple_call_arg (call, 1);
|
|
1866 tree bounds = chkp_find_bounds (arg, gsi);
|
|
1867
|
|
1868 gimple_call_set_fndecl (call, chkp_narrow_bounds_fndecl);
|
|
1869 gimple_call_set_arg (call, 1, bounds);
|
|
1870 update_stmt (call);
|
|
1871
|
|
1872 return;
|
|
1873 }
|
|
1874
|
|
1875 /* BUILT_IN_CHKP_STORE_PTR_BOUNDS call is replaced with
|
|
1876 bndstx call. */
|
|
1877 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
|
|
1878 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_STORE_PTR_BOUNDS)
|
|
1879 {
|
|
1880 tree addr = gimple_call_arg (call, 0);
|
|
1881 tree ptr = gimple_call_arg (call, 1);
|
|
1882 tree bounds = chkp_find_bounds (ptr, gsi);
|
|
1883 gimple_stmt_iterator iter = gsi_for_stmt (call);
|
|
1884
|
|
1885 chkp_build_bndstx (addr, ptr, bounds, gsi);
|
|
1886 gsi_remove (&iter, true);
|
|
1887
|
|
1888 return;
|
|
1889 }
|
|
1890
|
|
1891 if (!flag_chkp_instrument_calls)
|
|
1892 return;
|
|
1893
|
|
1894 /* We instrument only some subset of builtins. We also instrument
|
|
1895 builtin calls to be inlined. */
|
|
1896 if (fndecl
|
|
1897 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
|
|
1898 && !chkp_instrument_normal_builtin (fndecl))
|
|
1899 {
|
|
1900 if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)))
|
|
1901 return;
|
|
1902
|
|
1903 struct cgraph_node *clone = chkp_maybe_create_clone (fndecl);
|
|
1904 if (!clone
|
|
1905 || !gimple_has_body_p (clone->decl))
|
|
1906 return;
|
|
1907 }
|
|
1908
|
|
1909 /* If function decl is available then use it for
|
|
1910 formal arguments list. Otherwise use function type. */
|
|
1911 if (fndecl
|
|
1912 && DECL_ARGUMENTS (fndecl)
|
|
1913 && gimple_call_fntype (call) == TREE_TYPE (fndecl))
|
|
1914 first_formal_arg = DECL_ARGUMENTS (fndecl);
|
|
1915 else
|
|
1916 {
|
|
1917 first_formal_arg = TYPE_ARG_TYPES (fntype);
|
|
1918 use_fntype = true;
|
|
1919 }
|
|
1920
|
|
1921 /* Fill vector of new call args. */
|
|
1922 vec<tree> new_args = vNULL;
|
|
1923 new_args.create (gimple_call_num_args (call));
|
|
1924 arg = first_formal_arg;
|
|
1925 for (arg_no = 0; arg_no < gimple_call_num_args (call); arg_no++)
|
|
1926 {
|
|
1927 tree call_arg = gimple_call_arg (call, arg_no);
|
|
1928 tree type;
|
|
1929
|
|
1930 /* Get arg type using formal argument description
|
|
1931 or actual argument type. */
|
|
1932 if (arg)
|
|
1933 if (use_fntype)
|
|
1934 if (TREE_VALUE (arg) != void_type_node)
|
|
1935 {
|
|
1936 type = TREE_VALUE (arg);
|
|
1937 arg = TREE_CHAIN (arg);
|
|
1938 }
|
|
1939 else
|
|
1940 type = TREE_TYPE (call_arg);
|
|
1941 else
|
|
1942 {
|
|
1943 type = TREE_TYPE (arg);
|
|
1944 arg = TREE_CHAIN (arg);
|
|
1945 }
|
|
1946 else
|
|
1947 type = TREE_TYPE (call_arg);
|
|
1948
|
|
1949 new_args.safe_push (call_arg);
|
|
1950
|
|
1951 if (BOUNDED_TYPE_P (type)
|
|
1952 || pass_by_reference (NULL, TYPE_MODE (type), type, true))
|
|
1953 new_args.safe_push (chkp_find_bounds (call_arg, gsi));
|
|
1954 else if (chkp_type_has_pointer (type))
|
|
1955 {
|
|
1956 HOST_WIDE_INT max_bounds
|
|
1957 = TREE_INT_CST_LOW (TYPE_SIZE (type)) / POINTER_SIZE;
|
|
1958 tree *all_bounds = (tree *)xmalloc (sizeof (tree) * max_bounds);
|
|
1959 HOST_WIDE_INT bnd_no;
|
|
1960
|
|
1961 memset (all_bounds, 0, sizeof (tree) * max_bounds);
|
|
1962
|
|
1963 chkp_find_bounds_for_elem (call_arg, all_bounds, 0, gsi);
|
|
1964
|
|
1965 for (bnd_no = 0; bnd_no < max_bounds; bnd_no++)
|
|
1966 if (all_bounds[bnd_no])
|
|
1967 new_args.safe_push (all_bounds[bnd_no]);
|
|
1968
|
|
1969 free (all_bounds);
|
|
1970 }
|
|
1971 }
|
|
1972
|
|
1973 if (new_args.length () == gimple_call_num_args (call))
|
|
1974 new_call = call;
|
|
1975 else
|
|
1976 {
|
|
1977 new_call = gimple_build_call_vec (gimple_op (call, 1), new_args);
|
|
1978 gimple_call_set_lhs (new_call, gimple_call_lhs (call));
|
|
1979 gimple_call_copy_flags (new_call, call);
|
|
1980 gimple_call_set_chain (new_call, gimple_call_chain (call));
|
|
1981 }
|
|
1982 new_args.release ();
|
|
1983
|
|
1984 /* For direct calls fndecl is replaced with instrumented version. */
|
|
1985 if (fndecl)
|
|
1986 {
|
|
1987 tree new_decl = chkp_maybe_create_clone (fndecl)->decl;
|
|
1988 gimple_call_set_fndecl (new_call, new_decl);
|
|
1989 /* In case of a type cast we should modify used function
|
|
1990 type instead of using type of new fndecl. */
|
|
1991 if (gimple_call_fntype (call) != TREE_TYPE (fndecl))
|
|
1992 {
|
|
1993 tree type = gimple_call_fntype (call);
|
|
1994 type = chkp_copy_function_type_adding_bounds (type);
|
|
1995 gimple_call_set_fntype (new_call, type);
|
|
1996 }
|
|
1997 else
|
|
1998 gimple_call_set_fntype (new_call, TREE_TYPE (new_decl));
|
|
1999 }
|
|
2000 /* For indirect call we should fix function pointer type if
|
|
2001 pass some bounds. */
|
|
2002 else if (new_call != call)
|
|
2003 {
|
|
2004 tree type = gimple_call_fntype (call);
|
|
2005 type = chkp_copy_function_type_adding_bounds (type);
|
|
2006 gimple_call_set_fntype (new_call, type);
|
|
2007 }
|
|
2008
|
|
2009 /* replace old call statement with the new one. */
|
|
2010 if (call != new_call)
|
|
2011 {
|
|
2012 FOR_EACH_SSA_TREE_OPERAND (op, call, iter, SSA_OP_ALL_DEFS)
|
|
2013 {
|
|
2014 SSA_NAME_DEF_STMT (op) = new_call;
|
|
2015 }
|
|
2016 gsi_replace (gsi, new_call, true);
|
|
2017 }
|
|
2018 else
|
|
2019 update_stmt (new_call);
|
|
2020
|
|
2021 gimple_call_set_with_bounds (new_call, true);
|
|
2022 }
|
|
2023
|
|
/* Return constant static bounds var with specified bounds LB and UB.
   If such var does not exist then a new var is created with specified
   NAME.  */
static tree
chkp_make_static_const_bounds (HOST_WIDE_INT lb,
			       HOST_WIDE_INT ub,
			       const char *name)
{
  tree id = get_identifier (name);
  tree var;
  varpool_node *node;
  symtab_node *snode;

  /* Build a public static VAR_DECL of the bounds type; the assembler
     name (derived from NAME) is what makes it shareable across units.  */
  var = build_decl (UNKNOWN_LOCATION, VAR_DECL, id,
		    pointer_bounds_type_node);
  TREE_STATIC (var) = 1;
  TREE_PUBLIC (var) = 1;

  /* With LTO we may have constant bounds already in varpool.
     Try to find it.  */
  if ((snode = symtab_node::get_for_asmname (DECL_ASSEMBLER_NAME (var))))
    {
      /* We don't allow this symbol usage for non bounds.  */
      if (snode->type != SYMTAB_VARIABLE
	  || !POINTER_BOUNDS_P (snode->decl))
	sorry ("-fcheck-pointer-bounds requires %qs "
	       "name for internal usage",
	       IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (var)));

      /* Reuse the existing symbol instead of emitting a duplicate.  */
      return snode->decl;
    }

  TREE_USED (var) = 1;
  TREE_READONLY (var) = 1;
  TREE_ADDRESSABLE (var) = 0;
  DECL_ARTIFICIAL (var) = 1;
  DECL_READ_P (var) = 1;
  /* The initializer is a target-specific constant encoding [LB, UB].  */
  DECL_INITIAL (var) = targetm.chkp_make_bounds_constant (lb, ub);
  /* COMDAT so multiple units defining the same bounds var merge.  */
  make_decl_one_only (var, DECL_ASSEMBLER_NAME (var));
  /* We may use this symbol during ctors generation in chkp_finish_file
     when all symbols are emitted.  Force output to avoid undefined
     symbols in ctors.  */
  node = varpool_node::get_create (var);
  node->force_output = 1;

  varpool_node::finalize_decl (var);

  return var;
}
|
|
2072
|
|
/* Generate code to make bounds with specified lower bound LB and SIZE.
   If AFTER is 1 then code is inserted after position pointed by ITER
   otherwise code is inserted before position pointed by ITER.
   If ITER is NULL then code is added to entry block.
   Return the SSA name holding the created bounds.  */
static tree
chkp_make_bounds (tree lb, tree size, gimple_stmt_iterator *iter, bool after)
{
  gimple_seq seq;
  gimple_stmt_iterator gsi;
  gimple *stmt;
  tree bounds;

  /* Choose the insertion point: the caller-provided iterator, or the
     start of the dedicated entry block when none is given.  */
  if (iter)
    gsi = *iter;
  else
    gsi = gsi_start_bb (chkp_get_entry_block ());

  seq = NULL;

  /* LB and SIZE may be arbitrary expressions; gimplify them into
     valid call operands, accumulating helper statements in SEQ.  */
  lb = chkp_force_gimple_call_op (lb, &seq);
  size = chkp_force_gimple_call_op (size, &seq);

  /* Build the bndmk (make-bounds) builtin call and mark it as
     checker-generated so it is not instrumented again.  */
  stmt = gimple_build_call (chkp_bndmk_fndecl, 2, lb, size);
  chkp_mark_stmt (stmt);

  bounds = chkp_get_tmp_reg (stmt);
  gimple_call_set_lhs (stmt, bounds);

  gimple_seq_add_stmt (&seq, stmt);

  /* AFTER is only honored for an explicit iterator; entry-block
     insertion always goes before the first statement.  */
  if (iter && after)
    gsi_insert_seq_after (&gsi, seq, GSI_SAME_STMT);
  else
    gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Made bounds: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
      if (iter)
	{
	  fprintf (dump_file, "  inserted before statement: ");
	  print_gimple_stmt (dump_file, gsi_stmt (*iter), 0, TDF_VOPS|TDF_MEMSYMS);
	}
      else
	fprintf (dump_file, "  at function entry\n");
    }

  /* update_stmt (stmt); */

  return bounds;
}
|
|
2125
|
|
2126 /* Return var holding zero bounds. */
|
|
2127 tree
|
|
2128 chkp_get_zero_bounds_var (void)
|
|
2129 {
|
|
2130 if (!chkp_zero_bounds_var)
|
|
2131 chkp_zero_bounds_var
|
|
2132 = chkp_make_static_const_bounds (0, -1,
|
|
2133 CHKP_ZERO_BOUNDS_VAR_NAME);
|
|
2134 return chkp_zero_bounds_var;
|
|
2135 }
|
|
2136
|
|
2137 /* Return var holding none bounds. */
|
|
2138 tree
|
|
2139 chkp_get_none_bounds_var (void)
|
|
2140 {
|
|
2141 if (!chkp_none_bounds_var)
|
|
2142 chkp_none_bounds_var
|
|
2143 = chkp_make_static_const_bounds (-1, 0,
|
|
2144 CHKP_NONE_BOUNDS_VAR_NAME);
|
|
2145 return chkp_none_bounds_var;
|
|
2146 }
|
|
2147
|
|
/* Return SSA_NAME used to represent zero bounds.  The name is cached
   in ZERO_BOUNDS and created on first use.  */
static tree
chkp_get_zero_bounds (void)
{
  if (zero_bounds)
    return zero_bounds;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Creating zero bounds...");

  if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
      || flag_chkp_use_static_const_bounds > 0)
    {
      /* Static const bounds are enabled: load the value from the
	 shared static var at function entry.  */
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple *stmt;

      zero_bounds = chkp_get_tmp_reg (NULL);
      stmt = gimple_build_assign (zero_bounds, chkp_get_zero_bounds_var ());
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else
    /* Otherwise materialize the bounds with a bndmk call (lb = 0,
       size = 0) at function entry.  */
    zero_bounds = chkp_make_bounds (integer_zero_node,
				    integer_zero_node,
				    NULL,
				    false);

  return zero_bounds;
}
|
|
2176
|
|
/* Return SSA_NAME used to represent none bounds.  The name is cached
   in NONE_BOUNDS and created on first use.  */
static tree
chkp_get_none_bounds (void)
{
  if (none_bounds)
    return none_bounds;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Creating none bounds...");


  if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
      || flag_chkp_use_static_const_bounds > 0)
    {
      /* Static const bounds are enabled: load the value from the
	 shared static var at function entry.  */
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple *stmt;

      none_bounds = chkp_get_tmp_reg (NULL);
      stmt = gimple_build_assign (none_bounds, chkp_get_none_bounds_var ());
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else
    /* Otherwise materialize the bounds with a bndmk call using
       lb = -1 and size = 2, which wraps into an always-failing
       range.  */
    none_bounds = chkp_make_bounds (integer_minus_one_node,
				    build_int_cst (size_type_node, 2),
				    NULL,
				    false);

  return none_bounds;
}
|
|
2206
|
|
2207 /* Return bounds to be used as a result of operation which
|
|
2208 should not create poiunter (e.g. MULT_EXPR). */
|
|
2209 static tree
|
|
2210 chkp_get_invalid_op_bounds (void)
|
|
2211 {
|
|
2212 return chkp_get_zero_bounds ();
|
|
2213 }
|
|
2214
|
|
2215 /* Return bounds to be used for loads of non-pointer values. */
|
|
2216 static tree
|
|
2217 chkp_get_nonpointer_load_bounds (void)
|
|
2218 {
|
|
2219 return chkp_get_zero_bounds ();
|
|
2220 }
|
|
2221
|
|
/* Return 1 if may use bndret call to get bounds for pointer
   returned by CALL.  */
static bool
chkp_call_returns_bounds_p (gcall *call)
{
  /* Among internal functions only IFN_VA_ARG produces bounds.  */
  if (gimple_call_internal_p (call))
    {
      if (gimple_call_internal_fn (call) == IFN_VA_ARG)
	return true;
      return false;
    }

  /* Narrowing builtins (both the original and the instrumented form)
     always return bounds.  */
  if (gimple_call_builtin_p (call, BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
      || chkp_gimple_call_builtin_p (call, BUILT_IN_CHKP_NARROW))
    return true;

  /* Already-instrumented calls return bounds.  */
  if (gimple_call_with_bounds_p (call))
    return true;

  tree fndecl = gimple_call_fndecl (call);

  /* Back-end builtins are never instrumented.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return false;

  if (fndecl && !chkp_instrumentable_p (fndecl))
    return false;

  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    {
      if (chkp_instrument_normal_builtin (fndecl))
	return true;

      /* A non-instrumented builtin can still return bounds if it is
	 always_inline and an instrumented clone with a body exists.  */
      if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)))
	return false;

      struct cgraph_node *clone = chkp_maybe_create_clone (fndecl);
      return (clone && gimple_has_body_p (clone->decl));
    }

  /* Ordinary (non-builtin) calls are assumed instrumented.  */
  return true;
}
|
|
2263
|
|
/* Build bounds returned by CALL and associate them with the call's
   LHS.  Return the computed bounds.  */
static tree
chkp_build_returned_bound (gcall *call)
{
  gimple_stmt_iterator gsi;
  tree bounds;
  gimple *stmt;
  tree fndecl = gimple_call_fndecl (call);
  unsigned int retflags;
  tree lhs = gimple_call_lhs (call);

  /* To avoid fixing alloca expands in targets we handle
     it separately.  Bounds are [lhs, lhs + size).  */
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (fndecl)))
    {
      tree size = gimple_call_arg (call, 0);
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_make_bounds (lhs, size, &iter, true);
    }
  /* We know bounds returned by set_bounds builtin call.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS)
    {
      tree lb = gimple_call_arg (call, 0);
      tree size = gimple_call_arg (call, 1);
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_make_bounds (lb, size, &iter, true);
    }
  /* Detect bounds initialization calls.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS)
    bounds = chkp_get_zero_bounds ();
  /* Detect bounds nullification calls.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS)
    bounds = chkp_get_none_bounds ();
  /* Detect bounds copy calls.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
    {
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_find_bounds (gimple_call_arg (call, 1), &iter);
    }
  /* Do not use retbnd when returned bounds are equal to some
     of passed bounds.  */
  else if (((retflags = gimple_call_return_flags (call)) & ERF_RETURNS_ARG)
	   && (retflags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (call))
    {
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      unsigned int retarg = retflags & ERF_RETURN_ARG_MASK, argno;
      if (gimple_call_with_bounds_p (call))
	{
	  /* RETARG counts only the original (non-bounds) arguments;
	     in an instrumented call skip the interleaved bounds args
	     to translate it into an actual argument index.  */
	  for (argno = 0; argno < gimple_call_num_args (call); argno++)
	    if (!POINTER_BOUNDS_P (gimple_call_arg (call, argno)))
	      {
		if (retarg)
		  retarg--;
		else
		  break;
	      }
	}
      else
	argno = retarg;

      bounds = chkp_find_bounds (gimple_call_arg (call, argno), &iter);
    }
  else if (chkp_call_returns_bounds_p (call)
	   && BOUNDED_P (lhs))
    {
      gcc_assert (TREE_CODE (lhs) == SSA_NAME);

      /* In general case build checker builtin call to
	 obtain returned bounds.  */
      stmt = gimple_build_call (chkp_ret_bnd_fndecl, 1,
				gimple_call_lhs (call));
      chkp_mark_stmt (stmt);

      gsi = gsi_for_stmt (call);
      gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);

      bounds = chkp_get_tmp_reg (stmt);
      gimple_call_set_lhs (stmt, bounds);

      update_stmt (stmt);
    }
  else
    bounds = chkp_get_zero_bounds ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Built returned bounds (");
      print_generic_expr (dump_file, bounds);
      fprintf (dump_file, ") for call: ");
      print_gimple_stmt (dump_file, call, 0, TDF_VOPS | TDF_MEMSYMS);
    }

  bounds = chkp_maybe_copy_and_register_bounds (lhs, bounds);

  return bounds;
}
|
|
2370
|
|
2371 /* Return bounds used as returned by call
|
|
2372 which produced SSA name VAL. */
|
|
2373 gcall *
|
|
2374 chkp_retbnd_call_by_val (tree val)
|
|
2375 {
|
|
2376 if (TREE_CODE (val) != SSA_NAME)
|
|
2377 return NULL;
|
|
2378
|
|
2379 gcc_assert (gimple_code (SSA_NAME_DEF_STMT (val)) == GIMPLE_CALL);
|
|
2380
|
|
2381 imm_use_iterator use_iter;
|
|
2382 use_operand_p use_p;
|
|
2383 FOR_EACH_IMM_USE_FAST (use_p, use_iter, val)
|
|
2384 if (chkp_gimple_call_builtin_p (USE_STMT (use_p), BUILT_IN_CHKP_BNDRET))
|
|
2385 return as_a <gcall *> (USE_STMT (use_p));
|
|
2386
|
|
2387 return NULL;
|
|
2388 }
|
|
2389
|
|
2390 /* Check the next parameter for the given PARM is bounds
|
|
2391 and return it's default SSA_NAME (create if required). */
|
|
2392 static tree
|
|
2393 chkp_get_next_bounds_parm (tree parm)
|
|
2394 {
|
|
2395 tree bounds = TREE_CHAIN (parm);
|
|
2396 gcc_assert (POINTER_BOUNDS_P (bounds));
|
|
2397 bounds = ssa_default_def (cfun, bounds);
|
|
2398 if (!bounds)
|
|
2399 {
|
|
2400 bounds = make_ssa_name (TREE_CHAIN (parm), gimple_build_nop ());
|
|
2401 set_ssa_default_def (cfun, TREE_CHAIN (parm), bounds);
|
|
2402 }
|
|
2403 return bounds;
|
|
2404 }
|
|
2405
|
|
/* Return bounds to be used for input argument PARM (an SSA name
   whose var is a PARM_DECL).  Bounds are looked up in the registered
   bounds maps first and computed (then registered) otherwise.  */
static tree
chkp_get_bound_for_parm (tree parm)
{
  tree decl = SSA_NAME_VAR (parm);
  tree bounds;

  gcc_assert (TREE_CODE (decl) == PARM_DECL);

  /* Check both the SSA name and the underlying decl for already
     registered bounds.  */
  bounds = chkp_get_registered_bounds (parm);

  if (!bounds)
    bounds = chkp_get_registered_bounds (decl);

  if (!bounds)
    {
      tree orig_decl = cgraph_node::get (cfun->decl)->orig_decl;

      /* For static chain param we return zero bounds
	 because currently we do not check dereferences
	 of this pointer.  */
      if (cfun->static_chain_decl == decl)
	bounds = chkp_get_zero_bounds ();
      /* If non instrumented runtime is used then it may be useful
	 to use zero bounds for input arguments of main
	 function.  */
      else if (flag_chkp_zero_input_bounds_for_main
	       && id_equal (DECL_ASSEMBLER_NAME (orig_decl), "main"))
	bounds = chkp_get_zero_bounds ();
      else if (BOUNDED_P (parm))
	{
	  /* Pointer parameters get their bounds from the companion
	     bounds parameter added during instrumentation.  */
	  bounds = chkp_get_next_bounds_parm (decl);
	  bounds = chkp_maybe_copy_and_register_bounds (decl, bounds);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Built arg bounds (");
	      print_generic_expr (dump_file, bounds);
	      fprintf (dump_file, ") for arg: ");
	      print_node (dump_file, "", decl, 0);
	    }
	}
      else
	bounds = chkp_get_zero_bounds ();
    }

  /* Make sure the SSA name itself has its bounds registered.  */
  if (!chkp_get_registered_bounds (parm))
    bounds = chkp_maybe_copy_and_register_bounds (parm, bounds);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Using bounds ");
      print_generic_expr (dump_file, bounds);
      fprintf (dump_file, " for parm ");
      print_generic_expr (dump_file, parm);
      fprintf (dump_file, " of type ");
      print_generic_expr (dump_file, TREE_TYPE (parm));
      fprintf (dump_file, ".\n");
    }

  return bounds;
}
|
|
2468
|
|
/* Build and return CALL_EXPR for bndldx builtin with specified
   arguments ADDR and PTR.  (The original comment said "bndstx";
   this function builds a bounds *load*.)  */
tree
chkp_build_bndldx_call (tree addr, tree ptr)
{
  tree fn = build1 (ADDR_EXPR,
		    build_pointer_type (TREE_TYPE (chkp_bndldx_fndecl)),
		    chkp_bndldx_fndecl);
  tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndldx_fndecl)),
			       fn, 2, addr, ptr);
  /* Mark the call as checker-generated so it is not instrumented.  */
  CALL_WITH_BOUNDS_P (call) = true;
  return call;
}
|
|
2482
|
|
/* Insert code to load bounds for PTR located by ADDR.
   Code is inserted after position pointed by GSI.
   Loaded bounds are returned.  */
static tree
chkp_build_bndldx (tree addr, tree ptr, gimple_stmt_iterator *gsi)
{
  gimple_seq seq;
  gimple *stmt;
  tree bounds;

  seq = NULL;

  /* ADDR and PTR may be arbitrary expressions; gimplify them into
     valid call operands, accumulating helper statements in SEQ.  */
  addr = chkp_force_gimple_call_op (addr, &seq);
  ptr = chkp_force_gimple_call_op (ptr, &seq);

  stmt = gimple_build_call (chkp_bndldx_fndecl, 2, addr, ptr);
  chkp_mark_stmt (stmt);
  bounds = chkp_get_tmp_reg (stmt);
  gimple_call_set_lhs (stmt, bounds);

  gimple_seq_add_stmt (&seq, stmt);

  /* GSI_CONTINUE_LINKING leaves GSI at the last inserted statement,
     which callers (e.g. chkp_fixup_inlined_call) rely on.  */
  gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Generated bndldx for pointer ");
      print_generic_expr (dump_file, ptr);
      fprintf (dump_file, ": ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS | TDF_MEMSYMS);
    }

  return bounds;
}
|
|
2517
|
|
2518 /* Build and return CALL_EXPR for bndstx builtin with specified
|
|
2519 arguments. */
|
|
2520 tree
|
|
2521 chkp_build_bndstx_call (tree addr, tree ptr, tree bounds)
|
|
2522 {
|
|
2523 tree fn = build1 (ADDR_EXPR,
|
|
2524 build_pointer_type (TREE_TYPE (chkp_bndstx_fndecl)),
|
|
2525 chkp_bndstx_fndecl);
|
|
2526 tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndstx_fndecl)),
|
|
2527 fn, 3, ptr, bounds, addr);
|
|
2528 CALL_WITH_BOUNDS_P (call) = true;
|
|
2529 return call;
|
|
2530 }
|
|
2531
|
|
/* Insert code to store BOUNDS for PTR stored by ADDR.
   New statements are inserted after position pointed
   by GSI.  */
void
chkp_build_bndstx (tree addr, tree ptr, tree bounds,
		   gimple_stmt_iterator *gsi)
{
  gimple_seq seq;
  gimple *stmt;

  seq = NULL;

  /* ADDR and PTR may be arbitrary expressions; gimplify them into
     valid call operands, accumulating helper statements in SEQ.  */
  addr = chkp_force_gimple_call_op (addr, &seq);
  ptr = chkp_force_gimple_call_op (ptr, &seq);

  stmt = gimple_build_call (chkp_bndstx_fndecl, 3, ptr, bounds, addr);
  chkp_mark_stmt (stmt);
  gimple_call_set_with_bounds (stmt, true);

  gimple_seq_add_stmt (&seq, stmt);

  gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Generated bndstx for pointer store ");
      print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_VOPS|TDF_MEMSYMS);
      print_gimple_stmt (dump_file, stmt, 2, TDF_VOPS|TDF_MEMSYMS);
    }
}
|
|
2562
|
|
/* This function is called when call statement
   is inlined and therefore we can't use bndret
   for its LHS anymore.  Function fixes bndret
   call using new RHS value if possible.  */
void
chkp_fixup_inlined_call (tree lhs, tree rhs)
{
  tree addr, bounds;
  gcall *retbnd, *bndldx;

  /* Nothing to fix if LHS carries no bounds.  */
  if (!BOUNDED_P (lhs))
    return;

  /* Search for retbnd call.  */
  retbnd = chkp_retbnd_call_by_val (lhs);
  if (!retbnd)
    return;

  /* Currently only handle cases when call is replaced
     with a memory access.  In this case bndret call
     may be replaced with bndldx call.  Otherwise we
     have to search for bounds which may cause wrong
     result due to various optimizations applied.  */
  switch (TREE_CODE (rhs))
    {
    case VAR_DECL:
      /* Register variables have no memory address to load from.  */
      if (DECL_REGISTER (rhs))
	return;
      break;

    case MEM_REF:
      break;

    case ARRAY_REF:
    case COMPONENT_REF:
      /* Only handle refs whose base is an addressable decl or a
	 MEM_REF.  */
      addr = get_base_address (rhs);
      if (!DECL_P (addr)
	  && TREE_CODE (addr) != MEM_REF)
	return;
      if (DECL_P (addr) && DECL_REGISTER (addr))
	return;
      break;

    default:
      return;
    }

  /* Create a new statements sequence with bndldx call.  */
  gimple_stmt_iterator gsi = gsi_for_stmt (retbnd);
  addr = build_fold_addr_expr (rhs);
  chkp_build_bndldx (addr, lhs, &gsi);
  /* chkp_build_bndldx leaves GSI at the inserted bndldx call.  */
  bndldx = as_a <gcall *> (gsi_stmt (gsi));

  /* Remove bndret call.  */
  bounds = gimple_call_lhs (retbnd);
  gsi = gsi_for_stmt (retbnd);
  gsi_remove (&gsi, true);

  /* Link new bndldx call: it now defines the bounds the removed
     bndret used to define.  */
  gimple_call_set_lhs (bndldx, bounds);
  update_stmt (bndldx);
}
|
|
2625
|
|
/* Compute bounds for pointer NODE which was assigned in
   assignment statement ASSIGN.  Return computed bounds.
   NOTE(review): the nested if/else ladders in the binary-op case
   below are intentionally braceless; each `else` binds to the
   nearest `if` — do not reformat without care.  */
static tree
chkp_compute_bounds_for_assignment (tree node, gimple *assign)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (assign);
  tree rhs1 = gimple_assign_rhs1 (assign);
  tree bounds = NULL_TREE;
  gimple_stmt_iterator iter = gsi_for_stmt (assign);
  /* BASE tracks which operand the resulting bounds came from, used
     for the abnormal-PHI copy below.  */
  tree base = NULL;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Computing bounds for assignment: ");
      print_gimple_stmt (dump_file, assign, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  switch (rhs_code)
    {
    case MEM_REF:
    case TARGET_MEM_REF:
    case COMPONENT_REF:
    case ARRAY_REF:
      /* We need to load bounds from the bounds table.  */
      bounds = chkp_find_bounds_loaded (node, rhs1, &iter);
      break;

    case VAR_DECL:
    case SSA_NAME:
    case ADDR_EXPR:
    case POINTER_PLUS_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
    case INTEGER_CST:
      /* Bounds are just propagated from RHS.  */
      bounds = chkp_find_bounds (rhs1, &iter);
      base = rhs1;
      break;

    case VIEW_CONVERT_EXPR:
      /* Bounds are just propagated from RHS.  */
      bounds = chkp_find_bounds (TREE_OPERAND (rhs1, 0), &iter);
      break;

    case PARM_DECL:
      if (BOUNDED_P (rhs1))
	{
	  /* We need to load bounds from the bounds table.  */
	  bounds = chkp_build_bndldx (chkp_build_addr_expr (rhs1),
				      node, &iter);
	  TREE_ADDRESSABLE (rhs1) = 1;
	}
      else
	bounds = chkp_get_nonpointer_load_bounds ();
      break;

    case MINUS_EXPR:
    case PLUS_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      {
	tree rhs2 = gimple_assign_rhs2 (assign);
	tree bnd1 = chkp_find_bounds (rhs1, &iter);
	tree bnd2 = chkp_find_bounds (rhs2, &iter);

	/* First we try to check types of operands.  If it
	   does not help then look at bound values.

	   If some bounds are incomplete and other are
	   not proven to be valid (i.e. also incomplete
	   or invalid because value is not pointer) then
	   resulting value is incomplete and will be
	   recomputed later in chkp_finish_incomplete_bounds.  */
	if (BOUNDED_P (rhs1)
	    && !BOUNDED_P (rhs2))
	  bounds = bnd1;
	else if (BOUNDED_P (rhs2)
		 && !BOUNDED_P (rhs1)
		 && rhs_code != MINUS_EXPR)
	  bounds = bnd2;
	else if (chkp_incomplete_bounds (bnd1))
	  if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR
	      && !chkp_incomplete_bounds (bnd2))
	    bounds = bnd2;
	  else
	    bounds = incomplete_bounds;
	else if (chkp_incomplete_bounds (bnd2))
	  if (chkp_valid_bounds (bnd1)
	      && !chkp_incomplete_bounds (bnd1))
	    bounds = bnd1;
	  else
	    bounds = incomplete_bounds;
	else if (!chkp_valid_bounds (bnd1))
	  if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR)
	    bounds = bnd2;
	  else if (bnd2 == chkp_get_zero_bounds ())
	    bounds = bnd2;
	  else
	    bounds = bnd1;
	else if (!chkp_valid_bounds (bnd2))
	  bounds = bnd1;
	else
	  /* Seems both operands may have valid bounds
	     (e.g. pointer minus pointer).  In such case
	     use default invalid op bounds.  */
	  bounds = chkp_get_invalid_op_bounds ();

	base = (bounds == bnd1) ? rhs1 : (bounds == bnd2) ? rhs2 : NULL;
      }
      break;

    case BIT_NOT_EXPR:
    case NEGATE_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case MULT_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case EXACT_DIV_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* No valid bounds may be produced by these exprs.  */
      bounds = chkp_get_invalid_op_bounds ();
      break;

    case COND_EXPR:
      {
	tree val1 = gimple_assign_rhs2 (assign);
	tree val2 = gimple_assign_rhs3 (assign);
	tree bnd1 = chkp_find_bounds (val1, &iter);
	tree bnd2 = chkp_find_bounds (val2, &iter);
	gimple *stmt;

	if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
	  bounds = incomplete_bounds;
	else if (bnd1 == bnd2)
	  bounds = bnd1;
	else
	  {
	    /* Select bounds at runtime with a COND_EXPR mirroring the
	       original condition.  */
	    rhs1 = unshare_expr (rhs1);

	    bounds = chkp_get_tmp_reg (assign);
	    stmt = gimple_build_assign (bounds, COND_EXPR, rhs1, bnd1, bnd2);
	    gsi_insert_after (&iter, stmt, GSI_SAME_STMT);

	    if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
	      chkp_mark_invalid_bounds (bounds);
	  }
      }
      break;

    case MAX_EXPR:
    case MIN_EXPR:
      {
	tree rhs2 = gimple_assign_rhs2 (assign);
	tree bnd1 = chkp_find_bounds (rhs1, &iter);
	tree bnd2 = chkp_find_bounds (rhs2, &iter);

	if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
	  bounds = incomplete_bounds;
	else if (bnd1 == bnd2)
	  bounds = bnd1;
	else
	  {
	    /* Pick the bounds of whichever operand wins the MIN/MAX
	       via a runtime comparison.  */
	    gimple *stmt;
	    tree cond = build2 (rhs_code == MAX_EXPR ? GT_EXPR : LT_EXPR,
				boolean_type_node, rhs1, rhs2);
	    bounds = chkp_get_tmp_reg (assign);
	    stmt = gimple_build_assign (bounds, COND_EXPR, cond, bnd1, bnd2);

	    gsi_insert_after (&iter, stmt, GSI_SAME_STMT);

	    if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
	      chkp_mark_invalid_bounds (bounds);
	  }
      }
      break;

    default:
      bounds = chkp_get_zero_bounds ();
      warning (0, "pointer bounds were lost due to unexpected expression %s",
	       get_tree_code_name (rhs_code));
    }

  gcc_assert (bounds);

  /* We may reuse bounds of other pointer we copy/modify.  But it is not
     allowed for abnormal ssa names.  If we produced a pointer using
     abnormal ssa name, we better make a bounds copy to avoid coalescing
     issues.  */
  if (base
      && TREE_CODE (base) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (base))
    {
      gimple *stmt = gimple_build_assign (chkp_get_tmp_reg (NULL), bounds);
      gsi_insert_after (&iter, stmt, GSI_SAME_STMT);
      bounds = gimple_assign_lhs (stmt);
    }

  if (node)
    bounds = chkp_maybe_copy_and_register_bounds (node, bounds);

  return bounds;
}
|
|
2848
|
|
/* Compute bounds for ssa name NODE defined by DEF_STMT pointed by ITER.

   There are just few statement codes allowed: NOP (for default ssa names),
   ASSIGN, CALL, PHI, ASM.

   Return computed bounds.  */
static tree
chkp_get_bounds_by_definition (tree node, gimple *def_stmt,
			       gphi_iterator *iter)
{
  tree var, bounds;
  enum gimple_code code = gimple_code (def_stmt);
  gphi *stmt;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Searching for bounds for node: ");
      print_generic_expr (dump_file, node);

      fprintf (dump_file, " using its definition: ");
      print_gimple_stmt (dump_file, def_stmt, 0, TDF_VOPS | TDF_MEMSYMS);
    }

  switch (code)
    {
    case GIMPLE_NOP:
      /* Default definition: bounds depend on the kind of decl the
	 SSA name is based on.  */
      var = SSA_NAME_VAR (node);
      switch (TREE_CODE (var))
	{
	case PARM_DECL:
	  bounds = chkp_get_bound_for_parm (node);
	  break;

	case VAR_DECL:
	  /* For uninitialized pointers use none bounds.  */
	  bounds = chkp_get_none_bounds ();
	  bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
	  break;

	case RESULT_DECL:
	  {
	    tree base_type;

	    gcc_assert (TREE_CODE (TREE_TYPE (node)) == REFERENCE_TYPE);

	    base_type = TREE_TYPE (TREE_TYPE (node));

	    /* The referenced type must have a known, non-zero,
	       constant size for bounds creation below.  */
	    gcc_assert (TYPE_SIZE (base_type)
			&& TREE_CODE (TYPE_SIZE (base_type)) == INTEGER_CST
			&& tree_to_uhwi (TYPE_SIZE (base_type)) != 0);

	    bounds = chkp_make_bounds (node, TYPE_SIZE_UNIT (base_type),
				       NULL, false);
	    bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
	  }
	  break;

	default:
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Unexpected var with no definition\n");
	      print_generic_expr (dump_file, var);
	    }
	  internal_error ("chkp_get_bounds_by_definition: Unexpected var of type %s",
			  get_tree_code_name (TREE_CODE (var)));
	}
      break;

    case GIMPLE_ASSIGN:
      bounds = chkp_compute_bounds_for_assignment (node, def_stmt);
      break;

    case GIMPLE_CALL:
      bounds = chkp_build_returned_bound (as_a <gcall *> (def_stmt));
      break;

    case GIMPLE_PHI:
      /* For a PHI, create a matching bounds PHI in the same block.
	 Pointers occurring in abnormal PHIs get a dedicated bounds
	 variable (or fresh temporary SSA name) to avoid coalescing
	 problems; others share the common bounds temporary.  */
      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (node))
	if (SSA_NAME_VAR (node))
	  var = chkp_get_bounds_var (SSA_NAME_VAR (node));
	else
	  var = make_temp_ssa_name (pointer_bounds_type_node,
				    NULL,
				    CHKP_BOUND_TMP_NAME);
      else
	var = chkp_get_tmp_var ();
      stmt = create_phi_node (var, gimple_bb (def_stmt));
      bounds = gimple_phi_result (stmt);
      /* Hand the new PHI's position back to the caller so it can
	 fill in the bounds PHI arguments.  */
      *iter = gsi_for_phi (stmt);

      bounds = chkp_maybe_copy_and_register_bounds (node, bounds);

      /* Created bounds do not have all phi args computed and
	 therefore we do not know if there is a valid source
	 of bounds for that node.  Therefore we mark bounds
	 as incomplete and then recompute them when all phi
	 args are computed.  */
      chkp_register_incomplete_bounds (bounds, node);
      break;

    case GIMPLE_ASM:
      /* No bounds can flow out of an asm; use zero bounds.  */
      bounds = chkp_get_zero_bounds ();
      bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
      break;

    default:
      internal_error ("chkp_get_bounds_by_definition: Unexpected GIMPLE code %s",
		      gimple_code_name[code]);
    }

  return bounds;
}
|
|
2961
|
|
2962 /* Return CALL_EXPR for bndmk with specified LOWER_BOUND and SIZE. */
|
|
2963 tree
|
|
2964 chkp_build_make_bounds_call (tree lower_bound, tree size)
|
|
2965 {
|
|
2966 tree call = build1 (ADDR_EXPR,
|
|
2967 build_pointer_type (TREE_TYPE (chkp_bndmk_fndecl)),
|
|
2968 chkp_bndmk_fndecl);
|
|
2969 return build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndmk_fndecl)),
|
|
2970 call, 2, lower_bound, size);
|
|
2971 }
|
|
2972
|
|
2973 /* Create static bounds var of specfified OBJ which is
|
|
2974 is either VAR_DECL or string constant. */
|
|
2975 static tree
|
|
2976 chkp_make_static_bounds (tree obj)
|
|
2977 {
|
|
2978 static int string_id = 1;
|
|
2979 static int var_id = 1;
|
|
2980 tree *slot;
|
|
2981 const char *var_name;
|
|
2982 char *bnd_var_name;
|
|
2983 tree bnd_var;
|
|
2984
|
|
2985 /* First check if we already have required var. */
|
|
2986 if (chkp_static_var_bounds)
|
|
2987 {
|
|
2988 /* For vars we use assembler name as a key in
|
|
2989 chkp_static_var_bounds map. It allows to
|
|
2990 avoid duplicating bound vars for decls
|
|
2991 sharing assembler name. */
|
|
2992 if (VAR_P (obj))
|
|
2993 {
|
|
2994 tree name = DECL_ASSEMBLER_NAME (obj);
|
|
2995 slot = chkp_static_var_bounds->get (name);
|
|
2996 if (slot)
|
|
2997 return *slot;
|
|
2998 }
|
|
2999 else
|
|
3000 {
|
|
3001 slot = chkp_static_var_bounds->get (obj);
|
|
3002 if (slot)
|
|
3003 return *slot;
|
|
3004 }
|
|
3005 }
|
|
3006
|
|
3007 /* Build decl for bounds var. */
|
|
3008 if (VAR_P (obj))
|
|
3009 {
|
|
3010 if (DECL_IGNORED_P (obj))
|
|
3011 {
|
|
3012 bnd_var_name = (char *) xmalloc (strlen (CHKP_VAR_BOUNDS_PREFIX) + 10);
|
|
3013 sprintf (bnd_var_name, "%s%d", CHKP_VAR_BOUNDS_PREFIX, var_id++);
|
|
3014 }
|
|
3015 else
|
|
3016 {
|
|
3017 var_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (obj));
|
|
3018
|
|
3019 /* For hidden symbols we want to skip first '*' char. */
|
|
3020 if (*var_name == '*')
|
|
3021 var_name++;
|
|
3022
|
|
3023 bnd_var_name = (char *) xmalloc (strlen (var_name)
|
|
3024 + strlen (CHKP_BOUNDS_OF_SYMBOL_PREFIX) + 1);
|
|
3025 strcpy (bnd_var_name, CHKP_BOUNDS_OF_SYMBOL_PREFIX);
|
|
3026 strcat (bnd_var_name, var_name);
|
|
3027 }
|
|
3028
|
|
3029 bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
|
|
3030 get_identifier (bnd_var_name),
|
|
3031 pointer_bounds_type_node);
|
|
3032
|
|
3033 /* Address of the obj will be used as lower bound. */
|
|
3034 TREE_ADDRESSABLE (obj) = 1;
|
|
3035 }
|
|
3036 else
|
|
3037 {
|
|
3038 bnd_var_name = (char *) xmalloc (strlen (CHKP_STRING_BOUNDS_PREFIX) + 10);
|
|
3039 sprintf (bnd_var_name, "%s%d", CHKP_STRING_BOUNDS_PREFIX, string_id++);
|
|
3040
|
|
3041 bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
|
|
3042 get_identifier (bnd_var_name),
|
|
3043 pointer_bounds_type_node);
|
|
3044 }
|
|
3045
|
|
3046 free (bnd_var_name);
|
|
3047
|
|
3048 TREE_PUBLIC (bnd_var) = 0;
|
|
3049 TREE_USED (bnd_var) = 1;
|
|
3050 TREE_READONLY (bnd_var) = 0;
|
|
3051 TREE_STATIC (bnd_var) = 1;
|
|
3052 TREE_ADDRESSABLE (bnd_var) = 0;
|
|
3053 DECL_ARTIFICIAL (bnd_var) = 1;
|
|
3054 DECL_COMMON (bnd_var) = 1;
|
|
3055 DECL_COMDAT (bnd_var) = 1;
|
|
3056 DECL_READ_P (bnd_var) = 1;
|
|
3057 DECL_INITIAL (bnd_var) = chkp_build_addr_expr (obj);
|
|
3058 /* Force output similar to constant bounds.
|
|
3059 See chkp_make_static_const_bounds. */
|
|
3060 varpool_node::get_create (bnd_var)->force_output = 1;
|
|
3061 /* Mark symbol as requiring bounds initialization. */
|
|
3062 varpool_node::get_create (bnd_var)->need_bounds_init = 1;
|
|
3063 varpool_node::finalize_decl (bnd_var);
|
|
3064
|
|
3065 /* Add created var to the map to use it for other references
|
|
3066 to obj. */
|
|
3067 if (!chkp_static_var_bounds)
|
|
3068 chkp_static_var_bounds = new hash_map<tree, tree>;
|
|
3069
|
|
3070 if (VAR_P (obj))
|
|
3071 {
|
|
3072 tree name = DECL_ASSEMBLER_NAME (obj);
|
|
3073 chkp_static_var_bounds->put (name, bnd_var);
|
|
3074 }
|
|
3075 else
|
|
3076 chkp_static_var_bounds->put (obj, bnd_var);
|
|
3077
|
|
3078 return bnd_var;
|
|
3079 }
|
|
3080
|
|
/* When var has incomplete type we cannot get size to
   compute its bounds.  In such cases we use checker
   builtin call which determines object size at runtime.  */
static tree
chkp_generate_extern_var_bounds (tree var)
{
  tree bounds, size_reloc, lb, size, max_size, cond;
  gimple_stmt_iterator gsi;
  gimple_seq seq = NULL;
  gimple *stmt;

  /* If instrumentation is not enabled for vars having
     incomplete type then just return zero bounds to avoid
     checks for this var.  */
  if (!flag_chkp_incomplete_type)
    return chkp_get_zero_bounds ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Generating bounds for extern symbol '");
      print_generic_expr (dump_file, var);
      fprintf (dump_file, "'\n");
    }

  /* Call the chkp sizeof builtin to obtain VAR's size at runtime
     (via a size relocation resolved by the dynamic linker).  */
  stmt = gimple_build_call (chkp_sizeof_fndecl, 1, var);

  size_reloc = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
  gimple_call_set_lhs (stmt, size_reloc);

  gimple_seq_add_stmt (&seq, stmt);

  lb = chkp_build_addr_expr (var);
  size = make_ssa_name (chkp_get_size_tmp_var ());

  if (flag_chkp_zero_dynamic_size_as_infinite)
    {
      /* We should check that size relocation was resolved.
	 If it was not then use maximum possible size for the var.  */
      /* max_size = 0 - lb, i.e. everything from lb to the top of
	 the address space.  */
      max_size = build2 (MINUS_EXPR, chkp_uintptr_type, integer_zero_node,
			 fold_convert (chkp_uintptr_type, lb));
      max_size = chkp_force_gimple_call_op (max_size, &seq);

      /* size = (size_reloc != 0) ? size_reloc : max_size.  */
      cond = build2 (NE_EXPR, boolean_type_node,
		     size_reloc, integer_zero_node);
      stmt = gimple_build_assign (size, COND_EXPR, cond, size_reloc, max_size);
      gimple_seq_add_stmt (&seq, stmt);
    }
  else
    {
      stmt = gimple_build_assign (size, size_reloc);
      gimple_seq_add_stmt (&seq, stmt);
    }

  /* The whole size computation sequence goes to the start of the
     function's entry block.  */
  gsi = gsi_start_bb (chkp_get_entry_block ());
  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);

  bounds = chkp_make_bounds (lb, size, &gsi, true);

  return bounds;
}
|
|
3141
|
|
3142 /* Return 1 if TYPE has fields with zero size or fields
|
|
3143 marked with chkp_variable_size attribute. */
|
|
3144 bool
|
|
3145 chkp_variable_size_type (tree type)
|
|
3146 {
|
|
3147 bool res = false;
|
|
3148 tree field;
|
|
3149
|
|
3150 if (RECORD_OR_UNION_TYPE_P (type))
|
|
3151 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
|
|
3152 {
|
|
3153 if (TREE_CODE (field) == FIELD_DECL)
|
|
3154 res = res
|
|
3155 || lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
|
|
3156 || chkp_variable_size_type (TREE_TYPE (field));
|
|
3157 }
|
|
3158 else
|
|
3159 res = !TYPE_SIZE (type)
|
|
3160 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
|
|
3161 || tree_to_uhwi (TYPE_SIZE (type)) == 0;
|
|
3162
|
|
3163 return res;
|
|
3164 }
|
|
3165
|
|
/* Compute and return bounds for address of DECL which is
   one of VAR_DECL, PARM_DECL, RESULT_DECL.  */
static tree
chkp_get_bounds_for_decl_addr (tree decl)
{
  tree bounds;

  gcc_assert (VAR_P (decl)
	      || TREE_CODE (decl) == PARM_DECL
	      || TREE_CODE (decl) == RESULT_DECL);

  /* Reuse bounds previously registered for this address.  */
  bounds = chkp_get_registered_addr_bounds (decl);

  if (bounds)
    return bounds;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Building bounds for address of decl ");
      print_generic_expr (dump_file, decl);
      fprintf (dump_file, "\n");
    }

  /* Use zero bounds if size is unknown and checks for
     unknown sizes are restricted.  */
  if ((!DECL_SIZE (decl)
       || (chkp_variable_size_type (TREE_TYPE (decl))
	   && (TREE_STATIC (decl)
	       || DECL_EXTERNAL (decl)
	       || TREE_PUBLIC (decl))))
      && !flag_chkp_incomplete_type)
    return chkp_get_zero_bounds ();

  /* A void decl has no meaningful extent to bound.  */
  if (VOID_TYPE_P (TREE_TYPE (decl)))
    return chkp_get_zero_bounds ();

  /* For non-thread-local static/extern/public vars we may load
     bounds from a statically initialized bounds var instead of
     computing them inline.  */
  if (flag_chkp_use_static_bounds
      && VAR_P (decl)
      && (TREE_STATIC (decl)
	  || DECL_EXTERNAL (decl)
	  || TREE_PUBLIC (decl))
      && !DECL_THREAD_LOCAL_P (decl))
    {
      tree bnd_var = chkp_make_static_bounds (decl);
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple *stmt;

      /* Load the static bounds var once at function entry.  */
      bounds = chkp_get_tmp_reg (NULL);
      stmt = gimple_build_assign (bounds, bnd_var);
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else if (!DECL_SIZE (decl)
	   || (chkp_variable_size_type (TREE_TYPE (decl))
	       && (TREE_STATIC (decl)
		   || DECL_EXTERNAL (decl)
		   || TREE_PUBLIC (decl))))
    {
      /* Size unknown at compile time: emit a runtime size query.
	 (Only reached when flag_chkp_incomplete_type is set; see the
	 early return above.)  */
      gcc_assert (VAR_P (decl));
      bounds = chkp_generate_extern_var_bounds (decl);
    }
  else
    {
      /* Common case: bounds are [&decl, &decl + DECL_SIZE_UNIT).  */
      tree lb = chkp_build_addr_expr (decl);
      bounds = chkp_make_bounds (lb, DECL_SIZE_UNIT (decl), NULL, false);
    }

  return bounds;
}
|
|
3234
|
|
/* Compute and return bounds for constant string.  */
static tree
chkp_get_bounds_for_string_cst (tree cst)
{
  tree bounds;
  tree lb;
  tree size;

  gcc_assert (TREE_CODE (cst) == STRING_CST);

  /* Reuse bounds already registered for this constant.  */
  bounds = chkp_get_registered_bounds (cst);

  if (bounds)
    return bounds;

  /* flag_chkp_use_static_const_bounds appears to be a tri-state
     (-1/0/1): > 0 forces static bounds for constants; otherwise
     static bounds are used only when static bounds are enabled
     in general.  NOTE(review): tri-state semantics inferred from
     this comparison — confirm against the option definition.  */
  if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
      || flag_chkp_use_static_const_bounds > 0)
    {
      tree bnd_var = chkp_make_static_bounds (cst);
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple *stmt;

      /* Load the static bounds var once at function entry.  */
      bounds = chkp_get_tmp_reg (NULL);
      stmt = gimple_build_assign (bounds, bnd_var);
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else
    {
      /* Bounds are [&cst, &cst + TREE_STRING_LENGTH).  */
      lb = chkp_build_addr_expr (cst);
      size = build_int_cst (chkp_uintptr_type, TREE_STRING_LENGTH (cst));
      bounds = chkp_make_bounds (lb, size, NULL, false);
    }

  bounds = chkp_maybe_copy_and_register_bounds (cst, bounds);

  return bounds;
}
|
|
3272
|
|
/* Generate code to intersect bounds BOUNDS1 and BOUNDS2 and
   return the result.  If ITER is not NULL then code is inserted
   before position pointed by ITER.  Otherwise code is added to
   entry block.  */
static tree
chkp_intersect_bounds (tree bounds1, tree bounds2, gimple_stmt_iterator *iter)
{
  /* Zero bounds act as the identity: intersecting with them
     returns the other operand unchanged.  */
  if (!bounds1 || bounds1 == chkp_get_zero_bounds ())
    return bounds2 ? bounds2 : bounds1;
  else if (!bounds2 || bounds2 == chkp_get_zero_bounds ())
    return bounds1;
  else
    {
      gimple_seq seq;
      gimple *stmt;
      tree bounds;

      seq = NULL;

      /* Build a call to the bndintersect builtin and mark it so
	 later phases know it is instrumentation code.  */
      stmt = gimple_build_call (chkp_intersect_fndecl, 2, bounds1, bounds2);
      chkp_mark_stmt (stmt);

      bounds = chkp_get_tmp_reg (stmt);
      gimple_call_set_lhs (stmt, bounds);

      gimple_seq_add_stmt (&seq, stmt);

      /* We are probably doing narrowing for constant expression.
	 In such case iter may be undefined.  */
      if (!iter)
	{
	  /* Append to the entry block; point ITER at the local
	     iterator so the dump code below can use it.  */
	  gimple_stmt_iterator gsi = gsi_last_bb (chkp_get_entry_block ());
	  iter = &gsi;
	  gsi_insert_seq_after (iter, seq, GSI_SAME_STMT);
	}
      else
	gsi_insert_seq_before (iter, seq, GSI_SAME_STMT);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Bounds intersection: ");
	  print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
	  fprintf (dump_file, "  inserted before statement: ");
	  print_gimple_stmt (dump_file, gsi_stmt (*iter), 0,
			     TDF_VOPS|TDF_MEMSYMS);
	}

      return bounds;
    }
}
|
|
3323
|
|
3324 /* Return 1 if we are allowed to narrow bounds for addressed FIELD
|
|
3325 and 0 othersize. REF is reference to the field. */
|
|
3326
|
|
3327 static bool
|
|
3328 chkp_may_narrow_to_field (tree ref, tree field)
|
|
3329 {
|
|
3330 return DECL_SIZE (field) && TREE_CODE (DECL_SIZE (field)) == INTEGER_CST
|
|
3331 && tree_to_uhwi (DECL_SIZE (field)) != 0
|
|
3332 && !(flag_chkp_flexible_struct_trailing_arrays
|
|
3333 && array_at_struct_end_p (ref))
|
|
3334 && (!DECL_FIELD_OFFSET (field)
|
|
3335 || TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST)
|
|
3336 && (!DECL_FIELD_BIT_OFFSET (field)
|
|
3337 || TREE_CODE (DECL_FIELD_BIT_OFFSET (field)) == INTEGER_CST)
|
|
3338 && !lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
|
|
3339 && !chkp_variable_size_type (TREE_TYPE (field));
|
|
3340 }
|
|
3341
|
|
3342 /* Return 1 if bounds for FIELD should be narrowed to
|
|
3343 field's own size. REF is reference to the field. */
|
|
3344
|
|
3345 static bool
|
|
3346 chkp_narrow_bounds_for_field (tree ref, tree field)
|
|
3347 {
|
|
3348 HOST_WIDE_INT offs;
|
|
3349 HOST_WIDE_INT bit_offs;
|
|
3350
|
|
3351 if (!chkp_may_narrow_to_field (ref, field))
|
|
3352 return false;
|
|
3353
|
|
3354 /* Access to compiler generated fields should not cause
|
|
3355 bounds narrowing. */
|
|
3356 if (DECL_ARTIFICIAL (field))
|
|
3357 return false;
|
|
3358
|
|
3359 offs = tree_to_uhwi (DECL_FIELD_OFFSET (field));
|
|
3360 bit_offs = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
|
|
3361
|
|
3362 return (flag_chkp_narrow_bounds
|
|
3363 && (flag_chkp_first_field_has_own_bounds
|
|
3364 || offs
|
|
3365 || bit_offs));
|
|
3366 }
|
|
3367
|
|
/* Perform narrowing for BOUNDS of an INNER reference.  Shift boundary
   by OFFSET bytes and limit to SIZE bytes.  Newly created statements are
   added to ITER.  */

static tree
chkp_narrow_size_and_offset (tree bounds, tree inner, tree offset,
			     tree size, gimple_stmt_iterator *iter)
{
  /* Take the address of INNER into a fresh SSA temporary.  */
  tree addr = chkp_build_addr_expr (unshare_expr (inner));
  tree t = TREE_TYPE (addr);

  gimple *stmt = gimple_build_assign (NULL_TREE, addr);
  addr = make_temp_ssa_name (t, stmt, CHKP_BOUND_TMP_NAME);
  gimple_assign_set_lhs (stmt, addr);
  gsi_insert_seq_before (iter, stmt, GSI_SAME_STMT);

  /* shifted = addr + offset.  */
  stmt = gimple_build_assign (NULL_TREE, POINTER_PLUS_EXPR, addr, offset);
  tree shifted = make_temp_ssa_name (t, stmt, CHKP_BOUND_TMP_NAME);
  gimple_assign_set_lhs (stmt, shifted);
  gsi_insert_seq_before (iter, stmt, GSI_SAME_STMT);

  /* Bounds of the narrowed region: [shifted, shifted + size).  */
  tree bounds2 = chkp_make_bounds (shifted, size, iter, false);

  /* Result is the intersection with the original bounds, so the
     narrowed bounds never extend past them.  */
  return chkp_intersect_bounds (bounds, bounds2, iter);
}
|
|
3393
|
|
3394 /* Perform narrowing for BOUNDS using bounds computed for field
|
|
3395 access COMPONENT. ITER meaning is the same as for
|
|
3396 chkp_intersect_bounds. */
|
|
3397
|
|
3398 static tree
|
|
3399 chkp_narrow_bounds_to_field (tree bounds, tree component,
|
|
3400 gimple_stmt_iterator *iter)
|
|
3401 {
|
|
3402 tree field = TREE_OPERAND (component, 1);
|
|
3403 tree size = DECL_SIZE_UNIT (field);
|
|
3404 tree field_ptr = chkp_build_addr_expr (component);
|
|
3405 tree field_bounds;
|
|
3406
|
|
3407 field_bounds = chkp_make_bounds (field_ptr, size, iter, false);
|
|
3408
|
|
3409 return chkp_intersect_bounds (field_bounds, bounds, iter);
|
|
3410 }
|
|
3411
|
|
/* Parse field or array access NODE.

   PTR ouput parameter holds a pointer to the outermost
   object.

   BITFIELD output parameter is set to 1 if bitfield is
   accessed and to 0 otherwise.  If it is 1 then ELT holds
   outer component for accessed bit field.

   SAFE outer parameter is set to 1 if access is safe and
   checks are not required.

   BOUNDS outer parameter holds bounds to be used to check
   access (may be NULL).

   If INNERMOST_BOUNDS is 1 then try to narrow bounds to the
   innermost accessed component.  */
static void
chkp_parse_array_and_component_ref (tree node, tree *ptr,
				    tree *elt, bool *safe,
				    bool *bitfield,
				    tree *bounds,
				    gimple_stmt_iterator *iter,
				    bool innermost_bounds)
{
  tree comp_to_narrow = NULL_TREE;
  tree last_comp = NULL_TREE;
  bool array_ref_found = false;
  tree *nodes;
  tree var;
  int len;
  int i;

  /* Compute tree height for expression.  */
  var = node;
  len = 1;
  while (TREE_CODE (var) == COMPONENT_REF
	 || TREE_CODE (var) == ARRAY_REF
	 || TREE_CODE (var) == VIEW_CONVERT_EXPR
	 || TREE_CODE (var) == BIT_FIELD_REF)
    {
      var = TREE_OPERAND (var, 0);
      len++;
    }

  gcc_assert (len > 1);

  /* It is more convenient for us to scan left-to-right,
     so walk tree again and put all node to nodes vector
     in reversed order.  */
  nodes = XALLOCAVEC (tree, len);
  nodes[len - 1] = node;
  for (i = len - 2; i >= 0; i--)
    nodes[i] = TREE_OPERAND (nodes[i + 1], 0);

  if (bounds)
    *bounds = NULL;
  *safe = true;
  *bitfield = ((TREE_CODE (node) == COMPONENT_REF
		&& DECL_BIT_FIELD_TYPE (TREE_OPERAND (node, 1)))
	       || TREE_CODE (node) == BIT_FIELD_REF);
  /* To get bitfield address we will need outer element.  */
  if (*bitfield)
    *elt = nodes[len - 2];
  else
    *elt = NULL_TREE;

  /* If we have indirection in expression then compute
     outermost structure bounds.  Computed bounds may be
     narrowed later.  */
  if (TREE_CODE (nodes[0]) == MEM_REF || INDIRECT_REF_P (nodes[0]))
    {
      *safe = false;
      *ptr = TREE_OPERAND (nodes[0], 0);
      if (bounds)
	*bounds = chkp_find_bounds (*ptr, iter);
    }
  else
    {
      gcc_assert (VAR_P (var)
		  || TREE_CODE (var) == PARM_DECL
		  || TREE_CODE (var) == RESULT_DECL
		  || TREE_CODE (var) == STRING_CST
		  || TREE_CODE (var) == SSA_NAME);

      *ptr = chkp_build_addr_expr (var);

      /* For hard register cases chkp_build_addr_expr returns INTEGER_CST
	 and later on chkp_find_bounds will fail to find proper bounds.
	 In order to avoid that, we find/create bounds right aways using
	 the var itself.  */
      if (VAR_P (var) && DECL_HARD_REGISTER (var))
	*bounds = chkp_make_addressed_object_bounds (var, iter);
    }

  /* In this loop we are trying to find a field access
     requiring narrowing.  There are two simple rules
     for search:
     1.  Leftmost array_ref is chosen if any.
     2.  Rightmost suitable component_ref is chosen if innermost
     bounds are required and no array_ref exists.  */
  for (i = 1; i < len; i++)
    {
      var = nodes[i];

      if (TREE_CODE (var) == ARRAY_REF)
	{
	  /* Array indexing is never provably in-bounds here.  */
	  *safe = false;
	  array_ref_found = true;
	  if (flag_chkp_narrow_bounds
	      && !flag_chkp_narrow_to_innermost_arrray
	      && (!last_comp
		  || chkp_may_narrow_to_field (var,
					       TREE_OPERAND (last_comp, 1))))
	    {
	      /* Narrow to the component enclosing the leftmost
		 array ref and stop scanning.  */
	      comp_to_narrow = last_comp;
	      break;
	    }
	}
      else if (TREE_CODE (var) == COMPONENT_REF)
	{
	  tree field = TREE_OPERAND (var, 1);

	  if (innermost_bounds
	      && !array_ref_found
	      && chkp_narrow_bounds_for_field (var, field))
	    comp_to_narrow = var;
	  last_comp = var;

	  if (flag_chkp_narrow_bounds
	      && flag_chkp_narrow_to_innermost_arrray
	      && TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
	    {
	      /* Narrow to each array-typed field as we pass it and
		 drop any pending component narrowing.  */
	      if (bounds)
		*bounds = chkp_narrow_bounds_to_field (*bounds, var, iter);
	      comp_to_narrow = NULL;
	    }
	}
      else if (TREE_CODE (var) == BIT_FIELD_REF)
	{
	  if (flag_chkp_narrow_bounds && bounds)
	    {
	      tree offset, size;
	      chkp_parse_bit_field_ref (var, UNKNOWN_LOCATION, &offset, &size);
	      *bounds
		= chkp_narrow_size_and_offset (*bounds, TREE_OPERAND (var, 0),
					       offset, size, iter);
	    }
	}
      else if (TREE_CODE (var) == VIEW_CONVERT_EXPR)
	/* Nothing to do for it.  */
	;
      else
	gcc_unreachable ();
    }

  if (comp_to_narrow && DECL_SIZE (TREE_OPERAND (comp_to_narrow, 1)) && bounds)
    *bounds = chkp_narrow_bounds_to_field (*bounds, comp_to_narrow, iter);

  /* If no narrowing happened but innermost bounds were requested,
     fall back to the outermost object's bounds.  */
  if (innermost_bounds && bounds && !*bounds)
    *bounds = chkp_find_bounds (*ptr, iter);
}
|
|
3574
|
|
3575 /* Parse BIT_FIELD_REF to a NODE for a given location LOC. Return OFFSET
|
|
3576 and SIZE in bytes. */
|
|
3577
|
|
3578 static
|
|
3579 void chkp_parse_bit_field_ref (tree node, location_t loc, tree *offset,
|
|
3580 tree *size)
|
|
3581 {
|
|
3582 tree bpu = fold_convert (size_type_node, bitsize_int (BITS_PER_UNIT));
|
|
3583 tree offs = fold_convert (size_type_node, TREE_OPERAND (node, 2));
|
|
3584 tree rem = size_binop_loc (loc, TRUNC_MOD_EXPR, offs, bpu);
|
|
3585 offs = size_binop_loc (loc, TRUNC_DIV_EXPR, offs, bpu);
|
|
3586
|
|
3587 tree s = fold_convert (size_type_node, TREE_OPERAND (node, 1));
|
|
3588 s = size_binop_loc (loc, PLUS_EXPR, s, rem);
|
|
3589 s = size_binop_loc (loc, CEIL_DIV_EXPR, s, bpu);
|
|
3590 s = fold_convert (size_type_node, s);
|
|
3591
|
|
3592 *offset = offs;
|
|
3593 *size = s;
|
|
3594 }
|
|
3595
|
|
/* Compute and return bounds for address of OBJ.  */
static tree
chkp_make_addressed_object_bounds (tree obj, gimple_stmt_iterator *iter)
{
  /* Reuse bounds previously registered for this address.  */
  tree bounds = chkp_get_registered_addr_bounds (obj);

  if (bounds)
    return bounds;

  switch (TREE_CODE (obj))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      bounds = chkp_get_bounds_for_decl_addr (obj);
      break;

    case STRING_CST:
      bounds = chkp_get_bounds_for_string_cst (obj);
      break;

    case ARRAY_REF:
    case COMPONENT_REF:
    case BIT_FIELD_REF:
      {
	tree elt;
	tree ptr;
	bool safe;
	bool bitfield;

	/* Delegate to the access parser with innermost_bounds set;
	   it must always produce bounds for us.  */
	chkp_parse_array_and_component_ref (obj, &ptr, &elt, &safe,
					    &bitfield, &bounds, iter, true);

	gcc_assert (bounds);
      }
      break;

    case FUNCTION_DECL:
    case LABEL_DECL:
      /* Code addresses get zero bounds (no checking).  */
      bounds = chkp_get_zero_bounds ();
      break;

    case MEM_REF:
      /* &*p has the bounds of p itself.  */
      bounds = chkp_find_bounds (TREE_OPERAND (obj, 0), iter);
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* Use bounds of the underlying complex object.  */
      bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (obj, 0), iter);
      break;

    default:
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "chkp_make_addressed_object_bounds: "
		   "unexpected object of type %s\n",
		   get_tree_code_name (TREE_CODE (obj)));
	  print_node (dump_file, "", obj, 0);
	}
      internal_error ("chkp_make_addressed_object_bounds: "
		      "Unexpected tree code %s",
		      get_tree_code_name (TREE_CODE (obj)));
    }

  /* Cache the result for later requests on the same OBJ.  */
  chkp_register_addr_bounds (obj, bounds);

  return bounds;
}
|
|
3664
|
|
3665 /* Compute bounds for pointer PTR loaded from PTR_SRC. Generate statements
|
|
3666 to compute bounds if required. Computed bounds should be available at
|
|
3667 position pointed by ITER.
|
|
3668
|
|
3669 If PTR_SRC is NULL_TREE then pointer definition is identified.
|
|
3670
|
|
3671 If PTR_SRC is not NULL_TREE then ITER points to statements which loads
|
|
3672 PTR. If PTR is a any memory reference then ITER points to a statement
|
|
3673 after which bndldx will be inserterd. In both cases ITER will be updated
|
|
3674 to point to the inserted bndldx statement. */
|
|
3675
|
|
3676 static tree
|
|
3677 chkp_find_bounds_1 (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
|
|
3678 {
|
|
3679 tree addr = NULL_TREE;
|
|
3680 tree bounds = NULL_TREE;
|
|
3681
|
|
3682 if (!ptr_src)
|
|
3683 ptr_src = ptr;
|
|
3684
|
|
3685 bounds = chkp_get_registered_bounds (ptr_src);
|
|
3686
|
|
3687 if (bounds)
|
|
3688 return bounds;
|
|
3689
|
|
3690 switch (TREE_CODE (ptr_src))
|
|
3691 {
|
|
3692 case MEM_REF:
|
|
3693 case VAR_DECL:
|
|
3694 if (BOUNDED_P (ptr_src))
|
|
3695 if (VAR_P (ptr) && DECL_REGISTER (ptr))
|
|
3696 bounds = chkp_get_zero_bounds ();
|
|
3697 else
|
|
3698 {
|
|
3699 addr = chkp_build_addr_expr (ptr_src);
|
|
3700 bounds = chkp_build_bndldx (addr, ptr, iter);
|
|
3701 }
|
|
3702 else
|
|
3703 bounds = chkp_get_nonpointer_load_bounds ();
|
|
3704 break;
|
|
3705
|
|
3706 case ARRAY_REF:
|
|
3707 case COMPONENT_REF:
|
|
3708 addr = get_base_address (ptr_src);
|
|
3709 if (VAR_P (addr) && DECL_HARD_REGISTER (addr))
|
|
3710 {
|
|
3711 bounds = chkp_get_zero_bounds ();
|
|
3712 break;
|
|
3713 }
|
|
3714 if (DECL_P (addr)
|
|
3715 || TREE_CODE (addr) == MEM_REF
|
|
3716 || TREE_CODE (addr) == TARGET_MEM_REF)
|
|
3717 {
|
|
3718 if (BOUNDED_P (ptr_src))
|
|
3719 if (VAR_P (ptr) && DECL_REGISTER (ptr))
|
|
3720 bounds = chkp_get_zero_bounds ();
|
|
3721 else
|
|
3722 {
|
|
3723 addr = chkp_build_addr_expr (ptr_src);
|
|
3724 bounds = chkp_build_bndldx (addr, ptr, iter);
|
|
3725 }
|
|
3726 else
|
|
3727 bounds = chkp_get_nonpointer_load_bounds ();
|
|
3728 }
|
|
3729 else
|
|
3730 {
|
|
3731 gcc_assert (TREE_CODE (addr) == SSA_NAME);
|
|
3732 bounds = chkp_find_bounds (addr, iter);
|
|
3733 }
|
|
3734 break;
|
|
3735
|
|
3736 case PARM_DECL:
|
|
3737 /* Handled above but failed. */
|
|
3738 bounds = chkp_get_invalid_op_bounds ();
|
|
3739 break;
|
|
3740
|
|
3741 case TARGET_MEM_REF:
|
|
3742 addr = chkp_build_addr_expr (ptr_src);
|
|
3743 bounds = chkp_build_bndldx (addr, ptr, iter);
|
|
3744 break;
|
|
3745
|
|
3746 case SSA_NAME:
|
|
3747 bounds = chkp_get_registered_bounds (ptr_src);
|
|
3748 if (!bounds)
|
|
3749 {
|
|
3750 gimple *def_stmt = SSA_NAME_DEF_STMT (ptr_src);
|
|
3751 gphi_iterator phi_iter;
|
|
3752
|
|
3753 bounds = chkp_get_bounds_by_definition (ptr_src, def_stmt, &phi_iter);
|
|
3754
|
|
3755 gcc_assert (bounds);
|
|
3756
|
|
3757 if (gphi *def_phi = dyn_cast <gphi *> (def_stmt))
|
|
3758 {
|
|
3759 unsigned i;
|
|
3760
|
|
3761 for (i = 0; i < gimple_phi_num_args (def_phi); i++)
|
|
3762 {
|
|
3763 tree arg = gimple_phi_arg_def (def_phi, i);
|
|
3764 tree arg_bnd;
|
|
3765 gphi *phi_bnd;
|
|
3766
|
|
3767 arg_bnd = chkp_find_bounds (arg, NULL);
|
|
3768
|
|
3769 /* chkp_get_bounds_by_definition created new phi
|
|
3770 statement and phi_iter points to it.
|
|
3771
|
|
3772 Previous call to chkp_find_bounds could create
|
|
3773 new basic block and therefore change phi statement
|
|
3774 phi_iter points to. */
|
|
3775 phi_bnd = phi_iter.phi ();
|
|
3776
|
|
3777 add_phi_arg (phi_bnd, arg_bnd,
|
|
3778 gimple_phi_arg_edge (def_phi, i),
|
|
3779 UNKNOWN_LOCATION);
|
|
3780 }
|
|
3781
|
|
3782 /* If all bound phi nodes have their arg computed
|
|
3783 then we may finish its computation. See
|
|
3784 chkp_finish_incomplete_bounds for more details. */
|
|
3785 if (chkp_may_finish_incomplete_bounds ())
|
|
3786 chkp_finish_incomplete_bounds ();
|
|
3787 }
|
|
3788
|
|
3789 gcc_assert (bounds == chkp_get_registered_bounds (ptr_src)
|
|
3790 || chkp_incomplete_bounds (bounds));
|
|
3791 }
|
|
3792 break;
|
|
3793
|
|
3794 case ADDR_EXPR:
|
|
3795 case WITH_SIZE_EXPR:
|
|
3796 bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (ptr_src, 0), iter);
|
|
3797 break;
|
|
3798
|
|
3799 case INTEGER_CST:
|
|
3800 case COMPLEX_CST:
|
|
3801 case VECTOR_CST:
|
|
3802 if (integer_zerop (ptr_src))
|
|
3803 bounds = chkp_get_none_bounds ();
|
|
3804 else
|
|
3805 bounds = chkp_get_invalid_op_bounds ();
|
|
3806 break;
|
|
3807
|
|
3808 default:
|
|
3809 if (dump_file && (dump_flags & TDF_DETAILS))
|
|
3810 {
|
|
3811 fprintf (dump_file, "chkp_find_bounds: unexpected ptr of type %s\n",
|
|
3812 get_tree_code_name (TREE_CODE (ptr_src)));
|
|
3813 print_node (dump_file, "", ptr_src, 0);
|
|
3814 }
|
|
3815 internal_error ("chkp_find_bounds: Unexpected tree code %s",
|
|
3816 get_tree_code_name (TREE_CODE (ptr_src)));
|
|
3817 }
|
|
3818
|
|
3819 if (!bounds)
|
|
3820 {
|
|
3821 if (dump_file && (dump_flags & TDF_DETAILS))
|
|
3822 {
|
|
3823 fprintf (stderr, "chkp_find_bounds: cannot find bounds for pointer\n");
|
|
3824 print_node (dump_file, "", ptr_src, 0);
|
|
3825 }
|
|
3826 internal_error ("chkp_find_bounds: Cannot find bounds for pointer");
|
|
3827 }
|
|
3828
|
|
3829 return bounds;
|
|
3830 }
|
|
3831
|
|
3832 /* Normal case for bounds search without forced narrowing. */
|
|
3833 static tree
|
|
3834 chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter)
|
|
3835 {
|
|
3836 return chkp_find_bounds_1 (ptr, NULL_TREE, iter);
|
|
3837 }
|
|
3838
|
|
3839 /* Search bounds for pointer PTR loaded from PTR_SRC
|
|
3840 by statement *ITER points to. */
|
|
3841 static tree
|
|
3842 chkp_find_bounds_loaded (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
|
|
3843 {
|
|
3844 return chkp_find_bounds_1 (ptr, ptr_src, iter);
|
|
3845 }
|
|
3846
|
|
/* Helper function which checks type of RHS and finds all pointers in
   it.  For each found pointer we build its accesses in LHS and RHS
   objects and then call HANDLER for them.  Function is used to copy
   or initialize bounds for copied object.  */
|
|
static void
chkp_walk_pointer_assignments (tree lhs, tree rhs, void *arg,
			       assign_handler handler)
{
  tree type = TREE_TYPE (lhs);

  /* We have nothing to do with clobbers.  */
  if (TREE_CLOBBER_P (rhs))
    return;

  /* Base case: LHS itself is a pointer (or other bounded type) —
     hand the pair straight to HANDLER.  */
  if (BOUNDED_TYPE_P (type))
    handler (lhs, rhs, arg);
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree field;

      /* For a constructor RHS, recurse only into the initialized
	 fields that can contain pointers.  */
      if (TREE_CODE (rhs) == CONSTRUCTOR)
	{
	  unsigned HOST_WIDE_INT cnt;
	  tree val;

	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, field, val)
	    {
	      if (field && chkp_type_has_pointer (TREE_TYPE (field)))
		{
		  tree lhs_field = chkp_build_component_ref (lhs, field);
		  chkp_walk_pointer_assignments (lhs_field, val, arg, handler);
		}
	    }
	}
      else
	/* Non-constructor RHS: walk every FIELD_DECL of the record
	   type and pair up matching COMPONENT_REFs on both sides.  */
	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	  if (TREE_CODE (field) == FIELD_DECL
	      && chkp_type_has_pointer (TREE_TYPE (field)))
	    {
	      tree rhs_field = chkp_build_component_ref (rhs, field);
	      tree lhs_field = chkp_build_component_ref (lhs, field);
	      chkp_walk_pointer_assignments (lhs_field, rhs_field, arg, handler);
	    }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      unsigned HOST_WIDE_INT cur = 0;
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      tree esize = TYPE_SIZE (etype);

      if (TREE_CODE (rhs) == CONSTRUCTOR)
	{
	  unsigned HOST_WIDE_INT cnt;
	  tree purp, val, lhs_elem;

	  /* Walk constructor elements; PURP is the element index or
	     a RANGE_EXPR for a run of identical initializers.  */
	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, purp, val)
	    {
	      if (purp && TREE_CODE (purp) == RANGE_EXPR)
		{
		  tree lo_index = TREE_OPERAND (purp, 0);
		  tree hi_index = TREE_OPERAND (purp, 1);

		  /* Expand the range into one recursion per index.  */
		  for (cur = (unsigned)tree_to_uhwi (lo_index);
		       cur <= (unsigned)tree_to_uhwi (hi_index);
		       cur++)
		    {
		      lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
		      chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
		    }
		}
	      else
		{
		  /* Explicit index resets the running position CUR;
		     otherwise elements are consecutive.  */
		  if (purp)
		    {
		      gcc_assert (TREE_CODE (purp) == INTEGER_CST);
		      cur = tree_to_uhwi (purp);
		    }

		  lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur++);

		  chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
		}
	    }
	}
      /* Copy array only when size is known.  */
      else if (maxval && !integer_minus_onep (maxval))
	for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
	  {
	    tree lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
	    tree rhs_elem = chkp_build_array_ref (rhs, etype, esize, cur);
	    chkp_walk_pointer_assignments (lhs_elem, rhs_elem, arg, handler);
	  }
    }
  else
    internal_error("chkp_walk_pointer_assignments: unexpected RHS type: %s",
		   get_tree_code_name (TREE_CODE (type)));
}
|
|
3945
|
|
/* Add code to copy bounds for assignment of RHS to LHS.
   ARG is an iterator pointing to the position for the new code.  */
|
|
3948 static void
|
|
3949 chkp_copy_bounds_for_elem (tree lhs, tree rhs, void *arg)
|
|
3950 {
|
|
3951 gimple_stmt_iterator *iter = (gimple_stmt_iterator *)arg;
|
|
3952 tree bounds = chkp_find_bounds (rhs, iter);
|
|
3953 tree addr = chkp_build_addr_expr(lhs);
|
|
3954
|
|
3955 chkp_build_bndstx (addr, rhs, bounds, iter);
|
|
3956 }
|
|
3957
|
|
/* Emit static bound initializers and size vars.  */
|
|
void
chkp_finish_file (void)
{
  struct varpool_node *node;
  struct chkp_ctor_stmt_list stmts;

  /* Do not emit anything if compilation already failed.  */
  if (seen_error ())
    return;

  /* Iterate through varpool and generate bounds initialization
     constructors for all statically initialized pointers.  */
  stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
  stmts.stmts = NULL;
  FOR_EACH_VARIABLE (node)
    /* Check that var is actually emitted and we need and may initialize
       its bounds.  */
    if (node->need_bounds_init
	&& !POINTER_BOUNDS_P (node->decl)
	&& DECL_RTL (node->decl)
	&& MEM_P (DECL_RTL (node->decl))
	&& TREE_ASM_WRITTEN (node->decl))
      {
	chkp_walk_pointer_assignments (node->decl,
				       DECL_INITIAL (node->decl),
				       &stmts,
				       chkp_add_modification_to_stmt_list);

	/* Flush into a constructor once the per-ctor statement budget
	   is exhausted; a fresh list is started for the remainder.  */
	if (stmts.avail <= 0)
	  {
	    cgraph_build_static_cdtor ('P', stmts.stmts,
				       MAX_RESERVED_INIT_PRIORITY + 3);
	    stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
	    stmts.stmts = NULL;
	  }
      }

  /* Flush the tail of accumulated pointer-bounds initializers.  */
  if (stmts.stmts)
    cgraph_build_static_cdtor ('P', stmts.stmts,
			       MAX_RESERVED_INIT_PRIORITY + 3);

  /* Iterate through varpool and generate bounds initialization
     constructors for all static bounds vars.  */
  stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
  stmts.stmts = NULL;
  FOR_EACH_VARIABLE (node)
    if (node->need_bounds_init
	&& POINTER_BOUNDS_P (node->decl)
	&& TREE_ASM_WRITTEN (node->decl))
      {
	tree bnd = node->decl;
	tree var;

	/* A static bounds var is always initialized with the address
	   of the var it holds bounds for.  */
	gcc_assert (DECL_INITIAL (bnd)
		    && TREE_CODE (DECL_INITIAL (bnd)) == ADDR_EXPR);

	var = TREE_OPERAND (DECL_INITIAL (bnd), 0);
	chkp_output_static_bounds (bnd, var, &stmts);
      }

  /* 'B' ctors run at a higher priority than the 'P' ctors above.  */
  if (stmts.stmts)
    cgraph_build_static_cdtor ('B', stmts.stmts,
			       MAX_RESERVED_INIT_PRIORITY + 2);

  /* Compilation-wide maps are no longer needed.  */
  delete chkp_static_var_bounds;
  delete chkp_bounds_map;
}
|
|
4025
|
|
4026 /* An instrumentation function which is called for each statement
|
|
4027 having memory access we want to instrument. It inserts check
|
|
4028 code and bounds copy code.
|
|
4029
|
|
4030 ITER points to statement to instrument.
|
|
4031
|
|
4032 NODE holds memory access in statement to check.
|
|
4033
|
|
4034 LOC holds the location information for statement.
|
|
4035
|
|
4036 DIRFLAGS determines whether access is read or write.
|
|
4037
|
|
4038 ACCESS_OFFS should be added to address used in NODE
|
|
4039 before check.
|
|
4040
|
|
4041 ACCESS_SIZE holds size of checked access.
|
|
4042
|
|
4043 SAFE indicates if NODE access is safe and should not be
|
|
4044 checked. */
|
|
4045 static void
|
|
4046 chkp_process_stmt (gimple_stmt_iterator *iter, tree node,
|
|
4047 location_t loc, tree dirflag,
|
|
4048 tree access_offs, tree access_size,
|
|
4049 bool safe)
|
|
4050 {
|
|
4051 tree node_type = TREE_TYPE (node);
|
|
4052 tree size = access_size ? access_size : TYPE_SIZE_UNIT (node_type);
|
|
4053 tree addr_first = NULL_TREE; /* address of the first accessed byte */
|
|
4054 tree addr_last = NULL_TREE; /* address of the last accessed byte */
|
|
4055 tree ptr = NULL_TREE; /* a pointer used for dereference */
|
|
4056 tree bounds = NULL_TREE;
|
|
4057 bool reg_store = false;
|
|
4058
|
|
4059 /* We do not need instrumentation for clobbers. */
|
|
4060 if (dirflag == integer_one_node
|
|
4061 && gimple_code (gsi_stmt (*iter)) == GIMPLE_ASSIGN
|
|
4062 && TREE_CLOBBER_P (gimple_assign_rhs1 (gsi_stmt (*iter))))
|
|
4063 return;
|
|
4064
|
|
4065 switch (TREE_CODE (node))
|
|
4066 {
|
|
4067 case ARRAY_REF:
|
|
4068 case COMPONENT_REF:
|
|
4069 {
|
|
4070 bool bitfield;
|
|
4071 tree elt;
|
|
4072
|
|
4073 if (safe)
|
|
4074 {
|
|
4075 /* We are not going to generate any checks, so do not
|
|
4076 generate bounds as well. */
|
|
4077 addr_first = chkp_build_addr_expr (node);
|
|
4078 break;
|
|
4079 }
|
|
4080
|
|
4081 chkp_parse_array_and_component_ref (node, &ptr, &elt, &safe,
|
|
4082 &bitfield, &bounds, iter, false);
|
|
4083
|
|
4084 /* Break if there is no dereference and operation is safe. */
|
|
4085
|
|
4086 if (bitfield)
|
|
4087 {
|
|
4088 tree field = TREE_OPERAND (node, 1);
|
|
4089
|
|
4090 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
|
|
4091 size = DECL_SIZE_UNIT (field);
|
|
4092
|
|
4093 if (elt)
|
|
4094 elt = chkp_build_addr_expr (elt);
|
|
4095 addr_first = fold_convert_loc (loc, ptr_type_node, elt ? elt : ptr);
|
|
4096 addr_first = fold_build_pointer_plus_loc (loc,
|
|
4097 addr_first,
|
|
4098 byte_position (field));
|
|
4099 }
|
|
4100 else
|
|
4101 addr_first = chkp_build_addr_expr (node);
|
|
4102 }
|
|
4103 break;
|
|
4104
|
|
4105 case INDIRECT_REF:
|
|
4106 ptr = TREE_OPERAND (node, 0);
|
|
4107 addr_first = ptr;
|
|
4108 break;
|
|
4109
|
|
4110 case MEM_REF:
|
|
4111 ptr = TREE_OPERAND (node, 0);
|
|
4112 addr_first = chkp_build_addr_expr (node);
|
|
4113 break;
|
|
4114
|
|
4115 case TARGET_MEM_REF:
|
|
4116 ptr = TMR_BASE (node);
|
|
4117 addr_first = chkp_build_addr_expr (node);
|
|
4118 break;
|
|
4119
|
|
4120 case ARRAY_RANGE_REF:
|
|
4121 printf("ARRAY_RANGE_REF\n");
|
|
4122 debug_gimple_stmt(gsi_stmt(*iter));
|
|
4123 debug_tree(node);
|
|
4124 gcc_unreachable ();
|
|
4125 break;
|
|
4126
|
|
4127 case BIT_FIELD_REF:
|
|
4128 {
|
|
4129 tree offset, size;
|
|
4130
|
|
4131 gcc_assert (!access_offs);
|
|
4132 gcc_assert (!access_size);
|
|
4133
|
|
4134 chkp_parse_bit_field_ref (node, loc, &offset, &size);
|
|
4135
|
|
4136 chkp_process_stmt (iter, TREE_OPERAND (node, 0), loc,
|
|
4137 dirflag, offset, size, safe);
|
|
4138 return;
|
|
4139 }
|
|
4140 break;
|
|
4141
|
|
4142 case VAR_DECL:
|
|
4143 case RESULT_DECL:
|
|
4144 case PARM_DECL:
|
|
4145 if (dirflag != integer_one_node
|
|
4146 || DECL_REGISTER (node))
|
|
4147 return;
|
|
4148
|
|
4149 safe = true;
|
|
4150 addr_first = chkp_build_addr_expr (node);
|
|
4151 break;
|
|
4152
|
|
4153 default:
|
|
4154 return;
|
|
4155 }
|
|
4156
|
|
4157 /* If addr_last was not computed then use (addr_first + size - 1)
|
|
4158 expression to compute it. */
|
|
4159 if (!addr_last)
|
|
4160 {
|
|
4161 addr_last = fold_build_pointer_plus_loc (loc, addr_first, size);
|
|
4162 addr_last = fold_build_pointer_plus_hwi_loc (loc, addr_last, -1);
|
|
4163 }
|
|
4164
|
|
4165 /* Shift both first_addr and last_addr by access_offs if specified. */
|
|
4166 if (access_offs)
|
|
4167 {
|
|
4168 addr_first = fold_build_pointer_plus_loc (loc, addr_first, access_offs);
|
|
4169 addr_last = fold_build_pointer_plus_loc (loc, addr_last, access_offs);
|
|
4170 }
|
|
4171
|
|
4172 if (dirflag == integer_one_node)
|
|
4173 {
|
|
4174 tree base = get_base_address (node);
|
|
4175 if (VAR_P (base) && DECL_HARD_REGISTER (base))
|
|
4176 reg_store = true;
|
|
4177 }
|
|
4178
|
|
4179 /* Generate bndcl/bndcu checks if memory access is not safe. */
|
|
4180 if (!safe)
|
|
4181 {
|
|
4182 gimple_stmt_iterator stmt_iter = *iter;
|
|
4183
|
|
4184 if (!bounds)
|
|
4185 bounds = chkp_find_bounds (ptr, iter);
|
|
4186
|
|
4187 chkp_check_mem_access (addr_first, addr_last, bounds,
|
|
4188 stmt_iter, loc, dirflag);
|
|
4189 }
|
|
4190
|
|
4191 /* We need to store bounds in case pointer is stored. */
|
|
4192 if (dirflag == integer_one_node
|
|
4193 && !reg_store
|
|
4194 && chkp_type_has_pointer (node_type)
|
|
4195 && flag_chkp_store_bounds)
|
|
4196 {
|
|
4197 gimple *stmt = gsi_stmt (*iter);
|
|
4198 tree rhs1 = gimple_assign_rhs1 (stmt);
|
|
4199 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
|
|
4200
|
|
4201 if (get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS)
|
|
4202 chkp_walk_pointer_assignments (node, rhs1, iter,
|
|
4203 chkp_copy_bounds_for_elem);
|
|
4204 else
|
|
4205 {
|
|
4206 bounds = chkp_compute_bounds_for_assignment (NULL_TREE, stmt);
|
|
4207 chkp_build_bndstx (addr_first, rhs1, bounds, iter);
|
|
4208 }
|
|
4209 }
|
|
4210 }
|
|
4211
|
|
4212 /* Add code to copy bounds for all pointers copied
|
|
4213 in ASSIGN created during inline of EDGE. */
|
|
void
chkp_copy_bounds_for_assign (gimple *assign, struct cgraph_edge *edge)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs = gimple_assign_rhs1 (assign);
  gimple_stmt_iterator iter = gsi_for_stmt (assign);

  if (!flag_chkp_store_bounds)
    return;

  /* This may insert bndldx/bndstx calls before ASSIGN.  */
  chkp_walk_pointer_assignments (lhs, rhs, &iter, chkp_copy_bounds_for_elem);

  /* We should create edges for all created calls to bndldx and bndstx.  */
  /* Walk backwards from ASSIGN over the freshly inserted statements;
     everything between the iterator's current position and ASSIGN was
     added by the walk above.  */
  while (gsi_stmt (iter) != assign)
    {
      gimple *stmt = gsi_stmt (iter);
      if (gimple_code (stmt) == GIMPLE_CALL)
	{
	  tree fndecl = gimple_call_fndecl (stmt);
	  struct cgraph_node *callee = cgraph_node::get_create (fndecl);
	  struct cgraph_edge *new_edge;

	  /* Only chkp builtins are expected among inserted calls.  */
	  gcc_assert (chkp_gimple_call_builtin_p (stmt, BUILT_IN_CHKP_BNDSTX)
		      || chkp_gimple_call_builtin_p (stmt, BUILT_IN_CHKP_BNDLDX)
		      || chkp_gimple_call_builtin_p (stmt, BUILT_IN_CHKP_BNDRET));

	  /* Clone profile data (count/frequency) from the inlined
	     EDGE so the new call edge is consistently weighted.  */
	  new_edge = edge->caller->create_edge (callee,
						as_a <gcall *> (stmt),
						edge->count,
						edge->frequency);
	  new_edge->frequency = compute_call_stmt_bb_frequency
	    (edge->caller->decl, gimple_bb (stmt));
	}
      gsi_prev (&iter);
    }
}
|
|
4250
|
|
4251 /* Some code transformation made during instrumentation pass
|
|
4252 may put code into inconsistent state. Here we find and fix
|
|
4253 such flaws. */
|
|
void
chkp_fix_cfg ()
{
  basic_block bb;
  gimple_stmt_iterator i;

  /* We could insert some code right after stmt which ends bb.
     We wanted to put this code on fallthru edge but did not
     add new edges from the beginning because it may cause new
     phi node creation which may be incorrect due to incomplete
     bound phi nodes.  */
  FOR_ALL_BB_FN (bb, cfun)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      {
	gimple *stmt = gsi_stmt (i);
	gimple_stmt_iterator next = i;

	gsi_next (&next);

	/* Statements after a block-ending statement are misplaced;
	   move them onto the fallthru edge.  */
	if (stmt_ends_bb_p (stmt)
	    && !gsi_end_p (next))
	  {
	    edge fall = find_fallthru_edge (bb->succs);
	    basic_block dest = NULL;
	    int flags = 0;

	    gcc_assert (fall);

	    /* We cannot split abnormal edge.  Therefore we
	       store its params, make it regular and then
	       rebuild abnormal edge after split.  */
	    if (fall->flags & EDGE_ABNORMAL)
	      {
		flags = fall->flags & ~EDGE_FALLTHRU;
		dest = fall->dest;

		fall->flags &= ~EDGE_COMPLEX;
	      }

	    /* Transplant every misplaced statement onto the edge.  */
	    while (!gsi_end_p (next))
	      {
		gimple *next_stmt = gsi_stmt (next);
		gsi_remove (&next, false);
		gsi_insert_on_edge (fall, next_stmt);
	      }

	    gsi_commit_edge_inserts ();

	    /* Re-create abnormal edge.  */
	    if (dest)
	      make_edge (bb, dest, flags);
	  }
      }
}
|
|
4308
|
|
4309 /* Walker callback for chkp_replace_function_pointers. Replaces
|
|
4310 function pointer in the specified operand with pointer to the
|
|
4311 instrumented function version. */
|
|
4312 static tree
|
|
4313 chkp_replace_function_pointer (tree *op, int *walk_subtrees,
|
|
4314 void *data ATTRIBUTE_UNUSED)
|
|
4315 {
|
|
4316 if (TREE_CODE (*op) == FUNCTION_DECL
|
|
4317 && chkp_instrumentable_p (*op)
|
|
4318 && (DECL_BUILT_IN_CLASS (*op) == NOT_BUILT_IN
|
|
4319 /* For builtins we replace pointers only for selected
|
|
4320 function and functions having definitions. */
|
|
4321 || (DECL_BUILT_IN_CLASS (*op) == BUILT_IN_NORMAL
|
|
4322 && (chkp_instrument_normal_builtin (*op)
|
|
4323 || gimple_has_body_p (*op)))))
|
|
4324 {
|
|
4325 struct cgraph_node *node = cgraph_node::get_create (*op);
|
|
4326 struct cgraph_node *clone = NULL;
|
|
4327
|
|
4328 if (!node->instrumentation_clone)
|
|
4329 clone = chkp_maybe_create_clone (*op);
|
|
4330
|
|
4331 if (clone)
|
|
4332 *op = clone->decl;
|
|
4333 *walk_subtrees = 0;
|
|
4334 }
|
|
4335
|
|
4336 return NULL;
|
|
4337 }
|
|
4338
|
|
4339 /* This function searches for function pointers in statement
|
|
4340 pointed by GSI and replaces them with pointers to instrumented
|
|
4341 function versions. */
|
|
4342 static void
|
|
4343 chkp_replace_function_pointers (gimple_stmt_iterator *gsi)
|
|
4344 {
|
|
4345 gimple *stmt = gsi_stmt (*gsi);
|
|
4346 /* For calls we want to walk call args only. */
|
|
4347 if (gimple_code (stmt) == GIMPLE_CALL)
|
|
4348 {
|
|
4349 unsigned i;
|
|
4350 for (i = 0; i < gimple_call_num_args (stmt); i++)
|
|
4351 walk_tree (gimple_call_arg_ptr (stmt, i),
|
|
4352 chkp_replace_function_pointer, NULL, NULL);
|
|
4353 }
|
|
4354 else
|
|
4355 walk_gimple_stmt (gsi, NULL, chkp_replace_function_pointer, NULL);
|
|
4356 }
|
|
4357
|
|
4358 /* This function instruments all statements working with memory,
|
|
4359 calls and rets.
|
|
4360
|
|
4361 It also removes excess statements from static initializers. */
|
|
static void
chkp_instrument_function (void)
{
  basic_block bb, next;
  gimple_stmt_iterator i;
  enum gimple_rhs_class grhs_class;
  /* True when compiling a chkp static constructor: its accesses are
     generated by the checker itself and need no checks.  */
  bool safe = lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl));

  /* Iterate with an explicit NEXT pointer because instrumentation
     may split blocks and extend the chain.  */
  bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
  do
    {
      next = bb->next_bb;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  gimple *s = gsi_stmt (i);

	  /* Skip statement marked to not be instrumented.  */
	  if (chkp_marked_stmt_p (s))
	    {
	      gsi_next (&i);
	      continue;
	    }

	  chkp_replace_function_pointers (&i);

	  switch (gimple_code (s))
	    {
	    case GIMPLE_ASSIGN:
	      /* LHS is a store (dirflag one), RHS operands are
		 loads (dirflag zero).  */
	      chkp_process_stmt (&i, gimple_assign_lhs (s),
				 gimple_location (s), integer_one_node,
				 NULL_TREE, NULL_TREE, safe);
	      chkp_process_stmt (&i, gimple_assign_rhs1 (s),
				 gimple_location (s), integer_zero_node,
				 NULL_TREE, NULL_TREE, safe);
	      grhs_class = get_gimple_rhs_class (gimple_assign_rhs_code (s));
	      if (grhs_class == GIMPLE_BINARY_RHS)
		chkp_process_stmt (&i, gimple_assign_rhs2 (s),
				   gimple_location (s), integer_zero_node,
				   NULL_TREE, NULL_TREE, safe);
	      break;

	    case GIMPLE_RETURN:
	      {
		greturn *r = as_a <greturn *> (s);
		if (gimple_return_retval (r) != NULL_TREE)
		  {
		    chkp_process_stmt (&i, gimple_return_retval (r),
				       gimple_location (r),
				       integer_zero_node,
				       NULL_TREE, NULL_TREE, safe);

		    /* Additionally we need to add bounds
		       to return statement.  */
		    chkp_add_bounds_to_ret_stmt (&i);
		  }
	      }
	      break;

	    case GIMPLE_CALL:
	      chkp_add_bounds_to_call_stmt (&i);
	      break;

	    default:
	      ;
	    }

	  gsi_next (&i);

	  /* We do not need any actual pointer stores in checker
	     static initializer.  */
	  /* NOTE: this predicate equals SAFE computed above.  */
	  if (lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl))
	      && gimple_code (s) == GIMPLE_ASSIGN
	      && gimple_store_p (s))
	    {
	      gimple_stmt_iterator del_iter = gsi_for_stmt (s);
	      gsi_remove (&del_iter, true);
	      unlink_stmt_vdef (s);
	      release_defs(s);
	    }
	}
      bb = next;
    }
  while (bb);

  /* Some input params may have bounds and be address taken.  In this case
     we should store incoming bounds into bounds table.  */
  tree arg;
  if (flag_chkp_store_bounds)
    for (arg = DECL_ARGUMENTS (cfun->decl); arg; arg = DECL_CHAIN (arg))
      if (TREE_ADDRESSABLE (arg))
	{
	  if (BOUNDED_P (arg))
	    {
	      /* A bounded parameter: store its incoming bounds at the
		 function entry, then skip the companion bounds arg.  */
	      tree bounds = chkp_get_next_bounds_parm (arg);
	      tree def_ptr = ssa_default_def (cfun, arg);
	      gimple_stmt_iterator iter
		= gsi_start_bb (chkp_get_entry_block ());
	      chkp_build_bndstx (chkp_build_addr_expr (arg),
				 def_ptr ? def_ptr : arg,
				 bounds, &iter);

	      /* Skip bounds arg.  */
	      arg = TREE_CHAIN (arg);
	    }
	  else if (chkp_type_has_pointer (TREE_TYPE (arg)))
	    {
	      /* An aggregate parameter containing pointers: store
		 bounds for each pointer slot found in its type.  */
	      tree orig_arg = arg;
	      bitmap slots = BITMAP_ALLOC (NULL);
	      gimple_stmt_iterator iter
		= gsi_start_bb (chkp_get_entry_block ());
	      bitmap_iterator bi;
	      unsigned bnd_no;

	      chkp_find_bound_slots (TREE_TYPE (arg), slots);

	      EXECUTE_IF_SET_IN_BITMAP (slots, 0, bnd_no, bi)
		{
		  tree bounds = chkp_get_next_bounds_parm (arg);
		  /* Byte offset of this pointer slot inside the
		     aggregate.  */
		  HOST_WIDE_INT offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT;
		  tree addr = chkp_build_addr_expr (orig_arg);
		  tree ptr = build2 (MEM_REF, ptr_type_node, addr,
				     build_int_cst (ptr_type_node, offs));
		  chkp_build_bndstx (chkp_build_addr_expr (ptr), ptr,
				     bounds, &iter);

		  arg = DECL_CHAIN (arg);
		}
	      BITMAP_FREE (slots);
	    }
	}
}
|
|
4493
|
|
4494 /* Find init/null/copy_ptr_bounds calls and replace them
|
|
4495 with assignments. It should allow better code
|
|
4496 optimization. */
|
|
4497
|
|
4498 static void
|
|
4499 chkp_remove_useless_builtins ()
|
|
4500 {
|
|
4501 basic_block bb;
|
|
4502 gimple_stmt_iterator gsi;
|
|
4503
|
|
4504 FOR_EACH_BB_FN (bb, cfun)
|
|
4505 {
|
|
4506 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
|
|
4507 {
|
|
4508 gimple *stmt = gsi_stmt (gsi);
|
|
4509 tree fndecl;
|
|
4510 enum built_in_function fcode;
|
|
4511
|
|
4512 /* Find builtins returning first arg and replace
|
|
4513 them with assignments. */
|
|
4514 if (gimple_code (stmt) == GIMPLE_CALL
|
|
4515 && (fndecl = gimple_call_fndecl (stmt))
|
|
4516 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
|
|
4517 && (fcode = DECL_FUNCTION_CODE (fndecl))
|
|
4518 && (fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
|
|
4519 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
|
|
4520 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS
|
|
4521 || fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS))
|
|
4522 {
|
|
4523 tree res = gimple_call_arg (stmt, 0);
|
|
4524 update_call_from_tree (&gsi, res);
|
|
4525 stmt = gsi_stmt (gsi);
|
|
4526 update_stmt (stmt);
|
|
4527 }
|
|
4528 }
|
|
4529 }
|
|
4530 }
|
|
4531
|
|
4532 /* Initialize pass. */
|
|
static void
chkp_init (void)
{
  basic_block bb;
  gimple_stmt_iterator i;

  in_chkp_pass = true;

  /* Clear instrumentation marks possibly left by a previous run.  */
  for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = bb->next_bb)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      chkp_unmark_stmt (gsi_stmt (i));

  /* (Re)allocate per-function state maps; some of them may survive
     from a previous function and must be freed first.  */
  chkp_invalid_bounds = new hash_set<tree>;
  chkp_completed_bounds_set = new hash_set<tree>;
  delete chkp_reg_bounds;
  chkp_reg_bounds = new hash_map<tree, tree>;
  delete chkp_bound_vars;
  chkp_bound_vars = new hash_map<tree, tree>;
  chkp_reg_addr_bounds = new hash_map<tree, tree>;
  chkp_incomplete_bounds_map = new hash_map<tree, tree>;
  delete chkp_bounds_map;
  chkp_bounds_map = new hash_map<tree, tree>;
  chkp_abnormal_copies = BITMAP_GGC_ALLOC ();

  /* Reset cached per-function values.  */
  entry_block = NULL;
  zero_bounds = NULL_TREE;
  none_bounds = NULL_TREE;
  incomplete_bounds = integer_zero_node;
  tmp_var = NULL_TREE;
  size_tmp_var = NULL_TREE;

  chkp_uintptr_type = lang_hooks.types.type_for_mode (ptr_mode, true);

  /* We create these constant bounds once for each object file.
     These symbols go to comdat section and result in single copy
     of each one in the final binary.  */
  chkp_get_zero_bounds_var ();
  chkp_get_none_bounds_var ();

  calculate_dominance_info (CDI_DOMINATORS);
  calculate_dominance_info (CDI_POST_DOMINATORS);

  bitmap_obstack_initialize (NULL);
}
|
|
4577
|
|
4578 /* Finalize instrumentation pass. */
|
|
static void
chkp_fini (void)
{
  in_chkp_pass = false;

  /* Free the per-function maps allocated in chkp_init.  Note that
     chkp_reg_bounds, chkp_bound_vars and chkp_bounds_map are kept
     alive; they are reused/freed elsewhere (see chkp_init and
     chkp_finish_file).  */
  delete chkp_invalid_bounds;
  delete chkp_completed_bounds_set;
  delete chkp_reg_addr_bounds;
  delete chkp_incomplete_bounds_map;

  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  bitmap_obstack_release (NULL);

  /* Drop cached per-function trees.  */
  entry_block = NULL;
  zero_bounds = NULL_TREE;
  none_bounds = NULL_TREE;
}
|
|
4598
|
|
4599 /* Main instrumentation pass function. */
|
|
static unsigned int
chkp_execute (void)
{
  /* Set up per-function state and caches.  */
  chkp_init ();

  /* Insert checks, bounds loads/stores and call/ret bounds.  */
  chkp_instrument_function ();

  /* Turn chkp builtins that just return their argument into
     plain assignments.  */
  chkp_remove_useless_builtins ();

  chkp_function_mark_instrumented (cfun->decl);

  /* Move statements inserted after block-ending statements onto
     the proper edges.  */
  chkp_fix_cfg ();

  chkp_fini ();

  return 0;
}
|
|
4617
|
|
4618 /* Instrumentation pass gate. */
|
|
4619 static bool
|
|
4620 chkp_gate (void)
|
|
4621 {
|
|
4622 cgraph_node *node = cgraph_node::get (cfun->decl);
|
|
4623 return ((node != NULL
|
|
4624 && node->instrumentation_clone)
|
|
4625 || lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl)));
|
|
4626 }
|
|
4627
|
|
namespace {

/* Pass-manager descriptor for the chkp instrumentation pass.  */
const pass_data pass_data_chkp =
{
  GIMPLE_PASS, /* type */
  "chkp", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_ssa | PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_il
  | TODO_update_ssa /* todo_flags_finish */
};

/* GIMPLE pass wrapper; all work is delegated to chkp_gate and
   chkp_execute above.  */
class pass_chkp : public gimple_opt_pass
{
public:
  pass_chkp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_chkp, ctxt)
  {}

  /* opt_pass methods: */
  /* Cloneable so the pass may appear multiple times in a pipeline.  */
  virtual opt_pass * clone ()
    {
      return new pass_chkp (m_ctxt);
    }

  virtual bool gate (function *)
    {
      return chkp_gate ();
    }

  virtual unsigned int execute (function *)
    {
      return chkp_execute ();
    }

}; // class pass_chkp

} // anon namespace
|
|
4670
|
|
/* Factory used by the pass manager to instantiate the chkp pass.  */
gimple_opt_pass *
make_pass_chkp (gcc::context *ctxt)
{
  return new pass_chkp (ctxt);
}
|
|
4676
|
|
4677 #include "gt-tree-chkp.h"
|