/* Optimize and expand sanitizer functions.
   Copyright (C) 2014-2020 Free Software Foundation, Inc.
   Contributed by Marek Polacek <polacek@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "ssa.h"
#include "tree-pass.h"
#include "tree-ssa-operands.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "gimple-iterator.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "ubsan.h"
#include "tree-hash-traits.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "cfghooks.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "varasm.h"

/* This is used to carry information about basic blocks.  It is
   attached to the AUX field of the standard CFG block.  */

struct sanopt_info
{
  /* True if this BB might call (directly or indirectly) free/munmap
     or a similar operation.  */
  bool has_freeing_call_p;

  /* True if HAS_FREEING_CALL_P flag has been computed.  */
  bool has_freeing_call_computed_p;

  /* True if there is a block with HAS_FREEING_CALL_P flag set
     on any path between an immediate dominator of BB, denoted
     imm(BB), and BB.  */
  bool imm_dom_path_with_freeing_call_p;

  /* True if IMM_DOM_PATH_WITH_FREEING_CALL_P has been computed.  */
  bool imm_dom_path_with_freeing_call_computed_p;

  /* Number of possibly freeing calls encountered in this bb
     (so far).  */
  uint64_t freeing_call_events;

  /* True if BB is currently being visited during computation
     of IMM_DOM_PATH_WITH_FREEING_CALL_P flag.  */
  bool being_visited_p;

  /* True if this BB has been visited in the dominator walk.  */
  bool visited_p;
};

/* If T has a single definition of form T = T2, return T2.  */

static tree
maybe_get_single_definition (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      gimple *g = SSA_NAME_DEF_STMT (t);
      if (gimple_assign_single_p (g))
        return gimple_assign_rhs1 (g);
    }
  return NULL_TREE;
}

/* Tree triplet for vptr_check_map.  */
struct sanopt_tree_triplet
{
  tree t1, t2, t3;
};

/* Traits class for tree triplet hash maps below.  */

struct sanopt_tree_triplet_hash : typed_noop_remove <sanopt_tree_triplet>
{
  typedef sanopt_tree_triplet value_type;
  typedef sanopt_tree_triplet compare_type;

  static hashval_t
  hash (const sanopt_tree_triplet &ref)
  {
    inchash::hash hstate (0);
    inchash::add_expr (ref.t1, hstate);
    inchash::add_expr (ref.t2, hstate);
    inchash::add_expr (ref.t3, hstate);
    return hstate.end ();
  }

  static bool
  equal (const sanopt_tree_triplet &ref1, const sanopt_tree_triplet &ref2)
  {
    return operand_equal_p (ref1.t1, ref2.t1, 0)
           && operand_equal_p (ref1.t2, ref2.t2, 0)
           && operand_equal_p (ref1.t3, ref2.t3, 0);
  }

  static void
  mark_deleted (sanopt_tree_triplet &ref)
  {
    ref.t1 = reinterpret_cast<tree> (1);
  }

  static const bool empty_zero_p = true;

  static void
  mark_empty (sanopt_tree_triplet &ref)
  {
    ref.t1 = NULL;
  }

  static bool
  is_deleted (const sanopt_tree_triplet &ref)
  {
    return ref.t1 == reinterpret_cast<tree> (1);
  }

  static bool
  is_empty (const sanopt_tree_triplet &ref)
  {
    return ref.t1 == NULL;
  }
};

/* Tree couple for ptr_check_map.  */
struct sanopt_tree_couple
{
  tree ptr;
  bool pos_p;
};

/* Traits class for tree couple hash maps below.  */

struct sanopt_tree_couple_hash : typed_noop_remove <sanopt_tree_couple>
{
  typedef sanopt_tree_couple value_type;
  typedef sanopt_tree_couple compare_type;

  static hashval_t
  hash (const sanopt_tree_couple &ref)
  {
    inchash::hash hstate (0);
    inchash::add_expr (ref.ptr, hstate);
    hstate.add_int (ref.pos_p);
    return hstate.end ();
  }

  static bool
  equal (const sanopt_tree_couple &ref1, const sanopt_tree_couple &ref2)
  {
    return operand_equal_p (ref1.ptr, ref2.ptr, 0)
           && ref1.pos_p == ref2.pos_p;
  }

  static void
  mark_deleted (sanopt_tree_couple &ref)
  {
    ref.ptr = reinterpret_cast<tree> (1);
  }

  static const bool empty_zero_p = true;

  static void
  mark_empty (sanopt_tree_couple &ref)
  {
    ref.ptr = NULL;
  }

  static bool
  is_deleted (const sanopt_tree_couple &ref)
  {
    return ref.ptr == reinterpret_cast<tree> (1);
  }

  static bool
  is_empty (const sanopt_tree_couple &ref)
  {
    return ref.ptr == NULL;
  }
};

/* This is used to carry various hash maps and variables used
   in sanopt_optimize_walker.  */

class sanopt_ctx
{
public:
  /* This map maps a pointer (the first argument of UBSAN_NULL) to
     a vector of UBSAN_NULL call statements that check this pointer.  */
  hash_map<tree, auto_vec<gimple *> > null_check_map;

  /* This map maps a pointer (the second argument of ASAN_CHECK) to
     a vector of ASAN_CHECK call statements that check the access.  */
  hash_map<tree_operand_hash, auto_vec<gimple *> > asan_check_map;

  /* This map maps a tree triplet (the first, second and fourth argument
     of UBSAN_VPTR) to a vector of UBSAN_VPTR call statements that check
     that virtual table pointer.  */
  hash_map<sanopt_tree_triplet_hash, auto_vec<gimple *> > vptr_check_map;

  /* This map maps a couple (tree and boolean) to a vector of UBSAN_PTR
     call statements that check that pointer overflow.  */
  hash_map<sanopt_tree_couple_hash, auto_vec<gimple *> > ptr_check_map;

  /* Number of IFN_ASAN_CHECK statements.  */
  int asan_num_accesses;

  /* True when the current function contains an ASAN_MARK.  */
  bool contains_asan_mark;
};

/* Return true if there might be any call to a free/munmap-like operation
   on any path in between DOM (which should be imm(BB)) and BB.  */

static bool
imm_dom_path_with_freeing_call (basic_block bb, basic_block dom)
{
  sanopt_info *info = (sanopt_info *) bb->aux;
  edge e;
  edge_iterator ei;

  if (info->imm_dom_path_with_freeing_call_computed_p)
    return info->imm_dom_path_with_freeing_call_p;

  info->being_visited_p = true;

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      sanopt_info *pred_info = (sanopt_info *) e->src->aux;

      if (e->src == dom)
        continue;

      if ((pred_info->imm_dom_path_with_freeing_call_computed_p
           && pred_info->imm_dom_path_with_freeing_call_p)
          || (pred_info->has_freeing_call_computed_p
              && pred_info->has_freeing_call_p))
        {
          info->imm_dom_path_with_freeing_call_computed_p = true;
          info->imm_dom_path_with_freeing_call_p = true;
          info->being_visited_p = false;
          return true;
        }
    }

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      sanopt_info *pred_info = (sanopt_info *) e->src->aux;

      if (e->src == dom)
        continue;

      if (pred_info->has_freeing_call_computed_p)
        continue;

      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (e->src); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple *stmt = gsi_stmt (gsi);
          gasm *asm_stmt;

          if ((is_gimple_call (stmt) && !nonfreeing_call_p (stmt))
              || ((asm_stmt = dyn_cast <gasm *> (stmt))
                  && (gimple_asm_clobbers_memory_p (asm_stmt)
                      || gimple_asm_volatile_p (asm_stmt))))
            {
              pred_info->has_freeing_call_p = true;
              break;
            }
        }

      pred_info->has_freeing_call_computed_p = true;
      if (pred_info->has_freeing_call_p)
        {
          info->imm_dom_path_with_freeing_call_computed_p = true;
          info->imm_dom_path_with_freeing_call_p = true;
          info->being_visited_p = false;
          return true;
        }
    }

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      if (e->src == dom)
        continue;

      basic_block src;
      for (src = e->src; src != dom; )
        {
          sanopt_info *pred_info = (sanopt_info *) src->aux;
          if (pred_info->being_visited_p)
            break;
          basic_block imm = get_immediate_dominator (CDI_DOMINATORS, src);
          if (imm_dom_path_with_freeing_call (src, imm))
            {
              info->imm_dom_path_with_freeing_call_computed_p = true;
              info->imm_dom_path_with_freeing_call_p = true;
              info->being_visited_p = false;
              return true;
            }
          src = imm;
        }
    }

  info->imm_dom_path_with_freeing_call_computed_p = true;
  info->imm_dom_path_with_freeing_call_p = false;
  info->being_visited_p = false;
  return false;
}

/* Get the first dominating check from the list of stored checks.
   Non-dominating checks are silently dropped.  */

static gimple *
maybe_get_dominating_check (auto_vec<gimple *> &v)
{
  for (; !v.is_empty (); v.pop ())
    {
      gimple *g = v.last ();
      sanopt_info *si = (sanopt_info *) gimple_bb (g)->aux;
      if (!si->visited_p)
        /* At this point we shouldn't have any statements
           that aren't dominating the current BB.  */
        return g;
    }
  return NULL;
}

/* Optimize away redundant UBSAN_NULL calls.  */
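
/* As an illustration (hand-written, simplified GIMPLE; not taken from an
   actual dump), a dominated duplicate such as the second check in

     UBSAN_NULL (p_1, UBSAN_LOAD_OF, 0);
     ...
     UBSAN_NULL (p_1, UBSAN_LOAD_OF, 0);

   can be dropped, provided it does not require stricter alignment than
   the recorded dominating check.  */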

static bool
maybe_optimize_ubsan_null_ifn (class sanopt_ctx *ctx, gimple *stmt)
{
  gcc_assert (gimple_call_num_args (stmt) == 3);
  tree ptr = gimple_call_arg (stmt, 0);
  tree cur_align = gimple_call_arg (stmt, 2);
  gcc_assert (TREE_CODE (cur_align) == INTEGER_CST);
  bool remove = false;

  auto_vec<gimple *> &v = ctx->null_check_map.get_or_insert (ptr);
  gimple *g = maybe_get_dominating_check (v);
  if (!g)
    {
      /* For this PTR we don't have any UBSAN_NULL stmts recorded, so there's
         nothing to optimize yet.  */
      v.safe_push (stmt);
      return false;
    }

  /* We already have recorded a UBSAN_NULL check for this pointer.  Perhaps we
     can drop this one.  But only if this check doesn't specify stricter
     alignment.  */

  tree align = gimple_call_arg (g, 2);
  int kind = tree_to_shwi (gimple_call_arg (g, 1));
  /* If this is a NULL pointer check where we had segv anyway, we can
     remove it.  */
  if (integer_zerop (align)
      && (kind == UBSAN_LOAD_OF
          || kind == UBSAN_STORE_OF
          || kind == UBSAN_MEMBER_ACCESS))
    remove = true;
  /* Otherwise remove the check in non-recovering mode, or if the
     stmts have same location.  */
  else if (integer_zerop (align))
    remove = (flag_sanitize_recover & SANITIZE_NULL) == 0
             || flag_sanitize_undefined_trap_on_error
             || gimple_location (g) == gimple_location (stmt);
  else if (tree_int_cst_le (cur_align, align))
    remove = (flag_sanitize_recover & SANITIZE_ALIGNMENT) == 0
             || flag_sanitize_undefined_trap_on_error
             || gimple_location (g) == gimple_location (stmt);

  if (!remove && gimple_bb (g) == gimple_bb (stmt)
      && tree_int_cst_compare (cur_align, align) == 0)
    v.pop ();

  if (!remove)
    v.safe_push (stmt);
  return remove;
}

/* Return true when pointer PTR for a given CUR_OFFSET is already sanitized
   in a given sanitization context CTX.  */
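
/* For example (offsets assumed purely for illustration): a recorded
   dominating UBSAN_PTR (p_1, 16) makes a later UBSAN_PTR (p_1, 8)
   redundant, because proving that p_1 + 16 does not overflow also proves
   it for p_1 + 8; negative offsets are handled symmetrically under a
   separate key.  */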

static bool
has_dominating_ubsan_ptr_check (sanopt_ctx *ctx, tree ptr,
                                offset_int &cur_offset)
{
  bool pos_p = !wi::neg_p (cur_offset);
  sanopt_tree_couple couple;
  couple.ptr = ptr;
  couple.pos_p = pos_p;

  auto_vec<gimple *> &v = ctx->ptr_check_map.get_or_insert (couple);
  gimple *g = maybe_get_dominating_check (v);
  if (!g)
    return false;

  /* We already have recorded a UBSAN_PTR check for this pointer.  Perhaps we
     can drop this one.  But only if this check doesn't specify a larger
     offset.  */
  tree offset = gimple_call_arg (g, 1);
  gcc_assert (TREE_CODE (offset) == INTEGER_CST);
  offset_int ooffset = wi::sext (wi::to_offset (offset), POINTER_SIZE);

  if (pos_p)
    {
      if (wi::les_p (cur_offset, ooffset))
        return true;
    }
  else if (!pos_p && wi::les_p (ooffset, cur_offset))
    return true;

  return false;
}

/* Record a UBSAN_PTR check in the given context CTX: register that pointer
   PTR with a given OFFSET is handled by the GIMPLE statement STMT.  */

static void
record_ubsan_ptr_check_stmt (sanopt_ctx *ctx, gimple *stmt, tree ptr,
                             const offset_int &offset)
{
  sanopt_tree_couple couple;
  couple.ptr = ptr;
  couple.pos_p = !wi::neg_p (offset);

  auto_vec<gimple *> &v = ctx->ptr_check_map.get_or_insert (couple);
  v.safe_push (stmt);
}

/* Optimize away redundant UBSAN_PTR calls.  */

static bool
maybe_optimize_ubsan_ptr_ifn (sanopt_ctx *ctx, gimple *stmt)
{
  poly_int64 bitsize, pbitpos;
  machine_mode mode;
  int volatilep = 0, reversep, unsignedp = 0;
  tree offset;

  gcc_assert (gimple_call_num_args (stmt) == 2);
  tree ptr = gimple_call_arg (stmt, 0);
  tree off = gimple_call_arg (stmt, 1);

  if (TREE_CODE (off) != INTEGER_CST)
    return false;

  if (integer_zerop (off))
    return true;

  offset_int cur_offset = wi::sext (wi::to_offset (off), POINTER_SIZE);
  if (has_dominating_ubsan_ptr_check (ctx, ptr, cur_offset))
    return true;

  tree base = ptr;
  if (TREE_CODE (base) == ADDR_EXPR)
    {
      base = TREE_OPERAND (base, 0);

      HOST_WIDE_INT bitpos;
      base = get_inner_reference (base, &bitsize, &pbitpos, &offset, &mode,
                                  &unsignedp, &reversep, &volatilep);
      if ((offset == NULL_TREE || TREE_CODE (offset) == INTEGER_CST)
          && DECL_P (base)
          && !DECL_REGISTER (base)
          && pbitpos.is_constant (&bitpos))
        {
          offset_int expr_offset;
          if (offset)
            expr_offset = wi::to_offset (offset) + bitpos / BITS_PER_UNIT;
          else
            expr_offset = bitpos / BITS_PER_UNIT;
          expr_offset = wi::sext (expr_offset, POINTER_SIZE);
          offset_int total_offset = expr_offset + cur_offset;
          if (total_offset != wi::sext (total_offset, POINTER_SIZE))
            {
              record_ubsan_ptr_check_stmt (ctx, stmt, ptr, cur_offset);
              return false;
            }

          /* If BASE is a fixed size automatic variable or
             global variable defined in the current TU, we don't have
             to instrument anything if the offset stays within the
             bounds of the variable.  */
          if ((VAR_P (base)
               || TREE_CODE (base) == PARM_DECL
               || TREE_CODE (base) == RESULT_DECL)
              && DECL_SIZE_UNIT (base)
              && TREE_CODE (DECL_SIZE_UNIT (base)) == INTEGER_CST
              && (!is_global_var (base) || decl_binds_to_current_def_p (base)))
            {
              offset_int base_size = wi::to_offset (DECL_SIZE_UNIT (base));
              if (!wi::neg_p (expr_offset)
                  && wi::les_p (total_offset, base_size))
                {
                  if (!wi::neg_p (total_offset)
                      && wi::les_p (total_offset, base_size))
                    return true;
                }
            }

          /* Following expression: UBSAN_PTR (&MEM_REF[ptr + x], y) can be
             handled as follows:

             1) sign (x) == sign (y), then check for dominating check of (x + y)
             2) sign (x) != sign (y), then first check if we have a dominating
                check for ptr + x.  If so, then we have 2 situations:
                a) sign (x) == sign (x + y), here we are done, example:
                   UBSAN_PTR (&MEM_REF[ptr + 100], -50)
                b) check for dominating check of ptr + x + y.  */
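          /* A worked instance of case 2b (offsets assumed purely for
             illustration): with x = -32 and y = 100, sign (x) != sign (y)
             and sign (x + y) = sign (68) != sign (x), so besides the
             dominating check for ptr - 32 we also look for a dominating
             check of ptr + 68 before declaring this one redundant.  */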

          bool sign_cur_offset = !wi::neg_p (cur_offset);
          bool sign_expr_offset = !wi::neg_p (expr_offset);

          tree base_addr
            = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (base)), base);

          bool add = false;
          if (sign_cur_offset == sign_expr_offset)
            {
              if (has_dominating_ubsan_ptr_check (ctx, base_addr, total_offset))
                return true;
              else
                add = true;
            }
          else
            {
              if (!has_dominating_ubsan_ptr_check (ctx, base_addr, expr_offset))
                ; /* Don't record base_addr + expr_offset, it's not a guarding
                     check.  */
              else
                {
                  bool sign_total_offset = !wi::neg_p (total_offset);
                  if (sign_expr_offset == sign_total_offset)
                    return true;
                  else
                    {
                      if (has_dominating_ubsan_ptr_check (ctx, base_addr,
                                                          total_offset))
                        return true;
                      else
                        add = true;
                    }
                }
            }

          /* Record a new dominating check for base_addr + total_offset.  */
          if (add && !operand_equal_p (base, base_addr, 0))
            record_ubsan_ptr_check_stmt (ctx, stmt, base_addr,
                                         total_offset);
        }
    }

  /* For this PTR we don't have any UBSAN_PTR stmts recorded, so there's
     nothing to optimize yet.  */
  record_ubsan_ptr_check_stmt (ctx, stmt, ptr, cur_offset);

  return false;
}

/* Optimize away redundant UBSAN_VPTR calls.  The second argument
   is the value loaded from the virtual table, so rely on FRE to find out
   when we can actually optimize.  */

static bool
maybe_optimize_ubsan_vptr_ifn (class sanopt_ctx *ctx, gimple *stmt)
{
  gcc_assert (gimple_call_num_args (stmt) == 5);
  sanopt_tree_triplet triplet;
  triplet.t1 = gimple_call_arg (stmt, 0);
  triplet.t2 = gimple_call_arg (stmt, 1);
  triplet.t3 = gimple_call_arg (stmt, 3);

  auto_vec<gimple *> &v = ctx->vptr_check_map.get_or_insert (triplet);
  gimple *g = maybe_get_dominating_check (v);
  if (!g)
    {
      /* For this PTR we don't have any UBSAN_VPTR stmts recorded, so there's
         nothing to optimize yet.  */
      v.safe_push (stmt);
      return false;
    }

  return true;
}

/* Returns TRUE if ASan check of length LEN in block BB can be removed
   if preceded by checks in V.  */

static bool
can_remove_asan_check (auto_vec<gimple *> &v, tree len, basic_block bb)
{
  unsigned int i;
  gimple *g;
  gimple *to_pop = NULL;
  bool remove = false;
  basic_block last_bb = bb;
  bool cleanup = false;

  FOR_EACH_VEC_ELT_REVERSE (v, i, g)
    {
      basic_block gbb = gimple_bb (g);
      sanopt_info *si = (sanopt_info *) gbb->aux;
      if (gimple_uid (g) < si->freeing_call_events)
        {
          /* If there is a potentially freeing call after G in GBB, we can't
             use G for the optimization; remove it from the vector.  */
          cleanup = true;
          continue;
        }

      tree glen = gimple_call_arg (g, 2);
      gcc_assert (TREE_CODE (glen) == INTEGER_CST);

      /* If we've checked only smaller length than we want to check now,
         we can't remove the current stmt.  If g is in the same basic block,
         we want to remove it though, as the current stmt is better.  */
      if (tree_int_cst_lt (glen, len))
        {
          if (gbb == bb)
            {
              to_pop = g;
              cleanup = true;
            }
          continue;
        }

      while (last_bb != gbb)
        {
          /* Paths from last_bb to bb have been checked before.
             gbb is necessarily a dominator of last_bb, but not necessarily
             immediate dominator.  */
          if (((sanopt_info *) last_bb->aux)->freeing_call_events)
            break;

          basic_block imm = get_immediate_dominator (CDI_DOMINATORS, last_bb);
          gcc_assert (imm);
          if (imm_dom_path_with_freeing_call (last_bb, imm))
            break;

          last_bb = imm;
        }
      if (last_bb == gbb)
        remove = true;
      break;
    }

  if (cleanup)
    {
      unsigned int j = 0, l = v.length ();
      for (i = 0; i < l; i++)
        if (v[i] != to_pop
            && (gimple_uid (v[i])
                == ((sanopt_info *)
                    gimple_bb (v[i])->aux)->freeing_call_events))
          {
            if (i != j)
              v[j] = v[i];
            j++;
          }
      v.truncate (j);
    }

  return remove;
}

/* Optimize away redundant ASAN_CHECK calls.  */
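
/* For instance (sizes and names assumed purely for illustration): a
   dominating ASAN_CHECK (flags, p_1, 16, align) lets a later
   ASAN_CHECK (flags, p_1, 8, align) be removed, as long as no possibly
   freeing call may occur on the paths in between.  */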

static bool
maybe_optimize_asan_check_ifn (class sanopt_ctx *ctx, gimple *stmt)
{
  gcc_assert (gimple_call_num_args (stmt) == 4);
  tree ptr = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
  basic_block bb = gimple_bb (stmt);
  sanopt_info *info = (sanopt_info *) bb->aux;

  if (TREE_CODE (len) != INTEGER_CST)
    return false;
  if (integer_zerop (len))
    return false;

  gimple_set_uid (stmt, info->freeing_call_events);

  auto_vec<gimple *> *ptr_checks = &ctx->asan_check_map.get_or_insert (ptr);

  tree base_addr = maybe_get_single_definition (ptr);
  auto_vec<gimple *> *base_checks = NULL;
  if (base_addr)
    {
      base_checks = &ctx->asan_check_map.get_or_insert (base_addr);
      /* Original pointer might have been invalidated.  */
      ptr_checks = ctx->asan_check_map.get (ptr);
    }

  gimple *g = maybe_get_dominating_check (*ptr_checks);
  gimple *g2 = NULL;

  if (base_checks)
    /* Try with base address as well.  */
    g2 = maybe_get_dominating_check (*base_checks);

  if (g == NULL && g2 == NULL)
    {
      /* For this PTR we don't have any ASAN_CHECK stmts recorded, so there's
         nothing to optimize yet.  */
      ptr_checks->safe_push (stmt);
      if (base_checks)
        base_checks->safe_push (stmt);
      return false;
    }

  bool remove = false;

  if (ptr_checks)
    remove = can_remove_asan_check (*ptr_checks, len, bb);

  if (!remove && base_checks)
    /* Try with base address as well.  */
    remove = can_remove_asan_check (*base_checks, len, bb);

  if (!remove)
    {
      ptr_checks->safe_push (stmt);
      if (base_checks)
        base_checks->safe_push (stmt);
    }

  return remove;
}

/* Try to optimize away redundant UBSAN_NULL and ASAN_CHECK calls.

   We walk blocks in the CFG via a depth first search of the dominator
   tree; we push unique UBSAN_NULL or ASAN_CHECK statements into a vector
   in the NULL_CHECK_MAP or ASAN_CHECK_MAP hash maps as we enter the
   blocks.  When leaving a block, we mark the block as visited; then
   when checking the statements in the vector, we ignore statements that
   are coming from already visited blocks, because these cannot dominate
   anything anymore.  CTX is a sanopt context.  */

static void
sanopt_optimize_walker (basic_block bb, class sanopt_ctx *ctx)
{
  basic_block son;
  gimple_stmt_iterator gsi;
  sanopt_info *info = (sanopt_info *) bb->aux;
  bool asan_check_optimize = (flag_sanitize & SANITIZE_ADDRESS) != 0;

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
    {
      gimple *stmt = gsi_stmt (gsi);
      bool remove = false;

      if (!is_gimple_call (stmt))
        {
          /* Handle asm volatile or asm with "memory" clobber
             the same as a potentially freeing call.  */
          gasm *asm_stmt = dyn_cast <gasm *> (stmt);
          if (asm_stmt
              && asan_check_optimize
              && (gimple_asm_clobbers_memory_p (asm_stmt)
                  || gimple_asm_volatile_p (asm_stmt)))
            info->freeing_call_events++;
          gsi_next (&gsi);
          continue;
        }

      if (asan_check_optimize && !nonfreeing_call_p (stmt))
        info->freeing_call_events++;

      /* If __asan_before_dynamic_init ("module"); is followed by
         __asan_after_dynamic_init (); without intervening memory loads/stores,
         there is nothing to guard, so optimize both away.  */
      if (asan_check_optimize
          && gimple_call_builtin_p (stmt, BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT))
        {
          use_operand_p use;
          gimple *use_stmt;
          if (single_imm_use (gimple_vdef (stmt), &use, &use_stmt))
            {
              if (is_gimple_call (use_stmt)
                  && gimple_call_builtin_p (use_stmt,
                                            BUILT_IN_ASAN_AFTER_DYNAMIC_INIT))
                {
                  unlink_stmt_vdef (use_stmt);
                  gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
                  gsi_remove (&gsi2, true);
                  remove = true;
                }
            }
        }

      if (gimple_call_internal_p (stmt))
        switch (gimple_call_internal_fn (stmt))
          {
          case IFN_UBSAN_NULL:
            remove = maybe_optimize_ubsan_null_ifn (ctx, stmt);
            break;
          case IFN_UBSAN_VPTR:
            remove = maybe_optimize_ubsan_vptr_ifn (ctx, stmt);
            break;
          case IFN_UBSAN_PTR:
            remove = maybe_optimize_ubsan_ptr_ifn (ctx, stmt);
            break;
          case IFN_ASAN_CHECK:
            if (asan_check_optimize)
              remove = maybe_optimize_asan_check_ifn (ctx, stmt);
            if (!remove)
              ctx->asan_num_accesses++;
            break;
          case IFN_ASAN_MARK:
            ctx->contains_asan_mark = true;
            break;
          default:
            break;
          }

      if (remove)
        {
          /* Drop this check.  */
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Optimizing out: ");
              print_gimple_stmt (dump_file, stmt, 0, dump_flags);
            }
          unlink_stmt_vdef (stmt);
          gsi_remove (&gsi, true);
        }
      else
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Leaving: ");
              print_gimple_stmt (dump_file, stmt, 0, dump_flags);
            }

          gsi_next (&gsi);
        }
    }

  if (asan_check_optimize)
    {
      info->has_freeing_call_p = info->freeing_call_events != 0;
      info->has_freeing_call_computed_p = true;
    }

  for (son = first_dom_son (CDI_DOMINATORS, bb);
       son;
       son = next_dom_son (CDI_DOMINATORS, son))
    sanopt_optimize_walker (son, ctx);

  /* We're leaving this BB, so mark it to that effect.  */
  info->visited_p = true;
}

/* Try to remove redundant sanitizer checks in function FUN.  */

static int
sanopt_optimize (function *fun, bool *contains_asan_mark)
{
  class sanopt_ctx ctx;
  ctx.asan_num_accesses = 0;
  ctx.contains_asan_mark = false;

  /* Set up block info for each basic block.  */
  alloc_aux_for_blocks (sizeof (sanopt_info));

  /* We're going to do a dominator walk, so ensure that we have
     dominance information.  */
  calculate_dominance_info (CDI_DOMINATORS);

  /* Recursively walk the dominator tree optimizing away
     redundant checks.  */
  sanopt_optimize_walker (ENTRY_BLOCK_PTR_FOR_FN (fun), &ctx);

  free_aux_for_blocks ();

  *contains_asan_mark = ctx.contains_asan_mark;
  return ctx.asan_num_accesses;
}

/* Perform optimization of sanitize functions.  */

namespace {

const pass_data pass_data_sanopt =
{
  GIMPLE_PASS, /* type */
  "sanopt", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_sanopt : public gimple_opt_pass
{
public:
  pass_sanopt (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_sanopt, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return flag_sanitize; }
  virtual unsigned int execute (function *);

}; // class pass_sanopt

/* Sanitize all ASAN_MARK unpoison calls that are not reachable from a BB
   that contains an ASAN_MARK poison.  All these ASAN_MARK unpoison calls
   can be removed as all variables are unpoisoned in a function prologue.  */
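
/* For instance (illustrative, simplified GIMPLE): an
   ASAN_MARK (UNPOISON, &x, 4) in a block that no ASAN_MARK (POISON, ...)
   can reach is redundant -- X is still in its unpoisoned prologue state,
   so the call may simply be dropped.  */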

static void
sanitize_asan_mark_unpoison (void)
{
  /* 1) Find all BBs that contain an ASAN_MARK poison call.  */
  auto_sbitmap with_poison (last_basic_block_for_fn (cfun) + 1);
  bitmap_clear (with_poison);
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      if (bitmap_bit_p (with_poison, bb->index))
        continue;

      gimple_stmt_iterator gsi;
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
        {
          gimple *stmt = gsi_stmt (gsi);
          if (asan_mark_p (stmt, ASAN_MARK_POISON))
            {
              bitmap_set_bit (with_poison, bb->index);
              break;
            }
        }
    }

  auto_sbitmap poisoned (last_basic_block_for_fn (cfun) + 1);
  bitmap_clear (poisoned);
  auto_sbitmap worklist (last_basic_block_for_fn (cfun) + 1);
  bitmap_copy (worklist, with_poison);

  /* 2) Propagate the information to all reachable blocks.  */
  while (!bitmap_empty_p (worklist))
    {
      unsigned i = bitmap_first_set_bit (worklist);
      bitmap_clear_bit (worklist, i);
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      gcc_assert (bb);

      edge e;
      edge_iterator ei;
      FOR_EACH_EDGE (e, ei, bb->succs)
        if (!bitmap_bit_p (poisoned, e->dest->index))
          {
            bitmap_set_bit (poisoned, e->dest->index);
            bitmap_set_bit (worklist, e->dest->index);
          }
    }

  /* 3) Iterate over all BBs not included in POISONED and remove ASAN_MARK
     unpoison calls up to the first ASAN_MARK poison in the block (which can
     still happen).  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      if (bitmap_bit_p (poisoned, bb->index))
        continue;

      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
        {
          gimple *stmt = gsi_stmt (gsi);
          if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
            {
              if (asan_mark_p (stmt, ASAN_MARK_POISON))
                break;
              else
                {
                  if (dump_file)
                    fprintf (dump_file, "Removing ASAN_MARK unpoison\n");
                  unlink_stmt_vdef (stmt);
                  release_defs (stmt);
                  gsi_remove (&gsi, true);
                  continue;
                }
            }

          gsi_next (&gsi);
        }
    }
}

/* Return true when STMT is either an ASAN_CHECK call or a call to a function
   that can contain an ASAN_CHECK.  */

static bool
maybe_contains_asan_check (gimple *stmt)
{
  if (is_gimple_call (stmt))
    {
      if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
        return false;
      else
        return !(gimple_call_flags (stmt) & ECF_CONST);
    }
  else if (is_a<gasm *> (stmt))
    return true;

  return false;
}

/* Sanitize all ASAN_MARK poison calls that are not followed by an ASAN_CHECK
   call.  These calls can be removed.  */
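
/* For instance (illustrative): an ASAN_MARK (POISON, &x, 4) from which no
   path leads to a statement that may contain an ASAN_CHECK can be dropped;
   nothing would ever observe the poisoned shadow memory.  */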

static void
sanitize_asan_mark_poison (void)
{
  /* 1) Find all BBs that possibly contain an ASAN_CHECK.  */
  auto_sbitmap with_check (last_basic_block_for_fn (cfun) + 1);
  bitmap_clear (with_check);
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
        {
          gimple *stmt = gsi_stmt (gsi);
          if (maybe_contains_asan_check (stmt))
            {
              bitmap_set_bit (with_check, bb->index);
              break;
            }
        }
    }

  auto_sbitmap can_reach_check (last_basic_block_for_fn (cfun) + 1);
  bitmap_clear (can_reach_check);
  auto_sbitmap worklist (last_basic_block_for_fn (cfun) + 1);
  bitmap_copy (worklist, with_check);

  /* 2) Propagate the information backwards to all blocks from which a check
     can be reached.  */
  while (!bitmap_empty_p (worklist))
    {
      unsigned i = bitmap_first_set_bit (worklist);
      bitmap_clear_bit (worklist, i);
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      gcc_assert (bb);

      edge e;
      edge_iterator ei;
      FOR_EACH_EDGE (e, ei, bb->preds)
        if (!bitmap_bit_p (can_reach_check, e->src->index))
          {
            bitmap_set_bit (can_reach_check, e->src->index);
            bitmap_set_bit (worklist, e->src->index);
          }
    }

  /* 3) Iterate over all BBs not included in CAN_REACH_CHECK and remove
     ASAN_MARK poison calls that are not followed by a statement that may
     contain an ASAN_CHECK.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      if (bitmap_bit_p (can_reach_check, bb->index))
        continue;

      gimple_stmt_iterator gsi;
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
        {
          gimple *stmt = gsi_stmt (gsi);
          if (maybe_contains_asan_check (stmt))
            break;
          else if (asan_mark_p (stmt, ASAN_MARK_POISON))
            {
              if (dump_file)
                fprintf (dump_file, "Removing ASAN_MARK poison\n");
              unlink_stmt_vdef (stmt);
              release_defs (stmt);
              gimple_stmt_iterator gsi2 = gsi;
              gsi_prev (&gsi);
              gsi_remove (&gsi2, true);
              continue;
            }

          gsi_prev (&gsi);
        }
    }
}

/* Rewrite all usages of tree OP which is a PARM_DECL with a VAR_DECL
   that is its DECL_VALUE_EXPR.  */

static tree
rewrite_usage_of_param (tree *op, int *walk_subtrees, void *)
{
  if (TREE_CODE (*op) == PARM_DECL && DECL_HAS_VALUE_EXPR_P (*op))
    {
      *op = DECL_VALUE_EXPR (*op);
      *walk_subtrees = 0;
    }

  return NULL;
}

/* For a given function FUN, rewrite all addressable parameters so that
   a new automatic variable is introduced.  Right after function entry
   the parameter is assigned to the variable.  */
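
/* Conceptually (illustrative C, not the exact GIMPLE produced here):

     void foo (int x) { bar (&x); }

   behaves as if rewritten to

     void foo (int x) { int x_copy = x; bar (&x_copy); }

   so that only the new local variable, which ASan can protect, is
   addressable, while the parameter itself no longer needs to live in
   memory.  */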

static void
sanitize_rewrite_addressable_params (function *fun)
{
  gimple *g;
  gimple_seq stmts = NULL;
  bool has_any_addressable_param = false;
  auto_vec<tree> clear_value_expr_list;

  for (tree arg = DECL_ARGUMENTS (current_function_decl);
       arg; arg = DECL_CHAIN (arg))
    {
      tree type = TREE_TYPE (arg);
      if (TREE_ADDRESSABLE (arg)
          && !TREE_ADDRESSABLE (type)
          && !TREE_THIS_VOLATILE (arg)
          && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
        {
          TREE_ADDRESSABLE (arg) = 0;
          /* The parameter is no longer addressable.  */
          has_any_addressable_param = true;

          /* Create a new automatic variable.  */
          tree var = build_decl (DECL_SOURCE_LOCATION (arg),
                                 VAR_DECL, DECL_NAME (arg), type);
          TREE_ADDRESSABLE (var) = 1;
          DECL_IGNORED_P (var) = 1;

          gimple_add_tmp_var (var);

          /* We skip parameters that have a DECL_VALUE_EXPR.  */
          if (DECL_HAS_VALUE_EXPR_P (arg))
            continue;

          if (dump_file)
            {
              fprintf (dump_file,
                       "Rewriting parameter whose address is taken: ");
              print_generic_expr (dump_file, arg, dump_flags);
              fputc ('\n', dump_file);
            }

          SET_DECL_PT_UID (var, DECL_PT_UID (arg));

          /* Assign value of parameter to newly created variable.  */
          if ((TREE_CODE (type) == COMPLEX_TYPE
               || TREE_CODE (type) == VECTOR_TYPE))
            {
              /* We need to create an SSA name that will be used for the
                 assignment.  */
              DECL_GIMPLE_REG_P (arg) = 1;
              tree tmp = get_or_create_ssa_default_def (cfun, arg);
              g = gimple_build_assign (var, tmp);
              gimple_set_location (g, DECL_SOURCE_LOCATION (arg));
              gimple_seq_add_stmt (&stmts, g);
            }
          else
            {
              g = gimple_build_assign (var, arg);
              gimple_set_location (g, DECL_SOURCE_LOCATION (arg));
              gimple_seq_add_stmt (&stmts, g);
            }

          if (target_for_debug_bind (arg))
            {
              g = gimple_build_debug_bind (arg, var, NULL);
              gimple_seq_add_stmt (&stmts, g);
              clear_value_expr_list.safe_push (arg);
            }

          DECL_HAS_VALUE_EXPR_P (arg) = 1;
          SET_DECL_VALUE_EXPR (arg, var);
        }
    }

  if (!has_any_addressable_param)
    return;

  /* Replace all usages of PARM_DECLs with the newly
     created variable VAR.  */
  basic_block bb;
  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple *stmt = gsi_stmt (gsi);
          gimple_stmt_iterator it = gsi_for_stmt (stmt);
          walk_gimple_stmt (&it, NULL, rewrite_usage_of_param, NULL);
        }
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gphi *phi = dyn_cast<gphi *> (gsi_stmt (gsi));
          for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
            {
              hash_set<tree> visited_nodes;
              walk_tree (gimple_phi_arg_def_ptr (phi, i),
                         rewrite_usage_of_param, NULL, &visited_nodes);
            }
        }
    }

  /* Unset value expr for parameters for which we created debug bind
     expressions.  */
  unsigned i;
  tree arg;
  FOR_EACH_VEC_ELT (clear_value_expr_list, i, arg)
    {
      DECL_HAS_VALUE_EXPR_P (arg) = 0;
      SET_DECL_VALUE_EXPR (arg, NULL_TREE);
    }

  /* Insert default assignments at the beginning of a function.  */
  basic_block entry_bb = ENTRY_BLOCK_PTR_FOR_FN (fun);
  entry_bb = split_edge (single_succ_edge (entry_bb));

  gimple_stmt_iterator gsi = gsi_start_bb (entry_bb);
  gsi_insert_seq_before (&gsi, stmts, GSI_NEW_STMT);
}

unsigned int
pass_sanopt::execute (function *fun)
{
  basic_block bb;
  int asan_num_accesses = 0;
  bool contains_asan_mark = false;

  /* Try to remove redundant checks.  */
  if (optimize
      && (flag_sanitize
          & (SANITIZE_NULL | SANITIZE_ALIGNMENT
             | SANITIZE_ADDRESS | SANITIZE_VPTR | SANITIZE_POINTER_OVERFLOW)))
    asan_num_accesses = sanopt_optimize (fun, &contains_asan_mark);
  else if (flag_sanitize & SANITIZE_ADDRESS)
    {
      gimple_stmt_iterator gsi;
      FOR_EACH_BB_FN (bb, fun)
        for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
          {
            gimple *stmt = gsi_stmt (gsi);
            if (gimple_call_internal_p (stmt, IFN_ASAN_CHECK))
              ++asan_num_accesses;
            else if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
              contains_asan_mark = true;
          }
    }

  if (contains_asan_mark)
    {
      sanitize_asan_mark_unpoison ();
      sanitize_asan_mark_poison ();
    }

  if (asan_sanitize_stack_p ())
    sanitize_rewrite_addressable_params (fun);

  bool use_calls = param_asan_instrumentation_with_call_threshold < INT_MAX
    && asan_num_accesses >= param_asan_instrumentation_with_call_threshold;

  hash_map<tree, tree> shadow_vars_mapping;
  bool need_commit_edge_insert = false;
  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
        {
          gimple *stmt = gsi_stmt (gsi);
          bool no_next = false;

          if (!is_gimple_call (stmt))
            {
              gsi_next (&gsi);
              continue;
            }

          if (gimple_call_internal_p (stmt))
            {
              enum internal_fn ifn = gimple_call_internal_fn (stmt);
              switch (ifn)
                {
                case IFN_UBSAN_NULL:
                  no_next = ubsan_expand_null_ifn (&gsi);
                  break;
                case IFN_UBSAN_BOUNDS:
                  no_next = ubsan_expand_bounds_ifn (&gsi);
                  break;
                case IFN_UBSAN_OBJECT_SIZE:
                  no_next = ubsan_expand_objsize_ifn (&gsi);
                  break;
                case IFN_UBSAN_PTR:
                  no_next = ubsan_expand_ptr_ifn (&gsi);
                  break;
                case IFN_UBSAN_VPTR:
                  no_next = ubsan_expand_vptr_ifn (&gsi);
                  break;
                case IFN_ASAN_CHECK:
                  no_next = asan_expand_check_ifn (&gsi, use_calls);
                  break;
                case IFN_ASAN_MARK:
                  no_next = asan_expand_mark_ifn (&gsi);
                  break;
                case IFN_ASAN_POISON:
                  no_next = asan_expand_poison_ifn (&gsi,
                                                    &need_commit_edge_insert,
                                                    shadow_vars_mapping);
                  break;
                default:
                  break;
                }
            }
          else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
            {
              tree callee = gimple_call_fndecl (stmt);
              switch (DECL_FUNCTION_CODE (callee))
                {
                case BUILT_IN_UNREACHABLE:
                  if (sanitize_flags_p (SANITIZE_UNREACHABLE))
                    no_next = ubsan_instrument_unreachable (&gsi);
                  break;
                default:
                  break;
                }
            }

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Expanded: ");
              print_gimple_stmt (dump_file, stmt, 0, dump_flags);
            }

          if (!no_next)
            gsi_next (&gsi);
        }
    }

  if (need_commit_edge_insert)
    gsi_commit_edge_inserts ();

  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_sanopt (gcc::context *ctxt)
{
  return new pass_sanopt (ctxt);
}