comparison gcc/asan.c @ 111:04ced10e8804

gcc 7
author kono
date Fri, 27 Oct 2017 22:46:09 +0900
children 84e7813d76e9
comparing 68:561a7518be6b with 111:04ced10e8804
1 /* AddressSanitizer, a fast memory error detector.
2 Copyright (C) 2012-2017 Free Software Foundation, Inc.
3 Contributed by Kostya Serebryany <kcc@google.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "backend.h"
26 #include "target.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "gimple.h"
30 #include "cfghooks.h"
31 #include "alloc-pool.h"
32 #include "tree-pass.h"
33 #include "memmodel.h"
34 #include "tm_p.h"
35 #include "ssa.h"
36 #include "stringpool.h"
37 #include "tree-ssanames.h"
38 #include "optabs.h"
39 #include "emit-rtl.h"
40 #include "cgraph.h"
41 #include "gimple-pretty-print.h"
42 #include "alias.h"
43 #include "fold-const.h"
44 #include "cfganal.h"
45 #include "gimplify.h"
46 #include "gimple-iterator.h"
47 #include "varasm.h"
48 #include "stor-layout.h"
49 #include "tree-iterator.h"
50 #include "stringpool.h"
51 #include "attribs.h"
52 #include "asan.h"
53 #include "dojump.h"
54 #include "explow.h"
55 #include "expr.h"
56 #include "output.h"
57 #include "langhooks.h"
58 #include "cfgloop.h"
59 #include "gimple-builder.h"
60 #include "gimple-fold.h"
61 #include "ubsan.h"
62 #include "params.h"
63 #include "builtins.h"
64 #include "fnmatch.h"
65 #include "tree-inline.h"
66
67 /* AddressSanitizer finds out-of-bounds and use-after-free bugs
68 with <2x slowdown on average.
69
70 The tool consists of two parts:
71 instrumentation module (this file) and a run-time library.
72 The instrumentation module adds a run-time check before every memory insn.
73 For an 8- or 16-byte load accessing address X:
74 ShadowAddr = (X >> 3) + Offset
75 ShadowValue = *(char*)ShadowAddr; // *(short*) for 16-byte access.
76 if (ShadowValue)
77 __asan_report_load8(X);
78 For a load of N bytes (N=1, 2 or 4) from address X:
79 ShadowAddr = (X >> 3) + Offset
80 ShadowValue = *(char*)ShadowAddr;
81 if (ShadowValue)
82 if ((X & 7) + N - 1 >= ShadowValue)
83 __asan_report_loadN(X);
84 Stores are instrumented similarly, but using __asan_report_storeN functions.
85 A call to __asan_init_vN() is inserted into the list of module CTORs.
86 N is the version number of the AddressSanitizer API. The changes between the
87 API versions are listed in libsanitizer/asan/asan_interface_internal.h.
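
   As a hand-written, self-contained sketch of the same check (not GCC
   output; kShadowOffset stands in for the target-specific Offset above,
   here the x86_64 value, and abort () stands in for __asan_report_loadN):

     #include <stdint.h>
     #include <stdlib.h>

     static void check_load (uintptr_t x, size_t n) // n = 1, 2 or 4
     {
       const uintptr_t kShadowOffset = 0x7fff8000;  // assumed: x86_64
       signed char shadow = *(signed char *) ((x >> 3) + kShadowOffset);
       if (shadow != 0 && (signed char) ((x & 7) + n - 1) >= shadow)
         abort ();                                  // __asan_report_loadN (x)
     }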
88
89 The run-time library redefines malloc (so that redzones are inserted around
90 the allocated memory) and free (so that reuse of freed memory is delayed),
91 provides __asan_report* and __asan_init_vN functions.
92
93 Read more:
94 http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm
95
96 The current implementation supports detection of out-of-bounds and
97 use-after-free in the heap, on the stack and for global variables.
98
99 [Protection of stack variables]
100
101 To understand how detection of out-of-bounds and use-after-free works
102 for stack variables, let's look at this example on x86_64 where the
103 stack grows downward:
104
105 int
106 foo ()
107 {
108 char a[23] = {0};
109 int b[2] = {0};
110
111 a[5] = 1;
112 b[1] = 2;
113
114 return a[5] + b[1];
115 }
116
117 For this function, the stack protected by asan will be organized as
118 follows, from the top of the stack to the bottom:
119
120 Slot 1/ [red zone of 32 bytes called 'RIGHT RedZone']
121
122 Slot 2/ [8 bytes of red zone that pad the space of 'a' so that the
123 next slot is 32-byte aligned; this one is called the Partial
124 Redzone; this 32-byte alignment is an asan constraint]
125
126 Slot 3/ [24 bytes for variable 'a']
127
128 Slot 4/ [red zone of 32 bytes called 'Middle RedZone']
129
130 Slot 5/ [24 bytes of Partial Red Zone (similar to slot 2)]
131
132 Slot 6/ [8 bytes for variable 'b']
133
134 Slot 7/ [32 bytes of Red Zone at the bottom of the stack, called
135 'LEFT RedZone']
136
137 The 32 bytes of LEFT red zone at the bottom of the stack can be
138 decomposed as such:
139
140 1/ The first 8 bytes contain a magical asan number that is always
141 0x41B58AB3.
142
143 2/ The following 8 bytes contain a pointer to a string (to be
144 parsed at runtime by the asan run-time library), whose format is
145 the following:
146
147 "<function-name> <space> <num-of-variables-on-the-stack>
148 (<32-bytes-aligned-offset-in-bytes-of-variable> <space>
149 <length-of-var-in-bytes> ){n} "
150
151 where '(...){n}' means the content inside the parentheses occurs 'n'
152 times, with 'n' being the number of variables on the stack.
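
   For the example function foo above, the string would read something
   like "foo 2 32 8 96 24 " -- two variables, 'b' at 32-byte-aligned
   offset 32 with length 8 and 'a' at offset 96 with length 24 (the
   concrete offsets are assumed here for exposition only).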
153
154 3/ The following 8 bytes contain the PC of the current function which
155 will be used by the run-time library to print an error message.
156
157 4/ The following 8 bytes are reserved for internal use by the run-time.
158
159 The shadow memory for that stack layout is going to look like this:
160
161 - content of shadow memory 8 bytes for slot 7: 0xF1F1F1F1.
162 The F1 byte pattern is a magic number called
163 ASAN_STACK_MAGIC_LEFT and is a way for the runtime to know that
164 the memory for that shadow byte is part of the LEFT red zone
165 intended to sit at the bottom of the variables on the stack.
166
167 - content of shadow memory 8 bytes for slots 6 and 5:
168 0xF4F4F400. The F4 byte pattern is a magic number
169 called ASAN_STACK_MAGIC_PARTIAL. It flags the fact that the
170 memory region for this shadow byte is a PARTIAL red zone
171 intended to pad a variable A, so that the slot following
172 {A,padding} is 32 bytes aligned.
173
174 Note that the fact that the least significant byte of this
175 shadow memory content is 00 means that 8 bytes of its
176 corresponding memory (that is, the memory of
177 variable 'b') are addressable.
178
179 - content of shadow memory 8 bytes for slot 4: 0xF2F2F2F2.
180 The F2 byte pattern is a magic number called
181 ASAN_STACK_MAGIC_MIDDLE. It flags the fact that the memory
182 region for this shadow byte is a MIDDLE red zone intended to
183 sit between two 32-byte aligned slots of {variable,padding}.
184
185 - content of shadow memory 8 bytes for slot 3 and 2:
186 0xF4000000. This represents the concatenation of
187 variable 'a' and the partial red zone following it, like what we
188 had for variable 'b'. The least significant 3 bytes being 00
189 means that 24 bytes (3 shadow bytes of 8 bytes each) of variable 'a' are addressable.
190
191 - content of shadow memory 8 bytes for slot 1: 0xF3F3F3F3.
192 The F3 byte pattern is a magic number called
193 ASAN_STACK_MAGIC_RIGHT. It flags the fact that the memory
194 region for this shadow byte is a RIGHT red zone intended to sit
195 at the top of the variables on the stack.
196
197 Note that the real variable layout is done in expand_used_vars in
198 cfgexpand.c. As far as Address Sanitizer is concerned, it lays out
199 stack variables as well as the different red zones, emits some
200 prologue code to populate the shadow memory so as to poison (mark as
201 non-accessible) the regions of the red zones and mark the regions of
202 stack variables as accessible, and emits some epilogue code to
203 un-poison (mark as accessible) the regions of red zones right before
204 the function exits.
205
206 [Protection of global variables]
207
208 The basic idea is to insert a red zone between two global variables
209 and install a constructor function that calls the asan runtime to do
210 the populating of the relevant shadow memory regions at load time.
211
212 So the global variables are laid out so as to insert a red zone between
213 them. The red zones are sized so that each variable starts on a
214 32-byte boundary.
215
216 Then a constructor function is installed so that, for each global
217 variable, it calls the runtime asan library function
218 __asan_register_globals with an instance of this type:
219
220 struct __asan_global
221 {
222 // Address of the beginning of the global variable.
223 const void *__beg;
224
225 // Initial size of the global variable.
226 uptr __size;
227
228 // Size of the global variable + size of the red zone. This
229 // size is 32 bytes aligned.
230 uptr __size_with_redzone;
231
232 // Name of the global variable.
233 const void *__name;
234
235 // Name of the module where the global variable is declared.
236 const void *__module_name;
237
238 // 1 if it has dynamic initialization, 0 otherwise.
239 uptr __has_dynamic_init;
240
241 // A pointer to struct that contains source location, could be NULL.
242 __asan_global_source_location *__location;
243 }
244
245 A destructor function that calls the runtime asan library function
246 __asan_unregister_globals is also installed. */
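
/* Illustratively (hand-written, not GCC's actual expansion), the
   installed constructor and destructor amount to:

     static struct __asan_global g_desc;  // one descriptor per global

     static void module_ctor (void)
     { __asan_register_globals (&g_desc, 1); }

     static void module_dtor (void)
     { __asan_unregister_globals (&g_desc, 1); }

   with module_ctor and module_dtor appended to the module's CTOR and
   DTOR lists respectively.  */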
247
248 static unsigned HOST_WIDE_INT asan_shadow_offset_value;
249 static bool asan_shadow_offset_computed;
250 static vec<char *> sanitized_sections;
251 static tree last_alloca_addr;
252
253 /* Set of variable declarations that are going to be guarded by
254 use-after-scope sanitizer. */
255
256 static hash_set<tree> *asan_handled_variables = NULL;
257
258 hash_set <tree> *asan_used_labels = NULL;
259
260 /* Sets shadow offset to value in string VAL. */
261
262 bool
263 set_asan_shadow_offset (const char *val)
264 {
265 char *endp;
266
267 errno = 0;
268 #ifdef HAVE_LONG_LONG
269 asan_shadow_offset_value = strtoull (val, &endp, 0);
270 #else
271 asan_shadow_offset_value = strtoul (val, &endp, 0);
272 #endif
273 if (!(*val != '\0' && *endp == '\0' && errno == 0))
274 return false;
275
276 asan_shadow_offset_computed = true;
277
278 return true;
279 }
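
/* For example, the -fasan-shadow-offset=0x7fff8000 option reaches this
   function with VAL == "0x7fff8000"; base 0 in strtoul/strtoull accepts
   decimal, hexadecimal and octal spellings alike.  */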
280
281 /* Set list of user-defined sections that need to be sanitized. */
282
283 void
284 set_sanitized_sections (const char *sections)
285 {
286 char *pat;
287 unsigned i;
288 FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
289 free (pat);
290 sanitized_sections.truncate (0);
291
292 for (const char *s = sections; *s; )
293 {
294 const char *end;
295 for (end = s; *end && *end != ','; ++end);
296 size_t len = end - s;
297 sanitized_sections.safe_push (xstrndup (s, len));
298 s = *end ? end + 1 : end;
299 }
300 }
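
/* For example, -fsanitize-sections=.mysec,.foo* registers the patterns
   ".mysec" and ".foo*", so both that exact section and any ".foo"-prefixed
   user section will match in section_sanitized_p below.  */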
301
302 bool
303 asan_mark_p (gimple *stmt, enum asan_mark_flags flag)
304 {
305 return (gimple_call_internal_p (stmt, IFN_ASAN_MARK)
306 && tree_to_uhwi (gimple_call_arg (stmt, 0)) == flag);
307 }
308
309 bool
310 asan_sanitize_stack_p (void)
311 {
312 return (sanitize_flags_p (SANITIZE_ADDRESS) && ASAN_STACK);
313 }
314
315 bool
316 asan_sanitize_allocas_p (void)
317 {
318 return (asan_sanitize_stack_p () && ASAN_PROTECT_ALLOCAS);
319 }
320
321 /* Checks whether section SEC should be sanitized. */
322
323 static bool
324 section_sanitized_p (const char *sec)
325 {
326 char *pat;
327 unsigned i;
328 FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
329 if (fnmatch (pat, sec, FNM_PERIOD) == 0)
330 return true;
331 return false;
332 }
333
334 /* Returns Asan shadow offset. */
335
336 static unsigned HOST_WIDE_INT
337 asan_shadow_offset ()
338 {
339 if (!asan_shadow_offset_computed)
340 {
341 asan_shadow_offset_computed = true;
342 asan_shadow_offset_value = targetm.asan_shadow_offset ();
343 }
344 return asan_shadow_offset_value;
345 }
346
347 alias_set_type asan_shadow_set = -1;
348
349 /* Pointer types to 1, 2 or 4 byte integers in shadow memory. A separate
350 alias set is used for all shadow memory accesses. */
351 static GTY(()) tree shadow_ptr_types[3];
352
353 /* Decl for __asan_option_detect_stack_use_after_return. */
354 static GTY(()) tree asan_detect_stack_use_after_return;
355
356 /* Hashtable support for memory references used by gimple
357 statements. */
358
359 /* This type represents a reference to a memory region. */
360 struct asan_mem_ref
361 {
362 /* The expression of the beginning of the memory region. */
363 tree start;
364
365 /* The size of the access. */
366 HOST_WIDE_INT access_size;
367 };
368
369 object_allocator <asan_mem_ref> asan_mem_ref_pool ("asan_mem_ref");
370
371 /* Initializes an instance of asan_mem_ref. */
372
373 static void
374 asan_mem_ref_init (asan_mem_ref *ref, tree start, HOST_WIDE_INT access_size)
375 {
376 ref->start = start;
377 ref->access_size = access_size;
378 }
379
380 /* Allocates an instance of asan_mem_ref from the asan_mem_ref_pool
381 allocator and initializes it.
382 START is the address of (or the expression pointing to) the
383 beginning of memory reference. ACCESS_SIZE is the size of the
384 access to the referenced memory. */
385
386 static asan_mem_ref*
387 asan_mem_ref_new (tree start, HOST_WIDE_INT access_size)
388 {
389 asan_mem_ref *ref = asan_mem_ref_pool.allocate ();
390
391 asan_mem_ref_init (ref, start, access_size);
392 return ref;
393 }
394
395 /* This builds and returns a pointer to the end of the memory region
396 that starts at START and has length LEN. */
397
398 tree
399 asan_mem_ref_get_end (tree start, tree len)
400 {
401 if (len == NULL_TREE || integer_zerop (len))
402 return start;
403
404 if (!ptrofftype_p (len))
405 len = convert_to_ptrofftype (len);
406
407 return fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (start), start, len);
408 }
409
410 /* Return a tree expression that represents the end of the referenced
411 memory region. Beware that this function can actually build a new
412 tree expression. */
413
414 tree
415 asan_mem_ref_get_end (const asan_mem_ref *ref, tree len)
416 {
417 return asan_mem_ref_get_end (ref->start, len);
418 }
419
420 struct asan_mem_ref_hasher : nofree_ptr_hash <asan_mem_ref>
421 {
422 static inline hashval_t hash (const asan_mem_ref *);
423 static inline bool equal (const asan_mem_ref *, const asan_mem_ref *);
424 };
425
426 /* Hash a memory reference. */
427
428 inline hashval_t
429 asan_mem_ref_hasher::hash (const asan_mem_ref *mem_ref)
430 {
431 return iterative_hash_expr (mem_ref->start, 0);
432 }
433
434 /* Compare two memory references. We accept the length of either
435 memory reference to be NULL_TREE. */
436
437 inline bool
438 asan_mem_ref_hasher::equal (const asan_mem_ref *m1,
439 const asan_mem_ref *m2)
440 {
441 return operand_equal_p (m1->start, m2->start, 0);
442 }
443
444 static hash_table<asan_mem_ref_hasher> *asan_mem_ref_ht;
445
446 /* Returns a reference to the hash table containing memory references.
447 This function ensures that the hash table is created. Note that
448 this hash table is updated by the function
449 update_mem_ref_hash_table. */
450
451 static hash_table<asan_mem_ref_hasher> *
452 get_mem_ref_hash_table ()
453 {
454 if (!asan_mem_ref_ht)
455 asan_mem_ref_ht = new hash_table<asan_mem_ref_hasher> (10);
456
457 return asan_mem_ref_ht;
458 }
459
460 /* Clear all entries from the memory references hash table. */
461
462 static void
463 empty_mem_ref_hash_table ()
464 {
465 if (asan_mem_ref_ht)
466 asan_mem_ref_ht->empty ();
467 }
468
469 /* Free the memory references hash table. */
470
471 static void
472 free_mem_ref_resources ()
473 {
474 delete asan_mem_ref_ht;
475 asan_mem_ref_ht = NULL;
476
477 asan_mem_ref_pool.release ();
478 }
479
480 /* Return true iff the memory reference REF has been instrumented. */
481
482 static bool
483 has_mem_ref_been_instrumented (tree ref, HOST_WIDE_INT access_size)
484 {
485 asan_mem_ref r;
486 asan_mem_ref_init (&r, ref, access_size);
487
488 asan_mem_ref *saved_ref = get_mem_ref_hash_table ()->find (&r);
489 return saved_ref && saved_ref->access_size >= access_size;
490 }
491
492 /* Return true iff the memory reference REF has been instrumented. */
493
494 static bool
495 has_mem_ref_been_instrumented (const asan_mem_ref *ref)
496 {
497 return has_mem_ref_been_instrumented (ref->start, ref->access_size);
498 }
499
500 /* Return true iff access to memory region starting at REF and of
501 length LEN has been instrumented. */
502
503 static bool
504 has_mem_ref_been_instrumented (const asan_mem_ref *ref, tree len)
505 {
506 HOST_WIDE_INT size_in_bytes
507 = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
508
509 return size_in_bytes != -1
510 && has_mem_ref_been_instrumented (ref->start, size_in_bytes);
511 }
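
/* A sketch of how callers use this cache together with
   update_mem_ref_hash_table (defined below), hand-written for exposition:

     if (!has_mem_ref_been_instrumented (ref, access_size))
       {
         // ... emit the shadow check for REF here ...
         update_mem_ref_hash_table (ref->start, access_size);
       }
*/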
512
513 /* Set REF to the memory reference present in a gimple assignment
514 ASSIGNMENT. Return true upon successful completion, false
515 otherwise. */
516
517 static bool
518 get_mem_ref_of_assignment (const gassign *assignment,
519 asan_mem_ref *ref,
520 bool *ref_is_store)
521 {
522 gcc_assert (gimple_assign_single_p (assignment));
523
524 if (gimple_store_p (assignment)
525 && !gimple_clobber_p (assignment))
526 {
527 ref->start = gimple_assign_lhs (assignment);
528 *ref_is_store = true;
529 }
530 else if (gimple_assign_load_p (assignment))
531 {
532 ref->start = gimple_assign_rhs1 (assignment);
533 *ref_is_store = false;
534 }
535 else
536 return false;
537
538 ref->access_size = int_size_in_bytes (TREE_TYPE (ref->start));
539 return true;
540 }
541
542 /* Return address of last allocated dynamic alloca. */
543
544 static tree
545 get_last_alloca_addr ()
546 {
547 if (last_alloca_addr)
548 return last_alloca_addr;
549
550 last_alloca_addr = create_tmp_reg (ptr_type_node, "last_alloca_addr");
551 gassign *g = gimple_build_assign (last_alloca_addr, null_pointer_node);
552 edge e = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
553 gsi_insert_on_edge_immediate (e, g);
554 return last_alloca_addr;
555 }
556
557 /* Insert __asan_allocas_unpoison (top, bottom) call after
558 __builtin_stack_restore (new_sp) call.
559 The pseudocode of this routine should look like this:
560 __builtin_stack_restore (new_sp);
561 top = last_alloca_addr;
562 bot = new_sp;
563 __asan_allocas_unpoison (top, bot);
564 last_alloca_addr = new_sp;
565 In general, we can't use new_sp as the bot parameter because on some
566 architectures SP has a non-zero offset from the dynamic stack area. Moreover,
567 on some architectures this offset (STACK_DYNAMIC_OFFSET) becomes known for
568 each particular function only after all callees have been expanded to rtl.
569 The most noticeable example is PowerPC{,64}, see
570 http://refspecs.linuxfoundation.org/ELF/ppc64/PPC-elf64abi.html#DYNAM-STACK.
571 To overcome the issue we use the following trick: pass new_sp as a second
572 parameter to __asan_allocas_unpoison and rewrite it during expansion with
573 virtual_dynamic_stack_rtx later in expand_asan_emit_allocas_unpoison
574 function.
575 */
576
577 static void
578 handle_builtin_stack_restore (gcall *call, gimple_stmt_iterator *iter)
579 {
580 if (!iter || !asan_sanitize_allocas_p ())
581 return;
582
583 tree last_alloca = get_last_alloca_addr ();
584 tree restored_stack = gimple_call_arg (call, 0);
585 tree fn = builtin_decl_implicit (BUILT_IN_ASAN_ALLOCAS_UNPOISON);
586 gimple *g = gimple_build_call (fn, 2, last_alloca, restored_stack);
587 gsi_insert_after (iter, g, GSI_NEW_STMT);
588 g = gimple_build_assign (last_alloca, restored_stack);
589 gsi_insert_after (iter, g, GSI_NEW_STMT);
590 }
591
592 /* Deploy and poison redzones around __builtin_alloca call. To do this, we
593 should replace this call with another one with changed parameters and
594 replace all its uses with new address, so
595 addr = __builtin_alloca (old_size, align);
596 is replaced by
597 left_redzone_size = max (align, ASAN_RED_ZONE_SIZE);
598 The following two statements are optimized out if we know that
599 old_size & (ASAN_RED_ZONE_SIZE - 1) == 0, i.e. the alloca doesn't need a
600 partial redzone.
601 misalign = old_size & (ASAN_RED_ZONE_SIZE - 1);
602 partial_redzone_size = ASAN_RED_ZONE_SIZE - misalign;
603 right_redzone_size = ASAN_RED_ZONE_SIZE;
604 additional_size = left_redzone_size + partial_redzone_size +
605 right_redzone_size;
606 new_size = old_size + additional_size;
607 new_alloca = __builtin_alloca (new_size, max (align, 32))
608 __asan_alloca_poison (new_alloca, old_size)
609 addr = new_alloca + max (align, ASAN_RED_ZONE_SIZE);
610 last_alloca_addr = new_alloca;
611 ADDITIONAL_SIZE is added to make the new memory allocation contain not only
612 the requested memory, but also the left, partial and right redzones, as well
613 as some additional space required by alignment. */
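
/* For instance, with ASAN_RED_ZONE_SIZE == 32, align <= 32 bytes and
   old_size == 37: misalign == 37 & 31 == 5, partial_redzone_size == 27,
   additional_size == 32 + 27 + 32 == 91 and new_size == 128.  */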
614
615 static void
616 handle_builtin_alloca (gcall *call, gimple_stmt_iterator *iter)
617 {
618 if (!iter || !asan_sanitize_allocas_p ())
619 return;
620
621 gassign *g;
622 gcall *gg;
623 const HOST_WIDE_INT redzone_mask = ASAN_RED_ZONE_SIZE - 1;
624
625 tree last_alloca = get_last_alloca_addr ();
626 tree callee = gimple_call_fndecl (call);
627 tree old_size = gimple_call_arg (call, 0);
628 tree ptr_type = gimple_call_lhs (call) ? TREE_TYPE (gimple_call_lhs (call))
629 : ptr_type_node;
630 tree partial_size = NULL_TREE;
631 unsigned int align
632 = DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA
633 ? 0 : tree_to_uhwi (gimple_call_arg (call, 1));
634
635 /* If ALIGN > ASAN_RED_ZONE_SIZE, we embed left redzone into first ALIGN
636 bytes of allocated space. Otherwise, align alloca to ASAN_RED_ZONE_SIZE
637 manually. */
638 align = MAX (align, ASAN_RED_ZONE_SIZE * BITS_PER_UNIT);
639
640 tree alloca_rz_mask = build_int_cst (size_type_node, redzone_mask);
641 tree redzone_size = build_int_cst (size_type_node, ASAN_RED_ZONE_SIZE);
642
643 /* Extract lower bits from old_size. */
644 wide_int size_nonzero_bits = get_nonzero_bits (old_size);
645 wide_int rz_mask
646 = wi::uhwi (redzone_mask, wi::get_precision (size_nonzero_bits));
647 wide_int old_size_lower_bits = wi::bit_and (size_nonzero_bits, rz_mask);
648
649 /* If alloca size is aligned to ASAN_RED_ZONE_SIZE, we don't need partial
650 redzone. Otherwise, compute its size here. */
651 if (wi::ne_p (old_size_lower_bits, 0))
652 {
653 /* misalign = size & (ASAN_RED_ZONE_SIZE - 1)
654 partial_size = ASAN_RED_ZONE_SIZE - misalign. */
655 g = gimple_build_assign (make_ssa_name (size_type_node, NULL),
656 BIT_AND_EXPR, old_size, alloca_rz_mask);
657 gsi_insert_before (iter, g, GSI_SAME_STMT);
658 tree misalign = gimple_assign_lhs (g);
659 g = gimple_build_assign (make_ssa_name (size_type_node, NULL), MINUS_EXPR,
660 redzone_size, misalign);
661 gsi_insert_before (iter, g, GSI_SAME_STMT);
662 partial_size = gimple_assign_lhs (g);
663 }
664
665 /* additional_size = align / BITS_PER_UNIT + ASAN_RED_ZONE_SIZE. */
666 tree additional_size = build_int_cst (size_type_node, align / BITS_PER_UNIT
667 + ASAN_RED_ZONE_SIZE);
668 /* If the alloca has a partial redzone, include it in additional_size too. */
669 if (partial_size)
670 {
671 /* additional_size += partial_size. */
672 g = gimple_build_assign (make_ssa_name (size_type_node), PLUS_EXPR,
673 partial_size, additional_size);
674 gsi_insert_before (iter, g, GSI_SAME_STMT);
675 additional_size = gimple_assign_lhs (g);
676 }
677
678 /* new_size = old_size + additional_size. */
679 g = gimple_build_assign (make_ssa_name (size_type_node), PLUS_EXPR, old_size,
680 additional_size);
681 gsi_insert_before (iter, g, GSI_SAME_STMT);
682 tree new_size = gimple_assign_lhs (g);
683
684 /* Build new __builtin_alloca call:
685 new_alloca_with_rz = __builtin_alloca (new_size, align). */
686 tree fn = builtin_decl_implicit (BUILT_IN_ALLOCA_WITH_ALIGN);
687 gg = gimple_build_call (fn, 2, new_size,
688 build_int_cst (size_type_node, align));
689 tree new_alloca_with_rz = make_ssa_name (ptr_type, gg);
690 gimple_call_set_lhs (gg, new_alloca_with_rz);
691 gsi_insert_before (iter, gg, GSI_SAME_STMT);
692
693 /* new_alloca = new_alloca_with_rz + align. */
694 g = gimple_build_assign (make_ssa_name (ptr_type), POINTER_PLUS_EXPR,
695 new_alloca_with_rz,
696 build_int_cst (size_type_node,
697 align / BITS_PER_UNIT));
698 gsi_insert_before (iter, g, GSI_SAME_STMT);
699 tree new_alloca = gimple_assign_lhs (g);
700
701 /* Poison newly created alloca redzones:
702 __asan_alloca_poison (new_alloca, old_size). */
703 fn = builtin_decl_implicit (BUILT_IN_ASAN_ALLOCA_POISON);
704 gg = gimple_build_call (fn, 2, new_alloca, old_size);
705 gsi_insert_before (iter, gg, GSI_SAME_STMT);
706
707 /* Save new_alloca_with_rz value into last_alloca to use it during
708 allocas unpoisoning. */
709 g = gimple_build_assign (last_alloca, new_alloca_with_rz);
710 gsi_insert_before (iter, g, GSI_SAME_STMT);
711
712 /* Finally, replace old alloca ptr with NEW_ALLOCA. */
713 replace_call_with_value (iter, new_alloca);
714 }
715
716 /* Return the memory references contained in a gimple statement
717 representing a builtin call that has to do with memory access. */
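
/* For example, for memcpy (d, s, n) this records S as a load of N bytes
   and D as a store of N bytes; for __atomic_load_4 (p, ...) it instead
   records a single 4-byte dereference of P.  */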
718
719 static bool
720 get_mem_refs_of_builtin_call (gcall *call,
721 asan_mem_ref *src0,
722 tree *src0_len,
723 bool *src0_is_store,
724 asan_mem_ref *src1,
725 tree *src1_len,
726 bool *src1_is_store,
727 asan_mem_ref *dst,
728 tree *dst_len,
729 bool *dst_is_store,
730 bool *dest_is_deref,
731 bool *intercepted_p,
732 gimple_stmt_iterator *iter = NULL)
733 {
734 gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));
735
736 tree callee = gimple_call_fndecl (call);
737 tree source0 = NULL_TREE, source1 = NULL_TREE,
738 dest = NULL_TREE, len = NULL_TREE;
739 bool is_store = true, got_reference_p = false;
740 HOST_WIDE_INT access_size = 1;
741
742 *intercepted_p = asan_intercepted_p ((DECL_FUNCTION_CODE (callee)));
743
744 switch (DECL_FUNCTION_CODE (callee))
745 {
746 /* (s, s, n) style memops. */
747 case BUILT_IN_BCMP:
748 case BUILT_IN_MEMCMP:
749 source0 = gimple_call_arg (call, 0);
750 source1 = gimple_call_arg (call, 1);
751 len = gimple_call_arg (call, 2);
752 break;
753
754 /* (src, dest, n) style memops. */
755 case BUILT_IN_BCOPY:
756 source0 = gimple_call_arg (call, 0);
757 dest = gimple_call_arg (call, 1);
758 len = gimple_call_arg (call, 2);
759 break;
760
761 /* (dest, src, n) style memops. */
762 case BUILT_IN_MEMCPY:
763 case BUILT_IN_MEMCPY_CHK:
764 case BUILT_IN_MEMMOVE:
765 case BUILT_IN_MEMMOVE_CHK:
766 case BUILT_IN_MEMPCPY:
767 case BUILT_IN_MEMPCPY_CHK:
768 dest = gimple_call_arg (call, 0);
769 source0 = gimple_call_arg (call, 1);
770 len = gimple_call_arg (call, 2);
771 break;
772
773 /* (dest, n) style memops. */
774 case BUILT_IN_BZERO:
775 dest = gimple_call_arg (call, 0);
776 len = gimple_call_arg (call, 1);
777 break;
778
779 /* (dest, x, n) style memops. */
780 case BUILT_IN_MEMSET:
781 case BUILT_IN_MEMSET_CHK:
782 dest = gimple_call_arg (call, 0);
783 len = gimple_call_arg (call, 2);
784 break;
785
786 case BUILT_IN_STRLEN:
787 source0 = gimple_call_arg (call, 0);
788 len = gimple_call_lhs (call);
789 break;
790
791 case BUILT_IN_STACK_RESTORE:
792 handle_builtin_stack_restore (call, iter);
793 break;
794
795 CASE_BUILT_IN_ALLOCA:
796 handle_builtin_alloca (call, iter);
797 break;
798 /* And now the __atomic* and __sync builtins.
799 These are handled differently from the classical memory
800 access builtins above. */
801
802 case BUILT_IN_ATOMIC_LOAD_1:
803 is_store = false;
804 /* FALLTHRU */
805 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
806 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
807 case BUILT_IN_SYNC_FETCH_AND_OR_1:
808 case BUILT_IN_SYNC_FETCH_AND_AND_1:
809 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
810 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
811 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
812 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
813 case BUILT_IN_SYNC_OR_AND_FETCH_1:
814 case BUILT_IN_SYNC_AND_AND_FETCH_1:
815 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
816 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
817 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
818 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
819 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
820 case BUILT_IN_SYNC_LOCK_RELEASE_1:
821 case BUILT_IN_ATOMIC_EXCHANGE_1:
822 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
823 case BUILT_IN_ATOMIC_STORE_1:
824 case BUILT_IN_ATOMIC_ADD_FETCH_1:
825 case BUILT_IN_ATOMIC_SUB_FETCH_1:
826 case BUILT_IN_ATOMIC_AND_FETCH_1:
827 case BUILT_IN_ATOMIC_NAND_FETCH_1:
828 case BUILT_IN_ATOMIC_XOR_FETCH_1:
829 case BUILT_IN_ATOMIC_OR_FETCH_1:
830 case BUILT_IN_ATOMIC_FETCH_ADD_1:
831 case BUILT_IN_ATOMIC_FETCH_SUB_1:
832 case BUILT_IN_ATOMIC_FETCH_AND_1:
833 case BUILT_IN_ATOMIC_FETCH_NAND_1:
834 case BUILT_IN_ATOMIC_FETCH_XOR_1:
835 case BUILT_IN_ATOMIC_FETCH_OR_1:
836 access_size = 1;
837 goto do_atomic;
838
839 case BUILT_IN_ATOMIC_LOAD_2:
840 is_store = false;
841 /* FALLTHRU */
842 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
843 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
844 case BUILT_IN_SYNC_FETCH_AND_OR_2:
845 case BUILT_IN_SYNC_FETCH_AND_AND_2:
846 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
847 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
848 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
849 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
850 case BUILT_IN_SYNC_OR_AND_FETCH_2:
851 case BUILT_IN_SYNC_AND_AND_FETCH_2:
852 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
853 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
854 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
855 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
856 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
857 case BUILT_IN_SYNC_LOCK_RELEASE_2:
858 case BUILT_IN_ATOMIC_EXCHANGE_2:
859 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
860 case BUILT_IN_ATOMIC_STORE_2:
861 case BUILT_IN_ATOMIC_ADD_FETCH_2:
862 case BUILT_IN_ATOMIC_SUB_FETCH_2:
863 case BUILT_IN_ATOMIC_AND_FETCH_2:
864 case BUILT_IN_ATOMIC_NAND_FETCH_2:
865 case BUILT_IN_ATOMIC_XOR_FETCH_2:
866 case BUILT_IN_ATOMIC_OR_FETCH_2:
867 case BUILT_IN_ATOMIC_FETCH_ADD_2:
868 case BUILT_IN_ATOMIC_FETCH_SUB_2:
869 case BUILT_IN_ATOMIC_FETCH_AND_2:
870 case BUILT_IN_ATOMIC_FETCH_NAND_2:
871 case BUILT_IN_ATOMIC_FETCH_XOR_2:
872 case BUILT_IN_ATOMIC_FETCH_OR_2:
873 access_size = 2;
874 goto do_atomic;
875
876 case BUILT_IN_ATOMIC_LOAD_4:
877 is_store = false;
878 /* FALLTHRU */
879 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
880 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
881 case BUILT_IN_SYNC_FETCH_AND_OR_4:
882 case BUILT_IN_SYNC_FETCH_AND_AND_4:
883 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
884 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
885 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
886 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
887 case BUILT_IN_SYNC_OR_AND_FETCH_4:
888 case BUILT_IN_SYNC_AND_AND_FETCH_4:
889 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
890 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
891 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
892 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
893 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
894 case BUILT_IN_SYNC_LOCK_RELEASE_4:
895 case BUILT_IN_ATOMIC_EXCHANGE_4:
896 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
897 case BUILT_IN_ATOMIC_STORE_4:
898 case BUILT_IN_ATOMIC_ADD_FETCH_4:
899 case BUILT_IN_ATOMIC_SUB_FETCH_4:
900 case BUILT_IN_ATOMIC_AND_FETCH_4:
901 case BUILT_IN_ATOMIC_NAND_FETCH_4:
902 case BUILT_IN_ATOMIC_XOR_FETCH_4:
903 case BUILT_IN_ATOMIC_OR_FETCH_4:
904 case BUILT_IN_ATOMIC_FETCH_ADD_4:
905 case BUILT_IN_ATOMIC_FETCH_SUB_4:
906 case BUILT_IN_ATOMIC_FETCH_AND_4:
907 case BUILT_IN_ATOMIC_FETCH_NAND_4:
908 case BUILT_IN_ATOMIC_FETCH_XOR_4:
909 case BUILT_IN_ATOMIC_FETCH_OR_4:
910 access_size = 4;
911 goto do_atomic;
912
913 case BUILT_IN_ATOMIC_LOAD_8:
914 is_store = false;
915 /* FALLTHRU */
916 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
917 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
918 case BUILT_IN_SYNC_FETCH_AND_OR_8:
919 case BUILT_IN_SYNC_FETCH_AND_AND_8:
920 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
921 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
922 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
923 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
924 case BUILT_IN_SYNC_OR_AND_FETCH_8:
925 case BUILT_IN_SYNC_AND_AND_FETCH_8:
926 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
927 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
928 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
929 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
930 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
931 case BUILT_IN_SYNC_LOCK_RELEASE_8:
932 case BUILT_IN_ATOMIC_EXCHANGE_8:
933 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
934 case BUILT_IN_ATOMIC_STORE_8:
935 case BUILT_IN_ATOMIC_ADD_FETCH_8:
936 case BUILT_IN_ATOMIC_SUB_FETCH_8:
937 case BUILT_IN_ATOMIC_AND_FETCH_8:
938 case BUILT_IN_ATOMIC_NAND_FETCH_8:
939 case BUILT_IN_ATOMIC_XOR_FETCH_8:
940 case BUILT_IN_ATOMIC_OR_FETCH_8:
941 case BUILT_IN_ATOMIC_FETCH_ADD_8:
942 case BUILT_IN_ATOMIC_FETCH_SUB_8:
943 case BUILT_IN_ATOMIC_FETCH_AND_8:
944 case BUILT_IN_ATOMIC_FETCH_NAND_8:
945 case BUILT_IN_ATOMIC_FETCH_XOR_8:
946 case BUILT_IN_ATOMIC_FETCH_OR_8:
947 access_size = 8;
948 goto do_atomic;
949
950 case BUILT_IN_ATOMIC_LOAD_16:
951 is_store = false;
952 /* FALLTHRU */
953 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
954 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
955 case BUILT_IN_SYNC_FETCH_AND_OR_16:
956 case BUILT_IN_SYNC_FETCH_AND_AND_16:
957 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
958 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
959 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
960 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
961 case BUILT_IN_SYNC_OR_AND_FETCH_16:
962 case BUILT_IN_SYNC_AND_AND_FETCH_16:
963 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
964 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
965 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
966 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
967 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
968 case BUILT_IN_SYNC_LOCK_RELEASE_16:
969 case BUILT_IN_ATOMIC_EXCHANGE_16:
970 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
971 case BUILT_IN_ATOMIC_STORE_16:
972 case BUILT_IN_ATOMIC_ADD_FETCH_16:
973 case BUILT_IN_ATOMIC_SUB_FETCH_16:
974 case BUILT_IN_ATOMIC_AND_FETCH_16:
975 case BUILT_IN_ATOMIC_NAND_FETCH_16:
976 case BUILT_IN_ATOMIC_XOR_FETCH_16:
977 case BUILT_IN_ATOMIC_OR_FETCH_16:
978 case BUILT_IN_ATOMIC_FETCH_ADD_16:
979 case BUILT_IN_ATOMIC_FETCH_SUB_16:
980 case BUILT_IN_ATOMIC_FETCH_AND_16:
981 case BUILT_IN_ATOMIC_FETCH_NAND_16:
982 case BUILT_IN_ATOMIC_FETCH_XOR_16:
983 case BUILT_IN_ATOMIC_FETCH_OR_16:
984 access_size = 16;
985 /* FALLTHRU */
986 do_atomic:
987 {
988 dest = gimple_call_arg (call, 0);
989 /* DEST represents the address of a memory location.
990 instrument_derefs wants the memory location, so let's
991 dereference the address DEST before handing it to
992 instrument_derefs. */
993 tree type = build_nonstandard_integer_type (access_size
994 * BITS_PER_UNIT, 1);
995 dest = build2 (MEM_REF, type, dest,
996 build_int_cst (build_pointer_type (char_type_node), 0));
997 break;
998 }
999
1000 default:
1001 /* The other memory access builtins are not instrumented in this
1002 function because they either don't have any length parameter,
1003 or their length parameter is just a limit. */
1004 break;
1005 }
1006
1007 if (len != NULL_TREE)
1008 {
1009 if (source0 != NULL_TREE)
1010 {
1011 src0->start = source0;
1012 src0->access_size = access_size;
1013 *src0_len = len;
1014 *src0_is_store = false;
1015 }
1016
1017 if (source1 != NULL_TREE)
1018 {
1019 src1->start = source1;
1020 src1->access_size = access_size;
1021 *src1_len = len;
1022 *src1_is_store = false;
1023 }
1024
1025 if (dest != NULL_TREE)
1026 {
1027 dst->start = dest;
1028 dst->access_size = access_size;
1029 *dst_len = len;
1030 *dst_is_store = true;
1031 }
1032
1033 got_reference_p = true;
1034 }
1035 else if (dest)
1036 {
1037 dst->start = dest;
1038 dst->access_size = access_size;
1039 *dst_len = NULL_TREE;
1040 *dst_is_store = is_store;
1041 *dest_is_deref = true;
1042 got_reference_p = true;
1043 }
1044
1045 return got_reference_p;
1046 }
1047
1048 /* Return true iff a given gimple statement has been instrumented.
1049 Note that the statement is "defined" by the memory references it
1050 contains. */
1051
1052 static bool
1053 has_stmt_been_instrumented_p (gimple *stmt)
1054 {
1055 if (gimple_assign_single_p (stmt))
1056 {
1057 bool r_is_store;
1058 asan_mem_ref r;
1059 asan_mem_ref_init (&r, NULL, 1);
1060
1061 if (get_mem_ref_of_assignment (as_a <gassign *> (stmt), &r,
1062 &r_is_store))
1063 return has_mem_ref_been_instrumented (&r);
1064 }
1065 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
1066 {
1067 asan_mem_ref src0, src1, dest;
1068 asan_mem_ref_init (&src0, NULL, 1);
1069 asan_mem_ref_init (&src1, NULL, 1);
1070 asan_mem_ref_init (&dest, NULL, 1);
1071
1072 tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
1073 bool src0_is_store = false, src1_is_store = false,
1074 dest_is_store = false, dest_is_deref = false, intercepted_p = true;
1075 if (get_mem_refs_of_builtin_call (as_a <gcall *> (stmt),
1076 &src0, &src0_len, &src0_is_store,
1077 &src1, &src1_len, &src1_is_store,
1078 &dest, &dest_len, &dest_is_store,
1079 &dest_is_deref, &intercepted_p))
1080 {
1081 if (src0.start != NULL_TREE
1082 && !has_mem_ref_been_instrumented (&src0, src0_len))
1083 return false;
1084
1085 if (src1.start != NULL_TREE
1086 && !has_mem_ref_been_instrumented (&src1, src1_len))
1087 return false;
1088
1089 if (dest.start != NULL_TREE
1090 && !has_mem_ref_been_instrumented (&dest, dest_len))
1091 return false;
1092
1093 return true;
1094 }
1095 }
1096 else if (is_gimple_call (stmt) && gimple_store_p (stmt))
1097 {
1098 asan_mem_ref r;
1099 asan_mem_ref_init (&r, NULL, 1);
1100
1101 r.start = gimple_call_lhs (stmt);
1102 r.access_size = int_size_in_bytes (TREE_TYPE (r.start));
1103 return has_mem_ref_been_instrumented (&r);
1104 }
1105
1106 return false;
1107 }
1108
1109 /* Insert a memory reference into the hash table. */
1110
1111 static void
1112 update_mem_ref_hash_table (tree ref, HOST_WIDE_INT access_size)
1113 {
1114 hash_table<asan_mem_ref_hasher> *ht = get_mem_ref_hash_table ();
1115
1116 asan_mem_ref r;
1117 asan_mem_ref_init (&r, ref, access_size);
1118
1119 asan_mem_ref **slot = ht->find_slot (&r, INSERT);
1120 if (*slot == NULL || (*slot)->access_size < access_size)
1121 *slot = asan_mem_ref_new (ref, access_size);
1122 }
1123
1124 /* Initialize shadow_ptr_types array. */
1125
1126 static void
1127 asan_init_shadow_ptr_types (void)
1128 {
1129 asan_shadow_set = new_alias_set ();
1130 tree types[3] = { signed_char_type_node, short_integer_type_node,
1131 integer_type_node };
1132
1133 for (unsigned i = 0; i < 3; i++)
1134 {
1135 shadow_ptr_types[i] = build_distinct_type_copy (types[i]);
1136 TYPE_ALIAS_SET (shadow_ptr_types[i]) = asan_shadow_set;
1137 shadow_ptr_types[i] = build_pointer_type (shadow_ptr_types[i]);
1138 }
1139
1140 initialize_sanitizer_builtins ();
1141 }
1142
1143 /* Create an ADDR_EXPR of a STRING_CST holding the text of pretty printer PP. */
1144
1145 static tree
1146 asan_pp_string (pretty_printer *pp)
1147 {
1148 const char *buf = pp_formatted_text (pp);
1149 size_t len = strlen (buf);
1150 tree ret = build_string (len + 1, buf);
1151 TREE_TYPE (ret)
1152 = build_array_type (TREE_TYPE (shadow_ptr_types[0]),
1153 build_index_type (size_int (len)));
1154 TREE_READONLY (ret) = 1;
1155 TREE_STATIC (ret) = 1;
1156 return build1 (ADDR_EXPR, shadow_ptr_types[0], ret);
1157 }
1158
1159 /* Return a CONST_INT representing 4 consecutive shadow memory bytes. */
1160
1161 static rtx
1162 asan_shadow_cst (unsigned char shadow_bytes[4])
1163 {
1164 int i;
1165 unsigned HOST_WIDE_INT val = 0;
1166 gcc_assert (WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN);
1167 for (i = 0; i < 4; i++)
1168 val |= (unsigned HOST_WIDE_INT) shadow_bytes[BYTES_BIG_ENDIAN ? 3 - i : i]
1169 << (BITS_PER_UNIT * i);
1170 return gen_int_mode (val, SImode);
1171 }
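
/* For example, shadow_bytes == { 0x00, 0x00, 0x00, 0xF4 } yields
   0xF4000000 on a little-endian target: the byte at the lowest shadow
   address lands in the least significant position.  */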
1172
1173 /* Clear LEN bytes of shadow memory at SHADOW_MEM. We can't emit a library
1174 call here, though. */
1175
1176 static void
1177 asan_clear_shadow (rtx shadow_mem, HOST_WIDE_INT len)
1178 {
1179 rtx_insn *insn, *insns, *jump;
1180 rtx_code_label *top_label;
1181 rtx end, addr, tmp;
1182
1183 start_sequence ();
1184 clear_storage (shadow_mem, GEN_INT (len), BLOCK_OP_NORMAL);
1185 insns = get_insns ();
1186 end_sequence ();
1187 for (insn = insns; insn; insn = NEXT_INSN (insn))
1188 if (CALL_P (insn))
1189 break;
1190 if (insn == NULL_RTX)
1191 {
1192 emit_insn (insns);
1193 return;
1194 }
1195
1196 gcc_assert ((len & 3) == 0);
1197 top_label = gen_label_rtx ();
1198 addr = copy_to_mode_reg (Pmode, XEXP (shadow_mem, 0));
1199 shadow_mem = adjust_automodify_address (shadow_mem, SImode, addr, 0);
1200 end = force_reg (Pmode, plus_constant (Pmode, addr, len));
1201 emit_label (top_label);
1202
1203 emit_move_insn (shadow_mem, const0_rtx);
1204 tmp = expand_simple_binop (Pmode, PLUS, addr, gen_int_mode (4, Pmode), addr,
1205 true, OPTAB_LIB_WIDEN);
1206 if (tmp != addr)
1207 emit_move_insn (addr, tmp);
1208 emit_cmp_and_jump_insns (addr, end, LT, NULL_RTX, Pmode, true, top_label);
1209 jump = get_last_insn ();
1210 gcc_assert (JUMP_P (jump));
1211 add_reg_br_prob_note (jump,
1212 profile_probability::guessed_always ()
1213 .apply_scale (80, 100));
1214 }
1215
1216 void
1217 asan_function_start (void)
1218 {
1219 section *fnsec = function_section (current_function_decl);
1220 switch_to_section (fnsec);
1221 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LASANPC",
1222 current_function_funcdef_no);
1223 }
1224
1225 /* Return number of shadow bytes that are occupied by a local variable
1226 of SIZE bytes. */
1227
1228 static unsigned HOST_WIDE_INT
1229 shadow_mem_size (unsigned HOST_WIDE_INT size)
1230 {
1231 return ROUND_UP (size, ASAN_SHADOW_GRANULARITY) / ASAN_SHADOW_GRANULARITY;
1232 }
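
/* For example, a 13-byte variable occupies ROUND_UP (13, 8) / 8 == 2
   shadow bytes, since each shadow byte covers ASAN_SHADOW_GRANULARITY
   (i.e. 8) bytes of application memory.  */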
1233
1234 /* Insert code to protect stack vars. The prologue sequence should be emitted
1235 directly, the epilogue sequence is returned. BASE is the register holding the
1236 stack base, relative to which the OFFSETS array offsets are expressed; the
1237 OFFSETS array contains pairs of offsets in reverse order, always the end offset
1238 of some gap that needs protection followed by its starting offset,
1239 and DECLS is an array of representative decls for each var partition.
1240 LENGTH is the length of the OFFSETS array; the DECLS array is LENGTH / 2 - 1
1241 elements long (OFFSETS includes the gap before the first variable as well
1242 as gaps after each stack variable). PBASE is, if non-NULL, some pseudo
1243 register which stack vars' DECL_RTLs are based on. Either BASE should be
1244 assigned to PBASE, when not doing use-after-return protection, or the
1245 corresponding address based on the __asan_stack_malloc* return value. */
1246
1247 rtx_insn *
1248 asan_emit_stack_protection (rtx base, rtx pbase, unsigned int alignb,
1249 HOST_WIDE_INT *offsets, tree *decls, int length)
1250 {
1251 rtx shadow_base, shadow_mem, ret, mem, orig_base;
1252 rtx_code_label *lab;
1253 rtx_insn *insns;
1254 char buf[32];
1255 unsigned char shadow_bytes[4];
1256 HOST_WIDE_INT base_offset = offsets[length - 1];
1257 HOST_WIDE_INT base_align_bias = 0, offset, prev_offset;
1258 HOST_WIDE_INT asan_frame_size = offsets[0] - base_offset;
1259 HOST_WIDE_INT last_offset, last_size;
1260 int l;
1261 unsigned char cur_shadow_byte = ASAN_STACK_MAGIC_LEFT;
1262 tree str_cst, decl, id;
1263 int use_after_return_class = -1;
1264
1265 if (shadow_ptr_types[0] == NULL_TREE)
1266 asan_init_shadow_ptr_types ();
1267
1268 /* First of all, prepare the description string. */
1269 pretty_printer asan_pp;
1270
1271 pp_decimal_int (&asan_pp, length / 2 - 1);
1272 pp_space (&asan_pp);
1273 for (l = length - 2; l; l -= 2)
1274 {
1275 tree decl = decls[l / 2 - 1];
1276 pp_wide_integer (&asan_pp, offsets[l] - base_offset);
1277 pp_space (&asan_pp);
1278 pp_wide_integer (&asan_pp, offsets[l - 1] - offsets[l]);
1279 pp_space (&asan_pp);
1280 if (DECL_P (decl) && DECL_NAME (decl))
1281 {
1282 pp_decimal_int (&asan_pp, IDENTIFIER_LENGTH (DECL_NAME (decl)));
1283 pp_space (&asan_pp);
1284 pp_tree_identifier (&asan_pp, DECL_NAME (decl));
1285 }
1286 else
1287 pp_string (&asan_pp, "9 <unknown>");
1288 pp_space (&asan_pp);
1289 }
1290 str_cst = asan_pp_string (&asan_pp);
1291
1292 /* Emit the prologue sequence. */
1293 if (asan_frame_size > 32 && asan_frame_size <= 65536 && pbase
1294 && ASAN_USE_AFTER_RETURN)
1295 {
1296 use_after_return_class = floor_log2 (asan_frame_size - 1) - 5;
1297 /* __asan_stack_malloc_N guarantees alignment
1298 N < 6 ? (64 << N) : 4096 bytes. */
1299 if (alignb > (use_after_return_class < 6
1300 ? (64U << use_after_return_class) : 4096U))
1301 use_after_return_class = -1;
1302 else if (alignb > ASAN_RED_ZONE_SIZE && (asan_frame_size & (alignb - 1)))
1303 base_align_bias = ((asan_frame_size + alignb - 1)
1304 & ~(alignb - HOST_WIDE_INT_1)) - asan_frame_size;
1305 }
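  /* For instance, asan_frame_size == 128 gives use_after_return_class
     == floor_log2 (127) - 5 == 1, i.e. __asan_stack_malloc_1, which
     guarantees 64 << 1 == 128 bytes of alignment.  */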
1306 /* Align base if target is STRICT_ALIGNMENT. */
1307 if (STRICT_ALIGNMENT)
1308 base = expand_binop (Pmode, and_optab, base,
1309 gen_int_mode (-((GET_MODE_ALIGNMENT (SImode)
1310 << ASAN_SHADOW_SHIFT)
1311 / BITS_PER_UNIT), Pmode), NULL_RTX,
1312 1, OPTAB_DIRECT);
1313
1314 if (use_after_return_class == -1 && pbase)
1315 emit_move_insn (pbase, base);
1316
1317 base = expand_binop (Pmode, add_optab, base,
1318 gen_int_mode (base_offset - base_align_bias, Pmode),
1319 NULL_RTX, 1, OPTAB_DIRECT);
1320 orig_base = NULL_RTX;
1321 if (use_after_return_class != -1)
1322 {
1323 if (asan_detect_stack_use_after_return == NULL_TREE)
1324 {
1325 id = get_identifier ("__asan_option_detect_stack_use_after_return");
1326 decl = build_decl (BUILTINS_LOCATION, VAR_DECL, id,
1327 integer_type_node);
1328 SET_DECL_ASSEMBLER_NAME (decl, id);
1329 TREE_ADDRESSABLE (decl) = 1;
1330 DECL_ARTIFICIAL (decl) = 1;
1331 DECL_IGNORED_P (decl) = 1;
1332 DECL_EXTERNAL (decl) = 1;
1333 TREE_STATIC (decl) = 1;
1334 TREE_PUBLIC (decl) = 1;
1335 TREE_USED (decl) = 1;
1336 asan_detect_stack_use_after_return = decl;
1337 }
1338 orig_base = gen_reg_rtx (Pmode);
1339 emit_move_insn (orig_base, base);
1340 ret = expand_normal (asan_detect_stack_use_after_return);
1341 lab = gen_label_rtx ();
1342 emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
1343 VOIDmode, 0, lab,
1344 profile_probability::very_likely ());
1345 snprintf (buf, sizeof buf, "__asan_stack_malloc_%d",
1346 use_after_return_class);
1347 ret = init_one_libfunc (buf);
1348 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
1349 GEN_INT (asan_frame_size
1350 + base_align_bias),
1351 TYPE_MODE (pointer_sized_int_node));
1352 /* __asan_stack_malloc_[n] returns a pointer to the fake stack on success
1353 and NULL otherwise. Check whether RET is NULL here and, if so, jump over
1354 the BASE reassignment. Otherwise, reassign BASE to RET. */
1355 emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
1356 VOIDmode, 0, lab,
1357 profile_probability::very_unlikely ());
1358 ret = convert_memory_address (Pmode, ret);
1359 emit_move_insn (base, ret);
1360 emit_label (lab);
1361 emit_move_insn (pbase, expand_binop (Pmode, add_optab, base,
1362 gen_int_mode (base_align_bias
1363 - base_offset, Pmode),
1364 NULL_RTX, 1, OPTAB_DIRECT));
1365 }
1366 mem = gen_rtx_MEM (ptr_mode, base);
1367 mem = adjust_address (mem, VOIDmode, base_align_bias);
1368 emit_move_insn (mem, gen_int_mode (ASAN_STACK_FRAME_MAGIC, ptr_mode));
1369 mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
1370 emit_move_insn (mem, expand_normal (str_cst));
1371 mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
1372 ASM_GENERATE_INTERNAL_LABEL (buf, "LASANPC", current_function_funcdef_no);
1373 id = get_identifier (buf);
1374 decl = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1375 VAR_DECL, id, char_type_node);
1376 SET_DECL_ASSEMBLER_NAME (decl, id);
1377 TREE_ADDRESSABLE (decl) = 1;
1378 TREE_READONLY (decl) = 1;
1379 DECL_ARTIFICIAL (decl) = 1;
1380 DECL_IGNORED_P (decl) = 1;
1381 TREE_STATIC (decl) = 1;
1382 TREE_PUBLIC (decl) = 0;
1383 TREE_USED (decl) = 1;
1384 DECL_INITIAL (decl) = decl;
1385 TREE_ASM_WRITTEN (decl) = 1;
1386 TREE_ASM_WRITTEN (id) = 1;
1387 emit_move_insn (mem, expand_normal (build_fold_addr_expr (decl)));
1388 shadow_base = expand_binop (Pmode, lshr_optab, base,
1389 GEN_INT (ASAN_SHADOW_SHIFT),
1390 NULL_RTX, 1, OPTAB_DIRECT);
1391 shadow_base
1392 = plus_constant (Pmode, shadow_base,
1393 asan_shadow_offset ()
1394 + (base_align_bias >> ASAN_SHADOW_SHIFT));
1395 gcc_assert (asan_shadow_set != -1
1396 && (ASAN_RED_ZONE_SIZE >> ASAN_SHADOW_SHIFT) == 4);
1397 shadow_mem = gen_rtx_MEM (SImode, shadow_base);
1398 set_mem_alias_set (shadow_mem, asan_shadow_set);
1399 if (STRICT_ALIGNMENT)
1400 set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
1401 prev_offset = base_offset;
1402 for (l = length; l; l -= 2)
1403 {
1404 if (l == 2)
1405 cur_shadow_byte = ASAN_STACK_MAGIC_RIGHT;
1406 offset = offsets[l - 1];
1407 if ((offset - base_offset) & (ASAN_RED_ZONE_SIZE - 1))
1408 {
1409 int i;
1410 HOST_WIDE_INT aoff
1411 = base_offset + ((offset - base_offset)
1412 & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
1413 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1414 (aoff - prev_offset)
1415 >> ASAN_SHADOW_SHIFT);
1416 prev_offset = aoff;
1417 for (i = 0; i < 4; i++, aoff += ASAN_SHADOW_GRANULARITY)
1418 if (aoff < offset)
1419 {
1420 if (aoff < offset - (HOST_WIDE_INT)ASAN_SHADOW_GRANULARITY + 1)
1421 shadow_bytes[i] = 0;
1422 else
1423 shadow_bytes[i] = offset - aoff;
1424 }
1425 else
1426 shadow_bytes[i] = ASAN_STACK_MAGIC_MIDDLE;
1427 emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
1428 offset = aoff;
1429 }
1430 while (offset <= offsets[l - 2] - ASAN_RED_ZONE_SIZE)
1431 {
1432 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1433 (offset - prev_offset)
1434 >> ASAN_SHADOW_SHIFT);
1435 prev_offset = offset;
1436 memset (shadow_bytes, cur_shadow_byte, 4);
1437 emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
1438 offset += ASAN_RED_ZONE_SIZE;
1439 }
1440 cur_shadow_byte = ASAN_STACK_MAGIC_MIDDLE;
1441 }
1442 do_pending_stack_adjust ();
1443
1444 /* Construct epilogue sequence. */
1445 start_sequence ();
1446
1447 lab = NULL;
1448 if (use_after_return_class != -1)
1449 {
1450 rtx_code_label *lab2 = gen_label_rtx ();
1451 char c = (char) ASAN_STACK_MAGIC_USE_AFTER_RET;
1452 emit_cmp_and_jump_insns (orig_base, base, EQ, NULL_RTX,
1453 VOIDmode, 0, lab2,
1454 profile_probability::very_likely ());
1455 shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
1456 set_mem_alias_set (shadow_mem, asan_shadow_set);
1457 mem = gen_rtx_MEM (ptr_mode, base);
1458 mem = adjust_address (mem, VOIDmode, base_align_bias);
1459 emit_move_insn (mem, gen_int_mode (ASAN_STACK_RETIRED_MAGIC, ptr_mode));
1460 unsigned HOST_WIDE_INT sz = asan_frame_size >> ASAN_SHADOW_SHIFT;
1461 if (use_after_return_class < 5
1462 && can_store_by_pieces (sz, builtin_memset_read_str, &c,
1463 BITS_PER_UNIT, true))
1464 store_by_pieces (shadow_mem, sz, builtin_memset_read_str, &c,
1465 BITS_PER_UNIT, true, 0);
1466 else if (use_after_return_class >= 5
1467 || !set_storage_via_setmem (shadow_mem,
1468 GEN_INT (sz),
1469 gen_int_mode (c, QImode),
1470 BITS_PER_UNIT, BITS_PER_UNIT,
1471 -1, sz, sz, sz))
1472 {
1473 snprintf (buf, sizeof buf, "__asan_stack_free_%d",
1474 use_after_return_class);
1475 ret = init_one_libfunc (buf);
1476 rtx addr = convert_memory_address (ptr_mode, base);
1477 rtx orig_addr = convert_memory_address (ptr_mode, orig_base);
1478 emit_library_call (ret, LCT_NORMAL, ptr_mode, addr, ptr_mode,
1479 GEN_INT (asan_frame_size + base_align_bias),
1480 TYPE_MODE (pointer_sized_int_node),
1481 orig_addr, ptr_mode);
1482 }
1483 lab = gen_label_rtx ();
1484 emit_jump (lab);
1485 emit_label (lab2);
1486 }
1487
1488 shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
1489 set_mem_alias_set (shadow_mem, asan_shadow_set);
1490
1491 if (STRICT_ALIGNMENT)
1492 set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
1493
1494 prev_offset = base_offset;
1495 last_offset = base_offset;
1496 last_size = 0;
1497 for (l = length; l; l -= 2)
1498 {
1499 offset = base_offset + ((offsets[l - 1] - base_offset)
1500 & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
1501 if (last_offset + last_size != offset)
1502 {
1503 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1504 (last_offset - prev_offset)
1505 >> ASAN_SHADOW_SHIFT);
1506 prev_offset = last_offset;
1507 asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
1508 last_offset = offset;
1509 last_size = 0;
1510 }
1511 last_size += base_offset + ((offsets[l - 2] - base_offset)
1512 & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
1513 - offset;
1514
1515 /* Unpoison shadow memory that corresponds to a variable that is
1516 subject to use-after-return sanitization. */
1517 if (l > 2)
1518 {
1519 decl = decls[l / 2 - 2];
1520 if (asan_handled_variables != NULL
1521 && asan_handled_variables->contains (decl))
1522 {
1523 HOST_WIDE_INT size = offsets[l - 3] - offsets[l - 2];
1524 if (dump_file && (dump_flags & TDF_DETAILS))
1525 {
1526 const char *n = (DECL_NAME (decl)
1527 ? IDENTIFIER_POINTER (DECL_NAME (decl))
1528 : "<unknown>");
1529 fprintf (dump_file, "Unpoisoning shadow stack for variable: "
1530 "%s (%" PRId64 " B)\n", n, size);
1531 }
1532
1533 last_size += size & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1);
1534 }
1535 }
1536 }
1537 if (last_size)
1538 {
1539 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1540 (last_offset - prev_offset)
1541 >> ASAN_SHADOW_SHIFT);
1542 asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
1543 }
1544
1545 /* Clean up the set of instrumented stack variables. */
1546 delete asan_handled_variables;
1547 asan_handled_variables = NULL;
1548 delete asan_used_labels;
1549 asan_used_labels = NULL;
1550
1551 do_pending_stack_adjust ();
1552 if (lab)
1553 emit_label (lab);
1554
1555 insns = get_insns ();
1556 end_sequence ();
1557 return insns;
1558 }
1559
1560 /* Emit an __asan_allocas_unpoison (top, bot) call. TOP and BOT are the
1561 arguments passed to that libasan routine. BEFORE, if non-NULL, is an existing
1562 insn sequence the call is appended to; otherwise a new sequence is started. */
1563
1564 rtx_insn *
1565 asan_emit_allocas_unpoison (rtx top, rtx bot, rtx_insn *before)
1566 {
1567 if (before)
1568 push_to_sequence (before);
1569 else
1570 start_sequence ();
1571 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
1572 top = convert_memory_address (ptr_mode, top);
1573 bot = convert_memory_address (ptr_mode, bot);
1574 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
1575 top, ptr_mode, bot, ptr_mode);
1576
1577 do_pending_stack_adjust ();
1578 rtx_insn *insns = get_insns ();
1579 end_sequence ();
1580 return insns;
1581 }
1582
1583 /* Return true if DECL, a global var, might be overridden and therefore
1584 needs a local alias. */
1585
1586 static bool
1587 asan_needs_local_alias (tree decl)
1588 {
1589 return DECL_WEAK (decl) || !targetm.binds_local_p (decl);
1590 }
1591
1592 /* Return true if DECL, a global var, is an artificial ODR indicator symbol
1593 and therefore doesn't need protection. */
1594
1595 static bool
1596 is_odr_indicator (tree decl)
1597 {
1598 return (DECL_ARTIFICIAL (decl)
1599 && lookup_attribute ("asan odr indicator", DECL_ATTRIBUTES (decl)));
1600 }
1601
1602 /* Return true if DECL is a VAR_DECL that should be protected
1603 by Address Sanitizer, by appending a red zone with protected
1604 shadow memory after it and aligning it to at least
1605 ASAN_RED_ZONE_SIZE bytes. */
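
/* For instance, a file-scope 'static int x;' with no special section is
   normally protected, while a TLS variable, a comdat var or a public
   common symbol is rejected by the checks below.  */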
1606
1607 bool
1608 asan_protect_global (tree decl)
1609 {
1610 if (!ASAN_GLOBALS)
1611 return false;
1612
1613 rtx rtl, symbol;
1614
1615 if (TREE_CODE (decl) == STRING_CST)
1616 {
1617 /* Instrument all STRING_CSTs except those created
1618 by asan_pp_string here. */
1619 if (shadow_ptr_types[0] != NULL_TREE
1620 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1621 && TREE_TYPE (TREE_TYPE (decl)) == TREE_TYPE (shadow_ptr_types[0]))
1622 return false;
1623 return true;
1624 }
1625 if (!VAR_P (decl)
1626 /* TLS vars aren't statically protectable. */
1627 || DECL_THREAD_LOCAL_P (decl)
1628 /* Externs will be protected elsewhere. */
1629 || DECL_EXTERNAL (decl)
1630 || !DECL_RTL_SET_P (decl)
1631 /* Comdat vars pose an ABI problem, we can't know if
1632 the var that is selected by the linker will have
1633 padding or not. */
1634 || DECL_ONE_ONLY (decl)
1635 /* Similarly for common vars. People can use -fno-common.
636 Note: the Linux kernel is built with -fno-common, so we do instrument
637 globals there even in C. */
1638 || (DECL_COMMON (decl) && TREE_PUBLIC (decl))
639 /* Don't protect vars placed in a user section: vars placed
640 into a user section from multiple TUs are often assumed
641 to form an array of such vars, and putting padding in there
642 breaks this assumption. */
1643 || (DECL_SECTION_NAME (decl) != NULL
1644 && !symtab_node::get (decl)->implicit_section
1645 && !section_sanitized_p (DECL_SECTION_NAME (decl)))
1646 || DECL_SIZE (decl) == 0
1647 || ASAN_RED_ZONE_SIZE * BITS_PER_UNIT > MAX_OFILE_ALIGNMENT
1648 || !valid_constant_size_p (DECL_SIZE_UNIT (decl))
1649 || DECL_ALIGN_UNIT (decl) > 2 * ASAN_RED_ZONE_SIZE
1650 || TREE_TYPE (decl) == ubsan_get_source_location_type ()
1651 || is_odr_indicator (decl))
1652 return false;
1653
1654 rtl = DECL_RTL (decl);
1655 if (!MEM_P (rtl) || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF)
1656 return false;
1657 symbol = XEXP (rtl, 0);
1658
1659 if (CONSTANT_POOL_ADDRESS_P (symbol)
1660 || TREE_CONSTANT_POOL_ADDRESS_P (symbol))
1661 return false;
1662
1663 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
1664 return false;
1665
1666 if (!TARGET_SUPPORTS_ALIASES && asan_needs_local_alias (decl))
1667 return false;
1668
1669 return true;
1670 }
1671
1672 /* Construct a function tree for __asan_report_{load,store}{1,2,4,8,16,_n}.
1673 IS_STORE is either 1 (for a store) or 0 (for a load). */
1674
1675 static tree
1676 report_error_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
1677 int *nargs)
1678 {
1679 static enum built_in_function report[2][2][6]
1680 = { { { BUILT_IN_ASAN_REPORT_LOAD1, BUILT_IN_ASAN_REPORT_LOAD2,
1681 BUILT_IN_ASAN_REPORT_LOAD4, BUILT_IN_ASAN_REPORT_LOAD8,
1682 BUILT_IN_ASAN_REPORT_LOAD16, BUILT_IN_ASAN_REPORT_LOAD_N },
1683 { BUILT_IN_ASAN_REPORT_STORE1, BUILT_IN_ASAN_REPORT_STORE2,
1684 BUILT_IN_ASAN_REPORT_STORE4, BUILT_IN_ASAN_REPORT_STORE8,
1685 BUILT_IN_ASAN_REPORT_STORE16, BUILT_IN_ASAN_REPORT_STORE_N } },
1686 { { BUILT_IN_ASAN_REPORT_LOAD1_NOABORT,
1687 BUILT_IN_ASAN_REPORT_LOAD2_NOABORT,
1688 BUILT_IN_ASAN_REPORT_LOAD4_NOABORT,
1689 BUILT_IN_ASAN_REPORT_LOAD8_NOABORT,
1690 BUILT_IN_ASAN_REPORT_LOAD16_NOABORT,
1691 BUILT_IN_ASAN_REPORT_LOAD_N_NOABORT },
1692 { BUILT_IN_ASAN_REPORT_STORE1_NOABORT,
1693 BUILT_IN_ASAN_REPORT_STORE2_NOABORT,
1694 BUILT_IN_ASAN_REPORT_STORE4_NOABORT,
1695 BUILT_IN_ASAN_REPORT_STORE8_NOABORT,
1696 BUILT_IN_ASAN_REPORT_STORE16_NOABORT,
1697 BUILT_IN_ASAN_REPORT_STORE_N_NOABORT } } };
1698 if (size_in_bytes == -1)
1699 {
1700 *nargs = 2;
1701 return builtin_decl_implicit (report[recover_p][is_store][5]);
1702 }
1703 *nargs = 1;
1704 int size_log2 = exact_log2 (size_in_bytes);
1705 return builtin_decl_implicit (report[recover_p][is_store][size_log2]);
1706 }
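
/* A sketch of the mapping above: a 4-byte store with recovery disabled
   selects index exact_log2 (4) == 2, i.e. __builtin___asan_report_store4
   with *NARGS == 1, while a variable-length access (SIZE_IN_BYTES == -1)
   selects __builtin___asan_report_store_n with *NARGS == 2 (address and
   length).  */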
1707
1708 /* Construct a function tree for __asan_{load,store}{1,2,4,8,16,_n}.
1709 IS_STORE is either 1 (for a store) or 0 (for a load). */
1710
1711 static tree
1712 check_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
1713 int *nargs)
1714 {
1715 static enum built_in_function check[2][2][6]
1716 = { { { BUILT_IN_ASAN_LOAD1, BUILT_IN_ASAN_LOAD2,
1717 BUILT_IN_ASAN_LOAD4, BUILT_IN_ASAN_LOAD8,
1718 BUILT_IN_ASAN_LOAD16, BUILT_IN_ASAN_LOADN },
1719 { BUILT_IN_ASAN_STORE1, BUILT_IN_ASAN_STORE2,
1720 BUILT_IN_ASAN_STORE4, BUILT_IN_ASAN_STORE8,
1721 BUILT_IN_ASAN_STORE16, BUILT_IN_ASAN_STOREN } },
1722 { { BUILT_IN_ASAN_LOAD1_NOABORT,
1723 BUILT_IN_ASAN_LOAD2_NOABORT,
1724 BUILT_IN_ASAN_LOAD4_NOABORT,
1725 BUILT_IN_ASAN_LOAD8_NOABORT,
1726 BUILT_IN_ASAN_LOAD16_NOABORT,
1727 BUILT_IN_ASAN_LOADN_NOABORT },
1728 { BUILT_IN_ASAN_STORE1_NOABORT,
1729 BUILT_IN_ASAN_STORE2_NOABORT,
1730 BUILT_IN_ASAN_STORE4_NOABORT,
1731 BUILT_IN_ASAN_STORE8_NOABORT,
1732 BUILT_IN_ASAN_STORE16_NOABORT,
1733 BUILT_IN_ASAN_STOREN_NOABORT } } };
1734 if (size_in_bytes == -1)
1735 {
1736 *nargs = 2;
1737 return builtin_decl_implicit (check[recover_p][is_store][5]);
1738 }
1739 *nargs = 1;
1740 int size_log2 = exact_log2 (size_in_bytes);
1741 return builtin_decl_implicit (check[recover_p][is_store][size_log2]);
1742 }
1743
1744 /* Split the current basic block and create a condition statement
1745 insertion point right before or after the statement pointed to by
1746 ITER. Return an iterator to the point at which the caller might
1747 safely insert the condition statement.
1748
1749 THEN_BLOCK must be set to the address of an uninitialized instance
1750 of basic_block. The function will then set *THEN_BLOCK to the
1751 'then block' of the condition statement to be inserted by the
1752 caller.
1753
1754 If CREATE_THEN_FALLTHRU_EDGE is false, no edge will be created from
1755 *THEN_BLOCK to *FALLTHROUGH_BLOCK.
1756
1757 Similarly, the function will set *FALLTHROUGH_BLOCK to the 'else
1758 block' of the condition statement to be inserted by the caller.
1759
1760 Note that *FALLTHROUGH_BLOCK is a new block that contains the
1761 statements starting from *ITER, and *THEN_BLOCK is a new empty
1762 block.
1763
1764 *ITER is adjusted to always point to the first statement of the
1765 basic block *FALLTHROUGH_BLOCK.  That statement is the same one
1766 ITER was pointing to prior to calling this function if BEFORE_P
1767 is true; otherwise, it is the statement following it.  */
1768
1769 gimple_stmt_iterator
1770 create_cond_insert_point (gimple_stmt_iterator *iter,
1771 bool before_p,
1772 bool then_more_likely_p,
1773 bool create_then_fallthru_edge,
1774 basic_block *then_block,
1775 basic_block *fallthrough_block)
1776 {
1777 gimple_stmt_iterator gsi = *iter;
1778
1779 if (!gsi_end_p (gsi) && before_p)
1780 gsi_prev (&gsi);
1781
1782 basic_block cur_bb = gsi_bb (*iter);
1783
1784 edge e = split_block (cur_bb, gsi_stmt (gsi));
1785
1786 /* Get a hold on the 'condition block', the 'then block' and the
1787 'else block'. */
1788 basic_block cond_bb = e->src;
1789 basic_block fallthru_bb = e->dest;
1790 basic_block then_bb = create_empty_bb (cond_bb);
1791 if (current_loops)
1792 {
1793 add_bb_to_loop (then_bb, cond_bb->loop_father);
1794 loops_state_set (LOOPS_NEED_FIXUP);
1795 }
1796
1797 /* Set up the newly created 'then block'. */
1798 e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
1799 profile_probability fallthrough_probability
1800 = then_more_likely_p
1801 ? profile_probability::very_unlikely ()
1802 : profile_probability::very_likely ();
1803 e->probability = fallthrough_probability.invert ();
1804 if (create_then_fallthru_edge)
1805 make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);
1806
1807 /* Set up the fallthrough basic block. */
1808 e = find_edge (cond_bb, fallthru_bb);
1809 e->flags = EDGE_FALSE_VALUE;
1810 e->probability = fallthrough_probability;
1811
1812 /* Update dominance info for the newly created then_bb; note that
1813 fallthru_bb's dominance info has already been updated by
1814 split_block.  */
1815 if (dom_info_available_p (CDI_DOMINATORS))
1816 set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
1817
1818 *then_block = then_bb;
1819 *fallthrough_block = fallthru_bb;
1820 *iter = gsi_start_bb (fallthru_bb);
1821
1822 return gsi_last_bb (cond_bb);
1823 }
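
/* A sketch of the CFG this produces (the caller inserts the condition
   at the returned iterator, i.e. at the end of COND_BB):

       COND_BB ---true---> THEN_BB (new, empty)
          \                   |
           false              |  (edge only if CREATE_THEN_FALLTHRU_EDGE)
            \                 v
             '------> FALLTHRU_BB (starts with the old *ITER statement)
 */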
1824
1825 /* Insert an if condition followed by a 'then block' right before the
1826 statement pointed to by ITER. The fallthrough block -- which is the
1827 else block of the condition as well as the destination of the
1828 outgoing edge of the 'then block' -- starts with the statement
1829 pointed to by ITER.
1830
1831 COND is the condition of the if.
1832
1833 If THEN_MORE_LIKELY_P is true, the probability of the edge to the
1834 'then block' is higher than the probability of the edge to the
1835 fallthrough block.
1836
1837 Upon completion of the function, *THEN_BB is set to the newly
1838 inserted 'then block' and similarly, *FALLTHROUGH_BB is set to the
1839 fallthrough block.
1840
1841 *ITER is adjusted to still point to the same statement it was
1842 pointing to initially. */
1843
1844 static void
1845 insert_if_then_before_iter (gcond *cond,
1846 gimple_stmt_iterator *iter,
1847 bool then_more_likely_p,
1848 basic_block *then_bb,
1849 basic_block *fallthrough_bb)
1850 {
1851 gimple_stmt_iterator cond_insert_point =
1852 create_cond_insert_point (iter,
1853 /*before_p=*/true,
1854 then_more_likely_p,
1855 /*create_then_fallthru_edge=*/true,
1856 then_bb,
1857 fallthrough_bb);
1858 gsi_insert_after (&cond_insert_point, cond, GSI_NEW_STMT);
1859 }
1860
1861 /* Build (base_addr >> ASAN_SHADOW_SHIFT) + asan_shadow_offset ().
1862 If RETURN_ADDRESS is set to true, return the shadow memory location
1863 instead of the value loaded from it.  */
1864
1865 static tree
1866 build_shadow_mem_access (gimple_stmt_iterator *gsi, location_t location,
1867 tree base_addr, tree shadow_ptr_type,
1868 bool return_address = false)
1869 {
1870 tree t, uintptr_type = TREE_TYPE (base_addr);
1871 tree shadow_type = TREE_TYPE (shadow_ptr_type);
1872 gimple *g;
1873
1874 t = build_int_cst (uintptr_type, ASAN_SHADOW_SHIFT);
1875 g = gimple_build_assign (make_ssa_name (uintptr_type), RSHIFT_EXPR,
1876 base_addr, t);
1877 gimple_set_location (g, location);
1878 gsi_insert_after (gsi, g, GSI_NEW_STMT);
1879
1880 t = build_int_cst (uintptr_type, asan_shadow_offset ());
1881 g = gimple_build_assign (make_ssa_name (uintptr_type), PLUS_EXPR,
1882 gimple_assign_lhs (g), t);
1883 gimple_set_location (g, location);
1884 gsi_insert_after (gsi, g, GSI_NEW_STMT);
1885
1886 g = gimple_build_assign (make_ssa_name (shadow_ptr_type), NOP_EXPR,
1887 gimple_assign_lhs (g));
1888 gimple_set_location (g, location);
1889 gsi_insert_after (gsi, g, GSI_NEW_STMT);
1890
1891 if (!return_address)
1892 {
1893 t = build2 (MEM_REF, shadow_type, gimple_assign_lhs (g),
1894 build_int_cst (shadow_ptr_type, 0));
1895 g = gimple_build_assign (make_ssa_name (shadow_type), MEM_REF, t);
1896 gimple_set_location (g, location);
1897 gsi_insert_after (gsi, g, GSI_NEW_STMT);
1898 }
1899
1900 return gimple_assign_lhs (g);
1901 }
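
/* A worked example of the GIMPLE emitted above (assuming
   ASAN_SHADOW_SHIFT is 3 and BASE_ADDR is _1):

     _2 = _1 >> 3;
     _3 = _2 + <asan_shadow_offset>;
     _4 = (shadow_ptr_type) _3;
     _5 = *_4;                // skipped when RETURN_ADDRESS is true

   and the returned tree is _5 (or _4 for RETURN_ADDRESS).  */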
1902
1903 /* BASE can already be an SSA_NAME; in that case, do not create a
1904 new SSA_NAME for it. */
1905
1906 static tree
1907 maybe_create_ssa_name (location_t loc, tree base, gimple_stmt_iterator *iter,
1908 bool before_p)
1909 {
1910 if (TREE_CODE (base) == SSA_NAME)
1911 return base;
1912 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (base)),
1913 TREE_CODE (base), base);
1914 gimple_set_location (g, loc);
1915 if (before_p)
1916 gsi_insert_before (iter, g, GSI_SAME_STMT);
1917 else
1918 gsi_insert_after (iter, g, GSI_NEW_STMT);
1919 return gimple_assign_lhs (g);
1920 }
1921
1922 /* LEN may already have the necessary size and precision;
1923 in that case, do not create a new variable.  */
1924
1925 tree
1926 maybe_cast_to_ptrmode (location_t loc, tree len, gimple_stmt_iterator *iter,
1927 bool before_p)
1928 {
1929 if (ptrofftype_p (len))
1930 return len;
1931 gimple *g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
1932 NOP_EXPR, len);
1933 gimple_set_location (g, loc);
1934 if (before_p)
1935 gsi_insert_before (iter, g, GSI_SAME_STMT);
1936 else
1937 gsi_insert_after (iter, g, GSI_NEW_STMT);
1938 return gimple_assign_lhs (g);
1939 }
1940
1941 /* Instrument the memory access whose address is BASE.  Insert new
1942 statements before or after ITER.
1943 
1944 Note that the memory access represented by BASE can be either an
1945 SSA_NAME, or a non-SSA expression.  LOC is the source code
1946 location.  IS_STORE is TRUE for a store, FALSE for a load.
1947 BEFORE_P is TRUE for inserting the instrumentation code before
1948 ITER, FALSE for inserting it after ITER.  IS_SCALAR_ACCESS is TRUE
1949 for a scalar memory access and FALSE for a memory region access.
1950 IS_NON_ZERO_LEN is TRUE if the memory region is guaranteed to have
1951 non-zero length.  ALIGN gives the alignment of the accessed object.
1952 
1953 SIZE_IN_BYTES is the access size if known at compile time, or -1,
1954 in which case LEN supplies the (possibly non-constant) length.
1955 
1956 If BEFORE_P is TRUE, *ITER is arranged to still point to the
1957 statement it was pointing to prior to calling this function,
1958 otherwise, it points to the statement logically following it.  */
1959
1960 static void
1961 build_check_stmt (location_t loc, tree base, tree len,
1962 HOST_WIDE_INT size_in_bytes, gimple_stmt_iterator *iter,
1963 bool is_non_zero_len, bool before_p, bool is_store,
1964 bool is_scalar_access, unsigned int align = 0)
1965 {
1966 gimple_stmt_iterator gsi = *iter;
1967 gimple *g;
1968
1969 gcc_assert (!(size_in_bytes > 0 && !is_non_zero_len));
1970
1972
1973 base = unshare_expr (base);
1974 base = maybe_create_ssa_name (loc, base, &gsi, before_p);
1975
1976 if (len)
1977 {
1978 len = unshare_expr (len);
1979 len = maybe_cast_to_ptrmode (loc, len, iter, before_p);
1980 }
1981 else
1982 {
1983 gcc_assert (size_in_bytes != -1);
1984 len = build_int_cst (pointer_sized_int_node, size_in_bytes);
1985 }
1986
1987 if (size_in_bytes > 1)
1988 {
1989 if ((size_in_bytes & (size_in_bytes - 1)) != 0
1990 || size_in_bytes > 16)
1991 is_scalar_access = false;
1992 else if (align && align < size_in_bytes * BITS_PER_UNIT)
1993 {
1994 /* On non-strict-alignment targets, if a
1995 16-byte access is only 8-byte aligned,
1996 the 2-byte shadow load will be
1997 misaligned, but otherwise the access
1998 can be handled with a single read.  */
1999 if (size_in_bytes != 16
2000 || STRICT_ALIGNMENT
2001 || align < 8 * BITS_PER_UNIT)
2002 is_scalar_access = false;
2003 }
2004 }
2005
2006 HOST_WIDE_INT flags = 0;
2007 if (is_store)
2008 flags |= ASAN_CHECK_STORE;
2009 if (is_non_zero_len)
2010 flags |= ASAN_CHECK_NON_ZERO_LEN;
2011 if (is_scalar_access)
2012 flags |= ASAN_CHECK_SCALAR_ACCESS;
2013
2014 g = gimple_build_call_internal (IFN_ASAN_CHECK, 4,
2015 build_int_cst (integer_type_node, flags),
2016 base, len,
2017 build_int_cst (integer_type_node,
2018 align / BITS_PER_UNIT));
2019 gimple_set_location (g, loc);
2020 if (before_p)
2021 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
2022 else
2023 {
2024 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2025 gsi_next (&gsi);
2026 *iter = gsi;
2027 }
2028 }
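
/* For instance, a naturally aligned 4-byte load becomes (schematically)

     ASAN_CHECK (ASAN_CHECK_NON_ZERO_LEN | ASAN_CHECK_SCALAR_ACCESS,
                 base, 4, 4);

   (plus ASAN_CHECK_STORE for stores); the internal call is turned into
   a real shadow test later, by asan_expand_check_ifn.  */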
2029
2030 /* If T represents a memory access, add instrumentation code before ITER.
2031 LOCATION is the source code location.
2032 IS_STORE is either TRUE (for a store) or FALSE (for a load). */
2033
2034 static void
2035 instrument_derefs (gimple_stmt_iterator *iter, tree t,
2036 location_t location, bool is_store)
2037 {
2038 if (is_store && !ASAN_INSTRUMENT_WRITES)
2039 return;
2040 if (!is_store && !ASAN_INSTRUMENT_READS)
2041 return;
2042
2043 tree type, base;
2044 HOST_WIDE_INT size_in_bytes;
2045 if (location == UNKNOWN_LOCATION)
2046 location = EXPR_LOCATION (t);
2047
2048 type = TREE_TYPE (t);
2049 switch (TREE_CODE (t))
2050 {
2051 case ARRAY_REF:
2052 case COMPONENT_REF:
2053 case INDIRECT_REF:
2054 case MEM_REF:
2055 case VAR_DECL:
2056 case BIT_FIELD_REF:
2057 break;
2059 default:
2060 return;
2061 }
2062
2063 size_in_bytes = int_size_in_bytes (type);
2064 if (size_in_bytes <= 0)
2065 return;
2066
2067 HOST_WIDE_INT bitsize, bitpos;
2068 tree offset;
2069 machine_mode mode;
2070 int unsignedp, reversep, volatilep = 0;
2071 tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset, &mode,
2072 &unsignedp, &reversep, &volatilep);
2073
2074 if (TREE_CODE (t) == COMPONENT_REF
2075 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)) != NULL_TREE)
2076 {
2077 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1));
2078 instrument_derefs (iter, build3 (COMPONENT_REF, TREE_TYPE (repr),
2079 TREE_OPERAND (t, 0), repr,
2080 TREE_OPERAND (t, 2)),
2081 location, is_store);
2082 return;
2083 }
2084
2085 if (bitpos % BITS_PER_UNIT
2086 || bitsize != size_in_bytes * BITS_PER_UNIT)
2087 return;
2088
2089 if (VAR_P (inner) && DECL_HARD_REGISTER (inner))
2090 return;
2091
2092 if (VAR_P (inner)
2093 && offset == NULL_TREE
2094 && bitpos >= 0
2095 && DECL_SIZE (inner)
2096 && tree_fits_shwi_p (DECL_SIZE (inner))
2097 && bitpos + bitsize <= tree_to_shwi (DECL_SIZE (inner)))
2098 {
2099 if (DECL_THREAD_LOCAL_P (inner))
2100 return;
2101 if (!ASAN_GLOBALS && is_global_var (inner))
2102 return;
2103 if (!TREE_STATIC (inner))
2104 {
2105 /* Automatic vars in the current function will always be
2106 accessible.  */
2107 if (decl_function_context (inner) == current_function_decl
2108 && (!asan_sanitize_use_after_scope ()
2109 || !TREE_ADDRESSABLE (inner)))
2110 return;
2111 }
2112 /* Always instrument external vars; they might be dynamically
2113 initialized.  */
2114 else if (!DECL_EXTERNAL (inner))
2115 {
2116 /* Static vars known not to be dynamically initialized will
2117 always be accessible.  */
2118 varpool_node *vnode = varpool_node::get (inner);
2119 if (vnode && !vnode->dynamically_initialized)
2120 return;
2121 }
2122 }
2123
2124 base = build_fold_addr_expr (t);
2125 if (!has_mem_ref_been_instrumented (base, size_in_bytes))
2126 {
2127 unsigned int align = get_object_alignment (t);
2128 build_check_stmt (location, base, NULL_TREE, size_in_bytes, iter,
2129 /*is_non_zero_len*/size_in_bytes > 0, /*before_p=*/true,
2130 is_store, /*is_scalar_access*/true, align);
2131 update_mem_ref_hash_table (base, size_in_bytes);
2132 update_mem_ref_hash_table (t, size_in_bytes);
2133 }
2134
2135 }
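
/* Example (with a hypothetical struct S and pointer p): given

     struct S { char buf[4]; int i; } *p;
     ... = p->i;

   the COMPONENT_REF p->i is a 4-byte scalar access, so &p->i is
   checked via build_check_stmt unless the pair <&p->i, 4> is already
   in the mem-ref hash table.  */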
2136
2137 /* Insert a memory reference into the hash table if its access length
2138 can be determined at compile time.  */
2139
2140 static void
2141 maybe_update_mem_ref_hash_table (tree base, tree len)
2142 {
2143 if (!POINTER_TYPE_P (TREE_TYPE (base))
2144 || !INTEGRAL_TYPE_P (TREE_TYPE (len)))
2145 return;
2146
2147 HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
2148
2149 if (size_in_bytes != -1)
2150 update_mem_ref_hash_table (base, size_in_bytes);
2151 }
2152
2153 /* Instrument an access to a contiguous memory region that starts at
2154 the address pointed to by BASE, over a length of LEN (expressed in
2155 units of sizeof (*BASE) bytes).  ITER points to the instruction before
2156 which the instrumentation instructions must be inserted. LOCATION
2157 is the source location that the instrumentation instructions must
2158 have. If IS_STORE is true, then the memory access is a store;
2159 otherwise, it's a load. */
2160
2161 static void
2162 instrument_mem_region_access (tree base, tree len,
2163 gimple_stmt_iterator *iter,
2164 location_t location, bool is_store)
2165 {
2166 if (!POINTER_TYPE_P (TREE_TYPE (base))
2167 || !INTEGRAL_TYPE_P (TREE_TYPE (len))
2168 || integer_zerop (len))
2169 return;
2170
2171 HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
2172
2173 if ((size_in_bytes == -1)
2174 || !has_mem_ref_been_instrumented (base, size_in_bytes))
2175 {
2176 build_check_stmt (location, base, len, size_in_bytes, iter,
2177 /*is_non_zero_len*/size_in_bytes > 0, /*before_p*/true,
2178 is_store, /*is_scalar_access*/false, /*align*/0);
2179 }
2180
2181 maybe_update_mem_ref_hash_table (base, len);
2182 *iter = gsi_for_stmt (gsi_stmt (*iter));
2183 }
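
/* E.g. when called with BASE == p and LEN == n for a region written by
   a builtin, a non-constant n gives SIZE_IN_BYTES == -1, and the
   emitted ASAN_CHECK keeps the length so that at expansion time both
   the first and the last byte of [p, p + n) are tested.  */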
2184
2185 /* Instrument the call to a built-in memory access function that is
2186 pointed to by the iterator ITER.
2187
2188 Upon completion, return TRUE iff *ITER has been advanced to the
2189 statement following the one it was originally pointing to. */
2190
2191 static bool
2192 instrument_builtin_call (gimple_stmt_iterator *iter)
2193 {
2194 if (!ASAN_MEMINTRIN)
2195 return false;
2196
2197 bool iter_advanced_p = false;
2198 gcall *call = as_a <gcall *> (gsi_stmt (*iter));
2199
2200 gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));
2201
2202 location_t loc = gimple_location (call);
2203
2204 asan_mem_ref src0, src1, dest;
2205 asan_mem_ref_init (&src0, NULL, 1);
2206 asan_mem_ref_init (&src1, NULL, 1);
2207 asan_mem_ref_init (&dest, NULL, 1);
2208
2209 tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
2210 bool src0_is_store = false, src1_is_store = false, dest_is_store = false,
2211 dest_is_deref = false, intercepted_p = true;
2212
2213 if (get_mem_refs_of_builtin_call (call,
2214 &src0, &src0_len, &src0_is_store,
2215 &src1, &src1_len, &src1_is_store,
2216 &dest, &dest_len, &dest_is_store,
2217 &dest_is_deref, &intercepted_p, iter))
2218 {
2219 if (dest_is_deref)
2220 {
2221 instrument_derefs (iter, dest.start, loc, dest_is_store);
2222 gsi_next (iter);
2223 iter_advanced_p = true;
2224 }
2225 else if (!intercepted_p
2226 && (src0_len || src1_len || dest_len))
2227 {
2228 if (src0.start != NULL_TREE)
2229 instrument_mem_region_access (src0.start, src0_len,
2230 iter, loc, /*is_store=*/false);
2231 if (src1.start != NULL_TREE)
2232 instrument_mem_region_access (src1.start, src1_len,
2233 iter, loc, /*is_store=*/false);
2234 if (dest.start != NULL_TREE)
2235 instrument_mem_region_access (dest.start, dest_len,
2236 iter, loc, /*is_store=*/true);
2237
2238 *iter = gsi_for_stmt (call);
2239 gsi_next (iter);
2240 iter_advanced_p = true;
2241 }
2242 else
2243 {
2244 if (src0.start != NULL_TREE)
2245 maybe_update_mem_ref_hash_table (src0.start, src0_len);
2246 if (src1.start != NULL_TREE)
2247 maybe_update_mem_ref_hash_table (src1.start, src1_len);
2248 if (dest.start != NULL_TREE)
2249 maybe_update_mem_ref_hash_table (dest.start, dest_len);
2250 }
2251 }
2252 return iter_advanced_p;
2253 }
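
/* Example: for an intercepted builtin such as memcpy (d, s, n) the
   runtime library itself checks both regions, so the final branch
   above merely records <s, n> and <d, n> in the mem-ref hash table;
   a non-intercepted builtin with known regions instead gets inline
   checks through instrument_mem_region_access.  */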
2254
2255 /* Instrument the assignment statement pointed to by ITER if it is
2256 subject to instrumentation.  Return TRUE iff instrumentation
2257 actually happened.  In that case, the iterator ITER is advanced to
2258 the next logical expression following the one initially pointed to
2259 by ITER, and the relevant memory reference whose access has been
2260 instrumented is added to the memory references hash table.  */
2261
2262 static bool
2263 maybe_instrument_assignment (gimple_stmt_iterator *iter)
2264 {
2265 gimple *s = gsi_stmt (*iter);
2266
2267 gcc_assert (gimple_assign_single_p (s));
2268
2269 tree ref_expr = NULL_TREE;
2270 bool is_store, is_instrumented = false;
2271
2272 if (gimple_store_p (s))
2273 {
2274 ref_expr = gimple_assign_lhs (s);
2275 is_store = true;
2276 instrument_derefs (iter, ref_expr,
2277 gimple_location (s),
2278 is_store);
2279 is_instrumented = true;
2280 }
2281
2282 if (gimple_assign_load_p (s))
2283 {
2284 ref_expr = gimple_assign_rhs1 (s);
2285 is_store = false;
2286 instrument_derefs (iter, ref_expr,
2287 gimple_location (s),
2288 is_store);
2289 is_instrumented = true;
2290 }
2291
2292 if (is_instrumented)
2293 gsi_next (iter);
2294
2295 return is_instrumented;
2296 }
2297
2298 /* Instrument the function call pointed to by the iterator ITER, if it
2299 is subject to instrumentation. At the moment, the only function
2300 calls that are instrumented are some built-in functions that access
2301 memory. Look at instrument_builtin_call to learn more.
2302
2303 Upon completion return TRUE iff *ITER was advanced to the statement
2304 following the one it was originally pointing to. */
2305
2306 static bool
2307 maybe_instrument_call (gimple_stmt_iterator *iter)
2308 {
2309 gimple *stmt = gsi_stmt (*iter);
2310 bool is_builtin = gimple_call_builtin_p (stmt, BUILT_IN_NORMAL);
2311
2312 if (is_builtin && instrument_builtin_call (iter))
2313 return true;
2314
2315 if (gimple_call_noreturn_p (stmt))
2316 {
2317 if (is_builtin)
2318 {
2319 tree callee = gimple_call_fndecl (stmt);
2320 switch (DECL_FUNCTION_CODE (callee))
2321 {
2322 case BUILT_IN_UNREACHABLE:
2323 case BUILT_IN_TRAP:
2324 /* Don't instrument these. */
2325 return false;
2326 default:
2327 break;
2328 }
2329 }
2330 tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
2331 gimple *g = gimple_build_call (decl, 0);
2332 gimple_set_location (g, gimple_location (stmt));
2333 gsi_insert_before (iter, g, GSI_SAME_STMT);
2334 }
2335
2336 bool instrumented = false;
2337 if (gimple_store_p (stmt))
2338 {
2339 tree ref_expr = gimple_call_lhs (stmt);
2340 instrument_derefs (iter, ref_expr,
2341 gimple_location (stmt),
2342 /*is_store=*/true);
2343
2344 instrumented = true;
2345 }
2346
2347 /* Walk through gimple_call arguments and check them if needed.  */
2348 unsigned args_num = gimple_call_num_args (stmt);
2349 for (unsigned i = 0; i < args_num; ++i)
2350 {
2351 tree arg = gimple_call_arg (stmt, i);
2352 /* If ARG is not a non-aggregate register variable, the compiler
2353 generally creates a temporary for it and passes that as the call
2354 argument.  But in some cases, e.g. when passing by value a small
2355 structure that fits in a register, the compiler can avoid the extra
2356 overhead by eliding these temporaries; then we must check ARG itself.  */
2357 if (!is_gimple_reg (arg) && !is_gimple_min_invariant (arg))
2358 {
2359 instrument_derefs (iter, arg,
2360 gimple_location (stmt),
2361 /*is_store=*/false);
2362 instrumented = true;
2363 }
2364 }
2365 if (instrumented)
2366 gsi_next (iter);
2367 return instrumented;
2368 }
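
/* For instance, given the noreturn call

     exit (1);

   the pass emits

     __builtin___asan_handle_no_return ();
     exit (1);

   so the runtime can unpoison the thread's stack before control
   leaves all instrumented frames (BUILT_IN_TRAP and
   BUILT_IN_UNREACHABLE are deliberately left alone).  */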
2369
2370 /* Walk each instruction of every basic block and instrument those that
2371 represent memory references: loads, stores, or function calls.
2372 In a given basic block, this function avoids instrumenting memory
2373 references that have already been instrumented. */
2374
2375 static void
2376 transform_statements (void)
2377 {
2378 basic_block bb, last_bb = NULL;
2379 gimple_stmt_iterator i;
2380 int saved_last_basic_block = last_basic_block_for_fn (cfun);
2381
2382 FOR_EACH_BB_FN (bb, cfun)
2383 {
2384 basic_block prev_bb = bb;
2385
2386 if (bb->index >= saved_last_basic_block) continue;
2387
2388 /* Flush the mem ref hash table, if current bb doesn't have
2389 exactly one predecessor, or if that predecessor (skipping
2390 over asan created basic blocks) isn't the last processed
2391 basic block. Thus we effectively flush on extended basic
2392 block boundaries. */
2393 while (single_pred_p (prev_bb))
2394 {
2395 prev_bb = single_pred (prev_bb);
2396 if (prev_bb->index < saved_last_basic_block)
2397 break;
2398 }
2399 if (prev_bb != last_bb)
2400 empty_mem_ref_hash_table ();
2401 last_bb = bb;
2402
2403 for (i = gsi_start_bb (bb); !gsi_end_p (i);)
2404 {
2405 gimple *s = gsi_stmt (i);
2406
2407 if (has_stmt_been_instrumented_p (s))
2408 gsi_next (&i);
2409 else if (gimple_assign_single_p (s)
2410 && !gimple_clobber_p (s)
2411 && maybe_instrument_assignment (&i))
2412 /* Nothing to do as maybe_instrument_assignment advanced
2413 the iterator I. */;
2414 else if (is_gimple_call (s) && maybe_instrument_call (&i))
2415 /* Nothing to do as maybe_instrument_call
2416 advanced the iterator I. */;
2417 else
2418 {
2419 /* No instrumentation happened.
2420
2421 If the current instruction is a function call that
2422 might free something, let's forget about the memory
2423 references that got instrumented. Otherwise we might
2424 miss some instrumentation opportunities. Do the same
2425 for an ASAN_MARK poisoning internal function.  */
2426 if (is_gimple_call (s)
2427 && (!nonfreeing_call_p (s)
2428 || asan_mark_p (s, ASAN_MARK_POISON)))
2429 empty_mem_ref_hash_table ();
2430
2431 gsi_next (&i);
2432 }
2433 }
2434 }
2435 free_mem_ref_resources ();
2436 }
2437
2438 /* Build
2439 __asan_before_dynamic_init (module_name)
2440 or
2441 __asan_after_dynamic_init ()
2442 call. */
2443
2444 tree
2445 asan_dynamic_init_call (bool after_p)
2446 {
2447 if (shadow_ptr_types[0] == NULL_TREE)
2448 asan_init_shadow_ptr_types ();
2449
2450 tree fn = builtin_decl_implicit (after_p
2451 ? BUILT_IN_ASAN_AFTER_DYNAMIC_INIT
2452 : BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT);
2453 tree module_name_cst = NULL_TREE;
2454 if (!after_p)
2455 {
2456 pretty_printer module_name_pp;
2457 pp_string (&module_name_pp, main_input_filename);
2458
2459 module_name_cst = asan_pp_string (&module_name_pp);
2460 module_name_cst = fold_convert (const_ptr_type_node,
2461 module_name_cst);
2462 }
2463
2464 return build_call_expr (fn, after_p ? 0 : 1, module_name_cst);
2465 }
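
/* A TU with dynamically initialized globals thus gets, around its
   static initialization code, roughly

     __asan_before_dynamic_init ("<main_input_filename>");
     ... original dynamic initializers ...
     __asan_after_dynamic_init ();

   which is what the runtime uses to flag initialization-order bugs.  */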
2466
2467 /* Build
2468 struct __asan_global
2469 {
2470 const void *__beg;
2471 uptr __size;
2472 uptr __size_with_redzone;
2473 const void *__name;
2474 const void *__module_name;
2475 uptr __has_dynamic_init;
2476 __asan_global_source_location *__location;
2477 char *__odr_indicator;
2478 } type. */
2479
2480 static tree
2481 asan_global_struct (void)
2482 {
2483 static const char *field_names[]
2484 = { "__beg", "__size", "__size_with_redzone",
2485 "__name", "__module_name", "__has_dynamic_init", "__location",
2486 "__odr_indicator" };
2487 tree fields[ARRAY_SIZE (field_names)], ret;
2488 unsigned i;
2489
2490 ret = make_node (RECORD_TYPE);
2491 for (i = 0; i < ARRAY_SIZE (field_names); i++)
2492 {
2493 fields[i]
2494 = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
2495 get_identifier (field_names[i]),
2496 (i == 0 || i == 3) ? const_ptr_type_node
2497 : pointer_sized_int_node);
2498 DECL_CONTEXT (fields[i]) = ret;
2499 if (i)
2500 DECL_CHAIN (fields[i - 1]) = fields[i];
2501 }
2502 tree type_decl = build_decl (input_location, TYPE_DECL,
2503 get_identifier ("__asan_global"), ret);
2504 DECL_IGNORED_P (type_decl) = 1;
2505 DECL_ARTIFICIAL (type_decl) = 1;
2506 TYPE_FIELDS (ret) = fields[0];
2507 TYPE_NAME (ret) = type_decl;
2508 TYPE_STUB_DECL (ret) = type_decl;
2509 layout_type (ret);
2510 return ret;
2511 }
2512
2513 /* Create and return odr indicator symbol for DECL.
2514 TYPE is __asan_global struct type as returned by asan_global_struct. */
2515
2516 static tree
2517 create_odr_indicator (tree decl, tree type)
2518 {
2519 char *name;
2520 tree uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
2521 tree decl_name
2522 = (HAS_DECL_ASSEMBLER_NAME_P (decl) ? DECL_ASSEMBLER_NAME (decl)
2523 : DECL_NAME (decl));
2524 /* DECL_NAME theoretically might be NULL. Bail out with 0 in this case. */
2525 if (decl_name == NULL_TREE)
2526 return build_int_cst (uptr, 0);
2527 const char *dname = IDENTIFIER_POINTER (decl_name);
2528 if (HAS_DECL_ASSEMBLER_NAME_P (decl))
2529 dname = targetm.strip_name_encoding (dname);
2530 size_t len = strlen (dname) + sizeof ("__odr_asan_");
2531 name = XALLOCAVEC (char, len);
2532 snprintf (name, len, "__odr_asan_%s", dname);
2533 #ifndef NO_DOT_IN_LABEL
2534 name[sizeof ("__odr_asan") - 1] = '.';
2535 #elif !defined(NO_DOLLAR_IN_LABEL)
2536 name[sizeof ("__odr_asan") - 1] = '$';
2537 #endif
2538 tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (name),
2539 char_type_node);
2540 TREE_ADDRESSABLE (var) = 1;
2541 TREE_READONLY (var) = 0;
2542 TREE_THIS_VOLATILE (var) = 1;
2543 DECL_GIMPLE_REG_P (var) = 0;
2544 DECL_ARTIFICIAL (var) = 1;
2545 DECL_IGNORED_P (var) = 1;
2546 TREE_STATIC (var) = 1;
2547 TREE_PUBLIC (var) = 1;
2548 DECL_VISIBILITY (var) = DECL_VISIBILITY (decl);
2549 DECL_VISIBILITY_SPECIFIED (var) = DECL_VISIBILITY_SPECIFIED (decl);
2550
2551 TREE_USED (var) = 1;
2552 tree ctor = build_constructor_va (TREE_TYPE (var), 1, NULL_TREE,
2553 build_int_cst (unsigned_type_node, 0));
2554 TREE_CONSTANT (ctor) = 1;
2555 TREE_STATIC (ctor) = 1;
2556 DECL_INITIAL (var) = ctor;
2557 DECL_ATTRIBUTES (var) = tree_cons (get_identifier ("asan odr indicator"),
2558 NULL, DECL_ATTRIBUTES (var));
2559 make_decl_rtl (var);
2560 varpool_node::finalize_decl (var);
2561 return fold_convert (uptr, build_fold_addr_expr (var));
2562 }
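
/* E.g. for a public global 'int foo;' this creates a one-byte symbol
   named __odr_asan.foo (or __odr_asan$foo / __odr_asan_foo, depending
   on NO_DOT_IN_LABEL and NO_DOLLAR_IN_LABEL), whose address ends up in
   the __odr_indicator field of foo's __asan_global descriptor.  */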
2563
2564 /* Return true if DECL, a global var, might be overridden and needs
2565 an additional odr indicator symbol. */
2566
2567 static bool
2568 asan_needs_odr_indicator_p (tree decl)
2569 {
2570 /* Don't emit ODR indicators for the kernel because:
2571 a) The kernel is written in C and thus doesn't need ODR indicators.
2572 b) Some kernel code may rely on symbol names containing specific
2573 patterns.  Since ODR indicators embed the original names of the
2574 symbols they are emitted for, such assumptions would be broken by
2575 ODR indicator symbols.  */
2576 return (!(flag_sanitize & SANITIZE_KERNEL_ADDRESS)
2577 && !DECL_ARTIFICIAL (decl)
2578 && !DECL_WEAK (decl)
2579 && TREE_PUBLIC (decl));
2580 }
2581
2582 /* Append description of a single global DECL into vector V.
2583 TYPE is __asan_global struct type as returned by asan_global_struct. */
2584
2585 static void
2586 asan_add_global (tree decl, tree type, vec<constructor_elt, va_gc> *v)
2587 {
2588 tree init, uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
2589 unsigned HOST_WIDE_INT size;
2590 tree str_cst, module_name_cst, refdecl = decl;
2591 vec<constructor_elt, va_gc> *vinner = NULL;
2592
2593 pretty_printer asan_pp, module_name_pp;
2594
2595 if (DECL_NAME (decl))
2596 pp_tree_identifier (&asan_pp, DECL_NAME (decl));
2597 else
2598 pp_string (&asan_pp, "<unknown>");
2599 str_cst = asan_pp_string (&asan_pp);
2600
2601 pp_string (&module_name_pp, main_input_filename);
2602 module_name_cst = asan_pp_string (&module_name_pp);
2603
2604 if (asan_needs_local_alias (decl))
2605 {
2606 char buf[20];
2607 ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", vec_safe_length (v) + 1);
2608 refdecl = build_decl (DECL_SOURCE_LOCATION (decl),
2609 VAR_DECL, get_identifier (buf), TREE_TYPE (decl));
2610 TREE_ADDRESSABLE (refdecl) = TREE_ADDRESSABLE (decl);
2611 TREE_READONLY (refdecl) = TREE_READONLY (decl);
2612 TREE_THIS_VOLATILE (refdecl) = TREE_THIS_VOLATILE (decl);
2613 DECL_GIMPLE_REG_P (refdecl) = DECL_GIMPLE_REG_P (decl);
2614 DECL_ARTIFICIAL (refdecl) = DECL_ARTIFICIAL (decl);
2615 DECL_IGNORED_P (refdecl) = DECL_IGNORED_P (decl);
2616 TREE_STATIC (refdecl) = 1;
2617 TREE_PUBLIC (refdecl) = 0;
2618 TREE_USED (refdecl) = 1;
2619 assemble_alias (refdecl, DECL_ASSEMBLER_NAME (decl));
2620 }
2621
2622 tree odr_indicator_ptr
2623 = (asan_needs_odr_indicator_p (decl) ? create_odr_indicator (decl, type)
2624 : build_int_cst (uptr, 0));
2625 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2626 fold_convert (const_ptr_type_node,
2627 build_fold_addr_expr (refdecl)));
2628 size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
2629 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
2630 size += asan_red_zone_size (size);
2631 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
2632 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2633 fold_convert (const_ptr_type_node, str_cst));
2634 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2635 fold_convert (const_ptr_type_node, module_name_cst));
2636 varpool_node *vnode = varpool_node::get (decl);
2637 int has_dynamic_init = 0;
2638 /* FIXME: Enable initialization order fiasco detection in LTO mode once
2639 a proper fix for PR 79061 has been applied.  */
2640 if (!in_lto_p)
2641 has_dynamic_init = vnode ? vnode->dynamically_initialized : 0;
2642 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2643 build_int_cst (uptr, has_dynamic_init));
2644 tree locptr = NULL_TREE;
2645 location_t loc = DECL_SOURCE_LOCATION (decl);
2646 expanded_location xloc = expand_location (loc);
2647 if (xloc.file != NULL)
2648 {
2649 static int lasanloccnt = 0;
2650 char buf[25];
2651 ASM_GENERATE_INTERNAL_LABEL (buf, "LASANLOC", ++lasanloccnt);
2652 tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
2653 ubsan_get_source_location_type ());
2654 TREE_STATIC (var) = 1;
2655 TREE_PUBLIC (var) = 0;
2656 DECL_ARTIFICIAL (var) = 1;
2657 DECL_IGNORED_P (var) = 1;
2658 pretty_printer filename_pp;
2659 pp_string (&filename_pp, xloc.file);
2660 tree str = asan_pp_string (&filename_pp);
2661 tree ctor = build_constructor_va (TREE_TYPE (var), 3,
2662 NULL_TREE, str, NULL_TREE,
2663 build_int_cst (unsigned_type_node,
2664 xloc.line), NULL_TREE,
2665 build_int_cst (unsigned_type_node,
2666 xloc.column));
2667 TREE_CONSTANT (ctor) = 1;
2668 TREE_STATIC (ctor) = 1;
2669 DECL_INITIAL (var) = ctor;
2670 varpool_node::finalize_decl (var);
2671 locptr = fold_convert (uptr, build_fold_addr_expr (var));
2672 }
2673 else
2674 locptr = build_int_cst (uptr, 0);
2675 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, locptr);
2676 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, odr_indicator_ptr);
2677 init = build_constructor (type, vinner);
2678 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, init);
2679 }
2680
2681 /* Initialize sanitizer.def builtins if the FE hasn't initialized them. */
2682 void
2683 initialize_sanitizer_builtins (void)
2684 {
2685 tree decl;
2686
2687 if (builtin_decl_implicit_p (BUILT_IN_ASAN_INIT))
2688 return;
2689
2690 tree BT_FN_VOID = build_function_type_list (void_type_node, NULL_TREE);
2691 tree BT_FN_VOID_PTR
2692 = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
2693 tree BT_FN_VOID_CONST_PTR
2694 = build_function_type_list (void_type_node, const_ptr_type_node, NULL_TREE);
2695 tree BT_FN_VOID_PTR_PTR
2696 = build_function_type_list (void_type_node, ptr_type_node,
2697 ptr_type_node, NULL_TREE);
2698 tree BT_FN_VOID_PTR_PTR_PTR
2699 = build_function_type_list (void_type_node, ptr_type_node,
2700 ptr_type_node, ptr_type_node, NULL_TREE);
2701 tree BT_FN_VOID_PTR_PTRMODE
2702 = build_function_type_list (void_type_node, ptr_type_node,
2703 pointer_sized_int_node, NULL_TREE);
2704 tree BT_FN_VOID_INT
2705 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
2706 tree BT_FN_SIZE_CONST_PTR_INT
2707 = build_function_type_list (size_type_node, const_ptr_type_node,
2708 integer_type_node, NULL_TREE);
2709
2710 tree BT_FN_VOID_UINT8_UINT8
2711 = build_function_type_list (void_type_node, unsigned_char_type_node,
2712 unsigned_char_type_node, NULL_TREE);
2713 tree BT_FN_VOID_UINT16_UINT16
2714 = build_function_type_list (void_type_node, uint16_type_node,
2715 uint16_type_node, NULL_TREE);
2716 tree BT_FN_VOID_UINT32_UINT32
2717 = build_function_type_list (void_type_node, uint32_type_node,
2718 uint32_type_node, NULL_TREE);
2719 tree BT_FN_VOID_UINT64_UINT64
2720 = build_function_type_list (void_type_node, uint64_type_node,
2721 uint64_type_node, NULL_TREE);
2722 tree BT_FN_VOID_FLOAT_FLOAT
2723 = build_function_type_list (void_type_node, float_type_node,
2724 float_type_node, NULL_TREE);
2725 tree BT_FN_VOID_DOUBLE_DOUBLE
2726 = build_function_type_list (void_type_node, double_type_node,
2727 double_type_node, NULL_TREE);
2728 tree BT_FN_VOID_UINT64_PTR
2729 = build_function_type_list (void_type_node, uint64_type_node,
2730 ptr_type_node, NULL_TREE);
2731
2732 tree BT_FN_BOOL_VPTR_PTR_IX_INT_INT[5];
2733 tree BT_FN_IX_CONST_VPTR_INT[5];
2734 tree BT_FN_IX_VPTR_IX_INT[5];
2735 tree BT_FN_VOID_VPTR_IX_INT[5];
2736 tree vptr
2737 = build_pointer_type (build_qualified_type (void_type_node,
2738 TYPE_QUAL_VOLATILE));
2739 tree cvptr
2740 = build_pointer_type (build_qualified_type (void_type_node,
2741 TYPE_QUAL_VOLATILE
2742 |TYPE_QUAL_CONST));
2743 tree boolt
2744 = lang_hooks.types.type_for_size (BOOL_TYPE_SIZE, 1);
2745 int i;
2746 for (i = 0; i < 5; i++)
2747 {
2748 tree ix = build_nonstandard_integer_type (BITS_PER_UNIT * (1 << i), 1);
2749 BT_FN_BOOL_VPTR_PTR_IX_INT_INT[i]
2750 = build_function_type_list (boolt, vptr, ptr_type_node, ix,
2751 integer_type_node, integer_type_node,
2752 NULL_TREE);
2753 BT_FN_IX_CONST_VPTR_INT[i]
2754 = build_function_type_list (ix, cvptr, integer_type_node, NULL_TREE);
2755 BT_FN_IX_VPTR_IX_INT[i]
2756 = build_function_type_list (ix, vptr, ix, integer_type_node,
2757 NULL_TREE);
2758 BT_FN_VOID_VPTR_IX_INT[i]
2759 = build_function_type_list (void_type_node, vptr, ix,
2760 integer_type_node, NULL_TREE);
2761 }
2762 #define BT_FN_BOOL_VPTR_PTR_I1_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[0]
2763 #define BT_FN_I1_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[0]
2764 #define BT_FN_I1_VPTR_I1_INT BT_FN_IX_VPTR_IX_INT[0]
2765 #define BT_FN_VOID_VPTR_I1_INT BT_FN_VOID_VPTR_IX_INT[0]
2766 #define BT_FN_BOOL_VPTR_PTR_I2_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[1]
2767 #define BT_FN_I2_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[1]
2768 #define BT_FN_I2_VPTR_I2_INT BT_FN_IX_VPTR_IX_INT[1]
2769 #define BT_FN_VOID_VPTR_I2_INT BT_FN_VOID_VPTR_IX_INT[1]
2770 #define BT_FN_BOOL_VPTR_PTR_I4_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[2]
2771 #define BT_FN_I4_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[2]
2772 #define BT_FN_I4_VPTR_I4_INT BT_FN_IX_VPTR_IX_INT[2]
2773 #define BT_FN_VOID_VPTR_I4_INT BT_FN_VOID_VPTR_IX_INT[2]
2774 #define BT_FN_BOOL_VPTR_PTR_I8_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[3]
2775 #define BT_FN_I8_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[3]
2776 #define BT_FN_I8_VPTR_I8_INT BT_FN_IX_VPTR_IX_INT[3]
2777 #define BT_FN_VOID_VPTR_I8_INT BT_FN_VOID_VPTR_IX_INT[3]
2778 #define BT_FN_BOOL_VPTR_PTR_I16_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[4]
2779 #define BT_FN_I16_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[4]
2780 #define BT_FN_I16_VPTR_I16_INT BT_FN_IX_VPTR_IX_INT[4]
2781 #define BT_FN_VOID_VPTR_I16_INT BT_FN_VOID_VPTR_IX_INT[4]
2782 #undef ATTR_NOTHROW_LEAF_LIST
2783 #define ATTR_NOTHROW_LEAF_LIST ECF_NOTHROW | ECF_LEAF
2784 #undef ATTR_TMPURE_NOTHROW_LEAF_LIST
2785 #define ATTR_TMPURE_NOTHROW_LEAF_LIST ECF_TM_PURE | ATTR_NOTHROW_LEAF_LIST
2786 #undef ATTR_NORETURN_NOTHROW_LEAF_LIST
2787 #define ATTR_NORETURN_NOTHROW_LEAF_LIST ECF_NORETURN | ATTR_NOTHROW_LEAF_LIST
2788 #undef ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
2789 #define ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST \
2790 ECF_CONST | ATTR_NORETURN_NOTHROW_LEAF_LIST
2791 #undef ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST
2792 #define ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST \
2793 ECF_TM_PURE | ATTR_NORETURN_NOTHROW_LEAF_LIST
2794 #undef ATTR_COLD_NOTHROW_LEAF_LIST
2795 #define ATTR_COLD_NOTHROW_LEAF_LIST \
2796 /* ECF_COLD missing */ ATTR_NOTHROW_LEAF_LIST
2797 #undef ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST
2798 #define ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST \
2799 /* ECF_COLD missing */ ATTR_NORETURN_NOTHROW_LEAF_LIST
2800 #undef ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST
2801 #define ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST \
2802 /* ECF_COLD missing */ ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
2803 #undef ATTR_PURE_NOTHROW_LEAF_LIST
2804 #define ATTR_PURE_NOTHROW_LEAF_LIST ECF_PURE | ATTR_NOTHROW_LEAF_LIST
2805 #undef DEF_BUILTIN_STUB
2806 #define DEF_BUILTIN_STUB(ENUM, NAME)
2807 #undef DEF_SANITIZER_BUILTIN
2808 #define DEF_SANITIZER_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
2809 do { \
2810 decl = add_builtin_function ("__builtin_" NAME, TYPE, ENUM, \
2811 BUILT_IN_NORMAL, NAME, NULL_TREE); \
2812 set_call_expr_flags (decl, ATTRS); \
2813 set_builtin_decl (ENUM, decl, true); \
2814 } while (0);
2815
2816 #include "sanitizer.def"
2817
2818 /* -fsanitize=object-size uses __builtin_object_size, but that might
2819 not be available for e.g. Fortran at this point. We use
2820 DEF_SANITIZER_BUILTIN here only as a convenience macro. */
2821 if ((flag_sanitize & SANITIZE_OBJECT_SIZE)
2822 && !builtin_decl_implicit_p (BUILT_IN_OBJECT_SIZE))
2823 DEF_SANITIZER_BUILTIN (BUILT_IN_OBJECT_SIZE, "object_size",
2824 BT_FN_SIZE_CONST_PTR_INT,
2825 ATTR_PURE_NOTHROW_LEAF_LIST)
2826
2827 #undef DEF_SANITIZER_BUILTIN
2828 #undef DEF_BUILTIN_STUB
2829 }
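
/* As an illustration, a sanitizer.def entry such as the one for
   __asan_init expands through DEF_SANITIZER_BUILTIN above to roughly

     decl = add_builtin_function ("__builtin___asan_init", BT_FN_VOID,
                                  BUILT_IN_ASAN_INIT, BUILT_IN_NORMAL,
                                  "__asan_init", NULL_TREE);
     set_call_expr_flags (decl, ATTR_NOTHROW_LEAF_LIST);
     set_builtin_decl (BUILT_IN_ASAN_INIT, decl, true);

   (the exact attribute list comes from sanitizer.def).  */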
2830
2831 /* Called via htab_traverse. Count number of emitted
2832 STRING_CSTs in the constant hash table. */
2833
2834 int
2835 count_string_csts (constant_descriptor_tree **slot,
2836 unsigned HOST_WIDE_INT *data)
2837 {
2838 struct constant_descriptor_tree *desc = *slot;
2839 if (TREE_CODE (desc->value) == STRING_CST
2840 && TREE_ASM_WRITTEN (desc->value)
2841 && asan_protect_global (desc->value))
2842 ++*data;
2843 return 1;
2844 }
2845
2846 /* Helper structure to pass two parameters to
2847 add_string_csts. */
2848
2849 struct asan_add_string_csts_data
2850 {
2851 tree type;
2852 vec<constructor_elt, va_gc> *v;
2853 };
2854
2855 /* Called via hash_table::traverse. Call asan_add_global
2856 on emitted STRING_CSTs from the constant hash table. */
2857
2858 int
2859 add_string_csts (constant_descriptor_tree **slot,
2860 asan_add_string_csts_data *aascd)
2861 {
2862 struct constant_descriptor_tree *desc = *slot;
2863 if (TREE_CODE (desc->value) == STRING_CST
2864 && TREE_ASM_WRITTEN (desc->value)
2865 && asan_protect_global (desc->value))
2866 {
2867 asan_add_global (SYMBOL_REF_DECL (XEXP (desc->rtl, 0)),
2868 aascd->type, aascd->v);
2869 }
2870 return 1;
2871 }
2872
2873 /* Needs to be GTY(()), because cgraph_build_static_cdtor may
2874 invoke ggc_collect. */
2875 static GTY(()) tree asan_ctor_statements;
2876
2877 /* Module-level instrumentation.
2878 - Insert __asan_init_vN() into the list of CTORs.
2879 - TODO: insert redzones around globals.
2880 */
2881
2882 void
2883 asan_finish_file (void)
2884 {
2885 varpool_node *vnode;
2886 unsigned HOST_WIDE_INT gcount = 0;
2887
2888 if (shadow_ptr_types[0] == NULL_TREE)
2889 asan_init_shadow_ptr_types ();
2890 /* Avoid instrumenting code in the asan ctors/dtors.
2891 We don't need to insert padding after the description strings,
2892 nor after the .LASAN* array.  */
2893 flag_sanitize &= ~SANITIZE_ADDRESS;
2894
2895 /* For user-space we want asan constructors to run first.
2896 The Linux kernel does not support priorities other than the default,
2897 and the only other user of constructors is coverage, so there we run
2898 with the default priority.  */
2899 int priority = flag_sanitize & SANITIZE_USER_ADDRESS
2900 ? MAX_RESERVED_INIT_PRIORITY - 1 : DEFAULT_INIT_PRIORITY;
2901
2902 if (flag_sanitize & SANITIZE_USER_ADDRESS)
2903 {
2904 tree fn = builtin_decl_implicit (BUILT_IN_ASAN_INIT);
2905 append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
2906 fn = builtin_decl_implicit (BUILT_IN_ASAN_VERSION_MISMATCH_CHECK);
2907 append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
2908 }
2909 FOR_EACH_DEFINED_VARIABLE (vnode)
2910 if (TREE_ASM_WRITTEN (vnode->decl)
2911 && asan_protect_global (vnode->decl))
2912 ++gcount;
2913 hash_table<tree_descriptor_hasher> *const_desc_htab = constant_pool_htab ();
2914 const_desc_htab->traverse<unsigned HOST_WIDE_INT *, count_string_csts>
2915 (&gcount);
2916 if (gcount)
2917 {
2918 tree type = asan_global_struct (), var, ctor;
2919 tree dtor_statements = NULL_TREE;
2920 vec<constructor_elt, va_gc> *v;
2921 char buf[20];
2922
2923 type = build_array_type_nelts (type, gcount);
2924 ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", 0);
2925 var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
2926 type);
2927 TREE_STATIC (var) = 1;
2928 TREE_PUBLIC (var) = 0;
2929 DECL_ARTIFICIAL (var) = 1;
2930 DECL_IGNORED_P (var) = 1;
2931 vec_alloc (v, gcount);
2932 FOR_EACH_DEFINED_VARIABLE (vnode)
2933 if (TREE_ASM_WRITTEN (vnode->decl)
2934 && asan_protect_global (vnode->decl))
2935 asan_add_global (vnode->decl, TREE_TYPE (type), v);
2936 struct asan_add_string_csts_data aascd;
2937 aascd.type = TREE_TYPE (type);
2938 aascd.v = v;
2939 const_desc_htab->traverse<asan_add_string_csts_data *, add_string_csts>
2940 (&aascd);
2941 ctor = build_constructor (type, v);
2942 TREE_CONSTANT (ctor) = 1;
2943 TREE_STATIC (ctor) = 1;
2944 DECL_INITIAL (var) = ctor;
2945 varpool_node::finalize_decl (var);
2946
2947 tree fn = builtin_decl_implicit (BUILT_IN_ASAN_REGISTER_GLOBALS);
2948 tree gcount_tree = build_int_cst (pointer_sized_int_node, gcount);
2949 append_to_statement_list (build_call_expr (fn, 2,
2950 build_fold_addr_expr (var),
2951 gcount_tree),
2952 &asan_ctor_statements);
2953
2954 fn = builtin_decl_implicit (BUILT_IN_ASAN_UNREGISTER_GLOBALS);
2955 append_to_statement_list (build_call_expr (fn, 2,
2956 build_fold_addr_expr (var),
2957 gcount_tree),
2958 &dtor_statements);
2959 cgraph_build_static_cdtor ('D', dtor_statements, priority);
2960 }
2961 if (asan_ctor_statements)
2962 cgraph_build_static_cdtor ('I', asan_ctor_statements, priority);
2963 flag_sanitize |= SANITIZE_ADDRESS;
2964 }
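
/* The module constructor generated here therefore looks roughly like
   (names illustrative; the version-mismatch symbol varies with the
   ASan API version):

     static void ctor (void)
     {
       __asan_init ();
       __asan_version_mismatch_check_vN ();
       __asan_register_globals (&.LASAN0, <gcount>);
     }

   plus a matching destructor calling __asan_unregister_globals.  */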
2965
2966 /* Poison or unpoison (depending on IS_CLOBBER) the shadow memory at the
2967 SHADOW address.  Newly added statements are inserted at ITER with the
2968 given location LOC.  We store SIZE shadow bytes; LAST_CHUNK_SIZE is
2969 greater than zero when the store covers the end of a variable that
2970 does not fill its last shadow granule.  */
2971
2972 static void
2973 asan_store_shadow_bytes (gimple_stmt_iterator *iter, location_t loc,
2974 tree shadow,
2975 unsigned HOST_WIDE_INT base_addr_offset,
2976 bool is_clobber, unsigned size,
2977 unsigned last_chunk_size)
2978 {
2979 tree shadow_ptr_type;
2980
2981 switch (size)
2982 {
2983 case 1:
2984 shadow_ptr_type = shadow_ptr_types[0];
2985 break;
2986 case 2:
2987 shadow_ptr_type = shadow_ptr_types[1];
2988 break;
2989 case 4:
2990 shadow_ptr_type = shadow_ptr_types[2];
2991 break;
2992 default:
2993 gcc_unreachable ();
2994 }
2995
2996 unsigned char c = is_clobber ? ASAN_STACK_MAGIC_USE_AFTER_SCOPE : 0;
2997 unsigned HOST_WIDE_INT val = 0;
2998 unsigned last_pos = size;
2999 if (last_chunk_size && !is_clobber)
3000 last_pos = BYTES_BIG_ENDIAN ? 0 : size - 1;
3001 for (unsigned i = 0; i < size; ++i)
3002 {
3003 unsigned char shadow_c = c;
3004 if (i == last_pos)
3005 shadow_c = last_chunk_size;
3006 val |= (unsigned HOST_WIDE_INT) shadow_c << (BITS_PER_UNIT * i);
3007 }
3008
3009 /* Handle last chunk in unpoisoning. */
3010 tree magic = build_int_cst (TREE_TYPE (shadow_ptr_type), val);
3011
3012 tree dest = build2 (MEM_REF, TREE_TYPE (shadow_ptr_type), shadow,
3013 build_int_cst (shadow_ptr_type, base_addr_offset));
3014
3015 gimple *g = gimple_build_assign (dest, magic);
3016 gimple_set_location (g, loc);
3017 gsi_insert_after (iter, g, GSI_NEW_STMT);
3018 }
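
/* Worked example (little-endian, SIZE == 4, assuming the usual 0xf8
   use-after-scope magic): poisoning stores the constant 0xf8f8f8f8;
   unpoisoning a variable whose final 8-byte granule has only
   LAST_CHUNK_SIZE == 5 addressable bytes stores 0x05000000, i.e. zero
   for the fully addressable granules and 5 in the last shadow byte.  */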
3019
3020 /* Expand the ASAN_MARK builtins. */
3021
3022 bool
3023 asan_expand_mark_ifn (gimple_stmt_iterator *iter)
3024 {
3025 gimple *g = gsi_stmt (*iter);
3026 location_t loc = gimple_location (g);
3027 HOST_WIDE_INT flag = tree_to_shwi (gimple_call_arg (g, 0));
3028 bool is_poison = ((asan_mark_flags)flag) == ASAN_MARK_POISON;
3029
3030 tree base = gimple_call_arg (g, 1);
3031 gcc_checking_assert (TREE_CODE (base) == ADDR_EXPR);
3032 tree decl = TREE_OPERAND (base, 0);
3033
3034 /* For a nested function, we can have: ASAN_MARK (2, &FRAME.2.fp_input, 4) */
3035 if (TREE_CODE (decl) == COMPONENT_REF
3036 && DECL_NONLOCAL_FRAME (TREE_OPERAND (decl, 0)))
3037 decl = TREE_OPERAND (decl, 0);
3038
3039 gcc_checking_assert (TREE_CODE (decl) == VAR_DECL);
3040
3041 if (is_poison)
3042 {
3043 if (asan_handled_variables == NULL)
3044 asan_handled_variables = new hash_set<tree> (16);
3045 asan_handled_variables->add (decl);
3046 }
3047 tree len = gimple_call_arg (g, 2);
3048
3049 gcc_assert (tree_fits_shwi_p (len));
3050 unsigned HOST_WIDE_INT size_in_bytes = tree_to_shwi (len);
3051 gcc_assert (size_in_bytes);
3052
3053 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
3054 NOP_EXPR, base);
3055 gimple_set_location (g, loc);
3056 gsi_replace (iter, g, false);
3057 tree base_addr = gimple_assign_lhs (g);
3058
3059 /* Emit the shadow stores directly when SIZE_IN_BYTES is small.  */
3060 if (size_in_bytes <= ASAN_PARAM_USE_AFTER_SCOPE_DIRECT_EMISSION_THRESHOLD)
3061 {
3062 unsigned HOST_WIDE_INT shadow_size = shadow_mem_size (size_in_bytes);
3063
3064 tree shadow = build_shadow_mem_access (iter, loc, base_addr,
3065 shadow_ptr_types[0], true);
3066
3067 for (unsigned HOST_WIDE_INT offset = 0; offset < shadow_size;)
3068 {
3069 unsigned size = 1;
3070 if (shadow_size - offset >= 4)
3071 size = 4;
3072 else if (shadow_size - offset >= 2)
3073 size = 2;
3074
3075 unsigned HOST_WIDE_INT last_chunk_size = 0;
3076 unsigned HOST_WIDE_INT s = (offset + size) * ASAN_SHADOW_GRANULARITY;
3077 if (s > size_in_bytes)
3078 last_chunk_size = ASAN_SHADOW_GRANULARITY - (s - size_in_bytes);
3079
3080 asan_store_shadow_bytes (iter, loc, shadow, offset, is_poison,
3081 size, last_chunk_size);
3082 offset += size;
3083 }
3084 }
3085 else
3086 {
3087 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
3088 NOP_EXPR, len);
3089 gimple_set_location (g, loc);
3090 gsi_insert_before (iter, g, GSI_SAME_STMT);
3091 tree sz_arg = gimple_assign_lhs (g);
3092
3093 tree fun
3094 = builtin_decl_implicit (is_poison ? BUILT_IN_ASAN_POISON_STACK_MEMORY
3095 : BUILT_IN_ASAN_UNPOISON_STACK_MEMORY);
3096 g = gimple_build_call (fun, 2, base_addr, sz_arg);
3097 gimple_set_location (g, loc);
3098 gsi_insert_after (iter, g, GSI_NEW_STMT);
3099 }
3100
3101 return false;
3102 }
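
/* E.g. an ASAN_MARK (POISON, &x, len) whose len exceeds
   ASAN_PARAM_USE_AFTER_SCOPE_DIRECT_EMISSION_THRESHOLD becomes

     __asan_poison_stack_memory (&x, len);

   (or the unpoison counterpart), while smaller sizes are expanded
   inline into the handful of 1/2/4-byte shadow stores built above.  */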
3103
3104 /* Expand the ASAN_CHECK internal function into an explicit shadow test.  */
3105
3106 bool
3107 asan_expand_check_ifn (gimple_stmt_iterator *iter, bool use_calls)
3108 {
3109 gimple *g = gsi_stmt (*iter);
3110 location_t loc = gimple_location (g);
3111 bool recover_p;
3112 if (flag_sanitize & SANITIZE_USER_ADDRESS)
3113 recover_p = (flag_sanitize_recover & SANITIZE_USER_ADDRESS) != 0;
3114 else
3115 recover_p = (flag_sanitize_recover & SANITIZE_KERNEL_ADDRESS) != 0;
3116
3117 HOST_WIDE_INT flags = tree_to_shwi (gimple_call_arg (g, 0));
3118 gcc_assert (flags < ASAN_CHECK_LAST);
3119 bool is_scalar_access = (flags & ASAN_CHECK_SCALAR_ACCESS) != 0;
3120 bool is_store = (flags & ASAN_CHECK_STORE) != 0;
3121 bool is_non_zero_len = (flags & ASAN_CHECK_NON_ZERO_LEN) != 0;
3122
3123 tree base = gimple_call_arg (g, 1);
3124 tree len = gimple_call_arg (g, 2);
3125 HOST_WIDE_INT align = tree_to_shwi (gimple_call_arg (g, 3));
3126
3127 HOST_WIDE_INT size_in_bytes
3128 = is_scalar_access && tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
3129
3130 if (use_calls)
3131 {
3132 /* Instrument using callbacks. */
3133 gimple *g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
3134 NOP_EXPR, base);
3135 gimple_set_location (g, loc);
3136 gsi_insert_before (iter, g, GSI_SAME_STMT);
3137 tree base_addr = gimple_assign_lhs (g);
3138
3139 int nargs;
3140 tree fun = check_func (is_store, recover_p, size_in_bytes, &nargs);
3141 if (nargs == 1)
3142 g = gimple_build_call (fun, 1, base_addr);
3143 else
3144 {
3145 gcc_assert (nargs == 2);
3146 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
3147 NOP_EXPR, len);
3148 gimple_set_location (g, loc);
3149 gsi_insert_before (iter, g, GSI_SAME_STMT);
3150 tree sz_arg = gimple_assign_lhs (g);
3151 g = gimple_build_call (fun, nargs, base_addr, sz_arg);
3152 }
3153 gimple_set_location (g, loc);
3154 gsi_replace (iter, g, false);
3155 return false;
3156 }
3157
3158 HOST_WIDE_INT real_size_in_bytes = size_in_bytes == -1 ? 1 : size_in_bytes;
3159
3160 tree shadow_ptr_type = shadow_ptr_types[real_size_in_bytes == 16 ? 1 : 0];
3161 tree shadow_type = TREE_TYPE (shadow_ptr_type);
3162
3163 gimple_stmt_iterator gsi = *iter;
3164
3165 if (!is_non_zero_len)
3166 {
3167 /* So, the length of the memory area to asan-protect is
3168 non-constant. Let's guard the generated instrumentation code
3169 like:
3170
3171 if (len != 0)
3172 {
3173 //asan instrumentation code goes here.
3174 }
3175 // fallthrough instructions, starting with *ITER.  */
3176
3177 g = gimple_build_cond (NE_EXPR,
3178 len,
3179 build_int_cst (TREE_TYPE (len), 0),
3180 NULL_TREE, NULL_TREE);
3181 gimple_set_location (g, loc);
3182
3183 basic_block then_bb, fallthrough_bb;
3184 insert_if_then_before_iter (as_a <gcond *> (g), iter,
3185 /*then_more_likely_p=*/true,
3186 &then_bb, &fallthrough_bb);
3187 /* Note that fallthrough_bb starts with the statement that was
3188 pointed to by ITER. */
3189
3190 /* The 'then block' of the 'if (len != 0)' condition is where
3191 we'll generate the asan instrumentation code now.  */
3192 gsi = gsi_last_bb (then_bb);
3193 }
3194
3195 /* Get an iterator on the point where we can add the condition
3196 statement for the instrumentation. */
3197 basic_block then_bb, else_bb;
3198 gsi = create_cond_insert_point (&gsi, /*before_p*/false,
3199 /*then_more_likely_p=*/false,
3200 /*create_then_fallthru_edge*/recover_p,
3201 &then_bb,
3202 &else_bb);
3203
3204 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
3205 NOP_EXPR, base);
3206 gimple_set_location (g, loc);
3207 gsi_insert_before (&gsi, g, GSI_NEW_STMT);
3208 tree base_addr = gimple_assign_lhs (g);
3209
3210 tree t = NULL_TREE;
3211 if (real_size_in_bytes >= 8)
3212 {
3213 tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
3214 shadow_ptr_type);
3215 t = shadow;
3216 }
3217 else
3218 {
3219 /* Slow path for 1-, 2- and 4-byte accesses.  */
3220 /* Test ((shadow != 0)
3221 & ((base_addr & 7) + (real_size_in_bytes - 1) >= shadow)).  */
3222 tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
3223 shadow_ptr_type);
3224 gimple *shadow_test = build_assign (NE_EXPR, shadow, 0);
3225 gimple_seq seq = NULL;
3226 gimple_seq_add_stmt (&seq, shadow_test);
3227 /* Accesses aligned to >= 8 bytes can test just
3228 (real_size_in_bytes - 1 >= shadow), as base_addr & 7 is known
3229 to be 0. */
3230 if (align < 8)
3231 {
3232 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
3233 base_addr, 7));
3234 gimple_seq_add_stmt (&seq,
3235 build_type_cast (shadow_type,
3236 gimple_seq_last (seq)));
3237 if (real_size_in_bytes > 1)
3238 gimple_seq_add_stmt (&seq,
3239 build_assign (PLUS_EXPR,
3240 gimple_seq_last (seq),
3241 real_size_in_bytes - 1));
3242 t = gimple_assign_lhs (gimple_seq_last_stmt (seq));
3243 }
3244 else
3245 t = build_int_cst (shadow_type, real_size_in_bytes - 1);
3246 gimple_seq_add_stmt (&seq, build_assign (GE_EXPR, t, shadow));
3247 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
3248 gimple_seq_last (seq)));
3249 t = gimple_assign_lhs (gimple_seq_last (seq));
3250 gimple_seq_set_location (seq, loc);
3251 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
3252
3253 /* For non-constant, misaligned or otherwise weird access sizes,
3254 check first and last byte. */
3255 if (size_in_bytes == -1)
3256 {
3257 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
3258 MINUS_EXPR, len,
3259 build_int_cst (pointer_sized_int_node, 1));
3260 gimple_set_location (g, loc);
3261 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
3262 tree last = gimple_assign_lhs (g);
3263 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
3264 PLUS_EXPR, base_addr, last);
3265 gimple_set_location (g, loc);
3266 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
3267 tree base_end_addr = gimple_assign_lhs (g);
3268
3269 tree shadow = build_shadow_mem_access (&gsi, loc, base_end_addr,
3270 shadow_ptr_type);
3271 gimple *shadow_test = build_assign (NE_EXPR, shadow, 0);
3272 gimple_seq seq = NULL;
3273 gimple_seq_add_stmt (&seq, shadow_test);
3274 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
3275 base_end_addr, 7));
3276 gimple_seq_add_stmt (&seq, build_type_cast (shadow_type,
3277 gimple_seq_last (seq)));
3278 gimple_seq_add_stmt (&seq, build_assign (GE_EXPR,
3279 gimple_seq_last (seq),
3280 shadow));
3281 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
3282 gimple_seq_last (seq)));
3283 gimple_seq_add_stmt (&seq, build_assign (BIT_IOR_EXPR, t,
3284 gimple_seq_last (seq)));
3285 t = gimple_assign_lhs (gimple_seq_last (seq));
3286 gimple_seq_set_location (seq, loc);
3287 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
3288 }
3289 }
3290
3291 g = gimple_build_cond (NE_EXPR, t, build_int_cst (TREE_TYPE (t), 0),
3292 NULL_TREE, NULL_TREE);
3293 gimple_set_location (g, loc);
3294 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
3295
3296 /* Generate a call to the run-time library (e.g. __asan_report_load8). */
3297 gsi = gsi_start_bb (then_bb);
3298 int nargs;
3299 tree fun = report_error_func (is_store, recover_p, size_in_bytes, &nargs);
3300 g = gimple_build_call (fun, nargs, base_addr, len);
3301 gimple_set_location (g, loc);
3302 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
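/* For a known access size the call built above resolves to e.g.
   __asan_report_load8 (base_addr) with NARGS == 1; when SIZE_IN_BYTES
   is -1 it resolves to __asan_report_load_n (base_addr, len) with
   NARGS == 2. The exact callee is chosen by report_error_func. */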
3303
3304 gsi_remove (iter, true);
3305 *iter = gsi_start_bb (else_bb);
3306
3307 return true;
3308 }
3309
3310 /* Create an ASAN shadow variable for a VAR_DECL that has been rewritten
3311 into SSA. Already-seen VAR_DECLs are stored in SHADOW_VARS_MAPPING. */
3312
3313 static tree
3314 create_asan_shadow_var (tree var_decl,
3315 hash_map<tree, tree> &shadow_vars_mapping)
3316 {
3317 tree *slot = shadow_vars_mapping.get (var_decl);
3318 if (slot == NULL)
3319 {
3320 tree shadow_var = copy_node (var_decl);
3321
3322 copy_body_data id;
3323 memset (&id, 0, sizeof (copy_body_data));
3324 id.src_fn = id.dst_fn = current_function_decl;
3325 copy_decl_for_dup_finish (&id, var_decl, shadow_var);
3326
3327 DECL_ARTIFICIAL (shadow_var) = 1;
3328 DECL_IGNORED_P (shadow_var) = 1;
3329 DECL_SEEN_IN_BIND_EXPR_P (shadow_var) = 0;
3330 gimple_add_tmp_var (shadow_var);
3331
3332 shadow_vars_mapping.put (var_decl, shadow_var);
3333 return shadow_var;
3334 }
3335 else
3336 return *slot;
3337 }
3338
3339 /* Expand ASAN_POISON ifn. */
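/* A hedged sketch of the transformation performed here (the GIMPLE is
   illustrative): a definition

     ptr_2 = ASAN_POISON ();

   becomes

     ASAN_MARK (POISON, &shadow_of_ptr, size);

   where shadow_of_ptr is an artificial copy of ptr's VAR_DECL, and
   every real (non-debug) use of ptr_2 is preceded (or, for
   IFN_ASAN_POISON_USE, replaced) by a call to the matching
   __asan_report_* function on &shadow_of_ptr, so that a use of the
   poisoned value reports an error at run time. */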
3340
3341 bool
3342 asan_expand_poison_ifn (gimple_stmt_iterator *iter,
3343 bool *need_commit_edge_insert,
3344 hash_map<tree, tree> &shadow_vars_mapping)
3345 {
3346 gimple *g = gsi_stmt (*iter);
3347 tree poisoned_var = gimple_call_lhs (g);
3348 if (!poisoned_var || has_zero_uses (poisoned_var))
3349 {
3350 gsi_remove (iter, true);
3351 return true;
3352 }
3353
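/* An anonymous SSA name has no underlying VAR_DECL to shadow, so give
   it a fresh temporary first; create_asan_shadow_var below copies that
   decl. */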
3354 if (SSA_NAME_VAR (poisoned_var) == NULL_TREE)
3355 SET_SSA_NAME_VAR_OR_IDENTIFIER (poisoned_var,
3356 create_tmp_var (TREE_TYPE (poisoned_var)));
3357
3358 tree shadow_var = create_asan_shadow_var (SSA_NAME_VAR (poisoned_var),
3359 shadow_vars_mapping);
3360
3361 bool recover_p;
3362 if (flag_sanitize & SANITIZE_USER_ADDRESS)
3363 recover_p = (flag_sanitize_recover & SANITIZE_USER_ADDRESS) != 0;
3364 else
3365 recover_p = (flag_sanitize_recover & SANITIZE_KERNEL_ADDRESS) != 0;
3366 tree size = DECL_SIZE_UNIT (shadow_var);
3367 gimple *poison_call
3368 = gimple_build_call_internal (IFN_ASAN_MARK, 3,
3369 build_int_cst (integer_type_node,
3370 ASAN_MARK_POISON),
3371 build_fold_addr_expr (shadow_var), size);
3372
3373 gimple *use;
3374 imm_use_iterator imm_iter;
3375 FOR_EACH_IMM_USE_STMT (use, imm_iter, poisoned_var)
3376 {
3377 if (is_gimple_debug (use))
3378 continue;
3379
3380 int nargs;
3381 bool store_p = gimple_call_internal_p (use, IFN_ASAN_POISON_USE);
3382 tree fun = report_error_func (store_p, recover_p, tree_to_uhwi (size),
3383 &nargs);
3384
3385 gcall *call = gimple_build_call (fun, 1,
3386 build_fold_addr_expr (shadow_var));
3387 gimple_set_location (call, gimple_location (use));
3388 gimple *call_to_insert = call;
3389
3390 /* The USE can be a gimple PHI node. If so, insert the call on each
3391 incoming edge that carries the poisoned SSA name and can be split. */
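/* E.g. (a sketch) for

     # ptr_3 = PHI <ptr_2(bb4), other_1(bb5)>

   the report call is inserted on the bb4 edge only, and the caller is
   told via NEED_COMMIT_EDGE_INSERT to run gsi_commit_edge_inserts ()
   afterwards. */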
3392 if (is_a <gphi *> (use))
3393 {
3394 gphi *phi = as_a <gphi *> (use);
3395 for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
3396 if (gimple_phi_arg_def (phi, i) == poisoned_var)
3397 {
3398 edge e = gimple_phi_arg_edge (phi, i);
3399
3400 /* Do not insert on an edge we can't split. */
3401 if (e->flags & EDGE_ABNORMAL)
3402 continue;
3403
3404 if (call_to_insert == NULL)
3405 call_to_insert = gimple_copy (call);
3406
3407 gsi_insert_seq_on_edge (e, call_to_insert);
3408 *need_commit_edge_insert = true;
3409 call_to_insert = NULL;
3410 }
3411 }
3412 else
3413 {
3414 gimple_stmt_iterator gsi = gsi_for_stmt (use);
3415 if (store_p)
3416 gsi_replace (&gsi, call, true);
3417 else
3418 gsi_insert_before (&gsi, call, GSI_NEW_STMT);
3419 }
3420 }
3421
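/* The statement defining POISONED_VAR is replaced below, so turn the
   name into a default definition with a nop as its def stmt to keep
   the SSA form consistent. */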
3422 SSA_NAME_IS_DEFAULT_DEF (poisoned_var) = true;
3423 SSA_NAME_DEF_STMT (poisoned_var) = gimple_build_nop ();
3424 gsi_replace (iter, poison_call, false);
3425
3426 return true;
3427 }
3428
3429 /* Instrument the current function. */
3430
3431 static unsigned int
3432 asan_instrument (void)
3433 {
3434 if (shadow_ptr_types[0] == NULL_TREE)
3435 asan_init_shadow_ptr_types ();
3436 transform_statements ();
3437 last_alloca_addr = NULL_TREE;
3438 return 0;
3439 }
3440
3441 static bool
3442 gate_asan (void)
3443 {
3444 return sanitize_flags_p (SANITIZE_ADDRESS);
3445 }
3446
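/* Two pass instances follow: "asan", gated on the sanitizer flags
   alone, and "asan0", additionally gated on !optimize; the intent is
   that each function is instrumented by exactly one of them. */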
3447 namespace {
3448
3449 const pass_data pass_data_asan =
3450 {
3451 GIMPLE_PASS, /* type */
3452 "asan", /* name */
3453 OPTGROUP_NONE, /* optinfo_flags */
3454 TV_NONE, /* tv_id */
3455 ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
3456 0, /* properties_provided */
3457 0, /* properties_destroyed */
3458 0, /* todo_flags_start */
3459 TODO_update_ssa, /* todo_flags_finish */
3460 };
3461
3462 class pass_asan : public gimple_opt_pass
3463 {
3464 public:
3465 pass_asan (gcc::context *ctxt)
3466 : gimple_opt_pass (pass_data_asan, ctxt)
3467 {}
3468
3469 /* opt_pass methods: */
3470 opt_pass * clone () { return new pass_asan (m_ctxt); }
3471 virtual bool gate (function *) { return gate_asan (); }
3472 virtual unsigned int execute (function *) { return asan_instrument (); }
3473
3474 }; // class pass_asan
3475
3476 } // anon namespace
3477
3478 gimple_opt_pass *
3479 make_pass_asan (gcc::context *ctxt)
3480 {
3481 return new pass_asan (ctxt);
3482 }
3483
3484 namespace {
3485
3486 const pass_data pass_data_asan_O0 =
3487 {
3488 GIMPLE_PASS, /* type */
3489 "asan0", /* name */
3490 OPTGROUP_NONE, /* optinfo_flags */
3491 TV_NONE, /* tv_id */
3492 ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
3493 0, /* properties_provided */
3494 0, /* properties_destroyed */
3495 0, /* todo_flags_start */
3496 TODO_update_ssa, /* todo_flags_finish */
3497 };
3498
3499 class pass_asan_O0 : public gimple_opt_pass
3500 {
3501 public:
3502 pass_asan_O0 (gcc::context *ctxt)
3503 : gimple_opt_pass (pass_data_asan_O0, ctxt)
3504 {}
3505
3506 /* opt_pass methods: */
3507 virtual bool gate (function *) { return !optimize && gate_asan (); }
3508 virtual unsigned int execute (function *) { return asan_instrument (); }
3509
3510 }; // class pass_asan_O0
3511
3512 } // anon namespace
3513
3514 gimple_opt_pass *
3515 make_pass_asan_O0 (gcc::context *ctxt)
3516 {
3517 return new pass_asan_O0 (ctxt);
3518 }
3519
3520 #include "gt-asan.h"