/* AddressSanitizer, a fast memory error detector.
   Copyright (C) 2012-2020 Free Software Foundation, Inc.
   Contributed by Kostya Serebryany <kcc@google.com>

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it under
   the terms of the GNU General Public License as published by the Free
   Software Foundation; either version 3, or (at your option) any later
   version.

   GCC is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
   for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "memmodel.h"
#include "tm_p.h"
#include "ssa.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "alias.h"
#include "fold-const.h"
#include "cfganal.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "varasm.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "dojump.h"
#include "explow.h"
#include "expr.h"
#include "output.h"
#include "langhooks.h"
#include "cfgloop.h"
#include "gimple-builder.h"
#include "gimple-fold.h"
#include "ubsan.h"
#include "builtins.h"
#include "fnmatch.h"
#include "tree-inline.h"

/* AddressSanitizer finds out-of-bounds and use-after-free bugs
   with <2x slowdown on average.

   The tool consists of two parts:
   instrumentation module (this file) and a run-time library.
   The instrumentation module adds a run-time check before every memory insn.
     For an 8- or 16-byte load accessing address X:
       ShadowAddr = (X >> 3) + Offset
       ShadowValue = *(char*)ShadowAddr;  // *(short*) for 16-byte access.
       if (ShadowValue)
         __asan_report_load8(X);
     For a load of N bytes (N=1, 2 or 4) from address X:
       ShadowAddr = (X >> 3) + Offset
       ShadowValue = *(char*)ShadowAddr;
       if (ShadowValue)
         if ((X & 7) + N - 1 >= ShadowValue)
           __asan_report_loadN(X);
   Stores are instrumented similarly, but using __asan_report_storeN functions.
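
   As a concrete illustration, assume the common x86_64 runtime shadow
   offset 0x7fff8000 (an example value only; the real one comes from
   targetm.asan_shadow_offset or -fasan-shadow-offset).  A 4-byte load
   from X = 0x1000 then inspects the shadow byte at

     ShadowAddr = (0x1000 >> 3) + 0x7fff8000 = 0x7fff8200

   Since X & 7 == 0, the access covers bytes 0..3 of the 8-byte granule:
   a shadow value of 0 (whole granule addressable) or 4..7 (at least four
   leading bytes addressable) passes the check, while 1..3 or a redzone
   byte such as 0xF1 (negative as a signed char) satisfies
   (X & 7) + 4 - 1 >= ShadowValue and triggers __asan_report_load4 (X).
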
   A call to __asan_init_vN () is inserted into the list of module CTORs.
   N is the version number of the AddressSanitizer API.  The changes between
   the API versions are listed in libsanitizer/asan/asan_interface_internal.h.

   The run-time library redefines malloc (so that redzones are inserted around
   the allocated memory) and free (so that reuse of freed memory is delayed),
   and provides the __asan_report* and __asan_init_vN functions.

   Read more:
   http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm

   The current implementation supports detection of out-of-bounds and
   use-after-free in the heap, on the stack and for global variables.

   [Protection of stack variables]

   To understand how detection of out-of-bounds and use-after-free works
   for stack variables, let's look at this example on x86_64 where the
   stack grows downward:

     int
     foo ()
     {
       char a[23] = {0};
       int b[2] = {0};

       a[5] = 1;
       b[1] = 2;

       return a[5] + b[1];
     }

   For this function, the stack protected by asan will be organized as
   follows, from the top of the stack to the bottom:

     Slot 1/ [red zone of 32 bytes called 'RIGHT RedZone']

     Slot 2/ [8 bytes of red zone, which pad the space of 'a' so that the
              next slot is 32-byte aligned; this one is called the Partial
              Redzone; this 32-byte alignment is an asan constraint]

     Slot 3/ [24 bytes for variable 'a']

     Slot 4/ [red zone of 32 bytes called 'Middle RedZone']

     Slot 5/ [24 bytes of Partial Red Zone (similar to slot 2)]

     Slot 6/ [8 bytes for variable 'b']

     Slot 7/ [32 bytes of Red Zone at the bottom of the stack, called
              'LEFT RedZone']

   The 32 bytes of LEFT red zone at the bottom of the stack can be
   decomposed as such:

     1/ The first 8 bytes contain a magic asan number that is always
        0x41B58AB3.

     2/ The following 8 bytes contain a pointer to a string (to be
        parsed at runtime by the asan run-time library), whose format
        is the following:

          "<function-name> <space> <num-of-variables-on-the-stack>
          (<32-bytes-aligned-offset-in-bytes-of-variable> <space>
          <length-of-var-in-bytes> ){n} "

        where '(...){n}' means the content inside the parenthesis occurs 'n'
        times, with 'n' being the number of variables on the stack.

     3/ The following 8 bytes contain the PC of the current function which
        will be used by the run-time library to print an error message.

     4/ The following 8 bytes are reserved for internal use by the run-time.

   The shadow memory for that stack layout is going to look like this:

     - content of shadow memory 8 bytes for slot 7: 0xF1F1F1F1.
       The F1 byte pattern is a magic number called
       ASAN_STACK_MAGIC_LEFT and is a way for the runtime to know that
       the memory for that shadow byte is part of the LEFT red zone
       intended to sit at the bottom of the variables on the stack.

     - content of shadow memory 8 bytes for slots 6 and 5:
       0xF4F4F400.  The F4 byte pattern is a magic number
       called ASAN_STACK_MAGIC_PARTIAL.  It flags the fact that the
       memory region for this shadow byte is a PARTIAL red zone
       intended to pad a variable A, so that the slot following
       {A,padding} is 32-byte aligned.

       Note that the fact that the least significant byte of this
       shadow memory content is 00 means that 8 bytes of its
       corresponding memory (which corresponds to the memory of
       variable 'b') are addressable.

     - content of shadow memory 8 bytes for slot 4: 0xF2F2F2F2.
       The F2 byte pattern is a magic number called
       ASAN_STACK_MAGIC_MIDDLE.  It flags the fact that the memory
       region for this shadow byte is a MIDDLE red zone intended to
       sit between two 32-byte aligned slots of {variable,padding}.

     - content of shadow memory 8 bytes for slots 3 and 2:
       0xF4000000.  This represents the concatenation of
       variable 'a' and the partial red zone following it, like what we
       had for variable 'b'.  The least significant 3 bytes being 00
       means that the 3 shadow bytes, and thus the 24 bytes of the slot
       of variable 'a', are addressable.

     - content of shadow memory 8 bytes for slot 1: 0xF3F3F3F3.
       The F3 byte pattern is a magic number called
       ASAN_STACK_MAGIC_RIGHT.  It flags the fact that the memory
       region for this shadow byte is a RIGHT red zone intended to sit
       at the top of the variables of the stack.

   Note that the real variable layout is done in expand_used_vars in
   cfgexpand.c.  As far as Address Sanitizer is concerned, it lays out
   stack variables as well as the different red zones, emits some
   prologue code to populate the shadow memory so as to poison (mark as
   non-accessible) the regions of the red zones and mark the regions of
   stack variables as accessible, and emits some epilogue code to
   un-poison (mark as accessible) the regions of red zones right before
   the function exits.

   [Protection of global variables]

   The basic idea is to insert a red zone between two global variables
   and install a constructor function that calls the asan runtime to
   populate the relevant shadow memory regions at load time.

   So the global variables are laid out so as to insert a red zone between
   them.  The size of the red zones is chosen so that each variable starts
   on a 32-byte boundary.

   Then a constructor function is installed so that, for each global
   variable, it calls the runtime asan library function
   __asan_register_globals with an instance of this type:

     struct __asan_global
     {
       // Address of the beginning of the global variable.
       const void *__beg;

       // Initial size of the global variable.
       uptr __size;

       // Size of the global variable + size of the red zone.  This
       // size is 32-byte aligned.
       uptr __size_with_redzone;

       // Name of the global variable.
       const void *__name;

       // Name of the module where the global variable is declared.
       const void *__module_name;

       // 1 if it has dynamic initialization, 0 otherwise.
       uptr __has_dynamic_init;

       // A pointer to a struct that contains the source location;
       // may be NULL.
       __asan_global_source_location *__location;
     }

   A destructor function that calls the runtime asan library function
   __asan_unregister_globals is also installed.  */
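
/* As an illustrative sketch (the exact padding depends on
   ASAN_RED_ZONE_SIZE and on the variable's size and alignment), a
   12-byte global

     int g[3];

   is laid out as if a 52-byte red zone followed it, for a 32-byte
   aligned total of 64 bytes, and the generated constructor registers
   it roughly as

     static struct __asan_global desc
       = { &g, 12, 64, "g", "module.c", 0, NULL };
     __asan_register_globals (&desc, 1);

   with the matching destructor calling
   __asan_unregister_globals (&desc, 1).  */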

/* The shadow memory offset, either taken from the target hook or
   overridden with -fasan-shadow-offset.  */
static unsigned HOST_WIDE_INT asan_shadow_offset_value;
static bool asan_shadow_offset_computed;

/* User-supplied section name patterns from -fsanitize-sections.  */
static vec<char *> sanitized_sections;

/* Temporary holding the address of the most recently redzoned alloca.  */
static tree last_alloca_addr;

/* Set of variable declarations that are going to be guarded by
   use-after-scope sanitizer.  */

hash_set<tree> *asan_handled_variables = NULL;

hash_set <tree> *asan_used_labels = NULL;

/* Sets shadow offset to value in string VAL.  */

bool
set_asan_shadow_offset (const char *val)
{
  char *endp;

  errno = 0;
#ifdef HAVE_LONG_LONG
  asan_shadow_offset_value = strtoull (val, &endp, 0);
#else
  asan_shadow_offset_value = strtoul (val, &endp, 0);
#endif
  if (!(*val != '\0' && *endp == '\0' && errno == 0))
    return false;

  asan_shadow_offset_computed = true;

  return true;
}
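
/* For example, -fasan-shadow-offset=0x7fff8000 arrives here as
   set_asan_shadow_offset ("0x7fff8000") and succeeds, while a value
   with trailing garbage such as "0x7fff8000zz" leaves *endp != '\0'
   and makes the function return false.  */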

/* Set list of user-defined sections that need to be sanitized.  */

void
set_sanitized_sections (const char *sections)
{
  char *pat;
  unsigned i;
  FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
    free (pat);
  sanitized_sections.truncate (0);

  for (const char *s = sections; *s; )
    {
      const char *end;
      for (end = s; *end && *end != ','; ++end);
      size_t len = end - s;
      sanitized_sections.safe_push (xstrndup (s, len));
      s = *end ? end + 1 : end;
    }
}
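
/* For example, -fsanitize-sections=.mysec*,.preinit_array reaches this
   function as set_sanitized_sections (".mysec*,.preinit_array") and
   stores the two fnmatch patterns ".mysec*" and ".preinit_array",
   which section_sanitized_p later matches section names against.  */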

/* Return true if STMT is an ASAN_MARK internal call whose first
   argument equals FLAG.  */

bool
asan_mark_p (gimple *stmt, enum asan_mark_flags flag)
{
  return (gimple_call_internal_p (stmt, IFN_ASAN_MARK)
          && tree_to_uhwi (gimple_call_arg (stmt, 0)) == flag);
}

/* Return true if stack variables should be instrumented.  */

bool
asan_sanitize_stack_p (void)
{
  return (sanitize_flags_p (SANITIZE_ADDRESS) && param_asan_stack);
}

/* Return true if allocas should be protected with redzones.  */

bool
asan_sanitize_allocas_p (void)
{
  return (asan_sanitize_stack_p () && param_asan_protect_allocas);
}

/* Checks whether section SEC should be sanitized.  */

static bool
section_sanitized_p (const char *sec)
{
  char *pat;
  unsigned i;
  FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
    if (fnmatch (pat, sec, FNM_PERIOD) == 0)
      return true;
  return false;
}

/* Returns Asan shadow offset.  */

static unsigned HOST_WIDE_INT
asan_shadow_offset ()
{
  if (!asan_shadow_offset_computed)
    {
      asan_shadow_offset_computed = true;
      asan_shadow_offset_value = targetm.asan_shadow_offset ();
    }
  return asan_shadow_offset_value;
}

alias_set_type asan_shadow_set = -1;

/* Pointer types to 1, 2 or 4 byte integers in shadow memory.  A separate
   alias set is used for all shadow memory accesses.  */
static GTY(()) tree shadow_ptr_types[3];

/* Decl for __asan_option_detect_stack_use_after_return.  */
static GTY(()) tree asan_detect_stack_use_after_return;

/* Hashtable support for memory references used by gimple
   statements.  */

/* This type represents a reference to a memory region.  */
struct asan_mem_ref
{
  /* The expression of the beginning of the memory region.  */
  tree start;

  /* The size of the access.  */
  HOST_WIDE_INT access_size;
};

object_allocator <asan_mem_ref> asan_mem_ref_pool ("asan_mem_ref");

/* Initializes an instance of asan_mem_ref.  */

static void
asan_mem_ref_init (asan_mem_ref *ref, tree start, HOST_WIDE_INT access_size)
{
  ref->start = start;
  ref->access_size = access_size;
}

/* Allocates memory for an instance of asan_mem_ref in the memory
   pool returned by asan_mem_ref_get_alloc_pool and initializes it.
   START is the address of (or the expression pointing to) the
   beginning of the memory reference.  ACCESS_SIZE is the size of the
   access to the referenced memory.  */

static asan_mem_ref*
asan_mem_ref_new (tree start, HOST_WIDE_INT access_size)
{
  asan_mem_ref *ref = asan_mem_ref_pool.allocate ();

  asan_mem_ref_init (ref, start, access_size);
  return ref;
}

/* This builds and returns a pointer to the end of the memory region
   that starts at START and is of length LEN.  */

tree
asan_mem_ref_get_end (tree start, tree len)
{
  if (len == NULL_TREE || integer_zerop (len))
    return start;

  if (!ptrofftype_p (len))
    len = convert_to_ptrofftype (len);

  return fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (start), start, len);
}

/* Return a tree expression that represents the end of the referenced
   memory region.  Beware that this function can actually build a new
   tree expression.  */

tree
asan_mem_ref_get_end (const asan_mem_ref *ref, tree len)
{
  return asan_mem_ref_get_end (ref->start, len);
}

struct asan_mem_ref_hasher : nofree_ptr_hash <asan_mem_ref>
{
  static inline hashval_t hash (const asan_mem_ref *);
  static inline bool equal (const asan_mem_ref *, const asan_mem_ref *);
};

/* Hash a memory reference.  */

inline hashval_t
asan_mem_ref_hasher::hash (const asan_mem_ref *mem_ref)
{
  return iterative_hash_expr (mem_ref->start, 0);
}

/* Compare two memory references.  We accept the length of either
   memory reference to be NULL_TREE.  */

inline bool
asan_mem_ref_hasher::equal (const asan_mem_ref *m1,
                            const asan_mem_ref *m2)
{
  return operand_equal_p (m1->start, m2->start, 0);
}


static hash_table<asan_mem_ref_hasher> *asan_mem_ref_ht;

/* Returns a reference to the hash table containing memory references.
   This function ensures that the hash table is created.  Note that
   this hash table is updated by the function
   update_mem_ref_hash_table.  */

static hash_table<asan_mem_ref_hasher> *
get_mem_ref_hash_table ()
{
  if (!asan_mem_ref_ht)
    asan_mem_ref_ht = new hash_table<asan_mem_ref_hasher> (10);

  return asan_mem_ref_ht;
}

/* Clear all entries from the memory references hash table.  */

static void
empty_mem_ref_hash_table ()
{
  if (asan_mem_ref_ht)
    asan_mem_ref_ht->empty ();
}

/* Free the memory references hash table.  */

static void
free_mem_ref_resources ()
{
  delete asan_mem_ref_ht;
  asan_mem_ref_ht = NULL;

  asan_mem_ref_pool.release ();
}

/* Return true iff the memory reference REF has been instrumented.  */

static bool
has_mem_ref_been_instrumented (tree ref, HOST_WIDE_INT access_size)
{
  asan_mem_ref r;
  asan_mem_ref_init (&r, ref, access_size);

  asan_mem_ref *saved_ref = get_mem_ref_hash_table ()->find (&r);
  return saved_ref && saved_ref->access_size >= access_size;
}

/* Return true iff the memory reference REF has been instrumented.  */

static bool
has_mem_ref_been_instrumented (const asan_mem_ref *ref)
{
  return has_mem_ref_been_instrumented (ref->start, ref->access_size);
}

/* Return true iff access to memory region starting at REF and of
   length LEN has been instrumented.  */

static bool
has_mem_ref_been_instrumented (const asan_mem_ref *ref, tree len)
{
  HOST_WIDE_INT size_in_bytes
    = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;

  return size_in_bytes != -1
    && has_mem_ref_been_instrumented (ref->start, size_in_bytes);
}

/* Set REF to the memory reference present in a gimple assignment
   ASSIGNMENT.  Return true upon successful completion, false
   otherwise.  */

static bool
get_mem_ref_of_assignment (const gassign *assignment,
                           asan_mem_ref *ref,
                           bool *ref_is_store)
{
  gcc_assert (gimple_assign_single_p (assignment));

  if (gimple_store_p (assignment)
      && !gimple_clobber_p (assignment))
    {
      ref->start = gimple_assign_lhs (assignment);
      *ref_is_store = true;
    }
  else if (gimple_assign_load_p (assignment))
    {
      ref->start = gimple_assign_rhs1 (assignment);
      *ref_is_store = false;
    }
  else
    return false;

  ref->access_size = int_size_in_bytes (TREE_TYPE (ref->start));
  return true;
}

/* Return the address of the last allocated dynamic alloca.  */

static tree
get_last_alloca_addr ()
{
  if (last_alloca_addr)
    return last_alloca_addr;

  last_alloca_addr = create_tmp_reg (ptr_type_node, "last_alloca_addr");
  gassign *g = gimple_build_assign (last_alloca_addr, null_pointer_node);
  edge e = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  gsi_insert_on_edge_immediate (e, g);
  return last_alloca_addr;
}

/* Insert an __asan_allocas_unpoison (top, bottom) call before
   the __builtin_stack_restore (new_sp) call.
   The pseudocode of this routine should look like this:
     top = last_alloca_addr;
     bot = new_sp;
     __asan_allocas_unpoison (top, bot);
     last_alloca_addr = new_sp;
     __builtin_stack_restore (new_sp);
   In general, we can't use new_sp as the bot parameter because on some
   architectures SP has a non-zero offset from the dynamic stack area.
   Moreover, on some architectures this offset (STACK_DYNAMIC_OFFSET)
   becomes known for each particular function only after all callees have
   been expanded to RTL.
   The most noticeable example is PowerPC{,64}, see
   http://refspecs.linuxfoundation.org/ELF/ppc64/PPC-elf64abi.html#DYNAM-STACK.
   To overcome the issue we use the following trick: pass new_sp as a second
   parameter to __asan_allocas_unpoison and rewrite it during expansion with
   new_sp + (virtual_dynamic_stack_rtx - sp) later in the
   expand_asan_emit_allocas_unpoison function.  */

static void
handle_builtin_stack_restore (gcall *call, gimple_stmt_iterator *iter)
{
  if (!iter || !asan_sanitize_allocas_p ())
    return;

  tree last_alloca = get_last_alloca_addr ();
  tree restored_stack = gimple_call_arg (call, 0);
  tree fn = builtin_decl_implicit (BUILT_IN_ASAN_ALLOCAS_UNPOISON);
  gimple *g = gimple_build_call (fn, 2, last_alloca, restored_stack);
  gsi_insert_before (iter, g, GSI_SAME_STMT);
  g = gimple_build_assign (last_alloca, restored_stack);
  gsi_insert_before (iter, g, GSI_SAME_STMT);
}
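
/* As a sketch of the transformation above: a block that frees VLAs via

     __builtin_stack_restore (saved_sp);

   (SAVED_SP being whatever SSA name holds the saved stack pointer)
   becomes

     __asan_allocas_unpoison (last_alloca_addr, saved_sp);
     last_alloca_addr = saved_sp;
     __builtin_stack_restore (saved_sp);

   with the second argument of __asan_allocas_unpoison rewritten during
   expansion as described above.  */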

/* Deploy and poison redzones around a __builtin_alloca call.  To do this, we
   should replace this call with another one with changed parameters and
   replace all its uses with the new address, so
       addr = __builtin_alloca (old_size, align);
   is replaced by
       left_redzone_size = max (align, ASAN_RED_ZONE_SIZE);
   The following two statements are optimized out if we know that
   old_size & (ASAN_RED_ZONE_SIZE - 1) == 0, i.e. the alloca doesn't need a
   partial redzone.
       misalign = old_size & (ASAN_RED_ZONE_SIZE - 1);
       partial_redzone_size = ASAN_RED_ZONE_SIZE - misalign;
       right_redzone_size = ASAN_RED_ZONE_SIZE;
       additional_size = left_redzone_size + partial_redzone_size +
                         right_redzone_size;
       new_size = old_size + additional_size;
       new_alloca = __builtin_alloca (new_size, max (align, 32))
       __asan_alloca_poison (new_alloca, old_size)
       addr = new_alloca + max (align, ASAN_RED_ZONE_SIZE);
       last_alloca_addr = new_alloca;
   ADDITIONAL_SIZE is added to make the new memory allocation contain not
   only the requested memory, but also the left, partial and right redzones,
   as well as the additional space required by alignment.  */

static void
handle_builtin_alloca (gcall *call, gimple_stmt_iterator *iter)
{
  if (!iter || !asan_sanitize_allocas_p ())
    return;

  gassign *g;
  gcall *gg;
  const HOST_WIDE_INT redzone_mask = ASAN_RED_ZONE_SIZE - 1;

  tree last_alloca = get_last_alloca_addr ();
  tree callee = gimple_call_fndecl (call);
  tree old_size = gimple_call_arg (call, 0);
  tree ptr_type = gimple_call_lhs (call) ? TREE_TYPE (gimple_call_lhs (call))
                                         : ptr_type_node;
  tree partial_size = NULL_TREE;
  unsigned int align
    = DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA
      ? 0 : tree_to_uhwi (gimple_call_arg (call, 1));

  /* If ALIGN > ASAN_RED_ZONE_SIZE, we embed left redzone into first ALIGN
     bytes of allocated space.  Otherwise, align alloca to ASAN_RED_ZONE_SIZE
     manually.  */
  align = MAX (align, ASAN_RED_ZONE_SIZE * BITS_PER_UNIT);

  tree alloca_rz_mask = build_int_cst (size_type_node, redzone_mask);
  tree redzone_size = build_int_cst (size_type_node, ASAN_RED_ZONE_SIZE);

  /* Extract lower bits from old_size.  */
  wide_int size_nonzero_bits = get_nonzero_bits (old_size);
  wide_int rz_mask
    = wi::uhwi (redzone_mask, wi::get_precision (size_nonzero_bits));
  wide_int old_size_lower_bits = wi::bit_and (size_nonzero_bits, rz_mask);

  /* If alloca size is aligned to ASAN_RED_ZONE_SIZE, we don't need partial
     redzone.  Otherwise, compute its size here.  */
  if (wi::ne_p (old_size_lower_bits, 0))
    {
      /* misalign = size & (ASAN_RED_ZONE_SIZE - 1)
         partial_size = ASAN_RED_ZONE_SIZE - misalign.  */
      g = gimple_build_assign (make_ssa_name (size_type_node, NULL),
                               BIT_AND_EXPR, old_size, alloca_rz_mask);
      gsi_insert_before (iter, g, GSI_SAME_STMT);
      tree misalign = gimple_assign_lhs (g);
      g = gimple_build_assign (make_ssa_name (size_type_node, NULL), MINUS_EXPR,
                               redzone_size, misalign);
      gsi_insert_before (iter, g, GSI_SAME_STMT);
      partial_size = gimple_assign_lhs (g);
    }

  /* additional_size = align + ASAN_RED_ZONE_SIZE.  */
  tree additional_size = build_int_cst (size_type_node, align / BITS_PER_UNIT
                                                        + ASAN_RED_ZONE_SIZE);
  /* If alloca has partial redzone, include it to additional_size too.  */
  if (partial_size)
    {
      /* additional_size += partial_size.  */
      g = gimple_build_assign (make_ssa_name (size_type_node), PLUS_EXPR,
                               partial_size, additional_size);
      gsi_insert_before (iter, g, GSI_SAME_STMT);
      additional_size = gimple_assign_lhs (g);
    }

  /* new_size = old_size + additional_size.  */
  g = gimple_build_assign (make_ssa_name (size_type_node), PLUS_EXPR, old_size,
                           additional_size);
  gsi_insert_before (iter, g, GSI_SAME_STMT);
  tree new_size = gimple_assign_lhs (g);

  /* Build new __builtin_alloca call:
       new_alloca_with_rz = __builtin_alloca (new_size, align).  */
  tree fn = builtin_decl_implicit (BUILT_IN_ALLOCA_WITH_ALIGN);
  gg = gimple_build_call (fn, 2, new_size,
                          build_int_cst (size_type_node, align));
  tree new_alloca_with_rz = make_ssa_name (ptr_type, gg);
  gimple_call_set_lhs (gg, new_alloca_with_rz);
  gsi_insert_before (iter, gg, GSI_SAME_STMT);

  /* new_alloca = new_alloca_with_rz + align.  */
  g = gimple_build_assign (make_ssa_name (ptr_type), POINTER_PLUS_EXPR,
                           new_alloca_with_rz,
                           build_int_cst (size_type_node,
                                          align / BITS_PER_UNIT));
  gsi_insert_before (iter, g, GSI_SAME_STMT);
  tree new_alloca = gimple_assign_lhs (g);

  /* Poison newly created alloca redzones:
       __asan_alloca_poison (new_alloca, old_size).  */
  fn = builtin_decl_implicit (BUILT_IN_ASAN_ALLOCA_POISON);
  gg = gimple_build_call (fn, 2, new_alloca, old_size);
  gsi_insert_before (iter, gg, GSI_SAME_STMT);

  /* Save new_alloca_with_rz value into last_alloca to use it during
     allocas unpoisoning.  */
  g = gimple_build_assign (last_alloca, new_alloca_with_rz);
  gsi_insert_before (iter, g, GSI_SAME_STMT);

  /* Finally, replace old alloca ptr with NEW_ALLOCA.  */
  replace_call_with_value (iter, new_alloca);
}
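
/* Worked example with illustrative numbers: for

     addr = __builtin_alloca (17);

   with ASAN_RED_ZONE_SIZE == 32 and no stricter alignment requested,
   align is raised to 32 bytes (256 bits) and

     misalign        = 17 & 31       = 17
     partial_size    = 32 - 17       = 15
     additional_size = 32 + 32 + 15  = 79   (left + right + partial)
     new_size        = 17 + 79       = 96

   so the code above emits, in effect,

     new_alloca_with_rz = __builtin_alloca_with_align (96, 256);
     addr = new_alloca_with_rz + 32;
     __asan_alloca_poison (addr, 17);
     last_alloca_addr = new_alloca_with_rz;

   leaving the 17 user bytes addressable and the surrounding red zones
   poisoned.  */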

/* Return the memory references contained in a gimple statement
   representing a builtin call that has to do with memory access.  */

static bool
get_mem_refs_of_builtin_call (gcall *call,
                              asan_mem_ref *src0,
                              tree *src0_len,
                              bool *src0_is_store,
                              asan_mem_ref *src1,
                              tree *src1_len,
                              bool *src1_is_store,
                              asan_mem_ref *dst,
                              tree *dst_len,
                              bool *dst_is_store,
                              bool *dest_is_deref,
                              bool *intercepted_p,
                              gimple_stmt_iterator *iter = NULL)
{
  gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));

  tree callee = gimple_call_fndecl (call);
  tree source0 = NULL_TREE, source1 = NULL_TREE,
    dest = NULL_TREE, len = NULL_TREE;
  bool is_store = true, got_reference_p = false;
  HOST_WIDE_INT access_size = 1;

  *intercepted_p = asan_intercepted_p ((DECL_FUNCTION_CODE (callee)));

  switch (DECL_FUNCTION_CODE (callee))
    {
      /* (s, s, n) style memops.  */
    case BUILT_IN_BCMP:
    case BUILT_IN_MEMCMP:
      source0 = gimple_call_arg (call, 0);
      source1 = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (src, dest, n) style memops.  */
    case BUILT_IN_BCOPY:
      source0 = gimple_call_arg (call, 0);
      dest = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (dest, src, n) style memops.  */
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMMOVE:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMPCPY:
    case BUILT_IN_MEMPCPY_CHK:
      dest = gimple_call_arg (call, 0);
      source0 = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (dest, n) style memops.  */
    case BUILT_IN_BZERO:
      dest = gimple_call_arg (call, 0);
      len = gimple_call_arg (call, 1);
      break;

      /* (dest, x, n) style memops.  */
    case BUILT_IN_MEMSET:
    case BUILT_IN_MEMSET_CHK:
      dest = gimple_call_arg (call, 0);
      len = gimple_call_arg (call, 2);
      break;

    case BUILT_IN_STRLEN:
      source0 = gimple_call_arg (call, 0);
      len = gimple_call_lhs (call);
      break;

    case BUILT_IN_STACK_RESTORE:
      handle_builtin_stack_restore (call, iter);
      break;

    CASE_BUILT_IN_ALLOCA:
      handle_builtin_alloca (call, iter);
      break;
      /* And now the __atomic* and __sync builtins.
         These are handled differently from the classical memory
         access builtins above.  */

    case BUILT_IN_ATOMIC_LOAD_1:
      is_store = false;
      /* FALLTHRU */
    case BUILT_IN_SYNC_FETCH_AND_ADD_1:
    case BUILT_IN_SYNC_FETCH_AND_SUB_1:
    case BUILT_IN_SYNC_FETCH_AND_OR_1:
    case BUILT_IN_SYNC_FETCH_AND_AND_1:
    case BUILT_IN_SYNC_FETCH_AND_XOR_1:
    case BUILT_IN_SYNC_FETCH_AND_NAND_1:
    case BUILT_IN_SYNC_ADD_AND_FETCH_1:
    case BUILT_IN_SYNC_SUB_AND_FETCH_1:
    case BUILT_IN_SYNC_OR_AND_FETCH_1:
    case BUILT_IN_SYNC_AND_AND_FETCH_1:
    case BUILT_IN_SYNC_XOR_AND_FETCH_1:
    case BUILT_IN_SYNC_NAND_AND_FETCH_1:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
    case BUILT_IN_SYNC_LOCK_RELEASE_1:
    case BUILT_IN_ATOMIC_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_STORE_1:
    case BUILT_IN_ATOMIC_ADD_FETCH_1:
    case BUILT_IN_ATOMIC_SUB_FETCH_1:
    case BUILT_IN_ATOMIC_AND_FETCH_1:
    case BUILT_IN_ATOMIC_NAND_FETCH_1:
    case BUILT_IN_ATOMIC_XOR_FETCH_1:
    case BUILT_IN_ATOMIC_OR_FETCH_1:
    case BUILT_IN_ATOMIC_FETCH_ADD_1:
    case BUILT_IN_ATOMIC_FETCH_SUB_1:
    case BUILT_IN_ATOMIC_FETCH_AND_1:
    case BUILT_IN_ATOMIC_FETCH_NAND_1:
    case BUILT_IN_ATOMIC_FETCH_XOR_1:
    case BUILT_IN_ATOMIC_FETCH_OR_1:
      access_size = 1;
      goto do_atomic;

    case BUILT_IN_ATOMIC_LOAD_2:
      is_store = false;
      /* FALLTHRU */
    case BUILT_IN_SYNC_FETCH_AND_ADD_2:
    case BUILT_IN_SYNC_FETCH_AND_SUB_2:
    case BUILT_IN_SYNC_FETCH_AND_OR_2:
    case BUILT_IN_SYNC_FETCH_AND_AND_2:
    case BUILT_IN_SYNC_FETCH_AND_XOR_2:
    case BUILT_IN_SYNC_FETCH_AND_NAND_2:
    case BUILT_IN_SYNC_ADD_AND_FETCH_2:
    case BUILT_IN_SYNC_SUB_AND_FETCH_2:
    case BUILT_IN_SYNC_OR_AND_FETCH_2:
    case BUILT_IN_SYNC_AND_AND_FETCH_2:
    case BUILT_IN_SYNC_XOR_AND_FETCH_2:
    case BUILT_IN_SYNC_NAND_AND_FETCH_2:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
    case BUILT_IN_SYNC_LOCK_RELEASE_2:
    case BUILT_IN_ATOMIC_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_STORE_2:
    case BUILT_IN_ATOMIC_ADD_FETCH_2:
    case BUILT_IN_ATOMIC_SUB_FETCH_2:
    case BUILT_IN_ATOMIC_AND_FETCH_2:
    case BUILT_IN_ATOMIC_NAND_FETCH_2:
    case BUILT_IN_ATOMIC_XOR_FETCH_2:
    case BUILT_IN_ATOMIC_OR_FETCH_2:
    case BUILT_IN_ATOMIC_FETCH_ADD_2:
    case BUILT_IN_ATOMIC_FETCH_SUB_2:
    case BUILT_IN_ATOMIC_FETCH_AND_2:
    case BUILT_IN_ATOMIC_FETCH_NAND_2:
    case BUILT_IN_ATOMIC_FETCH_XOR_2:
    case BUILT_IN_ATOMIC_FETCH_OR_2:
      access_size = 2;
      goto do_atomic;

    case BUILT_IN_ATOMIC_LOAD_4:
      is_store = false;
      /* FALLTHRU */
    case BUILT_IN_SYNC_FETCH_AND_ADD_4:
    case BUILT_IN_SYNC_FETCH_AND_SUB_4:
    case BUILT_IN_SYNC_FETCH_AND_OR_4:
    case BUILT_IN_SYNC_FETCH_AND_AND_4:
    case BUILT_IN_SYNC_FETCH_AND_XOR_4:
    case BUILT_IN_SYNC_FETCH_AND_NAND_4:
    case BUILT_IN_SYNC_ADD_AND_FETCH_4:
    case BUILT_IN_SYNC_SUB_AND_FETCH_4:
    case BUILT_IN_SYNC_OR_AND_FETCH_4:
    case BUILT_IN_SYNC_AND_AND_FETCH_4:
    case BUILT_IN_SYNC_XOR_AND_FETCH_4:
    case BUILT_IN_SYNC_NAND_AND_FETCH_4:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
    case BUILT_IN_SYNC_LOCK_RELEASE_4:
    case BUILT_IN_ATOMIC_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_STORE_4:
    case BUILT_IN_ATOMIC_ADD_FETCH_4:
    case BUILT_IN_ATOMIC_SUB_FETCH_4:
    case BUILT_IN_ATOMIC_AND_FETCH_4:
    case BUILT_IN_ATOMIC_NAND_FETCH_4:
    case BUILT_IN_ATOMIC_XOR_FETCH_4:
    case BUILT_IN_ATOMIC_OR_FETCH_4:
    case BUILT_IN_ATOMIC_FETCH_ADD_4:
    case BUILT_IN_ATOMIC_FETCH_SUB_4:
    case BUILT_IN_ATOMIC_FETCH_AND_4:
    case BUILT_IN_ATOMIC_FETCH_NAND_4:
    case BUILT_IN_ATOMIC_FETCH_XOR_4:
    case BUILT_IN_ATOMIC_FETCH_OR_4:
      access_size = 4;
      goto do_atomic;

    case BUILT_IN_ATOMIC_LOAD_8:
      is_store = false;
      /* FALLTHRU */
    case BUILT_IN_SYNC_FETCH_AND_ADD_8:
    case BUILT_IN_SYNC_FETCH_AND_SUB_8:
    case BUILT_IN_SYNC_FETCH_AND_OR_8:
    case BUILT_IN_SYNC_FETCH_AND_AND_8:
    case BUILT_IN_SYNC_FETCH_AND_XOR_8:
    case BUILT_IN_SYNC_FETCH_AND_NAND_8:
    case BUILT_IN_SYNC_ADD_AND_FETCH_8:
    case BUILT_IN_SYNC_SUB_AND_FETCH_8:
    case BUILT_IN_SYNC_OR_AND_FETCH_8:
    case BUILT_IN_SYNC_AND_AND_FETCH_8:
    case BUILT_IN_SYNC_XOR_AND_FETCH_8:
    case BUILT_IN_SYNC_NAND_AND_FETCH_8:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
    case BUILT_IN_SYNC_LOCK_RELEASE_8:
    case BUILT_IN_ATOMIC_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_STORE_8:
    case BUILT_IN_ATOMIC_ADD_FETCH_8:
    case BUILT_IN_ATOMIC_SUB_FETCH_8:
    case BUILT_IN_ATOMIC_AND_FETCH_8:
    case BUILT_IN_ATOMIC_NAND_FETCH_8:
    case BUILT_IN_ATOMIC_XOR_FETCH_8:
    case BUILT_IN_ATOMIC_OR_FETCH_8:
    case BUILT_IN_ATOMIC_FETCH_ADD_8:
    case BUILT_IN_ATOMIC_FETCH_SUB_8:
    case BUILT_IN_ATOMIC_FETCH_AND_8:
    case BUILT_IN_ATOMIC_FETCH_NAND_8:
    case BUILT_IN_ATOMIC_FETCH_XOR_8:
    case BUILT_IN_ATOMIC_FETCH_OR_8:
      access_size = 8;
      goto do_atomic;

    case BUILT_IN_ATOMIC_LOAD_16:
      is_store = false;
      /* FALLTHRU */
    case BUILT_IN_SYNC_FETCH_AND_ADD_16:
    case BUILT_IN_SYNC_FETCH_AND_SUB_16:
    case BUILT_IN_SYNC_FETCH_AND_OR_16:
    case BUILT_IN_SYNC_FETCH_AND_AND_16:
    case BUILT_IN_SYNC_FETCH_AND_XOR_16:
    case BUILT_IN_SYNC_FETCH_AND_NAND_16:
    case BUILT_IN_SYNC_ADD_AND_FETCH_16:
    case BUILT_IN_SYNC_SUB_AND_FETCH_16:
    case BUILT_IN_SYNC_OR_AND_FETCH_16:
    case BUILT_IN_SYNC_AND_AND_FETCH_16:
    case BUILT_IN_SYNC_XOR_AND_FETCH_16:
    case BUILT_IN_SYNC_NAND_AND_FETCH_16:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
    case BUILT_IN_SYNC_LOCK_RELEASE_16:
    case BUILT_IN_ATOMIC_EXCHANGE_16:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
    case BUILT_IN_ATOMIC_STORE_16:
    case BUILT_IN_ATOMIC_ADD_FETCH_16:
    case BUILT_IN_ATOMIC_SUB_FETCH_16:
    case BUILT_IN_ATOMIC_AND_FETCH_16:
    case BUILT_IN_ATOMIC_NAND_FETCH_16:
    case BUILT_IN_ATOMIC_XOR_FETCH_16:
    case BUILT_IN_ATOMIC_OR_FETCH_16:
    case BUILT_IN_ATOMIC_FETCH_ADD_16:
    case BUILT_IN_ATOMIC_FETCH_SUB_16:
    case BUILT_IN_ATOMIC_FETCH_AND_16:
    case BUILT_IN_ATOMIC_FETCH_NAND_16:
    case BUILT_IN_ATOMIC_FETCH_XOR_16:
    case BUILT_IN_ATOMIC_FETCH_OR_16:
      access_size = 16;
      /* FALLTHRU */
    do_atomic:
      {
        dest = gimple_call_arg (call, 0);
        /* DEST represents the address of a memory location.
           instrument_derefs wants the memory location, so let's
           dereference the address DEST before handing it to
           instrument_derefs.  */
        tree type = build_nonstandard_integer_type (access_size
                                                    * BITS_PER_UNIT, 1);
        dest = build2 (MEM_REF, type, dest,
                       build_int_cst (build_pointer_type (char_type_node), 0));
        break;
      }

    default:
      /* The other builtin memory accesses are not instrumented in this
         function because they either don't have any length parameter,
         or their length parameter is just a limit.  */
      break;
    }

  if (len != NULL_TREE)
    {
      if (source0 != NULL_TREE)
        {
          src0->start = source0;
          src0->access_size = access_size;
          *src0_len = len;
          *src0_is_store = false;
        }

      if (source1 != NULL_TREE)
        {
          src1->start = source1;
          src1->access_size = access_size;
          *src1_len = len;
          *src1_is_store = false;
        }

      if (dest != NULL_TREE)
        {
          dst->start = dest;
          dst->access_size = access_size;
          *dst_len = len;
          *dst_is_store = true;
        }

      got_reference_p = true;
    }
  else if (dest)
    {
      dst->start = dest;
      dst->access_size = access_size;
      *dst_len = NULL_TREE;
      *dst_is_store = is_store;
      *dest_is_deref = true;
      got_reference_p = true;
    }

  return got_reference_p;
}

/* Return true iff a given gimple statement has been instrumented.
   Note that the statement is "defined" by the memory references it
   contains.  */

static bool
has_stmt_been_instrumented_p (gimple *stmt)
{
  if (gimple_assign_single_p (stmt))
    {
      bool r_is_store;
      asan_mem_ref r;
      asan_mem_ref_init (&r, NULL, 1);

      if (get_mem_ref_of_assignment (as_a <gassign *> (stmt), &r,
                                     &r_is_store))
        return has_mem_ref_been_instrumented (&r);
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      asan_mem_ref src0, src1, dest;
      asan_mem_ref_init (&src0, NULL, 1);
      asan_mem_ref_init (&src1, NULL, 1);
      asan_mem_ref_init (&dest, NULL, 1);

      tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
      bool src0_is_store = false, src1_is_store = false,
        dest_is_store = false, dest_is_deref = false, intercepted_p = true;
      if (get_mem_refs_of_builtin_call (as_a <gcall *> (stmt),
                                        &src0, &src0_len, &src0_is_store,
                                        &src1, &src1_len, &src1_is_store,
                                        &dest, &dest_len, &dest_is_store,
                                        &dest_is_deref, &intercepted_p))
        {
          if (src0.start != NULL_TREE
              && !has_mem_ref_been_instrumented (&src0, src0_len))
            return false;

          if (src1.start != NULL_TREE
              && !has_mem_ref_been_instrumented (&src1, src1_len))
            return false;

          if (dest.start != NULL_TREE
              && !has_mem_ref_been_instrumented (&dest, dest_len))
            return false;

          return true;
        }
    }
  else if (is_gimple_call (stmt) && gimple_store_p (stmt))
    {
      asan_mem_ref r;
      asan_mem_ref_init (&r, NULL, 1);

      r.start = gimple_call_lhs (stmt);
      r.access_size = int_size_in_bytes (TREE_TYPE (r.start));
      return has_mem_ref_been_instrumented (&r);
    }

  return false;
}

/* Insert a memory reference into the hash table.  */

static void
update_mem_ref_hash_table (tree ref, HOST_WIDE_INT access_size)
{
  hash_table<asan_mem_ref_hasher> *ht = get_mem_ref_hash_table ();

  asan_mem_ref r;
  asan_mem_ref_init (&r, ref, access_size);

  asan_mem_ref **slot = ht->find_slot (&r, INSERT);
  if (*slot == NULL || (*slot)->access_size < access_size)
    *slot = asan_mem_ref_new (ref, access_size);
}
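
/* For example, once a 4-byte access to *p has been instrumented and
   recorded here, has_mem_ref_been_instrumented reports a later 1- or
   4-byte access to *p as already covered, whereas an 8-byte access to
   *p is not covered and, when instrumented, bumps the recorded
   access_size up to 8.  */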

/* Initialize shadow_ptr_types array.  */

static void
asan_init_shadow_ptr_types (void)
{
  asan_shadow_set = new_alias_set ();
  tree types[3] = { signed_char_type_node, short_integer_type_node,
                    integer_type_node };

  for (unsigned i = 0; i < 3; i++)
    {
      shadow_ptr_types[i] = build_distinct_type_copy (types[i]);
      TYPE_ALIAS_SET (shadow_ptr_types[i]) = asan_shadow_set;
      shadow_ptr_types[i] = build_pointer_type (shadow_ptr_types[i]);
    }

  initialize_sanitizer_builtins ();
}

/* Create an ADDR_EXPR of a STRING_CST holding the text of the pretty
   printer PP.  */

static tree
asan_pp_string (pretty_printer *pp)
{
  const char *buf = pp_formatted_text (pp);
  size_t len = strlen (buf);
  tree ret = build_string (len + 1, buf);
  TREE_TYPE (ret)
    = build_array_type (TREE_TYPE (shadow_ptr_types[0]),
                        build_index_type (size_int (len)));
  TREE_READONLY (ret) = 1;
  TREE_STATIC (ret) = 1;
  return build1 (ADDR_EXPR, shadow_ptr_types[0], ret);
}

/* Clear LEN bytes of shadow memory at SHADOW_MEM.  We can't use a
   library call here, though.  */

static void
asan_clear_shadow (rtx shadow_mem, HOST_WIDE_INT len)
{
  rtx_insn *insn, *insns, *jump;
  rtx_code_label *top_label;
  rtx end, addr, tmp;

  gcc_assert ((len & 3) == 0);
  start_sequence ();
  clear_storage (shadow_mem, GEN_INT (len), BLOCK_OP_NORMAL);
  insns = get_insns ();
  end_sequence ();
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (CALL_P (insn))
      break;
  if (insn == NULL_RTX)
    {
      emit_insn (insns);
      return;
    }

  top_label = gen_label_rtx ();
  addr = copy_to_mode_reg (Pmode, XEXP (shadow_mem, 0));
  shadow_mem = adjust_automodify_address (shadow_mem, SImode, addr, 0);
  end = force_reg (Pmode, plus_constant (Pmode, addr, len));
  emit_label (top_label);

  emit_move_insn (shadow_mem, const0_rtx);
  tmp = expand_simple_binop (Pmode, PLUS, addr, gen_int_mode (4, Pmode), addr,
                             true, OPTAB_LIB_WIDEN);
  if (tmp != addr)
    emit_move_insn (addr, tmp);
  emit_cmp_and_jump_insns (addr, end, LT, NULL_RTX, Pmode, true, top_label);
  jump = get_last_insn ();
  gcc_assert (JUMP_P (jump));
  add_reg_br_prob_note (jump,
                        profile_probability::guessed_always ()
                        .apply_scale (80, 100));
}

/* Emit the "LASANPC" label at the start of the current function; its
   address is stored in the stack frame description and lets the run-time
   library report the PC of the function in error messages.  */

void
asan_function_start (void)
{
  section *fnsec = function_section (current_function_decl);
  switch_to_section (fnsec);
  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LASANPC",
                          current_function_funcdef_no);
}

/* Return the number of shadow bytes that are occupied by a local
   variable of SIZE bytes.  */

static unsigned HOST_WIDE_INT
shadow_mem_size (unsigned HOST_WIDE_INT size)
{
  /* It must be possible to align stack variables to the granularity
     of shadow memory.  */
  gcc_assert (BITS_PER_UNIT
              * ASAN_SHADOW_GRANULARITY <= MAX_SUPPORTED_STACK_ALIGNMENT);

  return ROUND_UP (size, ASAN_SHADOW_GRANULARITY) / ASAN_SHADOW_GRANULARITY;
}
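
/* For example, with the usual 8-byte shadow granularity a 23-byte
   variable occupies ROUND_UP (23, 8) / 8 = 3 shadow bytes.  */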

/* Always emit 4 bytes at a time.  */
#define RZ_BUFFER_SIZE 4

/* ASAN redzone buffer container that handles emission of shadow bytes.  */
class asan_redzone_buffer
{
public:
  /* Constructor.  */
  asan_redzone_buffer (rtx shadow_mem, HOST_WIDE_INT prev_offset):
    m_shadow_mem (shadow_mem), m_prev_offset (prev_offset),
    m_original_offset (prev_offset), m_shadow_bytes (RZ_BUFFER_SIZE)
  {}

  /* Emit VALUE shadow byte at a given OFFSET.  */
  void emit_redzone_byte (HOST_WIDE_INT offset, unsigned char value);

  /* Emit the buffered shadow bytes as RTL.  */
  void flush_redzone_payload (void);

private:
  /* Flush if the content of the buffer is full
     (equal to RZ_BUFFER_SIZE).  */
  void flush_if_full (void);

  /* Memory where we last emitted a redzone payload.  */
  rtx m_shadow_mem;

  /* Relative offset where we last emitted a redzone payload.  */
  HOST_WIDE_INT m_prev_offset;

  /* Relative original offset.  Used for checking only.  */
  HOST_WIDE_INT m_original_offset;

public:
  /* Buffer with redzone payload.  */
  auto_vec<unsigned char> m_shadow_bytes;
};

/* Emit VALUE shadow byte at a given OFFSET.  */

void
asan_redzone_buffer::emit_redzone_byte (HOST_WIDE_INT offset,
                                        unsigned char value)
{
  gcc_assert ((offset & (ASAN_SHADOW_GRANULARITY - 1)) == 0);
  gcc_assert (offset >= m_prev_offset);

  HOST_WIDE_INT off
    = m_prev_offset + ASAN_SHADOW_GRANULARITY * m_shadow_bytes.length ();
  if (off == offset)
    {
      /* Consecutive shadow memory byte.  */
      m_shadow_bytes.safe_push (value);
      flush_if_full ();
    }
  else
    {
      if (!m_shadow_bytes.is_empty ())
        flush_redzone_payload ();

      /* Maybe start earlier in order to use an aligned store.  */
      HOST_WIDE_INT align = (offset - m_prev_offset) % ASAN_RED_ZONE_SIZE;
      if (align)
        {
          offset -= align;
          for (unsigned i = 0; i < align / BITS_PER_UNIT; i++)
            m_shadow_bytes.safe_push (0);
        }

      /* Adjust m_prev_offset and m_shadow_mem.  */
      HOST_WIDE_INT diff = offset - m_prev_offset;
      m_shadow_mem = adjust_address (m_shadow_mem, VOIDmode,
                                     diff >> ASAN_SHADOW_SHIFT);
      m_prev_offset = offset;
      m_shadow_bytes.safe_push (value);
      flush_if_full ();
    }
}

/* Emit the buffered shadow bytes as RTL.  */

void
asan_redzone_buffer::flush_redzone_payload (void)
{
  gcc_assert (WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN);

  if (m_shadow_bytes.is_empty ())
    return;

  /* Be sure we always emit to an aligned address.  */
  gcc_assert (((m_prev_offset - m_original_offset)
               & (ASAN_RED_ZONE_SIZE - 1)) == 0);

  /* Pad the buffer to RZ_BUFFER_SIZE bytes with zeros if needed.  */
  unsigned l = m_shadow_bytes.length ();
  for (unsigned i = 0; i < RZ_BUFFER_SIZE - l; i++)
    m_shadow_bytes.safe_push (0);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file,
             "Flushing rzbuffer at offset %" PRId64 " with: ", m_prev_offset);

  unsigned HOST_WIDE_INT val = 0;
  for (unsigned i = 0; i < RZ_BUFFER_SIZE; i++)
    {
      unsigned char v
        = m_shadow_bytes[BYTES_BIG_ENDIAN ? RZ_BUFFER_SIZE - i - 1 : i];
      val |= (unsigned HOST_WIDE_INT)v << (BITS_PER_UNIT * i);
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "%02x ", v);
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n");

  rtx c = gen_int_mode (val, SImode);
  m_shadow_mem = adjust_address (m_shadow_mem, SImode, 0);
  emit_move_insn (m_shadow_mem, c);
  m_shadow_bytes.truncate (0);
}
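
/* For example, a 32-byte left red zone whose shadow starts at relative
   offset 0 yields emit_redzone_byte calls at offsets 0, 8, 16 and 24;
   the four buffered 0xF1 bytes are then flushed as a single SImode
   store of 0xf1f1f1f1 (with the byte order mirrored on big-endian
   targets).  */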
1343
|
|
1344 /* Flush if the content of the buffer is full
|
|
1345 (equal to RZ_BUFFER_SIZE). */
|
|
1346
|
|
1347 void
|
|
1348 asan_redzone_buffer::flush_if_full (void)
|
|
1349 {
|
|
1350 if (m_shadow_bytes.length () == RZ_BUFFER_SIZE)
|
|
1351 flush_redzone_payload ();
|
|
1352 }
|
|
1353
|
111
|
1354 /* Insert code to protect stack vars. The prologue sequence should be emitted
|
|
1355 directly, epilogue sequence returned. BASE is the register holding the
|
|
1356 stack base, against which OFFSETS array offsets are relative to, OFFSETS
|
|
1357 array contains pairs of offsets in reverse order, always the end offset
|
|
1358 of some gap that needs protection followed by starting offset,
|
|
1359 and DECLS is an array of representative decls for each var partition.
|
|
1360 LENGTH is the length of the OFFSETS array, DECLS array is LENGTH / 2 - 1
|
|
1361 elements long (OFFSETS include gap before the first variable as well
|
|
1362 as gaps after each stack variable). PBASE is, if non-NULL, some pseudo
|
|
1363 register which stack vars DECL_RTLs are based on. Either BASE should be
|
|
1364 assigned to PBASE, when not doing use after return protection, or
|
|
1365 corresponding address based on __asan_stack_malloc* return value. */
|
|
1366
|
|
1367 rtx_insn *
|
|
1368 asan_emit_stack_protection (rtx base, rtx pbase, unsigned int alignb,
|
|
1369 HOST_WIDE_INT *offsets, tree *decls, int length)
|
|
1370 {
|
|
1371 rtx shadow_base, shadow_mem, ret, mem, orig_base;
|
|
1372 rtx_code_label *lab;
|
|
1373 rtx_insn *insns;
|
|
1374 char buf[32];
|
|
1375 HOST_WIDE_INT base_offset = offsets[length - 1];
|
|
1376 HOST_WIDE_INT base_align_bias = 0, offset, prev_offset;
|
|
1377 HOST_WIDE_INT asan_frame_size = offsets[0] - base_offset;
|
145
|
1378 HOST_WIDE_INT last_offset, last_size, last_size_aligned;
|
111
|
1379 int l;
|
|
1380 unsigned char cur_shadow_byte = ASAN_STACK_MAGIC_LEFT;
|
|
1381 tree str_cst, decl, id;
|
|
1382 int use_after_return_class = -1;
|
|
1383
|
|
1384 if (shadow_ptr_types[0] == NULL_TREE)
|
|
1385 asan_init_shadow_ptr_types ();
|
|
1386
|
131
|
1387 expanded_location cfun_xloc
|
|
1388 = expand_location (DECL_SOURCE_LOCATION (current_function_decl));
|
|
1389
|
111
|
1390 /* First of all, prepare the description string. */
|
|
1391 pretty_printer asan_pp;
|
|
1392
|
|
1393 pp_decimal_int (&asan_pp, length / 2 - 1);
|
|
1394 pp_space (&asan_pp);
|
|
1395 for (l = length - 2; l; l -= 2)
|
|
1396 {
|
|
1397 tree decl = decls[l / 2 - 1];
|
|
1398 pp_wide_integer (&asan_pp, offsets[l] - base_offset);
|
|
1399 pp_space (&asan_pp);
|
|
1400 pp_wide_integer (&asan_pp, offsets[l - 1] - offsets[l]);
|
|
1401 pp_space (&asan_pp);
|
131
|
1402
|
|
1403 expanded_location xloc
|
|
1404 = expand_location (DECL_SOURCE_LOCATION (decl));
|
|
1405 char location[32];
|
|
1406
|
|
1407 if (xloc.file == cfun_xloc.file)
|
|
1408 sprintf (location, ":%d", xloc.line);
|
|
1409 else
|
|
1410 location[0] = '\0';
|
|
1411
|
111
|
1412 if (DECL_P (decl) && DECL_NAME (decl))
|
|
1413 {
|
131
|
1414 unsigned idlen
|
|
1415 = IDENTIFIER_LENGTH (DECL_NAME (decl)) + strlen (location);
|
|
1416 pp_decimal_int (&asan_pp, idlen);
|
111
|
1417 pp_space (&asan_pp);
|
|
1418 pp_tree_identifier (&asan_pp, DECL_NAME (decl));
|
131
|
1419 pp_string (&asan_pp, location);
|
111
|
1420 }
|
|
1421 else
|
|
1422 pp_string (&asan_pp, "9 <unknown>");
|
131
|
1423
|
|
1424 if (l > 2)
|
|
1425 pp_space (&asan_pp);
|
111
|
1426 }
|
|
1427 str_cst = asan_pp_string (&asan_pp);
|
|
1428
|
|
1429 /* Emit the prologue sequence. */
|
|
1430 if (asan_frame_size > 32 && asan_frame_size <= 65536 && pbase
|
145
|
1431 && param_asan_use_after_return)
|
111
|
1432 {
|
|
1433 use_after_return_class = floor_log2 (asan_frame_size - 1) - 5;
|
|
1434 /* __asan_stack_malloc_N guarantees alignment
|
|
1435 N < 6 ? (64 << N) : 4096 bytes. */
|
|
1436 if (alignb > (use_after_return_class < 6
|
|
1437 ? (64U << use_after_return_class) : 4096U))
|
|
1438 use_after_return_class = -1;
|
|
1439 else if (alignb > ASAN_RED_ZONE_SIZE && (asan_frame_size & (alignb - 1)))
|
|
1440 base_align_bias = ((asan_frame_size + alignb - 1)
|
|
1441 & ~(alignb - HOST_WIDE_INT_1)) - asan_frame_size;
|
|
1442 }
|
145
|

  /* Align base if target is STRICT_ALIGNMENT.  */
  if (STRICT_ALIGNMENT)
    {
      const HOST_WIDE_INT align
	= (GET_MODE_ALIGNMENT (SImode) / BITS_PER_UNIT) << ASAN_SHADOW_SHIFT;
      base = expand_binop (Pmode, and_optab, base, gen_int_mode (-align, Pmode),
			   NULL_RTX, 1, OPTAB_DIRECT);
    }

  if (use_after_return_class == -1 && pbase)
    emit_move_insn (pbase, base);

  base = expand_binop (Pmode, add_optab, base,
		       gen_int_mode (base_offset - base_align_bias, Pmode),
		       NULL_RTX, 1, OPTAB_DIRECT);
  orig_base = NULL_RTX;
  if (use_after_return_class != -1)
    {
      if (asan_detect_stack_use_after_return == NULL_TREE)
	{
	  id = get_identifier ("__asan_option_detect_stack_use_after_return");
	  decl = build_decl (BUILTINS_LOCATION, VAR_DECL, id,
			     integer_type_node);
	  SET_DECL_ASSEMBLER_NAME (decl, id);
	  TREE_ADDRESSABLE (decl) = 1;
	  DECL_ARTIFICIAL (decl) = 1;
	  DECL_IGNORED_P (decl) = 1;
	  DECL_EXTERNAL (decl) = 1;
	  TREE_STATIC (decl) = 1;
	  TREE_PUBLIC (decl) = 1;
	  TREE_USED (decl) = 1;
	  asan_detect_stack_use_after_return = decl;
	}
      orig_base = gen_reg_rtx (Pmode);
      emit_move_insn (orig_base, base);
      ret = expand_normal (asan_detect_stack_use_after_return);
      lab = gen_label_rtx ();
      emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
			       VOIDmode, 0, lab,
			       profile_probability::very_likely ());
      snprintf (buf, sizeof buf, "__asan_stack_malloc_%d",
		use_after_return_class);
      ret = init_one_libfunc (buf);
      ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
				     GEN_INT (asan_frame_size
					      + base_align_bias),
				     TYPE_MODE (pointer_sized_int_node));
      /* __asan_stack_malloc_[n] returns a pointer to the fake stack on
	 success and NULL otherwise.  Check whether RET is NULL here and
	 jump over the BASE reassignment in that case.  Otherwise, reassign
	 BASE to RET.  */
      emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
			       VOIDmode, 0, lab,
			       profile_probability::very_unlikely ());
      ret = convert_memory_address (Pmode, ret);
      emit_move_insn (base, ret);
      emit_label (lab);
      emit_move_insn (pbase, expand_binop (Pmode, add_optab, base,
					   gen_int_mode (base_align_bias
							 - base_offset, Pmode),
					   NULL_RTX, 1, OPTAB_DIRECT));
    }
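  /* A note for the reader: the three pointer-sized words written below form
     the frame header the run-time library inspects -- the frame magic, the
     address of the frame description string built above, and the address of
     the LASANPC label identifying the function.  The authoritative layout
     is shared with libsanitizer.  */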
  mem = gen_rtx_MEM (ptr_mode, base);
  mem = adjust_address (mem, VOIDmode, base_align_bias);
  emit_move_insn (mem, gen_int_mode (ASAN_STACK_FRAME_MAGIC, ptr_mode));
  mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
  emit_move_insn (mem, expand_normal (str_cst));
  mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
  ASM_GENERATE_INTERNAL_LABEL (buf, "LASANPC", current_function_funcdef_no);
  id = get_identifier (buf);
  decl = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
		     VAR_DECL, id, char_type_node);
  SET_DECL_ASSEMBLER_NAME (decl, id);
  TREE_ADDRESSABLE (decl) = 1;
  TREE_READONLY (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 1;
  TREE_STATIC (decl) = 1;
  TREE_PUBLIC (decl) = 0;
  TREE_USED (decl) = 1;
  DECL_INITIAL (decl) = decl;
  TREE_ASM_WRITTEN (decl) = 1;
  TREE_ASM_WRITTEN (id) = 1;
  emit_move_insn (mem, expand_normal (build_fold_addr_expr (decl)));
  shadow_base = expand_binop (Pmode, lshr_optab, base,
			      gen_int_shift_amount (Pmode, ASAN_SHADOW_SHIFT),
			      NULL_RTX, 1, OPTAB_DIRECT);
  shadow_base
    = plus_constant (Pmode, shadow_base,
		     asan_shadow_offset ()
		     + (base_align_bias >> ASAN_SHADOW_SHIFT));
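  /* Illustrative numbers: with ASAN_SHADOW_SHIFT == 3 and the typical
     x86_64 asan_shadow_offset () of 0x7fff8000, the shadow byte for an
     address A lives at (A >> 3) + 0x7fff8000, matching the run-time check
     scheme described at the top of this file.  */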
  gcc_assert (asan_shadow_set != -1
	      && (ASAN_RED_ZONE_SIZE >> ASAN_SHADOW_SHIFT) == 4);
  shadow_mem = gen_rtx_MEM (SImode, shadow_base);
  set_mem_alias_set (shadow_mem, asan_shadow_set);
  if (STRICT_ALIGNMENT)
    set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
  prev_offset = base_offset;

  asan_redzone_buffer rz_buffer (shadow_mem, prev_offset);
  for (l = length; l; l -= 2)
    {
      if (l == 2)
	cur_shadow_byte = ASAN_STACK_MAGIC_RIGHT;
      offset = offsets[l - 1];

      bool extra_byte = (offset - base_offset) & (ASAN_SHADOW_GRANULARITY - 1);
      /* If a red-zone is not aligned to ASAN_SHADOW_GRANULARITY then
	 the previous stack variable has size % ASAN_SHADOW_GRANULARITY != 0.
	 In that case we have to emit one extra byte that will describe
	 how many bytes (out of ASAN_SHADOW_GRANULARITY) can be accessed.  */
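      /* Example (illustrative): for a 10-byte variable followed by a red
	 zone, 10 & 7 == 2 bytes of its last 8-byte granule are addressable,
	 so a shadow byte of 2 is emitted for that granule before the
	 red-zone magic bytes.  */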
      if (extra_byte)
	{
	  HOST_WIDE_INT aoff
	    = base_offset + ((offset - base_offset)
			     & ~(ASAN_SHADOW_GRANULARITY - HOST_WIDE_INT_1));
	  rz_buffer.emit_redzone_byte (aoff, offset - aoff);
	  offset = aoff + ASAN_SHADOW_GRANULARITY;
	}

      /* Calculate size of red zone payload.  */
      while (offset < offsets[l - 2])
	{
	  rz_buffer.emit_redzone_byte (offset, cur_shadow_byte);
	  offset += ASAN_SHADOW_GRANULARITY;
	}

      cur_shadow_byte = ASAN_STACK_MAGIC_MIDDLE;
    }

  /* As the automatic variables are aligned to
     ASAN_RED_ZONE_SIZE / ASAN_SHADOW_GRANULARITY, the buffer should be
     flushed here.  */
  gcc_assert (rz_buffer.m_shadow_bytes.is_empty ());

  do_pending_stack_adjust ();

  /* Construct epilogue sequence.  */
  start_sequence ();

  lab = NULL;
  if (use_after_return_class != -1)
    {
      rtx_code_label *lab2 = gen_label_rtx ();
      char c = (char) ASAN_STACK_MAGIC_USE_AFTER_RET;
      emit_cmp_and_jump_insns (orig_base, base, EQ, NULL_RTX,
			       VOIDmode, 0, lab2,
			       profile_probability::very_likely ());
      shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
      set_mem_alias_set (shadow_mem, asan_shadow_set);
      mem = gen_rtx_MEM (ptr_mode, base);
      mem = adjust_address (mem, VOIDmode, base_align_bias);
      emit_move_insn (mem, gen_int_mode (ASAN_STACK_RETIRED_MAGIC, ptr_mode));
      unsigned HOST_WIDE_INT sz = asan_frame_size >> ASAN_SHADOW_SHIFT;
      if (use_after_return_class < 5
	  && can_store_by_pieces (sz, builtin_memset_read_str, &c,
				  BITS_PER_UNIT, true))
	store_by_pieces (shadow_mem, sz, builtin_memset_read_str, &c,
			 BITS_PER_UNIT, true, RETURN_BEGIN);
      else if (use_after_return_class >= 5
	       || !set_storage_via_setmem (shadow_mem,
					   GEN_INT (sz),
					   gen_int_mode (c, QImode),
					   BITS_PER_UNIT, BITS_PER_UNIT,
					   -1, sz, sz, sz))
	{
	  snprintf (buf, sizeof buf, "__asan_stack_free_%d",
		    use_after_return_class);
	  ret = init_one_libfunc (buf);
	  rtx addr = convert_memory_address (ptr_mode, base);
	  rtx orig_addr = convert_memory_address (ptr_mode, orig_base);
	  emit_library_call (ret, LCT_NORMAL, ptr_mode, addr, ptr_mode,
			     GEN_INT (asan_frame_size + base_align_bias),
			     TYPE_MODE (pointer_sized_int_node),
			     orig_addr, ptr_mode);
	}
      lab = gen_label_rtx ();
      emit_jump (lab);
      emit_label (lab2);
    }

  shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
  set_mem_alias_set (shadow_mem, asan_shadow_set);

  if (STRICT_ALIGNMENT)
    set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));

  prev_offset = base_offset;
  last_offset = base_offset;
  last_size = 0;
  last_size_aligned = 0;
  for (l = length; l; l -= 2)
    {
      offset = base_offset + ((offsets[l - 1] - base_offset)
			      & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
      if (last_offset + last_size_aligned < offset)
	{
	  shadow_mem = adjust_address (shadow_mem, VOIDmode,
				       (last_offset - prev_offset)
				       >> ASAN_SHADOW_SHIFT);
	  prev_offset = last_offset;
	  asan_clear_shadow (shadow_mem, last_size_aligned >> ASAN_SHADOW_SHIFT);
	  last_offset = offset;
	  last_size = 0;
	}
      else
	last_size = offset - last_offset;
      last_size += base_offset + ((offsets[l - 2] - base_offset)
				  & ~(ASAN_MIN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
		   - offset;

      /* Unpoison shadow memory that corresponds to a variable that is
	 subject to use-after-return sanitization.  */
      if (l > 2)
	{
	  decl = decls[l / 2 - 2];
	  if (asan_handled_variables != NULL
	      && asan_handled_variables->contains (decl))
	    {
	      HOST_WIDE_INT size = offsets[l - 3] - offsets[l - 2];
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  const char *n = (DECL_NAME (decl)
				   ? IDENTIFIER_POINTER (DECL_NAME (decl))
				   : "<unknown>");
		  fprintf (dump_file, "Unpoisoning shadow stack for variable: "
			   "%s (%" PRId64 " B)\n", n, size);
		}

	      last_size += size & ~(ASAN_MIN_RED_ZONE_SIZE - HOST_WIDE_INT_1);
	    }
	}
      last_size_aligned
	= ((last_size + (ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
	   & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
    }
  if (last_size_aligned)
    {
      shadow_mem = adjust_address (shadow_mem, VOIDmode,
				   (last_offset - prev_offset)
				   >> ASAN_SHADOW_SHIFT);
      asan_clear_shadow (shadow_mem, last_size_aligned >> ASAN_SHADOW_SHIFT);
    }

  /* Clean up the set of instrumented stack variables.  */
  delete asan_handled_variables;
  asan_handled_variables = NULL;
  delete asan_used_labels;
  asan_used_labels = NULL;

  do_pending_stack_adjust ();
  if (lab)
    emit_label (lab);

  insns = get_insns ();
  end_sequence ();
  return insns;
}

/* Emit an __asan_allocas_unpoison (TOP, BOT) call.  The caller passes the
   current stack base as BOT and virtual_stack_dynamic_rtx as TOP.  If
   BEFORE is non-null, the new instructions are emitted onto the existing
   sequence starting at BEFORE; otherwise a new sequence is started.  */

rtx_insn *
asan_emit_allocas_unpoison (rtx top, rtx bot, rtx_insn *before)
{
  if (before)
    push_to_sequence (before);
  else
    start_sequence ();
  rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
  top = convert_memory_address (ptr_mode, top);
  bot = convert_memory_address (ptr_mode, bot);
  emit_library_call (ret, LCT_NORMAL, ptr_mode,
		     top, ptr_mode, bot, ptr_mode);

  do_pending_stack_adjust ();
  rtx_insn *insns = get_insns ();
  end_sequence ();
  return insns;
}

/* Return true if DECL, a global var, might be overridden and needs
   therefore a local alias.  */

static bool
asan_needs_local_alias (tree decl)
{
  return DECL_WEAK (decl) || !targetm.binds_local_p (decl);
}

/* Return true if DECL, a global var, is an artificial ODR indicator symbol
   and therefore doesn't need protection.  */

static bool
is_odr_indicator (tree decl)
{
  return (DECL_ARTIFICIAL (decl)
	  && lookup_attribute ("asan odr indicator", DECL_ATTRIBUTES (decl)));
}

/* Return true if DECL is a VAR_DECL that should be protected
   by Address Sanitizer, by appending a red zone with protected
   shadow memory after it and aligning it to at least
   ASAN_RED_ZONE_SIZE bytes.  */

bool
asan_protect_global (tree decl, bool ignore_decl_rtl_set_p)
{
  if (!param_asan_globals)
    return false;

  rtx rtl, symbol;

  if (TREE_CODE (decl) == STRING_CST)
    {
      /* Instrument all STRING_CSTs except those created
	 by asan_pp_string here.  */
      if (shadow_ptr_types[0] != NULL_TREE
	  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
	  && TREE_TYPE (TREE_TYPE (decl)) == TREE_TYPE (shadow_ptr_types[0]))
	return false;
      return true;
    }
  if (!VAR_P (decl)
      /* TLS vars aren't statically protectable.  */
      || DECL_THREAD_LOCAL_P (decl)
      /* Externs will be protected elsewhere.  */
      || DECL_EXTERNAL (decl)
      /* PR sanitizer/81697: For architectures that use section anchors the
	 first call to asan_protect_global may occur before DECL_RTL (decl)
	 is set.  We should ignore DECL_RTL_SET_P then, because otherwise the
	 first call to asan_protect_global will return FALSE and the
	 following calls on the same decl after setting DECL_RTL (decl) will
	 return TRUE and we'll end up with inconsistency at runtime.  */
      || (!DECL_RTL_SET_P (decl) && !ignore_decl_rtl_set_p)
      /* Comdat vars pose an ABI problem, we can't know if
	 the var that is selected by the linker will have
	 padding or not.  */
      || DECL_ONE_ONLY (decl)
      /* Similarly for common vars.  People can use -fno-common.
	 Note: Linux kernel is built with -fno-common, so we do instrument
	 globals there even if it is C.  */
      || (DECL_COMMON (decl) && TREE_PUBLIC (decl))
      /* Don't protect if using user section, often vars placed
	 into user section from multiple TUs are then assumed
	 to be an array of such vars, putting padding in there
	 breaks this assumption.  */
      || (DECL_SECTION_NAME (decl) != NULL
	  && !symtab_node::get (decl)->implicit_section
	  && !section_sanitized_p (DECL_SECTION_NAME (decl)))
      || DECL_SIZE (decl) == 0
      || ASAN_RED_ZONE_SIZE * BITS_PER_UNIT > MAX_OFILE_ALIGNMENT
      || TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
      || !valid_constant_size_p (DECL_SIZE_UNIT (decl))
      || DECL_ALIGN_UNIT (decl) > 2 * ASAN_RED_ZONE_SIZE
      || TREE_TYPE (decl) == ubsan_get_source_location_type ()
      || is_odr_indicator (decl))
    return false;

  if (!ignore_decl_rtl_set_p || DECL_RTL_SET_P (decl))
    {
      rtl = DECL_RTL (decl);
      if (!MEM_P (rtl) || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF)
	return false;
      symbol = XEXP (rtl, 0);

      if (CONSTANT_POOL_ADDRESS_P (symbol)
	  || TREE_CONSTANT_POOL_ADDRESS_P (symbol))
	return false;
    }

  if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
    return false;

  if (!TARGET_SUPPORTS_ALIASES && asan_needs_local_alias (decl))
    return false;

  return true;
}

/* Construct a function tree for __asan_report_{load,store}{1,2,4,8,16,_n}.
   IS_STORE is either 1 (for a store) or 0 (for a load).  */
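
/* For example (a reader's sketch): a 4-byte store with recover_p false
   selects BUILT_IN_ASAN_REPORT_STORE4 (size_log2 == 2) and sets *nargs
   to 1, while size_in_bytes == -1 selects the _N variant taking two
   arguments (address and size).  */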

static tree
report_error_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
		   int *nargs)
{
  static enum built_in_function report[2][2][6]
    = { { { BUILT_IN_ASAN_REPORT_LOAD1, BUILT_IN_ASAN_REPORT_LOAD2,
	    BUILT_IN_ASAN_REPORT_LOAD4, BUILT_IN_ASAN_REPORT_LOAD8,
	    BUILT_IN_ASAN_REPORT_LOAD16, BUILT_IN_ASAN_REPORT_LOAD_N },
	  { BUILT_IN_ASAN_REPORT_STORE1, BUILT_IN_ASAN_REPORT_STORE2,
	    BUILT_IN_ASAN_REPORT_STORE4, BUILT_IN_ASAN_REPORT_STORE8,
	    BUILT_IN_ASAN_REPORT_STORE16, BUILT_IN_ASAN_REPORT_STORE_N } },
	{ { BUILT_IN_ASAN_REPORT_LOAD1_NOABORT,
	    BUILT_IN_ASAN_REPORT_LOAD2_NOABORT,
	    BUILT_IN_ASAN_REPORT_LOAD4_NOABORT,
	    BUILT_IN_ASAN_REPORT_LOAD8_NOABORT,
	    BUILT_IN_ASAN_REPORT_LOAD16_NOABORT,
	    BUILT_IN_ASAN_REPORT_LOAD_N_NOABORT },
	  { BUILT_IN_ASAN_REPORT_STORE1_NOABORT,
	    BUILT_IN_ASAN_REPORT_STORE2_NOABORT,
	    BUILT_IN_ASAN_REPORT_STORE4_NOABORT,
	    BUILT_IN_ASAN_REPORT_STORE8_NOABORT,
	    BUILT_IN_ASAN_REPORT_STORE16_NOABORT,
	    BUILT_IN_ASAN_REPORT_STORE_N_NOABORT } } };
  if (size_in_bytes == -1)
    {
      *nargs = 2;
      return builtin_decl_implicit (report[recover_p][is_store][5]);
    }
  *nargs = 1;
  int size_log2 = exact_log2 (size_in_bytes);
  return builtin_decl_implicit (report[recover_p][is_store][size_log2]);
}

/* Construct a function tree for __asan_{load,store}{1,2,4,8,16,_n}.
   IS_STORE is either 1 (for a store) or 0 (for a load).  */

static tree
check_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
	    int *nargs)
{
  static enum built_in_function check[2][2][6]
    = { { { BUILT_IN_ASAN_LOAD1, BUILT_IN_ASAN_LOAD2,
	    BUILT_IN_ASAN_LOAD4, BUILT_IN_ASAN_LOAD8,
	    BUILT_IN_ASAN_LOAD16, BUILT_IN_ASAN_LOADN },
	  { BUILT_IN_ASAN_STORE1, BUILT_IN_ASAN_STORE2,
	    BUILT_IN_ASAN_STORE4, BUILT_IN_ASAN_STORE8,
	    BUILT_IN_ASAN_STORE16, BUILT_IN_ASAN_STOREN } },
	{ { BUILT_IN_ASAN_LOAD1_NOABORT,
	    BUILT_IN_ASAN_LOAD2_NOABORT,
	    BUILT_IN_ASAN_LOAD4_NOABORT,
	    BUILT_IN_ASAN_LOAD8_NOABORT,
	    BUILT_IN_ASAN_LOAD16_NOABORT,
	    BUILT_IN_ASAN_LOADN_NOABORT },
	  { BUILT_IN_ASAN_STORE1_NOABORT,
	    BUILT_IN_ASAN_STORE2_NOABORT,
	    BUILT_IN_ASAN_STORE4_NOABORT,
	    BUILT_IN_ASAN_STORE8_NOABORT,
	    BUILT_IN_ASAN_STORE16_NOABORT,
	    BUILT_IN_ASAN_STOREN_NOABORT } } };
  if (size_in_bytes == -1)
    {
      *nargs = 2;
      return builtin_decl_implicit (check[recover_p][is_store][5]);
    }
  *nargs = 1;
  int size_log2 = exact_log2 (size_in_bytes);
  return builtin_decl_implicit (check[recover_p][is_store][size_log2]);
}

/* Split the current basic block and create a condition statement
   insertion point right before or after the statement pointed to by
   ITER.  Return an iterator to the point at which the caller might
   safely insert the condition statement.

   THEN_BLOCK must be set to the address of an uninitialized instance
   of basic_block.  The function will then set *THEN_BLOCK to the
   'then block' of the condition statement to be inserted by the
   caller.

   If CREATE_THEN_FALLTHRU_EDGE is false, no edge will be created from
   *THEN_BLOCK to *FALLTHROUGH_BLOCK.

   Similarly, the function will set *FALLTHROUGH_BLOCK to the 'else
   block' of the condition statement to be inserted by the caller.

   Note that *FALLTHROUGH_BLOCK is a new block that contains the
   statements starting from *ITER, and *THEN_BLOCK is a new empty
   block.

   *ITER is adjusted to always point to the first statement of the
   basic block *FALLTHROUGH_BLOCK.  That statement is the same as what
   ITER was pointing to prior to calling this function, if BEFORE_P is
   true; otherwise, it is its following statement.  */
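
/* A sketch of the CFG this creates (the condition statement itself is
   inserted later by the caller at the returned iterator):

       cond_bb --EDGE_TRUE_VALUE--> then_bb
	  \				|
       EDGE_FALSE_VALUE	   EDGE_FALLTHRU (if requested)
	    \				|
	     `-------------------> fallthru_bb			  */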

gimple_stmt_iterator
create_cond_insert_point (gimple_stmt_iterator *iter,
			  bool before_p,
			  bool then_more_likely_p,
			  bool create_then_fallthru_edge,
			  basic_block *then_block,
			  basic_block *fallthrough_block)
{
  gimple_stmt_iterator gsi = *iter;

  if (!gsi_end_p (gsi) && before_p)
    gsi_prev (&gsi);

  basic_block cur_bb = gsi_bb (*iter);

  edge e = split_block (cur_bb, gsi_stmt (gsi));

  /* Get a hold on the 'condition block', the 'then block' and the
     'else block'.  */
  basic_block cond_bb = e->src;
  basic_block fallthru_bb = e->dest;
  basic_block then_bb = create_empty_bb (cond_bb);
  if (current_loops)
    {
      add_bb_to_loop (then_bb, cond_bb->loop_father);
      loops_state_set (LOOPS_NEED_FIXUP);
    }

  /* Set up the newly created 'then block'.  */
  e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
  profile_probability fallthrough_probability
    = then_more_likely_p
      ? profile_probability::very_unlikely ()
      : profile_probability::very_likely ();
  e->probability = fallthrough_probability.invert ();
  then_bb->count = e->count ();
  if (create_then_fallthru_edge)
    make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);

  /* Set up the fallthrough basic block.  */
  e = find_edge (cond_bb, fallthru_bb);
  e->flags = EDGE_FALSE_VALUE;
  e->probability = fallthrough_probability;

  /* Update dominance info for the newly created then_bb; note that
     fallthru_bb's dominance info has already been updated by
     split_block.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);

  *then_block = then_bb;
  *fallthrough_block = fallthru_bb;
  *iter = gsi_start_bb (fallthru_bb);

  return gsi_last_bb (cond_bb);
}

/* Insert an if condition followed by a 'then block' right before the
   statement pointed to by ITER.  The fallthrough block -- which is the
   else block of the condition as well as the destination of the
   outgoing edge of the 'then block' -- starts with the statement
   pointed to by ITER.

   COND is the condition of the if.

   If THEN_MORE_LIKELY_P is true, the probability of the edge to the
   'then block' is higher than the probability of the edge to the
   fallthrough block.

   Upon completion of the function, *THEN_BB is set to the newly
   inserted 'then block' and similarly, *FALLTHROUGH_BB is set to the
   fallthrough block.

   *ITER is adjusted to still point to the same statement it was
   pointing to initially.  */

static void
insert_if_then_before_iter (gcond *cond,
			    gimple_stmt_iterator *iter,
			    bool then_more_likely_p,
			    basic_block *then_bb,
			    basic_block *fallthrough_bb)
{
  gimple_stmt_iterator cond_insert_point =
    create_cond_insert_point (iter,
			      /*before_p=*/true,
			      then_more_likely_p,
			      /*create_then_fallthru_edge=*/true,
			      then_bb,
			      fallthrough_bb);
  gsi_insert_after (&cond_insert_point, cond, GSI_NEW_STMT);
}

/* Build (base_addr >> ASAN_SHADOW_SHIFT) + asan_shadow_offset ().
   If RETURN_ADDRESS is set to true, return the memory location instead
   of the value in the shadow memory.  */
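
/* A sketch of the GIMPLE this emits (SSA temporaries shown as _N):
     _1 = base_addr >> ASAN_SHADOW_SHIFT;
     _2 = _1 + asan_shadow_offset ();
     _3 = (shadow_ptr_type) _2;
     _4 = *_3;	  <-- omitted when RETURN_ADDRESS is true
   The last assignment's lhs is returned.  */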

static tree
build_shadow_mem_access (gimple_stmt_iterator *gsi, location_t location,
			 tree base_addr, tree shadow_ptr_type,
			 bool return_address = false)
{
  tree t, uintptr_type = TREE_TYPE (base_addr);
  tree shadow_type = TREE_TYPE (shadow_ptr_type);
  gimple *g;

  t = build_int_cst (uintptr_type, ASAN_SHADOW_SHIFT);
  g = gimple_build_assign (make_ssa_name (uintptr_type), RSHIFT_EXPR,
			   base_addr, t);
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);

  t = build_int_cst (uintptr_type, asan_shadow_offset ());
  g = gimple_build_assign (make_ssa_name (uintptr_type), PLUS_EXPR,
			   gimple_assign_lhs (g), t);
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);

  g = gimple_build_assign (make_ssa_name (shadow_ptr_type), NOP_EXPR,
			   gimple_assign_lhs (g));
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);

  if (!return_address)
    {
      t = build2 (MEM_REF, shadow_type, gimple_assign_lhs (g),
		  build_int_cst (shadow_ptr_type, 0));
      g = gimple_build_assign (make_ssa_name (shadow_type), MEM_REF, t);
      gimple_set_location (g, location);
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }

  return gimple_assign_lhs (g);
}

/* BASE can already be an SSA_NAME; in that case, do not create a
   new SSA_NAME for it.  */

static tree
maybe_create_ssa_name (location_t loc, tree base, gimple_stmt_iterator *iter,
		       bool before_p)
{
  if (TREE_CODE (base) == SSA_NAME)
    return base;
  gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (base)),
				   TREE_CODE (base), base);
  gimple_set_location (g, loc);
  if (before_p)
    gsi_insert_before (iter, g, GSI_SAME_STMT);
  else
    gsi_insert_after (iter, g, GSI_NEW_STMT);
  return gimple_assign_lhs (g);
}

/* LEN can already have the necessary size and precision;
   in that case, do not create a new variable.  */

tree
maybe_cast_to_ptrmode (location_t loc, tree len, gimple_stmt_iterator *iter,
		       bool before_p)
{
  if (ptrofftype_p (len))
    return len;
  gimple *g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
				   NOP_EXPR, len);
  gimple_set_location (g, loc);
  if (before_p)
    gsi_insert_before (iter, g, GSI_SAME_STMT);
  else
    gsi_insert_after (iter, g, GSI_NEW_STMT);
  return gimple_assign_lhs (g);
}

/* Instrument the memory access instruction BASE.  Insert new
   statements before or after ITER.

   Note that the memory access represented by BASE can be either an
   SSA_NAME, or a non-SSA expression.  LOCATION is the source code
   location.  IS_STORE is TRUE for a store, FALSE for a load.
   BEFORE_P is TRUE for inserting the instrumentation code before
   ITER, FALSE for inserting it after ITER.  IS_SCALAR_ACCESS is TRUE
   for a scalar memory access and FALSE for memory region access.
   IS_NON_ZERO_LEN is TRUE if the memory region is guaranteed to have
   a non-zero length.  ALIGN tells the alignment of the accessed
   memory object.

   If BEFORE_P is TRUE, *ITER is arranged to still point to the
   statement it was pointing to prior to calling this function,
   otherwise, it points to the statement logically following it.  */
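
/* For instance (illustrative), an aligned 4-byte store to P becomes
     .ASAN_CHECK (ASAN_CHECK_STORE | ASAN_CHECK_NON_ZERO_LEN
		  | ASAN_CHECK_SCALAR_ACCESS, P, 4, 4);
   an internal call that the sanopt pass later expands into the actual
   shadow-memory comparison.  */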

static void
build_check_stmt (location_t loc, tree base, tree len,
		  HOST_WIDE_INT size_in_bytes, gimple_stmt_iterator *iter,
		  bool is_non_zero_len, bool before_p, bool is_store,
		  bool is_scalar_access, unsigned int align = 0)
{
  gimple_stmt_iterator gsi = *iter;
  gimple *g;

  gcc_assert (!(size_in_bytes > 0 && !is_non_zero_len));

  base = unshare_expr (base);
  base = maybe_create_ssa_name (loc, base, &gsi, before_p);

  if (len)
    {
      len = unshare_expr (len);
      len = maybe_cast_to_ptrmode (loc, len, iter, before_p);
    }
  else
    {
      gcc_assert (size_in_bytes != -1);
      len = build_int_cst (pointer_sized_int_node, size_in_bytes);
    }

  if (size_in_bytes > 1)
    {
      if ((size_in_bytes & (size_in_bytes - 1)) != 0
	  || size_in_bytes > 16)
	is_scalar_access = false;
      else if (align && align < size_in_bytes * BITS_PER_UNIT)
	{
	  /* On non-strict alignment targets, if a
	     16-byte access is just 8-byte aligned,
	     this will result in a misaligned shadow
	     memory 2-byte load, but otherwise can
	     be handled using one read.  */
	  if (size_in_bytes != 16
	      || STRICT_ALIGNMENT
	      || align < 8 * BITS_PER_UNIT)
	    is_scalar_access = false;
	}
    }

  HOST_WIDE_INT flags = 0;
  if (is_store)
    flags |= ASAN_CHECK_STORE;
  if (is_non_zero_len)
    flags |= ASAN_CHECK_NON_ZERO_LEN;
  if (is_scalar_access)
    flags |= ASAN_CHECK_SCALAR_ACCESS;

  g = gimple_build_call_internal (IFN_ASAN_CHECK, 4,
				  build_int_cst (integer_type_node, flags),
				  base, len,
				  build_int_cst (integer_type_node,
						 align / BITS_PER_UNIT));
  gimple_set_location (g, loc);
  if (before_p)
    gsi_insert_before (&gsi, g, GSI_SAME_STMT);
  else
    {
      gsi_insert_after (&gsi, g, GSI_NEW_STMT);
      gsi_next (&gsi);
      *iter = gsi;
    }
}

/* If T represents a memory access, add instrumentation code before ITER.
   LOCATION is the source code location.
   IS_STORE is either TRUE (for a store) or FALSE (for a load).  */

static void
instrument_derefs (gimple_stmt_iterator *iter, tree t,
		   location_t location, bool is_store)
{
  if (is_store && !param_asan_instrument_writes)
    return;
  if (!is_store && !param_asan_instrument_reads)
    return;

  tree type, base;
  HOST_WIDE_INT size_in_bytes;
  if (location == UNKNOWN_LOCATION)
    location = EXPR_LOCATION (t);

  type = TREE_TYPE (t);
  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case COMPONENT_REF:
    case INDIRECT_REF:
    case MEM_REF:
    case VAR_DECL:
    case BIT_FIELD_REF:
      break;
    default:
      return;
    }

  size_in_bytes = int_size_in_bytes (type);
  if (size_in_bytes <= 0)
    return;

  poly_int64 bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep = 0;
  tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset, &mode,
				    &unsignedp, &reversep, &volatilep);

  if (TREE_CODE (t) == COMPONENT_REF
      && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)) != NULL_TREE)
    {
      tree repr = DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1));
      instrument_derefs (iter, build3 (COMPONENT_REF, TREE_TYPE (repr),
				       TREE_OPERAND (t, 0), repr,
				       TREE_OPERAND (t, 2)),
			 location, is_store);
      return;
    }

  if (!multiple_p (bitpos, BITS_PER_UNIT)
      || maybe_ne (bitsize, size_in_bytes * BITS_PER_UNIT))
    return;

  if (VAR_P (inner) && DECL_HARD_REGISTER (inner))
    return;

  poly_int64 decl_size;
  if (VAR_P (inner)
      && offset == NULL_TREE
      && DECL_SIZE (inner)
      && poly_int_tree_p (DECL_SIZE (inner), &decl_size)
      && known_subrange_p (bitpos, bitsize, 0, decl_size))
    {
      if (DECL_THREAD_LOCAL_P (inner))
	return;
      if (!param_asan_globals && is_global_var (inner))
	return;
      if (!TREE_STATIC (inner))
	{
	  /* Automatic vars in the current function will always be
	     accessible.  */
	  if (decl_function_context (inner) == current_function_decl
	      && (!asan_sanitize_use_after_scope ()
		  || !TREE_ADDRESSABLE (inner)))
	    return;
	}
      /* Always instrument external vars, they might be dynamically
	 initialized.  */
      else if (!DECL_EXTERNAL (inner))
	{
	  /* For static vars, if they are known not to be dynamically
	     initialized, they will always be accessible.  */
	  varpool_node *vnode = varpool_node::get (inner);
	  if (vnode && !vnode->dynamically_initialized)
	    return;
	}
    }

  base = build_fold_addr_expr (t);
  if (!has_mem_ref_been_instrumented (base, size_in_bytes))
    {
      unsigned int align = get_object_alignment (t);
      build_check_stmt (location, base, NULL_TREE, size_in_bytes, iter,
			/*is_non_zero_len*/size_in_bytes > 0, /*before_p=*/true,
			is_store, /*is_scalar_access*/true, align);
      update_mem_ref_hash_table (base, size_in_bytes);
      update_mem_ref_hash_table (t, size_in_bytes);
    }
}

/* Insert a memory reference into the hash table if the access length
   can be determined at compile time.  */

static void
maybe_update_mem_ref_hash_table (tree base, tree len)
{
  if (!POINTER_TYPE_P (TREE_TYPE (base))
      || !INTEGRAL_TYPE_P (TREE_TYPE (len)))
    return;

  HOST_WIDE_INT size_in_bytes
    = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;

  if (size_in_bytes != -1)
    update_mem_ref_hash_table (base, size_in_bytes);
}

/* Instrument an access to a contiguous memory region that starts at
   the address pointed to by BASE, over a length of LEN (expressed in
   the sizeof (*BASE) bytes).  ITER points to the instruction before
   which the instrumentation instructions must be inserted.  LOCATION
   is the source location that the instrumentation instructions must
   have.  If IS_STORE is true, then the memory access is a store;
   otherwise, it's a load.  */
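
/* E.g. for memcpy (dest, src, n), instrument_builtin_call below calls this
   twice: once for <src, n> as a load and once for <dest, n> as a store.  */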

static void
instrument_mem_region_access (tree base, tree len,
			      gimple_stmt_iterator *iter,
			      location_t location, bool is_store)
{
  if (!POINTER_TYPE_P (TREE_TYPE (base))
      || !INTEGRAL_TYPE_P (TREE_TYPE (len))
      || integer_zerop (len))
    return;

  HOST_WIDE_INT size_in_bytes
    = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;

  if ((size_in_bytes == -1)
      || !has_mem_ref_been_instrumented (base, size_in_bytes))
    {
      build_check_stmt (location, base, len, size_in_bytes, iter,
			/*is_non_zero_len*/size_in_bytes > 0, /*before_p*/true,
			is_store, /*is_scalar_access*/false, /*align*/0);
    }

  maybe_update_mem_ref_hash_table (base, len);
  *iter = gsi_for_stmt (gsi_stmt (*iter));
}

/* Instrument the call to a built-in memory access function that is
   pointed to by the iterator ITER.

   Upon completion, return TRUE iff *ITER has been advanced to the
   statement following the one it was originally pointing to.  */

static bool
instrument_builtin_call (gimple_stmt_iterator *iter)
{
  if (!param_asan_memintrin)
    return false;

  bool iter_advanced_p = false;
  gcall *call = as_a <gcall *> (gsi_stmt (*iter));

  gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));

  location_t loc = gimple_location (call);

  asan_mem_ref src0, src1, dest;
  asan_mem_ref_init (&src0, NULL, 1);
  asan_mem_ref_init (&src1, NULL, 1);
  asan_mem_ref_init (&dest, NULL, 1);

  tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
  bool src0_is_store = false, src1_is_store = false, dest_is_store = false,
    dest_is_deref = false, intercepted_p = true;

  if (get_mem_refs_of_builtin_call (call,
				    &src0, &src0_len, &src0_is_store,
				    &src1, &src1_len, &src1_is_store,
				    &dest, &dest_len, &dest_is_store,
				    &dest_is_deref, &intercepted_p, iter))
    {
      if (dest_is_deref)
	{
	  instrument_derefs (iter, dest.start, loc, dest_is_store);
	  gsi_next (iter);
	  iter_advanced_p = true;
	}
      else if (!intercepted_p
	       && (src0_len || src1_len || dest_len))
	{
	  if (src0.start != NULL_TREE)
	    instrument_mem_region_access (src0.start, src0_len,
					  iter, loc, /*is_store=*/false);
	  if (src1.start != NULL_TREE)
	    instrument_mem_region_access (src1.start, src1_len,
					  iter, loc, /*is_store=*/false);
	  if (dest.start != NULL_TREE)
	    instrument_mem_region_access (dest.start, dest_len,
					  iter, loc, /*is_store=*/true);

	  *iter = gsi_for_stmt (call);
	  gsi_next (iter);
	  iter_advanced_p = true;
	}
      else
	{
	  if (src0.start != NULL_TREE)
	    maybe_update_mem_ref_hash_table (src0.start, src0_len);
	  if (src1.start != NULL_TREE)
	    maybe_update_mem_ref_hash_table (src1.start, src1_len);
	  if (dest.start != NULL_TREE)
	    maybe_update_mem_ref_hash_table (dest.start, dest_len);
	}
    }
  return iter_advanced_p;
}

/* Instrument the assignment statement ITER if it is subject to
   instrumentation.  Return TRUE iff instrumentation actually
   happened.  In that case, the iterator ITER is advanced to the next
   logical expression following the one initially pointed to by ITER,
   and the relevant memory reference whose access has been
   instrumented is added to the memory references hash table.  */

static bool
maybe_instrument_assignment (gimple_stmt_iterator *iter)
{
  gimple *s = gsi_stmt (*iter);

  gcc_assert (gimple_assign_single_p (s));

  tree ref_expr = NULL_TREE;
  bool is_store, is_instrumented = false;

  if (gimple_store_p (s))
    {
      ref_expr = gimple_assign_lhs (s);
      is_store = true;
      instrument_derefs (iter, ref_expr,
			 gimple_location (s),
			 is_store);
      is_instrumented = true;
    }

  if (gimple_assign_load_p (s))
    {
      ref_expr = gimple_assign_rhs1 (s);
      is_store = false;
      instrument_derefs (iter, ref_expr,
			 gimple_location (s),
			 is_store);
      is_instrumented = true;
    }

  if (is_instrumented)
    gsi_next (iter);

  return is_instrumented;
}

/* Instrument the function call pointed to by the iterator ITER, if it
   is subject to instrumentation.  At the moment, the only function
   calls that are instrumented are some built-in functions that access
   memory.  Look at instrument_builtin_call to learn more.

   Upon completion return TRUE iff *ITER was advanced to the statement
   following the one it was originally pointing to.  */

static bool
maybe_instrument_call (gimple_stmt_iterator *iter)
{
  gimple *stmt = gsi_stmt (*iter);
  bool is_builtin = gimple_call_builtin_p (stmt, BUILT_IN_NORMAL);

  if (is_builtin && instrument_builtin_call (iter))
    return true;

  if (gimple_call_noreturn_p (stmt))
    {
      if (is_builtin)
	{
	  tree callee = gimple_call_fndecl (stmt);
	  switch (DECL_FUNCTION_CODE (callee))
	    {
	    case BUILT_IN_UNREACHABLE:
	    case BUILT_IN_TRAP:
	      /* Don't instrument these.  */
	      return false;
	    default:
	      break;
	    }
	}
      tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
      gimple *g = gimple_build_call (decl, 0);
      gimple_set_location (g, gimple_location (stmt));
      gsi_insert_before (iter, g, GSI_SAME_STMT);
    }

  bool instrumented = false;
  if (gimple_store_p (stmt))
    {
      tree ref_expr = gimple_call_lhs (stmt);
      instrument_derefs (iter, ref_expr,
			 gimple_location (stmt),
			 /*is_store=*/true);

      instrumented = true;
    }

  /* Walk through gimple_call arguments and check them if needed.  */
  unsigned args_num = gimple_call_num_args (stmt);
  for (unsigned i = 0; i < args_num; ++i)
    {
      tree arg = gimple_call_arg (stmt, i);
      /* If ARG is not a non-aggregate register variable, the compiler in
	 general creates a temporary for it and passes that temporary as an
	 argument to the gimple call.  But in some cases, e.g. when we pass
	 by value a small structure that fits into a register, the compiler
	 can avoid the extra overhead by pulling out these temporaries.  In
	 this case, we should check the argument.  */
      if (!is_gimple_reg (arg) && !is_gimple_min_invariant (arg))
	{
	  instrument_derefs (iter, arg,
			     gimple_location (stmt),
			     /*is_store=*/false);
	  instrumented = true;
	}
    }
  if (instrumented)
    gsi_next (iter);
  return instrumented;
}

/* Walk each instruction of all basic blocks and instrument those that
   represent memory references: loads, stores, or function calls.
   In a given basic block, this function avoids instrumenting memory
   references that have already been instrumented.  */

static void
transform_statements (void)
{
  basic_block bb, last_bb = NULL;
  gimple_stmt_iterator i;
  int saved_last_basic_block = last_basic_block_for_fn (cfun);

  FOR_EACH_BB_FN (bb, cfun)
    {
      basic_block prev_bb = bb;

      if (bb->index >= saved_last_basic_block)
	continue;

      /* Flush the mem ref hash table, if current bb doesn't have
	 exactly one predecessor, or if that predecessor (skipping
	 over asan created basic blocks) isn't the last processed
	 basic block.  Thus we effectively flush on extended basic
	 block boundaries.  */
      while (single_pred_p (prev_bb))
	{
	  prev_bb = single_pred (prev_bb);
	  if (prev_bb->index < saved_last_basic_block)
	    break;
	}
      if (prev_bb != last_bb)
	empty_mem_ref_hash_table ();
      last_bb = bb;

      for (i = gsi_start_bb (bb); !gsi_end_p (i);)
	{
	  gimple *s = gsi_stmt (i);

	  if (has_stmt_been_instrumented_p (s))
	    gsi_next (&i);
	  else if (gimple_assign_single_p (s)
		   && !gimple_clobber_p (s)
		   && maybe_instrument_assignment (&i))
	    /* Nothing to do as maybe_instrument_assignment advanced
	       the iterator I.  */;
	  else if (is_gimple_call (s) && maybe_instrument_call (&i))
	    /* Nothing to do as maybe_instrument_call
	       advanced the iterator I.  */;
	  else
	    {
	      /* No instrumentation happened.

		 If the current instruction is a function call that
		 might free something, let's forget about the memory
		 references that got instrumented.  Otherwise we might
		 miss some instrumentation opportunities.  Do the same
		 for an ASAN_MARK poisoning internal function.  */
	      if (is_gimple_call (s)
		  && (!nonfreeing_call_p (s)
		      || asan_mark_p (s, ASAN_MARK_POISON)))
		empty_mem_ref_hash_table ();

	      gsi_next (&i);
	    }
	}
    }
  free_mem_ref_resources ();
}

/* Build
     __asan_before_dynamic_init (module_name)
   or
     __asan_after_dynamic_init ()
   call.  */

tree
asan_dynamic_init_call (bool after_p)
{
  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();

  tree fn = builtin_decl_implicit (after_p
				   ? BUILT_IN_ASAN_AFTER_DYNAMIC_INIT
				   : BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT);
  tree module_name_cst = NULL_TREE;
  if (!after_p)
    {
      pretty_printer module_name_pp;
      pp_string (&module_name_pp, main_input_filename);

      module_name_cst = asan_pp_string (&module_name_pp);
      module_name_cst = fold_convert (const_ptr_type_node,
				      module_name_cst);
    }

  return build_call_expr (fn, after_p ? 0 : 1, module_name_cst);
}

/* Build
     struct __asan_global
     {
       const void *__beg;
       uptr __size;
       uptr __size_with_redzone;
       const void *__name;
       const void *__module_name;
       uptr __has_dynamic_init;
       __asan_global_source_location *__location;
       char *__odr_indicator;
     } type.  */

static tree
asan_global_struct (void)
{
  static const char *field_names[]
    = { "__beg", "__size", "__size_with_redzone",
	"__name", "__module_name", "__has_dynamic_init", "__location",
	"__odr_indicator" };
  tree fields[ARRAY_SIZE (field_names)], ret;
  unsigned i;

  ret = make_node (RECORD_TYPE);
  for (i = 0; i < ARRAY_SIZE (field_names); i++)
    {
      fields[i]
	= build_decl (UNKNOWN_LOCATION, FIELD_DECL,
		      get_identifier (field_names[i]),
		      (i == 0 || i == 3) ? const_ptr_type_node
		      : pointer_sized_int_node);
      DECL_CONTEXT (fields[i]) = ret;
      if (i)
	DECL_CHAIN (fields[i - 1]) = fields[i];
    }
  tree type_decl = build_decl (input_location, TYPE_DECL,
			       get_identifier ("__asan_global"), ret);
  DECL_IGNORED_P (type_decl) = 1;
  DECL_ARTIFICIAL (type_decl) = 1;
  TYPE_FIELDS (ret) = fields[0];
  TYPE_NAME (ret) = type_decl;
  TYPE_STUB_DECL (ret) = type_decl;
  layout_type (ret);
  return ret;
}

/* Create and return an ODR indicator symbol for DECL.
   TYPE is the __asan_global struct type as returned by asan_global_struct.  */
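
/* E.g. for a public variable "glob" this creates the volatile char
   __odr_asan.glob (or the __odr_asan$glob / __odr_asan_glob spellings,
   depending on the target's label syntax); its address is stored in the
   __odr_indicator field of the global's descriptor and is used by the
   run time to detect ODR violations across modules.  */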

static tree
create_odr_indicator (tree decl, tree type)
{
  char *name;
  tree uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
  tree decl_name
    = (HAS_DECL_ASSEMBLER_NAME_P (decl) ? DECL_ASSEMBLER_NAME (decl)
       : DECL_NAME (decl));
  /* DECL_NAME theoretically might be NULL.  Bail out with 0 in this case.  */
  if (decl_name == NULL_TREE)
    return build_int_cst (uptr, 0);
  const char *dname = IDENTIFIER_POINTER (decl_name);
  if (HAS_DECL_ASSEMBLER_NAME_P (decl))
    dname = targetm.strip_name_encoding (dname);
  size_t len = strlen (dname) + sizeof ("__odr_asan_");
  name = XALLOCAVEC (char, len);
  snprintf (name, len, "__odr_asan_%s", dname);
#ifndef NO_DOT_IN_LABEL
  name[sizeof ("__odr_asan") - 1] = '.';
#elif !defined(NO_DOLLAR_IN_LABEL)
  name[sizeof ("__odr_asan") - 1] = '$';
#endif
  tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (name),
			 char_type_node);
  TREE_ADDRESSABLE (var) = 1;
  TREE_READONLY (var) = 0;
  TREE_THIS_VOLATILE (var) = 1;
  DECL_GIMPLE_REG_P (var) = 0;
  DECL_ARTIFICIAL (var) = 1;
  DECL_IGNORED_P (var) = 1;
  TREE_STATIC (var) = 1;
  TREE_PUBLIC (var) = 1;
  DECL_VISIBILITY (var) = DECL_VISIBILITY (decl);
  DECL_VISIBILITY_SPECIFIED (var) = DECL_VISIBILITY_SPECIFIED (decl);

  TREE_USED (var) = 1;
  tree ctor = build_constructor_va (TREE_TYPE (var), 1, NULL_TREE,
				    build_int_cst (unsigned_type_node, 0));
  TREE_CONSTANT (ctor) = 1;
  TREE_STATIC (ctor) = 1;
  DECL_INITIAL (var) = ctor;
  DECL_ATTRIBUTES (var) = tree_cons (get_identifier ("asan odr indicator"),
				     NULL, DECL_ATTRIBUTES (var));
  make_decl_rtl (var);
  varpool_node::finalize_decl (var);
  return fold_convert (uptr, build_fold_addr_expr (var));
}

/* Return true if DECL, a global var, might be overridden and needs
   an additional odr indicator symbol.  */

static bool
asan_needs_odr_indicator_p (tree decl)
{
  /* Don't emit ODR indicators for kernel because:
     a) Kernel is written in C thus doesn't need ODR indicators.
     b) Some kernel code may have assumptions about symbols containing
	specific patterns in their names.  Since ODR indicators contain
	original names of symbols they are emitted for, these assumptions
	would be broken for ODR indicator symbols.  */
  return (!(flag_sanitize & SANITIZE_KERNEL_ADDRESS)
	  && !DECL_ARTIFICIAL (decl)
	  && !DECL_WEAK (decl)
	  && TREE_PUBLIC (decl));
}

/* Append the description of a single global DECL into vector V.
   TYPE is the __asan_global struct type as returned by asan_global_struct.  */
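
/* A sketch of the descriptor appended for "int g;" defined in foo.c
   (names illustrative; field order follows asan_global_struct above):
     { &g (or its LASAN local alias), 4, 4 + redzone, "g", "foo.c",
       has_dynamic_init, &<LASANLOC source location> or 0,
       &__odr_asan indicator or 0 }  */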
|
|
2738
|
|
2739 static void
|
|
2740 asan_add_global (tree decl, tree type, vec<constructor_elt, va_gc> *v)
|
|
2741 {
|
|
2742 tree init, uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
|
|
2743 unsigned HOST_WIDE_INT size;
|
|
2744 tree str_cst, module_name_cst, refdecl = decl;
|
|
2745 vec<constructor_elt, va_gc> *vinner = NULL;
|
|
2746
|
|
2747 pretty_printer asan_pp, module_name_pp;
|
|
2748
|
|
2749 if (DECL_NAME (decl))
|
|
2750 pp_tree_identifier (&asan_pp, DECL_NAME (decl));
|
|
2751 else
|
|
2752 pp_string (&asan_pp, "<unknown>");
|
|
2753 str_cst = asan_pp_string (&asan_pp);
|
|
2754
|
|
2755 pp_string (&module_name_pp, main_input_filename);
|
|
2756 module_name_cst = asan_pp_string (&module_name_pp);
|
|
2757
|
|
2758 if (asan_needs_local_alias (decl))
|
|
2759 {
|
|
2760 char buf[20];
|
|
2761 ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", vec_safe_length (v) + 1);
|
|
2762 refdecl = build_decl (DECL_SOURCE_LOCATION (decl),
|
|
2763 VAR_DECL, get_identifier (buf), TREE_TYPE (decl));
|
|
2764 TREE_ADDRESSABLE (refdecl) = TREE_ADDRESSABLE (decl);
|
|
2765 TREE_READONLY (refdecl) = TREE_READONLY (decl);
|
|
2766 TREE_THIS_VOLATILE (refdecl) = TREE_THIS_VOLATILE (decl);
|
|
2767 DECL_GIMPLE_REG_P (refdecl) = DECL_GIMPLE_REG_P (decl);
|
|
2768 DECL_ARTIFICIAL (refdecl) = DECL_ARTIFICIAL (decl);
|
|
2769 DECL_IGNORED_P (refdecl) = DECL_IGNORED_P (decl);
|
|
2770 TREE_STATIC (refdecl) = 1;
|
|
2771 TREE_PUBLIC (refdecl) = 0;
|
|
2772 TREE_USED (refdecl) = 1;
|
|
2773 assemble_alias (refdecl, DECL_ASSEMBLER_NAME (decl));
|
|
2774 }
|
|
2775
|
|
2776 tree odr_indicator_ptr
|
|
2777 = (asan_needs_odr_indicator_p (decl) ? create_odr_indicator (decl, type)
|
|
2778 : build_int_cst (uptr, 0));
|
|
2779 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
|
|
2780 fold_convert (const_ptr_type_node,
|
|
2781 build_fold_addr_expr (refdecl)));
|
|
2782 size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
|
|
2783 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
|
|
2784 size += asan_red_zone_size (size);
|
|
2785 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
|
|
2786 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
|
|
2787 fold_convert (const_ptr_type_node, str_cst));
|
|
2788 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
|
|
2789 fold_convert (const_ptr_type_node, module_name_cst));
|
|
2790 varpool_node *vnode = varpool_node::get (decl);
|
|
2791 int has_dynamic_init = 0;
|
|
2792 /* FIXME: Enable initialization order fiasco detection in LTO mode once
|
|
2793 proper fix for PR 79061 will be applied. */
|
|
2794 if (!in_lto_p)
|
|
2795 has_dynamic_init = vnode ? vnode->dynamically_initialized : 0;
|
|
2796 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
|
|
2797 build_int_cst (uptr, has_dynamic_init));
|
|
2798 tree locptr = NULL_TREE;
|
|
2799 location_t loc = DECL_SOURCE_LOCATION (decl);
|
|
2800 expanded_location xloc = expand_location (loc);
|
|
2801 if (xloc.file != NULL)
|
|
2802 {
|
|
2803 static int lasanloccnt = 0;
|
|
2804 char buf[25];
|
|
2805 ASM_GENERATE_INTERNAL_LABEL (buf, "LASANLOC", ++lasanloccnt);
|
|
2806 tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
|
|
2807 ubsan_get_source_location_type ());
|
|
2808 TREE_STATIC (var) = 1;
|
|
2809 TREE_PUBLIC (var) = 0;
|
|
2810 DECL_ARTIFICIAL (var) = 1;
|
|
2811 DECL_IGNORED_P (var) = 1;
|
|
2812 pretty_printer filename_pp;
|
|
2813 pp_string (&filename_pp, xloc.file);
|
|
2814 tree str = asan_pp_string (&filename_pp);
|
|
2815 tree ctor = build_constructor_va (TREE_TYPE (var), 3,
|
|
2816 NULL_TREE, str, NULL_TREE,
|
|
2817 build_int_cst (unsigned_type_node,
|
|
2818 xloc.line), NULL_TREE,
|
|
2819 build_int_cst (unsigned_type_node,
|
|
2820 xloc.column));
|
|
2821 TREE_CONSTANT (ctor) = 1;
|
|
2822 TREE_STATIC (ctor) = 1;
|
|
2823 DECL_INITIAL (var) = ctor;
|
|
2824 varpool_node::finalize_decl (var);
|
|
2825 locptr = fold_convert (uptr, build_fold_addr_expr (var));
|
|
2826 }
|
|
2827 else
|
|
2828 locptr = build_int_cst (uptr, 0);
|
|
2829 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, locptr);
|
|
2830 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, odr_indicator_ptr);
|
|
2831 init = build_constructor (type, vinner);
|
|
2832 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, init);
|
|
2833 }
|
|
2834
|
|
2835 /* Initialize sanitizer.def builtins if the FE hasn't initialized them. */
|
|
2836 void
|
|
2837 initialize_sanitizer_builtins (void)
|
|
2838 {
|
|
2839 tree decl;
|
|
2840
|
|
2841 if (builtin_decl_implicit_p (BUILT_IN_ASAN_INIT))
|
|
2842 return;
|
|
2843
|
|
2844 tree BT_FN_VOID = build_function_type_list (void_type_node, NULL_TREE);
|
|
2845 tree BT_FN_VOID_PTR
|
|
2846 = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
|
|
2847 tree BT_FN_VOID_CONST_PTR
|
|
2848 = build_function_type_list (void_type_node, const_ptr_type_node, NULL_TREE);
|
|
2849 tree BT_FN_VOID_PTR_PTR
|
|
2850 = build_function_type_list (void_type_node, ptr_type_node,
|
|
2851 ptr_type_node, NULL_TREE);
|
|
2852 tree BT_FN_VOID_PTR_PTR_PTR
|
|
2853 = build_function_type_list (void_type_node, ptr_type_node,
|
|
2854 ptr_type_node, ptr_type_node, NULL_TREE);
|
|
2855 tree BT_FN_VOID_PTR_PTRMODE
|
|
2856 = build_function_type_list (void_type_node, ptr_type_node,
|
|
2857 pointer_sized_int_node, NULL_TREE);
|
|
2858 tree BT_FN_VOID_INT
|
|
2859 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
|
|
2860 tree BT_FN_SIZE_CONST_PTR_INT
|
|
2861 = build_function_type_list (size_type_node, const_ptr_type_node,
|
|
2862 integer_type_node, NULL_TREE);
|
|
2863
|
|
2864 tree BT_FN_VOID_UINT8_UINT8
|
|
2865 = build_function_type_list (void_type_node, unsigned_char_type_node,
|
|
2866 unsigned_char_type_node, NULL_TREE);
|
|
2867 tree BT_FN_VOID_UINT16_UINT16
|
|
2868 = build_function_type_list (void_type_node, uint16_type_node,
|
|
2869 uint16_type_node, NULL_TREE);
|
|
2870 tree BT_FN_VOID_UINT32_UINT32
|
|
2871 = build_function_type_list (void_type_node, uint32_type_node,
|
|
2872 uint32_type_node, NULL_TREE);
|
|
2873 tree BT_FN_VOID_UINT64_UINT64
|
|
2874 = build_function_type_list (void_type_node, uint64_type_node,
|
|
2875 uint64_type_node, NULL_TREE);
|
|
2876 tree BT_FN_VOID_FLOAT_FLOAT
|
|
2877 = build_function_type_list (void_type_node, float_type_node,
|
|
2878 float_type_node, NULL_TREE);
|
|
2879 tree BT_FN_VOID_DOUBLE_DOUBLE
|
|
2880 = build_function_type_list (void_type_node, double_type_node,
|
|
2881 double_type_node, NULL_TREE);
|
|
2882 tree BT_FN_VOID_UINT64_PTR
|
|
2883 = build_function_type_list (void_type_node, uint64_type_node,
|
|
2884 ptr_type_node, NULL_TREE);
|
|
2885
|
|
2886 tree BT_FN_BOOL_VPTR_PTR_IX_INT_INT[5];
|
|
2887 tree BT_FN_IX_CONST_VPTR_INT[5];
|
|
2888 tree BT_FN_IX_VPTR_IX_INT[5];
|
|
2889 tree BT_FN_VOID_VPTR_IX_INT[5];
|
|
2890 tree vptr
|
|
2891 = build_pointer_type (build_qualified_type (void_type_node,
|
|
2892 TYPE_QUAL_VOLATILE));
|
|
2893 tree cvptr
|
|
2894 = build_pointer_type (build_qualified_type (void_type_node,
|
|
2895 TYPE_QUAL_VOLATILE
|
|
2896 |TYPE_QUAL_CONST));
|
|
2897 tree boolt
|
|
2898 = lang_hooks.types.type_for_size (BOOL_TYPE_SIZE, 1);
|
|
2899 int i;
|
|
2900 for (i = 0; i < 5; i++)
|
|
2901 {
|
|
2902 tree ix = build_nonstandard_integer_type (BITS_PER_UNIT * (1 << i), 1);
|
|
2903 BT_FN_BOOL_VPTR_PTR_IX_INT_INT[i]
|
|
2904 = build_function_type_list (boolt, vptr, ptr_type_node, ix,
|
|
2905 integer_type_node, integer_type_node,
|
|
2906 NULL_TREE);
|
|
2907 BT_FN_IX_CONST_VPTR_INT[i]
|
|
2908 = build_function_type_list (ix, cvptr, integer_type_node, NULL_TREE);
|
|
2909 BT_FN_IX_VPTR_IX_INT[i]
|
|
2910 = build_function_type_list (ix, vptr, ix, integer_type_node,
|
|
2911 NULL_TREE);
|
|
2912 BT_FN_VOID_VPTR_IX_INT[i]
|
|
2913 = build_function_type_list (void_type_node, vptr, ix,
|
|
2914 integer_type_node, NULL_TREE);
|
|
2915 }
|
|
2916 #define BT_FN_BOOL_VPTR_PTR_I1_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[0]
|
|
2917 #define BT_FN_I1_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[0]
|
|
2918 #define BT_FN_I1_VPTR_I1_INT BT_FN_IX_VPTR_IX_INT[0]
|
|
2919 #define BT_FN_VOID_VPTR_I1_INT BT_FN_VOID_VPTR_IX_INT[0]
|
|
2920 #define BT_FN_BOOL_VPTR_PTR_I2_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[1]
|
|
2921 #define BT_FN_I2_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[1]
|
|
2922 #define BT_FN_I2_VPTR_I2_INT BT_FN_IX_VPTR_IX_INT[1]
|
|
2923 #define BT_FN_VOID_VPTR_I2_INT BT_FN_VOID_VPTR_IX_INT[1]
|
|
2924 #define BT_FN_BOOL_VPTR_PTR_I4_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[2]
|
|
2925 #define BT_FN_I4_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[2]
|
|
2926 #define BT_FN_I4_VPTR_I4_INT BT_FN_IX_VPTR_IX_INT[2]
|
|
2927 #define BT_FN_VOID_VPTR_I4_INT BT_FN_VOID_VPTR_IX_INT[2]
|
|
2928 #define BT_FN_BOOL_VPTR_PTR_I8_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[3]
|
|
2929 #define BT_FN_I8_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[3]
|
|
2930 #define BT_FN_I8_VPTR_I8_INT BT_FN_IX_VPTR_IX_INT[3]
|
|
2931 #define BT_FN_VOID_VPTR_I8_INT BT_FN_VOID_VPTR_IX_INT[3]
|
|
2932 #define BT_FN_BOOL_VPTR_PTR_I16_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[4]
|
|
2933 #define BT_FN_I16_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[4]
|
|
2934 #define BT_FN_I16_VPTR_I16_INT BT_FN_IX_VPTR_IX_INT[4]
|
|
2935 #define BT_FN_VOID_VPTR_I16_INT BT_FN_VOID_VPTR_IX_INT[4]
#undef ATTR_NOTHROW_LEAF_LIST
#define ATTR_NOTHROW_LEAF_LIST ECF_NOTHROW | ECF_LEAF
#undef ATTR_TMPURE_NOTHROW_LEAF_LIST
#define ATTR_TMPURE_NOTHROW_LEAF_LIST ECF_TM_PURE | ATTR_NOTHROW_LEAF_LIST
#undef ATTR_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_NORETURN_NOTHROW_LEAF_LIST ECF_NORETURN | ATTR_NOTHROW_LEAF_LIST
#undef ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST \
  ECF_CONST | ATTR_NORETURN_NOTHROW_LEAF_LIST
#undef ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST \
  ECF_TM_PURE | ATTR_NORETURN_NOTHROW_LEAF_LIST
#undef ATTR_COLD_NOTHROW_LEAF_LIST
#define ATTR_COLD_NOTHROW_LEAF_LIST \
  /* ECF_COLD missing */ ATTR_NOTHROW_LEAF_LIST
#undef ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST \
  /* ECF_COLD missing */ ATTR_NORETURN_NOTHROW_LEAF_LIST
#undef ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST \
  /* ECF_COLD missing */ ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
#undef ATTR_PURE_NOTHROW_LEAF_LIST
#define ATTR_PURE_NOTHROW_LEAF_LIST ECF_PURE | ATTR_NOTHROW_LEAF_LIST
#undef DEF_BUILTIN_STUB
#define DEF_BUILTIN_STUB(ENUM, NAME)
#undef DEF_SANITIZER_BUILTIN_1
#define DEF_SANITIZER_BUILTIN_1(ENUM, NAME, TYPE, ATTRS)       \
  do {                                                         \
    decl = add_builtin_function ("__builtin_" NAME, TYPE, ENUM,        \
                                 BUILT_IN_NORMAL, NAME, NULL_TREE);    \
    set_call_expr_flags (decl, ATTRS);                         \
    set_builtin_decl (ENUM, decl, true);                       \
  } while (0)
#undef DEF_SANITIZER_BUILTIN
#define DEF_SANITIZER_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
  DEF_SANITIZER_BUILTIN_1 (ENUM, NAME, TYPE, ATTRS);
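
/* Illustrative expansion (a sketch; the real entries live in
   sanitizer.def): an entry such as

     DEF_SANITIZER_BUILTIN (BUILT_IN_ASAN_INIT, "__asan_init",
                            BT_FN_VOID, ATTR_NOTHROW_LEAF_LIST)

   expands via DEF_SANITIZER_BUILTIN_1 into an add_builtin_function call
   that registers __builtin___asan_init with the given type, applies the
   ECF_* flags via set_call_expr_flags, and records the decl with
   set_builtin_decl.  */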

#include "sanitizer.def"

  /* -fsanitize=object-size uses __builtin_object_size, but that might
     not be available for e.g. Fortran at this point.  We use
     DEF_SANITIZER_BUILTIN_1 here only as a convenience macro.  */
  if ((flag_sanitize & SANITIZE_OBJECT_SIZE)
      && !builtin_decl_implicit_p (BUILT_IN_OBJECT_SIZE))
    DEF_SANITIZER_BUILTIN_1 (BUILT_IN_OBJECT_SIZE, "object_size",
                             BT_FN_SIZE_CONST_PTR_INT,
                             ATTR_PURE_NOTHROW_LEAF_LIST);

#undef DEF_SANITIZER_BUILTIN_1
#undef DEF_SANITIZER_BUILTIN
#undef DEF_BUILTIN_STUB
}

/* Called via hash_table::traverse.  Count the number of emitted
   STRING_CSTs in the constant hash table.  */

int
count_string_csts (constant_descriptor_tree **slot,
                   unsigned HOST_WIDE_INT *data)
{
  struct constant_descriptor_tree *desc = *slot;
  if (TREE_CODE (desc->value) == STRING_CST
      && TREE_ASM_WRITTEN (desc->value)
      && asan_protect_global (desc->value))
    ++*data;
  return 1;
}

/* Helper structure to pass two parameters to
   add_string_csts.  */

struct asan_add_string_csts_data
{
  tree type;
  vec<constructor_elt, va_gc> *v;
};

/* Called via hash_table::traverse.  Call asan_add_global
   on emitted STRING_CSTs from the constant hash table.  */

int
add_string_csts (constant_descriptor_tree **slot,
                 asan_add_string_csts_data *aascd)
{
  struct constant_descriptor_tree *desc = *slot;
  if (TREE_CODE (desc->value) == STRING_CST
      && TREE_ASM_WRITTEN (desc->value)
      && asan_protect_global (desc->value))
    {
      asan_add_global (SYMBOL_REF_DECL (XEXP (desc->rtl, 0)),
                       aascd->type, aascd->v);
    }
  return 1;
}
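
/* String literals need this separate traversal because they are emitted
   through the constant pool rather than through varpool, so the varpool
   walk in asan_finish_file below would miss them; the VAR_DECL behind
   each protected string is recovered from the descriptor's RTL.  */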

/* Needs to be GTY(()), because cgraph_build_static_cdtor may
   invoke ggc_collect.  */
static GTY(()) tree asan_ctor_statements;

/* Module-level instrumentation.
   - Insert __asan_init_vN() into the list of CTORs.
   - TODO: insert redzones around globals.
 */
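
/* For orientation, a sketch (not code the compiler consumes) of what the
   constructor built below amounts to for user-space ASan:

     static void asan_module_ctor (void)   // high-priority static ctor
     {
       __asan_init ();
       __asan_version_mismatch_check_vN ();
       __asan_register_globals (&.LASAN0, gcount);  // if any protected globals
     }

   together with a matching static dtor calling __asan_unregister_globals.  */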

void
asan_finish_file (void)
{
  varpool_node *vnode;
  unsigned HOST_WIDE_INT gcount = 0;

  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();
  /* Avoid instrumenting code in the asan ctors/dtors.
     We don't need to insert padding after the description strings,
     nor after .LASAN* array.  */
  flag_sanitize &= ~SANITIZE_ADDRESS;

  /* For user-space we want the asan constructors to run first.
     The Linux kernel does not support constructor priorities other than
     the default, and its only other user of constructors is coverage,
     so there we run with the default priority.  */
  int priority = flag_sanitize & SANITIZE_USER_ADDRESS
                 ? MAX_RESERVED_INIT_PRIORITY - 1 : DEFAULT_INIT_PRIORITY;

  if (flag_sanitize & SANITIZE_USER_ADDRESS)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_ASAN_INIT);
      append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
      fn = builtin_decl_implicit (BUILT_IN_ASAN_VERSION_MISMATCH_CHECK);
      append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
    }
  FOR_EACH_DEFINED_VARIABLE (vnode)
    if (TREE_ASM_WRITTEN (vnode->decl)
        && asan_protect_global (vnode->decl))
      ++gcount;
  hash_table<tree_descriptor_hasher> *const_desc_htab = constant_pool_htab ();
  const_desc_htab->traverse<unsigned HOST_WIDE_INT *, count_string_csts>
    (&gcount);
  if (gcount)
    {
      tree type = asan_global_struct (), var, ctor;
      tree dtor_statements = NULL_TREE;
      vec<constructor_elt, va_gc> *v;
      char buf[20];

      type = build_array_type_nelts (type, gcount);
      ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", 0);
      var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
                        type);
      TREE_STATIC (var) = 1;
      TREE_PUBLIC (var) = 0;
      DECL_ARTIFICIAL (var) = 1;
      DECL_IGNORED_P (var) = 1;
      vec_alloc (v, gcount);
      FOR_EACH_DEFINED_VARIABLE (vnode)
        if (TREE_ASM_WRITTEN (vnode->decl)
            && asan_protect_global (vnode->decl))
          asan_add_global (vnode->decl, TREE_TYPE (type), v);
      struct asan_add_string_csts_data aascd;
      aascd.type = TREE_TYPE (type);
      aascd.v = v;
      const_desc_htab->traverse<asan_add_string_csts_data *, add_string_csts>
        (&aascd);
      ctor = build_constructor (type, v);
      TREE_CONSTANT (ctor) = 1;
      TREE_STATIC (ctor) = 1;
      DECL_INITIAL (var) = ctor;
      SET_DECL_ALIGN (var, MAX (DECL_ALIGN (var),
                                ASAN_SHADOW_GRANULARITY * BITS_PER_UNIT));

      varpool_node::finalize_decl (var);

      tree fn = builtin_decl_implicit (BUILT_IN_ASAN_REGISTER_GLOBALS);
      tree gcount_tree = build_int_cst (pointer_sized_int_node, gcount);
      append_to_statement_list (build_call_expr (fn, 2,
                                                 build_fold_addr_expr (var),
                                                 gcount_tree),
                                &asan_ctor_statements);

      fn = builtin_decl_implicit (BUILT_IN_ASAN_UNREGISTER_GLOBALS);
      append_to_statement_list (build_call_expr (fn, 2,
                                                 build_fold_addr_expr (var),
                                                 gcount_tree),
                                &dtor_statements);
      cgraph_build_static_cdtor ('D', dtor_statements, priority);
    }
  if (asan_ctor_statements)
    cgraph_build_static_cdtor ('I', asan_ctor_statements, priority);
  flag_sanitize |= SANITIZE_ADDRESS;
}

/* Poison or unpoison (depending on IS_CLOBBER) the shadow memory that
   SHADOW points to.  Newly added statements are inserted at ITER with
   the given location LOC.  We mark SIZE bytes of shadow memory;
   LAST_CHUNK_SIZE is greater than zero when we are at the end of a
   variable whose size is not a multiple of the shadow granularity.  */

static void
asan_store_shadow_bytes (gimple_stmt_iterator *iter, location_t loc,
                         tree shadow,
                         unsigned HOST_WIDE_INT base_addr_offset,
                         bool is_clobber, unsigned size,
                         unsigned last_chunk_size)
{
  tree shadow_ptr_type;

  switch (size)
    {
    case 1:
      shadow_ptr_type = shadow_ptr_types[0];
      break;
    case 2:
      shadow_ptr_type = shadow_ptr_types[1];
      break;
    case 4:
      shadow_ptr_type = shadow_ptr_types[2];
      break;
    default:
      gcc_unreachable ();
    }

  unsigned char c = is_clobber ? ASAN_STACK_MAGIC_USE_AFTER_SCOPE : 0;
  unsigned HOST_WIDE_INT val = 0;
  unsigned last_pos = size;
  if (last_chunk_size && !is_clobber)
    last_pos = BYTES_BIG_ENDIAN ? 0 : size - 1;
  for (unsigned i = 0; i < size; ++i)
    {
      unsigned char shadow_c = c;
      if (i == last_pos)
        shadow_c = last_chunk_size;
      val |= (unsigned HOST_WIDE_INT) shadow_c << (BITS_PER_UNIT * i);
    }

  /* VAL now also encodes the partial last chunk when unpoisoning.  */
  tree magic = build_int_cst (TREE_TYPE (shadow_ptr_type), val);

  tree dest = build2 (MEM_REF, TREE_TYPE (shadow_ptr_type), shadow,
                      build_int_cst (shadow_ptr_type, base_addr_offset));

  gimple *g = gimple_build_assign (dest, magic);
  gimple_set_location (g, loc);
  gsi_insert_after (iter, g, GSI_NEW_STMT);
}
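
/* Worked example (assuming the usual 8-byte shadow granularity):
   unpoisoning a 10-byte variable stores SIZE == 2 shadow bytes; the
   first becomes 0 (all 8 bytes of the granule addressable) and the last
   becomes LAST_CHUNK_SIZE == 2 (only the first 2 bytes of the final
   granule addressable).  Poisoning instead fills both shadow bytes with
   ASAN_STACK_MAGIC_USE_AFTER_SCOPE.  */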

/* Expand the ASAN_MARK builtins.  */

bool
asan_expand_mark_ifn (gimple_stmt_iterator *iter)
{
  gimple *g = gsi_stmt (*iter);
  location_t loc = gimple_location (g);
  HOST_WIDE_INT flag = tree_to_shwi (gimple_call_arg (g, 0));
  bool is_poison = ((asan_mark_flags)flag) == ASAN_MARK_POISON;

  tree base = gimple_call_arg (g, 1);
  gcc_checking_assert (TREE_CODE (base) == ADDR_EXPR);
  tree decl = TREE_OPERAND (base, 0);

  /* For a nested function, we can have: ASAN_MARK (2, &FRAME.2.fp_input, 4) */
  if (TREE_CODE (decl) == COMPONENT_REF
      && DECL_NONLOCAL_FRAME (TREE_OPERAND (decl, 0)))
    decl = TREE_OPERAND (decl, 0);

  gcc_checking_assert (TREE_CODE (decl) == VAR_DECL);

  if (is_poison)
    {
      if (asan_handled_variables == NULL)
        asan_handled_variables = new hash_set<tree> (16);
      asan_handled_variables->add (decl);
    }
  tree len = gimple_call_arg (g, 2);

  gcc_assert (tree_fits_shwi_p (len));
  unsigned HOST_WIDE_INT size_in_bytes = tree_to_shwi (len);
  gcc_assert (size_in_bytes);

  g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
                           NOP_EXPR, base);
  gimple_set_location (g, loc);
  gsi_replace (iter, g, false);
  tree base_addr = gimple_assign_lhs (g);

  /* Generate direct emission if size_in_bytes is small.  */
  if (size_in_bytes
      <= (unsigned) param_use_after_scope_direct_emission_threshold)
    {
      const unsigned HOST_WIDE_INT shadow_size
        = shadow_mem_size (size_in_bytes);
      const unsigned int shadow_align
        = (get_pointer_alignment (base) / BITS_PER_UNIT) >> ASAN_SHADOW_SHIFT;

      tree shadow = build_shadow_mem_access (iter, loc, base_addr,
                                             shadow_ptr_types[0], true);

      for (unsigned HOST_WIDE_INT offset = 0; offset < shadow_size;)
        {
          unsigned size = 1;
          if (shadow_size - offset >= 4
              && (!STRICT_ALIGNMENT || shadow_align >= 4))
            size = 4;
          else if (shadow_size - offset >= 2
                   && (!STRICT_ALIGNMENT || shadow_align >= 2))
            size = 2;

          unsigned HOST_WIDE_INT last_chunk_size = 0;
          unsigned HOST_WIDE_INT s = (offset + size) * ASAN_SHADOW_GRANULARITY;
          if (s > size_in_bytes)
            last_chunk_size = ASAN_SHADOW_GRANULARITY - (s - size_in_bytes);

          asan_store_shadow_bytes (iter, loc, shadow, offset, is_poison,
                                   size, last_chunk_size);
          offset += size;
        }
    }
  else
    {
      g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
                               NOP_EXPR, len);
      gimple_set_location (g, loc);
      gsi_insert_before (iter, g, GSI_SAME_STMT);
      tree sz_arg = gimple_assign_lhs (g);

      tree fun
        = builtin_decl_implicit (is_poison ? BUILT_IN_ASAN_POISON_STACK_MEMORY
                                 : BUILT_IN_ASAN_UNPOISON_STACK_MEMORY);
      g = gimple_build_call (fun, 2, base_addr, sz_arg);
      gimple_set_location (g, loc);
      gsi_insert_after (iter, g, GSI_NEW_STMT);
    }

  return false;
}
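
/* Illustration of the two strategies above (a sketch): for a small
   variable, ASAN_MARK (POISON, &x, 10) becomes a handful of direct
   shadow stores through asan_store_shadow_bytes, while above the
   use-after-scope direct-emission threshold it instead becomes a
   run-time call __asan_poison_stack_memory (&x, 10) (or the unpoison
   counterpart).  */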

/* Expand the ASAN_{LOAD,STORE} builtins.  */

bool
asan_expand_check_ifn (gimple_stmt_iterator *iter, bool use_calls)
{
  gimple *g = gsi_stmt (*iter);
  location_t loc = gimple_location (g);
  bool recover_p;
  if (flag_sanitize & SANITIZE_USER_ADDRESS)
    recover_p = (flag_sanitize_recover & SANITIZE_USER_ADDRESS) != 0;
  else
    recover_p = (flag_sanitize_recover & SANITIZE_KERNEL_ADDRESS) != 0;

  HOST_WIDE_INT flags = tree_to_shwi (gimple_call_arg (g, 0));
  gcc_assert (flags < ASAN_CHECK_LAST);
  bool is_scalar_access = (flags & ASAN_CHECK_SCALAR_ACCESS) != 0;
  bool is_store = (flags & ASAN_CHECK_STORE) != 0;
  bool is_non_zero_len = (flags & ASAN_CHECK_NON_ZERO_LEN) != 0;

  tree base = gimple_call_arg (g, 1);
  tree len = gimple_call_arg (g, 2);
  HOST_WIDE_INT align = tree_to_shwi (gimple_call_arg (g, 3));

  HOST_WIDE_INT size_in_bytes
    = is_scalar_access && tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;

  if (use_calls)
    {
      /* Instrument using callbacks.  */
      gimple *g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
                                       NOP_EXPR, base);
      gimple_set_location (g, loc);
      gsi_insert_before (iter, g, GSI_SAME_STMT);
      tree base_addr = gimple_assign_lhs (g);

      int nargs;
      tree fun = check_func (is_store, recover_p, size_in_bytes, &nargs);
      if (nargs == 1)
        g = gimple_build_call (fun, 1, base_addr);
      else
        {
          gcc_assert (nargs == 2);
          g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
                                   NOP_EXPR, len);
          gimple_set_location (g, loc);
          gsi_insert_before (iter, g, GSI_SAME_STMT);
          tree sz_arg = gimple_assign_lhs (g);
          g = gimple_build_call (fun, nargs, base_addr, sz_arg);
        }
      gimple_set_location (g, loc);
      gsi_replace (iter, g, false);
      return false;
    }
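
  /* E.g. for a known 4-byte store, the callback variant above emits a
     single call __asan_store4 (base_addr) instead of the inline shadow
     test built below; the exact callback (including _noabort variants
     for recovery) is chosen by check_func.  Callers typically enable
     USE_CALLS via --param asan-instrumentation-with-call-threshold.  */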

  HOST_WIDE_INT real_size_in_bytes = size_in_bytes == -1 ? 1 : size_in_bytes;

  tree shadow_ptr_type = shadow_ptr_types[real_size_in_bytes == 16 ? 1 : 0];
  tree shadow_type = TREE_TYPE (shadow_ptr_type);

  gimple_stmt_iterator gsi = *iter;

  if (!is_non_zero_len)
    {
      /* So, the length of the memory area to asan-protect is
         non-constant.  Let's guard the generated instrumentation code
         like:

         if (len != 0)
           {
             // asan instrumentation code goes here.
           }
         // fallthrough instructions, starting with *ITER.  */

      g = gimple_build_cond (NE_EXPR,
                             len,
                             build_int_cst (TREE_TYPE (len), 0),
                             NULL_TREE, NULL_TREE);
      gimple_set_location (g, loc);

      basic_block then_bb, fallthrough_bb;
      insert_if_then_before_iter (as_a <gcond *> (g), iter,
                                  /*then_more_likely_p=*/true,
                                  &then_bb, &fallthrough_bb);
      /* Note that fallthrough_bb starts with the statement that was
         pointed to by ITER.  */

      /* The 'then block' of the 'if (len != 0)' condition is where
         we'll generate the asan instrumentation code now.  */
      gsi = gsi_last_bb (then_bb);
    }

  /* Get an iterator on the point where we can add the condition
     statement for the instrumentation.  */
  basic_block then_bb, else_bb;
  gsi = create_cond_insert_point (&gsi, /*before_p*/false,
                                  /*then_more_likely_p=*/false,
                                  /*create_then_fallthru_edge*/recover_p,
                                  &then_bb,
                                  &else_bb);

  g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
                           NOP_EXPR, base);
  gimple_set_location (g, loc);
  gsi_insert_before (&gsi, g, GSI_NEW_STMT);
  tree base_addr = gimple_assign_lhs (g);

  tree t = NULL_TREE;
  if (real_size_in_bytes >= 8)
    {
      tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
                                             shadow_ptr_type);
      t = shadow;
    }
  else
    {
      /* Slow path for 1-, 2- and 4-byte accesses.  */
      /* Test (shadow != 0)
         & ((base_addr & 7) + (real_size_in_bytes - 1) >= shadow).  */
      tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
                                             shadow_ptr_type);
      gimple *shadow_test = build_assign (NE_EXPR, shadow, 0);
      gimple_seq seq = NULL;
      gimple_seq_add_stmt (&seq, shadow_test);
      /* Aligned (>= 8 bytes) accesses can test just
         (real_size_in_bytes - 1 >= shadow), as base_addr & 7 is known
         to be 0.  */
      if (align < 8)
        {
          gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
                                                   base_addr, 7));
          gimple_seq_add_stmt (&seq,
                               build_type_cast (shadow_type,
                                                gimple_seq_last (seq)));
          if (real_size_in_bytes > 1)
            gimple_seq_add_stmt (&seq,
                                 build_assign (PLUS_EXPR,
                                               gimple_seq_last (seq),
                                               real_size_in_bytes - 1));
          t = gimple_assign_lhs (gimple_seq_last_stmt (seq));
        }
      else
        t = build_int_cst (shadow_type, real_size_in_bytes - 1);
      gimple_seq_add_stmt (&seq, build_assign (GE_EXPR, t, shadow));
      gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
                                               gimple_seq_last (seq)));
      t = gimple_assign_lhs (gimple_seq_last (seq));
      gimple_seq_set_location (seq, loc);
      gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);

      /* For non-constant, misaligned or otherwise weird access sizes,
         check the first and the last byte.  */
      if (size_in_bytes == -1)
        {
          g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
                                   MINUS_EXPR, len,
                                   build_int_cst (pointer_sized_int_node, 1));
          gimple_set_location (g, loc);
          gsi_insert_after (&gsi, g, GSI_NEW_STMT);
          tree last = gimple_assign_lhs (g);
          g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
                                   PLUS_EXPR, base_addr, last);
          gimple_set_location (g, loc);
          gsi_insert_after (&gsi, g, GSI_NEW_STMT);
          tree base_end_addr = gimple_assign_lhs (g);

          tree shadow = build_shadow_mem_access (&gsi, loc, base_end_addr,
                                                 shadow_ptr_type);
          gimple *shadow_test = build_assign (NE_EXPR, shadow, 0);
          gimple_seq seq = NULL;
          gimple_seq_add_stmt (&seq, shadow_test);
          gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
                                                   base_end_addr, 7));
          gimple_seq_add_stmt (&seq, build_type_cast (shadow_type,
                                                      gimple_seq_last (seq)));
          gimple_seq_add_stmt (&seq, build_assign (GE_EXPR,
                                                   gimple_seq_last (seq),
                                                   shadow));
          gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
                                                   gimple_seq_last (seq)));
          gimple_seq_add_stmt (&seq, build_assign (BIT_IOR_EXPR, t,
                                                   gimple_seq_last (seq)));
          t = gimple_assign_lhs (gimple_seq_last (seq));
          gimple_seq_set_location (seq, loc);
          gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
        }
    }

  g = gimple_build_cond (NE_EXPR, t, build_int_cst (TREE_TYPE (t), 0),
                         NULL_TREE, NULL_TREE);
  gimple_set_location (g, loc);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

  /* Generate call to the run-time library (e.g. __asan_report_load8).  */
  gsi = gsi_start_bb (then_bb);
  int nargs;
  tree fun = report_error_func (is_store, recover_p, size_in_bytes, &nargs);
  g = gimple_build_call (fun, nargs, base_addr, len);
  gimple_set_location (g, loc);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

  gsi_remove (iter, true);
  *iter = gsi_start_bb (else_bb);

  return true;
}
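
/* End-to-end sketch of the inline expansion above for a 1-byte load
   (with ASAN_SHADOW_SHIFT == 3 and the target's shadow offset):

     shadow = *(char *) ((base_addr >> 3) + shadow_offset);
     if (shadow != 0 && (base_addr & 7) >= shadow)
       __asan_report_load1 (base_addr);

   Wider sub-granule accesses add (real_size_in_bytes - 1) to the
   compared value, and 8- and 16-byte accesses only test the shadow
   byte(s) for being non-zero.  */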

/* Create an ASAN shadow variable for a VAR_DECL which has been rewritten
   into SSA.  Already seen VAR_DECLs are stored in SHADOW_VARS_MAPPING.  */

static tree
create_asan_shadow_var (tree var_decl,
                        hash_map<tree, tree> &shadow_vars_mapping)
{
  tree *slot = shadow_vars_mapping.get (var_decl);
  if (slot == NULL)
    {
      tree shadow_var = copy_node (var_decl);

      copy_body_data id;
      memset (&id, 0, sizeof (copy_body_data));
      id.src_fn = id.dst_fn = current_function_decl;
      copy_decl_for_dup_finish (&id, var_decl, shadow_var);

      DECL_ARTIFICIAL (shadow_var) = 1;
      DECL_IGNORED_P (shadow_var) = 1;
      DECL_SEEN_IN_BIND_EXPR_P (shadow_var) = 0;
      gimple_add_tmp_var (shadow_var);

      shadow_vars_mapping.put (var_decl, shadow_var);
      return shadow_var;
    }
  else
    return *slot;
}

/* Expand ASAN_POISON ifn.  */

bool
asan_expand_poison_ifn (gimple_stmt_iterator *iter,
                        bool *need_commit_edge_insert,
                        hash_map<tree, tree> &shadow_vars_mapping)
{
  gimple *g = gsi_stmt (*iter);
  tree poisoned_var = gimple_call_lhs (g);
  if (!poisoned_var || has_zero_uses (poisoned_var))
    {
      gsi_remove (iter, true);
      return true;
    }

  if (SSA_NAME_VAR (poisoned_var) == NULL_TREE)
    SET_SSA_NAME_VAR_OR_IDENTIFIER (poisoned_var,
                                    create_tmp_var (TREE_TYPE (poisoned_var)));

  tree shadow_var = create_asan_shadow_var (SSA_NAME_VAR (poisoned_var),
                                            shadow_vars_mapping);

  bool recover_p;
  if (flag_sanitize & SANITIZE_USER_ADDRESS)
    recover_p = (flag_sanitize_recover & SANITIZE_USER_ADDRESS) != 0;
  else
    recover_p = (flag_sanitize_recover & SANITIZE_KERNEL_ADDRESS) != 0;
  tree size = DECL_SIZE_UNIT (shadow_var);
  gimple *poison_call
    = gimple_build_call_internal (IFN_ASAN_MARK, 3,
                                  build_int_cst (integer_type_node,
                                                 ASAN_MARK_POISON),
                                  build_fold_addr_expr (shadow_var), size);

  gimple *use;
  imm_use_iterator imm_iter;
  FOR_EACH_IMM_USE_STMT (use, imm_iter, poisoned_var)
    {
      if (is_gimple_debug (use))
        continue;

      int nargs;
      bool store_p = gimple_call_internal_p (use, IFN_ASAN_POISON_USE);
      tree fun = report_error_func (store_p, recover_p, tree_to_uhwi (size),
                                    &nargs);

      gcall *call = gimple_build_call (fun, 1,
                                       build_fold_addr_expr (shadow_var));
      gimple_set_location (call, gimple_location (use));
      gimple *call_to_insert = call;

      /* The USE can be a gimple PHI node.  If so, insert the call on
         all edges leading to the PHI node.  */
      if (is_a <gphi *> (use))
        {
          gphi *phi = dyn_cast <gphi *> (use);
          for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
            if (gimple_phi_arg_def (phi, i) == poisoned_var)
              {
                edge e = gimple_phi_arg_edge (phi, i);

                /* Do not insert on an edge we can't split.  */
                if (e->flags & EDGE_ABNORMAL)
                  continue;

                if (call_to_insert == NULL)
                  call_to_insert = gimple_copy (call);

                gsi_insert_seq_on_edge (e, call_to_insert);
                *need_commit_edge_insert = true;
                call_to_insert = NULL;
              }
        }
      else
        {
          gimple_stmt_iterator gsi = gsi_for_stmt (use);
          if (store_p)
            gsi_replace (&gsi, call, true);
          else
            gsi_insert_before (&gsi, call, GSI_NEW_STMT);
        }
    }

  SSA_NAME_IS_DEFAULT_DEF (poisoned_var) = true;
  SSA_NAME_DEF_STMT (poisoned_var) = gimple_build_nop ();
  gsi_replace (iter, poison_call, false);

  return true;
}
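
/* Sketch of the transformation above (illustrative GIMPLE): given

     x_5 = ASAN_POISON ();
     ...
     _1 = x_5;   // use of an out-of-scope value

   the definition is replaced by ASAN_MARK (POISON, &x_shadow, size) on a
   freshly created shadow copy of the variable, and each real use gets a
   preceding __asan_report_load<size> (&x_shadow) call (stores through
   IFN_ASAN_POISON_USE are replaced outright), so reaching such a use
   reports a use-after-scope error at run time.  */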

/* Instrument the current function.  */

static unsigned int
asan_instrument (void)
{
  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();
  transform_statements ();
  last_alloca_addr = NULL_TREE;
  return 0;
}

static bool
gate_asan (void)
{
  return sanitize_flags_p (SANITIZE_ADDRESS);
}

namespace {

const pass_data pass_data_asan =
{
  GIMPLE_PASS, /* type */
  "asan", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_asan : public gimple_opt_pass
{
public:
  pass_asan (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_asan, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_asan (m_ctxt); }
  virtual bool gate (function *) { return gate_asan (); }
  virtual unsigned int execute (function *) { return asan_instrument (); }

}; // class pass_asan

} // anon namespace

gimple_opt_pass *
make_pass_asan (gcc::context *ctxt)
{
  return new pass_asan (ctxt);
}
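
/* Note: the pass is scheduled from passes.def via make_pass_asan; the
   clone method above lets the pass manager instantiate "asan" more than
   once in the pipeline.  */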

namespace {

const pass_data pass_data_asan_O0 =
{
  GIMPLE_PASS, /* type */
  "asan0", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_asan_O0 : public gimple_opt_pass
{
public:
  pass_asan_O0 (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_asan_O0, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return !optimize && gate_asan (); }
  virtual unsigned int execute (function *) { return asan_instrument (); }

}; // class pass_asan_O0

} // anon namespace

gimple_opt_pass *
make_pass_asan_O0 (gcc::context *ctxt)
{
  return new pass_asan_O0 (ctxt);
}

#include "gt-asan.h"