comparison gcc/asan.c @ 132:d34655255c78

update gcc-8.2
author mir3636
date Thu, 25 Oct 2018 10:21:07 +0900
parents 84e7813d76e9
children 1830386684a0
comparing 130:e108057fa461 with 132:d34655255c78
@@ -1 +1 @@
 /* AddressSanitizer, a fast memory error detector.
-   Copyright (C) 2012-2017 Free Software Foundation, Inc.
+   Copyright (C) 2012-2018 Free Software Foundation, Inc.
    Contributed by Kostya Serebryany <kcc@google.com>

 This file is part of GCC.

 GCC is free software; you can redistribute it and/or modify it under
@@ -251 +251 @@
 static tree last_alloca_addr;

 /* Set of variable declarations that are going to be guarded by
    use-after-scope sanitizer.  */

-static hash_set<tree> *asan_handled_variables = NULL;
+hash_set<tree> *asan_handled_variables = NULL;

 hash_set <tree> *asan_used_labels = NULL;

 /* Sets shadow offset to value in string VAL.  */

@@ -552 +552 @@
   edge e = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
   gsi_insert_on_edge_immediate (e, g);
   return last_alloca_addr;
 }

-/* Insert __asan_allocas_unpoison (top, bottom) call after
+/* Insert __asan_allocas_unpoison (top, bottom) call before
    __builtin_stack_restore (new_sp) call.
    The pseudocode of this routine should look like this:
-     __builtin_stack_restore (new_sp);
      top = last_alloca_addr;
      bot = new_sp;
      __asan_allocas_unpoison (top, bot);
      last_alloca_addr = new_sp;
+     __builtin_stack_restore (new_sp);
    In general, we can't use new_sp as bot parameter because on some
    architectures SP has non zero offset from dynamic stack area. Moreover, on
    some architectures this offset (STACK_DYNAMIC_OFFSET) becomes known for each
    particular function only after all callees were expanded to rtl.
    The most noticeable example is PowerPC{,64}, see
    http://refspecs.linuxfoundation.org/ELF/ppc64/PPC-elf64abi.html#DYNAM-STACK.
    To overcome the issue we use following trick: pass new_sp as a second
    parameter to __asan_allocas_unpoison and rewrite it during expansion with
-   virtual_dynamic_stack_rtx later in expand_asan_emit_allocas_unpoison
-   function.
-*/
+   new_sp + (virtual_dynamic_stack_rtx - sp) later in
+   expand_asan_emit_allocas_unpoison function.  */

 static void
 handle_builtin_stack_restore (gcall *call, gimple_stmt_iterator *iter)
 {
   if (!iter || !asan_sanitize_allocas_p ())
@@ -582 +581 @@

   tree last_alloca = get_last_alloca_addr ();
   tree restored_stack = gimple_call_arg (call, 0);
   tree fn = builtin_decl_implicit (BUILT_IN_ASAN_ALLOCAS_UNPOISON);
   gimple *g = gimple_build_call (fn, 2, last_alloca, restored_stack);
-  gsi_insert_after (iter, g, GSI_NEW_STMT);
+  gsi_insert_before (iter, g, GSI_SAME_STMT);
   g = gimple_build_assign (last_alloca, restored_stack);
-  gsi_insert_after (iter, g, GSI_NEW_STMT);
+  gsi_insert_before (iter, g, GSI_SAME_STMT);
 }

 /* Deploy and poison redzones around __builtin_alloca call. To do this, we
    should replace this call with another one with changed parameters and
    replace all its uses with new address, so
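For context on the hunk above: handle_builtin_stack_restore now emits the unpoisoning call before the __builtin_stack_restore it belongs to, instead of after it. A small, self-contained illustration of the kind of source that exercises this path; the __asan_* placement shown in the comment is conceptual, taken from the routine's own pseudocode, not literal compiler output:

#include <string.h>

/* Each iteration of the loop below creates a VLA; when control leaves the
   block, GCC emits a __builtin_stack_restore for it.  With
   -fsanitize=address the pass patched above arranges, conceptually:

     top = last_alloca_addr;
     bot = new_sp;
     __asan_allocas_unpoison (top, bot);    <- clear poison of dead blocks
     last_alloca_addr = new_sp;
     __builtin_stack_restore (new_sp);      <- only then move SP back

   i.e. after this change the shadow memory is cleaned before the stack
   pointer is restored, not after.  */
int
sum_first_bytes (const char *const *rows, int n)
{
  int total = 0;
  for (int i = 0; i < n; i++)
    {
      int len = (int) strlen (rows[i]) + 1;
      char copy[len];                  /* VLA: lowered to an alloca */
      memcpy (copy, rows[i], (size_t) len);
      total += copy[0];
    }                                  /* stack restore + unpoison here */
  return total;
}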
@@ -1226 +1225 @@
    of SIZE bytes.  */

 static unsigned HOST_WIDE_INT
 shadow_mem_size (unsigned HOST_WIDE_INT size)
 {
+  /* It must be possible to align stack variables to granularity
+     of shadow memory.  */
+  gcc_assert (BITS_PER_UNIT
+	      * ASAN_SHADOW_GRANULARITY <= MAX_SUPPORTED_STACK_ALIGNMENT);
+
   return ROUND_UP (size, ASAN_SHADOW_GRANULARITY) / ASAN_SHADOW_GRANULARITY;
 }

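Aside on the function in this hunk: shadow_mem_size maps an object size to a number of shadow bytes, one shadow byte per ASAN_SHADOW_GRANULARITY application bytes (8 with the usual shadow shift of 3); the new assert only guarantees that stack variables can be aligned to that granularity. A standalone sketch of the same arithmetic, with the granularity hard-coded here as an assumption:

#include <stdio.h>

#define SHADOW_GRANULARITY 8  /* assumed default for ASAN_SHADOW_GRANULARITY */

/* Number of shadow bytes needed for an object of SIZE bytes:
   round the size up to the granularity, then divide by it.  */
static unsigned long long
shadow_mem_size (unsigned long long size)
{
  return (size + SHADOW_GRANULARITY - 1) / SHADOW_GRANULARITY;
}

int
main (void)
{
  /* 1..8 bytes need one shadow byte, 9..16 need two, and so on.  */
  printf ("%llu %llu %llu\n",
          shadow_mem_size (1),    /* 1 */
          shadow_mem_size (8),    /* 1 */
          shadow_mem_size (17));  /* 3 */
  return 0;
}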
@@ -1234 +1238 @@
 /* Insert code to protect stack vars.  The prologue sequence should be emitted
    directly, epilogue sequence returned.  BASE is the register holding the
@@ -1263 +1267 @@
   int use_after_return_class = -1;

   if (shadow_ptr_types[0] == NULL_TREE)
     asan_init_shadow_ptr_types ();

+  expanded_location cfun_xloc
+    = expand_location (DECL_SOURCE_LOCATION (current_function_decl));
+
   /* First of all, prepare the description string.  */
   pretty_printer asan_pp;

   pp_decimal_int (&asan_pp, length / 2 - 1);
   pp_space (&asan_pp);
@@ -1275 +1282 @@
       tree decl = decls[l / 2 - 1];
       pp_wide_integer (&asan_pp, offsets[l] - base_offset);
       pp_space (&asan_pp);
       pp_wide_integer (&asan_pp, offsets[l - 1] - offsets[l]);
       pp_space (&asan_pp);
+
+      expanded_location xloc
+        = expand_location (DECL_SOURCE_LOCATION (decl));
+      char location[32];
+
+      if (xloc.file == cfun_xloc.file)
+        sprintf (location, ":%d", xloc.line);
+      else
+        location[0] = '\0';
+
       if (DECL_P (decl) && DECL_NAME (decl))
         {
-          pp_decimal_int (&asan_pp, IDENTIFIER_LENGTH (DECL_NAME (decl)));
+          unsigned idlen
+            = IDENTIFIER_LENGTH (DECL_NAME (decl)) + strlen (location);
+          pp_decimal_int (&asan_pp, idlen);
           pp_space (&asan_pp);
           pp_tree_identifier (&asan_pp, DECL_NAME (decl));
+          pp_string (&asan_pp, location);
         }
       else
         pp_string (&asan_pp, "9 <unknown>");
-      pp_space (&asan_pp);
+
+      if (l > 2)
+        pp_space (&asan_pp);
     }
   str_cst = asan_pp_string (&asan_pp);

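The loop in this hunk builds the frame description string that the ASan runtime parses when printing a stack report: a variable count, then offset, size, name length and name for each local. The change appends a ":<line>" suffix to the name (and counts it in the length) whenever the variable is declared in the same file as the function. A hand-written, purely illustrative value for a frame with two locals might be:

/* Hypothetical frame: "a" at offset 32, 4 bytes, declared on line 7, and
   "buf" at offset 48, 10 bytes, declared on line 9, same file as the
   function.  The name lengths (3 and 5) include the ":line" suffix.  */
const char *frame_descr = "2 32 4 3 a:7 48 10 5 buf:9";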
@@ -1292 +1314 @@
   /* Emit the prologue sequence.  */
   if (asan_frame_size > 32 && asan_frame_size <= 65536 && pbase
@@ -1384 +1406 @@
   DECL_INITIAL (decl) = decl;
   TREE_ASM_WRITTEN (decl) = 1;
   TREE_ASM_WRITTEN (id) = 1;
   emit_move_insn (mem, expand_normal (build_fold_addr_expr (decl)));
   shadow_base = expand_binop (Pmode, lshr_optab, base,
-                              GEN_INT (ASAN_SHADOW_SHIFT),
+                              gen_int_shift_amount (Pmode, ASAN_SHADOW_SHIFT),
                               NULL_RTX, 1, OPTAB_DIRECT);
   shadow_base
     = plus_constant (Pmode, shadow_base,
                      asan_shadow_offset ()
                      + (base_align_bias >> ASAN_SHADOW_SHIFT));
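The RTL in this hunk computes the usual ASan shadow mapping for the frame base: shadow = (addr >> ASAN_SHADOW_SHIFT) + asan_shadow_offset (), plus the alignment bias. A minimal sketch of that formula with typical x86-64 Linux constants (shift 3, offset 0x7fff8000); both values are target-dependent and are assumptions here:

#include <stdint.h>
#include <stdio.h>

#define ASAN_SHADOW_SHIFT  3                /* 8 app bytes per shadow byte */
#define ASAN_SHADOW_OFFSET 0x7fff8000ull    /* typical x86-64 Linux value  */

/* Mirror of the expand-time computation above: shift the address right,
   then add the target's shadow offset.  */
static uint64_t
shadow_addr (uint64_t app_addr)
{
  return (app_addr >> ASAN_SHADOW_SHIFT) + ASAN_SHADOW_OFFSET;
}

int
main (void)
{
  uint64_t frame_base = 0x7ffd1234a040ull;  /* made-up stack address */
  printf ("shadow of %#llx is %#llx\n",
          (unsigned long long) frame_base,
          (unsigned long long) shadow_addr (frame_base));
  return 0;
}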
@@ -1603 +1625 @@
    by Address Sanitizer, by appending a red zone with protected
    shadow memory after it and aligning it to at least
    ASAN_RED_ZONE_SIZE bytes.  */

 bool
-asan_protect_global (tree decl)
+asan_protect_global (tree decl, bool ignore_decl_rtl_set_p)
 {
   if (!ASAN_GLOBALS)
     return false;

   rtx rtl, symbol;
@@ -1625 +1647 @@
   if (!VAR_P (decl)
       /* TLS vars aren't statically protectable.  */
       || DECL_THREAD_LOCAL_P (decl)
       /* Externs will be protected elsewhere.  */
       || DECL_EXTERNAL (decl)
-      || !DECL_RTL_SET_P (decl)
+      /* PR sanitizer/81697: For architectures that use section anchors first
+         call to asan_protect_global may occur before DECL_RTL (decl) is set.
+         We should ignore DECL_RTL_SET_P then, because otherwise the first call
+         to asan_protect_global will return FALSE and the following calls on the
+         same decl after setting DECL_RTL (decl) will return TRUE and we'll end
+         up with inconsistency at runtime.  */
+      || (!DECL_RTL_SET_P (decl) && !ignore_decl_rtl_set_p)
       /* Comdat vars pose an ABI problem, we can't know if
          the var that is selected by the linker will have
          padding or not.  */
       || DECL_ONE_ONLY (decl)
       /* Similarly for common vars.  People can use -fno-common.
@@ -1643 +1671 @@
       || (DECL_SECTION_NAME (decl) != NULL
           && !symtab_node::get (decl)->implicit_section
           && !section_sanitized_p (DECL_SECTION_NAME (decl)))
       || DECL_SIZE (decl) == 0
       || ASAN_RED_ZONE_SIZE * BITS_PER_UNIT > MAX_OFILE_ALIGNMENT
+      || TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
       || !valid_constant_size_p (DECL_SIZE_UNIT (decl))
       || DECL_ALIGN_UNIT (decl) > 2 * ASAN_RED_ZONE_SIZE
       || TREE_TYPE (decl) == ubsan_get_source_location_type ()
       || is_odr_indicator (decl))
     return false;

-  rtl = DECL_RTL (decl);
-  if (!MEM_P (rtl) || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF)
-    return false;
-  symbol = XEXP (rtl, 0);
-
-  if (CONSTANT_POOL_ADDRESS_P (symbol)
-      || TREE_CONSTANT_POOL_ADDRESS_P (symbol))
-    return false;
+  if (!ignore_decl_rtl_set_p || DECL_RTL_SET_P (decl))
+    {
+
+      rtl = DECL_RTL (decl);
+      if (!MEM_P (rtl) || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF)
+        return false;
+      symbol = XEXP (rtl, 0);
+
+      if (CONSTANT_POOL_ADDRESS_P (symbol)
+          || TREE_CONSTANT_POOL_ADDRESS_P (symbol))
+        return false;
+    }

   if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
     return false;

   if (!TARGET_SUPPORTS_ALIASES && asan_needs_local_alias (decl))
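The new comment in this hunk is the core of the asan_protect_global change: on targets that use section anchors, the predicate can be asked about a global before DECL_RTL is set and again afterwards, and the two answers must agree. A toy model of the pre/post-patch behaviour, using made-up names and plain C rather than GCC's real trees and RTL:

#include <stdbool.h>
#include <stdio.h>

/* Toy stand-in for a global variable's state.  */
struct toy_decl
{
  bool rtl_set;         /* models DECL_RTL_SET_P */
};

/* Pre-patch behaviour: the answer silently depends on whether RTL has
   been assigned yet, so the same decl can get two different answers.  */
static bool
protect_global_old (struct toy_decl *d)
{
  if (!d->rtl_set)
    return false;       /* early query says "don't protect" */
  return true;
}

/* Post-patch behaviour: the early caller passes ignore_rtl_set_p = true,
   so the RTL test is skipped and both queries agree.  */
static bool
protect_global_new (struct toy_decl *d, bool ignore_rtl_set_p)
{
  if (!d->rtl_set && !ignore_rtl_set_p)
    return false;
  return true;
}

int
main (void)
{
  struct toy_decl d = { false };

  bool early_old = protect_global_old (&d);        /* false */
  bool early_new = protect_global_new (&d, true);  /* true  */
  d.rtl_set = true;                                /* RTL assigned later */
  bool late_old = protect_global_old (&d);         /* true: inconsistent */
  bool late_new = protect_global_new (&d, false);  /* true: consistent   */

  printf ("old: %d -> %d, new: %d -> %d\n",
          early_old, late_old, early_new, late_new);
  return 0;
}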
@@ -1799 +1832 @@
   profile_probability fallthrough_probability
     = then_more_likely_p
     ? profile_probability::very_unlikely ()
     : profile_probability::very_likely ();
   e->probability = fallthrough_probability.invert ();
+  then_bb->count = e->count ();
   if (create_then_fallthru_edge)
     make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);

   /* Set up the fallthrough basic block.  */
   e = find_edge (cond_bb, fallthru_bb);
@@ -2062 +2096 @@

   size_in_bytes = int_size_in_bytes (type);
   if (size_in_bytes <= 0)
     return;

-  HOST_WIDE_INT bitsize, bitpos;
+  poly_int64 bitsize, bitpos;
   tree offset;
   machine_mode mode;
   int unsignedp, reversep, volatilep = 0;
   tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset, &mode,
                                     &unsignedp, &reversep, &volatilep);
@@ -2080 +2114 @@
                                     TREE_OPERAND (t, 2)),
                            location, is_store);
       return;
     }

-  if (bitpos % BITS_PER_UNIT
-      || bitsize != size_in_bytes * BITS_PER_UNIT)
+  if (!multiple_p (bitpos, BITS_PER_UNIT)
+      || maybe_ne (bitsize, size_in_bytes * BITS_PER_UNIT))
     return;

   if (VAR_P (inner) && DECL_HARD_REGISTER (inner))
     return;

+  poly_int64 decl_size;
   if (VAR_P (inner)
       && offset == NULL_TREE
-      && bitpos >= 0
       && DECL_SIZE (inner)
-      && tree_fits_shwi_p (DECL_SIZE (inner))
-      && bitpos + bitsize <= tree_to_shwi (DECL_SIZE (inner)))
+      && poly_int_tree_p (DECL_SIZE (inner), &decl_size)
+      && known_subrange_p (bitpos, bitsize, 0, decl_size))
     {
       if (DECL_THREAD_LOCAL_P (inner))
         return;
       if (!ASAN_GLOBALS && is_global_var (inner))
         return;
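The rewritten condition above keeps its old meaning under the new poly_int types: instrument the access only if it starts on a byte boundary and covers exactly the accessed type's size. A small example of an access that fails that test versus one that passes; this is illustrative only, since later checks in the function also apply:

struct packed_flags
{
  unsigned a : 3;    /* bit-field: starts at bit 0, width 3 */
  unsigned b : 5;    /* starts at bit 3, not byte aligned   */
};

void
touch (struct packed_flags *p, int *q)
{
  p->b = 1;   /* bitpos not a multiple of BITS_PER_UNIT: skipped here   */
  *q = 2;     /* byte-aligned, full-width int access: gets instrumented */
}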
@@ -2802 +2836 @@
     /* ECF_COLD missing */ ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
 #undef ATTR_PURE_NOTHROW_LEAF_LIST
 #define ATTR_PURE_NOTHROW_LEAF_LIST ECF_PURE | ATTR_NOTHROW_LEAF_LIST
 #undef DEF_BUILTIN_STUB
 #define DEF_BUILTIN_STUB(ENUM, NAME)
-#undef DEF_SANITIZER_BUILTIN
-#define DEF_SANITIZER_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
+#undef DEF_SANITIZER_BUILTIN_1
+#define DEF_SANITIZER_BUILTIN_1(ENUM, NAME, TYPE, ATTRS) \
   do { \
     decl = add_builtin_function ("__builtin_" NAME, TYPE, ENUM, \
                                  BUILT_IN_NORMAL, NAME, NULL_TREE); \
     set_call_expr_flags (decl, ATTRS); \
     set_builtin_decl (ENUM, decl, true); \
-  } while (0);
+  } while (0)
+#undef DEF_SANITIZER_BUILTIN
+#define DEF_SANITIZER_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
+  DEF_SANITIZER_BUILTIN_1 (ENUM, NAME, TYPE, ATTRS);

 #include "sanitizer.def"

   /* -fsanitize=object-size uses __builtin_object_size, but that might
      not be available for e.g. Fortran at this point.  We use
      DEF_SANITIZER_BUILTIN here only as a convenience macro.  */
   if ((flag_sanitize & SANITIZE_OBJECT_SIZE)
       && !builtin_decl_implicit_p (BUILT_IN_OBJECT_SIZE))
-    DEF_SANITIZER_BUILTIN (BUILT_IN_OBJECT_SIZE, "object_size",
+    DEF_SANITIZER_BUILTIN_1 (BUILT_IN_OBJECT_SIZE, "object_size",
                            BT_FN_SIZE_CONST_PTR_INT,
-                           ATTR_PURE_NOTHROW_LEAF_LIST)
+                           ATTR_PURE_NOTHROW_LEAF_LIST);

+#undef DEF_SANITIZER_BUILTIN_1
 #undef DEF_SANITIZER_BUILTIN
 #undef DEF_BUILTIN_STUB
 }

 /* Called via htab_traverse.  Count number of emitted
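The DEF_SANITIZER_BUILTIN / DEF_SANITIZER_BUILTIN_1 split in this hunk appears to follow the usual do { ... } while (0) macro convention: the core expansion must not end in a semicolon, so that an invocation followed by ';' behaves as one ordinary statement, including directly under an if. A generic illustration of the pitfall, unrelated to GCC's actual macros:

#include <stdio.h>

/* Ends in ';' inside the expansion: the caller's own ';' then becomes an
   extra empty statement, and "if (...) BAD (x); else ..." fails to compile.  */
#define BAD(x)  do { printf ("%d\n", (x)); } while (0);

/* Ends without ';': the caller supplies it, so the macro behaves like a
   normal statement everywhere, including before an else.  */
#define GOOD(x) do { printf ("%d\n", (x)); } while (0)

int
main (void)
{
  int n = 1;

  if (n)
    GOOD (n);
  else
    GOOD (-n);

  /* With BAD the same if/else would not compile:
     if (n)
       BAD (n);    -- expands to "do {...} while (0);;", the stray ';'
     else          -- leaves this 'else' without a matching 'if'
       BAD (-n);
  */
  return 0;
}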
@@ -2940 +2978 @@
     (&aascd);
   ctor = build_constructor (type, v);
   TREE_CONSTANT (ctor) = 1;
   TREE_STATIC (ctor) = 1;
   DECL_INITIAL (var) = ctor;
+  SET_DECL_ALIGN (var, MAX (DECL_ALIGN (var),
+                            ASAN_SHADOW_GRANULARITY * BITS_PER_UNIT));
+
   varpool_node::finalize_decl (var);

   tree fn = builtin_decl_implicit (BUILT_IN_ASAN_REGISTER_GLOBALS);
   tree gcount_tree = build_int_cst (pointer_sized_int_node, gcount);
   append_to_statement_list (build_call_expr (fn, 2,