111
|
1 /* UndefinedBehaviorSanitizer, undefined behavior detector.
|
131
|
2 Copyright (C) 2014-2018 Free Software Foundation, Inc.
|
111
|
3 Contributed by Jakub Jelinek <jakub@redhat.com>
|
|
4
|
|
5 This file is part of GCC.
|
|
6
|
|
7 GCC is free software; you can redistribute it and/or modify it under
|
|
8 the terms of the GNU General Public License as published by the Free
|
|
9 Software Foundation; either version 3, or (at your option) any later
|
|
10 version.
|
|
11
|
|
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
|
|
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
|
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
|
|
15 for more details.
|
|
16
|
|
17 You should have received a copy of the GNU General Public License
|
|
18 along with GCC; see the file COPYING3. If not see
|
|
19 <http://www.gnu.org/licenses/>. */
|
|
20
|
|
21 #include "config.h"
|
|
22 #include "system.h"
|
|
23 #include "coretypes.h"
|
|
24 #include "cp-tree.h"
|
|
25 #include "ubsan.h"
|
|
26 #include "stringpool.h"
|
|
27 #include "attribs.h"
|
|
28 #include "asan.h"
|
|
29
|
|
30 /* Test if we should instrument vptr access. */
|
|
31
|
|
32 static bool
|
|
33 cp_ubsan_instrument_vptr_p (tree type)
|
|
34 {
|
|
35 if (!flag_rtti || flag_sanitize_undefined_trap_on_error)
|
|
36 return false;
|
|
37
|
|
38 if (!sanitize_flags_p (SANITIZE_VPTR))
|
|
39 return false;
|
|
40
|
|
41 if (current_function_decl == NULL_TREE)
|
|
42 return false;
|
|
43
|
|
44 if (type)
|
|
45 {
|
|
46 type = TYPE_MAIN_VARIANT (type);
|
|
47 if (!CLASS_TYPE_P (type) || !CLASSTYPE_VTABLES (type))
|
|
48 return false;
|
|
49 }
|
|
50
|
|
51 return true;
|
|
52 }
|
|
53
|
|
/* Helper function for
   cp_ubsan_maybe_instrument_{member_{call,access},downcast}.
   Instrument vptr access.  OP is the object (or its address when IS_ADDR),
   TYPE its class type and CKIND the kind of access being checked.
   Returns an expression that performs the IFN_UBSAN_VPTR check and then
   yields the (saved) address of the object.  */

static tree
cp_ubsan_instrument_vptr (location_t loc, tree op, tree type, bool is_addr,
			  enum ubsan_null_ckind ckind)
{
  type = TYPE_MAIN_VARIANT (type);
  /* Build a 64-bit hash of the mangled type name by combining two
     independent 32-bit string hashes; it is passed to the runtime check
     as one of the IFN_UBSAN_VPTR arguments.  */
  const char *mangled = mangle_type_string (type);
  hashval_t str_hash1 = htab_hash_string (mangled);
  hashval_t str_hash2 = iterative_hash (mangled, strlen (mangled), 0);
  tree str_hash = wide_int_to_tree (uint64_type_node,
				    wi::uhwi (((uint64_t) str_hash1 << 32)
					      | str_hash2, 64));
  /* Always operate on the object's address; SAVE_EXPR so it is evaluated
     only once even though it is used both in the check and as result.  */
  if (!is_addr)
    op = build_fold_addr_expr_loc (loc, op);
  op = save_expr (op);
  /* Load the vptr from the object's virtual-table field and widen it to
     a 64-bit integer.  */
  tree vptr = fold_build3_loc (loc, COMPONENT_REF,
			       TREE_TYPE (TYPE_VFIELD (type)),
			       build_fold_indirect_ref_loc (loc, op),
			       TYPE_VFIELD (type), NULL_TREE);
  vptr = fold_convert_loc (loc, pointer_sized_int_node, vptr);
  vptr = fold_convert_loc (loc, uint64_type_node, vptr);
  if (ckind == UBSAN_DOWNCAST_POINTER)
    {
      /* Downcasting a null pointer is valid, so only read the vptr when
	 OP is non-null; otherwise pass 0 as the vptr value.  */
      tree cond = build2_loc (loc, NE_EXPR, boolean_type_node, op,
			      build_zero_cst (TREE_TYPE (op)));
      /* This is a compiler generated comparison, don't emit
	 e.g. -Wnonnull-compare warning for it.  */
      TREE_NO_WARNING (cond) = 1;
      vptr = build3_loc (loc, COND_EXPR, uint64_type_node, cond,
			 vptr, build_int_cst (uint64_type_node, 0));
    }
  /* The runtime check needs the typeinfo object of the expected type.  */
  tree ti_decl = get_tinfo_decl (type);
  mark_used (ti_decl);
  tree ptype = build_pointer_type (type);
  tree call
    = build_call_expr_internal_loc (loc, IFN_UBSAN_VPTR,
				    void_type_node, 5, op, vptr, str_hash,
				    build_address (ti_decl),
				    build_int_cst (ptype, ckind));
  TREE_SIDE_EFFECTS (call) = 1;
  /* Evaluate the check for its side effects, then yield the address OP.  */
  return fold_build2 (COMPOUND_EXPR, TREE_TYPE (op), call, op);
}
|
|
99
|
|
100 /* Helper function for
|
|
101 cp_ubsan_maybe_instrument_{member_{call,access},downcast}.
|
|
102 Instrument vptr access if it should be instrumented, otherwise return
|
|
103 NULL_TREE. */
|
|
104
|
|
105 static tree
|
|
106 cp_ubsan_maybe_instrument_vptr (location_t loc, tree op, tree type,
|
|
107 bool is_addr, enum ubsan_null_ckind ckind)
|
|
108 {
|
|
109 if (!cp_ubsan_instrument_vptr_p (type))
|
|
110 return NULL_TREE;
|
|
111 return cp_ubsan_instrument_vptr (loc, op, type, is_addr, ckind);
|
|
112 }
|
|
113
|
|
/* Instrument a member call (but not constructor call) if needed.
   STMT is the CALL_EXPR; the object pointer is its first argument.  */

void
cp_ubsan_maybe_instrument_member_call (tree stmt)
{
  if (call_expr_nargs (stmt) == 0)
    return;
  /* Keep OPP pointing at the argument slot so the instrumented
     expression can be written back in place.  */
  tree *opp = &CALL_EXPR_ARG (stmt, 0);
  tree op = *opp;
  if (op == error_mark_node
      || !INDIRECT_TYPE_P (TREE_TYPE (op)))
    return;
  /* Look through COMPOUND_EXPRs so only the final value operand is
     wrapped with the check.  */
  while (TREE_CODE (op) == COMPOUND_EXPR)
    {
      opp = &TREE_OPERAND (op, 1);
      op = *opp;
    }
  op = cp_ubsan_maybe_instrument_vptr (EXPR_LOCATION (stmt), op,
				       TREE_TYPE (TREE_TYPE (op)),
				       true, UBSAN_MEMBER_CALL);
  /* NULL means no instrumentation was needed; otherwise replace the
     argument with the checked expression.  */
  if (op)
    *opp = op;
}
|
|
137
|
|
/* Data passed to cp_ubsan_check_member_access_r.  */

struct cp_ubsan_check_member_access_data
{
  /* Set of already-visited trees, shared by all nested walks.  */
  hash_set<tree> *pset;
  /* True while walking address-taken handled components, in which case
     COMPONENT_REFs must not be instrumented.  */
  bool is_addr;
};

static tree cp_ubsan_check_member_access_r (tree *, int *, void *);
|
|
147
|
|
/* Instrument a member access.  STMT is a COMPONENT_REF; UCMD carries the
   walk state (visited set and address-taken flag).  Return true if the
   access was instrumented.  */

static bool
cp_ubsan_maybe_instrument_member_access
  (tree stmt, cp_ubsan_check_member_access_data *ucmd)
{
  /* Compiler-generated (artificial) fields need no check.  */
  if (DECL_ARTIFICIAL (TREE_OPERAND (stmt, 1)))
    return false;

  tree base = TREE_OPERAND (stmt, 0);
  if (!cp_ubsan_instrument_vptr_p (TREE_TYPE (base)))
    return false;

  /* Instrument accesses within the base expression first; the shared
     pset prevents processing any subtree twice.  */
  cp_walk_tree (&base, cp_ubsan_check_member_access_r, ucmd, ucmd->pset);

  /* Replace the base with an indirection through its address wrapped in
     the vptr check.  */
  base = cp_ubsan_instrument_vptr (EXPR_LOCATION (stmt), base,
				   TREE_TYPE (base), false,
				   UBSAN_MEMBER_ACCESS);
  TREE_OPERAND (stmt, 0)
    = build_fold_indirect_ref_loc (EXPR_LOCATION (stmt), base);
  return true;
}
|
|
170
|
|
/* Attempt to instrument member accesses inside of the function.
   cp_ubsan_maybe_instrument_member_access should be called on COMPONENT_REFs
   in the GENERIC IL, but only when the field is actually accessed, not
   merely when its address is taken.  Therefore we track in is_addr field
   whether in the current context we are processing address taken
   handled components or not.  E.g. for &x->y[w->z] we want to call
   cp_ubsan_maybe_instrument_member_access on *w.z COMPONENT_REF, but
   not on *x.y.  */

static tree
cp_ubsan_check_member_access_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p, t;
  cp_ubsan_check_member_access_data *ucmd
    = (cp_ubsan_check_member_access_data *) data;
  switch (TREE_CODE (stmt))
    {
    case ADDR_EXPR:
      /* Strip any *& pairs to reach the innermost expression whose
	 address is effectively being taken.  */
      t = TREE_OPERAND (stmt, 0);
      while ((TREE_CODE (t) == MEM_REF || INDIRECT_REF_P (t))
	     && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
	t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
      if (handled_component_p (t))
	{
	  *walk_subtrees = 0;
	  /* Walk the operand with is_addr set so COMPONENT_REFs that are
	     merely address-taken are not instrumented.  */
	  ucmd->is_addr = true;
	  cp_walk_tree (&t, cp_ubsan_check_member_access_r,
			data, ucmd->pset);
	  ucmd->is_addr = false;
	}
      break;
    case MEM_REF:
    case INDIRECT_REF:
      /* For *&t walk T directly, skipping the ADDR_EXPR wrapper.  */
      t = TREE_OPERAND (stmt, 0);
      if (TREE_CODE (t) == ADDR_EXPR)
	{
	  *walk_subtrees = 0;
	  t = TREE_OPERAND (t, 0);
	  cp_walk_tree (&t, cp_ubsan_check_member_access_r, data, ucmd->pset);
	}
      break;
    case COMPONENT_REF:
      /* A real member access (not address-taken): instrument it.  */
      if (!ucmd->is_addr && cp_ubsan_maybe_instrument_member_access (stmt, ucmd))
	{
	  *walk_subtrees = 0;
	  break;
	}
      /* FALLTHRU */
    default:
      if (ucmd->is_addr && handled_component_p (stmt))
	{
	  int i, len = TREE_OPERAND_LENGTH (stmt);
	  *walk_subtrees = 0;
	  /* Operand 0 continues the address-taken component chain only
	     while it is itself a handled component; all other operands
	     (e.g. array indices) are real accesses, so is_addr is
	     cleared for them.  */
	  if (!handled_component_p (TREE_OPERAND (stmt, 0)))
	    ucmd->is_addr = false;
	  for (i = 0; i < len; i++)
	    {
	      cp_walk_tree (&TREE_OPERAND (stmt, i),
			    cp_ubsan_check_member_access_r, data, ucmd->pset);
	      ucmd->is_addr = false;
	    }
	  /* Restore the flag for the caller's continuing walk.  */
	  ucmd->is_addr = true;
	}
      break;
    }
  return NULL_TREE;
}
|
|
238
|
|
239 /* Instrument all member accesses inside GENERIC *T_P. */
|
|
240
|
|
241 void
|
|
242 cp_ubsan_instrument_member_accesses (tree *t_p)
|
|
243 {
|
|
244 if (cp_ubsan_instrument_vptr_p (NULL_TREE))
|
|
245 {
|
|
246 hash_set<tree> pset;
|
|
247 cp_ubsan_check_member_access_data ucmd;
|
|
248 ucmd.pset = &pset;
|
|
249 ucmd.is_addr = false;
|
|
250 cp_walk_tree (t_p, cp_ubsan_check_member_access_r, &ucmd, &pset);
|
|
251 }
|
|
252 }
|
|
253
|
|
254 /* Instrument downcast. */
|
|
255
|
|
256 tree
|
|
257 cp_ubsan_maybe_instrument_downcast (location_t loc, tree type,
|
|
258 tree intype, tree op)
|
|
259 {
|
131
|
260 if (!INDIRECT_TYPE_P (type)
|
|
261 || !INDIRECT_TYPE_P (intype)
|
|
262 || !INDIRECT_TYPE_P (TREE_TYPE (op))
|
111
|
263 || !CLASS_TYPE_P (TREE_TYPE (TREE_TYPE (op)))
|
|
264 || !is_properly_derived_from (TREE_TYPE (type), TREE_TYPE (intype)))
|
|
265 return NULL_TREE;
|
|
266
|
|
267 return cp_ubsan_maybe_instrument_vptr (loc, op, TREE_TYPE (type), true,
|
131
|
268 TYPE_PTR_P (type)
|
111
|
269 ? UBSAN_DOWNCAST_POINTER
|
|
270 : UBSAN_DOWNCAST_REFERENCE);
|
|
271 }
|
|
272
|
|
273 /* Instrument cast to virtual base. */
|
|
274
|
|
275 tree
|
|
276 cp_ubsan_maybe_instrument_cast_to_vbase (location_t loc, tree type, tree op)
|
|
277 {
|
|
278 return cp_ubsan_maybe_instrument_vptr (loc, op, type, true,
|
|
279 UBSAN_CAST_TO_VBASE);
|
|
280 }
|
|
281
|
|
/* Called from initialize_vtbl_ptrs via dfs_walk.  BINFO is the base
   which we want to initialize the vtable pointer for, DATA is
   TREE_LIST whose TREE_VALUE is the this ptr expression.  */

static tree
cp_ubsan_dfs_initialize_vtbl_ptrs (tree binfo, void *data)
{
  /* Bases without a vptr (and hence their bases too) need no clearing.  */
  if (!TYPE_CONTAINS_VPTR_P (BINFO_TYPE (binfo)))
    return dfs_skip_bases;

  /* Primary bases share their vptr with the class they are primary
     for, so skip them to avoid clearing the same vptr twice.  */
  if (!BINFO_PRIMARY_P (binfo))
    {
      tree base_ptr = TREE_VALUE ((tree) data);

      /* Adjust the this pointer to point at this base subobject.  */
      base_ptr = build_base_path (PLUS_EXPR, base_ptr, binfo, /*nonnull=*/1,
				  tf_warning_or_error);

      /* Compute the location of the vptr.  */
      tree vtbl_ptr
	= build_vfield_ref (cp_build_fold_indirect_ref (base_ptr),
			    TREE_TYPE (binfo));
      gcc_assert (vtbl_ptr != error_mark_node);

      /* Assign NULL to the vptr.  */
      tree vtbl = build_zero_cst (TREE_TYPE (vtbl_ptr));
      tree stmt = cp_build_modify_expr (input_location, vtbl_ptr, NOP_EXPR,
					vtbl, tf_warning_or_error);
      if (vptr_via_virtual_p (binfo))
	/* If this vptr comes from a virtual base of the complete object, only
	   clear it if we're in charge of virtual bases.  */
	stmt = build_if_in_charge (stmt);
      finish_expr_stmt (stmt);
    }

  return NULL_TREE;
}
|
|
318
|
|
/* Initialize all the vtable pointers in the object pointed to by
   ADDR to NULL, so that we catch invalid calls to methods before
   mem-initializers are completed.  */

void
cp_ubsan_maybe_initialize_vtbl_ptrs (tree addr)
{
  /* Only needed when vptr sanitization is active for this function.  */
  if (!cp_ubsan_instrument_vptr_p (NULL_TREE))
    return;

  tree type = TREE_TYPE (TREE_TYPE (addr));
  /* Package the this pointer as the TREE_VALUE of a TREE_LIST — the
     shape cp_ubsan_dfs_initialize_vtbl_ptrs expects in DATA.  */
  tree list = build_tree_list (type, addr);
  /* We cannot rely on the vtable being set up.  We have to indirect via the
     vtt_parm.  */
  int save_in_base_initializer = in_base_initializer;
  in_base_initializer = 1;

  /* Walk through the hierarchy, initializing the vptr in each base
     class to NULL.  */
  dfs_walk_once (TYPE_BINFO (type), cp_ubsan_dfs_initialize_vtbl_ptrs,
		 NULL, list);

  /* Restore the saved global state.  */
  in_base_initializer = save_in_base_initializer;
}
|