/* UndefinedBehaviorSanitizer, undefined behavior detector.
   Copyright (C) 2014-2022 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "cp-tree.h"
#include "ubsan.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"

/* Test if we should instrument vptr access.  */

static bool
cp_ubsan_instrument_vptr_p (tree type)
{
  if (!flag_rtti || flag_sanitize_undefined_trap_on_error)
    return false;

  if (!sanitize_flags_p (SANITIZE_VPTR))
    return false;

  if (current_function_decl == NULL_TREE)
    return false;

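  /* A null TYPE means the caller only asks whether vptr instrumentation is
     enabled at all; the class-specific checks below are skipped then.  */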
  if (type)
    {
      type = TYPE_MAIN_VARIANT (type);
      if (!CLASS_TYPE_P (type) || !CLASSTYPE_VTABLES (type))
        return false;
    }

  return true;
}

/* Helper function for
   cp_ubsan_maybe_instrument_{member_{call,access},downcast}.
   Instrument vptr access.  */

static tree
cp_ubsan_instrument_vptr (location_t loc, tree op, tree type, bool is_addr,
                          enum ubsan_null_ckind ckind)
{
  type = TYPE_MAIN_VARIANT (type);
  const char *mangled = mangle_type_string (type);
  hashval_t str_hash1 = htab_hash_string (mangled);
  hashval_t str_hash2 = iterative_hash (mangled, strlen (mangled), 0);
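  /* Combine two independent 32-bit hashes of the mangled type name into a
     single 64-bit constant; this is the value that identifies the expected
     type to the run-time library.  */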
  tree str_hash = wide_int_to_tree (uint64_type_node,
                                    wi::uhwi (((uint64_t) str_hash1 << 32)
                                              | str_hash2, 64));
  if (!is_addr)
    op = build_fold_addr_expr_loc (loc, op);
  op = save_expr (op);
  tree vptr = fold_build3_loc (loc, COMPONENT_REF,
                               TREE_TYPE (TYPE_VFIELD (type)),
                               build_fold_indirect_ref_loc (loc, op),
                               TYPE_VFIELD (type), NULL_TREE);
  vptr = fold_convert_loc (loc, pointer_sized_int_node, vptr);
  vptr = fold_convert_loc (loc, uint64_type_node, vptr);
  if (ckind == UBSAN_DOWNCAST_POINTER)
    {
      tree cond = build2_loc (loc, NE_EXPR, boolean_type_node, op,
                              build_zero_cst (TREE_TYPE (op)));
      /* This is a compiler generated comparison, don't emit
         e.g. -Wnonnull-compare warning for it.  */
      suppress_warning (cond, OPT_Wnonnull_compare);
      vptr = build3_loc (loc, COND_EXPR, uint64_type_node, cond,
                         vptr, build_int_cst (uint64_type_node, 0));
    }
  tree ti_decl = get_tinfo_decl (type);
  mark_used (ti_decl);
  tree ptype = build_pointer_type (type);
  tree call
    = build_call_expr_internal_loc (loc, IFN_UBSAN_VPTR,
                                    void_type_node, 5, op, vptr, str_hash,
                                    build_address (ti_decl),
                                    build_int_cst (ptype, ckind));
  TREE_SIDE_EFFECTS (call) = 1;
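  /* Return a COMPOUND_EXPR so the check is evaluated only for its side
     effects while the whole expression still yields OP; callers can splice
     the result in wherever OP was used.  */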
  return fold_build2 (COMPOUND_EXPR, TREE_TYPE (op), call, op);
}

/* Helper function for
   cp_ubsan_maybe_instrument_{member_{call,access},downcast}.
   Instrument vptr access if it should be instrumented, otherwise return
   NULL_TREE.  */

static tree
cp_ubsan_maybe_instrument_vptr (location_t loc, tree op, tree type,
                                bool is_addr, enum ubsan_null_ckind ckind)
{
  if (!cp_ubsan_instrument_vptr_p (type))
    return NULL_TREE;
  return cp_ubsan_instrument_vptr (loc, op, type, is_addr, ckind);
}

/* Instrument a member call (but not constructor call) if needed.  */

void
cp_ubsan_maybe_instrument_member_call (tree stmt)
{
  if (call_expr_nargs (stmt) == 0)
    return;
  tree op, *opp;

  tree fn = CALL_EXPR_FN (stmt);
  if (fn && TREE_CODE (fn) == OBJ_TYPE_REF)
    {
      /* Virtual function call: Sanitize the use of the object pointer in the
         OBJ_TYPE_REF, since the vtable reference will SEGV otherwise (95221).
         OBJ_TYPE_REF_EXPR is ptr->vptr[N] and OBJ_TYPE_REF_OBJECT is ptr.  But
         we can't be sure of finding OBJ_TYPE_REF_OBJECT in OBJ_TYPE_REF_EXPR
         if the latter has been optimized, so we use a COMPOUND_EXPR below.  */
      opp = &OBJ_TYPE_REF_EXPR (fn);
      op = OBJ_TYPE_REF_OBJECT (fn);
    }
  else
    {
      /* Non-virtual call: Sanitize the 'this' argument.  */
      opp = &CALL_EXPR_ARG (stmt, 0);
      if (*opp == error_mark_node
          || !INDIRECT_TYPE_P (TREE_TYPE (*opp)))
        return;
      while (TREE_CODE (*opp) == COMPOUND_EXPR)
        opp = &TREE_OPERAND (*opp, 1);
      op = *opp;
    }
  op = cp_ubsan_maybe_instrument_vptr (EXPR_LOCATION (stmt), op,
                                       TREE_TYPE (TREE_TYPE (op)),
                                       true, UBSAN_MEMBER_CALL);
  if (!op)
    /* No change.  */;
  else if (fn && TREE_CODE (fn) == OBJ_TYPE_REF)
    *opp = cp_build_compound_expr (op, *opp, tf_none);
  else
    *opp = op;
}

/* Data passed to cp_ubsan_check_member_access_r.  */

struct cp_ubsan_check_member_access_data
{
  hash_set<tree> *pset;
  bool is_addr;
};

static tree cp_ubsan_check_member_access_r (tree *, int *, void *);

/* Instrument a member access.  */

static bool
cp_ubsan_maybe_instrument_member_access
     (tree stmt, cp_ubsan_check_member_access_data *ucmd)
{
  if (DECL_ARTIFICIAL (TREE_OPERAND (stmt, 1)))
    return false;

  tree base = TREE_OPERAND (stmt, 0);
  if (!cp_ubsan_instrument_vptr_p (TREE_TYPE (base)))
    return false;

  cp_walk_tree (&base, cp_ubsan_check_member_access_r, ucmd, ucmd->pset);

  base = cp_ubsan_instrument_vptr (EXPR_LOCATION (stmt), base,
                                   TREE_TYPE (base), false,
                                   UBSAN_MEMBER_ACCESS);
  TREE_OPERAND (stmt, 0)
    = build_fold_indirect_ref_loc (EXPR_LOCATION (stmt), base);
  return true;
}

/* Attempt to instrument member accesses inside of the function.
   cp_ubsan_maybe_instrument_member_access should be called on COMPONENT_REFs
   in the GENERIC IL, but only when the field is actually accessed, not
   merely when its address is taken.  Therefore we track in is_addr field
   whether in the current context we are processing address taken
   handled components or not.  E.g. for &x->y[w->z] we want to call
   cp_ubsan_maybe_instrument_member_access on *w.z COMPONENT_REF, but
   not on *x.y.  */
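/* A minimal illustration (hypothetical snippet, compiled with
   -fsanitize=vptr):

     struct W { virtual ~W (); int z; };
     struct X { virtual ~X (); int y[4]; };
     int *f (X *x, W *w) { return &x->y[w->z]; }

   Here w->z is really read, so its COMPONENT_REF gets a vptr check, while
   x->y only has its address taken and is left uninstrumented.  */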

static tree
cp_ubsan_check_member_access_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p, t;
  cp_ubsan_check_member_access_data *ucmd
    = (cp_ubsan_check_member_access_data *) data;
  switch (TREE_CODE (stmt))
    {
    case ADDR_EXPR:
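      /* Strip *& pairs first, so that we look at the handled component whose
         address is really being taken.  */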
      t = TREE_OPERAND (stmt, 0);
      while ((TREE_CODE (t) == MEM_REF || INDIRECT_REF_P (t))
             && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
        t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
      if (handled_component_p (t))
        {
          *walk_subtrees = 0;
          ucmd->is_addr = true;
          cp_walk_tree (&t, cp_ubsan_check_member_access_r,
                        data, ucmd->pset);
          ucmd->is_addr = false;
        }
      break;
    case MEM_REF:
    case INDIRECT_REF:
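      /* *&expr is just expr, so walk the underlying expression directly
         rather than treating it as an address-taken context.  */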
      t = TREE_OPERAND (stmt, 0);
      if (TREE_CODE (t) == ADDR_EXPR)
        {
          *walk_subtrees = 0;
          t = TREE_OPERAND (t, 0);
          cp_walk_tree (&t, cp_ubsan_check_member_access_r, data, ucmd->pset);
        }
      break;
    case COMPONENT_REF:
      if (!ucmd->is_addr && cp_ubsan_maybe_instrument_member_access (stmt, ucmd))
        {
          *walk_subtrees = 0;
          break;
        }
      /* FALLTHRU */
    default:
      if (ucmd->is_addr && handled_component_p (stmt))
        {
          int i, len = TREE_OPERAND_LENGTH (stmt);
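          /* Operand 0 (the object being referenced) stays in address-taken
             context only if it is itself a handled component; the remaining
             operands (array indexes etc.) are genuine reads, so they are
             walked with is_addr cleared, and is_addr is restored at the
             end.  */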
          *walk_subtrees = 0;
          if (!handled_component_p (TREE_OPERAND (stmt, 0)))
            ucmd->is_addr = false;
          for (i = 0; i < len; i++)
            {
              cp_walk_tree (&TREE_OPERAND (stmt, i),
                            cp_ubsan_check_member_access_r, data, ucmd->pset);
              ucmd->is_addr = false;
            }
          ucmd->is_addr = true;
        }
      break;
    }
  return NULL_TREE;
}

/* Instrument all member accesses inside GENERIC *T_P.  */

void
cp_ubsan_instrument_member_accesses (tree *t_p)
{
  if (cp_ubsan_instrument_vptr_p (NULL_TREE))
    {
      hash_set<tree> pset;
      cp_ubsan_check_member_access_data ucmd;
      ucmd.pset = &pset;
      ucmd.is_addr = false;
      cp_walk_tree (t_p, cp_ubsan_check_member_access_r, &ucmd, &pset);
    }
}

/* Instrument downcast.  */
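/* A rough sketch of what this catches (hypothetical types):

     struct B { virtual ~B (); };
     struct D : B { };
     D *f (B *b) { return static_cast<D *> (b); }

   The operand of the cast is checked, so a non-null B* that does not
   actually point to a D (or to something derived from D) is reported.  */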

tree
cp_ubsan_maybe_instrument_downcast (location_t loc, tree type,
                                    tree intype, tree op)
{
  if (!INDIRECT_TYPE_P (type)
      || !INDIRECT_TYPE_P (intype)
      || !INDIRECT_TYPE_P (TREE_TYPE (op))
      || !CLASS_TYPE_P (TREE_TYPE (TREE_TYPE (op)))
      || !is_properly_derived_from (TREE_TYPE (type), TREE_TYPE (intype)))
    return NULL_TREE;

  return cp_ubsan_maybe_instrument_vptr (loc, op, TREE_TYPE (type), true,
                                         TYPE_PTR_P (type)
                                         ? UBSAN_DOWNCAST_POINTER
                                         : UBSAN_DOWNCAST_REFERENCE);
}

/* Instrument cast to virtual base.  */

tree
cp_ubsan_maybe_instrument_cast_to_vbase (location_t loc, tree type, tree op)
{
  return cp_ubsan_maybe_instrument_vptr (loc, op, type, true,
                                         UBSAN_CAST_TO_VBASE);
}

/* Called from initialize_vtbl_ptrs via dfs_walk.  BINFO is the base
   for which we want to initialize the vtable pointer, DATA is a
   TREE_LIST whose TREE_VALUE is the this ptr expression.  */

static tree
cp_ubsan_dfs_initialize_vtbl_ptrs (tree binfo, void *data)
{
  if (!TYPE_CONTAINS_VPTR_P (BINFO_TYPE (binfo)))
    return dfs_skip_bases;

  if (!BINFO_PRIMARY_P (binfo))
    {
      tree base_ptr = TREE_VALUE ((tree) data);

      base_ptr = build_base_path (PLUS_EXPR, base_ptr, binfo, /*nonnull=*/1,
                                  tf_warning_or_error);

      /* Compute the location of the vptr.  */
      tree vtbl_ptr
        = build_vfield_ref (cp_build_fold_indirect_ref (base_ptr),
                            TREE_TYPE (binfo));
      gcc_assert (vtbl_ptr != error_mark_node);

      /* Assign NULL to the vptr.  */
      tree vtbl = build_zero_cst (TREE_TYPE (vtbl_ptr));
      tree stmt = cp_build_modify_expr (input_location, vtbl_ptr, NOP_EXPR,
                                        vtbl, tf_warning_or_error);
      if (vptr_via_virtual_p (binfo))
        /* If this vptr comes from a virtual base of the complete object, only
           clear it if we're in charge of virtual bases.  */
        stmt = build_if_in_charge (stmt);
      finish_expr_stmt (stmt);
    }

  return NULL_TREE;
}

/* Initialize all the vtable pointers in the object pointed to by
   ADDR to NULL, so that we catch invalid calls to methods before
   mem-initializers are completed.  */
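/* For example (hypothetical code):

     struct A { A (int); virtual int f (); };
     struct B : A { B () : A (f ()) {} };

   B's mem-initializer calls f () before any base is constructed; with the
   vptrs nulled out first, the sanitizer reports the invalid virtual call
   instead of dispatching through whatever the storage happened to
   contain.  */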

void
cp_ubsan_maybe_initialize_vtbl_ptrs (tree addr)
{
  if (!cp_ubsan_instrument_vptr_p (NULL_TREE))
    return;

  tree type = TREE_TYPE (TREE_TYPE (addr));
  tree list = build_tree_list (type, addr);
  /* We cannot rely on the vtable being set up.  We have to indirect via the
     vtt_parm.  */
  int save_in_base_initializer = in_base_initializer;
  in_base_initializer = 1;

  /* Walk through the hierarchy, initializing the vptr in each base
     class to NULL.  */
  dfs_walk_once (TYPE_BINFO (type), cp_ubsan_dfs_initialize_vtbl_ptrs,
                 NULL, list);

  in_base_initializer = save_in_base_initializer;
}
361