/* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.

   Copyright (C) 2002-2019 Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "basic-block.h"
#include "cp-tree.h"
#include "gimple.h"
#include "predict.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "gimplify.h"
#include "c-family/c-ubsan.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "gcc-rich-location.h"

/* Forward declarations.  */

static tree cp_genericize_r (tree *, int *, void *);
static tree cp_fold_r (tree *, int *, void *);
static void cp_genericize_tree (tree*, bool);
static tree cp_fold (tree);

/* Local declarations.  */

enum bc_t { bc_break = 0, bc_continue = 1 };

/* Stack of labels which are targets for "break" or "continue",
   linked through TREE_CHAIN.  */
static tree bc_label[2];

/* Begin a scope which can be exited by a break or continue statement.  BC
   indicates which.

   Just creates a label with location LOCATION and pushes it into the current
   context.  */

static tree
begin_bc_block (enum bc_t bc, location_t location)
{
  tree label = create_artificial_label (location);
  DECL_CHAIN (label) = bc_label[bc];
  bc_label[bc] = label;
  if (bc == bc_break)
    LABEL_DECL_BREAK (label) = true;
  else
    LABEL_DECL_CONTINUE (label) = true;
  return label;
}

/* Finish a scope which can be exited by a break or continue statement.
   LABEL was returned from the most recent call to begin_bc_block.  BLOCK is
   an expression for the contents of the scope.

   If we saw a break (or continue) in the scope, append a LABEL_EXPR to
   BLOCK.  Otherwise, just forget the label.  */

static void
finish_bc_block (tree *block, enum bc_t bc, tree label)
{
  gcc_assert (label == bc_label[bc]);

  if (TREE_USED (label))
    append_to_statement_list (build1 (LABEL_EXPR, void_type_node, label),
			      block);

  bc_label[bc] = DECL_CHAIN (label);
  DECL_CHAIN (label) = NULL_TREE;
}

/* Get the LABEL_EXPR to represent a break or continue statement
   in the current block scope.  BC indicates which.  */

static tree
get_bc_label (enum bc_t bc)
{
  tree label = bc_label[bc];

  /* Mark the label used for finish_bc_block.  */
  TREE_USED (label) = 1;
  return label;
}
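
/* Illustrative sketch (not from the original sources): for nested loops

     while (a)	  // begin_bc_block pushes break/continue labels B1/C1
       while (b)  // pushes B2/C2, chained to B1/C1 via DECL_CHAIN
	 break;	  // genericize_break_stmt emits goto B2, marking B2 used

   each begin_bc_block pushes onto the per-kind label stack, break and
   continue statements jump to the innermost label via get_bc_label, and
   finish_bc_block pops each label, emitting its LABEL_EXPR only if some
   jump actually used it.  */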

/* Genericize a TRY_BLOCK.  */

static void
genericize_try_block (tree *stmt_p)
{
  tree body = TRY_STMTS (*stmt_p);
  tree cleanup = TRY_HANDLERS (*stmt_p);

  *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
}

/* Genericize a HANDLER by converting to a CATCH_EXPR.  */

static void
genericize_catch_block (tree *stmt_p)
{
  tree type = HANDLER_TYPE (*stmt_p);
  tree body = HANDLER_BODY (*stmt_p);

  /* FIXME should the caught type go in TREE_TYPE?  */
  *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
}

/* A terser interface for building a representation of an exception
   specification.  */

static tree
build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
{
  tree t;

  /* FIXME should the allowed types go in TREE_TYPE?  */
  t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
  append_to_statement_list (failure, &EH_FILTER_FAILURE (t));

  t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
  append_to_statement_list (body, &TREE_OPERAND (t, 0));

  return t;
}

/* Genericize an EH_SPEC_BLOCK by converting it to a
   TRY_CATCH_EXPR/EH_FILTER_EXPR pair.  */

static void
genericize_eh_spec_block (tree *stmt_p)
{
  tree body = EH_SPEC_STMTS (*stmt_p);
  tree allowed = EH_SPEC_RAISES (*stmt_p);
  tree failure = build_call_n (call_unexpected_fn, 1, build_exc_ptr ());

  *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
  TREE_NO_WARNING (*stmt_p) = true;
  TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
}
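
/* Illustrative sketch (assumed shape, not a real tree dump): for a
   function body BODY declared with the dynamic exception specification
   throw (int), the lowering above yields approximately

     TRY_CATCH_EXPR
       BODY
       EH_FILTER_EXPR (allowed types = { int })
	 <call the unexpected handler on the current exception>

   so any exception escaping BODY that doesn't match an allowed type
   takes the EH_FILTER_FAILURE path.  */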

/* Return the first non-compound statement in STMT.  */

tree
first_stmt (tree stmt)
{
  switch (TREE_CODE (stmt))
    {
    case STATEMENT_LIST:
      if (tree_statement_list_node *p = STATEMENT_LIST_HEAD (stmt))
	return first_stmt (p->stmt);
      return void_node;

    case BIND_EXPR:
      return first_stmt (BIND_EXPR_BODY (stmt));

    default:
      return stmt;
    }
}

/* Genericize an IF_STMT by turning it into a COND_EXPR.  */

static void
genericize_if_stmt (tree *stmt_p)
{
  tree stmt, cond, then_, else_;
  location_t locus = EXPR_LOCATION (*stmt_p);

  stmt = *stmt_p;
  cond = IF_COND (stmt);
  then_ = THEN_CLAUSE (stmt);
  else_ = ELSE_CLAUSE (stmt);

  if (then_ && else_)
    {
      tree ft = first_stmt (then_);
      tree fe = first_stmt (else_);
      br_predictor pr;
      if (TREE_CODE (ft) == PREDICT_EXPR
	  && TREE_CODE (fe) == PREDICT_EXPR
	  && (pr = PREDICT_EXPR_PREDICTOR (ft)) == PREDICT_EXPR_PREDICTOR (fe)
	  && (pr == PRED_HOT_LABEL || pr == PRED_COLD_LABEL))
	{
	  gcc_rich_location richloc (EXPR_LOC_OR_LOC (ft, locus));
	  richloc.add_range (EXPR_LOC_OR_LOC (fe, locus));
	  warning_at (&richloc, OPT_Wattributes,
		      "both branches of %<if%> statement marked as %qs",
		      pr == PRED_HOT_LABEL ? "likely" : "unlikely");
	}
    }

  if (!then_)
    then_ = build_empty_stmt (locus);
  if (!else_)
    else_ = build_empty_stmt (locus);

  if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
    stmt = then_;
  else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
    stmt = else_;
  else
    stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
  if (!EXPR_HAS_LOCATION (stmt))
    protected_set_expr_location (stmt, locus);
  *stmt_p = stmt;
}
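
/* For example (illustrative, assuming the C++20 [[likely]] attribute
   lowers to a PREDICT_EXPR with PRED_HOT_LABEL):

     if (x) [[likely]] f (); else [[likely]] g ();

   triggers the -Wattributes warning above, while

     if (1) f ();

   folds directly to f () because the implicit empty else arm has no
   side effects.  */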

/* Build a generic representation of one of the C loop forms.  COND is the
   loop condition or NULL_TREE.  BODY is the (possibly compound) statement
   controlled by the loop.  INCR is the increment expression of a for-loop,
   or NULL_TREE.  COND_IS_FIRST indicates whether the condition is
   evaluated before the loop body as in while and for loops, or after the
   loop body as in do-while loops.  */

static void
genericize_cp_loop (tree *stmt_p, location_t start_locus, tree cond, tree body,
		    tree incr, bool cond_is_first, int *walk_subtrees,
		    void *data)
{
  tree blab, clab;
  tree exit = NULL;
  tree stmt_list = NULL;
  tree debug_begin = NULL;

  if (EXPR_LOCATION (incr) == UNKNOWN_LOCATION)
    protected_set_expr_location (incr, start_locus);

  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&incr, cp_genericize_r, data, NULL);

  blab = begin_bc_block (bc_break, start_locus);
  clab = begin_bc_block (bc_continue, start_locus);

  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  if (MAY_HAVE_DEBUG_MARKER_STMTS
      && (!cond || !integer_zerop (cond)))
    {
      debug_begin = build0 (DEBUG_BEGIN_STMT, void_type_node);
      SET_EXPR_LOCATION (debug_begin, cp_expr_loc_or_loc (cond, start_locus));
    }

  if (cond && TREE_CODE (cond) != INTEGER_CST)
    {
      /* If COND is constant, don't bother building an exit.  If it's false,
	 we won't build a loop.  If it's true, any exits are in the body.  */
      location_t cloc = cp_expr_loc_or_loc (cond, start_locus);
      exit = build1_loc (cloc, GOTO_EXPR, void_type_node,
			 get_bc_label (bc_break));
      exit = fold_build3_loc (cloc, COND_EXPR, void_type_node, cond,
			      build_empty_stmt (cloc), exit);
    }

  if (exit && cond_is_first)
    {
      append_to_statement_list (debug_begin, &stmt_list);
      debug_begin = NULL_TREE;
      append_to_statement_list (exit, &stmt_list);
    }
  append_to_statement_list (body, &stmt_list);
  finish_bc_block (&stmt_list, bc_continue, clab);
  if (incr)
    {
      if (MAY_HAVE_DEBUG_MARKER_STMTS)
	{
	  tree d = build0 (DEBUG_BEGIN_STMT, void_type_node);
	  SET_EXPR_LOCATION (d, cp_expr_loc_or_loc (incr, start_locus));
	  append_to_statement_list (d, &stmt_list);
	}
      append_to_statement_list (incr, &stmt_list);
    }
  append_to_statement_list (debug_begin, &stmt_list);
  if (exit && !cond_is_first)
    append_to_statement_list (exit, &stmt_list);

  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  tree loop;
  if (cond && integer_zerop (cond))
    {
      if (cond_is_first)
	loop = fold_build3_loc (start_locus, COND_EXPR,
				void_type_node, cond, stmt_list,
				build_empty_stmt (start_locus));
      else
	loop = stmt_list;
    }
  else
    {
      location_t loc = start_locus;
      if (!cond || integer_nonzerop (cond))
	loc = EXPR_LOCATION (expr_first (body));
      if (loc == UNKNOWN_LOCATION)
	loc = start_locus;
      loop = build1_loc (loc, LOOP_EXPR, void_type_node, stmt_list);
    }

  stmt_list = NULL;
  append_to_statement_list (loop, &stmt_list);
  finish_bc_block (&stmt_list, bc_break, blab);
  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  *stmt_p = stmt_list;
}
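
/* Illustrative sketch (not from the original sources): with a
   non-constant condition,

     while (cond) body;

   is lowered by genericize_cp_loop to approximately

     LOOP_EXPR
       {
	 if (cond) ; else goto break_label;
	 body;
	 continue_label:;
       }
     break_label:;

   for a do-while loop the test instead follows the body, and the label
   statements appear only if they were actually used.  */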

/* Genericize a FOR_STMT node *STMT_P.  */

static void
genericize_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree expr = NULL;
  tree loop;
  tree init = FOR_INIT_STMT (stmt);

  if (init)
    {
      cp_walk_tree (&init, cp_genericize_r, data, NULL);
      append_to_statement_list (init, &expr);
    }

  genericize_cp_loop (&loop, EXPR_LOCATION (stmt), FOR_COND (stmt),
		      FOR_BODY (stmt), FOR_EXPR (stmt), 1, walk_subtrees, data);
  append_to_statement_list (loop, &expr);
  if (expr == NULL_TREE)
    expr = loop;
  *stmt_p = expr;
}

/* Genericize a WHILE_STMT node *STMT_P.  */

static void
genericize_while_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), WHILE_COND (stmt),
		      WHILE_BODY (stmt), NULL_TREE, 1, walk_subtrees, data);
}

/* Genericize a DO_STMT node *STMT_P.  */

static void
genericize_do_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), DO_COND (stmt),
		      DO_BODY (stmt), NULL_TREE, 0, walk_subtrees, data);
}

/* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR.  */

static void
genericize_switch_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree break_block, body, cond, type;
  location_t stmt_locus = EXPR_LOCATION (stmt);

  body = SWITCH_STMT_BODY (stmt);
  if (!body)
    body = build_empty_stmt (stmt_locus);
  cond = SWITCH_STMT_COND (stmt);
  type = SWITCH_STMT_TYPE (stmt);

  cp_walk_tree (&cond, cp_genericize_r, data, NULL);

  break_block = begin_bc_block (bc_break, stmt_locus);

  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&type, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  if (TREE_USED (break_block))
    SWITCH_BREAK_LABEL_P (break_block) = 1;
  finish_bc_block (&body, bc_break, break_block);
  *stmt_p = build2_loc (stmt_locus, SWITCH_EXPR, type, cond, body);
  SWITCH_ALL_CASES_P (*stmt_p) = SWITCH_STMT_ALL_CASES_P (stmt);
  gcc_checking_assert (!SWITCH_STMT_NO_BREAK_P (stmt)
		       || !TREE_USED (break_block));
}

/* Genericize a CONTINUE_STMT node *STMT_P.  */

static void
genericize_continue_stmt (tree *stmt_p)
{
  tree stmt_list = NULL;
  tree pred = build_predict_expr (PRED_CONTINUE, NOT_TAKEN);
  tree label = get_bc_label (bc_continue);
  location_t location = EXPR_LOCATION (*stmt_p);
  tree jump = build1_loc (location, GOTO_EXPR, void_type_node, label);
  append_to_statement_list_force (pred, &stmt_list);
  append_to_statement_list (jump, &stmt_list);
  *stmt_p = stmt_list;
}

/* Genericize a BREAK_STMT node *STMT_P.  */

static void
genericize_break_stmt (tree *stmt_p)
{
  tree label = get_bc_label (bc_break);
  location_t location = EXPR_LOCATION (*stmt_p);
  *stmt_p = build1_loc (location, GOTO_EXPR, void_type_node, label);
}

/* Genericize an OMP_FOR node *STMT_P.  */

static void
genericize_omp_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  location_t locus = EXPR_LOCATION (stmt);
  tree clab = begin_bc_block (bc_continue, locus);

  cp_walk_tree (&OMP_FOR_BODY (stmt), cp_genericize_r, data, NULL);
  if (TREE_CODE (stmt) != OMP_TASKLOOP)
    cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INIT (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_COND (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INCR (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  finish_bc_block (&OMP_FOR_BODY (stmt), bc_continue, clab);
}

/* Hook into the middle of gimplifying an OMP_FOR node.  */

static enum gimplify_status
cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  gimple_seq seq = NULL;

  /* Protect ourselves from recursion.  */
  if (OMP_FOR_GIMPLIFYING_P (for_stmt))
    return GS_UNHANDLED;
  OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;

  gimplify_and_add (for_stmt, &seq);
  gimple_seq_add_seq (pre_p, seq);

  OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;

  return GS_ALL_DONE;
}
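
/* Informal sketch of the flag dance above: gimplify_expr dispatches the
   OMP_FOR to cp_gimplify_expr, which calls cp_gimplify_omp_for; that
   sets OMP_FOR_GIMPLIFYING_P and re-enters the gimplifier through
   gimplify_and_add.  When the inner walk reaches the same OMP_FOR, the
   flag makes us return GS_UNHANDLED, so the language-independent
   OMP_FOR gimplification takes over.  */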

/* Gimplify an EXPR_STMT node.  */

static void
gimplify_expr_stmt (tree *stmt_p)
{
  tree stmt = EXPR_STMT_EXPR (*stmt_p);

  if (stmt == error_mark_node)
    stmt = NULL;

  /* Gimplification of a statement expression will nullify the
     statement if all its side effects are moved to *PRE_P and *POST_P.

     In this case we will not want to emit the gimplified statement.
     However, we may still want to emit a warning, so we do that before
     gimplification.  */
  if (stmt && warn_unused_value)
    {
      if (!TREE_SIDE_EFFECTS (stmt))
	{
	  if (!IS_EMPTY_STMT (stmt)
	      && !VOID_TYPE_P (TREE_TYPE (stmt))
	      && !TREE_NO_WARNING (stmt))
	    warning (OPT_Wunused_value, "statement with no effect");
	}
      else
	warn_if_unused_value (stmt, input_location);
    }

  if (stmt == NULL_TREE)
    stmt = alloc_stmt_list ();

  *stmt_p = stmt;
}

/* Gimplify initialization from an AGGR_INIT_EXPR.  */

static void
cp_gimplify_init_expr (tree *expr_p)
{
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;

  /* What about code that pulls out the temp and uses it elsewhere?  I
     think that such code never uses the TARGET_EXPR as an initializer.  If
     I'm wrong, we'll abort because the temp won't have any RTL.  In that
     case, I guess we'll need to replace references somehow.  */
  if (TREE_CODE (from) == TARGET_EXPR)
    from = TARGET_EXPR_INITIAL (from);

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  */
  for (t = from; t; )
    {
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
	 replace the slot operand with our target.

	 Should we add a target parm to gimplify_expr instead?  No, as in this
	 case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR
	  || TREE_CODE (sub) == VEC_INIT_EXPR)
	{
	  if (TREE_CODE (sub) == AGGR_INIT_EXPR)
	    AGGR_INIT_EXPR_SLOT (sub) = to;
	  else
	    VEC_INIT_EXPR_SLOT (sub) = to;
	  *expr_p = from;

	  /* The initialization is now a side-effect, so the container can
	     become void.  */
	  if (from != sub)
	    TREE_TYPE (from) = void_type_node;
	}

      /* Handle aggregate NSDMI.  */
      replace_placeholders (sub, to);

      if (t == sub)
	break;
      else
	t = TREE_OPERAND (t, 1);
    }

}
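
/* Illustrative sketch (made-up tree dump, not from the original
   sources): an initialization such as

     x = TARGET_EXPR <D.1234, AGGR_INIT_EXPR <ctor, D.1234>>

   is rewritten above so that the constructor builds directly into X,

     AGGR_INIT_EXPR <ctor, x>

   dropping both the INIT_EXPR and the temporary slot D.1234.  */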

/* Gimplify a MUST_NOT_THROW_EXPR.  */

static enum gimplify_status
gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree stmt = *expr_p;
  tree temp = voidify_wrapper_expr (stmt, NULL);
  tree body = TREE_OPERAND (stmt, 0);
  gimple_seq try_ = NULL;
  gimple_seq catch_ = NULL;
  gimple *mnt;

  gimplify_and_add (body, &try_);
  mnt = gimple_build_eh_must_not_throw (terminate_fn);
  gimple_seq_add_stmt_without_update (&catch_, mnt);
  mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);

  gimple_seq_add_stmt_without_update (pre_p, mnt);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL;
  return GS_ALL_DONE;
}

/* Return TRUE if an operand (OP) of a given TYPE being copied is
   really just an empty class copy.

   Check that the operand has a simple form so that TARGET_EXPRs and
   non-empty CONSTRUCTORs get reduced properly, and we leave the
   return slot optimization alone because it isn't a copy.  */

static bool
simple_empty_class_p (tree type, tree op)
{
  return
    ((TREE_CODE (op) == COMPOUND_EXPR
      && simple_empty_class_p (type, TREE_OPERAND (op, 1)))
     || TREE_CODE (op) == EMPTY_CLASS_EXPR
     || is_gimple_lvalue (op)
     || INDIRECT_REF_P (op)
     || (TREE_CODE (op) == CONSTRUCTOR
	 && CONSTRUCTOR_NELTS (op) == 0
	 && !TREE_CLOBBER_P (op))
     || (TREE_CODE (op) == CALL_EXPR
	 && !CALL_EXPR_RETURN_SLOT_OPT (op)))
    && is_really_empty_class (type, /*ignore_vptr*/true);
}
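
/* For example (illustrative): given

     struct E { };
     E a, b;

   the copy in "a = b;" satisfies simple_empty_class_p, so the
   MODIFY_EXPR handling in cp_gimplify_expr below elides the copy and
   evaluates the two sides only for their side effects.  */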

/* Returns true if evaluating E as an lvalue has side-effects;
   specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't really
   have side-effects until there is a read or write through it.  */

static bool
lvalue_has_side_effects (tree e)
{
  if (!TREE_SIDE_EFFECTS (e))
    return false;
  while (handled_component_p (e))
    {
      if (TREE_CODE (e) == ARRAY_REF
	  && TREE_SIDE_EFFECTS (TREE_OPERAND (e, 1)))
	return true;
      e = TREE_OPERAND (e, 0);
    }
  if (DECL_P (e))
    /* Just naming a variable has no side-effects.  */
    return false;
  else if (INDIRECT_REF_P (e))
    /* Similarly, indirection has no side-effects.  */
    return TREE_SIDE_EFFECTS (TREE_OPERAND (e, 0));
  else
    /* For anything else, trust TREE_SIDE_EFFECTS.  */
    return TREE_SIDE_EFFECTS (e);
}
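
/* Illustrative example for the P0145 preevaluation performed in
   cp_gimplify_expr below: in

     a[i++] = f ();

   the LHS lvalue has side effects and the RHS is a CALL_EXPR, so the
   call result is forced into a temporary first, keeping the RHS
   sequenced before the LHS.  */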

/* Do C++-specific gimplification.  Args are as for gimplify_expr.  */

int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  location_t loc = cp_expr_loc_or_loc (*expr_p, input_location);
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
	= STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
	location_t loc = input_location;
	tree init = VEC_INIT_EXPR_INIT (*expr_p);
	int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
	gcc_assert (EXPR_HAS_LOCATION (*expr_p));
	input_location = EXPR_LOCATION (*expr_p);
	*expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
				  init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
				  from_array,
				  tf_warning_or_error);
	hash_set<tree> pset;
	cp_walk_tree (expr_p, cp_fold_r, &pset, NULL);
	cp_genericize_tree (expr_p, false);
	ret = GS_OK;
	input_location = loc;
      }
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
	 THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

      /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
	 LHS of an assignment might also be involved in the RHS, as in bug
	 25979.  */
    case INIT_EXPR:
      cp_gimplify_init_expr (expr_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
	return GS_OK;
      /* Fall through.  */
    case MODIFY_EXPR:
    modify_expr_case:
      {
	/* If the back end isn't clever enough to know that the lhs and rhs
	   types are the same, add an explicit conversion.  */
	tree op0 = TREE_OPERAND (*expr_p, 0);
	tree op1 = TREE_OPERAND (*expr_p, 1);

	if (!error_operand_p (op0)
	    && !error_operand_p (op1)
	    && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
		|| TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
	    && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
	  TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
					      TREE_TYPE (op0), op1);

	else if (simple_empty_class_p (TREE_TYPE (op0), op1))
	  {
	    /* Remove any copies of empty classes.  Also drop volatile
	       variables on the RHS to avoid infinite recursion from
	       gimplify_expr trying to load the value.  */
	    if (TREE_SIDE_EFFECTS (op1))
	      {
		if (TREE_THIS_VOLATILE (op1)
		    && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
		  op1 = build_fold_addr_expr (op1);

		gimplify_and_add (op1, pre_p);
	      }
	    gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			   is_gimple_lvalue, fb_lvalue);
	    *expr_p = TREE_OPERAND (*expr_p, 0);
	  }
	/* P0145 says that the RHS is sequenced before the LHS.
	   gimplify_modify_expr gimplifies the RHS before the LHS, but that
	   isn't quite strong enough in two cases:

	   1) gimplify.c wants to leave a CALL_EXPR on the RHS, which would
	   mean it's evaluated after the LHS.

	   2) the value calculation of the RHS is also sequenced before the
	   LHS, so for scalar assignment we need to preevaluate if the
	   RHS could be affected by LHS side-effects even if it has no
	   side-effects of its own.  We don't need this for classes because
	   class assignment takes its RHS by reference.  */
	else if (flag_strong_eval_order > 1
		 && TREE_CODE (*expr_p) == MODIFY_EXPR
		 && lvalue_has_side_effects (op0)
		 && (TREE_CODE (op1) == CALL_EXPR
		     || (SCALAR_TYPE_P (TREE_TYPE (op1))
			 && !TREE_CONSTANT (op1))))
	  TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (op1, pre_p);
      }
      ret = GS_OK;
      break;

    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case USING_STMT:
      gcc_unreachable ();

    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
      gcc_unreachable ();

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_TASKLOOP:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;

    case UNARY_PLUS_EXPR:
      {
	tree arg = TREE_OPERAND (*expr_p, 0);
	tree type = TREE_TYPE (*expr_p);
	*expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
					    : arg;
	ret = GS_OK;
      }
      break;

    case CALL_EXPR:
      ret = GS_OK;
      if (flag_strong_eval_order == 2
	  && CALL_EXPR_FN (*expr_p)
	  && cp_get_callee_fndecl_nofold (*expr_p) == NULL_TREE)
	{
	  tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
	  enum gimplify_status t
	    = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
			     is_gimple_call_addr, fb_rvalue);
	  if (t == GS_ERROR)
	    ret = GS_ERROR;
	  else if (is_gimple_variable (CALL_EXPR_FN (*expr_p))
		   && TREE_CODE (CALL_EXPR_FN (*expr_p)) != SSA_NAME)
	    CALL_EXPR_FN (*expr_p)
	      = get_initialized_tmp_var (CALL_EXPR_FN (*expr_p), pre_p,
					 NULL);
	  /* GIMPLE considers most pointer conversion useless, but for
	     calls we actually care about the exact function pointer type.  */
	  if (t != GS_ERROR && TREE_TYPE (CALL_EXPR_FN (*expr_p)) != fnptrtype)
	    CALL_EXPR_FN (*expr_p)
	      = build1 (NOP_EXPR, fnptrtype, CALL_EXPR_FN (*expr_p));
	}
      if (!CALL_EXPR_FN (*expr_p))
	/* Internal function call.  */;
      else if (CALL_EXPR_REVERSE_ARGS (*expr_p))
	{
	  /* This is a call to a (compound) assignment operator that used
	     the operator syntax; gimplify the RHS first.  */
	  gcc_assert (call_expr_nargs (*expr_p) == 2);
	  gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p));
	  enum gimplify_status t
	    = gimplify_arg (&CALL_EXPR_ARG (*expr_p, 1), pre_p, loc);
	  if (t == GS_ERROR)
	    ret = GS_ERROR;
	}
      else if (CALL_EXPR_ORDERED_ARGS (*expr_p))
	{
	  /* Leave the last argument for gimplify_call_expr, to avoid problems
	     with __builtin_va_arg_pack().  */
	  int nargs = call_expr_nargs (*expr_p) - 1;
	  for (int i = 0; i < nargs; ++i)
	    {
	      enum gimplify_status t
		= gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc);
	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
      else if (flag_strong_eval_order
	       && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p))
	{
	  /* If flag_strong_eval_order, evaluate the object argument first.  */
	  tree fntype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
	  if (INDIRECT_TYPE_P (fntype))
	    fntype = TREE_TYPE (fntype);
	  if (TREE_CODE (fntype) == METHOD_TYPE)
	    {
	      enum gimplify_status t
		= gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p, loc);
	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
      if (ret != GS_ERROR)
	{
	  tree decl = cp_get_callee_fndecl_nofold (*expr_p);
	  if (decl
	      && fndecl_built_in_p (decl, CP_BUILT_IN_IS_CONSTANT_EVALUATED,
				    BUILT_IN_FRONTEND))
	    *expr_p = boolean_false_node;
	}
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (*expr_p, 0)
	  && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
	      || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
	{
	  expr_p = &TREE_OPERAND (*expr_p, 0);
	  code = TREE_CODE (*expr_p);
	  /* Avoid going through the INIT_EXPR case, which can
	     degrade INIT_EXPRs into AGGR_INIT_EXPRs.  */
	  goto modify_expr_case;
	}
      /* Fall through.  */

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}

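/* Return true if T is a parameter or return value that is passed by
   invisible reference, i.e. an object the front end represents as a
   pointer behind the scenes.  */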
static inline bool
is_invisiref_parm (const_tree t)
{
  return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
	  && DECL_BY_REFERENCE (t));
}

/* Return true if the UIDs in both int tree maps are equal.  */

bool
cxx_int_tree_map_hasher::equal (cxx_int_tree_map *a, cxx_int_tree_map *b)
{
  return (a->uid == b->uid);
}

/* Hash a UID in a cxx_int_tree_map.  */

unsigned int
cxx_int_tree_map_hasher::hash (cxx_int_tree_map *item)
{
  return item->uid;
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* OpenMP context during genericization.  */

struct cp_genericize_omp_taskreg
{
  bool is_parallel;
  bool default_shared;
  struct cp_genericize_omp_taskreg *outer;
  splay_tree variables;
};

/* Return true if genericization should try to determine if
   DECL is firstprivate or shared within task regions.  */

static bool
omp_var_to_track (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (is_invisiref_parm (decl))
    type = TREE_TYPE (type);
  else if (TYPE_REF_P (type))
    type = TREE_TYPE (type);
  while (TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);
  if (type == error_mark_node || !CLASS_TYPE_P (type))
    return false;
  if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
    return false;
  if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    return false;
  return true;
}

/* Note DECL use in OpenMP region OMP_CTX during genericization.  */

static void
omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
{
  splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
					 (splay_tree_key) decl);
  if (n == NULL)
    {
      int flags = OMP_CLAUSE_DEFAULT_SHARED;
      if (omp_ctx->outer)
	omp_cxx_notice_variable (omp_ctx->outer, decl);
      if (!omp_ctx->default_shared)
	{
	  struct cp_genericize_omp_taskreg *octx;

	  for (octx = omp_ctx->outer; octx; octx = octx->outer)
	    {
	      n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
	      if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
		{
		  flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
		  break;
		}
	      if (octx->is_parallel)
		break;
	    }
	  if (octx == NULL
	      && (TREE_CODE (decl) == PARM_DECL
		  || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
		      && DECL_CONTEXT (decl) == current_function_decl)))
	    flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
	  if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
	    {
	      /* DECL is implicitly determined firstprivate in
		 the current task construct.  Ensure copy ctor and
		 dtor are instantiated, because during gimplification
		 it will already be too late.  */
	      tree type = TREE_TYPE (decl);
	      if (is_invisiref_parm (decl))
		type = TREE_TYPE (type);
	      else if (TYPE_REF_P (type))
		type = TREE_TYPE (type);
	      while (TREE_CODE (type) == ARRAY_TYPE)
		type = TREE_TYPE (type);
	      get_copy_ctor (type, tf_none);
	      get_dtor (type, tf_none);
	    }
	}
      splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
    }
}
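
/* For example (illustrative):

     struct S { S (); S (const S &); ~S (); };

   a local S object referenced inside "#pragma omp task" is implicitly
   firstprivate there, so the code above instantiates its copy
   constructor and destructor now; by gimplification time that would be
   too late.  */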

/* Genericization context.  */

struct cp_genericize_data
{
  hash_set<tree> *p_set;
  vec<tree> bind_expr_stack;
  struct cp_genericize_omp_taskreg *omp_ctx;
  tree try_block;
  bool no_sanitize_p;
  bool handle_invisiref_parm_p;
};

/* Perform any pre-gimplification folding of C++ front end trees to
   GENERIC.
   Note: The folding of non-OMP cases should eventually move into
     the middle end.  For now most folding happens only on GENERIC
     in fold-const, so we need to perform this before the
     transformation to GIMPLE form.  */

static tree
cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt;
  enum tree_code code;

  *stmt_p = stmt = cp_fold (*stmt_p);

  if (((hash_set<tree> *) data)->add (stmt))
    {
      /* Don't walk subtrees of stmts we've already walked once, otherwise
	 we can have exponential complexity with e.g. lots of nested
	 SAVE_EXPRs or TARGET_EXPRs.  cp_fold uses a cache and will always
	 return the same tree, whose subtrees were already walked the first
	 time cp_fold_r was called on it.  */
      *walk_subtrees = 0;
      return NULL;
    }

  code = TREE_CODE (stmt);
  if (code == OMP_FOR || code == OMP_SIMD || code == OMP_DISTRIBUTE
      || code == OMP_TASKLOOP || code == OACC_LOOP)
    {
      tree x;
      int i, n;

      cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
      x = OMP_FOR_COND (stmt);
      if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
	{
	  cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
	  cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
	}
      else if (x && TREE_CODE (x) == TREE_VEC)
	{
	  n = TREE_VEC_LENGTH (x);
	  for (i = 0; i < n; i++)
	    {
	      tree o = TREE_VEC_ELT (x, i);
	      if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
		cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
	    }
	}
      x = OMP_FOR_INCR (stmt);
      if (x && TREE_CODE (x) == TREE_VEC)
	{
	  n = TREE_VEC_LENGTH (x);
	  for (i = 0; i < n; i++)
	    {
	      tree o = TREE_VEC_ELT (x, i);
	      if (o && TREE_CODE (o) == MODIFY_EXPR)
		o = TREE_OPERAND (o, 1);
	      if (o && (TREE_CODE (o) == PLUS_EXPR || TREE_CODE (o) == MINUS_EXPR
			|| TREE_CODE (o) == POINTER_PLUS_EXPR))
		{
		  cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
		  cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
		}
	    }
	}
      cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
      *walk_subtrees = 0;
    }

  return NULL;
}

/* Fold ALL the trees!  FIXME we should be able to remove this, but
   apparently that still causes optimization regressions.  */

void
cp_fold_function (tree fndecl)
{
  hash_set<tree> pset;
  cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &pset, NULL);
}

/* If EXPR involves an anonymous VLA type, prepend a DECL_EXPR for that type
   to trigger gimplify_type_sizes; otherwise a cast to pointer-to-VLA confuses
   the middle-end (c++/88256).  */

static tree
predeclare_vla (tree expr)
{
  tree type = TREE_TYPE (expr);
  if (type == error_mark_node)
    return expr;

  /* We need to strip pointers for gimplify_type_sizes.  */
  tree vla = type;
  while (POINTER_TYPE_P (vla))
    {
      if (TYPE_NAME (vla))
	return expr;
      vla = TREE_TYPE (vla);
    }
  if (TYPE_NAME (vla) || !variably_modified_type_p (vla, NULL_TREE))
    return expr;

  tree decl = build_decl (input_location, TYPE_DECL, NULL_TREE, vla);
  DECL_ARTIFICIAL (decl) = 1;
  TYPE_NAME (vla) = decl;
  tree dexp = build_stmt (input_location, DECL_EXPR, decl);
  expr = build2 (COMPOUND_EXPR, type, dexp, expr);
  return expr;
}
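
/* Illustrative example (cf. the PR cited above): in

     void f (int n, void *p)
     {
       int (*a)[n] = (int (*)[n]) p;
     }

   the cast creates a pointer to an anonymous VLA type; the prepended
   DECL_EXPR lets gimplify_type_sizes evaluate the array bound before
   the cast is gimplified.  */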

/* Perform any pre-gimplification lowering of C++ front end trees to
   GENERIC.  */

static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  hash_set<tree> *p_set = wtd->p_set;

  /* If in an OpenMP context, note var uses.  */
  if (__builtin_expect (wtd->omp_ctx != NULL, 0)
      && (VAR_P (stmt)
	  || TREE_CODE (stmt) == PARM_DECL
	  || TREE_CODE (stmt) == RESULT_DECL)
      && omp_var_to_track (stmt))
    omp_cxx_notice_variable (wtd->omp_ctx, stmt);

  /* Don't dereference parms in a thunk; pass the references through.  */
  if ((TREE_CODE (stmt) == CALL_EXPR && CALL_FROM_THUNK_P (stmt))
      || (TREE_CODE (stmt) == AGGR_INIT_EXPR && AGGR_INIT_FROM_THUNK_P (stmt)))
    {
      *walk_subtrees = 0;
      return NULL;
    }

  /* Dereference invisible reference parms.  */
  if (wtd->handle_invisiref_parm_p && is_invisiref_parm (stmt))
    {
      *stmt_p = convert_from_reference (stmt);
      p_set->add (*stmt_p);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (cp_function_chain->extern_decl_map
      && VAR_OR_FUNCTION_DECL_P (stmt)
      && DECL_EXTERNAL (stmt))
    {
      struct cxx_int_tree_map *h, in;
      in.uid = DECL_UID (stmt);
      h = cp_function_chain->extern_decl_map->find_with_hash (&in, in.uid);
      if (h)
	{
	  *stmt_p = h->to;
	  TREE_USED (h->to) |= TREE_USED (stmt);
	  *walk_subtrees = 0;
	  return NULL;
	}
    }

  if (TREE_CODE (stmt) == INTEGER_CST
      && TYPE_REF_P (TREE_TYPE (stmt))
      && (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
      && !wtd->no_sanitize_p)
    {
      ubsan_maybe_instrument_reference (stmt_p);
      if (*stmt_p != stmt)
	{
	  *walk_subtrees = 0;
	  return NULL_TREE;
	}
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (p_set->contains (stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  switch (TREE_CODE (stmt))
    {
    case ADDR_EXPR:
      if (is_invisiref_parm (TREE_OPERAND (stmt, 0)))
	{
	  /* If in an OpenMP context, note var uses.  */
	  if (__builtin_expect (wtd->omp_ctx != NULL, 0)
	      && omp_var_to_track (TREE_OPERAND (stmt, 0)))
	    omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
	  *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
	  *walk_subtrees = 0;
	}
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (stmt, 0) && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
	/* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
	*walk_subtrees = 0;
      break;

    case OMP_CLAUSE:
      switch (OMP_CLAUSE_CODE (stmt))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Don't dereference an invisiref in OpenMP clauses.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    {
	      *walk_subtrees = 0;
	      if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
		cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
			      cp_genericize_r, data, NULL);
	    }
	  break;
	case OMP_CLAUSE_PRIVATE:
	  /* Don't dereference an invisiref in OpenMP clauses.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    *walk_subtrees = 0;
	  else if (wtd->omp_ctx != NULL)
	    {
	      /* A private clause doesn't cause any references to the
		 var in outer contexts; avoid calling
		 omp_cxx_notice_variable for it.  */
	      struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
	      wtd->omp_ctx = NULL;
	      cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
			    data, NULL);
	      wtd->omp_ctx = old;
	      *walk_subtrees = 0;
	    }
	  break;
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	  /* Don't dereference an invisiref in OpenMP clauses.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    *walk_subtrees = 0;
	  break;
	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	case OMP_CLAUSE_TASK_REDUCTION:
	  /* Don't dereference an invisiref in reduction clause's
	     OMP_CLAUSE_DECL either.  OMP_CLAUSE_REDUCTION_{INIT,MERGE}
	     still needs to be genericized.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    {
	      *walk_subtrees = 0;
	      if (OMP_CLAUSE_REDUCTION_INIT (stmt))
		cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
			      cp_genericize_r, data, NULL);
	      if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
		cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
			      cp_genericize_r, data, NULL);
	    }
	  break;
	default:
	  break;
	}
      break;

    /* Due to the way voidify_wrapper_expr is written, we don't get a chance
       to lower this construct before scanning it, so we need to lower these
       before doing anything else.  */
    case CLEANUP_STMT:
      *stmt_p = build2_loc (EXPR_LOCATION (stmt),
			    CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
						   : TRY_FINALLY_EXPR,
			    void_type_node,
			    CLEANUP_BODY (stmt),
			    CLEANUP_EXPR (stmt));
      break;

    case IF_STMT:
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);

    /* COND_EXPR might have incompatible types in branches if one or both
       arms are bitfields.  Fix it up now.  */
    case COND_EXPR:
      {
	tree type_left
	  = (TREE_OPERAND (stmt, 1)
	     ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
	     : NULL_TREE);
	tree type_right
	  = (TREE_OPERAND (stmt, 2)
	     ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
	     : NULL_TREE);
	if (type_left
	    && !useless_type_conversion_p (TREE_TYPE (stmt),
					   TREE_TYPE (TREE_OPERAND (stmt, 1))))
	  {
	    TREE_OPERAND (stmt, 1)
	      = fold_convert (type_left, TREE_OPERAND (stmt, 1));
	    gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						   type_left));
	  }
	if (type_right
	    && !useless_type_conversion_p (TREE_TYPE (stmt),
					   TREE_TYPE (TREE_OPERAND (stmt, 2))))
	  {
	    TREE_OPERAND (stmt, 2)
	      = fold_convert (type_right, TREE_OPERAND (stmt, 2));
	    gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						   type_right));
	  }
      }
      break;

    case BIND_EXPR:
      if (__builtin_expect (wtd->omp_ctx != NULL, 0))
	{
	  tree decl;
	  for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
	    if (VAR_P (decl)
		&& !DECL_EXTERNAL (decl)
		&& omp_var_to_track (decl))
	      {
		splay_tree_node n
		  = splay_tree_lookup (wtd->omp_ctx->variables,
				       (splay_tree_key) decl);
		if (n == NULL)
		  splay_tree_insert (wtd->omp_ctx->variables,
				     (splay_tree_key) decl,
				     TREE_STATIC (decl)
				     ? OMP_CLAUSE_DEFAULT_SHARED
				     : OMP_CLAUSE_DEFAULT_PRIVATE);
	      }
	}
      if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
	{
	  /* The point here is to not sanitize static initializers.  */
	  bool no_sanitize_p = wtd->no_sanitize_p;
	  wtd->no_sanitize_p = true;
	  for (tree decl = BIND_EXPR_VARS (stmt);
	       decl;
	       decl = DECL_CHAIN (decl))
	    if (VAR_P (decl)
		&& TREE_STATIC (decl)
		&& DECL_INITIAL (decl))
	      cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
	  wtd->no_sanitize_p = no_sanitize_p;
	}
      wtd->bind_expr_stack.safe_push (stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
		    cp_genericize_r, data, NULL);
      wtd->bind_expr_stack.pop ();
      break;

    case USING_STMT:
      {
	tree block = NULL_TREE;

	/* Get the innermost enclosing GIMPLE_BIND that has a non-NULL
	   BLOCK, and append an IMPORTED_DECL to its
	   BLOCK_VARS chained list.  */
	if (wtd->bind_expr_stack.exists ())
	  {
	    int i;
	    for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
	      if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
		break;
	  }
	if (block)
	  {
	    tree decl = TREE_OPERAND (stmt, 0);
	    gcc_assert (decl);

	    if (undeduced_auto_decl (decl))
	      /* Omit from the GENERIC, the back-end can't handle it.  */;
	    else
	      {
		tree using_directive = make_node (IMPORTED_DECL);
		TREE_TYPE (using_directive) = void_type_node;

		IMPORTED_DECL_ASSOCIATED_DECL (using_directive) = decl;
		DECL_CHAIN (using_directive) = BLOCK_VARS (block);
		BLOCK_VARS (block) = using_directive;
	      }
	  }
	/* The USING_STMT won't appear in GENERIC.  */
	*stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
	*walk_subtrees = 0;
      }
      break;

    case DECL_EXPR:
      if (TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
	{
	  /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
	  *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
	  *walk_subtrees = 0;
	}
      else
	{
	  tree d = DECL_EXPR_DECL (stmt);
	  if (VAR_P (d))
	    gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
	}
      break;

    case OMP_PARALLEL:
    case OMP_TASK:
    case OMP_TASKLOOP:
      {
	struct cp_genericize_omp_taskreg omp_ctx;
	tree c, decl;
	splay_tree_node n;

	*walk_subtrees = 0;
	cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
	omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
	omp_ctx.default_shared = omp_ctx.is_parallel;
	omp_ctx.outer = wtd->omp_ctx;
	omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
	wtd->omp_ctx = &omp_ctx;
	for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	  switch (OMP_CLAUSE_CODE (c))
	    {
	    case OMP_CLAUSE_SHARED:
	    case OMP_CLAUSE_PRIVATE:
	    case OMP_CLAUSE_FIRSTPRIVATE:
	    case OMP_CLAUSE_LASTPRIVATE:
	      decl = OMP_CLAUSE_DECL (c);
	      if (decl == error_mark_node || !omp_var_to_track (decl))
		break;
	      n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
	      if (n != NULL)
		break;
	      splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
				 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
				 ? OMP_CLAUSE_DEFAULT_SHARED
				 : OMP_CLAUSE_DEFAULT_PRIVATE);
	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE && omp_ctx.outer)
		omp_cxx_notice_variable (omp_ctx.outer, decl);
	      break;
	    case OMP_CLAUSE_DEFAULT:
	      if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
		omp_ctx.default_shared = true;
	    default:
	      break;
	    }
	if (TREE_CODE (stmt) == OMP_TASKLOOP)
	  genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
	else
	  cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
	wtd->omp_ctx = omp_ctx.outer;
	splay_tree_delete (omp_ctx.variables);
      }
      break;

    case TRY_BLOCK:
      {
        *walk_subtrees = 0;
        tree try_block = wtd->try_block;
        wtd->try_block = stmt;
        cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
        wtd->try_block = try_block;
        cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
      }
      break;

    case MUST_NOT_THROW_EXPR:
      /* MUST_NOT_THROW_COND might be something else with TM.  */
      if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
	{
	  *walk_subtrees = 0;
	  tree try_block = wtd->try_block;
	  wtd->try_block = stmt;
	  cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
	  wtd->try_block = try_block;
	}
      break;

    case THROW_EXPR:
      {
	location_t loc = location_of (stmt);
	if (TREE_NO_WARNING (stmt))
	  /* Never mind.  */;
	else if (wtd->try_block)
	  {
	    if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR)
	      {
		auto_diagnostic_group d;
		if (warning_at (loc, OPT_Wterminate,
				"throw will always call terminate()")
		    && cxx_dialect >= cxx11
		    && DECL_DESTRUCTOR_P (current_function_decl))
		  inform (loc, "in C++11 destructors default to noexcept");
	      }
	  }
	else
	  {
	    if (warn_cxx11_compat && cxx_dialect < cxx11
		&& DECL_DESTRUCTOR_P (current_function_decl)
		&& (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
		    == NULL_TREE)
		&& (get_defaulted_eh_spec (current_function_decl)
		    == empty_except_spec))
	      warning_at (loc, OPT_Wc__11_compat,
			  "in C++11 this throw will terminate because "
			  "destructors default to noexcept");
	  }
      }
      break;

    case CONVERT_EXPR:
      gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
      break;

    case FOR_STMT:
      genericize_for_stmt (stmt_p, walk_subtrees, data);
      break;

    case WHILE_STMT:
      genericize_while_stmt (stmt_p, walk_subtrees, data);
      break;

    case DO_STMT:
      genericize_do_stmt (stmt_p, walk_subtrees, data);
      break;

    case SWITCH_STMT:
      genericize_switch_stmt (stmt_p, walk_subtrees, data);
      break;

    case CONTINUE_STMT:
      genericize_continue_stmt (stmt_p);
      break;

    case BREAK_STMT:
      genericize_break_stmt (stmt_p);
      break;

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OACC_LOOP:
      genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
      break;

    case PTRMEM_CST:
      /* By the time we get here we're handing off to the back end, so we don't
	 need or want to preserve PTRMEM_CST anymore.  */
      *stmt_p = cplus_expand_constant (stmt);
      *walk_subtrees = 0;
      break;

    case MEM_REF:
      /* For MEM_REF, make sure not to sanitize the second operand even
	 if it has reference type.  It is just an offset with a type
	 holding other information.  There is no other processing we
	 need to do for INTEGER_CSTs, so just ignore the second argument
	 unconditionally.  */
      cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
      *walk_subtrees = 0;
      break;

    case NOP_EXPR:
      *stmt_p = predeclare_vla (*stmt_p);
      if (!wtd->no_sanitize_p
	  && sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT)
	  && TYPE_REF_P (TREE_TYPE (stmt)))
	ubsan_maybe_instrument_reference (stmt_p);
      break;

    case CALL_EXPR:
      if (!wtd->no_sanitize_p
	  && sanitize_flags_p ((SANITIZE_NULL
				| SANITIZE_ALIGNMENT | SANITIZE_VPTR)))
	{
	  tree fn = CALL_EXPR_FN (stmt);
	  if (fn != NULL_TREE
	      && !error_operand_p (fn)
	      && INDIRECT_TYPE_P (TREE_TYPE (fn))
	      && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
	    {
	      bool is_ctor
		= TREE_CODE (fn) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
		  && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
	      if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT))
		ubsan_maybe_instrument_member_call (stmt, is_ctor);
	      if (sanitize_flags_p (SANITIZE_VPTR) && !is_ctor)
		cp_ubsan_maybe_instrument_member_call (stmt);
	    }
	  else if (fn == NULL_TREE
		   && CALL_EXPR_IFN (stmt) == IFN_UBSAN_NULL
		   && TREE_CODE (CALL_EXPR_ARG (stmt, 0)) == INTEGER_CST
		   && TYPE_REF_P (TREE_TYPE (CALL_EXPR_ARG (stmt, 0))))
	    *walk_subtrees = 0;
	}
      /* Fall through.  */
    case AGGR_INIT_EXPR:
      /* For calls to a multi-versioned function, overload resolution
	 returns the function with the highest target priority, that is,
	 the version that will be checked for dispatching first.  If this
	 version is inlinable, a direct call to this version can be made;
	 otherwise the call should go through the dispatcher.  */
      {
	tree fn = cp_get_callee_fndecl_nofold (stmt);
	if (fn && DECL_FUNCTION_VERSIONED (fn)
	    && (current_function_decl == NULL
		|| !targetm.target_option.can_inline_p (current_function_decl,
							fn)))
	  if (tree dis = get_function_version_dispatcher (fn))
	    {
	      mark_versions_used (dis);
	      dis = build_address (dis);
	      if (TREE_CODE (stmt) == CALL_EXPR)
		CALL_EXPR_FN (stmt) = dis;
	      else
		AGGR_INIT_EXPR_FN (stmt) = dis;
	    }
      }
      break;

    case TARGET_EXPR:
      if (TARGET_EXPR_INITIAL (stmt)
	  && TREE_CODE (TARGET_EXPR_INITIAL (stmt)) == CONSTRUCTOR
	  && CONSTRUCTOR_PLACEHOLDER_BOUNDARY (TARGET_EXPR_INITIAL (stmt)))
	TARGET_EXPR_NO_ELIDE (stmt) = 1;
      break;

    default:
      if (IS_TYPE_OR_DECL_P (stmt))
	*walk_subtrees = 0;
      break;
    }

  p_set->add (*stmt_p);

  return NULL;
}

/* Lower C++ front end trees to GENERIC in T_P.  */

static void
cp_genericize_tree (tree* t_p, bool handle_invisiref_parm_p)
{
  struct cp_genericize_data wtd;

  wtd.p_set = new hash_set<tree>;
  wtd.bind_expr_stack.create (0);
  wtd.omp_ctx = NULL;
  wtd.try_block = NULL_TREE;
  wtd.no_sanitize_p = false;
  wtd.handle_invisiref_parm_p = handle_invisiref_parm_p;
  cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
  delete wtd.p_set;
  wtd.bind_expr_stack.release ();
  if (sanitize_flags_p (SANITIZE_VPTR))
    cp_ubsan_instrument_member_accesses (t_p);
}

/* If a non-void function doesn't obviously end with a return, add ubsan
   instrumentation code to verify it at runtime.  If -fsanitize=return
   is not enabled, instrument __builtin_unreachable instead.  */
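/* For example (an illustrative case), with -fsanitize=return the body of

     int f (int x) { if (x) return 1; }

   does not obviously end with a return, so a runtime check is appended
   and falling off the end of f is reported at runtime instead of being
   silent undefined behavior.  */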
1717 
1718 static void
1719 cp_maybe_instrument_return (tree fndecl)
1720 {
1721   if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
1722       || DECL_CONSTRUCTOR_P (fndecl)
1723       || DECL_DESTRUCTOR_P (fndecl)
1724       || !targetm.warn_func_return (fndecl))
1725     return;
1726 
1727   if (!sanitize_flags_p (SANITIZE_RETURN, fndecl)
      /* Don't add __builtin_unreachable () if not optimizing; it will not
	 improve any optimizations in that case, and will just break code
	 that relies on the UB.  Don't add it for -fsanitize=unreachable
	 -fno-sanitize=return either: UBSan covers this with
	 ubsan_instrument_return above, where sufficient location
	 information is provided, while the __builtin_unreachable () below,
	 emitted when return sanitization is disabled, would just result in
	 a hard to understand runtime error without a location.  */
1735       && (!optimize
1736 	  || sanitize_flags_p (SANITIZE_UNREACHABLE, fndecl)))
1737     return;
1738 
1739   tree t = DECL_SAVED_TREE (fndecl);
1740   while (t)
1741     {
1742       switch (TREE_CODE (t))
1743 	{
1744 	case BIND_EXPR:
1745 	  t = BIND_EXPR_BODY (t);
1746 	  continue;
1747 	case TRY_FINALLY_EXPR:
1748 	case CLEANUP_POINT_EXPR:
1749 	  t = TREE_OPERAND (t, 0);
1750 	  continue;
1751 	case STATEMENT_LIST:
1752 	  {
1753 	    tree_stmt_iterator i = tsi_last (t);
1754 	    while (!tsi_end_p (i))
1755 	      {
1756 		tree p = tsi_stmt (i);
1757 		if (TREE_CODE (p) != DEBUG_BEGIN_STMT)
1758 		  break;
1759 		tsi_prev (&i);
1760 	      }
1761 	    if (!tsi_end_p (i))
1762 	      {
1763 		t = tsi_stmt (i);
1764 		continue;
1765 	      }
1766 	  }
1767 	  break;
1768 	case RETURN_EXPR:
1769 	  return;
1770 	default:
1771 	  break;
1772 	}
1773       break;
1774     }
1775   if (t == NULL_TREE)
1776     return;
1777   tree *p = &DECL_SAVED_TREE (fndecl);
1778   if (TREE_CODE (*p) == BIND_EXPR)
1779     p = &BIND_EXPR_BODY (*p);
1780 
1781   location_t loc = DECL_SOURCE_LOCATION (fndecl);
1782   if (sanitize_flags_p (SANITIZE_RETURN, fndecl))
1783     t = ubsan_instrument_return (loc);
1784   else
1785     {
1786       tree fndecl = builtin_decl_explicit (BUILT_IN_UNREACHABLE);
1787       t = build_call_expr_loc (BUILTINS_LOCATION, fndecl, 0);
1788     }
1789 
1790   append_to_statement_list (t, p);
1791 }
1792 
1793 void
1794 cp_genericize (tree fndecl)
1795 {
1796   tree t;
1797 
1798   /* Fix up the types of parms passed by invisible reference.  */
1799   for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
1800     if (TREE_ADDRESSABLE (TREE_TYPE (t)))
1801       {
1802 	/* If a function's arguments are copied to create a thunk,
1803 	   then DECL_BY_REFERENCE will be set -- but the type of the
1804 	   argument will be a pointer type, so we will never get
1805 	   here.  */
1806 	gcc_assert (!DECL_BY_REFERENCE (t));
1807 	gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
1808 	TREE_TYPE (t) = DECL_ARG_TYPE (t);
1809 	DECL_BY_REFERENCE (t) = 1;
1810 	TREE_ADDRESSABLE (t) = 0;
1811 	relayout_decl (t);
1812       }
1813 
1814   /* Do the same for the return value.  */
1815   if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
1816     {
1817       t = DECL_RESULT (fndecl);
1818       TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
1819       DECL_BY_REFERENCE (t) = 1;
1820       TREE_ADDRESSABLE (t) = 0;
1821       relayout_decl (t);
1822       if (DECL_NAME (t))
1823 	{
1824 	  /* Adjust DECL_VALUE_EXPR of the original var.  */
1825 	  tree outer = outer_curly_brace_block (current_function_decl);
1826 	  tree var;
1827 
1828 	  if (outer)
1829 	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
1830 	      if (VAR_P (var)
1831 		  && DECL_NAME (t) == DECL_NAME (var)
1832 		  && DECL_HAS_VALUE_EXPR_P (var)
1833 		  && DECL_VALUE_EXPR (var) == t)
1834 		{
1835 		  tree val = convert_from_reference (t);
1836 		  SET_DECL_VALUE_EXPR (var, val);
1837 		  break;
1838 		}
1839 	}
1840     }
1841 
1842   /* If we're a clone, the body is already GIMPLE.  */
1843   if (DECL_CLONED_FUNCTION_P (fndecl))
1844     return;
1845 
1846   /* Allow cp_genericize calls to be nested.  */
1847   tree save_bc_label[2];
1848   save_bc_label[bc_break] = bc_label[bc_break];
1849   save_bc_label[bc_continue] = bc_label[bc_continue];
1850   bc_label[bc_break] = NULL_TREE;
1851   bc_label[bc_continue] = NULL_TREE;
1852 
1853   /* We do want to see every occurrence of the parms, so we can't just use
1854      walk_tree's hash functionality.  */
1855   cp_genericize_tree (&DECL_SAVED_TREE (fndecl), true);
1856 
1857   cp_maybe_instrument_return (fndecl);
1858 
1859   /* Do everything else.  */
1860   c_genericize (fndecl);
1861 
1862   gcc_assert (bc_label[bc_break] == NULL);
1863   gcc_assert (bc_label[bc_continue] == NULL);
1864   bc_label[bc_break] = save_bc_label[bc_break];
1865   bc_label[bc_continue] = save_bc_label[bc_continue];
1866 }
1867 
/* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
   NULL if there is in fact nothing to do.  ARG2 may be NULL if FN
   actually only takes one argument.  */
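/* For array operands the loop built below is conceptually (a sketch in
   pseudo-GENERIC, not literal output):

     p1 = &arg1[0]...[0];  end1 = p1 + sizeof (arg1);
     p2 = &arg2[0]...[0];     // only if ARG2 is given
   lab:
     fn (p1, p2, <default args>);
     p1 += sizeof (element);  p2 += sizeof (element);
     if (p1 != end1) goto lab;  */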
1871 
1872 static tree
1873 cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
1874 {
1875   tree defparm, parm, t;
1876   int i = 0;
1877   int nargs;
1878   tree *argarray;
1879 
1880   if (fn == NULL)
1881     return NULL;
1882 
1883   nargs = list_length (DECL_ARGUMENTS (fn));
1884   argarray = XALLOCAVEC (tree, nargs);
1885 
1886   defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
1887   if (arg2)
1888     defparm = TREE_CHAIN (defparm);
1889 
1890   bool is_method = TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE;
1891   if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
1892     {
1893       tree inner_type = TREE_TYPE (arg1);
1894       tree start1, end1, p1;
1895       tree start2 = NULL, p2 = NULL;
1896       tree ret = NULL, lab;
1897 
1898       start1 = arg1;
1899       start2 = arg2;
1900       do
1901 	{
1902 	  inner_type = TREE_TYPE (inner_type);
1903 	  start1 = build4 (ARRAY_REF, inner_type, start1,
1904 			   size_zero_node, NULL, NULL);
1905 	  if (arg2)
1906 	    start2 = build4 (ARRAY_REF, inner_type, start2,
1907 			     size_zero_node, NULL, NULL);
1908 	}
1909       while (TREE_CODE (inner_type) == ARRAY_TYPE);
1910       start1 = build_fold_addr_expr_loc (input_location, start1);
1911       if (arg2)
1912 	start2 = build_fold_addr_expr_loc (input_location, start2);
1913 
1914       end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
1915       end1 = fold_build_pointer_plus (start1, end1);
1916 
1917       p1 = create_tmp_var (TREE_TYPE (start1));
1918       t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
1919       append_to_statement_list (t, &ret);
1920 
1921       if (arg2)
1922 	{
1923 	  p2 = create_tmp_var (TREE_TYPE (start2));
1924 	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
1925 	  append_to_statement_list (t, &ret);
1926 	}
1927 
1928       lab = create_artificial_label (input_location);
1929       t = build1 (LABEL_EXPR, void_type_node, lab);
1930       append_to_statement_list (t, &ret);
1931 
1932       argarray[i++] = p1;
1933       if (arg2)
1934 	argarray[i++] = p2;
1935       /* Handle default arguments.  */
1936       for (parm = defparm; parm && parm != void_list_node;
1937 	   parm = TREE_CHAIN (parm), i++)
1938 	argarray[i] = convert_default_arg (TREE_VALUE (parm),
1939 					   TREE_PURPOSE (parm), fn,
1940 					   i - is_method, tf_warning_or_error);
1941       t = build_call_a (fn, i, argarray);
1942       t = fold_convert (void_type_node, t);
1943       t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
1944       append_to_statement_list (t, &ret);
1945 
1946       t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
1947       t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
1948       append_to_statement_list (t, &ret);
1949 
1950       if (arg2)
1951 	{
1952 	  t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
1953 	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
1954 	  append_to_statement_list (t, &ret);
1955 	}
1956 
1957       t = build2 (NE_EXPR, boolean_type_node, p1, end1);
1958       t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
1959       append_to_statement_list (t, &ret);
1960 
1961       return ret;
1962     }
1963   else
1964     {
1965       argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
1966       if (arg2)
1967 	argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
1968       /* Handle default arguments.  */
1969       for (parm = defparm; parm && parm != void_list_node;
1970 	   parm = TREE_CHAIN (parm), i++)
1971 	argarray[i] = convert_default_arg (TREE_VALUE (parm),
1972 					   TREE_PURPOSE (parm), fn,
1973 					   i - is_method, tf_warning_or_error);
1974       t = build_call_a (fn, i, argarray);
1975       t = fold_convert (void_type_node, t);
1976       return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
1977     }
1978 }
1979 
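/* A note on CP_OMP_CLAUSE_INFO, summarizing its uses in the helpers
   below: when set, it is a TREE_VEC whose element 0 holds a constructor
   (default or copy, whichever the clause needs), element 1 the
   destructor, and element 2 the assignment operator.  */
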
1980 /* Return code to initialize DECL with its default constructor, or
1981    NULL if there's nothing to do.  */
1982 
1983 tree
1984 cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
1985 {
1986   tree info = CP_OMP_CLAUSE_INFO (clause);
1987   tree ret = NULL;
1988 
1989   if (info)
1990     ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);
1991 
1992   return ret;
1993 }
1994 
1995 /* Return code to initialize DST with a copy constructor from SRC.  */
1996 
1997 tree
1998 cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
1999 {
2000   tree info = CP_OMP_CLAUSE_INFO (clause);
2001   tree ret = NULL;
2002 
2003   if (info)
2004     ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
2005   if (ret == NULL)
2006     ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
2007 
2008   return ret;
2009 }
2010 
2011 /* Similarly, except use an assignment operator instead.  */
2012 
2013 tree
2014 cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
2015 {
2016   tree info = CP_OMP_CLAUSE_INFO (clause);
2017   tree ret = NULL;
2018 
2019   if (info)
2020     ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
2021   if (ret == NULL)
2022     ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
2023 
2024   return ret;
2025 }
2026 
2027 /* Return code to destroy DECL.  */
2028 
2029 tree
2030 cxx_omp_clause_dtor (tree clause, tree decl)
2031 {
2032   tree info = CP_OMP_CLAUSE_INFO (clause);
2033   tree ret = NULL;
2034 
2035   if (info)
2036     ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);
2037 
2038   return ret;
2039 }
2040 
2041 /* True if OpenMP should privatize what this DECL points to rather
2042    than the DECL itself.  */
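/* E.g. (an illustrative case) for a reference `int &r' named in a
   privatization clause, it is the referenced int, not the reference
   itself, that must be privatized; likewise for parameters passed by
   invisible reference.  */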
2043 
2044 bool
2045 cxx_omp_privatize_by_reference (const_tree decl)
2046 {
2047   return (TYPE_REF_P (TREE_TYPE (decl))
2048 	  || is_invisiref_parm (decl));
2049 }
2050 
/* Return true if DECL is a const-qualified variable having no mutable
   member.  */
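/* For example (an illustrative case), given

     struct S { mutable int m; };
     const S s;
     const int c = 0;

   this returns true for `c' but false for `s', since S has a mutable
   member.  */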
2052 bool
2053 cxx_omp_const_qual_no_mutable (tree decl)
2054 {
2055   tree type = TREE_TYPE (decl);
2056   if (TYPE_REF_P (type))
2057     {
2058       if (!is_invisiref_parm (decl))
2059 	return false;
2060       type = TREE_TYPE (type);
2061 
2062       if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
2063 	{
2064 	  /* NVR doesn't preserve const qualification of the
2065 	     variable's type.  */
2066 	  tree outer = outer_curly_brace_block (current_function_decl);
2067 	  tree var;
2068 
2069 	  if (outer)
2070 	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
2071 	      if (VAR_P (var)
2072 		  && DECL_NAME (decl) == DECL_NAME (var)
2073 		  && (TYPE_MAIN_VARIANT (type)
2074 		      == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
2075 		{
2076 		  if (TYPE_READONLY (TREE_TYPE (var)))
2077 		    type = TREE_TYPE (var);
2078 		  break;
2079 		}
2080 	}
2081     }
2082 
2083   if (type == error_mark_node)
2084     return false;
2085 
2086   /* Variables with const-qualified type having no mutable member
2087      are predetermined shared.  */
2088   if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
2089     return true;
2090 
2091   return false;
2092 }
2093 
/* True if the OpenMP sharing attribute of DECL is predetermined.  */
2095 
2096 enum omp_clause_default_kind
2097 cxx_omp_predetermined_sharing_1 (tree decl)
2098 {
2099   /* Static data members are predetermined shared.  */
2100   if (TREE_STATIC (decl))
2101     {
2102       tree ctx = CP_DECL_CONTEXT (decl);
2103       if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
2104 	return OMP_CLAUSE_DEFAULT_SHARED;
2105 
2106       if (c_omp_predefined_variable (decl))
2107 	return OMP_CLAUSE_DEFAULT_SHARED;
2108     }
2109 
  /* `this' may not be specified in data-sharing clauses; still, we need
     to predetermine it firstprivate.  */
2112   if (decl == current_class_ptr)
2113     return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
2114 
2115   return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
2116 }
2117 
/* Likewise, but also include the artificial vars.  We don't want to
   disallow the artificial vars from being mentioned in explicit clauses,
   as we use artificial vars e.g. for loop constructs with random
   access iterators other than pointers, but during gimplification
   we want to treat them as predetermined.  */
2123 
2124 enum omp_clause_default_kind
2125 cxx_omp_predetermined_sharing (tree decl)
2126 {
2127   enum omp_clause_default_kind ret = cxx_omp_predetermined_sharing_1 (decl);
2128   if (ret != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
2129     return ret;
2130 
  /* Predetermine artificial variables holding integral values; those
     are usually the result of gimplify_one_sizepos or SAVE_EXPR
     gimplification.  */
2134   if (VAR_P (decl)
2135       && DECL_ARTIFICIAL (decl)
2136       && INTEGRAL_TYPE_P (TREE_TYPE (decl))
2137       && !(DECL_LANG_SPECIFIC (decl)
2138 	   && DECL_OMP_PRIVATIZED_MEMBER (decl)))
2139     return OMP_CLAUSE_DEFAULT_SHARED;
2140 
2141   /* Similarly for typeinfo symbols.  */
2142   if (VAR_P (decl) && DECL_ARTIFICIAL (decl) && DECL_TINFO_P (decl))
2143     return OMP_CLAUSE_DEFAULT_SHARED;
2144 
2145   return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
2146 }
2147 
2148 /* Finalize an implicitly determined clause.  */
2149 
2150 void
2151 cxx_omp_finish_clause (tree c, gimple_seq *)
2152 {
2153   tree decl, inner_type;
2154   bool make_shared = false;
2155 
2156   if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
2157     return;
2158 
2159   decl = OMP_CLAUSE_DECL (c);
2160   decl = require_complete_type (decl);
2161   inner_type = TREE_TYPE (decl);
2162   if (decl == error_mark_node)
2163     make_shared = true;
2164   else if (TYPE_REF_P (TREE_TYPE (decl)))
2165     inner_type = TREE_TYPE (inner_type);
2166 
2167   /* We're interested in the base element, not arrays.  */
2168   while (TREE_CODE (inner_type) == ARRAY_TYPE)
2169     inner_type = TREE_TYPE (inner_type);
2170 
2171   /* Check for special function availability by building a call to one.
2172      Save the results, because later we won't be in the right context
2173      for making these queries.  */
2174   if (!make_shared
2175       && CLASS_TYPE_P (inner_type)
2176       && cxx_omp_create_clause_info (c, inner_type, false, true, false, true))
2177     make_shared = true;
2178 
2179   if (make_shared)
2180     {
2181       OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
2182       OMP_CLAUSE_SHARED_FIRSTPRIVATE (c) = 0;
2183       OMP_CLAUSE_SHARED_READONLY (c) = 0;
2184     }
2185 }
2186 
/* Return true if DECL's DECL_VALUE_EXPR (if any) should be
   disregarded in an OpenMP construct, because it is going to be
   remapped during OpenMP lowering.  SHARED is true if DECL
   is going to be shared, false if it is going to be privatized.  */
2191 
2192 bool
2193 cxx_omp_disregard_value_expr (tree decl, bool shared)
2194 {
2195   return !shared
2196 	 && VAR_P (decl)
2197 	 && DECL_HAS_VALUE_EXPR_P (decl)
2198 	 && DECL_ARTIFICIAL (decl)
2199 	 && DECL_LANG_SPECIFIC (decl)
2200 	 && DECL_OMP_PRIVATIZED_MEMBER (decl);
2201 }
2202 
2203 /* Fold expression X which is used as an rvalue if RVAL is true.  */
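/* E.g. (an illustrative case) given `const int n = 4;', folding a use
   of `n' as an rvalue can yield the INTEGER_CST 4 via
   decl_constant_value, while an lvalue use of `n' is left alone.  */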
2204 
2205 tree
2206 cp_fold_maybe_rvalue (tree x, bool rval)
2207 {
2208   while (true)
2209     {
2210       x = cp_fold (x);
2211       if (rval)
2212 	x = mark_rvalue_use (x);
2213       if (rval && DECL_P (x)
2214 	  && !TYPE_REF_P (TREE_TYPE (x)))
2215 	{
2216 	  tree v = decl_constant_value (x);
2217 	  if (v != x && v != error_mark_node)
2218 	    {
2219 	      x = v;
2220 	      continue;
2221 	    }
2222 	}
2223       break;
2224     }
2225   return x;
2226 }
2227 
2228 /* Fold expression X which is used as an rvalue.  */
2229 
2230 tree
2231 cp_fold_rvalue (tree x)
2232 {
2233   return cp_fold_maybe_rvalue (x, true);
2234 }
2235 
2236 /* Perform folding on expression X.  */
2237 
2238 tree
2239 cp_fully_fold (tree x)
2240 {
2241   if (processing_template_decl)
2242     return x;
2243   /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't
2244      have to call both.  */
2245   if (cxx_dialect >= cxx11)
2246     {
2247       x = maybe_constant_value (x);
2248       /* Sometimes we are given a CONSTRUCTOR but the call above wraps it into
2249 	 a TARGET_EXPR; undo that here.  */
2250       if (TREE_CODE (x) == TARGET_EXPR)
2251 	x = TARGET_EXPR_INITIAL (x);
2252       else if (TREE_CODE (x) == VIEW_CONVERT_EXPR
2253 	       && TREE_CODE (TREE_OPERAND (x, 0)) == CONSTRUCTOR
2254 	       && TREE_TYPE (TREE_OPERAND (x, 0)) == TREE_TYPE (x))
2255 	x = TREE_OPERAND (x, 0);
2256     }
2257   return cp_fold_rvalue (x);
2258 }
2259 
2260 /* Likewise, but also fold recursively, which cp_fully_fold doesn't perform
2261    in some cases.  */
2262 
2263 tree
2264 cp_fully_fold_init (tree x)
2265 {
2266   if (processing_template_decl)
2267     return x;
2268   x = cp_fully_fold (x);
2269   hash_set<tree> pset;
2270   cp_walk_tree (&x, cp_fold_r, &pset, NULL);
2271   return x;
2272 }
2273 
2274 /* c-common interface to cp_fold.  If IN_INIT, this is in a static initializer
2275    and certain changes are made to the folding done.  Or should be (FIXME).  We
2276    never touch maybe_const, as it is only used for the C front-end
2277    C_MAYBE_CONST_EXPR.  */
2278 
2279 tree
2280 c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/, bool lval)
2281 {
2282   return cp_fold_maybe_rvalue (x, !lval);
2283 }
2284 
2285 static GTY((deletable)) hash_map<tree, tree> *fold_cache;
2286 
2287 /* Dispose of the whole FOLD_CACHE.  */
2288 
2289 void
2290 clear_fold_cache (void)
2291 {
2292   if (fold_cache != NULL)
2293     fold_cache->empty ();
2294 }
2295 
/* This function tries to fold an expression X.
   To avoid combinatorial explosion, folding results are kept in fold_cache.
   If X is invalid, we don't fold at all.
   For performance reasons we don't cache expressions representing a
   declaration or constant.
   The function returns X or its folded variant.  */
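/* E.g. (an illustrative sketch): folding a PLUS_EXPR of the constants 1
   and 2 yields the INTEGER_CST 3; the mapping from the original tree to
   3 is entered into fold_cache, so revisiting the same tree is a hash
   lookup rather than a re-fold.  */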
2302 
2303 static tree
2304 cp_fold (tree x)
2305 {
2306   tree op0, op1, op2, op3;
2307   tree org_x = x, r = NULL_TREE;
2308   enum tree_code code;
2309   location_t loc;
2310   bool rval_ops = true;
2311 
2312   if (!x || x == error_mark_node)
2313     return x;
2314 
2315   if (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node))
2316     return x;
2317 
2318   /* Don't bother to cache DECLs or constants.  */
2319   if (DECL_P (x) || CONSTANT_CLASS_P (x))
2320     return x;
2321 
2322   if (fold_cache == NULL)
2323     fold_cache = hash_map<tree, tree>::create_ggc (101);
2324 
2325   if (tree *cached = fold_cache->get (x))
2326     return *cached;
2327 
2328   code = TREE_CODE (x);
2329   switch (code)
2330     {
2331     case CLEANUP_POINT_EXPR:
2332       /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
2333 	 effects.  */
2334       r = cp_fold_rvalue (TREE_OPERAND (x, 0));
2335       if (!TREE_SIDE_EFFECTS (r))
2336 	x = r;
2337       break;
2338 
2339     case SIZEOF_EXPR:
2340       x = fold_sizeof_expr (x);
2341       break;
2342 
2343     case VIEW_CONVERT_EXPR:
2344       rval_ops = false;
2345       /* FALLTHRU */
2346     case CONVERT_EXPR:
2347     case NOP_EXPR:
2348     case NON_LVALUE_EXPR:
2349 
2350       if (VOID_TYPE_P (TREE_TYPE (x)))
2351 	{
2352 	  /* This is just to make sure we don't end up with casts to
2353 	     void from error_mark_node.  If we just return x, then
2354 	     cp_fold_r might fold the operand into error_mark_node and
2355 	     leave the conversion in the IR.  STRIP_USELESS_TYPE_CONVERSION
2356 	     during gimplification doesn't like such casts.
	     Don't create a new tree if op0 != TREE_OPERAND (x, 0); the
	     folding of the operand should already be in the caches, and
	     if we are in cp_fold_r it will modify the operand in place.  */
2360 	  op0 = cp_fold (TREE_OPERAND (x, 0));
2361 	  if (op0 == error_mark_node)
2362 	    x = error_mark_node;
2363 	  break;
2364 	}
2365 
2366       loc = EXPR_LOCATION (x);
2367       op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2368 
2369       if (code == CONVERT_EXPR
2370 	  && SCALAR_TYPE_P (TREE_TYPE (x))
2371 	  && op0 != void_node)
2372 	/* During parsing we used convert_to_*_nofold; re-convert now using the
2373 	   folding variants, since fold() doesn't do those transformations.  */
2374 	x = fold (convert (TREE_TYPE (x), op0));
2375       else if (op0 != TREE_OPERAND (x, 0))
2376 	{
2377 	  if (op0 == error_mark_node)
2378 	    x = error_mark_node;
2379 	  else
2380 	    x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2381 	}
2382       else
2383 	x = fold (x);
2384 
2385       /* Conversion of an out-of-range value has implementation-defined
2386 	 behavior; the language considers it different from arithmetic
2387 	 overflow, which is undefined.  */
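      /* E.g. (an illustrative case) `(signed char) 300' is merely
	 implementation-defined, so the TREE_OVERFLOW flag fold may have
	 set on the result is cleared just below.  */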
2388       if (TREE_CODE (op0) == INTEGER_CST
2389 	  && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0))
2390 	TREE_OVERFLOW (x) = false;
2391 
2392       break;
2393 
2394     case INDIRECT_REF:
2395       /* We don't need the decltype(auto) obfuscation anymore.  */
2396       if (REF_PARENTHESIZED_P (x))
2397 	{
2398 	  tree p = maybe_undo_parenthesized_ref (x);
2399 	  return cp_fold (p);
2400 	}
2401       goto unary;
2402 
2403     case ADDR_EXPR:
2404       loc = EXPR_LOCATION (x);
2405       op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), false);
2406 
2407       /* Cope with user tricks that amount to offsetof.  */
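      /* E.g. (an illustrative case) `&((struct S *) 0)->m' can be folded
	 to the constant byte offset of M within S, matching what
	 offsetof (struct S, m) would yield.  */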
2408       if (op0 != error_mark_node
2409 	  && TREE_CODE (TREE_TYPE (op0)) != FUNCTION_TYPE
2410 	  && TREE_CODE (TREE_TYPE (op0)) != METHOD_TYPE)
2411 	{
2412 	  tree val = get_base_address (op0);
2413 	  if (val
2414 	      && INDIRECT_REF_P (val)
2415 	      && COMPLETE_TYPE_P (TREE_TYPE (val))
2416 	      && TREE_CONSTANT (TREE_OPERAND (val, 0)))
2417 	    {
2418 	      val = TREE_OPERAND (val, 0);
2419 	      STRIP_NOPS (val);
2420 	      val = maybe_constant_value (val);
2421 	      if (TREE_CODE (val) == INTEGER_CST)
2422 		return fold_offsetof (op0, TREE_TYPE (x));
2423 	    }
2424 	}
2425       goto finish_unary;
2426 
2427     case REALPART_EXPR:
2428     case IMAGPART_EXPR:
2429       rval_ops = false;
2430       /* FALLTHRU */
2431     case CONJ_EXPR:
2432     case FIX_TRUNC_EXPR:
2433     case FLOAT_EXPR:
2434     case NEGATE_EXPR:
2435     case ABS_EXPR:
2436     case ABSU_EXPR:
2437     case BIT_NOT_EXPR:
2438     case TRUTH_NOT_EXPR:
2439     case FIXED_CONVERT_EXPR:
2440     unary:
2441 
2442       loc = EXPR_LOCATION (x);
2443       op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2444 
2445     finish_unary:
2446       if (op0 != TREE_OPERAND (x, 0))
2447 	{
2448 	  if (op0 == error_mark_node)
2449 	    x = error_mark_node;
2450 	  else
2451 	    {
2452 	      x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2453 	      if (code == INDIRECT_REF
2454 		  && (INDIRECT_REF_P (x) || TREE_CODE (x) == MEM_REF))
2455 		{
2456 		  TREE_READONLY (x) = TREE_READONLY (org_x);
2457 		  TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
2458 		  TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
2459 		}
2460 	    }
2461 	}
2462       else
2463 	x = fold (x);
2464 
2465       gcc_assert (TREE_CODE (x) != COND_EXPR
2466 		  || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
2467       break;
2468 
2469     case UNARY_PLUS_EXPR:
2470       op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
2471       if (op0 == error_mark_node)
2472 	x = error_mark_node;
2473       else
2474 	x = fold_convert (TREE_TYPE (x), op0);
2475       break;
2476 
2477     case POSTDECREMENT_EXPR:
2478     case POSTINCREMENT_EXPR:
2479     case INIT_EXPR:
2480     case PREDECREMENT_EXPR:
2481     case PREINCREMENT_EXPR:
2482     case COMPOUND_EXPR:
2483     case MODIFY_EXPR:
2484       rval_ops = false;
2485       /* FALLTHRU */
2486     case POINTER_PLUS_EXPR:
2487     case PLUS_EXPR:
2488     case POINTER_DIFF_EXPR:
2489     case MINUS_EXPR:
2490     case MULT_EXPR:
2491     case TRUNC_DIV_EXPR:
2492     case CEIL_DIV_EXPR:
2493     case FLOOR_DIV_EXPR:
2494     case ROUND_DIV_EXPR:
2495     case TRUNC_MOD_EXPR:
2496     case CEIL_MOD_EXPR:
2497     case ROUND_MOD_EXPR:
2498     case RDIV_EXPR:
2499     case EXACT_DIV_EXPR:
2500     case MIN_EXPR:
2501     case MAX_EXPR:
2502     case LSHIFT_EXPR:
2503     case RSHIFT_EXPR:
2504     case LROTATE_EXPR:
2505     case RROTATE_EXPR:
2506     case BIT_AND_EXPR:
2507     case BIT_IOR_EXPR:
2508     case BIT_XOR_EXPR:
2509     case TRUTH_AND_EXPR:
2510     case TRUTH_ANDIF_EXPR:
2511     case TRUTH_OR_EXPR:
2512     case TRUTH_ORIF_EXPR:
2513     case TRUTH_XOR_EXPR:
2514     case LT_EXPR: case LE_EXPR:
2515     case GT_EXPR: case GE_EXPR:
2516     case EQ_EXPR: case NE_EXPR:
2517     case UNORDERED_EXPR: case ORDERED_EXPR:
2518     case UNLT_EXPR: case UNLE_EXPR:
2519     case UNGT_EXPR: case UNGE_EXPR:
2520     case UNEQ_EXPR: case LTGT_EXPR:
2521     case RANGE_EXPR: case COMPLEX_EXPR:
2522 
2523       loc = EXPR_LOCATION (x);
2524       op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2525       op1 = cp_fold_rvalue (TREE_OPERAND (x, 1));
2526 
2527       if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
2528 	{
2529 	  if (op0 == error_mark_node || op1 == error_mark_node)
2530 	    x = error_mark_node;
2531 	  else
2532 	    x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1);
2533 	}
2534       else
2535 	x = fold (x);
2536 
      /* This is only needed for -Wnonnull-compare and only if
	 TREE_NO_WARNING (org_x), but to avoid that option affecting code
	 generation, we always do it.  */
2540       if (COMPARISON_CLASS_P (org_x))
2541 	{
2542 	  if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST)
2543 	    ;
2544 	  else if (COMPARISON_CLASS_P (x))
2545 	    {
2546 	      if (TREE_NO_WARNING (org_x) && warn_nonnull_compare)
2547 		TREE_NO_WARNING (x) = 1;
2548 	    }
	  /* Otherwise give up on optimizing these; let the GIMPLE folders
	     optimize them later on.  */
2551 	  else if (op0 != TREE_OPERAND (org_x, 0)
2552 		   || op1 != TREE_OPERAND (org_x, 1))
2553 	    {
2554 	      x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1);
2555 	      if (TREE_NO_WARNING (org_x) && warn_nonnull_compare)
2556 		TREE_NO_WARNING (x) = 1;
2557 	    }
2558 	  else
2559 	    x = org_x;
2560 	}
2561       break;
2562 
2563     case VEC_COND_EXPR:
2564     case COND_EXPR:
2565       loc = EXPR_LOCATION (x);
2566       op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
2567       op1 = cp_fold (TREE_OPERAND (x, 1));
2568       op2 = cp_fold (TREE_OPERAND (x, 2));
2569 
2570       if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
2571 	{
2572 	  warning_sentinel s (warn_int_in_bool_context);
2573 	  if (!VOID_TYPE_P (TREE_TYPE (op1)))
2574 	    op1 = cp_truthvalue_conversion (op1);
2575 	  if (!VOID_TYPE_P (TREE_TYPE (op2)))
2576 	    op2 = cp_truthvalue_conversion (op2);
2577 	}
2578       else if (VOID_TYPE_P (TREE_TYPE (x)))
2579 	{
2580 	  if (TREE_CODE (op0) == INTEGER_CST)
2581 	    {
	      /* If the condition is constant, fold can fold away
		 the COND_EXPR.  Some statement-level uses of COND_EXPR
		 have one of the branches NULL; build empty statements
		 for them to avoid crashing in fold.  */
2585 	      if (!op1)
2586 		op1 = build_empty_stmt (loc);
2587 	      if (!op2)
2588 		op2 = build_empty_stmt (loc);
2589 	    }
2590 	  else
2591 	    {
2592 	      /* Otherwise, don't bother folding a void condition, since
2593 		 it can't produce a constant value.  */
2594 	      if (op0 != TREE_OPERAND (x, 0)
2595 		  || op1 != TREE_OPERAND (x, 1)
2596 		  || op2 != TREE_OPERAND (x, 2))
2597 		x = build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2598 	      break;
2599 	    }
2600 	}
2601 
2602       if (op0 != TREE_OPERAND (x, 0)
2603 	  || op1 != TREE_OPERAND (x, 1)
2604 	  || op2 != TREE_OPERAND (x, 2))
2605 	{
2606 	  if (op0 == error_mark_node
2607 	      || op1 == error_mark_node
2608 	      || op2 == error_mark_node)
2609 	    x = error_mark_node;
2610 	  else
2611 	    x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2612 	}
2613       else
2614 	x = fold (x);
2615 
2616       /* A COND_EXPR might have incompatible types in branches if one or both
2617 	 arms are bitfields.  If folding exposed such a branch, fix it up.  */
2618       if (TREE_CODE (x) != code
2619 	  && x != error_mark_node
2620 	  && !useless_type_conversion_p (TREE_TYPE (org_x), TREE_TYPE (x)))
2621 	x = fold_convert (TREE_TYPE (org_x), x);
2622 
2623       break;
2624 
2625     case CALL_EXPR:
2626       {
2627 	int i, m, sv = optimize, nw = sv, changed = 0;
2628 	tree callee = get_callee_fndecl (x);
2629 
2630 	/* Some built-in function calls will be evaluated at compile-time in
2631 	   fold ().  Set optimize to 1 when folding __builtin_constant_p inside
2632 	   a constexpr function so that fold_builtin_1 doesn't fold it to 0.  */
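	/* E.g. (an illustrative case), in

	     constexpr int f (int i) { return __builtin_constant_p (i); }

	   we don't want -O0 folding to resolve __builtin_constant_p (i)
	   to 0, since constexpr evaluation of f may still see a constant
	   argument.  */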
2633 	if (callee && fndecl_built_in_p (callee) && !optimize
2634 	    && DECL_IS_BUILTIN_CONSTANT_P (callee)
2635 	    && current_function_decl
2636 	    && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
2637 	  nw = 1;
2638 
2639 	/* Defer folding __builtin_is_constant_evaluated.  */
2640 	if (callee
2641 	    && fndecl_built_in_p (callee, CP_BUILT_IN_IS_CONSTANT_EVALUATED,
2642 				BUILT_IN_FRONTEND))
2643 	  break;
2644 
2645 	x = copy_node (x);
2646 
2647 	m = call_expr_nargs (x);
2648 	for (i = 0; i < m; i++)
2649 	  {
2650 	    r = cp_fold (CALL_EXPR_ARG (x, i));
2651 	    if (r != CALL_EXPR_ARG (x, i))
2652 	      {
2653 		if (r == error_mark_node)
2654 		  {
2655 		    x = error_mark_node;
2656 		    break;
2657 		  }
2658 		changed = 1;
2659 	      }
2660 	    CALL_EXPR_ARG (x, i) = r;
2661 	  }
2662 	if (x == error_mark_node)
2663 	  break;
2664 
2665 	optimize = nw;
2666 	r = fold (x);
2667 	optimize = sv;
2668 
2669 	if (TREE_CODE (r) != CALL_EXPR)
2670 	  {
2671 	    x = cp_fold (r);
2672 	    break;
2673 	  }
2674 
2675 	optimize = nw;
2676 
2677 	/* Invoke maybe_constant_value for functions declared
2678 	   constexpr and not called with AGGR_INIT_EXPRs.
2679 	   TODO:
2680 	   Do constexpr expansion of expressions where the call itself is not
2681 	   constant, but the call followed by an INDIRECT_REF is.  */
2682 	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
2683 	    && !flag_no_inline)
2684 	  r = maybe_constant_value (x);
2685 	optimize = sv;
2686 
2687         if (TREE_CODE (r) != CALL_EXPR)
2688 	  {
2689 	    if (DECL_CONSTRUCTOR_P (callee))
2690 	      {
2691 		loc = EXPR_LOCATION (x);
2692 		tree s = build_fold_indirect_ref_loc (loc,
2693 						      CALL_EXPR_ARG (x, 0));
2694 		r = build2_loc (loc, INIT_EXPR, TREE_TYPE (s), s, r);
2695 	      }
2696 	    x = r;
2697 	    break;
2698 	  }
2699 
2700 	if (!changed)
2701 	  x = org_x;
2702 	break;
2703       }
2704 
2705     case CONSTRUCTOR:
2706       {
2707 	unsigned i;
2708 	constructor_elt *p;
2709 	vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x);
2710 	vec<constructor_elt, va_gc> *nelts = NULL;
2711 	FOR_EACH_VEC_SAFE_ELT (elts, i, p)
2712 	  {
2713 	    tree op = cp_fold (p->value);
2714 	    if (op != p->value)
2715 	      {
2716 		if (op == error_mark_node)
2717 		  {
2718 		    x = error_mark_node;
2719 		    vec_free (nelts);
2720 		    break;
2721 		  }
2722 		if (nelts == NULL)
2723 		  nelts = elts->copy ();
2724 		(*nelts)[i].value = op;
2725 	      }
2726 	  }
2727 	if (nelts)
2728 	  {
2729 	    x = build_constructor (TREE_TYPE (x), nelts);
2730 	    CONSTRUCTOR_PLACEHOLDER_BOUNDARY (x)
2731 	      = CONSTRUCTOR_PLACEHOLDER_BOUNDARY (org_x);
2732 	  }
2733 	if (VECTOR_TYPE_P (TREE_TYPE (x)))
2734 	  x = fold (x);
2735 	break;
2736       }
2737     case TREE_VEC:
2738       {
2739 	bool changed = false;
2740 	vec<tree, va_gc> *vec = make_tree_vector ();
2741 	int i, n = TREE_VEC_LENGTH (x);
2742 	vec_safe_reserve (vec, n);
2743 
2744 	for (i = 0; i < n; i++)
2745 	  {
2746 	    tree op = cp_fold (TREE_VEC_ELT (x, i));
2747 	    vec->quick_push (op);
2748 	    if (op != TREE_VEC_ELT (x, i))
2749 	      changed = true;
2750 	  }
2751 
2752 	if (changed)
2753 	  {
2754 	    r = copy_node (x);
2755 	    for (i = 0; i < n; i++)
2756 	      TREE_VEC_ELT (r, i) = (*vec)[i];
2757 	    x = r;
2758 	  }
2759 
2760 	release_tree_vector (vec);
2761       }
2762 
2763       break;
2764 
2765     case ARRAY_REF:
2766     case ARRAY_RANGE_REF:
2767 
2768       loc = EXPR_LOCATION (x);
2769       op0 = cp_fold (TREE_OPERAND (x, 0));
2770       op1 = cp_fold (TREE_OPERAND (x, 1));
2771       op2 = cp_fold (TREE_OPERAND (x, 2));
2772       op3 = cp_fold (TREE_OPERAND (x, 3));
2773 
2774       if (op0 != TREE_OPERAND (x, 0)
2775 	  || op1 != TREE_OPERAND (x, 1)
2776 	  || op2 != TREE_OPERAND (x, 2)
2777 	  || op3 != TREE_OPERAND (x, 3))
2778 	{
2779 	  if (op0 == error_mark_node
2780 	      || op1 == error_mark_node
2781 	      || op2 == error_mark_node
2782 	      || op3 == error_mark_node)
2783 	    x = error_mark_node;
2784 	  else
2785 	    {
2786 	      x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3);
2787 	      TREE_READONLY (x) = TREE_READONLY (org_x);
2788 	      TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
2789 	      TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
2790 	    }
2791 	}
2792 
2793       x = fold (x);
2794       break;
2795 
2796     case SAVE_EXPR:
2797       /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
2798 	 folding, evaluates to an invariant.  In that case no need to wrap
2799 	 this folded tree with a SAVE_EXPR.  */
2800       r = cp_fold (TREE_OPERAND (x, 0));
2801       if (tree_invariant_p (r))
2802 	x = r;
2803       break;
2804 
2805     default:
2806       return org_x;
2807     }
2808 
2809   fold_cache->put (org_x, x);
  /* Make sure we don't try to fold an already folded result again.  */
2811   if (x != org_x)
2812     fold_cache->put (x, x);
2813 
2814   return x;
2815 }
2816 
/* Look up "hot", "cold", "likely" or "unlikely" in attribute list
   LIST.  */
2818 
2819 tree
2820 lookup_hotness_attribute (tree list)
2821 {
2822   for (; list; list = TREE_CHAIN (list))
2823     {
2824       tree name = get_attribute_name (list);
2825       if (is_attribute_p ("hot", name)
2826 	  || is_attribute_p ("cold", name)
2827 	  || is_attribute_p ("likely", name)
2828 	  || is_attribute_p ("unlikely", name))
2829 	break;
2830     }
2831   return list;
2832 }
2833 
/* Remove the "hot", "cold", "likely" and "unlikely" attributes from
   LIST.  */
2835 
2836 static tree
2837 remove_hotness_attribute (tree list)
2838 {
2839   list = remove_attribute ("hot", list);
2840   list = remove_attribute ("cold", list);
2841   list = remove_attribute ("likely", list);
2842   list = remove_attribute ("unlikely", list);
2843   return list;
2844 }
2845 
/* If [[likely]] or [[unlikely]] appears on this statement, turn it into a
   PREDICT_EXPR.  */
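/* For example (an illustrative case):

     if (x)
       [[likely]] return 1;

   adds PREDICT_EXPR <PRED_HOT_LABEL, TAKEN> ahead of the return and
   drops the attribute, while [[unlikely]] yields
   PREDICT_EXPR <PRED_COLD_LABEL, NOT_TAKEN>.  */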
2848 
2849 tree
2850 process_stmt_hotness_attribute (tree std_attrs, location_t attrs_loc)
2851 {
2852   if (std_attrs == error_mark_node)
2853     return std_attrs;
2854   if (tree attr = lookup_hotness_attribute (std_attrs))
2855     {
2856       tree name = get_attribute_name (attr);
2857       bool hot = (is_attribute_p ("hot", name)
2858 		  || is_attribute_p ("likely", name));
2859       tree pred = build_predict_expr (hot ? PRED_HOT_LABEL : PRED_COLD_LABEL,
2860 				      hot ? TAKEN : NOT_TAKEN);
2861       SET_EXPR_LOCATION (pred, attrs_loc);
2862       add_stmt (pred);
2863       if (tree other = lookup_hotness_attribute (TREE_CHAIN (attr)))
2864 	warning (OPT_Wattributes, "ignoring attribute %qE after earlier %qE",
2865 		 get_attribute_name (other), name);
2866       std_attrs = remove_hotness_attribute (std_attrs);
2867     }
2868   return std_attrs;
2869 }
2870 
2871 #include "gt-cp-cp-gimplify.h"
2872