/* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.

   Copyright (C) 2002-2018 Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "basic-block.h"
#include "cp-tree.h"
#include "gimple.h"
#include "predict.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "gimplify.h"
#include "c-family/c-ubsan.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"

/* Forward declarations.  */

static tree cp_genericize_r (tree *, int *, void *);
static tree cp_fold_r (tree *, int *, void *);
static void cp_genericize_tree (tree*, bool);
static tree cp_fold (tree);

/* Local declarations.  */

enum bc_t { bc_break = 0, bc_continue = 1 };

/* Stack of labels which are targets for "break" or "continue",
   linked through TREE_CHAIN.  */
static tree bc_label[2];

/* Begin a scope which can be exited by a break or continue statement.  BC
   indicates which.

   Just creates a label with location LOCATION and pushes it into the current
   context.  */

static tree
begin_bc_block (enum bc_t bc, location_t location)
{
  tree label = create_artificial_label (location);
  DECL_CHAIN (label) = bc_label[bc];
  bc_label[bc] = label;
  if (bc == bc_break)
    LABEL_DECL_BREAK (label) = true;
  else
    LABEL_DECL_CONTINUE (label) = true;
  return label;
}

/* Finish a scope which can be exited by a break or continue statement.
   LABEL was returned from the most recent call to begin_bc_block.  BLOCK is
   an expression for the contents of the scope.

   If we saw a break (or continue) in the scope, append a LABEL_EXPR to
   BLOCK.  Otherwise, just forget the label.  */

static void
finish_bc_block (tree *block, enum bc_t bc, tree label)
{
  gcc_assert (label == bc_label[bc]);

  if (TREE_USED (label))
    append_to_statement_list (build1 (LABEL_EXPR, void_type_node, label),
			      block);

  bc_label[bc] = DECL_CHAIN (label);
  DECL_CHAIN (label) = NULL_TREE;
}

/* Get the LABEL_EXPR to represent a break or continue statement
   in the current block scope.  BC indicates which.  */

static tree
get_bc_label (enum bc_t bc)
{
  tree label = bc_label[bc];

  /* Mark the label used for finish_bc_block.  */
  TREE_USED (label) = 1;
  return label;
}
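
/* An editorial sketch (not part of the original sources) of how the three
   helpers above cooperate; a loop genericizer brackets its body roughly
   like this:

     tree blab = begin_bc_block (bc_break, locus);
     // ... walk the body; each BREAK_STMT lowers to
     //   GOTO_EXPR <get_bc_label (bc_break)> ...
     finish_bc_block (&stmt_list, bc_break, blab);
     // appends "blab:" only if some break actually used the label

   Because begin_bc_block chains the previous label through DECL_CHAIN,
   bc_label[] behaves as a stack, so nested loops and switches each see
   their own innermost label.  */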

/* Genericize a TRY_BLOCK.  */

static void
genericize_try_block (tree *stmt_p)
{
  tree body = TRY_STMTS (*stmt_p);
  tree cleanup = TRY_HANDLERS (*stmt_p);

  *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
}

/* Genericize a HANDLER by converting to a CATCH_EXPR.  */

static void
genericize_catch_block (tree *stmt_p)
{
  tree type = HANDLER_TYPE (*stmt_p);
  tree body = HANDLER_BODY (*stmt_p);

  /* FIXME should the caught type go in TREE_TYPE?  */
  *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
}

/* A terser interface for building a representation of an exception
   specification.  */

static tree
build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
{
  tree t;

  /* FIXME should the allowed types go in TREE_TYPE?  */
  t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
  append_to_statement_list (failure, &EH_FILTER_FAILURE (t));

  t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
  append_to_statement_list (body, &TREE_OPERAND (t, 0));

  return t;
}

/* Genericize an EH_SPEC_BLOCK by converting it to a
   TRY_CATCH_EXPR/EH_FILTER_EXPR pair.  */

static void
genericize_eh_spec_block (tree *stmt_p)
{
  tree body = EH_SPEC_STMTS (*stmt_p);
  tree allowed = EH_SPEC_RAISES (*stmt_p);
  tree failure = build_call_n (call_unexpected_fn, 1, build_exc_ptr ());

  *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
  TREE_NO_WARNING (*stmt_p) = true;
  TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
}

/* Genericize an IF_STMT by turning it into a COND_EXPR.  */

static void
genericize_if_stmt (tree *stmt_p)
{
  tree stmt, cond, then_, else_;
  location_t locus = EXPR_LOCATION (*stmt_p);

  stmt = *stmt_p;
  cond = IF_COND (stmt);
  then_ = THEN_CLAUSE (stmt);
  else_ = ELSE_CLAUSE (stmt);

  if (!then_)
    then_ = build_empty_stmt (locus);
  if (!else_)
    else_ = build_empty_stmt (locus);

  if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
    stmt = then_;
  else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
    stmt = else_;
  else
    stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
  if (!EXPR_HAS_LOCATION (stmt))
    protected_set_expr_location (stmt, locus);
  *stmt_p = stmt;
}
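
/* For illustration (a sketch of the expected GENERIC shape, not a
   compiler dump): "if (c) f (); else g ();" becomes

     COND_EXPR <c, f (), g ()>   // of void type

   and when the condition folds to a constant, the statement reduces to
   the live arm alone, provided the dead arm has no side-effects.  */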

/* Build a generic representation of one of the C loop forms.  COND is the
   loop condition or NULL_TREE.  BODY is the (possibly compound) statement
   controlled by the loop.  INCR is the increment expression of a for-loop,
   or NULL_TREE.  COND_IS_FIRST indicates whether the condition is
   evaluated before the loop body as in while and for loops, or after the
   loop body as in do-while loops.  */

static void
genericize_cp_loop (tree *stmt_p, location_t start_locus, tree cond, tree body,
		    tree incr, bool cond_is_first, int *walk_subtrees,
		    void *data)
{
  tree blab, clab;
  tree exit = NULL;
  tree stmt_list = NULL;

  blab = begin_bc_block (bc_break, start_locus);
  clab = begin_bc_block (bc_continue, start_locus);

  protected_set_expr_location (incr, start_locus);

  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&incr, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  if (cond && TREE_CODE (cond) != INTEGER_CST)
    {
      /* If COND is constant, don't bother building an exit.  If it's false,
	 we won't build a loop.  If it's true, any exits are in the body.  */
      location_t cloc = EXPR_LOC_OR_LOC (cond, start_locus);
      exit = build1_loc (cloc, GOTO_EXPR, void_type_node,
			 get_bc_label (bc_break));
      exit = fold_build3_loc (cloc, COND_EXPR, void_type_node, cond,
			      build_empty_stmt (cloc), exit);
    }

  if (exit && cond_is_first)
    append_to_statement_list (exit, &stmt_list);
  append_to_statement_list (body, &stmt_list);
  finish_bc_block (&stmt_list, bc_continue, clab);
  append_to_statement_list (incr, &stmt_list);
  if (exit && !cond_is_first)
    append_to_statement_list (exit, &stmt_list);

  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  tree loop;
  if (cond && integer_zerop (cond))
    {
      if (cond_is_first)
	loop = fold_build3_loc (start_locus, COND_EXPR,
				void_type_node, cond, stmt_list,
				build_empty_stmt (start_locus));
      else
	loop = stmt_list;
    }
  else
    {
      location_t loc = start_locus;
      if (!cond || integer_nonzerop (cond))
	loc = EXPR_LOCATION (expr_first (body));
      if (loc == UNKNOWN_LOCATION)
	loc = start_locus;
      loop = build1_loc (loc, LOOP_EXPR, void_type_node, stmt_list);
    }

  stmt_list = NULL;
  append_to_statement_list (loop, &stmt_list);
  finish_bc_block (&stmt_list, bc_break, blab);
  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  *stmt_p = stmt_list;
}
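
/* A sketch (assumed shape, for illustration) of what "while (c) body;"
   becomes after this function:

     LOOP_EXPR
       COND_EXPR <c, {}, GOTO_EXPR <break_label>>   // the EXIT above
       body                                         // continue jumps...
       continue_label:                              // ...here, if used
     break_label:                                   // from finish_bc_block

   For a do-while loop the exit test is appended after the body instead,
   and for a constant-false condition no LOOP_EXPR is built at all.  */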

/* Genericize a FOR_STMT node *STMT_P.  */

static void
genericize_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree expr = NULL;
  tree loop;
  tree init = FOR_INIT_STMT (stmt);

  if (init)
    {
      cp_walk_tree (&init, cp_genericize_r, data, NULL);
      append_to_statement_list (init, &expr);
    }

  genericize_cp_loop (&loop, EXPR_LOCATION (stmt), FOR_COND (stmt),
		      FOR_BODY (stmt), FOR_EXPR (stmt), 1, walk_subtrees, data);
  append_to_statement_list (loop, &expr);
  if (expr == NULL_TREE)
    expr = loop;
  *stmt_p = expr;
}

/* Genericize a WHILE_STMT node *STMT_P.  */

static void
genericize_while_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), WHILE_COND (stmt),
		      WHILE_BODY (stmt), NULL_TREE, 1, walk_subtrees, data);
}

/* Genericize a DO_STMT node *STMT_P.  */

static void
genericize_do_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), DO_COND (stmt),
		      DO_BODY (stmt), NULL_TREE, 0, walk_subtrees, data);
}

/* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR.  */

static void
genericize_switch_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree break_block, body, cond, type;
  location_t stmt_locus = EXPR_LOCATION (stmt);

  break_block = begin_bc_block (bc_break, stmt_locus);

  body = SWITCH_STMT_BODY (stmt);
  if (!body)
    body = build_empty_stmt (stmt_locus);
  cond = SWITCH_STMT_COND (stmt);
  type = SWITCH_STMT_TYPE (stmt);

  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&type, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  if (TREE_USED (break_block))
    SWITCH_BREAK_LABEL_P (break_block) = 1;
  finish_bc_block (&body, bc_break, break_block);
  *stmt_p = build2_loc (stmt_locus, SWITCH_EXPR, type, cond, body);
  SWITCH_ALL_CASES_P (*stmt_p) = SWITCH_STMT_ALL_CASES_P (stmt);
  gcc_checking_assert (!SWITCH_STMT_NO_BREAK_P (stmt)
		       || !TREE_USED (break_block));
}
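
/* Sketch of the lowering (assumed, for illustration):
   "switch (x) { case 1: f (); break; }" becomes

     SWITCH_EXPR <type, x, { case 1: f (); goto break_label;
			     break_label: }>

   where the trailing label is emitted by finish_bc_block only when some
   BREAK_STMT in the body actually used it.  */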

/* Genericize a CONTINUE_STMT node *STMT_P.  */

static void
genericize_continue_stmt (tree *stmt_p)
{
  tree stmt_list = NULL;
  tree pred = build_predict_expr (PRED_CONTINUE, NOT_TAKEN);
  tree label = get_bc_label (bc_continue);
  location_t location = EXPR_LOCATION (*stmt_p);
  tree jump = build1_loc (location, GOTO_EXPR, void_type_node, label);
  append_to_statement_list_force (pred, &stmt_list);
  append_to_statement_list (jump, &stmt_list);
  *stmt_p = stmt_list;
}

/* Genericize a BREAK_STMT node *STMT_P.  */

static void
genericize_break_stmt (tree *stmt_p)
{
  tree label = get_bc_label (bc_break);
  location_t location = EXPR_LOCATION (*stmt_p);
  *stmt_p = build1_loc (location, GOTO_EXPR, void_type_node, label);
}

/* Genericize an OMP_FOR node *STMT_P.  */

static void
genericize_omp_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  location_t locus = EXPR_LOCATION (stmt);
  tree clab = begin_bc_block (bc_continue, locus);

  cp_walk_tree (&OMP_FOR_BODY (stmt), cp_genericize_r, data, NULL);
  if (TREE_CODE (stmt) != OMP_TASKLOOP)
    cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INIT (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_COND (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INCR (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  finish_bc_block (&OMP_FOR_BODY (stmt), bc_continue, clab);
}

/* Hook into the middle of gimplifying an OMP_FOR node.  */

static enum gimplify_status
cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  gimple_seq seq = NULL;

  /* Protect ourselves from recursion.  */
  if (OMP_FOR_GIMPLIFYING_P (for_stmt))
    return GS_UNHANDLED;
  OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;

  gimplify_and_add (for_stmt, &seq);
  gimple_seq_add_seq (pre_p, seq);

  OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;

  return GS_ALL_DONE;
}

/* Gimplify an EXPR_STMT node.  */

static void
gimplify_expr_stmt (tree *stmt_p)
{
  tree stmt = EXPR_STMT_EXPR (*stmt_p);

  if (stmt == error_mark_node)
    stmt = NULL;

  /* Gimplification of a statement expression will nullify the
     statement if all its side effects are moved to *PRE_P and *POST_P.

     In this case we will not want to emit the gimplified statement.
     However, we may still want to emit a warning, so we do that before
     gimplification.  */
  if (stmt && warn_unused_value)
    {
      if (!TREE_SIDE_EFFECTS (stmt))
	{
	  if (!IS_EMPTY_STMT (stmt)
	      && !VOID_TYPE_P (TREE_TYPE (stmt))
	      && !TREE_NO_WARNING (stmt))
	    warning (OPT_Wunused_value, "statement with no effect");
	}
      else
	warn_if_unused_value (stmt, input_location);
    }

  if (stmt == NULL_TREE)
    stmt = alloc_stmt_list ();

  *stmt_p = stmt;
}

/* Gimplify initialization from an AGGR_INIT_EXPR.  */

static void
cp_gimplify_init_expr (tree *expr_p)
{
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;

  /* What about code that pulls out the temp and uses it elsewhere?  I
     think that such code never uses the TARGET_EXPR as an initializer.  If
     I'm wrong, we'll abort because the temp won't have any RTL.  In that
     case, I guess we'll need to replace references somehow.  */
  if (TREE_CODE (from) == TARGET_EXPR)
    from = TARGET_EXPR_INITIAL (from);

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  */
  for (t = from; t; )
    {
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
	 replace the slot operand with our target.

	 Should we add a target parm to gimplify_expr instead?  No, as in this
	 case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR
	  || TREE_CODE (sub) == VEC_INIT_EXPR)
	{
	  if (TREE_CODE (sub) == AGGR_INIT_EXPR)
	    AGGR_INIT_EXPR_SLOT (sub) = to;
	  else
	    VEC_INIT_EXPR_SLOT (sub) = to;
	  *expr_p = from;

	  /* The initialization is now a side-effect, so the container can
	     become void.  */
	  if (from != sub)
	    TREE_TYPE (from) = void_type_node;
	}

      /* Handle aggregate NSDMI.  */
      replace_placeholders (sub, to);

      if (t == sub)
	break;
      else
	t = TREE_OPERAND (t, 1);
    }
}

/* Gimplify a MUST_NOT_THROW_EXPR.  */

static enum gimplify_status
gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree stmt = *expr_p;
  tree temp = voidify_wrapper_expr (stmt, NULL);
  tree body = TREE_OPERAND (stmt, 0);
  gimple_seq try_ = NULL;
  gimple_seq catch_ = NULL;
  gimple *mnt;

  gimplify_and_add (body, &try_);
  mnt = gimple_build_eh_must_not_throw (terminate_fn);
  gimple_seq_add_stmt_without_update (&catch_, mnt);
  mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);

  gimple_seq_add_stmt_without_update (pre_p, mnt);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL;
  return GS_ALL_DONE;
}
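
/* The resulting sequence is roughly (a sketch following the calls above):

     try
       <gimplified BODY>
     catch
       GIMPLE_EH_MUST_NOT_THROW <terminate_fn>

   so any exception escaping BODY calls std::terminate.  */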

/* Return TRUE if an operand (OP) of a given TYPE being copied is
   really just an empty class copy.

   Check that the operand has a simple form so that TARGET_EXPRs and
   non-empty CONSTRUCTORs get reduced properly, and we leave the
   return slot optimization alone because it isn't a copy.  */

static bool
simple_empty_class_p (tree type, tree op)
{
  return
    ((TREE_CODE (op) == COMPOUND_EXPR
      && simple_empty_class_p (type, TREE_OPERAND (op, 1)))
     || TREE_CODE (op) == EMPTY_CLASS_EXPR
     || is_gimple_lvalue (op)
     || INDIRECT_REF_P (op)
     || (TREE_CODE (op) == CONSTRUCTOR
	 && CONSTRUCTOR_NELTS (op) == 0
	 && !TREE_CLOBBER_P (op))
     || (TREE_CODE (op) == CALL_EXPR
	 && !CALL_EXPR_RETURN_SLOT_OPT (op)))
    && is_really_empty_class (type);
}
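
/* For instance (illustrative only): given "struct E {}; E a, b;", the
   copy in "a = b;" transfers no data, so the MODIFY_EXPR case of
   cp_gimplify_expr below can drop the copy itself and keep only the
   side-effects of evaluating the operands.  */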

/* Returns true if evaluating E as an lvalue has side-effects;
   specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't really
   have side-effects until there is a read or write through it.  */

static bool
lvalue_has_side_effects (tree e)
{
  if (!TREE_SIDE_EFFECTS (e))
    return false;
  while (handled_component_p (e))
    {
      if (TREE_CODE (e) == ARRAY_REF
	  && TREE_SIDE_EFFECTS (TREE_OPERAND (e, 1)))
	return true;
      e = TREE_OPERAND (e, 0);
    }
  if (DECL_P (e))
    /* Just naming a variable has no side-effects.  */
    return false;
  else if (INDIRECT_REF_P (e))
    /* Similarly, indirection has no side-effects.  */
    return TREE_SIDE_EFFECTS (TREE_OPERAND (e, 0));
  else
    /* For anything else, trust TREE_SIDE_EFFECTS.  */
    return TREE_SIDE_EFFECTS (e);
}
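
/* Examples (editorial, for illustration): for "volatile int v;" the bare
   lvalue "v" has TREE_SIDE_EFFECTS set but this returns false, since
   merely naming it reads nothing; "a[i++]" returns true because the
   index expression itself has side-effects.  */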

/* Do C++-specific gimplification.  Args are as for gimplify_expr.  */

int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  location_t loc = EXPR_LOC_OR_LOC (*expr_p, input_location);
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
	= STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
	location_t loc = input_location;
	tree init = VEC_INIT_EXPR_INIT (*expr_p);
	int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
	gcc_assert (EXPR_HAS_LOCATION (*expr_p));
	input_location = EXPR_LOCATION (*expr_p);
	*expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
				  init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
				  from_array,
				  tf_warning_or_error);
	hash_set<tree> pset;
	cp_walk_tree (expr_p, cp_fold_r, &pset, NULL);
	cp_genericize_tree (expr_p, false);
	ret = GS_OK;
	input_location = loc;
      }
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
	 THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

      /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
	 LHS of an assignment might also be involved in the RHS, as in bug
	 25979.  */
    case INIT_EXPR:
      cp_gimplify_init_expr (expr_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
	return GS_OK;
      /* Fall through.  */
    case MODIFY_EXPR:
    modify_expr_case:
      {
	/* If the back end isn't clever enough to know that the lhs and rhs
	   types are the same, add an explicit conversion.  */
	tree op0 = TREE_OPERAND (*expr_p, 0);
	tree op1 = TREE_OPERAND (*expr_p, 1);

	if (!error_operand_p (op0)
	    && !error_operand_p (op1)
	    && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
		|| TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
	    && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
	  TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
					      TREE_TYPE (op0), op1);

	else if (simple_empty_class_p (TREE_TYPE (op0), op1))
	  {
	    /* Remove any copies of empty classes.  Also drop volatile
	       variables on the RHS to avoid infinite recursion from
	       gimplify_expr trying to load the value.  */
	    if (TREE_SIDE_EFFECTS (op1))
	      {
		if (TREE_THIS_VOLATILE (op1)
		    && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
		  op1 = build_fold_addr_expr (op1);

		gimplify_and_add (op1, pre_p);
	      }
	    gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			   is_gimple_lvalue, fb_lvalue);
	    *expr_p = TREE_OPERAND (*expr_p, 0);
	  }
	/* P0145 says that the RHS is sequenced before the LHS.
	   gimplify_modify_expr gimplifies the RHS before the LHS, but that
	   isn't quite strong enough in two cases:

	   1) gimplify.c wants to leave a CALL_EXPR on the RHS, which would
	   mean it's evaluated after the LHS.

	   2) the value calculation of the RHS is also sequenced before the
	   LHS, so for scalar assignment we need to preevaluate if the
	   RHS could be affected by LHS side-effects even if it has no
	   side-effects of its own.  We don't need this for classes because
	   class assignment takes its RHS by reference.  */
	else if (flag_strong_eval_order > 1
		 && TREE_CODE (*expr_p) == MODIFY_EXPR
		 && lvalue_has_side_effects (op0)
		 && (TREE_CODE (op1) == CALL_EXPR
		     || (SCALAR_TYPE_P (TREE_TYPE (op1))
			 && !TREE_CONSTANT (op1))))
	  TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (op1, pre_p);
      }
      ret = GS_OK;
      break;

    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case USING_STMT:
      gcc_unreachable ();

    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
      gcc_unreachable ();

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_TASKLOOP:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;

    case UNARY_PLUS_EXPR:
      {
	tree arg = TREE_OPERAND (*expr_p, 0);
	tree type = TREE_TYPE (*expr_p);
	*expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
					    : arg;
	ret = GS_OK;
      }
      break;

    case CALL_EXPR:
      ret = GS_OK;
      if (!CALL_EXPR_FN (*expr_p))
	/* Internal function call.  */;
      else if (CALL_EXPR_REVERSE_ARGS (*expr_p))
	{
	  /* This is a call to a (compound) assignment operator that used
	     the operator syntax; gimplify the RHS first.  */
	  gcc_assert (call_expr_nargs (*expr_p) == 2);
	  gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p));
	  enum gimplify_status t
	    = gimplify_arg (&CALL_EXPR_ARG (*expr_p, 1), pre_p, loc);
	  if (t == GS_ERROR)
	    ret = GS_ERROR;
	}
      else if (CALL_EXPR_ORDERED_ARGS (*expr_p))
	{
	  /* Leave the last argument for gimplify_call_expr, to avoid problems
	     with __builtin_va_arg_pack().  */
	  int nargs = call_expr_nargs (*expr_p) - 1;
	  for (int i = 0; i < nargs; ++i)
	    {
	      enum gimplify_status t
		= gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc);
	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
      else if (flag_strong_eval_order
	       && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p))
	{
	  /* If flag_strong_eval_order, evaluate the object argument first.  */
	  tree fntype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
	  if (POINTER_TYPE_P (fntype))
	    fntype = TREE_TYPE (fntype);
	  if (TREE_CODE (fntype) == METHOD_TYPE)
	    {
	      enum gimplify_status t
		= gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p, loc);
	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (*expr_p, 0)
	  && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
	      || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
	{
	  expr_p = &TREE_OPERAND (*expr_p, 0);
	  code = TREE_CODE (*expr_p);
	  /* Avoid going through the INIT_EXPR case, which can
	     degrade INIT_EXPRs into AGGR_INIT_EXPRs.  */
	  goto modify_expr_case;
	}
      /* Fall through.  */

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}
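
/* To illustrate the P0145 preevaluation in the MODIFY_EXPR case above
   (an assumed example): in "a[f ()] = g ();" with -std=c++17, g () must
   be evaluated before f (), so the RHS call is forced into a temporary
   in *PRE_P instead of being left for gimplify_modify_expr to evaluate
   after the LHS.  */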

static inline bool
is_invisiref_parm (const_tree t)
{
  return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
	  && DECL_BY_REFERENCE (t));
}
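
/* For example (illustrative): given "struct S { S (const S &); };", a
   parameter declared "S s" is passed by invisible reference, so its
   PARM_DECL has reference type and DECL_BY_REFERENCE set (see the fixup
   in cp_genericize below), and cp_genericize_r rewrites uses of it with
   convert_from_reference.  */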

/* Return true if the UIDs in both int tree maps are equal.  */

bool
cxx_int_tree_map_hasher::equal (cxx_int_tree_map *a, cxx_int_tree_map *b)
{
  return (a->uid == b->uid);
}

/* Hash a UID in a cxx_int_tree_map.  */

unsigned int
cxx_int_tree_map_hasher::hash (cxx_int_tree_map *item)
{
  return item->uid;
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* OpenMP context during genericization.  */

struct cp_genericize_omp_taskreg
{
  bool is_parallel;
  bool default_shared;
  struct cp_genericize_omp_taskreg *outer;
  splay_tree variables;
};

/* Return true if genericization should try to determine if
   DECL is firstprivate or shared within task regions.  */

static bool
omp_var_to_track (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (is_invisiref_parm (decl))
    type = TREE_TYPE (type);
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    type = TREE_TYPE (type);
  while (TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);
  if (type == error_mark_node || !CLASS_TYPE_P (type))
    return false;
  if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
    return false;
  if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    return false;
  return true;
}

/* Note DECL use in OpenMP region OMP_CTX during genericization.  */

static void
omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
{
  splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
					 (splay_tree_key) decl);
  if (n == NULL)
    {
      int flags = OMP_CLAUSE_DEFAULT_SHARED;
      if (omp_ctx->outer)
	omp_cxx_notice_variable (omp_ctx->outer, decl);
      if (!omp_ctx->default_shared)
	{
	  struct cp_genericize_omp_taskreg *octx;

	  for (octx = omp_ctx->outer; octx; octx = octx->outer)
	    {
	      n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
	      if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
		{
		  flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
		  break;
		}
	      if (octx->is_parallel)
		break;
	    }
	  if (octx == NULL
	      && (TREE_CODE (decl) == PARM_DECL
		  || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
		      && DECL_CONTEXT (decl) == current_function_decl)))
	    flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
	  if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
	    {
	      /* DECL is implicitly determined firstprivate in
		 the current task construct.  Ensure the copy ctor and
		 dtor are instantiated now, because during gimplification
		 it will already be too late.  */
	      tree type = TREE_TYPE (decl);
	      if (is_invisiref_parm (decl))
		type = TREE_TYPE (type);
	      else if (TREE_CODE (type) == REFERENCE_TYPE)
		type = TREE_TYPE (type);
	      while (TREE_CODE (type) == ARRAY_TYPE)
		type = TREE_TYPE (type);
	      get_copy_ctor (type, tf_none);
	      get_dtor (type, tf_none);
	    }
	}
      splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
    }
}

/* Genericization context.  */

struct cp_genericize_data
{
  hash_set<tree> *p_set;
  vec<tree> bind_expr_stack;
  struct cp_genericize_omp_taskreg *omp_ctx;
  tree try_block;
  bool no_sanitize_p;
  bool handle_invisiref_parm_p;
};

/* Perform any pre-gimplification folding of C++ front end trees to
   GENERIC.
   Note: the folding of non-OpenMP cases is something to move into the
   middle end.  For now we have most foldings only on GENERIC in
   fold-const, so we need to perform this before the transformation to
   GIMPLE form.  */

static tree
cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt;
  enum tree_code code;

  *stmt_p = stmt = cp_fold (*stmt_p);

  if (((hash_set<tree> *) data)->add (stmt))
    {
973 	 we can have exponential complexity with e.g. lots of nested
974 	 SAVE_EXPRs or TARGET_EXPRs.  cp_fold uses a cache and will return
975 	 always the same tree, which the first time cp_fold_r has been
976 	 called on it had the subtrees walked.  */
977       *walk_subtrees = 0;
      *walk_subtrees = 0;
      return NULL;
    }

  code = TREE_CODE (stmt);
  if (code == OMP_FOR || code == OMP_SIMD || code == OMP_DISTRIBUTE
      || code == OMP_TASKLOOP || code == OACC_LOOP)
    {
      tree x;
      int i, n;

      cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
      x = OMP_FOR_COND (stmt);
      if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
	{
	  cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
	  cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
	}
      else if (x && TREE_CODE (x) == TREE_VEC)
	{
	  n = TREE_VEC_LENGTH (x);
	  for (i = 0; i < n; i++)
	    {
	      tree o = TREE_VEC_ELT (x, i);
	      if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
		cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
	    }
	}
      x = OMP_FOR_INCR (stmt);
      if (x && TREE_CODE (x) == TREE_VEC)
	{
	  n = TREE_VEC_LENGTH (x);
	  for (i = 0; i < n; i++)
	    {
	      tree o = TREE_VEC_ELT (x, i);
	      if (o && TREE_CODE (o) == MODIFY_EXPR)
		o = TREE_OPERAND (o, 1);
	      if (o && (TREE_CODE (o) == PLUS_EXPR || TREE_CODE (o) == MINUS_EXPR
			|| TREE_CODE (o) == POINTER_PLUS_EXPR))
		{
		  cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
		  cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
		}
	    }
	}
      cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
      *walk_subtrees = 0;
    }

  return NULL;
}

/* Fold ALL the trees!  FIXME we should be able to remove this, but
   apparently that still causes optimization regressions.  */

void
cp_fold_function (tree fndecl)
{
  hash_set<tree> pset;
  cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &pset, NULL);
}

/* Perform any pre-gimplification lowering of C++ front end trees to
   GENERIC.  */

static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  hash_set<tree> *p_set = wtd->p_set;

  /* If in an OpenMP context, note var uses.  */
  if (__builtin_expect (wtd->omp_ctx != NULL, 0)
      && (VAR_P (stmt)
	  || TREE_CODE (stmt) == PARM_DECL
	  || TREE_CODE (stmt) == RESULT_DECL)
      && omp_var_to_track (stmt))
    omp_cxx_notice_variable (wtd->omp_ctx, stmt);

  /* Don't dereference parms in a thunk, pass the references through.  */
  if ((TREE_CODE (stmt) == CALL_EXPR && CALL_FROM_THUNK_P (stmt))
      || (TREE_CODE (stmt) == AGGR_INIT_EXPR && AGGR_INIT_FROM_THUNK_P (stmt)))
    {
      *walk_subtrees = 0;
      return NULL;
    }

  /* Dereference invisible reference parms.  */
  if (wtd->handle_invisiref_parm_p && is_invisiref_parm (stmt))
    {
      *stmt_p = convert_from_reference (stmt);
      p_set->add (*stmt_p);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (cp_function_chain->extern_decl_map
      && VAR_OR_FUNCTION_DECL_P (stmt)
      && DECL_EXTERNAL (stmt))
    {
      struct cxx_int_tree_map *h, in;
      in.uid = DECL_UID (stmt);
      h = cp_function_chain->extern_decl_map->find_with_hash (&in, in.uid);
      if (h)
	{
	  *stmt_p = h->to;
	  *walk_subtrees = 0;
	  return NULL;
	}
    }

  if (TREE_CODE (stmt) == INTEGER_CST
      && TREE_CODE (TREE_TYPE (stmt)) == REFERENCE_TYPE
      && (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
      && !wtd->no_sanitize_p)
    {
      ubsan_maybe_instrument_reference (stmt_p);
      if (*stmt_p != stmt)
	{
	  *walk_subtrees = 0;
	  return NULL_TREE;
	}
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (p_set->contains (stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  switch (TREE_CODE (stmt))
    {
    case ADDR_EXPR:
      if (is_invisiref_parm (TREE_OPERAND (stmt, 0)))
	{
	  /* If in an OpenMP context, note var uses.  */
	  if (__builtin_expect (wtd->omp_ctx != NULL, 0)
	      && omp_var_to_track (TREE_OPERAND (stmt, 0)))
	    omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
	  *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
	  *walk_subtrees = 0;
	}
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (stmt, 0) && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
	/* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
	*walk_subtrees = 0;
      break;

    case OMP_CLAUSE:
      switch (OMP_CLAUSE_CODE (stmt))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Don't dereference an invisiref in OpenMP clauses.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    {
	      *walk_subtrees = 0;
	      if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
		cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
			      cp_genericize_r, data, NULL);
	    }
	  break;
	case OMP_CLAUSE_PRIVATE:
	  /* Don't dereference an invisiref in OpenMP clauses.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    *walk_subtrees = 0;
	  else if (wtd->omp_ctx != NULL)
	    {
	      /* A private clause doesn't cause any references to the
		 var in outer contexts, so avoid calling
		 omp_cxx_notice_variable for it.  */
	      struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
	      wtd->omp_ctx = NULL;
	      cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
			    data, NULL);
	      wtd->omp_ctx = old;
	      *walk_subtrees = 0;
	    }
	  break;
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	  /* Don't dereference an invisiref in OpenMP clauses.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    *walk_subtrees = 0;
	  break;
	case OMP_CLAUSE_REDUCTION:
	  /* Don't dereference an invisiref in reduction clause's
	     OMP_CLAUSE_DECL either.  OMP_CLAUSE_REDUCTION_{INIT,MERGE}
	     still needs to be genericized.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    {
	      *walk_subtrees = 0;
	      if (OMP_CLAUSE_REDUCTION_INIT (stmt))
		cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
			      cp_genericize_r, data, NULL);
	      if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
		cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
			      cp_genericize_r, data, NULL);
	    }
	  break;
	default:
	  break;
	}
      break;

    /* Due to the way voidify_wrapper_expr is written, we don't get a chance
       to lower this construct before scanning it, so we need to lower these
       before doing anything else.  */
    case CLEANUP_STMT:
      *stmt_p = build2_loc (EXPR_LOCATION (stmt),
			    CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
						   : TRY_FINALLY_EXPR,
			    void_type_node,
			    CLEANUP_BODY (stmt),
			    CLEANUP_EXPR (stmt));
      break;

    case IF_STMT:
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);

    /* COND_EXPR might have incompatible types in branches if one or both
       arms are bitfields.  Fix it up now.  */
    case COND_EXPR:
      {
	tree type_left
	  = (TREE_OPERAND (stmt, 1)
	     ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
	     : NULL_TREE);
	tree type_right
	  = (TREE_OPERAND (stmt, 2)
	     ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
	     : NULL_TREE);
	if (type_left
	    && !useless_type_conversion_p (TREE_TYPE (stmt),
					   TREE_TYPE (TREE_OPERAND (stmt, 1))))
	  {
	    TREE_OPERAND (stmt, 1)
	      = fold_convert (type_left, TREE_OPERAND (stmt, 1));
	    gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						   type_left));
	  }
	if (type_right
	    && !useless_type_conversion_p (TREE_TYPE (stmt),
					   TREE_TYPE (TREE_OPERAND (stmt, 2))))
	  {
	    TREE_OPERAND (stmt, 2)
	      = fold_convert (type_right, TREE_OPERAND (stmt, 2));
	    gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						   type_right));
	  }
      }
      break;

    case BIND_EXPR:
      if (__builtin_expect (wtd->omp_ctx != NULL, 0))
	{
	  tree decl;
	  for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
	    if (VAR_P (decl)
		&& !DECL_EXTERNAL (decl)
		&& omp_var_to_track (decl))
	      {
		splay_tree_node n
		  = splay_tree_lookup (wtd->omp_ctx->variables,
				       (splay_tree_key) decl);
		if (n == NULL)
		  splay_tree_insert (wtd->omp_ctx->variables,
				     (splay_tree_key) decl,
				     TREE_STATIC (decl)
				     ? OMP_CLAUSE_DEFAULT_SHARED
				     : OMP_CLAUSE_DEFAULT_PRIVATE);
	      }
	}
      if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
	{
	  /* The point here is to not sanitize static initializers.  */
	  bool no_sanitize_p = wtd->no_sanitize_p;
	  wtd->no_sanitize_p = true;
	  for (tree decl = BIND_EXPR_VARS (stmt);
	       decl;
	       decl = DECL_CHAIN (decl))
	    if (VAR_P (decl)
		&& TREE_STATIC (decl)
		&& DECL_INITIAL (decl))
	      cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
	  wtd->no_sanitize_p = no_sanitize_p;
	}
      wtd->bind_expr_stack.safe_push (stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
		    cp_genericize_r, data, NULL);
      wtd->bind_expr_stack.pop ();
      break;

    case USING_STMT:
      {
	tree block = NULL_TREE;

	/* Get the innermost enclosing GIMPLE_BIND that has a non-NULL
	   BLOCK, and append an IMPORTED_DECL to its
	   BLOCK_VARS chain.  */
	if (wtd->bind_expr_stack.exists ())
	  {
	    int i;
	    for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
	      if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
		break;
	  }
	if (block)
	  {
	    tree decl = TREE_OPERAND (stmt, 0);
	    gcc_assert (decl);

	    if (undeduced_auto_decl (decl))
	      /* Omit from the GENERIC, the back-end can't handle it.  */;
	    else
	      {
		tree using_directive = make_node (IMPORTED_DECL);
		TREE_TYPE (using_directive) = void_type_node;

		IMPORTED_DECL_ASSOCIATED_DECL (using_directive) = decl;
		DECL_CHAIN (using_directive) = BLOCK_VARS (block);
		BLOCK_VARS (block) = using_directive;
	      }
	  }
	/* The USING_STMT won't appear in GENERIC.  */
	*stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
	*walk_subtrees = 0;
      }
      break;

    case DECL_EXPR:
      if (TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
	{
	  /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
	  *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
	  *walk_subtrees = 0;
	}
      else
	{
	  tree d = DECL_EXPR_DECL (stmt);
	  if (VAR_P (d))
	    gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
	}
      break;

    case OMP_PARALLEL:
    case OMP_TASK:
    case OMP_TASKLOOP:
      {
	struct cp_genericize_omp_taskreg omp_ctx;
	tree c, decl;
	splay_tree_node n;

	*walk_subtrees = 0;
	cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
	omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
	omp_ctx.default_shared = omp_ctx.is_parallel;
	omp_ctx.outer = wtd->omp_ctx;
	omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
	wtd->omp_ctx = &omp_ctx;
	for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	  switch (OMP_CLAUSE_CODE (c))
	    {
	    case OMP_CLAUSE_SHARED:
	    case OMP_CLAUSE_PRIVATE:
	    case OMP_CLAUSE_FIRSTPRIVATE:
	    case OMP_CLAUSE_LASTPRIVATE:
	      decl = OMP_CLAUSE_DECL (c);
	      if (decl == error_mark_node || !omp_var_to_track (decl))
		break;
	      n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
	      if (n != NULL)
		break;
	      splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
				 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
				 ? OMP_CLAUSE_DEFAULT_SHARED
				 : OMP_CLAUSE_DEFAULT_PRIVATE);
	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE && omp_ctx.outer)
		omp_cxx_notice_variable (omp_ctx.outer, decl);
	      break;
	    case OMP_CLAUSE_DEFAULT:
	      if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
		omp_ctx.default_shared = true;
	    default:
	      break;
	    }
	if (TREE_CODE (stmt) == OMP_TASKLOOP)
	  genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
	else
	  cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
	wtd->omp_ctx = omp_ctx.outer;
	splay_tree_delete (omp_ctx.variables);
      }
      break;

    case TRY_BLOCK:
      {
	*walk_subtrees = 0;
	tree try_block = wtd->try_block;
	wtd->try_block = stmt;
	cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
	wtd->try_block = try_block;
	cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
      }
      break;

    case MUST_NOT_THROW_EXPR:
      /* MUST_NOT_THROW_COND might be something else with TM.  */
      if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
	{
	  *walk_subtrees = 0;
	  tree try_block = wtd->try_block;
	  wtd->try_block = stmt;
	  cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
	  wtd->try_block = try_block;
	}
      break;

    case THROW_EXPR:
      {
	location_t loc = location_of (stmt);
	if (TREE_NO_WARNING (stmt))
	  /* Never mind.  */;
	else if (wtd->try_block)
	  {
	    if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR
		&& warning_at (loc, OPT_Wterminate,
			       "throw will always call terminate()")
		&& cxx_dialect >= cxx11
		&& DECL_DESTRUCTOR_P (current_function_decl))
	      inform (loc, "in C++11 destructors default to noexcept");
	  }
	else
	  {
	    if (warn_cxx11_compat && cxx_dialect < cxx11
		&& DECL_DESTRUCTOR_P (current_function_decl)
		&& (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
		    == NULL_TREE)
		&& (get_defaulted_eh_spec (current_function_decl)
		    == empty_except_spec))
	      warning_at (loc, OPT_Wc__11_compat,
			  "in C++11 this throw will terminate because "
			  "destructors default to noexcept");
	  }
      }
      break;

    case CONVERT_EXPR:
      gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
      break;

    case FOR_STMT:
      genericize_for_stmt (stmt_p, walk_subtrees, data);
      break;

    case WHILE_STMT:
      genericize_while_stmt (stmt_p, walk_subtrees, data);
      break;

    case DO_STMT:
      genericize_do_stmt (stmt_p, walk_subtrees, data);
      break;

    case SWITCH_STMT:
      genericize_switch_stmt (stmt_p, walk_subtrees, data);
      break;

    case CONTINUE_STMT:
      genericize_continue_stmt (stmt_p);
      break;

    case BREAK_STMT:
      genericize_break_stmt (stmt_p);
      break;

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
      genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
      break;

    case PTRMEM_CST:
      /* By the time we get here we're handing off to the back end, so we don't
	 need or want to preserve PTRMEM_CST anymore.  */
      *stmt_p = cplus_expand_constant (stmt);
      *walk_subtrees = 0;
      break;

    case MEM_REF:
      /* For MEM_REF, make sure not to sanitize the second operand even
	 if it has reference type.  It is just an offset with a type
	 holding other information.  There is no other processing we
	 need to do for INTEGER_CSTs, so just ignore the second argument
	 unconditionally.  */
      cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
      *walk_subtrees = 0;
      break;

    case NOP_EXPR:
      if (!wtd->no_sanitize_p
	  && sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT)
	  && TREE_CODE (TREE_TYPE (stmt)) == REFERENCE_TYPE)
	ubsan_maybe_instrument_reference (stmt_p);
      break;

    case CALL_EXPR:
      if (!wtd->no_sanitize_p
	  && sanitize_flags_p ((SANITIZE_NULL
				| SANITIZE_ALIGNMENT | SANITIZE_VPTR)))
	{
	  tree fn = CALL_EXPR_FN (stmt);
	  if (fn != NULL_TREE
	      && !error_operand_p (fn)
	      && POINTER_TYPE_P (TREE_TYPE (fn))
	      && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
	    {
	      bool is_ctor
		= TREE_CODE (fn) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
		  && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
	      if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT))
		ubsan_maybe_instrument_member_call (stmt, is_ctor);
	      if (sanitize_flags_p (SANITIZE_VPTR) && !is_ctor)
		cp_ubsan_maybe_instrument_member_call (stmt);
	    }
	  else if (fn == NULL_TREE
		   && CALL_EXPR_IFN (stmt) == IFN_UBSAN_NULL
		   && TREE_CODE (CALL_EXPR_ARG (stmt, 0)) == INTEGER_CST
		   && (TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (stmt, 0)))
		       == REFERENCE_TYPE))
	    *walk_subtrees = 0;
	}
      /* Fall through.  */
    case AGGR_INIT_EXPR:
      /* For calls to a multi-versioned function, overload resolution
	 returns the function with the highest target priority, that is,
	 the version that will be checked for dispatching first.  If this
	 version is inlinable, a direct call to this version can be made;
	 otherwise the call should go through the dispatcher.  */
      {
	tree fn = cp_get_callee_fndecl_nofold (stmt);
	if (fn && DECL_FUNCTION_VERSIONED (fn)
	    && (current_function_decl == NULL
		|| !targetm.target_option.can_inline_p (current_function_decl,
							fn)))
	  if (tree dis = get_function_version_dispatcher (fn))
	    {
	      mark_versions_used (dis);
	      dis = build_address (dis);
	      if (TREE_CODE (stmt) == CALL_EXPR)
		CALL_EXPR_FN (stmt) = dis;
	      else
		AGGR_INIT_EXPR_FN (stmt) = dis;
	    }
      }
      break;

    case TARGET_EXPR:
      if (TARGET_EXPR_INITIAL (stmt)
	  && TREE_CODE (TARGET_EXPR_INITIAL (stmt)) == CONSTRUCTOR
	  && CONSTRUCTOR_PLACEHOLDER_BOUNDARY (TARGET_EXPR_INITIAL (stmt)))
	TARGET_EXPR_NO_ELIDE (stmt) = 1;
      break;

    default:
      if (IS_TYPE_OR_DECL_P (stmt))
	*walk_subtrees = 0;
      break;
    }

  p_set->add (*stmt_p);

  return NULL;
}

/* Lower C++ front end trees to GENERIC in T_P.  */

static void
cp_genericize_tree (tree* t_p, bool handle_invisiref_parm_p)
{
  struct cp_genericize_data wtd;

  wtd.p_set = new hash_set<tree>;
  wtd.bind_expr_stack.create (0);
  wtd.omp_ctx = NULL;
  wtd.try_block = NULL_TREE;
  wtd.no_sanitize_p = false;
  wtd.handle_invisiref_parm_p = handle_invisiref_parm_p;
  cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
  delete wtd.p_set;
  wtd.bind_expr_stack.release ();
  if (sanitize_flags_p (SANITIZE_VPTR))
    cp_ubsan_instrument_member_accesses (t_p);
}

/* If a non-void function doesn't obviously end with a return statement,
   add ubsan instrumentation code to verify that at runtime.  If
   -fsanitize=return is not enabled, instrument __builtin_unreachable
   instead.  */

static void
cp_maybe_instrument_return (tree fndecl)
{
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
      || DECL_CONSTRUCTOR_P (fndecl)
      || DECL_DESTRUCTOR_P (fndecl)
      || !targetm.warn_func_return (fndecl))
    return;

  if (!sanitize_flags_p (SANITIZE_RETURN, fndecl)
      /* Don't add __builtin_unreachable () if not optimizing; it will not
	 improve any optimizations in that case and will just break code
	 that has undefined behavior.  Don't add it for -fsanitize=unreachable
	 -fno-sanitize=return either: UBSan covers that with
	 ubsan_instrument_return above, where sufficient information is
	 provided, while the __builtin_unreachable () below with return
	 sanitization disabled would just result in a hard-to-understand
	 runtime error without a location.  */
      && (!optimize
	  || sanitize_flags_p (SANITIZE_UNREACHABLE, fndecl)))
    return;

  tree t = DECL_SAVED_TREE (fndecl);
  while (t)
    {
      switch (TREE_CODE (t))
	{
	case BIND_EXPR:
	  t = BIND_EXPR_BODY (t);
	  continue;
	case TRY_FINALLY_EXPR:
	case CLEANUP_POINT_EXPR:
	  t = TREE_OPERAND (t, 0);
	  continue;
	case STATEMENT_LIST:
	  {
	    tree_stmt_iterator i = tsi_last (t);
	    if (!tsi_end_p (i))
	      {
		t = tsi_stmt (i);
		continue;
	      }
	  }
	  break;
	case RETURN_EXPR:
	  return;
	default:
	  break;
	}
      break;
    }
  if (t == NULL_TREE)
    return;
  tree *p = &DECL_SAVED_TREE (fndecl);
  if (TREE_CODE (*p) == BIND_EXPR)
    p = &BIND_EXPR_BODY (*p);

  location_t loc = DECL_SOURCE_LOCATION (fndecl);
  if (sanitize_flags_p (SANITIZE_RETURN, fndecl))
    t = ubsan_instrument_return (loc);
  else
    {
      tree fndecl = builtin_decl_explicit (BUILT_IN_UNREACHABLE);
      t = build_call_expr_loc (BUILTINS_LOCATION, fndecl, 0);
    }

  append_to_statement_list (t, p);
}
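
/* An assumed example of the effect: for

     int f (bool b) { if (b) return 1; }

   the body does not obviously end in a RETURN_EXPR, so with
   -fsanitize=return a runtime diagnostic is appended after the if, and
   otherwise (when optimizing) a call to __builtin_unreachable ().  */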
1655 
1656 void
1657 cp_genericize (tree fndecl)
1658 {
1659   tree t;
1660 
1661   /* Fix up the types of parms passed by invisible reference.  */
1662   for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
1663     if (TREE_ADDRESSABLE (TREE_TYPE (t)))
1664       {
1665 	/* If a function's arguments are copied to create a thunk,
1666 	   then DECL_BY_REFERENCE will be set -- but the type of the
1667 	   argument will be a pointer type, so we will never get
1668 	   here.  */
1669 	gcc_assert (!DECL_BY_REFERENCE (t));
1670 	gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
1671 	TREE_TYPE (t) = DECL_ARG_TYPE (t);
1672 	DECL_BY_REFERENCE (t) = 1;
1673 	TREE_ADDRESSABLE (t) = 0;
1674 	relayout_decl (t);
1675       }
1676 
1677   /* Do the same for the return value.  */
1678   if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
1679     {
1680       t = DECL_RESULT (fndecl);
1681       TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
1682       DECL_BY_REFERENCE (t) = 1;
1683       TREE_ADDRESSABLE (t) = 0;
1684       relayout_decl (t);
1685       if (DECL_NAME (t))
1686 	{
1687 	  /* Adjust DECL_VALUE_EXPR of the original var.  */
1688 	  tree outer = outer_curly_brace_block (current_function_decl);
1689 	  tree var;
1690 
1691 	  if (outer)
1692 	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
1693 	      if (VAR_P (var)
1694 		  && DECL_NAME (t) == DECL_NAME (var)
1695 		  && DECL_HAS_VALUE_EXPR_P (var)
1696 		  && DECL_VALUE_EXPR (var) == t)
1697 		{
1698 		  tree val = convert_from_reference (t);
1699 		  SET_DECL_VALUE_EXPR (var, val);
1700 		  break;
1701 		}
1702 	}
1703     }
1704 
1705   /* If we're a clone, the body is already GIMPLE.  */
1706   if (DECL_CLONED_FUNCTION_P (fndecl))
1707     return;
1708 
1709   /* Allow cp_genericize calls to be nested.  */
1710   tree save_bc_label[2];
1711   save_bc_label[bc_break] = bc_label[bc_break];
1712   save_bc_label[bc_continue] = bc_label[bc_continue];
1713   bc_label[bc_break] = NULL_TREE;
1714   bc_label[bc_continue] = NULL_TREE;
1715 
1716   /* We do want to see every occurrence of the parms, so we can't just use
1717      walk_tree's hash functionality.  */
1718   cp_genericize_tree (&DECL_SAVED_TREE (fndecl), true);
1719 
1720   cp_maybe_instrument_return (fndecl);
1721 
1722   /* Do everything else.  */
1723   c_genericize (fndecl);
1724 
1725   gcc_assert (bc_label[bc_break] == NULL);
1726   gcc_assert (bc_label[bc_continue] == NULL);
1727   bc_label[bc_break] = save_bc_label[bc_break];
1728   bc_label[bc_continue] = save_bc_label[bc_continue];
1729 }
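
/* A minimal sketch of the invisible-reference fixup in cp_genericize
   (illustrative only): given

     struct S { S (const S &); ~S (); int i; };
     int f (S s) { return s.i; }

   the type S is TREE_ADDRESSABLE, so 's' is really passed by reference;
   the loop above rewrites TREE_TYPE of the PARM_DECL to the reference
   type recorded in DECL_ARG_TYPE and sets DECL_BY_REFERENCE; later
   lowering then accesses 's' through that reference.  */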
1730 
1731 /* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
1732    NULL if there is in fact nothing to do.  ARG2 may be NULL if FN
1733    actually only takes one argument.  */
1734 
1735 static tree
1736 cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
1737 {
1738   tree defparm, parm, t;
1739   int i = 0;
1740   int nargs;
1741   tree *argarray;
1742 
1743   if (fn == NULL)
1744     return NULL;
1745 
1746   nargs = list_length (DECL_ARGUMENTS (fn));
1747   argarray = XALLOCAVEC (tree, nargs);
1748 
1749   defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
1750   if (arg2)
1751     defparm = TREE_CHAIN (defparm);
1752 
1753   bool is_method = TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE;
1754   if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
1755     {
1756       tree inner_type = TREE_TYPE (arg1);
1757       tree start1, end1, p1;
1758       tree start2 = NULL, p2 = NULL;
1759       tree ret = NULL, lab;
1760 
1761       start1 = arg1;
1762       start2 = arg2;
1763       do
1764 	{
1765 	  inner_type = TREE_TYPE (inner_type);
1766 	  start1 = build4 (ARRAY_REF, inner_type, start1,
1767 			   size_zero_node, NULL, NULL);
1768 	  if (arg2)
1769 	    start2 = build4 (ARRAY_REF, inner_type, start2,
1770 			     size_zero_node, NULL, NULL);
1771 	}
1772       while (TREE_CODE (inner_type) == ARRAY_TYPE);
1773       start1 = build_fold_addr_expr_loc (input_location, start1);
1774       if (arg2)
1775 	start2 = build_fold_addr_expr_loc (input_location, start2);
1776 
1777       end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
1778       end1 = fold_build_pointer_plus (start1, end1);
1779 
1780       p1 = create_tmp_var (TREE_TYPE (start1));
1781       t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
1782       append_to_statement_list (t, &ret);
1783 
1784       if (arg2)
1785 	{
1786 	  p2 = create_tmp_var (TREE_TYPE (start2));
1787 	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
1788 	  append_to_statement_list (t, &ret);
1789 	}
1790 
1791       lab = create_artificial_label (input_location);
1792       t = build1 (LABEL_EXPR, void_type_node, lab);
1793       append_to_statement_list (t, &ret);
1794 
1795       argarray[i++] = p1;
1796       if (arg2)
1797 	argarray[i++] = p2;
1798       /* Handle default arguments.  */
1799       for (parm = defparm; parm && parm != void_list_node;
1800 	   parm = TREE_CHAIN (parm), i++)
1801 	argarray[i] = convert_default_arg (TREE_VALUE (parm),
1802 					   TREE_PURPOSE (parm), fn,
1803 					   i - is_method, tf_warning_or_error);
1804       t = build_call_a (fn, i, argarray);
1805       t = fold_convert (void_type_node, t);
1806       t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
1807       append_to_statement_list (t, &ret);
1808 
1809       t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
1810       t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
1811       append_to_statement_list (t, &ret);
1812 
1813       if (arg2)
1814 	{
1815 	  t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
1816 	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
1817 	  append_to_statement_list (t, &ret);
1818 	}
1819 
1820       t = build2 (NE_EXPR, boolean_type_node, p1, end1);
1821       t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
1822       append_to_statement_list (t, &ret);
1823 
1824       return ret;
1825     }
1826   else
1827     {
1828       argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
1829       if (arg2)
1830 	argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
1831       /* Handle default arguments.  */
1832       for (parm = defparm; parm && parm != void_list_node;
1833 	   parm = TREE_CHAIN (parm), i++)
1834 	argarray[i] = convert_default_arg (TREE_VALUE (parm),
1835 					   TREE_PURPOSE (parm), fn,
1836 					   i - is_method, tf_warning_or_error);
1837       t = build_call_a (fn, i, argarray);
1838       t = fold_convert (void_type_node, t);
1839       return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
1840     }
1841 }
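
/* Roughly, for array operands the code built above amounts to the
   following sketch (names invented for illustration):

     p1 = &arg1[0]...[0];  end1 = p1 + sizeof (arg1);
     p2 = &arg2[0]...[0];	// only when ARG2 is given
   lab:
     fn (p1, p2, <default args>);
     p1 += sizeof (element);
     p2 += sizeof (element);	// only when ARG2 is given
     if (p1 != end1) goto lab;  */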
1842 
1843 /* Return code to initialize DECL with its default constructor, or
1844    NULL if there's nothing to do.  */
1845 
1846 tree
1847 cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
1848 {
1849   tree info = CP_OMP_CLAUSE_INFO (clause);
1850   tree ret = NULL;
1851 
1852   if (info)
1853     ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);
1854 
1855   return ret;
1856 }
1857 
1858 /* Return code to initialize DST with a copy constructor from SRC.  */
1859 
1860 tree
1861 cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
1862 {
1863   tree info = CP_OMP_CLAUSE_INFO (clause);
1864   tree ret = NULL;
1865 
1866   if (info)
1867     ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
1868   if (ret == NULL)
1869     ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
1870 
1871   return ret;
1872 }
1873 
1874 /* Similarly, except use an assignment operator instead.  */
1875 
1876 tree
1877 cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
1878 {
1879   tree info = CP_OMP_CLAUSE_INFO (clause);
1880   tree ret = NULL;
1881 
1882   if (info)
1883     ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
1884   if (ret == NULL)
1885     ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
1886 
1887   return ret;
1888 }
1889 
1890 /* Return code to destroy DECL.  */
1891 
1892 tree
1893 cxx_omp_clause_dtor (tree clause, tree decl)
1894 {
1895   tree info = CP_OMP_CLAUSE_INFO (clause);
1896   tree ret = NULL;
1897 
1898   if (info)
1899     ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);
1900 
1901   return ret;
1902 }
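
/* A note on the CP_OMP_CLAUSE_INFO layout implied by the four helpers
   above: TREE_VEC_ELT slot 0 holds the constructor (called with one
   pointer for default construction, two for copy construction), slot 1
   the destructor, and slot 2 the copy assignment operator.  */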
1903 
1904 /* True if OpenMP should privatize what this DECL points to rather
1905    than the DECL itself.  */
1906 
1907 bool
1908 cxx_omp_privatize_by_reference (const_tree decl)
1909 {
1910   return (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
1911 	  || is_invisiref_parm (decl));
1912 }
1913 
1914 /* Return true if DECL is a const-qualified var having no mutable member.  */
1915 bool
1916 cxx_omp_const_qual_no_mutable (tree decl)
1917 {
1918   tree type = TREE_TYPE (decl);
1919   if (TREE_CODE (type) == REFERENCE_TYPE)
1920     {
1921       if (!is_invisiref_parm (decl))
1922 	return false;
1923       type = TREE_TYPE (type);
1924 
1925       if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
1926 	{
1927 	  /* NVR doesn't preserve const qualification of the
1928 	     variable's type.  */
1929 	  tree outer = outer_curly_brace_block (current_function_decl);
1930 	  tree var;
1931 
1932 	  if (outer)
1933 	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
1934 	      if (VAR_P (var)
1935 		  && DECL_NAME (decl) == DECL_NAME (var)
1936 		  && (TYPE_MAIN_VARIANT (type)
1937 		      == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
1938 		{
1939 		  if (TYPE_READONLY (TREE_TYPE (var)))
1940 		    type = TREE_TYPE (var);
1941 		  break;
1942 		}
1943 	}
1944     }
1945 
1946   if (type == error_mark_node)
1947     return false;
1948 
1949   /* Variables with const-qualified type having no mutable member
1950      are predetermined shared.  */
1951   if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
1952     return true;
1953 
1954   return false;
1955 }
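
/* For illustration (not from the original source):

     struct A { int i; };
     struct B { mutable int j; };
     const A a = { 1 };	// const with no mutable member
     const B b = { 2 };	// const, but B has a mutable member

   cxx_omp_const_qual_no_mutable returns true for 'a' and false for
   'b', so only 'a' is treated as predetermined shared below.  */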
1956 
1957 /* True if the OpenMP sharing attribute of DECL is predetermined.  */
1958 
1959 enum omp_clause_default_kind
1960 cxx_omp_predetermined_sharing (tree decl)
1961 {
1962   /* Static data members are predetermined shared.  */
1963   if (TREE_STATIC (decl))
1964     {
1965       tree ctx = CP_DECL_CONTEXT (decl);
1966       if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
1967 	return OMP_CLAUSE_DEFAULT_SHARED;
1968     }
1969 
1970   /* Const qualified vars having no mutable member are predetermined
1971      shared.  */
1972   if (cxx_omp_const_qual_no_mutable (decl))
1973     return OMP_CLAUSE_DEFAULT_SHARED;
1974 
1975   return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
1976 }
1977 
1978 /* Finalize an implicitly determined clause.  */
1979 
1980 void
1981 cxx_omp_finish_clause (tree c, gimple_seq *)
1982 {
1983   tree decl, inner_type;
1984   bool make_shared = false;
1985 
1986   if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
1987     return;
1988 
1989   decl = OMP_CLAUSE_DECL (c);
1990   decl = require_complete_type (decl);
1991   inner_type = TREE_TYPE (decl);
1992   if (decl == error_mark_node)
1993     make_shared = true;
1994   else if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
1995     inner_type = TREE_TYPE (inner_type);
1996 
1997   /* We're interested in the base element, not arrays.  */
1998   while (TREE_CODE (inner_type) == ARRAY_TYPE)
1999     inner_type = TREE_TYPE (inner_type);
2000 
2001   /* Check for special function availability by building a call to one.
2002      Save the results, because later we won't be in the right context
2003      for making these queries.  */
2004   if (!make_shared
2005       && CLASS_TYPE_P (inner_type)
2006       && cxx_omp_create_clause_info (c, inner_type, false, true, false, true))
2007     make_shared = true;
2008 
2009   if (make_shared)
2010     {
2011       OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
2012       OMP_CLAUSE_SHARED_FIRSTPRIVATE (c) = 0;
2013       OMP_CLAUSE_SHARED_READONLY (c) = 0;
2014     }
2015 }
2016 
2017 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
2018    disregarded in an OpenMP construct, because it is going to be
2019    remapped during OpenMP lowering.  SHARED is true if DECL
2020    is going to be shared, false if it is going to be privatized.  */
2021 
2022 bool
2023 cxx_omp_disregard_value_expr (tree decl, bool shared)
2024 {
2025   return !shared
2026 	 && VAR_P (decl)
2027 	 && DECL_HAS_VALUE_EXPR_P (decl)
2028 	 && DECL_ARTIFICIAL (decl)
2029 	 && DECL_LANG_SPECIFIC (decl)
2030 	 && DECL_OMP_PRIVATIZED_MEMBER (decl);
2031 }
2032 
2033 /* Fold expression X which is used as an rvalue if RVAL is true.  */
2034 
2035 static tree
2036 cp_fold_maybe_rvalue (tree x, bool rval)
2037 {
2038   while (true)
2039     {
2040       x = cp_fold (x);
2041       if (rval && DECL_P (x)
2042 	  && TREE_CODE (TREE_TYPE (x)) != REFERENCE_TYPE)
2043 	{
2044 	  tree v = decl_constant_value (x);
2045 	  if (v != x && v != error_mark_node)
2046 	    {
2047 	      x = v;
2048 	      continue;
2049 	    }
2050 	}
2051       break;
2052     }
2053   return x;
2054 }
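
/* A minimal sketch of the rvalue folding above: for

     const int n = 42;

   cp_fold_maybe_rvalue on the VAR_DECL for 'n' with RVAL true replaces
   the decl with its constant initializer via decl_constant_value and
   loops so the replacement value is folded in turn, ultimately
   returning the INTEGER_CST 42.  */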
2055 
2056 /* Fold expression X which is used as an rvalue.  */
2057 
2058 static tree
2059 cp_fold_rvalue (tree x)
2060 {
2061   return cp_fold_maybe_rvalue (x, true);
2062 }
2063 
2064 /* Perform folding on expression X.  */
2065 
2066 tree
2067 cp_fully_fold (tree x)
2068 {
2069   if (processing_template_decl)
2070     return x;
2071   /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't
2072      have to call both.  */
2073   if (cxx_dialect >= cxx11)
2074     {
2075       x = maybe_constant_value (x);
2076       /* Sometimes we are given a CONSTRUCTOR but the call above wraps it into
2077 	 a TARGET_EXPR; undo that here.  */
2078       if (TREE_CODE (x) == TARGET_EXPR)
2079 	x = TARGET_EXPR_INITIAL (x);
2080       else if (TREE_CODE (x) == VIEW_CONVERT_EXPR
2081 	       && TREE_CODE (TREE_OPERAND (x, 0)) == CONSTRUCTOR
2082 	       && TREE_TYPE (TREE_OPERAND (x, 0)) == TREE_TYPE (x))
2083 	x = TREE_OPERAND (x, 0);
2084     }
2085   return cp_fold_rvalue (x);
2086 }
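
/* For instance (illustrative only): given

     constexpr int sq (int i) { return i * i; }
     int k = sq (6) + 1;

   cp_fully_fold in C++11 or later lets maybe_constant_value evaluate
   the initializer to the INTEGER_CST 37, while cp_fold_rvalue covers
   foldings that are not constant expressions.  */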
2087 
2088 /* c-common interface to cp_fold.  If IN_INIT, this is in a static initializer
2089    and certain changes are made to the folding done.  Or should be (FIXME).  We
2090    never touch maybe_const, as it is only used for the C front-end
2091    C_MAYBE_CONST_EXPR.  */
2092 
2093 tree
2094 c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/, bool lval)
2095 {
2096   return cp_fold_maybe_rvalue (x, !lval);
2097 }
2098 
2099 static GTY((deletable)) hash_map<tree, tree> *fold_cache;
2100 
2101 /* Dispose of the whole FOLD_CACHE.  */
2102 
2103 void
2104 clear_fold_cache (void)
2105 {
2106   if (fold_cache != NULL)
2107     fold_cache->empty ();
2108 }
2109 
2110 /* This function tries to fold an expression X.
2111    To avoid combinatorial explosion, folding results are kept in fold_cache.
2112    If X is invalid, we don't fold at all.
2113    For performance reasons we don't cache expressions representing a
2114    declaration or constant.
2115    Returns X or its folded variant.  */
2116 
2117 static tree
2118 cp_fold (tree x)
2119 {
2120   tree op0, op1, op2, op3;
2121   tree org_x = x, r = NULL_TREE;
2122   enum tree_code code;
2123   location_t loc;
2124   bool rval_ops = true;
2125 
2126   if (!x || x == error_mark_node)
2127     return x;
2128 
2129   if (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node))
2130     return x;
2131 
2132   /* Don't bother to cache DECLs or constants.  */
2133   if (DECL_P (x) || CONSTANT_CLASS_P (x))
2134     return x;
2135 
2136   if (fold_cache == NULL)
2137     fold_cache = hash_map<tree, tree>::create_ggc (101);
2138 
2139   if (tree *cached = fold_cache->get (x))
2140     return *cached;
2141 
2142   code = TREE_CODE (x);
2143   switch (code)
2144     {
2145     case CLEANUP_POINT_EXPR:
2146       /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
2147 	 effects.  */
2148       r = cp_fold_rvalue (TREE_OPERAND (x, 0));
2149       if (!TREE_SIDE_EFFECTS (r))
2150 	x = r;
2151       break;
2152 
2153     case SIZEOF_EXPR:
2154       x = fold_sizeof_expr (x);
2155       break;
2156 
2157     case VIEW_CONVERT_EXPR:
2158       rval_ops = false;
2159       /* FALLTHRU */
2160     case CONVERT_EXPR:
2161     case NOP_EXPR:
2162     case NON_LVALUE_EXPR:
2163 
2164       if (VOID_TYPE_P (TREE_TYPE (x)))
2165 	{
2166 	  /* This is just to make sure we don't end up with casts to
2167 	     void from error_mark_node.  If we just return x, then
2168 	     cp_fold_r might fold the operand into error_mark_node and
2169 	     leave the conversion in the IR.  STRIP_USELESS_TYPE_CONVERSION
2170 	     during gimplification doesn't like such casts.
2171 	     Don't create a new tree even if op0 != TREE_OPERAND (x, 0);
2172 	     the folding of the operand should already be in the caches, and
2173 	     when called from cp_fold_r it will be modified in place.  */
2174 	  op0 = cp_fold (TREE_OPERAND (x, 0));
2175 	  if (op0 == error_mark_node)
2176 	    x = error_mark_node;
2177 	  break;
2178 	}
2179 
2180       loc = EXPR_LOCATION (x);
2181       op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2182 
2183       if (code == CONVERT_EXPR
2184 	  && SCALAR_TYPE_P (TREE_TYPE (x))
2185 	  && op0 != void_node)
2186 	/* During parsing we used convert_to_*_nofold; re-convert now using the
2187 	   folding variants, since fold() doesn't do those transformations.  */
2188 	x = fold (convert (TREE_TYPE (x), op0));
2189       else if (op0 != TREE_OPERAND (x, 0))
2190 	{
2191 	  if (op0 == error_mark_node)
2192 	    x = error_mark_node;
2193 	  else
2194 	    x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2195 	}
2196       else
2197 	x = fold (x);
2198 
2199       /* Conversion of an out-of-range value has implementation-defined
2200 	 behavior; the language considers it different from arithmetic
2201 	 overflow, which is undefined.  */
2202       if (TREE_CODE (op0) == INTEGER_CST
2203 	  && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0))
2204 	TREE_OVERFLOW (x) = false;
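      /* E.g. (illustrative) folding (signed char) 300 from an INTEGER_CST
	 sets TREE_OVERFLOW on the result; it is cleared just above
	 because the conversion is merely implementation-defined.  */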
2205 
2206       break;
2207 
2208     case INDIRECT_REF:
2209       /* We don't need the decltype(auto) obfuscation anymore.  */
2210       if (REF_PARENTHESIZED_P (x))
2211 	{
2212 	  tree p = maybe_undo_parenthesized_ref (x);
2213 	  return cp_fold (p);
2214 	}
2215       goto unary;
2216 
2217     case ADDR_EXPR:
2218       loc = EXPR_LOCATION (x);
2219       op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), false);
2220 
2221       /* Cope with user tricks that amount to offsetof.  */
2222       if (op0 != error_mark_node
2223 	  && TREE_CODE (TREE_TYPE (op0)) != FUNCTION_TYPE
2224 	  && TREE_CODE (TREE_TYPE (op0)) != METHOD_TYPE)
2225 	{
2226 	  tree val = get_base_address (op0);
2227 	  if (val
2228 	      && INDIRECT_REF_P (val)
2229 	      && COMPLETE_TYPE_P (TREE_TYPE (val))
2230 	      && TREE_CONSTANT (TREE_OPERAND (val, 0)))
2231 	    {
2232 	      val = TREE_OPERAND (val, 0);
2233 	      STRIP_NOPS (val);
2234 	      if (TREE_CODE (val) == INTEGER_CST)
2235 		return fold_convert (TREE_TYPE (x), fold_offsetof_1 (op0));
2236 	    }
2237 	}
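      /* E.g. (a sketch): the classic pre-offsetof idiom
	 &((struct S *) 0)->member is folded here to the member's
	 constant byte offset via fold_offsetof_1.  */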
2238       goto finish_unary;
2239 
2240     case REALPART_EXPR:
2241     case IMAGPART_EXPR:
2242       rval_ops = false;
2243       /* FALLTHRU */
2244     case CONJ_EXPR:
2245     case FIX_TRUNC_EXPR:
2246     case FLOAT_EXPR:
2247     case NEGATE_EXPR:
2248     case ABS_EXPR:
2249     case BIT_NOT_EXPR:
2250     case TRUTH_NOT_EXPR:
2251     case FIXED_CONVERT_EXPR:
2252     unary:
2253 
2254       loc = EXPR_LOCATION (x);
2255       op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2256 
2257     finish_unary:
2258       if (op0 != TREE_OPERAND (x, 0))
2259 	{
2260 	  if (op0 == error_mark_node)
2261 	    x = error_mark_node;
2262 	  else
2263 	    {
2264 	      x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2265 	      if (code == INDIRECT_REF
2266 		  && (INDIRECT_REF_P (x) || TREE_CODE (x) == MEM_REF))
2267 		{
2268 		  TREE_READONLY (x) = TREE_READONLY (org_x);
2269 		  TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
2270 		  TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
2271 		}
2272 	    }
2273 	}
2274       else
2275 	x = fold (x);
2276 
2277       gcc_assert (TREE_CODE (x) != COND_EXPR
2278 		  || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
2279       break;
2280 
2281     case UNARY_PLUS_EXPR:
2282       op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
2283       if (op0 == error_mark_node)
2284 	x = error_mark_node;
2285       else
2286 	x = fold_convert (TREE_TYPE (x), op0);
2287       break;
2288 
2289     case POSTDECREMENT_EXPR:
2290     case POSTINCREMENT_EXPR:
2291     case INIT_EXPR:
2292     case PREDECREMENT_EXPR:
2293     case PREINCREMENT_EXPR:
2294     case COMPOUND_EXPR:
2295     case MODIFY_EXPR:
2296       rval_ops = false;
2297       /* FALLTHRU */
2298     case POINTER_PLUS_EXPR:
2299     case PLUS_EXPR:
2300     case POINTER_DIFF_EXPR:
2301     case MINUS_EXPR:
2302     case MULT_EXPR:
2303     case TRUNC_DIV_EXPR:
2304     case CEIL_DIV_EXPR:
2305     case FLOOR_DIV_EXPR:
2306     case ROUND_DIV_EXPR:
2307     case TRUNC_MOD_EXPR:
2308     case CEIL_MOD_EXPR:
2309     case ROUND_MOD_EXPR:
2310     case RDIV_EXPR:
2311     case EXACT_DIV_EXPR:
2312     case MIN_EXPR:
2313     case MAX_EXPR:
2314     case LSHIFT_EXPR:
2315     case RSHIFT_EXPR:
2316     case LROTATE_EXPR:
2317     case RROTATE_EXPR:
2318     case BIT_AND_EXPR:
2319     case BIT_IOR_EXPR:
2320     case BIT_XOR_EXPR:
2321     case TRUTH_AND_EXPR:
2322     case TRUTH_ANDIF_EXPR:
2323     case TRUTH_OR_EXPR:
2324     case TRUTH_ORIF_EXPR:
2325     case TRUTH_XOR_EXPR:
2326     case LT_EXPR: case LE_EXPR:
2327     case GT_EXPR: case GE_EXPR:
2328     case EQ_EXPR: case NE_EXPR:
2329     case UNORDERED_EXPR: case ORDERED_EXPR:
2330     case UNLT_EXPR: case UNLE_EXPR:
2331     case UNGT_EXPR: case UNGE_EXPR:
2332     case UNEQ_EXPR: case LTGT_EXPR:
2333     case RANGE_EXPR: case COMPLEX_EXPR:
2334 
2335       loc = EXPR_LOCATION (x);
2336       op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2337       op1 = cp_fold_rvalue (TREE_OPERAND (x, 1));
2338 
2339       if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
2340 	{
2341 	  if (op0 == error_mark_node || op1 == error_mark_node)
2342 	    x = error_mark_node;
2343 	  else
2344 	    x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1);
2345 	}
2346       else
2347 	x = fold (x);
2348 
2349       if (TREE_NO_WARNING (org_x)
2350 	  && warn_nonnull_compare
2351 	  && COMPARISON_CLASS_P (org_x))
2352 	{
2353 	  if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST)
2354 	    ;
2355 	  else if (COMPARISON_CLASS_P (x))
2356 	    TREE_NO_WARNING (x) = 1;
2357 	  /* Otherwise give up on optimizing these; let the GIMPLE folders
2358 	     optimize them later on.  */
2359 	  else if (op0 != TREE_OPERAND (org_x, 0)
2360 		   || op1 != TREE_OPERAND (org_x, 1))
2361 	    {
2362 	      x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1);
2363 	      TREE_NO_WARNING (x) = 1;
2364 	    }
2365 	  else
2366 	    x = org_x;
2367 	}
2368       break;
2369 
2370     case VEC_COND_EXPR:
2371     case COND_EXPR:
2372       loc = EXPR_LOCATION (x);
2373       op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
2374       op1 = cp_fold (TREE_OPERAND (x, 1));
2375       op2 = cp_fold (TREE_OPERAND (x, 2));
2376 
2377       if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
2378 	{
2379 	  warning_sentinel s (warn_int_in_bool_context);
2380 	  if (!VOID_TYPE_P (TREE_TYPE (op1)))
2381 	    op1 = cp_truthvalue_conversion (op1);
2382 	  if (!VOID_TYPE_P (TREE_TYPE (op2)))
2383 	    op2 = cp_truthvalue_conversion (op2);
2384 	}
2385       else if (VOID_TYPE_P (TREE_TYPE (x)))
2386 	{
2387 	  if (TREE_CODE (op0) == INTEGER_CST)
2388 	    {
2389 	      /* If the condition is constant, fold can fold away
2390 		 the COND_EXPR.  If some statement-level uses of COND_EXPR
2391 		 have one of the branches NULL, avoid a crash in folding.  */
2392 	      if (!op1)
2393 		op1 = build_empty_stmt (loc);
2394 	      if (!op2)
2395 		op2 = build_empty_stmt (loc);
2396 	    }
2397 	  else
2398 	    {
2399 	      /* Otherwise, don't bother folding a void condition, since
2400 		 it can't produce a constant value.  */
2401 	      if (op0 != TREE_OPERAND (x, 0)
2402 		  || op1 != TREE_OPERAND (x, 1)
2403 		  || op2 != TREE_OPERAND (x, 2))
2404 		x = build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2405 	      break;
2406 	    }
2407 	}
2408 
2409       if (op0 != TREE_OPERAND (x, 0)
2410 	  || op1 != TREE_OPERAND (x, 1)
2411 	  || op2 != TREE_OPERAND (x, 2))
2412 	{
2413 	  if (op0 == error_mark_node
2414 	      || op1 == error_mark_node
2415 	      || op2 == error_mark_node)
2416 	    x = error_mark_node;
2417 	  else
2418 	    x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2419 	}
2420       else
2421 	x = fold (x);
2422 
2423       /* A COND_EXPR might have incompatible types in branches if one or both
2424 	 arms are bitfields.  If folding exposed such a branch, fix it up.  */
2425       if (TREE_CODE (x) != code
2426 	  && x != error_mark_node
2427 	  && !useless_type_conversion_p (TREE_TYPE (org_x), TREE_TYPE (x)))
2428 	x = fold_convert (TREE_TYPE (org_x), x);
2429 
2430       break;
2431 
2432     case CALL_EXPR:
2433       {
2434 	int i, m, sv = optimize, nw = sv, changed = 0;
2435 	tree callee = get_callee_fndecl (x);
2436 
2437 	/* Some built-in function calls will be evaluated at compile-time in
2438 	   fold ().  Set optimize to 1 when folding __builtin_constant_p inside
2439 	   a constexpr function so that fold_builtin_1 doesn't fold it to 0.  */
2440 	if (callee && DECL_BUILT_IN (callee) && !optimize
2441 	    && DECL_IS_BUILTIN_CONSTANT_P (callee)
2442 	    && current_function_decl
2443 	    && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
2444 	  nw = 1;
2445 
2446 	x = copy_node (x);
2447 
2448 	m = call_expr_nargs (x);
2449 	for (i = 0; i < m; i++)
2450 	  {
2451 	    r = cp_fold (CALL_EXPR_ARG (x, i));
2452 	    if (r != CALL_EXPR_ARG (x, i))
2453 	      {
2454 		if (r == error_mark_node)
2455 		  {
2456 		    x = error_mark_node;
2457 		    break;
2458 		  }
2459 		changed = 1;
2460 	      }
2461 	    CALL_EXPR_ARG (x, i) = r;
2462 	  }
2463 	if (x == error_mark_node)
2464 	  break;
2465 
2466 	optimize = nw;
2467 	r = fold (x);
2468 	optimize = sv;
2469 
2470 	if (TREE_CODE (r) != CALL_EXPR)
2471 	  {
2472 	    x = cp_fold (r);
2473 	    break;
2474 	  }
2475 
2476 	optimize = nw;
2477 
2478 	/* Invoke maybe_constant_value for functions declared
2479 	   constexpr and not called with AGGR_INIT_EXPRs.
2480 	   TODO:
2481 	   Do constexpr expansion of expressions where the call itself is not
2482 	   constant, but the call followed by an INDIRECT_REF is.  */
2483 	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
2484 	    && !flag_no_inline)
2485 	  r = maybe_constant_value (x);
2486 	optimize = sv;
2487 
2488         if (TREE_CODE (r) != CALL_EXPR)
2489 	  {
2490 	    if (DECL_CONSTRUCTOR_P (callee))
2491 	      {
2492 		loc = EXPR_LOCATION (x);
2493 		tree s = build_fold_indirect_ref_loc (loc,
2494 						      CALL_EXPR_ARG (x, 0));
2495 		r = build2_loc (loc, INIT_EXPR, TREE_TYPE (s), s, r);
2496 	      }
2497 	    x = r;
2498 	    break;
2499 	  }
2500 
2501 	if (!changed)
2502 	  x = org_x;
2503 	break;
2504       }
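
    /* A sketch of the constexpr handling in the CALL_EXPR case above
       (illustrative): for

	 constexpr int three () { return 3; }

       a call three () that fold () leaves alone is handed to
       maybe_constant_value, which replaces it with the INTEGER_CST 3;
       if nothing changed at all, the original tree is returned.  */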
2505 
2506     case CONSTRUCTOR:
2507       {
2508 	unsigned i;
2509 	constructor_elt *p;
2510 	vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x);
2511 	vec<constructor_elt, va_gc> *nelts = NULL;
2512 	FOR_EACH_VEC_SAFE_ELT (elts, i, p)
2513 	  {
2514 	    tree op = cp_fold (p->value);
2515 	    if (op != p->value)
2516 	      {
2517 		if (op == error_mark_node)
2518 		  {
2519 		    x = error_mark_node;
2520 		    vec_free (nelts);
2521 		    break;
2522 		  }
2523 		if (nelts == NULL)
2524 		  nelts = elts->copy ();
2525 		(*nelts)[i].value = op;
2526 	      }
2527 	  }
2528 	if (nelts)
2529 	  {
2530 	    x = build_constructor (TREE_TYPE (x), nelts);
2531 	    CONSTRUCTOR_PLACEHOLDER_BOUNDARY (x)
2532 	      = CONSTRUCTOR_PLACEHOLDER_BOUNDARY (org_x);
2533 	  }
2534 	if (VECTOR_TYPE_P (TREE_TYPE (x)))
2535 	  x = fold (x);
2536 	break;
2537       }
2538     case TREE_VEC:
2539       {
2540 	bool changed = false;
2541 	vec<tree, va_gc> *vec = make_tree_vector ();
2542 	int i, n = TREE_VEC_LENGTH (x);
2543 	vec_safe_reserve (vec, n);
2544 
2545 	for (i = 0; i < n; i++)
2546 	  {
2547 	    tree op = cp_fold (TREE_VEC_ELT (x, i));
2548 	    vec->quick_push (op);
2549 	    if (op != TREE_VEC_ELT (x, i))
2550 	      changed = true;
2551 	  }
2552 
2553 	if (changed)
2554 	  {
2555 	    r = copy_node (x);
2556 	    for (i = 0; i < n; i++)
2557 	      TREE_VEC_ELT (r, i) = (*vec)[i];
2558 	    x = r;
2559 	  }
2560 
2561 	release_tree_vector (vec);
2562       }
2563 
2564       break;
2565 
2566     case ARRAY_REF:
2567     case ARRAY_RANGE_REF:
2568 
2569       loc = EXPR_LOCATION (x);
2570       op0 = cp_fold (TREE_OPERAND (x, 0));
2571       op1 = cp_fold (TREE_OPERAND (x, 1));
2572       op2 = cp_fold (TREE_OPERAND (x, 2));
2573       op3 = cp_fold (TREE_OPERAND (x, 3));
2574 
2575       if (op0 != TREE_OPERAND (x, 0)
2576 	  || op1 != TREE_OPERAND (x, 1)
2577 	  || op2 != TREE_OPERAND (x, 2)
2578 	  || op3 != TREE_OPERAND (x, 3))
2579 	{
2580 	  if (op0 == error_mark_node
2581 	      || op1 == error_mark_node
2582 	      || op2 == error_mark_node
2583 	      || op3 == error_mark_node)
2584 	    x = error_mark_node;
2585 	  else
2586 	    {
2587 	      x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3);
2588 	      TREE_READONLY (x) = TREE_READONLY (org_x);
2589 	      TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
2590 	      TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
2591 	    }
2592 	}
2593 
2594       x = fold (x);
2595       break;
2596 
2597     case SAVE_EXPR:
2598 	 folding, evaluates to an invariant.  In that case there is no need
2599 	 to wrap the folded tree in a SAVE_EXPR.  */
2600 	 this folded tree with a SAVE_EXPR.  */
2601       r = cp_fold (TREE_OPERAND (x, 0));
2602       if (tree_invariant_p (r))
2603 	x = r;
2604       break;
2605 
2606     default:
2607       return org_x;
2608     }
2609 
2610   fold_cache->put (org_x, x);
2611   /* Don't try to fold an already folded result again.  */
2612   if (x != org_x)
2613     fold_cache->put (x, x);
2614 
2615   return x;
2616 }
2617 
2618 #include "gt-cp-cp-gimplify.h"
2619