xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/cp/cp-gimplify.c (revision cef8759bd76c1b621f8eab8faa6f208faabc2e15)
1 /* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.
2 
3    Copyright (C) 2002-2017 Free Software Foundation, Inc.
4    Contributed by Jason Merrill <jason@redhat.com>
5 
6 This file is part of GCC.
7 
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12 
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16 for more details.
17 
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3.  If not see
20 <http://www.gnu.org/licenses/>.  */
21 
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "target.h"
26 #include "basic-block.h"
27 #include "cp-tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "stor-layout.h"
31 #include "tree-iterator.h"
32 #include "gimplify.h"
33 #include "c-family/c-ubsan.h"
34 #include "cilk.h"
35 #include "cp-cilkplus.h"
36 
37 /* Forward declarations.  */
38 
39 static tree cp_genericize_r (tree *, int *, void *);
40 static tree cp_fold_r (tree *, int *, void *);
41 static void cp_genericize_tree (tree*, bool);
42 static tree cp_fold (tree);
43 
44 /* Local declarations.  */
45 
46 enum bc_t { bc_break = 0, bc_continue = 1 };
47 
48 /* Stack of labels which are targets for "break" or "continue",
49    linked through TREE_CHAIN.  */
50 static tree bc_label[2];
51 
52 /* Begin a scope which can be exited by a break or continue statement.  BC
53    indicates which.
54 
55    Just creates a label with location LOCATION and pushes it into the current
56    context.  */
57 
58 static tree
59 begin_bc_block (enum bc_t bc, location_t location)
60 {
61   tree label = create_artificial_label (location);
62   DECL_CHAIN (label) = bc_label[bc];
63   bc_label[bc] = label;
64   if (bc == bc_break)
65     LABEL_DECL_BREAK (label) = true;
66   else
67     LABEL_DECL_CONTINUE (label) = true;
68   return label;
69 }
70 
71 /* Finish a scope which can be exited by a break or continue statement.
72    LABEL was returned from the most recent call to begin_bc_block.  BLOCK is
73    an expression for the contents of the scope.
74 
75    If we saw a break (or continue) in the scope, append a LABEL_EXPR to
76    BLOCK.  Otherwise, just forget the label.  */
77 
78 static void
79 finish_bc_block (tree *block, enum bc_t bc, tree label)
80 {
81   gcc_assert (label == bc_label[bc]);
82 
83   if (TREE_USED (label))
84     append_to_statement_list (build1 (LABEL_EXPR, void_type_node, label),
85 			      block);
86 
87   bc_label[bc] = DECL_CHAIN (label);
88   DECL_CHAIN (label) = NULL_TREE;
89 }
90 
91 /* This function is a wrapper for cilk_gimplify_call_params_in_spawned_fn.
92    *EXPR_P can be a CALL_EXPR, INIT_EXPR, MODIFY_EXPR, AGGR_INIT_EXPR or
93    TARGET_EXPR.  *PRE_P and *POST_P are gimple sequences from the caller
94    of gimplify_cilk_spawn.  */
95 
96 static void
97 cilk_cp_gimplify_call_params_in_spawned_fn (tree *expr_p, gimple_seq *pre_p,
98 					    gimple_seq *post_p)
99 {
100   int ii = 0;
101 
102   cilk_gimplify_call_params_in_spawned_fn (expr_p, pre_p);
103   if (TREE_CODE (*expr_p) == AGGR_INIT_EXPR)
104     for (ii = 0; ii < aggr_init_expr_nargs (*expr_p); ii++)
105       gimplify_expr (&AGGR_INIT_EXPR_ARG (*expr_p, ii), pre_p, post_p,
106 		     is_gimple_reg, fb_rvalue);
107 }
108 
109 
110 /* Get the LABEL_EXPR to represent a break or continue statement
111    in the current block scope.  BC indicates which.  */
112 
113 static tree
114 get_bc_label (enum bc_t bc)
115 {
116   tree label = bc_label[bc];
117 
118   /* Mark the label used for finish_bc_block.  */
119   TREE_USED (label) = 1;
120   return label;
121 }
122 
123 /* Genericize a TRY_BLOCK.  */
124 
125 static void
126 genericize_try_block (tree *stmt_p)
127 {
128   tree body = TRY_STMTS (*stmt_p);
129   tree cleanup = TRY_HANDLERS (*stmt_p);
130 
131   *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
132 }
133 
134 /* Genericize a HANDLER by converting to a CATCH_EXPR.  */
135 
136 static void
137 genericize_catch_block (tree *stmt_p)
138 {
139   tree type = HANDLER_TYPE (*stmt_p);
140   tree body = HANDLER_BODY (*stmt_p);
141 
142   /* FIXME should the caught type go in TREE_TYPE?  */
143   *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
144 }
145 
146 /* A terser interface for building a representation of an exception
147    specification.  */
148 
149 static tree
150 build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
151 {
152   tree t;
153 
154   /* FIXME should the allowed types go in TREE_TYPE?  */
155   t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
156   append_to_statement_list (failure, &EH_FILTER_FAILURE (t));
157 
158   t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
159   append_to_statement_list (body, &TREE_OPERAND (t, 0));
160 
161   return t;
162 }
163 
164 /* Genericize an EH_SPEC_BLOCK by converting it to a
165    TRY_CATCH_EXPR/EH_FILTER_EXPR pair.  */
166 
167 static void
168 genericize_eh_spec_block (tree *stmt_p)
169 {
170   tree body = EH_SPEC_STMTS (*stmt_p);
171   tree allowed = EH_SPEC_RAISES (*stmt_p);
172   tree failure = build_call_n (call_unexpected_node, 1, build_exc_ptr ());
173 
174   *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
175   TREE_NO_WARNING (*stmt_p) = true;
176   TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
177 }
178 
179 /* Genericize an IF_STMT by turning it into a COND_EXPR.  */
180 
181 static void
182 genericize_if_stmt (tree *stmt_p)
183 {
184   tree stmt, cond, then_, else_;
185   location_t locus = EXPR_LOCATION (*stmt_p);
186 
187   stmt = *stmt_p;
188   cond = IF_COND (stmt);
189   then_ = THEN_CLAUSE (stmt);
190   else_ = ELSE_CLAUSE (stmt);
191 
192   if (!then_)
193     then_ = build_empty_stmt (locus);
194   if (!else_)
195     else_ = build_empty_stmt (locus);
196 
197   if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
198     stmt = then_;
199   else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
200     stmt = else_;
201   else
202     stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
203   if (!EXPR_HAS_LOCATION (stmt))
204     protected_set_expr_location (stmt, locus);
205   *stmt_p = stmt;
206 }
207 
208 /* Build a generic representation of one of the C loop forms.  COND is the
209    loop condition or NULL_TREE.  BODY is the (possibly compound) statement
210    controlled by the loop.  INCR is the increment expression of a for-loop,
211    or NULL_TREE.  COND_IS_FIRST indicates whether the condition is
212    evaluated before the loop body as in while and for loops, or after the
213    loop body as in do-while loops.  */
214 
215 static void
216 genericize_cp_loop (tree *stmt_p, location_t start_locus, tree cond, tree body,
217 		    tree incr, bool cond_is_first, int *walk_subtrees,
218 		    void *data)
219 {
220   tree blab, clab;
221   tree exit = NULL;
222   tree stmt_list = NULL;
223 
224   blab = begin_bc_block (bc_break, start_locus);
225   clab = begin_bc_block (bc_continue, start_locus);
226 
227   protected_set_expr_location (incr, start_locus);
228 
229   cp_walk_tree (&cond, cp_genericize_r, data, NULL);
230   cp_walk_tree (&body, cp_genericize_r, data, NULL);
231   cp_walk_tree (&incr, cp_genericize_r, data, NULL);
232   *walk_subtrees = 0;
233 
234   if (cond && TREE_CODE (cond) != INTEGER_CST)
235     {
236       /* If COND is constant, don't bother building an exit.  If it's false,
237 	 we won't build a loop.  If it's true, any exits are in the body.  */
238       location_t cloc = EXPR_LOC_OR_LOC (cond, start_locus);
239       exit = build1_loc (cloc, GOTO_EXPR, void_type_node,
240 			 get_bc_label (bc_break));
241       exit = fold_build3_loc (cloc, COND_EXPR, void_type_node, cond,
242 			      build_empty_stmt (cloc), exit);
243     }
244 
245   if (exit && cond_is_first)
246     append_to_statement_list (exit, &stmt_list);
247   append_to_statement_list (body, &stmt_list);
248   finish_bc_block (&stmt_list, bc_continue, clab);
249   append_to_statement_list (incr, &stmt_list);
250   if (exit && !cond_is_first)
251     append_to_statement_list (exit, &stmt_list);
252 
253   if (!stmt_list)
254     stmt_list = build_empty_stmt (start_locus);
255 
256   tree loop;
257   if (cond && integer_zerop (cond))
258     {
259       if (cond_is_first)
260 	loop = fold_build3_loc (start_locus, COND_EXPR,
261 				void_type_node, cond, stmt_list,
262 				build_empty_stmt (start_locus));
263       else
264 	loop = stmt_list;
265     }
266   else
267     {
268       location_t loc = start_locus;
269       if (!cond || integer_nonzerop (cond))
270 	loc = EXPR_LOCATION (expr_first (body));
271       if (loc == UNKNOWN_LOCATION)
272 	loc = start_locus;
273       loop = build1_loc (loc, LOOP_EXPR, void_type_node, stmt_list);
274     }
275 
276   stmt_list = NULL;
277   append_to_statement_list (loop, &stmt_list);
278   finish_bc_block (&stmt_list, bc_break, blab);
279   if (!stmt_list)
280     stmt_list = build_empty_stmt (start_locus);
281 
282   *stmt_p = stmt_list;
283 }
284 
285 /* Genericize a FOR_STMT node *STMT_P.  */
286 
287 static void
288 genericize_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
289 {
290   tree stmt = *stmt_p;
291   tree expr = NULL;
292   tree loop;
293   tree init = FOR_INIT_STMT (stmt);
294 
295   if (init)
296     {
297       cp_walk_tree (&init, cp_genericize_r, data, NULL);
298       append_to_statement_list (init, &expr);
299     }
300 
301   genericize_cp_loop (&loop, EXPR_LOCATION (stmt), FOR_COND (stmt),
302 		      FOR_BODY (stmt), FOR_EXPR (stmt), 1, walk_subtrees, data);
303   append_to_statement_list (loop, &expr);
304   if (expr == NULL_TREE)
305     expr = loop;
306   *stmt_p = expr;
307 }
308 
309 /* Genericize a WHILE_STMT node *STMT_P.  */
310 
311 static void
312 genericize_while_stmt (tree *stmt_p, int *walk_subtrees, void *data)
313 {
314   tree stmt = *stmt_p;
315   genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), WHILE_COND (stmt),
316 		      WHILE_BODY (stmt), NULL_TREE, 1, walk_subtrees, data);
317 }
318 
319 /* Genericize a DO_STMT node *STMT_P.  */
320 
321 static void
322 genericize_do_stmt (tree *stmt_p, int *walk_subtrees, void *data)
323 {
324   tree stmt = *stmt_p;
325   genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), DO_COND (stmt),
326 		      DO_BODY (stmt), NULL_TREE, 0, walk_subtrees, data);
327 }
328 
329 /* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR.  */
330 
331 static void
332 genericize_switch_stmt (tree *stmt_p, int *walk_subtrees, void *data)
333 {
334   tree stmt = *stmt_p;
335   tree break_block, body, cond, type;
336   location_t stmt_locus = EXPR_LOCATION (stmt);
337 
338   break_block = begin_bc_block (bc_break, stmt_locus);
339 
340   body = SWITCH_STMT_BODY (stmt);
341   if (!body)
342     body = build_empty_stmt (stmt_locus);
343   cond = SWITCH_STMT_COND (stmt);
344   type = SWITCH_STMT_TYPE (stmt);
345 
346   cp_walk_tree (&body, cp_genericize_r, data, NULL);
347   cp_walk_tree (&cond, cp_genericize_r, data, NULL);
348   cp_walk_tree (&type, cp_genericize_r, data, NULL);
349   *walk_subtrees = 0;
350 
351   *stmt_p = build3_loc (stmt_locus, SWITCH_EXPR, type, cond, body, NULL_TREE);
352   finish_bc_block (stmt_p, bc_break, break_block);
353 }
354 
355 /* Genericize a CONTINUE_STMT node *STMT_P.  */
356 
357 static void
358 genericize_continue_stmt (tree *stmt_p)
359 {
360   tree stmt_list = NULL;
361   tree pred = build_predict_expr (PRED_CONTINUE, NOT_TAKEN);
362   tree label = get_bc_label (bc_continue);
363   location_t location = EXPR_LOCATION (*stmt_p);
364   tree jump = build1_loc (location, GOTO_EXPR, void_type_node, label);
365   append_to_statement_list_force (pred, &stmt_list);
366   append_to_statement_list (jump, &stmt_list);
367   *stmt_p = stmt_list;
368 }
369 
370 /* Genericize a BREAK_STMT node *STMT_P.  */
371 
372 static void
373 genericize_break_stmt (tree *stmt_p)
374 {
375   tree label = get_bc_label (bc_break);
376   location_t location = EXPR_LOCATION (*stmt_p);
377   *stmt_p = build1_loc (location, GOTO_EXPR, void_type_node, label);
378 }
379 
380 /* Genericize a OMP_FOR node *STMT_P.  */
381 
382 static void
383 genericize_omp_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
384 {
385   tree stmt = *stmt_p;
386   location_t locus = EXPR_LOCATION (stmt);
387   tree clab = begin_bc_block (bc_continue, locus);
388 
389   cp_walk_tree (&OMP_FOR_BODY (stmt), cp_genericize_r, data, NULL);
390   if (TREE_CODE (stmt) != OMP_TASKLOOP)
391     cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_genericize_r, data, NULL);
392   cp_walk_tree (&OMP_FOR_INIT (stmt), cp_genericize_r, data, NULL);
393   cp_walk_tree (&OMP_FOR_COND (stmt), cp_genericize_r, data, NULL);
394   cp_walk_tree (&OMP_FOR_INCR (stmt), cp_genericize_r, data, NULL);
395   cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_genericize_r, data, NULL);
396   *walk_subtrees = 0;
397 
398   finish_bc_block (&OMP_FOR_BODY (stmt), bc_continue, clab);
399 }
400 
401 /* Hook into the middle of gimplifying an OMP_FOR node.  */
402 
403 static enum gimplify_status
404 cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
405 {
406   tree for_stmt = *expr_p;
407   gimple_seq seq = NULL;
408 
409   /* Protect ourselves from recursion.  */
410   if (OMP_FOR_GIMPLIFYING_P (for_stmt))
411     return GS_UNHANDLED;
412   OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;
413 
414   gimplify_and_add (for_stmt, &seq);
415   gimple_seq_add_seq (pre_p, seq);
416 
417   OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;
418 
419   return GS_ALL_DONE;
420 }
421 
422 /*  Gimplify an EXPR_STMT node.  */
423 
424 static void
425 gimplify_expr_stmt (tree *stmt_p)
426 {
427   tree stmt = EXPR_STMT_EXPR (*stmt_p);
428 
429   if (stmt == error_mark_node)
430     stmt = NULL;
431 
432   /* Gimplification of a statement expression will nullify the
433      statement if all its side effects are moved to *PRE_P and *POST_P.
434 
435      In this case we will not want to emit the gimplified statement.
436      However, we may still want to emit a warning, so we do that before
437      gimplification.  */
438   if (stmt && warn_unused_value)
439     {
440       if (!TREE_SIDE_EFFECTS (stmt))
441 	{
442 	  if (!IS_EMPTY_STMT (stmt)
443 	      && !VOID_TYPE_P (TREE_TYPE (stmt))
444 	      && !TREE_NO_WARNING (stmt))
445 	    warning (OPT_Wunused_value, "statement with no effect");
446 	}
447       else
448 	warn_if_unused_value (stmt, input_location);
449     }
450 
451   if (stmt == NULL_TREE)
452     stmt = alloc_stmt_list ();
453 
454   *stmt_p = stmt;
455 }
456 
457 /* Gimplify initialization from an AGGR_INIT_EXPR.  */
458 
459 static void
460 cp_gimplify_init_expr (tree *expr_p)
461 {
462   tree from = TREE_OPERAND (*expr_p, 1);
463   tree to = TREE_OPERAND (*expr_p, 0);
464   tree t;
465 
466   /* What about code that pulls out the temp and uses it elsewhere?  I
467      think that such code never uses the TARGET_EXPR as an initializer.  If
468      I'm wrong, we'll abort because the temp won't have any RTL.  In that
469      case, I guess we'll need to replace references somehow.  */
470   if (TREE_CODE (from) == TARGET_EXPR)
471     from = TARGET_EXPR_INITIAL (from);
472 
473   /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
474      inside the TARGET_EXPR.  */
475   for (t = from; t; )
476     {
477       tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;
478 
479       /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
480 	 replace the slot operand with our target.
481 
482 	 Should we add a target parm to gimplify_expr instead?  No, as in this
483 	 case we want to replace the INIT_EXPR.  */
484       if (TREE_CODE (sub) == AGGR_INIT_EXPR
485 	  || TREE_CODE (sub) == VEC_INIT_EXPR)
486 	{
487 	  if (TREE_CODE (sub) == AGGR_INIT_EXPR)
488 	    AGGR_INIT_EXPR_SLOT (sub) = to;
489 	  else
490 	    VEC_INIT_EXPR_SLOT (sub) = to;
491 	  *expr_p = from;
492 
493 	  /* The initialization is now a side-effect, so the container can
494 	     become void.  */
495 	  if (from != sub)
496 	    TREE_TYPE (from) = void_type_node;
497 	}
498 
499       /* Handle aggregate NSDMI.  */
500       replace_placeholders (sub, to);
501 
502       if (t == sub)
503 	break;
504       else
505 	t = TREE_OPERAND (t, 1);
506     }
507 
508 }
509 
510 /* Gimplify a MUST_NOT_THROW_EXPR.  */
511 
512 static enum gimplify_status
513 gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
514 {
515   tree stmt = *expr_p;
516   tree temp = voidify_wrapper_expr (stmt, NULL);
517   tree body = TREE_OPERAND (stmt, 0);
518   gimple_seq try_ = NULL;
519   gimple_seq catch_ = NULL;
520   gimple *mnt;
521 
522   gimplify_and_add (body, &try_);
523   mnt = gimple_build_eh_must_not_throw (terminate_node);
524   gimple_seq_add_stmt_without_update (&catch_, mnt);
525   mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);
526 
527   gimple_seq_add_stmt_without_update (pre_p, mnt);
528   if (temp)
529     {
530       *expr_p = temp;
531       return GS_OK;
532     }
533 
534   *expr_p = NULL;
535   return GS_ALL_DONE;
536 }
537 
538 /* Return TRUE if an operand (OP) of a given TYPE being copied is
539    really just an empty class copy.
540 
541    Check that the operand has a simple form so that TARGET_EXPRs and
542    non-empty CONSTRUCTORs get reduced properly, and we leave the
543    return slot optimization alone because it isn't a copy.  */
544 
545 static bool
546 simple_empty_class_p (tree type, tree op)
547 {
548   return
549     ((TREE_CODE (op) == COMPOUND_EXPR
550       && simple_empty_class_p (type, TREE_OPERAND (op, 1)))
551      || TREE_CODE (op) == EMPTY_CLASS_EXPR
552      || is_gimple_lvalue (op)
553      || INDIRECT_REF_P (op)
554      || (TREE_CODE (op) == CONSTRUCTOR
555 	 && CONSTRUCTOR_NELTS (op) == 0
556 	 && !TREE_CLOBBER_P (op))
557      || (TREE_CODE (op) == CALL_EXPR
558 	 && !CALL_EXPR_RETURN_SLOT_OPT (op)))
559     && is_really_empty_class (type);
560 }
561 
562 /* Returns true if evaluating E as an lvalue has side-effects;
563    specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't really
564    have side-effects until there is a read or write through it.  */
565 
566 static bool
567 lvalue_has_side_effects (tree e)
568 {
569   if (!TREE_SIDE_EFFECTS (e))
570     return false;
571   while (handled_component_p (e))
572     {
573       if (TREE_CODE (e) == ARRAY_REF
574 	  && TREE_SIDE_EFFECTS (TREE_OPERAND (e, 1)))
575 	return true;
576       e = TREE_OPERAND (e, 0);
577     }
578   if (DECL_P (e))
579     /* Just naming a variable has no side-effects.  */
580     return false;
581   else if (INDIRECT_REF_P (e))
582     /* Similarly, indirection has no side-effects.  */
583     return TREE_SIDE_EFFECTS (TREE_OPERAND (e, 0));
584   else
585     /* For anything else, trust TREE_SIDE_EFFECTS.  */
586     return TREE_SIDE_EFFECTS (e);
587 }
588 
589 /* Do C++-specific gimplification.  Args are as for gimplify_expr.  */
590 
591 int
592 cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
593 {
594   int saved_stmts_are_full_exprs_p = 0;
595   location_t loc = EXPR_LOC_OR_LOC (*expr_p, input_location);
596   enum tree_code code = TREE_CODE (*expr_p);
597   enum gimplify_status ret;
598 
599   if (STATEMENT_CODE_P (code))
600     {
601       saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
602       current_stmt_tree ()->stmts_are_full_exprs_p
603 	= STMT_IS_FULL_EXPR_P (*expr_p);
604     }
605 
606   switch (code)
607     {
608     case AGGR_INIT_EXPR:
609       simplify_aggr_init_expr (expr_p);
610       ret = GS_OK;
611       break;
612 
613     case VEC_INIT_EXPR:
614       {
615 	location_t loc = input_location;
616 	tree init = VEC_INIT_EXPR_INIT (*expr_p);
617 	int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
618 	gcc_assert (EXPR_HAS_LOCATION (*expr_p));
619 	input_location = EXPR_LOCATION (*expr_p);
620 	*expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
621 				  init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
622 				  from_array,
623 				  tf_warning_or_error);
624 	hash_set<tree> pset;
625 	cp_walk_tree (expr_p, cp_fold_r, &pset, NULL);
626 	cp_genericize_tree (expr_p, false);
627 	ret = GS_OK;
628 	input_location = loc;
629       }
630       break;
631 
632     case THROW_EXPR:
633       /* FIXME communicate throw type to back end, probably by moving
634 	 THROW_EXPR into ../tree.def.  */
635       *expr_p = TREE_OPERAND (*expr_p, 0);
636       ret = GS_OK;
637       break;
638 
639     case MUST_NOT_THROW_EXPR:
640       ret = gimplify_must_not_throw_expr (expr_p, pre_p);
641       break;
642 
643       /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
644 	 LHS of an assignment might also be involved in the RHS, as in bug
645 	 25979.  */
646     case INIT_EXPR:
647       if (fn_contains_cilk_spawn_p (cfun))
648 	{
649 	  if (cilk_cp_detect_spawn_and_unwrap (expr_p))
650 	    {
651 	      cilk_cp_gimplify_call_params_in_spawned_fn (expr_p,
652 							  pre_p, post_p);
653 	      return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
654 	    }
655 	  if (seen_error () && contains_cilk_spawn_stmt (*expr_p))
656 	    return GS_ERROR;
657 	}
658 
659       cp_gimplify_init_expr (expr_p);
660       if (TREE_CODE (*expr_p) != INIT_EXPR)
661 	return GS_OK;
662       /* Fall through.  */
663     case MODIFY_EXPR:
664     modify_expr_case:
665       {
666 	if (fn_contains_cilk_spawn_p (cfun)
667 	    && cilk_cp_detect_spawn_and_unwrap (expr_p)
668 	    && !seen_error ())
669 	  {
670 	    cilk_cp_gimplify_call_params_in_spawned_fn (expr_p, pre_p, post_p);
671 	    return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
672 	  }
673 	/* If the back end isn't clever enough to know that the lhs and rhs
674 	   types are the same, add an explicit conversion.  */
675 	tree op0 = TREE_OPERAND (*expr_p, 0);
676 	tree op1 = TREE_OPERAND (*expr_p, 1);
677 
678 	if (!error_operand_p (op0)
679 	    && !error_operand_p (op1)
680 	    && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
681 		|| TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
682 	    && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
683 	  TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
684 					      TREE_TYPE (op0), op1);
685 
686 	else if (simple_empty_class_p (TREE_TYPE (op0), op1))
687 	  {
688 	    /* Remove any copies of empty classes.  Also drop volatile
689 	       variables on the RHS to avoid infinite recursion from
690 	       gimplify_expr trying to load the value.  */
691 	    if (TREE_SIDE_EFFECTS (op1))
692 	      {
693 		if (TREE_THIS_VOLATILE (op1)
694 		    && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
695 		  op1 = build_fold_addr_expr (op1);
696 
697 		gimplify_and_add (op1, pre_p);
698 	      }
699 	    gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
700 			   is_gimple_lvalue, fb_lvalue);
701 	    *expr_p = TREE_OPERAND (*expr_p, 0);
702 	  }
703 	/* P0145 says that the RHS is sequenced before the LHS.
704 	   gimplify_modify_expr gimplifies the RHS before the LHS, but that
705 	   isn't quite strong enough in two cases:
706 
707 	   1) gimplify.c wants to leave a CALL_EXPR on the RHS, which would
708 	   mean it's evaluated after the LHS.
709 
710 	   2) the value calculation of the RHS is also sequenced before the
711 	   LHS, so for scalar assignment we need to preevaluate if the
712 	   RHS could be affected by LHS side-effects even if it has no
713 	   side-effects of its own.  We don't need this for classes because
714 	   class assignment takes its RHS by reference.  */
715        else if (flag_strong_eval_order > 1
716                 && TREE_CODE (*expr_p) == MODIFY_EXPR
717                 && lvalue_has_side_effects (op0)
718 		&& (TREE_CODE (op1) == CALL_EXPR
719 		    || (SCALAR_TYPE_P (TREE_TYPE (op1))
720 			&& !TREE_CONSTANT (op1))))
721 	 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (op1, pre_p);
722       }
723       ret = GS_OK;
724       break;
725 
726     case EMPTY_CLASS_EXPR:
727       /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
728       *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
729       ret = GS_OK;
730       break;
731 
732     case BASELINK:
733       *expr_p = BASELINK_FUNCTIONS (*expr_p);
734       ret = GS_OK;
735       break;
736 
737     case TRY_BLOCK:
738       genericize_try_block (expr_p);
739       ret = GS_OK;
740       break;
741 
742     case HANDLER:
743       genericize_catch_block (expr_p);
744       ret = GS_OK;
745       break;
746 
747     case EH_SPEC_BLOCK:
748       genericize_eh_spec_block (expr_p);
749       ret = GS_OK;
750       break;
751 
752     case USING_STMT:
753       gcc_unreachable ();
754 
755     case FOR_STMT:
756     case WHILE_STMT:
757     case DO_STMT:
758     case SWITCH_STMT:
759     case CONTINUE_STMT:
760     case BREAK_STMT:
761       gcc_unreachable ();
762 
763     case OMP_FOR:
764     case OMP_SIMD:
765     case OMP_DISTRIBUTE:
766     case OMP_TASKLOOP:
767       ret = cp_gimplify_omp_for (expr_p, pre_p);
768       break;
769 
770     case EXPR_STMT:
771       gimplify_expr_stmt (expr_p);
772       ret = GS_OK;
773       break;
774 
775     case UNARY_PLUS_EXPR:
776       {
777 	tree arg = TREE_OPERAND (*expr_p, 0);
778 	tree type = TREE_TYPE (*expr_p);
779 	*expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
780 					    : arg;
781 	ret = GS_OK;
782       }
783       break;
784 
785     case CILK_SPAWN_STMT:
786       gcc_assert(fn_contains_cilk_spawn_p (cfun)
787 		 && cilk_cp_detect_spawn_and_unwrap (expr_p));
788 
789       if (!seen_error ())
790 	{
791 	  cilk_cp_gimplify_call_params_in_spawned_fn (expr_p, pre_p, post_p);
792 	  return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
793 	}
794       return GS_ERROR;
795 
796     case CALL_EXPR:
797       if (fn_contains_cilk_spawn_p (cfun)
798 	  && cilk_cp_detect_spawn_and_unwrap (expr_p)
799 	  && !seen_error ())
800 	{
801 	  cilk_cp_gimplify_call_params_in_spawned_fn (expr_p, pre_p, post_p);
802 	  return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
803 	}
804       ret = GS_OK;
805       if (!CALL_EXPR_FN (*expr_p))
806 	/* Internal function call.  */;
807       else if (CALL_EXPR_REVERSE_ARGS (*expr_p))
808 	{
809 	  /* This is a call to a (compound) assignment operator that used
810 	     the operator syntax; gimplify the RHS first.  */
811 	  gcc_assert (call_expr_nargs (*expr_p) == 2);
812 	  gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p));
813 	  enum gimplify_status t
814 	    = gimplify_arg (&CALL_EXPR_ARG (*expr_p, 1), pre_p, loc);
815 	  if (t == GS_ERROR)
816 	    ret = GS_ERROR;
817 	}
818       else if (CALL_EXPR_ORDERED_ARGS (*expr_p))
819 	{
820 	  /* Leave the last argument for gimplify_call_expr, to avoid problems
821 	     with __builtin_va_arg_pack().  */
822 	  int nargs = call_expr_nargs (*expr_p) - 1;
823 	  for (int i = 0; i < nargs; ++i)
824 	    {
825 	      enum gimplify_status t
826 		= gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc);
827 	      if (t == GS_ERROR)
828 		ret = GS_ERROR;
829 	    }
830 	}
831       else if (flag_strong_eval_order
832 	       && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p))
833 	{
834 	  /* If flag_strong_eval_order, evaluate the object argument first.  */
835 	  tree fntype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
836 	  if (POINTER_TYPE_P (fntype))
837 	    fntype = TREE_TYPE (fntype);
838 	  if (TREE_CODE (fntype) == METHOD_TYPE)
839 	    {
840 	      enum gimplify_status t
841 		= gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p, loc);
842 	      if (t == GS_ERROR)
843 		ret = GS_ERROR;
844 	    }
845 	}
846       break;
847 
848     case RETURN_EXPR:
849       if (TREE_OPERAND (*expr_p, 0)
850 	  && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
851 	      || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
852 	{
853 	  expr_p = &TREE_OPERAND (*expr_p, 0);
854 	  code = TREE_CODE (*expr_p);
855 	  /* Avoid going through the INIT_EXPR case, which can
856 	     degrade INIT_EXPRs into AGGR_INIT_EXPRs.  */
857 	  goto modify_expr_case;
858 	}
859       /* Fall through.  */
860 
861     default:
862       ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
863       break;
864     }
865 
866   /* Restore saved state.  */
867   if (STATEMENT_CODE_P (code))
868     current_stmt_tree ()->stmts_are_full_exprs_p
869       = saved_stmts_are_full_exprs_p;
870 
871   return ret;
872 }
873 
874 static inline bool
875 is_invisiref_parm (const_tree t)
876 {
877   return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
878 	  && DECL_BY_REFERENCE (t));
879 }
880 
881 /* Return true if the uid in both int tree maps are equal.  */
882 
883 bool
884 cxx_int_tree_map_hasher::equal (cxx_int_tree_map *a, cxx_int_tree_map *b)
885 {
886   return (a->uid == b->uid);
887 }
888 
889 /* Hash a UID in a cxx_int_tree_map.  */
890 
891 unsigned int
892 cxx_int_tree_map_hasher::hash (cxx_int_tree_map *item)
893 {
894   return item->uid;
895 }
896 
897 /* A stable comparison routine for use with splay trees and DECLs.  */
898 
899 static int
900 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
901 {
902   tree a = (tree) xa;
903   tree b = (tree) xb;
904 
905   return DECL_UID (a) - DECL_UID (b);
906 }
907 
908 /* OpenMP context during genericization.  */
909 
910 struct cp_genericize_omp_taskreg
911 {
912   bool is_parallel;
913   bool default_shared;
914   struct cp_genericize_omp_taskreg *outer;
915   splay_tree variables;
916 };
917 
918 /* Return true if genericization should try to determine if
919    DECL is firstprivate or shared within task regions.  */
920 
921 static bool
922 omp_var_to_track (tree decl)
923 {
924   tree type = TREE_TYPE (decl);
925   if (is_invisiref_parm (decl))
926     type = TREE_TYPE (type);
927   else if (TREE_CODE (type) == REFERENCE_TYPE)
928     type = TREE_TYPE (type);
929   while (TREE_CODE (type) == ARRAY_TYPE)
930     type = TREE_TYPE (type);
931   if (type == error_mark_node || !CLASS_TYPE_P (type))
932     return false;
933   if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
934     return false;
935   if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
936     return false;
937   return true;
938 }
939 
940 /* Note DECL use in OpenMP region OMP_CTX during genericization.  */
941 
942 static void
943 omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
944 {
945   splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
946 					 (splay_tree_key) decl);
947   if (n == NULL)
948     {
949       int flags = OMP_CLAUSE_DEFAULT_SHARED;
950       if (omp_ctx->outer)
951 	omp_cxx_notice_variable (omp_ctx->outer, decl);
952       if (!omp_ctx->default_shared)
953 	{
954 	  struct cp_genericize_omp_taskreg *octx;
955 
956 	  for (octx = omp_ctx->outer; octx; octx = octx->outer)
957 	    {
958 	      n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
959 	      if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
960 		{
961 		  flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
962 		  break;
963 		}
964 	      if (octx->is_parallel)
965 		break;
966 	    }
967 	  if (octx == NULL
968 	      && (TREE_CODE (decl) == PARM_DECL
969 		  || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
970 		      && DECL_CONTEXT (decl) == current_function_decl)))
971 	    flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
972 	  if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
973 	    {
974 	      /* DECL is implicitly determined firstprivate in
975 		 the current task construct.  Ensure copy ctor and
976 		 dtor are instantiated, because during gimplification
977 		 it will be already too late.  */
978 	      tree type = TREE_TYPE (decl);
979 	      if (is_invisiref_parm (decl))
980 		type = TREE_TYPE (type);
981 	      else if (TREE_CODE (type) == REFERENCE_TYPE)
982 		type = TREE_TYPE (type);
983 	      while (TREE_CODE (type) == ARRAY_TYPE)
984 		type = TREE_TYPE (type);
985 	      get_copy_ctor (type, tf_none);
986 	      get_dtor (type, tf_none);
987 	    }
988 	}
989       splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
990     }
991 }
992 
993 /* Genericization context.  */
994 
995 struct cp_genericize_data
996 {
997   hash_set<tree> *p_set;
998   vec<tree> bind_expr_stack;
999   struct cp_genericize_omp_taskreg *omp_ctx;
1000   tree try_block;
1001   bool no_sanitize_p;
1002   bool handle_invisiref_parm_p;
1003 };
1004 
1005 /* Perform any pre-gimplification folding of C++ front end trees to
1006    GENERIC.
1007    Note:  The folding of none-omp cases is something to move into
1008      the middle-end.  As for now we have most foldings only on GENERIC
1009      in fold-const, we need to perform this before transformation to
1010      GIMPLE-form.  */
1011 
1012 static tree
1013 cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data)
1014 {
1015   tree stmt;
1016   enum tree_code code;
1017 
1018   *stmt_p = stmt = cp_fold (*stmt_p);
1019 
1020   if (((hash_set<tree> *) data)->add (stmt))
1021     {
1022       /* Don't walk subtrees of stmts we've already walked once, otherwise
1023 	 we can have exponential complexity with e.g. lots of nested
1024 	 SAVE_EXPRs or TARGET_EXPRs.  cp_fold uses a cache and will return
1025 	 always the same tree, which the first time cp_fold_r has been
1026 	 called on it had the subtrees walked.  */
1027       *walk_subtrees = 0;
1028       return NULL;
1029     }
1030 
1031   code = TREE_CODE (stmt);
1032   if (code == OMP_FOR || code == OMP_SIMD || code == OMP_DISTRIBUTE
1033       || code == OMP_TASKLOOP || code == CILK_FOR || code == CILK_SIMD
1034       || code == OACC_LOOP)
1035     {
1036       tree x;
1037       int i, n;
1038 
1039       cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
1040       cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
1041       cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
1042       x = OMP_FOR_COND (stmt);
1043       if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
1044 	{
1045 	  cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
1046 	  cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
1047 	}
1048       else if (x && TREE_CODE (x) == TREE_VEC)
1049 	{
1050 	  n = TREE_VEC_LENGTH (x);
1051 	  for (i = 0; i < n; i++)
1052 	    {
1053 	      tree o = TREE_VEC_ELT (x, i);
1054 	      if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
1055 		cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
1056 	    }
1057 	}
1058       x = OMP_FOR_INCR (stmt);
1059       if (x && TREE_CODE (x) == TREE_VEC)
1060 	{
1061 	  n = TREE_VEC_LENGTH (x);
1062 	  for (i = 0; i < n; i++)
1063 	    {
1064 	      tree o = TREE_VEC_ELT (x, i);
1065 	      if (o && TREE_CODE (o) == MODIFY_EXPR)
1066 		o = TREE_OPERAND (o, 1);
1067 	      if (o && (TREE_CODE (o) == PLUS_EXPR || TREE_CODE (o) == MINUS_EXPR
1068 			|| TREE_CODE (o) == POINTER_PLUS_EXPR))
1069 		{
1070 		  cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
1071 		  cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
1072 		}
1073 	    }
1074 	}
1075       cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
1076       *walk_subtrees = 0;
1077     }
1078 
1079   return NULL;
1080 }
1081 
1082 /* Fold ALL the trees!  FIXME we should be able to remove this, but
1083    apparently that still causes optimization regressions.  */
1084 
1085 void
1086 cp_fold_function (tree fndecl)
1087 {
1088   hash_set<tree> pset;
1089   cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &pset, NULL);
1090 }
1091 
1092 /* Perform any pre-gimplification lowering of C++ front end trees to
1093    GENERIC.  */
1094 
1095 static tree
1096 cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
1097 {
1098   tree stmt = *stmt_p;
1099   struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
1100   hash_set<tree> *p_set = wtd->p_set;
1101 
1102   /* If in an OpenMP context, note var uses.  */
1103   if (__builtin_expect (wtd->omp_ctx != NULL, 0)
1104       && (VAR_P (stmt)
1105 	  || TREE_CODE (stmt) == PARM_DECL
1106 	  || TREE_CODE (stmt) == RESULT_DECL)
1107       && omp_var_to_track (stmt))
1108     omp_cxx_notice_variable (wtd->omp_ctx, stmt);
1109 
1110   /* Don't dereference parms in a thunk, pass the references through. */
1111   if ((TREE_CODE (stmt) == CALL_EXPR && CALL_FROM_THUNK_P (stmt))
1112       || (TREE_CODE (stmt) == AGGR_INIT_EXPR && AGGR_INIT_FROM_THUNK_P (stmt)))
1113     {
1114       *walk_subtrees = 0;
1115       return NULL;
1116     }
1117 
1118   /* Dereference invisible reference parms.  */
1119   if (wtd->handle_invisiref_parm_p && is_invisiref_parm (stmt))
1120     {
1121       *stmt_p = convert_from_reference (stmt);
1122       p_set->add (*stmt_p);
1123       *walk_subtrees = 0;
1124       return NULL;
1125     }
1126 
1127   /* Map block scope extern declarations to visible declarations with the
1128      same name and type in outer scopes if any.  */
1129   if (cp_function_chain->extern_decl_map
1130       && VAR_OR_FUNCTION_DECL_P (stmt)
1131       && DECL_EXTERNAL (stmt))
1132     {
1133       struct cxx_int_tree_map *h, in;
1134       in.uid = DECL_UID (stmt);
1135       h = cp_function_chain->extern_decl_map->find_with_hash (&in, in.uid);
1136       if (h)
1137 	{
1138 	  *stmt_p = h->to;
1139 	  TREE_USED (h->to) |= TREE_USED (stmt);
1140 	  *walk_subtrees = 0;
1141 	  return NULL;
1142 	}
1143     }
1144 
1145   if (TREE_CODE (stmt) == INTEGER_CST
1146       && TREE_CODE (TREE_TYPE (stmt)) == REFERENCE_TYPE
1147       && (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
1148       && !wtd->no_sanitize_p)
1149     {
1150       ubsan_maybe_instrument_reference (stmt_p);
1151       if (*stmt_p != stmt)
1152 	{
1153 	  *walk_subtrees = 0;
1154 	  return NULL_TREE;
1155 	}
1156     }
1157 
1158   /* Other than invisiref parms, don't walk the same tree twice.  */
1159   if (p_set->contains (stmt))
1160     {
1161       *walk_subtrees = 0;
1162       return NULL_TREE;
1163     }
1164 
1165   if (TREE_CODE (stmt) == ADDR_EXPR
1166       && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
1167     {
1168       /* If in an OpenMP context, note var uses.  */
1169       if (__builtin_expect (wtd->omp_ctx != NULL, 0)
1170 	  && omp_var_to_track (TREE_OPERAND (stmt, 0)))
1171 	omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
1172       *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
1173       *walk_subtrees = 0;
1174     }
1175   else if (TREE_CODE (stmt) == RETURN_EXPR
1176 	   && TREE_OPERAND (stmt, 0)
1177 	   && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
1178     /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
1179     *walk_subtrees = 0;
1180   else if (TREE_CODE (stmt) == OMP_CLAUSE)
1181     switch (OMP_CLAUSE_CODE (stmt))
1182       {
1183       case OMP_CLAUSE_LASTPRIVATE:
1184 	/* Don't dereference an invisiref in OpenMP clauses.  */
1185 	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1186 	  {
1187 	    *walk_subtrees = 0;
1188 	    if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
1189 	      cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
1190 			    cp_genericize_r, data, NULL);
1191 	  }
1192 	break;
1193       case OMP_CLAUSE_PRIVATE:
1194 	/* Don't dereference an invisiref in OpenMP clauses.  */
1195 	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1196 	  *walk_subtrees = 0;
1197 	else if (wtd->omp_ctx != NULL)
1198 	  {
1199 	    /* Private clause doesn't cause any references to the
1200 	       var in outer contexts, avoid calling
1201 	       omp_cxx_notice_variable for it.  */
1202 	    struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
1203 	    wtd->omp_ctx = NULL;
1204 	    cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
1205 			  data, NULL);
1206 	    wtd->omp_ctx = old;
1207 	    *walk_subtrees = 0;
1208 	  }
1209 	break;
1210       case OMP_CLAUSE_SHARED:
1211       case OMP_CLAUSE_FIRSTPRIVATE:
1212       case OMP_CLAUSE_COPYIN:
1213       case OMP_CLAUSE_COPYPRIVATE:
1214 	/* Don't dereference an invisiref in OpenMP clauses.  */
1215 	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1216 	  *walk_subtrees = 0;
1217 	break;
1218       case OMP_CLAUSE_REDUCTION:
1219 	/* Don't dereference an invisiref in reduction clause's
1220 	   OMP_CLAUSE_DECL either.  OMP_CLAUSE_REDUCTION_{INIT,MERGE}
1221 	   still needs to be genericized.  */
1222 	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1223 	  {
1224 	    *walk_subtrees = 0;
1225 	    if (OMP_CLAUSE_REDUCTION_INIT (stmt))
1226 	      cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
1227 			    cp_genericize_r, data, NULL);
1228 	    if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
1229 	      cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
1230 			    cp_genericize_r, data, NULL);
1231 	  }
1232 	break;
1233       default:
1234 	break;
1235       }
1236   else if (IS_TYPE_OR_DECL_P (stmt))
1237     *walk_subtrees = 0;
1238 
1239   /* Due to the way voidify_wrapper_expr is written, we don't get a chance
1240      to lower this construct before scanning it, so we need to lower these
1241      before doing anything else.  */
1242   else if (TREE_CODE (stmt) == CLEANUP_STMT)
1243     *stmt_p = build2_loc (EXPR_LOCATION (stmt),
1244 			  CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
1245 						 : TRY_FINALLY_EXPR,
1246 			  void_type_node,
1247 			  CLEANUP_BODY (stmt),
1248 			  CLEANUP_EXPR (stmt));
1249 
1250   else if (TREE_CODE (stmt) == IF_STMT)
1251     {
1252       genericize_if_stmt (stmt_p);
1253       /* *stmt_p has changed, tail recurse to handle it again.  */
1254       return cp_genericize_r (stmt_p, walk_subtrees, data);
1255     }
1256 
1257   /* COND_EXPR might have incompatible types in branches if one or both
1258      arms are bitfields.  Fix it up now.  */
1259   else if (TREE_CODE (stmt) == COND_EXPR)
1260     {
1261       tree type_left
1262 	= (TREE_OPERAND (stmt, 1)
1263 	   ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
1264 	   : NULL_TREE);
1265       tree type_right
1266 	= (TREE_OPERAND (stmt, 2)
1267 	   ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
1268 	   : NULL_TREE);
1269       if (type_left
1270 	  && !useless_type_conversion_p (TREE_TYPE (stmt),
1271 					 TREE_TYPE (TREE_OPERAND (stmt, 1))))
1272 	{
1273 	  TREE_OPERAND (stmt, 1)
1274 	    = fold_convert (type_left, TREE_OPERAND (stmt, 1));
1275 	  gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
1276 						 type_left));
1277 	}
1278       if (type_right
1279 	  && !useless_type_conversion_p (TREE_TYPE (stmt),
1280 					 TREE_TYPE (TREE_OPERAND (stmt, 2))))
1281 	{
1282 	  TREE_OPERAND (stmt, 2)
1283 	    = fold_convert (type_right, TREE_OPERAND (stmt, 2));
1284 	  gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
1285 						 type_right));
1286 	}
1287     }
1288 
1289   else if (TREE_CODE (stmt) == BIND_EXPR)
1290     {
1291       if (__builtin_expect (wtd->omp_ctx != NULL, 0))
1292 	{
1293 	  tree decl;
1294 	  for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
1295 	    if (VAR_P (decl)
1296 		&& !DECL_EXTERNAL (decl)
1297 		&& omp_var_to_track (decl))
1298 	      {
1299 		splay_tree_node n
1300 		  = splay_tree_lookup (wtd->omp_ctx->variables,
1301 				       (splay_tree_key) decl);
1302 		if (n == NULL)
1303 		  splay_tree_insert (wtd->omp_ctx->variables,
1304 				     (splay_tree_key) decl,
1305 				     TREE_STATIC (decl)
1306 				     ? OMP_CLAUSE_DEFAULT_SHARED
1307 				     : OMP_CLAUSE_DEFAULT_PRIVATE);
1308 	      }
1309 	}
1310       if (flag_sanitize
1311 	  & (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
1312 	{
1313 	  /* The point here is to not sanitize static initializers.  */
1314 	  bool no_sanitize_p = wtd->no_sanitize_p;
1315 	  wtd->no_sanitize_p = true;
1316 	  for (tree decl = BIND_EXPR_VARS (stmt);
1317 	       decl;
1318 	       decl = DECL_CHAIN (decl))
1319 	    if (VAR_P (decl)
1320 		&& TREE_STATIC (decl)
1321 		&& DECL_INITIAL (decl))
1322 	      cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
1323 	  wtd->no_sanitize_p = no_sanitize_p;
1324 	}
1325       wtd->bind_expr_stack.safe_push (stmt);
1326       cp_walk_tree (&BIND_EXPR_BODY (stmt),
1327 		    cp_genericize_r, data, NULL);
1328       wtd->bind_expr_stack.pop ();
1329     }
1330 
1331   else if (TREE_CODE (stmt) == USING_STMT)
1332     {
1333       tree block = NULL_TREE;
1334 
1335       /* Get the innermost inclosing GIMPLE_BIND that has a non NULL
1336          BLOCK, and append an IMPORTED_DECL to its
1337 	 BLOCK_VARS chained list.  */
1338       if (wtd->bind_expr_stack.exists ())
1339 	{
1340 	  int i;
1341 	  for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
1342 	    if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
1343 	      break;
1344 	}
1345       if (block)
1346 	{
1347 	  tree using_directive;
1348 	  gcc_assert (TREE_OPERAND (stmt, 0));
1349 
1350 	  using_directive = make_node (IMPORTED_DECL);
1351 	  TREE_TYPE (using_directive) = void_type_node;
1352 
1353 	  IMPORTED_DECL_ASSOCIATED_DECL (using_directive)
1354 	    = TREE_OPERAND (stmt, 0);
1355 	  DECL_CHAIN (using_directive) = BLOCK_VARS (block);
1356 	  BLOCK_VARS (block) = using_directive;
1357 	}
1358       /* The USING_STMT won't appear in GENERIC.  */
1359       *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1360       *walk_subtrees = 0;
1361     }
1362 
1363   else if (TREE_CODE (stmt) == DECL_EXPR
1364 	   && TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
1365     {
1366       /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
1367       *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1368       *walk_subtrees = 0;
1369     }
1370   else if (TREE_CODE (stmt) == DECL_EXPR)
1371     {
1372       tree d = DECL_EXPR_DECL (stmt);
1373       if (VAR_P (d))
1374 	gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
1375     }
1376   else if (TREE_CODE (stmt) == OMP_PARALLEL
1377 	   || TREE_CODE (stmt) == OMP_TASK
1378 	   || TREE_CODE (stmt) == OMP_TASKLOOP)
1379     {
1380       struct cp_genericize_omp_taskreg omp_ctx;
1381       tree c, decl;
1382       splay_tree_node n;
1383 
1384       *walk_subtrees = 0;
1385       cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
1386       omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
1387       omp_ctx.default_shared = omp_ctx.is_parallel;
1388       omp_ctx.outer = wtd->omp_ctx;
1389       omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
1390       wtd->omp_ctx = &omp_ctx;
1391       for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
1392 	switch (OMP_CLAUSE_CODE (c))
1393 	  {
1394 	  case OMP_CLAUSE_SHARED:
1395 	  case OMP_CLAUSE_PRIVATE:
1396 	  case OMP_CLAUSE_FIRSTPRIVATE:
1397 	  case OMP_CLAUSE_LASTPRIVATE:
1398 	    decl = OMP_CLAUSE_DECL (c);
1399 	    if (decl == error_mark_node || !omp_var_to_track (decl))
1400 	      break;
1401 	    n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
1402 	    if (n != NULL)
1403 	      break;
1404 	    splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
1405 			       OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
1406 			       ? OMP_CLAUSE_DEFAULT_SHARED
1407 			       : OMP_CLAUSE_DEFAULT_PRIVATE);
1408 	    if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE
1409 		&& omp_ctx.outer)
1410 	      omp_cxx_notice_variable (omp_ctx.outer, decl);
1411 	    break;
1412 	  case OMP_CLAUSE_DEFAULT:
1413 	    if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
1414 	      omp_ctx.default_shared = true;
1415 	  default:
1416 	    break;
1417 	  }
1418       if (TREE_CODE (stmt) == OMP_TASKLOOP)
1419 	genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
1420       else
1421 	cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
1422       wtd->omp_ctx = omp_ctx.outer;
1423       splay_tree_delete (omp_ctx.variables);
1424     }
1425   else if (TREE_CODE (stmt) == TRY_BLOCK)
1426     {
1427       *walk_subtrees = 0;
1428       tree try_block = wtd->try_block;
1429       wtd->try_block = stmt;
1430       cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
1431       wtd->try_block = try_block;
1432       cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
1433     }
1434   else if (TREE_CODE (stmt) == MUST_NOT_THROW_EXPR)
1435     {
1436       /* MUST_NOT_THROW_COND might be something else with TM.  */
1437       if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
1438 	{
1439 	  *walk_subtrees = 0;
1440 	  tree try_block = wtd->try_block;
1441 	  wtd->try_block = stmt;
1442 	  cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
1443 	  wtd->try_block = try_block;
1444 	}
1445     }
1446   else if (TREE_CODE (stmt) == THROW_EXPR)
1447     {
1448       location_t loc = location_of (stmt);
1449       if (TREE_NO_WARNING (stmt))
1450 	/* Never mind.  */;
1451       else if (wtd->try_block)
1452 	{
1453 	  if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR
1454 	      && warning_at (loc, OPT_Wterminate,
1455 			     "throw will always call terminate()")
1456 	      && cxx_dialect >= cxx11
1457 	      && DECL_DESTRUCTOR_P (current_function_decl))
1458 	    inform (loc, "in C++11 destructors default to noexcept");
1459 	}
1460       else
1461 	{
1462 	  if (warn_cxx11_compat && cxx_dialect < cxx11
1463 	      && DECL_DESTRUCTOR_P (current_function_decl)
1464 	      && (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
1465 		  == NULL_TREE)
1466 	      && (get_defaulted_eh_spec (current_function_decl)
1467 		  == empty_except_spec))
1468 	    warning_at (loc, OPT_Wc__11_compat,
1469 			"in C++11 this throw will terminate because "
1470 			"destructors default to noexcept");
1471 	}
1472     }
1473   else if (TREE_CODE (stmt) == CONVERT_EXPR)
1474     gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
1475   else if (TREE_CODE (stmt) == FOR_STMT)
1476     genericize_for_stmt (stmt_p, walk_subtrees, data);
1477   else if (TREE_CODE (stmt) == WHILE_STMT)
1478     genericize_while_stmt (stmt_p, walk_subtrees, data);
1479   else if (TREE_CODE (stmt) == DO_STMT)
1480     genericize_do_stmt (stmt_p, walk_subtrees, data);
1481   else if (TREE_CODE (stmt) == SWITCH_STMT)
1482     genericize_switch_stmt (stmt_p, walk_subtrees, data);
1483   else if (TREE_CODE (stmt) == CONTINUE_STMT)
1484     genericize_continue_stmt (stmt_p);
1485   else if (TREE_CODE (stmt) == BREAK_STMT)
1486     genericize_break_stmt (stmt_p);
1487   else if (TREE_CODE (stmt) == OMP_FOR
1488 	   || TREE_CODE (stmt) == OMP_SIMD
1489 	   || TREE_CODE (stmt) == OMP_DISTRIBUTE)
1490     genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
1491   else if (TREE_CODE (stmt) == PTRMEM_CST)
1492     {
1493       /* By the time we get here we're handing off to the back end, so we don't
1494 	 need or want to preserve PTRMEM_CST anymore.  */
1495       *stmt_p = cplus_expand_constant (stmt);
1496       *walk_subtrees = 0;
1497     }
1498   else if (TREE_CODE (stmt) == MEM_REF)
1499     {
1500       /* For MEM_REF, make sure not to sanitize the second operand even
1501          if it has reference type.  It is just an offset with a type
1502 	 holding other information.  There is no other processing we
1503 	 need to do for INTEGER_CSTs, so just ignore the second argument
1504 	 unconditionally.  */
1505       cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
1506       *walk_subtrees = 0;
1507     }
1508   else if ((flag_sanitize
1509 	    & (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
1510 	   && !wtd->no_sanitize_p)
1511     {
1512       if ((flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
1513 	  && TREE_CODE (stmt) == NOP_EXPR
1514 	  && TREE_CODE (TREE_TYPE (stmt)) == REFERENCE_TYPE)
1515 	ubsan_maybe_instrument_reference (stmt_p);
1516       else if (TREE_CODE (stmt) == CALL_EXPR)
1517 	{
1518 	  tree fn = CALL_EXPR_FN (stmt);
1519 	  if (fn != NULL_TREE
1520 	      && !error_operand_p (fn)
1521 	      && POINTER_TYPE_P (TREE_TYPE (fn))
1522 	      && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
1523 	    {
1524 	      bool is_ctor
1525 		= TREE_CODE (fn) == ADDR_EXPR
1526 		  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
1527 		  && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
1528 	      if (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
1529 		ubsan_maybe_instrument_member_call (stmt, is_ctor);
1530 	      if ((flag_sanitize & SANITIZE_VPTR) && !is_ctor)
1531 		cp_ubsan_maybe_instrument_member_call (stmt);
1532 	    }
1533 	}
1534     }
1535 
1536   p_set->add (*stmt_p);
1537 
1538   return NULL;
1539 }
1540 
1541 /* Lower C++ front end trees to GENERIC in T_P.  */
1542 
1543 static void
1544 cp_genericize_tree (tree* t_p, bool handle_invisiref_parm_p)
1545 {
1546   struct cp_genericize_data wtd;
1547 
1548   wtd.p_set = new hash_set<tree>;
1549   wtd.bind_expr_stack.create (0);
1550   wtd.omp_ctx = NULL;
1551   wtd.try_block = NULL_TREE;
1552   wtd.no_sanitize_p = false;
1553   wtd.handle_invisiref_parm_p = handle_invisiref_parm_p;
1554   cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
1555   delete wtd.p_set;
1556   wtd.bind_expr_stack.release ();
1557   if (flag_sanitize & SANITIZE_VPTR)
1558     cp_ubsan_instrument_member_accesses (t_p);
1559 }
1560 
1561 /* If a function that should end with a return in non-void
1562    function doesn't obviously end with return, add ubsan
1563    instrumentation code to verify it at runtime.  */
1564 
1565 static void
1566 cp_ubsan_maybe_instrument_return (tree fndecl)
1567 {
1568   if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
1569       || DECL_CONSTRUCTOR_P (fndecl)
1570       || DECL_DESTRUCTOR_P (fndecl)
1571       || !targetm.warn_func_return (fndecl))
1572     return;
1573 
1574   tree t = DECL_SAVED_TREE (fndecl);
1575   while (t)
1576     {
1577       switch (TREE_CODE (t))
1578 	{
1579 	case BIND_EXPR:
1580 	  t = BIND_EXPR_BODY (t);
1581 	  continue;
1582 	case TRY_FINALLY_EXPR:
1583 	  t = TREE_OPERAND (t, 0);
1584 	  continue;
1585 	case STATEMENT_LIST:
1586 	  {
1587 	    tree_stmt_iterator i = tsi_last (t);
1588 	    if (!tsi_end_p (i))
1589 	      {
1590 		t = tsi_stmt (i);
1591 		continue;
1592 	      }
1593 	  }
1594 	  break;
1595 	case RETURN_EXPR:
1596 	  return;
1597 	default:
1598 	  break;
1599 	}
1600       break;
1601     }
1602   if (t == NULL_TREE)
1603     return;
1604   tree *p = &DECL_SAVED_TREE (fndecl);
1605   if (TREE_CODE (*p) == BIND_EXPR)
1606     p = &BIND_EXPR_BODY (*p);
1607   t = ubsan_instrument_return (DECL_SOURCE_LOCATION (fndecl));
1608   append_to_statement_list (t, p);
1609 }
1610 
1611 void
1612 cp_genericize (tree fndecl)
1613 {
1614   tree t;
1615 
1616   /* Fix up the types of parms passed by invisible reference.  */
1617   for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
1618     if (TREE_ADDRESSABLE (TREE_TYPE (t)))
1619       {
1620 	/* If a function's arguments are copied to create a thunk,
1621 	   then DECL_BY_REFERENCE will be set -- but the type of the
1622 	   argument will be a pointer type, so we will never get
1623 	   here.  */
1624 	gcc_assert (!DECL_BY_REFERENCE (t));
1625 	gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
1626 	TREE_TYPE (t) = DECL_ARG_TYPE (t);
1627 	DECL_BY_REFERENCE (t) = 1;
1628 	TREE_ADDRESSABLE (t) = 0;
1629 	relayout_decl (t);
1630       }
1631 
1632   /* Do the same for the return value.  */
1633   if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
1634     {
1635       t = DECL_RESULT (fndecl);
1636       TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
1637       DECL_BY_REFERENCE (t) = 1;
1638       TREE_ADDRESSABLE (t) = 0;
1639       relayout_decl (t);
1640       if (DECL_NAME (t))
1641 	{
1642 	  /* Adjust DECL_VALUE_EXPR of the original var.  */
1643 	  tree outer = outer_curly_brace_block (current_function_decl);
1644 	  tree var;
1645 
1646 	  if (outer)
1647 	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
1648 	      if (VAR_P (var)
1649 		  && DECL_NAME (t) == DECL_NAME (var)
1650 		  && DECL_HAS_VALUE_EXPR_P (var)
1651 		  && DECL_VALUE_EXPR (var) == t)
1652 		{
1653 		  tree val = convert_from_reference (t);
1654 		  SET_DECL_VALUE_EXPR (var, val);
1655 		  break;
1656 		}
1657 	}
1658     }
1659 
1660   /* If we're a clone, the body is already GIMPLE.  */
1661   if (DECL_CLONED_FUNCTION_P (fndecl))
1662     return;
1663 
1664   /* Allow cp_genericize calls to be nested.  */
1665   tree save_bc_label[2];
1666   save_bc_label[bc_break] = bc_label[bc_break];
1667   save_bc_label[bc_continue] = bc_label[bc_continue];
1668   bc_label[bc_break] = NULL_TREE;
1669   bc_label[bc_continue] = NULL_TREE;
1670 
1671   /* Expand all the array notations here.  */
1672   if (flag_cilkplus
1673       && contains_array_notation_expr (DECL_SAVED_TREE (fndecl)))
1674     DECL_SAVED_TREE (fndecl)
1675       = expand_array_notation_exprs (DECL_SAVED_TREE (fndecl));
1676 
1677   /* We do want to see every occurrence of the parms, so we can't just use
1678      walk_tree's hash functionality.  */
1679   cp_genericize_tree (&DECL_SAVED_TREE (fndecl), true);
1680 
1681   if (flag_sanitize & SANITIZE_RETURN
1682       && do_ubsan_in_current_function ())
1683     cp_ubsan_maybe_instrument_return (fndecl);
1684 
1685   /* Do everything else.  */
1686   c_genericize (fndecl);
1687 
1688   gcc_assert (bc_label[bc_break] == NULL);
1689   gcc_assert (bc_label[bc_continue] == NULL);
1690   bc_label[bc_break] = save_bc_label[bc_break];
1691   bc_label[bc_continue] = save_bc_label[bc_continue];
1692 }
1693 
1694 /* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
1695    NULL if there is in fact nothing to do.  ARG2 may be null if FN
1696    actually only takes one argument.  */
1697 
1698 static tree
1699 cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
1700 {
1701   tree defparm, parm, t;
1702   int i = 0;
1703   int nargs;
1704   tree *argarray;
1705 
1706   if (fn == NULL)
1707     return NULL;
1708 
1709   nargs = list_length (DECL_ARGUMENTS (fn));
1710   argarray = XALLOCAVEC (tree, nargs);
1711 
1712   defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
1713   if (arg2)
1714     defparm = TREE_CHAIN (defparm);
1715 
1716   if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
1717     {
1718       tree inner_type = TREE_TYPE (arg1);
1719       tree start1, end1, p1;
1720       tree start2 = NULL, p2 = NULL;
1721       tree ret = NULL, lab;
1722 
1723       start1 = arg1;
1724       start2 = arg2;
1725       do
1726 	{
1727 	  inner_type = TREE_TYPE (inner_type);
1728 	  start1 = build4 (ARRAY_REF, inner_type, start1,
1729 			   size_zero_node, NULL, NULL);
1730 	  if (arg2)
1731 	    start2 = build4 (ARRAY_REF, inner_type, start2,
1732 			     size_zero_node, NULL, NULL);
1733 	}
1734       while (TREE_CODE (inner_type) == ARRAY_TYPE);
1735       start1 = build_fold_addr_expr_loc (input_location, start1);
1736       if (arg2)
1737 	start2 = build_fold_addr_expr_loc (input_location, start2);
1738 
1739       end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
1740       end1 = fold_build_pointer_plus (start1, end1);
1741 
1742       p1 = create_tmp_var (TREE_TYPE (start1));
1743       t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
1744       append_to_statement_list (t, &ret);
1745 
1746       if (arg2)
1747 	{
1748 	  p2 = create_tmp_var (TREE_TYPE (start2));
1749 	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
1750 	  append_to_statement_list (t, &ret);
1751 	}
1752 
1753       lab = create_artificial_label (input_location);
1754       t = build1 (LABEL_EXPR, void_type_node, lab);
1755       append_to_statement_list (t, &ret);
1756 
1757       argarray[i++] = p1;
1758       if (arg2)
1759 	argarray[i++] = p2;
1760       /* Handle default arguments.  */
1761       for (parm = defparm; parm && parm != void_list_node;
1762 	   parm = TREE_CHAIN (parm), i++)
1763 	argarray[i] = convert_default_arg (TREE_VALUE (parm),
1764 					   TREE_PURPOSE (parm), fn, i,
1765 					   tf_warning_or_error);
1766       t = build_call_a (fn, i, argarray);
1767       t = fold_convert (void_type_node, t);
1768       t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
1769       append_to_statement_list (t, &ret);
1770 
1771       t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
1772       t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
1773       append_to_statement_list (t, &ret);
1774 
1775       if (arg2)
1776 	{
1777 	  t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
1778 	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
1779 	  append_to_statement_list (t, &ret);
1780 	}
1781 
1782       t = build2 (NE_EXPR, boolean_type_node, p1, end1);
1783       t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
1784       append_to_statement_list (t, &ret);
1785 
1786       return ret;
1787     }
1788   else
1789     {
1790       argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
1791       if (arg2)
1792 	argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
1793       /* Handle default arguments.  */
1794       for (parm = defparm; parm && parm != void_list_node;
1795 	   parm = TREE_CHAIN (parm), i++)
1796 	argarray[i] = convert_default_arg (TREE_VALUE (parm),
1797 					   TREE_PURPOSE (parm),
1798 					   fn, i, tf_warning_or_error);
1799       t = build_call_a (fn, i, argarray);
1800       t = fold_convert (void_type_node, t);
1801       return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
1802     }
1803 }
1804 
1805 /* Return code to initialize DECL with its default constructor, or
1806    NULL if there's nothing to do.  */
1807 
1808 tree
1809 cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
1810 {
1811   tree info = CP_OMP_CLAUSE_INFO (clause);
1812   tree ret = NULL;
1813 
1814   if (info)
1815     ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);
1816 
1817   return ret;
1818 }
1819 
1820 /* Return code to initialize DST with a copy constructor from SRC.  */
1821 
1822 tree
1823 cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
1824 {
1825   tree info = CP_OMP_CLAUSE_INFO (clause);
1826   tree ret = NULL;
1827 
1828   if (info)
1829     ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
1830   if (ret == NULL)
1831     ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
1832 
1833   return ret;
1834 }
1835 
1836 /* Similarly, except use an assignment operator instead.  */
1837 
1838 tree
1839 cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
1840 {
1841   tree info = CP_OMP_CLAUSE_INFO (clause);
1842   tree ret = NULL;
1843 
1844   if (info)
1845     ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
1846   if (ret == NULL)
1847     ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
1848 
1849   return ret;
1850 }
1851 
1852 /* Return code to destroy DECL.  */
1853 
1854 tree
1855 cxx_omp_clause_dtor (tree clause, tree decl)
1856 {
1857   tree info = CP_OMP_CLAUSE_INFO (clause);
1858   tree ret = NULL;
1859 
1860   if (info)
1861     ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);
1862 
1863   return ret;
1864 }
1865 
1866 /* True if OpenMP should privatize what this DECL points to rather
1867    than the DECL itself.  */
1868 
1869 bool
1870 cxx_omp_privatize_by_reference (const_tree decl)
1871 {
1872   return (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
1873 	  || is_invisiref_parm (decl));
1874 }
1875 
1876 /* Return true if DECL is const qualified var having no mutable member.  */
1877 bool
1878 cxx_omp_const_qual_no_mutable (tree decl)
1879 {
1880   tree type = TREE_TYPE (decl);
1881   if (TREE_CODE (type) == REFERENCE_TYPE)
1882     {
1883       if (!is_invisiref_parm (decl))
1884 	return false;
1885       type = TREE_TYPE (type);
1886 
1887       if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
1888 	{
1889 	  /* NVR doesn't preserve const qualification of the
1890 	     variable's type.  */
1891 	  tree outer = outer_curly_brace_block (current_function_decl);
1892 	  tree var;
1893 
1894 	  if (outer)
1895 	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
1896 	      if (VAR_P (var)
1897 		  && DECL_NAME (decl) == DECL_NAME (var)
1898 		  && (TYPE_MAIN_VARIANT (type)
1899 		      == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
1900 		{
1901 		  if (TYPE_READONLY (TREE_TYPE (var)))
1902 		    type = TREE_TYPE (var);
1903 		  break;
1904 		}
1905 	}
1906     }
1907 
1908   if (type == error_mark_node)
1909     return false;
1910 
1911   /* Variables with const-qualified type having no mutable member
1912      are predetermined shared.  */
1913   if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
1914     return true;
1915 
1916   return false;
1917 }
1918 
1919 /* True if OpenMP sharing attribute of DECL is predetermined.  */
1920 
1921 enum omp_clause_default_kind
1922 cxx_omp_predetermined_sharing_1 (tree decl)
1923 {
1924   /* Static data members are predetermined shared.  */
1925   if (TREE_STATIC (decl))
1926     {
1927       tree ctx = CP_DECL_CONTEXT (decl);
1928       if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
1929 	return OMP_CLAUSE_DEFAULT_SHARED;
1930     }
1931 
1932   /* Const qualified vars having no mutable member are predetermined
1933      shared.  */
1934   if (cxx_omp_const_qual_no_mutable (decl))
1935     return OMP_CLAUSE_DEFAULT_SHARED;
1936 
1937   return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
1938 }
1939 
1940 /* Likewise, but also include the artificial vars.  We don't want to
1941    disallow the artificial vars being mentioned in explicit clauses,
1942    as we use artificial vars e.g. for loop constructs with random
1943    access iterators other than pointers, but during gimplification
1944    we want to treat them as predetermined.  */
1945 
1946 enum omp_clause_default_kind
1947 cxx_omp_predetermined_sharing (tree decl)
1948 {
1949   enum omp_clause_default_kind ret = cxx_omp_predetermined_sharing_1 (decl);
1950   if (ret != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
1951     return ret;
1952 
1953   /* Predetermine artificial variables holding integral values, those
1954      are usually result of gimplify_one_sizepos or SAVE_EXPR
1955      gimplification.  */
1956   if (VAR_P (decl)
1957       && DECL_ARTIFICIAL (decl)
1958       && INTEGRAL_TYPE_P (TREE_TYPE (decl))
1959       && !(DECL_LANG_SPECIFIC (decl)
1960 	   && DECL_OMP_PRIVATIZED_MEMBER (decl)))
1961     return OMP_CLAUSE_DEFAULT_SHARED;
1962 
1963   return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
1964 }
1965 
1966 /* Finalize an implicitly determined clause.  */
1967 
1968 void
1969 cxx_omp_finish_clause (tree c, gimple_seq *)
1970 {
1971   tree decl, inner_type;
1972   bool make_shared = false;
1973 
1974   if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
1975     return;
1976 
1977   decl = OMP_CLAUSE_DECL (c);
1978   decl = require_complete_type (decl);
1979   inner_type = TREE_TYPE (decl);
1980   if (decl == error_mark_node)
1981     make_shared = true;
1982   else if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
1983     inner_type = TREE_TYPE (inner_type);
1984 
1985   /* We're interested in the base element, not arrays.  */
1986   while (TREE_CODE (inner_type) == ARRAY_TYPE)
1987     inner_type = TREE_TYPE (inner_type);
1988 
1989   /* Check for special function availability by building a call to one.
1990      Save the results, because later we won't be in the right context
1991      for making these queries.  */
1992   if (!make_shared
1993       && CLASS_TYPE_P (inner_type)
1994       && cxx_omp_create_clause_info (c, inner_type, false, true, false, true))
1995     make_shared = true;
1996 
1997   if (make_shared)
1998     {
1999       OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
2000       OMP_CLAUSE_SHARED_FIRSTPRIVATE (c) = 0;
2001       OMP_CLAUSE_SHARED_READONLY (c) = 0;
2002     }
2003 }
2004 
2005 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
2006    disregarded in OpenMP construct, because it is going to be
2007    remapped during OpenMP lowering.  SHARED is true if DECL
2008    is going to be shared, false if it is going to be privatized.  */
2009 
2010 bool
2011 cxx_omp_disregard_value_expr (tree decl, bool shared)
2012 {
2013   return !shared
2014 	 && VAR_P (decl)
2015 	 && DECL_HAS_VALUE_EXPR_P (decl)
2016 	 && DECL_ARTIFICIAL (decl)
2017 	 && DECL_LANG_SPECIFIC (decl)
2018 	 && DECL_OMP_PRIVATIZED_MEMBER (decl);
2019 }
2020 
2021 /* Perform folding on expression X.  */
2022 
2023 tree
2024 cp_fully_fold (tree x)
2025 {
2026   if (processing_template_decl)
2027     return x;
2028   /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't
2029      have to call both.  */
2030   if (cxx_dialect >= cxx11)
2031     x = maybe_constant_value (x);
2032   return cp_fold (x);
2033 }
2034 
2035 /* Fold expression X which is used as an rvalue if RVAL is true.  */
2036 
2037 static tree
2038 cp_fold_maybe_rvalue (tree x, bool rval)
2039 {
2040   while (true)
2041     {
2042       x = cp_fold (x);
2043       if (rval && DECL_P (x)
2044 	  && TREE_CODE (TREE_TYPE (x)) != REFERENCE_TYPE)
2045 	{
2046 	  tree v = decl_constant_value (x);
2047 	  if (v != x && v != error_mark_node)
2048 	    {
2049 	      x = v;
2050 	      continue;
2051 	    }
2052 	}
2053       break;
2054     }
2055   return x;
2056 }
2057 
2058 /* Fold expression X which is used as an rvalue.  */
2059 
2060 static tree
2061 cp_fold_rvalue (tree x)
2062 {
2063   return cp_fold_maybe_rvalue (x, true);
2064 }
2065 
2066 /* c-common interface to cp_fold.  If IN_INIT, this is in a static initializer
2067    and certain changes are made to the folding done.  Or should be (FIXME).  We
2068    never touch maybe_const, as it is only used for the C front-end
2069    C_MAYBE_CONST_EXPR.  */
2070 
2071 tree
2072 c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/)
2073 {
2074   /* c_fully_fold is only used on rvalues, and we need to fold CONST_DECL to
2075      INTEGER_CST.  */
2076   return cp_fold_rvalue (x);
2077 }
2078 
2079 static GTY((deletable)) hash_map<tree, tree> *fold_cache;
2080 
2081 /* Dispose of the whole FOLD_CACHE.  */
2082 
2083 void
2084 clear_fold_cache (void)
2085 {
2086   if (fold_cache != NULL)
2087     fold_cache->empty ();
2088 }
2089 
2090 /*  This function tries to fold an expression X.
2091     To avoid combinatorial explosion, folding results are kept in fold_cache.
2092     If we are processing a template or X is invalid, we don't fold at all.
2093     For performance reasons we don't cache expressions representing a
2094     declaration or constant.
2095     Function returns X or its folded variant.  */
2096 
2097 static tree
2098 cp_fold (tree x)
2099 {
2100   tree op0, op1, op2, op3;
2101   tree org_x = x, r = NULL_TREE;
2102   enum tree_code code;
2103   location_t loc;
2104   bool rval_ops = true;
2105 
2106   if (!x || x == error_mark_node)
2107     return x;
2108 
2109   if (processing_template_decl
2110       || (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node)))
2111     return x;
2112 
2113   /* Don't bother to cache DECLs or constants.  */
2114   if (DECL_P (x) || CONSTANT_CLASS_P (x))
2115     return x;
2116 
2117   if (fold_cache == NULL)
2118     fold_cache = hash_map<tree, tree>::create_ggc (101);
2119 
2120   if (tree *cached = fold_cache->get (x))
2121     return *cached;
2122 
2123   code = TREE_CODE (x);
2124   switch (code)
2125     {
2126     case CLEANUP_POINT_EXPR:
2127       /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
2128 	 effects.  */
2129       r = cp_fold_rvalue (TREE_OPERAND (x, 0));
2130       if (!TREE_SIDE_EFFECTS (r))
2131 	x = r;
2132       break;
2133 
2134     case SIZEOF_EXPR:
2135       x = fold_sizeof_expr (x);
2136       break;
2137 
2138     case VIEW_CONVERT_EXPR:
2139       rval_ops = false;
2140       /* FALLTHRU */
2141     case CONVERT_EXPR:
2142     case NOP_EXPR:
2143     case NON_LVALUE_EXPR:
2144 
2145       if (VOID_TYPE_P (TREE_TYPE (x)))
2146 	return x;
2147 
2148       loc = EXPR_LOCATION (x);
2149       op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2150 
2151       if (code == CONVERT_EXPR
2152 	  && SCALAR_TYPE_P (TREE_TYPE (x))
2153 	  && op0 != void_node)
2154 	/* During parsing we used convert_to_*_nofold; re-convert now using the
2155 	   folding variants, since fold() doesn't do those transformations.  */
2156 	x = fold (convert (TREE_TYPE (x), op0));
2157       else if (op0 != TREE_OPERAND (x, 0))
2158 	{
2159 	  if (op0 == error_mark_node)
2160 	    x = error_mark_node;
2161 	  else
2162 	    x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2163 	}
2164       else
2165 	x = fold (x);
2166 
2167       /* Conversion of an out-of-range value has implementation-defined
2168 	 behavior; the language considers it different from arithmetic
2169 	 overflow, which is undefined.  */
2170       if (TREE_CODE (op0) == INTEGER_CST
2171 	  && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0))
2172 	TREE_OVERFLOW (x) = false;
2173 
2174       break;
2175 
2176     case INDIRECT_REF:
2177       /* We don't need the decltype(auto) obfuscation anymore.  */
2178       if (REF_PARENTHESIZED_P (x))
2179 	{
2180 	  tree p = maybe_undo_parenthesized_ref (x);
2181 	  return cp_fold (p);
2182 	}
2183       goto unary;
2184 
2185     case ADDR_EXPR:
2186       loc = EXPR_LOCATION (x);
2187       op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), false);
2188 
2189       /* Cope with user tricks that amount to offsetof.  */
2190       if (op0 != error_mark_node
2191 	  && TREE_CODE (TREE_TYPE (op0)) != FUNCTION_TYPE
2192 	  && TREE_CODE (TREE_TYPE (op0)) != METHOD_TYPE)
2193 	{
2194 	  tree val = get_base_address (op0);
2195 	  if (val
2196 	      && INDIRECT_REF_P (val)
2197 	      && COMPLETE_TYPE_P (TREE_TYPE (val))
2198 	      && TREE_CONSTANT (TREE_OPERAND (val, 0)))
2199 	    {
2200 	      val = TREE_OPERAND (val, 0);
2201 	      STRIP_NOPS (val);
2202 	      if (TREE_CODE (val) == INTEGER_CST)
2203 		return fold_offsetof (op0, TREE_TYPE (x));
2204 	    }
2205 	}
2206       goto finish_unary;
2207 
2208     case REALPART_EXPR:
2209     case IMAGPART_EXPR:
2210       rval_ops = false;
2211       /* FALLTHRU */
2212     case CONJ_EXPR:
2213     case FIX_TRUNC_EXPR:
2214     case FLOAT_EXPR:
2215     case NEGATE_EXPR:
2216     case ABS_EXPR:
2217     case BIT_NOT_EXPR:
2218     case TRUTH_NOT_EXPR:
2219     case FIXED_CONVERT_EXPR:
2220     unary:
2221 
2222       loc = EXPR_LOCATION (x);
2223       op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2224 
2225     finish_unary:
2226       if (op0 != TREE_OPERAND (x, 0))
2227 	{
2228 	  if (op0 == error_mark_node)
2229 	    x = error_mark_node;
2230 	  else
2231 	    {
2232 	      x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2233 	      if (code == INDIRECT_REF
2234 		  && (INDIRECT_REF_P (x) || TREE_CODE (x) == MEM_REF))
2235 		{
2236 		  TREE_READONLY (x) = TREE_READONLY (org_x);
2237 		  TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
2238 		  TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
2239 		}
2240 	    }
2241 	}
2242       else
2243 	x = fold (x);
2244 
2245       gcc_assert (TREE_CODE (x) != COND_EXPR
2246 		  || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
2247       break;
2248 
2249     case UNARY_PLUS_EXPR:
2250       op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
2251       if (op0 == error_mark_node)
2252 	x = error_mark_node;
2253       else
2254 	x = fold_convert (TREE_TYPE (x), op0);
2255       break;
2256 
2257     case POSTDECREMENT_EXPR:
2258     case POSTINCREMENT_EXPR:
2259     case INIT_EXPR:
2260     case PREDECREMENT_EXPR:
2261     case PREINCREMENT_EXPR:
2262     case COMPOUND_EXPR:
2263     case MODIFY_EXPR:
2264       rval_ops = false;
2265       /* FALLTHRU */
2266     case POINTER_PLUS_EXPR:
2267     case PLUS_EXPR:
2268     case MINUS_EXPR:
2269     case MULT_EXPR:
2270     case TRUNC_DIV_EXPR:
2271     case CEIL_DIV_EXPR:
2272     case FLOOR_DIV_EXPR:
2273     case ROUND_DIV_EXPR:
2274     case TRUNC_MOD_EXPR:
2275     case CEIL_MOD_EXPR:
2276     case ROUND_MOD_EXPR:
2277     case RDIV_EXPR:
2278     case EXACT_DIV_EXPR:
2279     case MIN_EXPR:
2280     case MAX_EXPR:
2281     case LSHIFT_EXPR:
2282     case RSHIFT_EXPR:
2283     case LROTATE_EXPR:
2284     case RROTATE_EXPR:
2285     case BIT_AND_EXPR:
2286     case BIT_IOR_EXPR:
2287     case BIT_XOR_EXPR:
2288     case TRUTH_AND_EXPR:
2289     case TRUTH_ANDIF_EXPR:
2290     case TRUTH_OR_EXPR:
2291     case TRUTH_ORIF_EXPR:
2292     case TRUTH_XOR_EXPR:
2293     case LT_EXPR: case LE_EXPR:
2294     case GT_EXPR: case GE_EXPR:
2295     case EQ_EXPR: case NE_EXPR:
2296     case UNORDERED_EXPR: case ORDERED_EXPR:
2297     case UNLT_EXPR: case UNLE_EXPR:
2298     case UNGT_EXPR: case UNGE_EXPR:
2299     case UNEQ_EXPR: case LTGT_EXPR:
2300     case RANGE_EXPR: case COMPLEX_EXPR:
2301 
2302       loc = EXPR_LOCATION (x);
2303       op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2304       op1 = cp_fold_rvalue (TREE_OPERAND (x, 1));
2305 
2306       if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
2307 	{
2308 	  if (op0 == error_mark_node || op1 == error_mark_node)
2309 	    x = error_mark_node;
2310 	  else
2311 	    x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1);
2312 	}
2313       else
2314 	x = fold (x);
2315 
2316       if (TREE_NO_WARNING (org_x)
2317 	  && warn_nonnull_compare
2318 	  && COMPARISON_CLASS_P (org_x))
2319 	{
2320 	  if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST)
2321 	    ;
2322 	  else if (COMPARISON_CLASS_P (x))
2323 	    TREE_NO_WARNING (x) = 1;
2324 	  /* Otherwise give up on optimizing these, let GIMPLE folders
2325 	     optimize those later on.  */
2326 	  else if (op0 != TREE_OPERAND (org_x, 0)
2327 		   || op1 != TREE_OPERAND (org_x, 1))
2328 	    {
2329 	      x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1);
2330 	      TREE_NO_WARNING (x) = 1;
2331 	    }
2332 	  else
2333 	    x = org_x;
2334 	}
2335       break;
2336 
2337     case VEC_COND_EXPR:
2338     case COND_EXPR:
2339       loc = EXPR_LOCATION (x);
2340       op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
2341       op1 = cp_fold (TREE_OPERAND (x, 1));
2342       op2 = cp_fold (TREE_OPERAND (x, 2));
2343 
2344       if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
2345 	{
2346 	  warning_sentinel s (warn_int_in_bool_context);
2347 	  if (!VOID_TYPE_P (TREE_TYPE (op1)))
2348 	    op1 = cp_truthvalue_conversion (op1);
2349 	  if (!VOID_TYPE_P (TREE_TYPE (op2)))
2350 	    op2 = cp_truthvalue_conversion (op2);
2351 	}
2352       else if (VOID_TYPE_P (TREE_TYPE (x)))
2353 	{
2354 	  if (TREE_CODE (op0) == INTEGER_CST)
2355 	    {
2356 	      /* If the condition is constant, fold can fold away
2357 		 the COND_EXPR.  If some statement-level uses of COND_EXPR
2358 		 have one of the branches NULL, avoid folding crash.  */
2359 	      if (!op1)
2360 		op1 = build_empty_stmt (loc);
2361 	      if (!op2)
2362 		op2 = build_empty_stmt (loc);
2363 	    }
2364 	  else
2365 	    {
2366 	      /* Otherwise, don't bother folding a void condition, since
2367 		 it can't produce a constant value.  */
2368 	      if (op0 != TREE_OPERAND (x, 0)
2369 		  || op1 != TREE_OPERAND (x, 1)
2370 		  || op2 != TREE_OPERAND (x, 2))
2371 		x = build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2372 	      break;
2373 	    }
2374 	}
2375 
2376       if (op0 != TREE_OPERAND (x, 0)
2377 	  || op1 != TREE_OPERAND (x, 1)
2378 	  || op2 != TREE_OPERAND (x, 2))
2379 	{
2380 	  if (op0 == error_mark_node
2381 	      || op1 == error_mark_node
2382 	      || op2 == error_mark_node)
2383 	    x = error_mark_node;
2384 	  else
2385 	    x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2386 	}
2387       else
2388 	x = fold (x);
2389 
2390       /* A COND_EXPR might have incompatible types in branches if one or both
2391 	 arms are bitfields.  If folding exposed such a branch, fix it up.  */
2392       if (TREE_CODE (x) != code
2393 	  && !useless_type_conversion_p (TREE_TYPE (org_x), TREE_TYPE (x)))
2394 	x = fold_convert (TREE_TYPE (org_x), x);
2395 
2396       break;
2397 
2398     case CALL_EXPR:
2399       {
2400 	int i, m, sv = optimize, nw = sv, changed = 0;
2401 	tree callee = get_callee_fndecl (x);
2402 
2403 	/* Some built-in function calls will be evaluated at compile-time in
2404 	   fold ().  Set optimize to 1 when folding __builtin_constant_p inside
2405 	   a constexpr function so that fold_builtin_1 doesn't fold it to 0.  */
2406 	if (callee && DECL_BUILT_IN (callee) && !optimize
2407 	    && DECL_IS_BUILTIN_CONSTANT_P (callee)
2408 	    && current_function_decl
2409 	    && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
2410 	  nw = 1;
2411 
2412 	x = copy_node (x);
2413 
2414 	m = call_expr_nargs (x);
2415 	for (i = 0; i < m; i++)
2416 	  {
2417 	    r = cp_fold (CALL_EXPR_ARG (x, i));
2418 	    if (r != CALL_EXPR_ARG (x, i))
2419 	      {
2420 		if (r == error_mark_node)
2421 		  {
2422 		    x = error_mark_node;
2423 		    break;
2424 		  }
2425 		changed = 1;
2426 	      }
2427 	    CALL_EXPR_ARG (x, i) = r;
2428 	  }
2429 	if (x == error_mark_node)
2430 	  break;
2431 
2432 	optimize = nw;
2433 	r = fold (x);
2434 	optimize = sv;
2435 
2436 	if (TREE_CODE (r) != CALL_EXPR)
2437 	  {
2438 	    x = cp_fold (r);
2439 	    break;
2440 	  }
2441 
2442 	optimize = nw;
2443 
2444 	/* Invoke maybe_constant_value for functions declared
2445 	   constexpr and not called with AGGR_INIT_EXPRs.
2446 	   TODO:
2447 	   Do constexpr expansion of expressions where the call itself is not
2448 	   constant, but the call followed by an INDIRECT_REF is.  */
2449 	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
2450 	    && !flag_no_inline)
2451 	  r = maybe_constant_value (x);
2452 	optimize = sv;
2453 
2454         if (TREE_CODE (r) != CALL_EXPR)
2455 	  {
2456 	    if (DECL_CONSTRUCTOR_P (callee))
2457 	      {
2458 		loc = EXPR_LOCATION (x);
2459 		tree s = build_fold_indirect_ref_loc (loc,
2460 						      CALL_EXPR_ARG (x, 0));
2461 		r = build2_loc (loc, INIT_EXPR, TREE_TYPE (s), s, r);
2462 	      }
2463 	    x = r;
2464 	    break;
2465 	  }
2466 
2467 	if (!changed)
2468 	  x = org_x;
2469 	break;
2470       }
2471 
2472     case CONSTRUCTOR:
2473       {
2474 	unsigned i;
2475 	constructor_elt *p;
2476 	vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x);
2477 	vec<constructor_elt, va_gc> *nelts = NULL;
2478 	FOR_EACH_VEC_SAFE_ELT (elts, i, p)
2479 	  {
2480 	    tree op = cp_fold (p->value);
2481 	    if (op != p->value)
2482 	      {
2483 		if (op == error_mark_node)
2484 		  {
2485 		    x = error_mark_node;
2486 		    vec_free (nelts);
2487 		    break;
2488 		  }
2489 		if (nelts == NULL)
2490 		  nelts = elts->copy ();
2491 		(*nelts)[i].value = op;
2492 	      }
2493 	  }
2494 	if (nelts)
2495 	  x = build_constructor (TREE_TYPE (x), nelts);
2496 	break;
2497       }
2498     case TREE_VEC:
2499       {
2500 	bool changed = false;
2501 	vec<tree, va_gc> *vec = make_tree_vector ();
2502 	int i, n = TREE_VEC_LENGTH (x);
2503 	vec_safe_reserve (vec, n);
2504 
2505 	for (i = 0; i < n; i++)
2506 	  {
2507 	    tree op = cp_fold (TREE_VEC_ELT (x, i));
2508 	    vec->quick_push (op);
2509 	    if (op != TREE_VEC_ELT (x, i))
2510 	      changed = true;
2511 	  }
2512 
2513 	if (changed)
2514 	  {
2515 	    r = copy_node (x);
2516 	    for (i = 0; i < n; i++)
2517 	      TREE_VEC_ELT (r, i) = (*vec)[i];
2518 	    x = r;
2519 	  }
2520 
2521 	release_tree_vector (vec);
2522       }
2523 
2524       break;
2525 
2526     case ARRAY_REF:
2527     case ARRAY_RANGE_REF:
2528 
2529       loc = EXPR_LOCATION (x);
2530       op0 = cp_fold (TREE_OPERAND (x, 0));
2531       op1 = cp_fold (TREE_OPERAND (x, 1));
2532       op2 = cp_fold (TREE_OPERAND (x, 2));
2533       op3 = cp_fold (TREE_OPERAND (x, 3));
2534 
2535       if (op0 != TREE_OPERAND (x, 0)
2536 	  || op1 != TREE_OPERAND (x, 1)
2537 	  || op2 != TREE_OPERAND (x, 2)
2538 	  || op3 != TREE_OPERAND (x, 3))
2539 	{
2540 	  if (op0 == error_mark_node
2541 	      || op1 == error_mark_node
2542 	      || op2 == error_mark_node
2543 	      || op3 == error_mark_node)
2544 	    x = error_mark_node;
2545 	  else
2546 	    {
2547 	      x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3);
2548 	      TREE_READONLY (x) = TREE_READONLY (org_x);
2549 	      TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
2550 	      TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
2551 	    }
2552 	}
2553 
2554       x = fold (x);
2555       break;
2556 
2557     default:
2558       return org_x;
2559     }
2560 
2561   fold_cache->put (org_x, x);
2562   /* Prevent that we try to fold an already folded result again.  */
2563   if (x != org_x)
2564     fold_cache->put (x, x);
2565 
2566   return x;
2567 }
2568 
2569 #include "gt-cp-cp-gimplify.h"
2570