xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/cp/cp-gimplify.c (revision 946379e7b37692fc43f68eb0d1c10daa0a7f3b6c)
1 /* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.
2 
3    Copyright (C) 2002-2013 Free Software Foundation, Inc.
4    Contributed by Jason Merrill <jason@redhat.com>
5 
6 This file is part of GCC.
7 
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12 
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16 for more details.
17 
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3.  If not see
20 <http://www.gnu.org/licenses/>.  */
21 
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "cp-tree.h"
28 #include "c-family/c-common.h"
29 #include "tree-iterator.h"
30 #include "gimple.h"
31 #include "hashtab.h"
32 #include "pointer-set.h"
33 #include "flags.h"
34 #include "splay-tree.h"
35 
36 /* Forward declarations.  */
37 
38 static tree cp_genericize_r (tree *, int *, void *);
39 static void cp_genericize_tree (tree*);
40 
41 /* Local declarations.  */
42 
43 enum bc_t { bc_break = 0, bc_continue = 1 };
44 
/* Stack of labels which are targets for "break" or "continue",
   linked through DECL_CHAIN.  */
47 static tree bc_label[2];
48 
49 /* Begin a scope which can be exited by a break or continue statement.  BC
50    indicates which.
51 
52    Just creates a label with location LOCATION and pushes it into the current
53    context.  */
54 
55 static tree
56 begin_bc_block (enum bc_t bc, location_t location)
57 {
58   tree label = create_artificial_label (location);
59   DECL_CHAIN (label) = bc_label[bc];
60   bc_label[bc] = label;
61   return label;
62 }
63 
64 /* Finish a scope which can be exited by a break or continue statement.
65    LABEL was returned from the most recent call to begin_bc_block.  BLOCK is
66    an expression for the contents of the scope.
67 
68    If we saw a break (or continue) in the scope, append a LABEL_EXPR to
69    BLOCK.  Otherwise, just forget the label.  */
70 
71 static void
72 finish_bc_block (tree *block, enum bc_t bc, tree label)
73 {
74   gcc_assert (label == bc_label[bc]);
75 
76   if (TREE_USED (label))
77     append_to_statement_list (build1 (LABEL_EXPR, void_type_node, label),
78 			      block);
79 
80   bc_label[bc] = DECL_CHAIN (label);
81   DECL_CHAIN (label) = NULL_TREE;
82 }
83 
84 /* Get the LABEL_EXPR to represent a break or continue statement
85    in the current block scope.  BC indicates which.  */
86 
87 static tree
88 get_bc_label (enum bc_t bc)
89 {
90   tree label = bc_label[bc];
91 
92   /* Mark the label used for finish_bc_block.  */
93   TREE_USED (label) = 1;
94   return label;
95 }
96 
97 /* Genericize a TRY_BLOCK.  */
98 
99 static void
100 genericize_try_block (tree *stmt_p)
101 {
102   tree body = TRY_STMTS (*stmt_p);
103   tree cleanup = TRY_HANDLERS (*stmt_p);
104 
105   *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
106 }
107 
108 /* Genericize a HANDLER by converting to a CATCH_EXPR.  */
109 
110 static void
111 genericize_catch_block (tree *stmt_p)
112 {
113   tree type = HANDLER_TYPE (*stmt_p);
114   tree body = HANDLER_BODY (*stmt_p);
115 
116   /* FIXME should the caught type go in TREE_TYPE?  */
117   *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
118 }
119 
120 /* A terser interface for building a representation of an exception
121    specification.  */
122 
123 static tree
124 build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
125 {
126   tree t;
127 
128   /* FIXME should the allowed types go in TREE_TYPE?  */
129   t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
130   append_to_statement_list (failure, &EH_FILTER_FAILURE (t));
131 
132   t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
133   append_to_statement_list (body, &TREE_OPERAND (t, 0));
134 
135   return t;
136 }
137 
138 /* Genericize an EH_SPEC_BLOCK by converting it to a
139    TRY_CATCH_EXPR/EH_FILTER_EXPR pair.  */
140 
141 static void
142 genericize_eh_spec_block (tree *stmt_p)
143 {
144   tree body = EH_SPEC_STMTS (*stmt_p);
145   tree allowed = EH_SPEC_RAISES (*stmt_p);
146   tree failure = build_call_n (call_unexpected_node, 1, build_exc_ptr ());
147 
148   *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
149   TREE_NO_WARNING (*stmt_p) = true;
150   TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
151 }
152 
153 /* Genericize an IF_STMT by turning it into a COND_EXPR.  */
154 
155 static void
156 genericize_if_stmt (tree *stmt_p)
157 {
158   tree stmt, cond, then_, else_;
159   location_t locus = EXPR_LOCATION (*stmt_p);
160 
161   stmt = *stmt_p;
162   cond = IF_COND (stmt);
163   then_ = THEN_CLAUSE (stmt);
164   else_ = ELSE_CLAUSE (stmt);
165 
166   if (!then_)
167     then_ = build_empty_stmt (locus);
168   if (!else_)
169     else_ = build_empty_stmt (locus);
170 
171   if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
172     stmt = then_;
173   else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
174     stmt = else_;
175   else
176     stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
177   if (CAN_HAVE_LOCATION_P (stmt) && !EXPR_HAS_LOCATION (stmt))
178     SET_EXPR_LOCATION (stmt, locus);
179   *stmt_p = stmt;
180 }
181 
/* Build a generic representation of one of the C loop forms.  COND is the
   loop condition or NULL_TREE.  BODY is the (possibly compound) statement
   controlled by the loop.  INCR is the increment expression of a for-loop,
   or NULL_TREE.  COND_IS_FIRST indicates whether the condition is
   evaluated before the loop body as in while and for loops, or after the
   loop body as in do-while loops.

   The loop is lowered to LABEL_EXPR/GOTO_EXPR form (not LOOP_EXPR), with
   the condition test canonicalized to the end of the body.  The loop's
   subtrees are genericized here via cp_genericize_r, so *WALK_SUBTREES is
   cleared to stop the caller's walk from revisiting them.  DATA is the
   cp_genericize_data context threaded through the walk.  */

static void
genericize_cp_loop (tree *stmt_p, location_t start_locus, tree cond, tree body,
		    tree incr, bool cond_is_first, int *walk_subtrees,
		    void *data)
{
  tree blab, clab;
  tree entry = NULL, exit = NULL, t;
  tree stmt_list = NULL;

  /* Open break/continue scopes so break/continue inside BODY resolve to
     this loop's labels.  */
  blab = begin_bc_block (bc_break, start_locus);
  clab = begin_bc_block (bc_continue, start_locus);

  if (incr && EXPR_P (incr))
    SET_EXPR_LOCATION (incr, start_locus);

  /* Genericize the pieces ourselves...  */
  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&incr, cp_genericize_r, data, NULL);
  /* ...and tell the caller not to walk them again.  */
  *walk_subtrees = 0;

  /* If condition is zero don't generate a loop construct.  */
  if (cond && integer_zerop (cond))
    {
      if (cond_is_first)
	{
	  /* while(0)/for(;0;): body is unreachable but may still contain
	     labels; jump straight to the break label.  */
	  t = build1_loc (start_locus, GOTO_EXPR, void_type_node,
			  get_bc_label (bc_break));
	  append_to_statement_list (t, &stmt_list);
	}
    }
  else
    {
      /* Expand to gotos, just like c_finish_loop.  TODO: Use LOOP_EXPR.  */
      tree top = build1 (LABEL_EXPR, void_type_node,
			 create_artificial_label (start_locus));

      /* If we have an exit condition, then we build an IF with gotos either
	 out of the loop, or to the top of it.  If there's no exit condition,
	 then we just build a jump back to the top.  */
      exit = build1 (GOTO_EXPR, void_type_node, LABEL_EXPR_LABEL (top));

      if (cond && !integer_nonzerop (cond))
	{
	  /* Canonicalize the loop condition to the end.  This means
	     generating a branch to the loop condition.  Reuse the
	     continue label, if possible.  */
	  if (cond_is_first)
	    {
	      if (incr)
		{
		  /* With an increment, continue must run INCR before the
		     test, so the test needs its own entry label.  */
		  entry = build1 (LABEL_EXPR, void_type_node,
				  create_artificial_label (start_locus));
		  t = build1_loc (start_locus, GOTO_EXPR, void_type_node,
				  LABEL_EXPR_LABEL (entry));
		}
	      else
		t = build1_loc (start_locus, GOTO_EXPR, void_type_node,
				get_bc_label (bc_continue));
	      append_to_statement_list (t, &stmt_list);
	    }

	  /* exit becomes: if (cond) goto top; else goto break-label;  */
	  t = build1 (GOTO_EXPR, void_type_node, get_bc_label (bc_break));
	  exit = fold_build3_loc (start_locus,
				  COND_EXPR, void_type_node, cond, exit, t);
	}

      append_to_statement_list (top, &stmt_list);
    }

  /* Assemble: [pre-jump] top: body continue: incr [entry:] exit break:  */
  append_to_statement_list (body, &stmt_list);
  finish_bc_block (&stmt_list, bc_continue, clab);
  append_to_statement_list (incr, &stmt_list);
  append_to_statement_list (entry, &stmt_list);
  append_to_statement_list (exit, &stmt_list);
  finish_bc_block (&stmt_list, bc_break, blab);

  if (stmt_list == NULL_TREE)
    stmt_list = build1 (NOP_EXPR, void_type_node, integer_zero_node);

  *stmt_p = stmt_list;
}
270 
271 /* Genericize a FOR_STMT node *STMT_P.  */
272 
273 static void
274 genericize_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
275 {
276   tree stmt = *stmt_p;
277   tree expr = NULL;
278   tree loop;
279   tree init = FOR_INIT_STMT (stmt);
280 
281   if (init)
282     {
283       cp_walk_tree (&init, cp_genericize_r, data, NULL);
284       append_to_statement_list (init, &expr);
285     }
286 
287   genericize_cp_loop (&loop, EXPR_LOCATION (stmt), FOR_COND (stmt),
288 		      FOR_BODY (stmt), FOR_EXPR (stmt), 1, walk_subtrees, data);
289   append_to_statement_list (loop, &expr);
290   *stmt_p = expr;
291 }
292 
293 /* Genericize a WHILE_STMT node *STMT_P.  */
294 
295 static void
296 genericize_while_stmt (tree *stmt_p, int *walk_subtrees, void *data)
297 {
298   tree stmt = *stmt_p;
299   genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), WHILE_COND (stmt),
300 		      WHILE_BODY (stmt), NULL_TREE, 1, walk_subtrees, data);
301 }
302 
303 /* Genericize a DO_STMT node *STMT_P.  */
304 
305 static void
306 genericize_do_stmt (tree *stmt_p, int *walk_subtrees, void *data)
307 {
308   tree stmt = *stmt_p;
309   genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), DO_COND (stmt),
310 		      DO_BODY (stmt), NULL_TREE, 0, walk_subtrees, data);
311 }
312 
313 /* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR.  */
314 
315 static void
316 genericize_switch_stmt (tree *stmt_p, int *walk_subtrees, void *data)
317 {
318   tree stmt = *stmt_p;
319   tree break_block, body, cond, type;
320   location_t stmt_locus = EXPR_LOCATION (stmt);
321 
322   break_block = begin_bc_block (bc_break, stmt_locus);
323 
324   body = SWITCH_STMT_BODY (stmt);
325   if (!body)
326     body = build_empty_stmt (stmt_locus);
327   cond = SWITCH_STMT_COND (stmt);
328   type = SWITCH_STMT_TYPE (stmt);
329 
330   cp_walk_tree (&body, cp_genericize_r, data, NULL);
331   cp_walk_tree (&cond, cp_genericize_r, data, NULL);
332   cp_walk_tree (&type, cp_genericize_r, data, NULL);
333   *walk_subtrees = 0;
334 
335   *stmt_p = build3_loc (stmt_locus, SWITCH_EXPR, type, cond, body, NULL_TREE);
336   finish_bc_block (stmt_p, bc_break, break_block);
337 }
338 
339 /* Genericize a CONTINUE_STMT node *STMT_P.  */
340 
341 static void
342 genericize_continue_stmt (tree *stmt_p)
343 {
344   tree stmt_list = NULL;
345   tree pred = build_predict_expr (PRED_CONTINUE, NOT_TAKEN);
346   tree label = get_bc_label (bc_continue);
347   location_t location = EXPR_LOCATION (*stmt_p);
348   tree jump = build1_loc (location, GOTO_EXPR, void_type_node, label);
349   append_to_statement_list (pred, &stmt_list);
350   append_to_statement_list (jump, &stmt_list);
351   *stmt_p = stmt_list;
352 }
353 
354 /* Genericize a BREAK_STMT node *STMT_P.  */
355 
356 static void
357 genericize_break_stmt (tree *stmt_p)
358 {
359   tree label = get_bc_label (bc_break);
360   location_t location = EXPR_LOCATION (*stmt_p);
361   *stmt_p = build1_loc (location, GOTO_EXPR, void_type_node, label);
362 }
363 
/* Genericize an OMP_FOR node *STMT_P.  Only the continue scope is handled
   here (continue in the body jumps to the end of the body); the OMP_FOR
   node itself survives into gimplification.  *WALK_SUBTREES is cleared
   because all operands are walked here.  */

static void
genericize_omp_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  location_t locus = EXPR_LOCATION (stmt);
  /* Open a continue scope spanning the loop body.  */
  tree clab = begin_bc_block (bc_continue, locus);

  cp_walk_tree (&OMP_FOR_BODY (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INIT (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_COND (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INCR (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  /* Emit the continue label (if used) at the end of the body.  */
  finish_bc_block (&OMP_FOR_BODY (stmt), bc_continue, clab);
}
383 
384 /* Hook into the middle of gimplifying an OMP_FOR node.  */
385 
386 static enum gimplify_status
387 cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
388 {
389   tree for_stmt = *expr_p;
390   gimple_seq seq = NULL;
391 
392   /* Protect ourselves from recursion.  */
393   if (OMP_FOR_GIMPLIFYING_P (for_stmt))
394     return GS_UNHANDLED;
395   OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;
396 
397   gimplify_and_add (for_stmt, &seq);
398   gimple_seq_add_seq (pre_p, seq);
399 
400   OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;
401 
402   return GS_ALL_DONE;
403 }
404 
405 /*  Gimplify an EXPR_STMT node.  */
406 
407 static void
408 gimplify_expr_stmt (tree *stmt_p)
409 {
410   tree stmt = EXPR_STMT_EXPR (*stmt_p);
411 
412   if (stmt == error_mark_node)
413     stmt = NULL;
414 
415   /* Gimplification of a statement expression will nullify the
416      statement if all its side effects are moved to *PRE_P and *POST_P.
417 
418      In this case we will not want to emit the gimplified statement.
419      However, we may still want to emit a warning, so we do that before
420      gimplification.  */
421   if (stmt && warn_unused_value)
422     {
423       if (!TREE_SIDE_EFFECTS (stmt))
424 	{
425 	  if (!IS_EMPTY_STMT (stmt)
426 	      && !VOID_TYPE_P (TREE_TYPE (stmt))
427 	      && !TREE_NO_WARNING (stmt))
428 	    warning (OPT_Wunused_value, "statement with no effect");
429 	}
430       else
431 	warn_if_unused_value (stmt, input_location);
432     }
433 
434   if (stmt == NULL_TREE)
435     stmt = alloc_stmt_list ();
436 
437   *stmt_p = stmt;
438 }
439 
/* Gimplify initialization from an AGGR_INIT_EXPR.  *EXPR_P is an
   INIT_EXPR; if its RHS (possibly inside a TARGET_EXPR and/or a chain of
   COMPOUND_EXPRs) is an AGGR_INIT_EXPR or VEC_INIT_EXPR, rewrite that
   node to initialize the LHS directly and replace *EXPR_P with the RHS.
   Otherwise *EXPR_P is left unchanged.  */

static void
cp_gimplify_init_expr (tree *expr_p)
{
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;

  /* What about code that pulls out the temp and uses it elsewhere?  I
     think that such code never uses the TARGET_EXPR as an initializer.  If
     I'm wrong, we'll abort because the temp won't have any RTL.  In that
     case, I guess we'll need to replace references somehow.  */
  if (TREE_CODE (from) == TARGET_EXPR)
    from = TARGET_EXPR_INITIAL (from);

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  */
  for (t = from; t; )
    {
      /* SUB is the candidate initializer: the final operand of the
	 COMPOUND_EXPR chain, or T itself.  */
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
	 replace the slot operand with our target.

	 Should we add a target parm to gimplify_expr instead?  No, as in this
	 case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR
	  || TREE_CODE (sub) == VEC_INIT_EXPR)
	{
	  if (TREE_CODE (sub) == AGGR_INIT_EXPR)
	    AGGR_INIT_EXPR_SLOT (sub) = to;
	  else
	    VEC_INIT_EXPR_SLOT (sub) = to;
	  *expr_p = from;

	  /* The initialization is now a side-effect, so the container can
	     become void.  */
	  if (from != sub)
	    TREE_TYPE (from) = void_type_node;
	}

      /* Advance along the COMPOUND_EXPR chain; T == SUB means we reached
	 the last operand.  */
      if (t == sub)
	break;
      else
	t = TREE_OPERAND (t, 1);
    }

}
489 
/* Gimplify a MUST_NOT_THROW_EXPR.  Lowers *EXPR_P into a GIMPLE_TRY
   whose catch handler calls terminate(), appended to PRE_P.  If
   voidify_wrapper_expr produced a temporary holding the wrapper's value,
   *EXPR_P becomes that temporary and GS_OK is returned; otherwise *EXPR_P
   is cleared and GS_ALL_DONE is returned.  */

static enum gimplify_status
gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree stmt = *expr_p;
  /* If the wrapper yields a value, TEMP is the variable that receives it.  */
  tree temp = voidify_wrapper_expr (stmt, NULL);
  tree body = TREE_OPERAND (stmt, 0);
  gimple_seq try_ = NULL;
  gimple_seq catch_ = NULL;
  gimple mnt;

  gimplify_and_add (body, &try_);
  /* Any exception escaping BODY must call terminate().  */
  mnt = gimple_build_eh_must_not_throw (terminate_node);
  gimple_seq_add_stmt_without_update (&catch_, mnt);
  mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);

  gimple_seq_add_stmt_without_update (pre_p, mnt);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL;
  return GS_ALL_DONE;
}
517 
/* Do C++-specific gimplification.  Args are as for gimplify_expr.
   Lowers C++-only tree codes to GENERIC/GIMPLE; anything not handled
   here is delegated to c_gimplify_expr.  Returns a gimplify_status
   cast to int.  */

int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  /* For statement codes, temporarily install the statement's own
     full-expression flag; restored at the end of this function.  */
  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
	= STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case PTRMEM_CST:
      /* Lower a pointer-to-member constant to its representation.  */
      *expr_p = cplus_expand_constant (*expr_p);
      ret = GS_OK;
      break;

    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
	/* Expand array initialization into an explicit loop via
	   build_vec_init, then genericize the result.  */
	location_t loc = input_location;
	tree init = VEC_INIT_EXPR_INIT (*expr_p);
	int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
	gcc_assert (EXPR_HAS_LOCATION (*expr_p));
	input_location = EXPR_LOCATION (*expr_p);
	*expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
				  init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
				  from_array,
				  tf_warning_or_error);
	cp_genericize_tree (expr_p);
	ret = GS_OK;
	input_location = loc;
      }
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
	 THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

      /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
	 LHS of an assignment might also be involved in the RHS, as in bug
	 25979.  */
    case INIT_EXPR:
      cp_gimplify_init_expr (expr_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
	return GS_OK;
      /* Otherwise fall through.  */
    case MODIFY_EXPR:
      {
	/* If the back end isn't clever enough to know that the lhs and rhs
	   types are the same, add an explicit conversion.  */
	tree op0 = TREE_OPERAND (*expr_p, 0);
	tree op1 = TREE_OPERAND (*expr_p, 1);

	if (!error_operand_p (op0)
	    && !error_operand_p (op1)
	    && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
		|| TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
	    && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
	  TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
					      TREE_TYPE (op0), op1);

	else if ((is_gimple_lvalue (op1) || INDIRECT_REF_P (op1)
		  || (TREE_CODE (op1) == CONSTRUCTOR
		      && CONSTRUCTOR_NELTS (op1) == 0
		      && !TREE_CLOBBER_P (op1))
		  || (TREE_CODE (op1) == CALL_EXPR
		      && !CALL_EXPR_RETURN_SLOT_OPT (op1)))
		 && is_really_empty_class (TREE_TYPE (op0)))
	  {
	    /* Remove any copies of empty classes.  We check that the RHS
	       has a simple form so that TARGET_EXPRs and non-empty
	       CONSTRUCTORs get reduced properly, and we leave the return
	       slot optimization alone because it isn't a copy (FIXME so it
	       shouldn't be represented as one).

	       Also drop volatile variables on the RHS to avoid infinite
	       recursion from gimplify_expr trying to load the value.  */
	    if (!TREE_SIDE_EFFECTS (op1)
		|| (DECL_P (op1) && TREE_THIS_VOLATILE (op1)))
	      *expr_p = op0;
	    else if (TREE_CODE (op1) == MEM_REF
		     && TREE_THIS_VOLATILE (op1))
	      {
		/* Similarly for volatile MEM_REFs on the RHS.  */
		if (!TREE_SIDE_EFFECTS (TREE_OPERAND (op1, 0)))
		  *expr_p = op0;
		else
		  *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
				    TREE_OPERAND (op1, 0), op0);
	      }
	    else
	      *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
				op0, op1);
	  }
      }
      ret = GS_OK;
      break;

    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case USING_STMT:
      gcc_unreachable ();

      /* These statement codes are lowered during genericization
	 (cp_genericize_r) and must not survive to this point.  */
    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
      gcc_unreachable ();

    case OMP_FOR:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;

    case UNARY_PLUS_EXPR:
      {
	/* Unary plus is a no-op except possibly for a type conversion.  */
	tree arg = TREE_OPERAND (*expr_p, 0);
	tree type = TREE_TYPE (*expr_p);
	*expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
					    : arg;
	ret = GS_OK;
      }
      break;

    default:
      /* Not C++-specific: hand off to the C family gimplifier.  */
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}
702 
703 static inline bool
704 is_invisiref_parm (const_tree t)
705 {
706   return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
707 	  && DECL_BY_REFERENCE (t));
708 }
709 
710 /* Return true if the uid in both int tree maps are equal.  */
711 
712 int
713 cxx_int_tree_map_eq (const void *va, const void *vb)
714 {
715   const struct cxx_int_tree_map *a = (const struct cxx_int_tree_map *) va;
716   const struct cxx_int_tree_map *b = (const struct cxx_int_tree_map *) vb;
717   return (a->uid == b->uid);
718 }
719 
720 /* Hash a UID in a cxx_int_tree_map.  */
721 
722 unsigned int
723 cxx_int_tree_map_hash (const void *item)
724 {
725   return ((const struct cxx_int_tree_map *)item)->uid;
726 }
727 
728 /* A stable comparison routine for use with splay trees and DECLs.  */
729 
730 static int
731 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
732 {
733   tree a = (tree) xa;
734   tree b = (tree) xb;
735 
736   return DECL_UID (a) - DECL_UID (b);
737 }
738 
/* OpenMP context during genericization.  One record per enclosing
   parallel/task region, linked innermost-to-outermost through OUTER.  */

struct cp_genericize_omp_taskreg
{
  /* True for a parallel region, false for a task region.  */
  bool is_parallel;
  /* True if the region's default data-sharing is "shared".  */
  bool default_shared;
  /* Enclosing region, or NULL for the outermost one.  */
  struct cp_genericize_omp_taskreg *outer;
  /* Map from decl (splay_tree_key) to its OMP_CLAUSE_DEFAULT_* sharing
     flag; presumably ordered via splay_tree_compare_decl_uid.  */
  splay_tree variables;
};
748 
749 /* Return true if genericization should try to determine if
750    DECL is firstprivate or shared within task regions.  */
751 
752 static bool
753 omp_var_to_track (tree decl)
754 {
755   tree type = TREE_TYPE (decl);
756   if (is_invisiref_parm (decl))
757     type = TREE_TYPE (type);
758   while (TREE_CODE (type) == ARRAY_TYPE)
759     type = TREE_TYPE (type);
760   if (type == error_mark_node || !CLASS_TYPE_P (type))
761     return false;
762   if (TREE_CODE (decl) == VAR_DECL && DECL_THREAD_LOCAL_P (decl))
763     return false;
764   if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
765     return false;
766   return true;
767 }
768 
/* Note DECL use in OpenMP region OMP_CTX during genericization.  On
   first sight of DECL in OMP_CTX, determine whether it would be
   implicitly shared or firstprivate there and record that in
   OMP_CTX->variables.  For an implicitly firstprivate class-type decl,
   instantiate its copy ctor and dtor now, since gimplification is too
   late to do so.  */

static void
omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
{
  splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
					 (splay_tree_key) decl);
  if (n == NULL)
    {
      int flags = OMP_CLAUSE_DEFAULT_SHARED;
      /* Record the use in all enclosing regions first.  */
      if (omp_ctx->outer)
	omp_cxx_notice_variable (omp_ctx->outer, decl);
      if (!omp_ctx->default_shared)
	{
	  struct cp_genericize_omp_taskreg *octx;

	  /* Scan outward (stopping at the innermost parallel) for a
	     region where DECL is already known non-shared; if found,
	     DECL is firstprivate here too.  */
	  for (octx = omp_ctx->outer; octx; octx = octx->outer)
	    {
	      n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
	      if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
		{
		  flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
		  break;
		}
	      if (octx->is_parallel)
		break;
	    }
	  /* No enclosing parallel: parm decls and function-local
	     automatics are firstprivate by default.  */
	  if (octx == NULL
	      && (TREE_CODE (decl) == PARM_DECL
		  || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
		      && DECL_CONTEXT (decl) == current_function_decl)))
	    flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
	  if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
	    {
	      /* DECL is implicitly determined firstprivate in
		 the current task construct.  Ensure copy ctor and
		 dtor are instantiated, because during gimplification
		 it will be already too late.  */
	      tree type = TREE_TYPE (decl);
	      if (is_invisiref_parm (decl))
		type = TREE_TYPE (type);
	      while (TREE_CODE (type) == ARRAY_TYPE)
		type = TREE_TYPE (type);
	      get_copy_ctor (type, tf_none);
	      get_dtor (type, tf_none);
	    }
	}
      splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
    }
}
819 
/* Genericization context, threaded through cp_genericize_r as DATA.  */

struct cp_genericize_data
{
  /* Set of trees already visited, so the walk doesn't process the same
     tree twice.  */
  struct pointer_set_t *p_set;
  /* Stack of enclosing BIND_EXPRs, innermost last.  */
  vec<tree> bind_expr_stack;
  /* Innermost enclosing OpenMP task/parallel region, or NULL.  */
  struct cp_genericize_omp_taskreg *omp_ctx;
};
828 
829 /* Perform any pre-gimplification lowering of C++ front end trees to
830    GENERIC.  */
831 
832 static tree
833 cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
834 {
835   tree stmt = *stmt_p;
836   struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
837   struct pointer_set_t *p_set = wtd->p_set;
838 
839   /* If in an OpenMP context, note var uses.  */
840   if (__builtin_expect (wtd->omp_ctx != NULL, 0)
841       && (TREE_CODE (stmt) == VAR_DECL
842 	  || TREE_CODE (stmt) == PARM_DECL
843 	  || TREE_CODE (stmt) == RESULT_DECL)
844       && omp_var_to_track (stmt))
845     omp_cxx_notice_variable (wtd->omp_ctx, stmt);
846 
847   if (is_invisiref_parm (stmt)
848       /* Don't dereference parms in a thunk, pass the references through. */
849       && !(DECL_THUNK_P (current_function_decl)
850 	   && TREE_CODE (stmt) == PARM_DECL))
851     {
852       *stmt_p = convert_from_reference (stmt);
853       *walk_subtrees = 0;
854       return NULL;
855     }
856 
857   /* Map block scope extern declarations to visible declarations with the
858      same name and type in outer scopes if any.  */
859   if (cp_function_chain->extern_decl_map
860       && (TREE_CODE (stmt) == FUNCTION_DECL || TREE_CODE (stmt) == VAR_DECL)
861       && DECL_EXTERNAL (stmt))
862     {
863       struct cxx_int_tree_map *h, in;
864       in.uid = DECL_UID (stmt);
865       h = (struct cxx_int_tree_map *)
866 	  htab_find_with_hash (cp_function_chain->extern_decl_map,
867 			       &in, in.uid);
868       if (h)
869 	{
870 	  *stmt_p = h->to;
871 	  *walk_subtrees = 0;
872 	  return NULL;
873 	}
874     }
875 
876   /* Other than invisiref parms, don't walk the same tree twice.  */
877   if (pointer_set_contains (p_set, stmt))
878     {
879       *walk_subtrees = 0;
880       return NULL_TREE;
881     }
882 
883   if (TREE_CODE (stmt) == ADDR_EXPR
884       && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
885     {
886       /* If in an OpenMP context, note var uses.  */
887       if (__builtin_expect (wtd->omp_ctx != NULL, 0)
888 	  && omp_var_to_track (TREE_OPERAND (stmt, 0)))
889 	omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
890       *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
891       *walk_subtrees = 0;
892     }
893   else if (TREE_CODE (stmt) == RETURN_EXPR
894 	   && TREE_OPERAND (stmt, 0)
895 	   && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
896     /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
897     *walk_subtrees = 0;
898   else if (TREE_CODE (stmt) == OMP_CLAUSE)
899     switch (OMP_CLAUSE_CODE (stmt))
900       {
901       case OMP_CLAUSE_LASTPRIVATE:
902 	/* Don't dereference an invisiref in OpenMP clauses.  */
903 	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
904 	  {
905 	    *walk_subtrees = 0;
906 	    if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
907 	      cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
908 			    cp_genericize_r, data, NULL);
909 	  }
910 	break;
911       case OMP_CLAUSE_PRIVATE:
912 	/* Don't dereference an invisiref in OpenMP clauses.  */
913 	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
914 	  *walk_subtrees = 0;
915 	else if (wtd->omp_ctx != NULL)
916 	  {
917 	    /* Private clause doesn't cause any references to the
918 	       var in outer contexts, avoid calling
919 	       omp_cxx_notice_variable for it.  */
920 	    struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
921 	    wtd->omp_ctx = NULL;
922 	    cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
923 			  data, NULL);
924 	    wtd->omp_ctx = old;
925 	    *walk_subtrees = 0;
926 	  }
927 	break;
928       case OMP_CLAUSE_SHARED:
929       case OMP_CLAUSE_FIRSTPRIVATE:
930       case OMP_CLAUSE_COPYIN:
931       case OMP_CLAUSE_COPYPRIVATE:
932 	/* Don't dereference an invisiref in OpenMP clauses.  */
933 	if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
934 	  *walk_subtrees = 0;
935 	break;
936       case OMP_CLAUSE_REDUCTION:
937 	gcc_assert (!is_invisiref_parm (OMP_CLAUSE_DECL (stmt)));
938 	break;
939       default:
940 	break;
941       }
942   else if (IS_TYPE_OR_DECL_P (stmt))
943     *walk_subtrees = 0;
944 
945   /* Due to the way voidify_wrapper_expr is written, we don't get a chance
946      to lower this construct before scanning it, so we need to lower these
947      before doing anything else.  */
948   else if (TREE_CODE (stmt) == CLEANUP_STMT)
949     *stmt_p = build2_loc (EXPR_LOCATION (stmt),
950 			  CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
951 						 : TRY_FINALLY_EXPR,
952 			  void_type_node,
953 			  CLEANUP_BODY (stmt),
954 			  CLEANUP_EXPR (stmt));
955 
956   else if (TREE_CODE (stmt) == IF_STMT)
957     {
958       genericize_if_stmt (stmt_p);
959       /* *stmt_p has changed, tail recurse to handle it again.  */
960       return cp_genericize_r (stmt_p, walk_subtrees, data);
961     }
962 
963   /* COND_EXPR might have incompatible types in branches if one or both
964      arms are bitfields.  Fix it up now.  */
965   else if (TREE_CODE (stmt) == COND_EXPR)
966     {
967       tree type_left
968 	= (TREE_OPERAND (stmt, 1)
969 	   ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
970 	   : NULL_TREE);
971       tree type_right
972 	= (TREE_OPERAND (stmt, 2)
973 	   ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
974 	   : NULL_TREE);
975       if (type_left
976 	  && !useless_type_conversion_p (TREE_TYPE (stmt),
977 					 TREE_TYPE (TREE_OPERAND (stmt, 1))))
978 	{
979 	  TREE_OPERAND (stmt, 1)
980 	    = fold_convert (type_left, TREE_OPERAND (stmt, 1));
981 	  gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
982 						 type_left));
983 	}
984       if (type_right
985 	  && !useless_type_conversion_p (TREE_TYPE (stmt),
986 					 TREE_TYPE (TREE_OPERAND (stmt, 2))))
987 	{
988 	  TREE_OPERAND (stmt, 2)
989 	    = fold_convert (type_right, TREE_OPERAND (stmt, 2));
990 	  gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
991 						 type_right));
992 	}
993     }
994 
995   else if (TREE_CODE (stmt) == BIND_EXPR)
996     {
997       if (__builtin_expect (wtd->omp_ctx != NULL, 0))
998 	{
999 	  tree decl;
1000 	  for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
1001 	    if (TREE_CODE (decl) == VAR_DECL
1002 		&& !DECL_EXTERNAL (decl)
1003 		&& omp_var_to_track (decl))
1004 	      {
1005 		splay_tree_node n
1006 		  = splay_tree_lookup (wtd->omp_ctx->variables,
1007 				       (splay_tree_key) decl);
1008 		if (n == NULL)
1009 		  splay_tree_insert (wtd->omp_ctx->variables,
1010 				     (splay_tree_key) decl,
1011 				     TREE_STATIC (decl)
1012 				     ? OMP_CLAUSE_DEFAULT_SHARED
1013 				     : OMP_CLAUSE_DEFAULT_PRIVATE);
1014 	      }
1015 	}
1016       wtd->bind_expr_stack.safe_push (stmt);
1017       cp_walk_tree (&BIND_EXPR_BODY (stmt),
1018 		    cp_genericize_r, data, NULL);
1019       wtd->bind_expr_stack.pop ();
1020     }
1021 
1022   else if (TREE_CODE (stmt) == USING_STMT)
1023     {
1024       tree block = NULL_TREE;
1025 
1026       /* Get the innermost inclosing GIMPLE_BIND that has a non NULL
1027          BLOCK, and append an IMPORTED_DECL to its
1028 	 BLOCK_VARS chained list.  */
1029       if (wtd->bind_expr_stack.exists ())
1030 	{
1031 	  int i;
1032 	  for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
1033 	    if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
1034 	      break;
1035 	}
1036       if (block)
1037 	{
1038 	  tree using_directive;
1039 	  gcc_assert (TREE_OPERAND (stmt, 0));
1040 
1041 	  using_directive = make_node (IMPORTED_DECL);
1042 	  TREE_TYPE (using_directive) = void_type_node;
1043 
1044 	  IMPORTED_DECL_ASSOCIATED_DECL (using_directive)
1045 	    = TREE_OPERAND (stmt, 0);
1046 	  DECL_CHAIN (using_directive) = BLOCK_VARS (block);
1047 	  BLOCK_VARS (block) = using_directive;
1048 	}
1049       /* The USING_STMT won't appear in GENERIC.  */
1050       *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1051       *walk_subtrees = 0;
1052     }
1053 
1054   else if (TREE_CODE (stmt) == DECL_EXPR
1055 	   && TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
1056     {
1057       /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
1058       *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1059       *walk_subtrees = 0;
1060     }
1061   else if (TREE_CODE (stmt) == OMP_PARALLEL || TREE_CODE (stmt) == OMP_TASK)
1062     {
1063       struct cp_genericize_omp_taskreg omp_ctx;
1064       tree c, decl;
1065       splay_tree_node n;
1066 
1067       *walk_subtrees = 0;
1068       cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
1069       omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
1070       omp_ctx.default_shared = omp_ctx.is_parallel;
1071       omp_ctx.outer = wtd->omp_ctx;
1072       omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
1073       wtd->omp_ctx = &omp_ctx;
1074       for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
1075 	switch (OMP_CLAUSE_CODE (c))
1076 	  {
1077 	  case OMP_CLAUSE_SHARED:
1078 	  case OMP_CLAUSE_PRIVATE:
1079 	  case OMP_CLAUSE_FIRSTPRIVATE:
1080 	  case OMP_CLAUSE_LASTPRIVATE:
1081 	    decl = OMP_CLAUSE_DECL (c);
1082 	    if (decl == error_mark_node || !omp_var_to_track (decl))
1083 	      break;
1084 	    n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
1085 	    if (n != NULL)
1086 	      break;
1087 	    splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
1088 			       OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
1089 			       ? OMP_CLAUSE_DEFAULT_SHARED
1090 			       : OMP_CLAUSE_DEFAULT_PRIVATE);
1091 	    if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE
1092 		&& omp_ctx.outer)
1093 	      omp_cxx_notice_variable (omp_ctx.outer, decl);
1094 	    break;
1095 	  case OMP_CLAUSE_DEFAULT:
1096 	    if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
1097 	      omp_ctx.default_shared = true;
1098 	  default:
1099 	    break;
1100 	  }
1101       cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
1102       wtd->omp_ctx = omp_ctx.outer;
1103       splay_tree_delete (omp_ctx.variables);
1104     }
1105   else if (TREE_CODE (stmt) == CONVERT_EXPR)
1106     gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
1107   else if (TREE_CODE (stmt) == FOR_STMT)
1108     genericize_for_stmt (stmt_p, walk_subtrees, data);
1109   else if (TREE_CODE (stmt) == WHILE_STMT)
1110     genericize_while_stmt (stmt_p, walk_subtrees, data);
1111   else if (TREE_CODE (stmt) == DO_STMT)
1112     genericize_do_stmt (stmt_p, walk_subtrees, data);
1113   else if (TREE_CODE (stmt) == SWITCH_STMT)
1114     genericize_switch_stmt (stmt_p, walk_subtrees, data);
1115   else if (TREE_CODE (stmt) == CONTINUE_STMT)
1116     genericize_continue_stmt (stmt_p);
1117   else if (TREE_CODE (stmt) == BREAK_STMT)
1118     genericize_break_stmt (stmt_p);
1119   else if (TREE_CODE (stmt) == OMP_FOR)
1120     genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
1121   else if (TREE_CODE (stmt) == SIZEOF_EXPR)
1122     {
1123       if (SIZEOF_EXPR_TYPE_P (stmt))
1124 	*stmt_p
1125 	  = cxx_sizeof_or_alignof_type (TREE_TYPE (TREE_OPERAND (stmt, 0)),
1126 					SIZEOF_EXPR, false);
1127       else if (TYPE_P (TREE_OPERAND (stmt, 0)))
1128 	*stmt_p = cxx_sizeof_or_alignof_type (TREE_OPERAND (stmt, 0),
1129 					      SIZEOF_EXPR, false);
1130       else
1131 	*stmt_p = cxx_sizeof_or_alignof_expr (TREE_OPERAND (stmt, 0),
1132 					      SIZEOF_EXPR, false);
1133       if (*stmt_p == error_mark_node)
1134 	*stmt_p = size_one_node;
1135       return NULL;
1136     }
1137 
1138   pointer_set_insert (p_set, *stmt_p);
1139 
1140   return NULL;
1141 }
1142 
1143 /* Lower C++ front end trees to GENERIC in T_P.  */
1144 
1145 static void
1146 cp_genericize_tree (tree* t_p)
1147 {
1148   struct cp_genericize_data wtd;
1149 
1150   wtd.p_set = pointer_set_create ();
1151   wtd.bind_expr_stack.create (0);
1152   wtd.omp_ctx = NULL;
1153   cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
1154   pointer_set_destroy (wtd.p_set);
1155   wtd.bind_expr_stack.release ();
1156 }
1157 
/* Genericize FNDECL: rewrite parameters and the return value that are
   passed by invisible reference to use reference types explicitly, then
   lower the C++-specific trees in its saved body to GENERIC (unless
   FNDECL is a clone, whose body is already GIMPLE).  */

void
cp_genericize (tree fndecl)
{
  tree t;

  /* Fix up the types of parms passed by invisible reference.  */
  for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_TYPE (t)))
      {
	/* If a function's arguments are copied to create a thunk,
	   then DECL_BY_REFERENCE will be set -- but the type of the
	   argument will be a pointer type, so we will never get
	   here.  */
	gcc_assert (!DECL_BY_REFERENCE (t));
	gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
	/* Switch the parm to the type it is actually passed with and
	   recompute its layout.  */
	TREE_TYPE (t) = DECL_ARG_TYPE (t);
	DECL_BY_REFERENCE (t) = 1;
	TREE_ADDRESSABLE (t) = 0;
	relayout_decl (t);
      }

  /* Do the same for the return value.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
    {
      t = DECL_RESULT (fndecl);
      TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
      DECL_BY_REFERENCE (t) = 1;
      TREE_ADDRESSABLE (t) = 0;
      relayout_decl (t);
      if (DECL_NAME (t))
	{
	  /* Adjust DECL_VALUE_EXPR of the original var.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  /* Look in the outermost user scope for the variable whose
	     value expression is the result decl (named return value
	     optimization) and make it dereference the new reference.  */
	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (DECL_NAME (t) == DECL_NAME (var)
		  && DECL_HAS_VALUE_EXPR_P (var)
		  && DECL_VALUE_EXPR (var) == t)
		{
		  tree val = convert_from_reference (t);
		  SET_DECL_VALUE_EXPR (var, val);
		  break;
		}
	}
    }

  /* If we're a clone, the body is already GIMPLE.  */
  if (DECL_CLONED_FUNCTION_P (fndecl))
    return;

  /* We do want to see every occurrence of the parms, so we can't just use
     walk_tree's hash functionality.  */
  cp_genericize_tree (&DECL_SAVED_TREE (fndecl));

  /* Do everything else.  */
  c_genericize (fndecl);

  /* All break/continue scopes must have been closed by now.  */
  gcc_assert (bc_label[bc_break] == NULL);
  gcc_assert (bc_label[bc_continue] == NULL);
}
1220 
1221 /* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
1222    NULL if there is in fact nothing to do.  ARG2 may be null if FN
1223    actually only takes one argument.  */
1224 
static tree
cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
{
  tree defparm, parm, t;
  int i = 0;
  int nargs;
  tree *argarray;

  if (fn == NULL)
    return NULL;

  nargs = list_length (DECL_ARGUMENTS (fn));
  argarray = XALLOCAVEC (tree, nargs);

  /* DEFPARM points at the parameter-type slots that are not filled
     explicitly below (the address of ARG1 is always passed first, and
     ARG2's address second when present); these get default arguments.  */
  defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
  if (arg2)
    defparm = TREE_CHAIN (defparm);

  if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
    {
      /* Array case: build a loop that applies FN to each element:
	   p1 = &arg1[0]...[0];  [p2 = &arg2[0]...[0];]
	   lab: fn (p1[, p2], defaults...);
	   p1 += elt_size;  [p2 += elt_size;]
	   if (p1 != end1) goto lab;  */
      tree inner_type = TREE_TYPE (arg1);
      tree start1, end1, p1;
      tree start2 = NULL, p2 = NULL;
      tree ret = NULL, lab;

      start1 = arg1;
      start2 = arg2;
      /* Descend through nested array types to reach the first element.  */
      do
	{
	  inner_type = TREE_TYPE (inner_type);
	  start1 = build4 (ARRAY_REF, inner_type, start1,
			   size_zero_node, NULL, NULL);
	  if (arg2)
	    start2 = build4 (ARRAY_REF, inner_type, start2,
			     size_zero_node, NULL, NULL);
	}
      while (TREE_CODE (inner_type) == ARRAY_TYPE);
      start1 = build_fold_addr_expr_loc (input_location, start1);
      if (arg2)
	start2 = build_fold_addr_expr_loc (input_location, start2);

      /* END1 is one past the last element of ARG1.  */
      end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
      end1 = fold_build_pointer_plus (start1, end1);

      /* Initialize the iteration pointer(s).  */
      p1 = create_tmp_var (TREE_TYPE (start1), NULL);
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  p2 = create_tmp_var (TREE_TYPE (start2), NULL);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
	  append_to_statement_list (t, &ret);
	}

      /* Loop head label.  */
      lab = create_artificial_label (input_location);
      t = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (t, &ret);

      argarray[i++] = p1;
      if (arg2)
	argarray[i++] = p2;
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm), fn, i,
					   tf_warning_or_error);
      /* Call FN on the current element(s), discarding any result.  */
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
      append_to_statement_list (t, &ret);

      /* Step the pointer(s) to the next element.  */
      t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
	  append_to_statement_list (t, &ret);
	}

      /* Loop back until P1 reaches END1.  */
      t = build2 (NE_EXPR, boolean_type_node, p1, end1);
      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
      append_to_statement_list (t, &ret);

      return ret;
    }
  else
    {
      /* Scalar case: a single call on the object's address(es).  */
      argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
      if (arg2)
	argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm),
					   fn, i, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
    }
}
1331 
1332 /* Return code to initialize DECL with its default constructor, or
1333    NULL if there's nothing to do.  */
1334 
1335 tree
1336 cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
1337 {
1338   tree info = CP_OMP_CLAUSE_INFO (clause);
1339   tree ret = NULL;
1340 
1341   if (info)
1342     ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);
1343 
1344   return ret;
1345 }
1346 
1347 /* Return code to initialize DST with a copy constructor from SRC.  */
1348 
1349 tree
1350 cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
1351 {
1352   tree info = CP_OMP_CLAUSE_INFO (clause);
1353   tree ret = NULL;
1354 
1355   if (info)
1356     ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
1357   if (ret == NULL)
1358     ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
1359 
1360   return ret;
1361 }
1362 
1363 /* Similarly, except use an assignment operator instead.  */
1364 
1365 tree
1366 cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
1367 {
1368   tree info = CP_OMP_CLAUSE_INFO (clause);
1369   tree ret = NULL;
1370 
1371   if (info)
1372     ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
1373   if (ret == NULL)
1374     ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
1375 
1376   return ret;
1377 }
1378 
1379 /* Return code to destroy DECL.  */
1380 
1381 tree
1382 cxx_omp_clause_dtor (tree clause, tree decl)
1383 {
1384   tree info = CP_OMP_CLAUSE_INFO (clause);
1385   tree ret = NULL;
1386 
1387   if (info)
1388     ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);
1389 
1390   return ret;
1391 }
1392 
1393 /* True if OpenMP should privatize what this DECL points to rather
1394    than the DECL itself.  */
1395 
bool
cxx_omp_privatize_by_reference (const_tree decl)
{
  /* Invisible reference parms look like the object itself to the user,
     so privatize the referenced object rather than the reference.  */
  return is_invisiref_parm (decl);
}
1401 
1402 /* Return true if DECL is const qualified var having no mutable member.  */
bool
cxx_omp_const_qual_no_mutable (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      /* Only look through references that stand for invisible
	 reference parms; any other reference is not considered.  */
      if (!is_invisiref_parm (decl))
	return false;
      type = TREE_TYPE (type);

      if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
	{
	  /* NVR doesn't preserve const qualification of the
	     variable's type.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  /* Find the user-declared variable this named result decl was
	     merged with (matched by name and main type variant) and
	     pick up its const qualification if it has one.  */
	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (DECL_NAME (decl) == DECL_NAME (var)
		  && (TYPE_MAIN_VARIANT (type)
		      == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
		{
		  if (TYPE_READONLY (TREE_TYPE (var)))
		    type = TREE_TYPE (var);
		  break;
		}
	}
    }

  if (type == error_mark_node)
    return false;

  /* Variables with const-qualified type having no mutable member
     are predetermined shared.  */
  if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
    return true;

  return false;
}
1443 
1444 /* True if OpenMP sharing attribute of DECL is predetermined.  */
1445 
1446 enum omp_clause_default_kind
1447 cxx_omp_predetermined_sharing (tree decl)
1448 {
1449   /* Static data members are predetermined shared.  */
1450   if (TREE_STATIC (decl))
1451     {
1452       tree ctx = CP_DECL_CONTEXT (decl);
1453       if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
1454 	return OMP_CLAUSE_DEFAULT_SHARED;
1455     }
1456 
1457   /* Const qualified vars having no mutable member are predetermined
1458      shared.  */
1459   if (cxx_omp_const_qual_no_mutable (decl))
1460     return OMP_CLAUSE_DEFAULT_SHARED;
1461 
1462   return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
1463 }
1464 
1465 /* Finalize an implicitly determined clause.  */
1466 
void
cxx_omp_finish_clause (tree c)
{
  tree decl, inner_type;
  bool make_shared = false;

  /* Only implicitly determined firstprivate clauses need this work.  */
  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
    return;

  decl = OMP_CLAUSE_DECL (c);
  decl = require_complete_type (decl);
  inner_type = TREE_TYPE (decl);
  if (decl == error_mark_node)
    make_shared = true;
  else if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
    {
      /* An invisible reference parm stands for the object itself, so
	 look through the reference; any other reference type is
	 rejected for implicit firstprivate.  */
      if (is_invisiref_parm (decl))
	inner_type = TREE_TYPE (inner_type);
      else
	{
	  error ("%qE implicitly determined as %<firstprivate%> has reference type",
		 decl);
	  make_shared = true;
	}
    }

  /* We're interested in the base element, not arrays.  */
  while (TREE_CODE (inner_type) == ARRAY_TYPE)
    inner_type = TREE_TYPE (inner_type);

  /* Check for special function availability by building a call to one.
     Save the results, because later we won't be in the right context
     for making these queries.  */
  if (!make_shared
      && CLASS_TYPE_P (inner_type)
      && cxx_omp_create_clause_info (c, inner_type, false, true, false))
    make_shared = true;

  /* When the decl is erroneous or unusable as firstprivate, fall back
     to treating the clause as shared.  */
  if (make_shared)
    OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
}
1508