/* C++-specific tree lowering bits; see also c-gimplify.c and gimple.c.

   Copyright (C) 2002-2020 Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "basic-block.h"
#include "cp-tree.h"
#include "gimple.h"
#include "predict.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "gimplify.h"
#include "c-family/c-ubsan.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "gcc-rich-location.h"
#include "memmodel.h"
#include "tm_p.h"
#include "output.h"
#include "file-prefix-map.h"
#include "cgraph.h"
#include "omp-general.h"

struct cp_fold_data
{
  hash_set<tree> pset;
  bool genericize; // called from cp_fold_function?

  cp_fold_data (bool g): genericize (g) {}
};

/* Forward declarations.  */

static tree cp_genericize_r (tree *, int *, void *);
static tree cp_fold_r (tree *, int *, void *);
static void cp_genericize_tree (tree*, bool);
static tree cp_fold (tree);

/* Local declarations.  */

enum bc_t { bc_break = 0, bc_continue = 1 };

/* Stack of labels which are targets for "break" or "continue",
   linked through TREE_CHAIN.  */
static tree bc_label[2];

/* Begin a scope which can be exited by a break or continue statement.  BC
   indicates which.

   Just creates a label with location LOCATION and pushes it into the current
   context.  */

static tree
begin_bc_block (enum bc_t bc, location_t location)
{
  tree label = create_artificial_label (location);
  DECL_CHAIN (label) = bc_label[bc];
  bc_label[bc] = label;
  if (bc == bc_break)
    LABEL_DECL_BREAK (label) = true;
  else
    LABEL_DECL_CONTINUE (label) = true;
  return label;
}

/* Finish a scope which can be exited by a break or continue statement.
   LABEL was returned from the most recent call to begin_bc_block.  BLOCK is
   an expression for the contents of the scope.

   If we saw a break (or continue) in the scope, append a LABEL_EXPR to
   BLOCK.  Otherwise, just forget the label.  */

static void
finish_bc_block (tree *block, enum bc_t bc, tree label)
{
  gcc_assert (label == bc_label[bc]);

  if (TREE_USED (label))
    append_to_statement_list (build1 (LABEL_EXPR, void_type_node, label),
			      block);

  bc_label[bc] = DECL_CHAIN (label);
  DECL_CHAIN (label) = NULL_TREE;
}

/* Get the LABEL_EXPR to represent a break or continue statement
   in the current block scope.  BC indicates which.  */

static tree
get_bc_label (enum bc_t bc)
{
  tree label = bc_label[bc];

  /* Mark the label used for finish_bc_block.  */
  TREE_USED (label) = 1;
  return label;
}
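
/* Editorial sketch, not part of the original sources: with the label stack
   above, a loop body such as

     while (cond) { if (p) break; if (q) continue; f (); }

   has its "break" and "continue" lowered to plain gotos targeting the
   labels pushed by begin_bc_block, roughly

     if (p) goto blab;
     if (q) goto clab;
     f ();
     clab:;
     ...
     blab:;

   where blab and clab stand for the artificial break/continue labels.  */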

/* Genericize a TRY_BLOCK.  */

static void
genericize_try_block (tree *stmt_p)
{
  tree body = TRY_STMTS (*stmt_p);
  tree cleanup = TRY_HANDLERS (*stmt_p);

  *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
}

/* Genericize a HANDLER by converting to a CATCH_EXPR.  */

static void
genericize_catch_block (tree *stmt_p)
{
  tree type = HANDLER_TYPE (*stmt_p);
  tree body = HANDLER_BODY (*stmt_p);

  /* FIXME should the caught type go in TREE_TYPE?  */
  *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
}

/* A terser interface for building a representation of an exception
   specification.  */

static tree
build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
{
  tree t;

  /* FIXME should the allowed types go in TREE_TYPE?  */
  t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
  append_to_statement_list (failure, &EH_FILTER_FAILURE (t));

  t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
  append_to_statement_list (body, &TREE_OPERAND (t, 0));

  return t;
}

/* Genericize an EH_SPEC_BLOCK by converting it to a
   TRY_CATCH_EXPR/EH_FILTER_EXPR pair.  */

static void
genericize_eh_spec_block (tree *stmt_p)
{
  tree body = EH_SPEC_STMTS (*stmt_p);
  tree allowed = EH_SPEC_RAISES (*stmt_p);
  tree failure = build_call_n (call_unexpected_fn, 1, build_exc_ptr ());

  *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
  TREE_NO_WARNING (*stmt_p) = true;
  TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
}
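
/* Editorial sketch, not from the original file: for a dynamic exception
   specification such as

     void f () throw (A);

   the body ends up wrapped roughly as

     TRY_CATCH_EXPR
       <body>
       EH_FILTER_EXPR <allowed = {A},
		       failure = call_unexpected_fn (<exception pointer>)>

   so an exception not matching A reaches the failure expression, per the
   TRY_CATCH_EXPR/EH_FILTER_EXPR pair built above.  */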

/* Return the first non-compound statement in STMT.  */

tree
first_stmt (tree stmt)
{
  switch (TREE_CODE (stmt))
    {
    case STATEMENT_LIST:
      if (tree_statement_list_node *p = STATEMENT_LIST_HEAD (stmt))
	return first_stmt (p->stmt);
      return void_node;

    case BIND_EXPR:
      return first_stmt (BIND_EXPR_BODY (stmt));

    default:
      return stmt;
    }
}

/* Genericize an IF_STMT by turning it into a COND_EXPR.  */

static void
genericize_if_stmt (tree *stmt_p)
{
  tree stmt, cond, then_, else_;
  location_t locus = EXPR_LOCATION (*stmt_p);

  stmt = *stmt_p;
  cond = IF_COND (stmt);
  then_ = THEN_CLAUSE (stmt);
  else_ = ELSE_CLAUSE (stmt);

  if (then_ && else_)
    {
      tree ft = first_stmt (then_);
      tree fe = first_stmt (else_);
      br_predictor pr;
      if (TREE_CODE (ft) == PREDICT_EXPR
	  && TREE_CODE (fe) == PREDICT_EXPR
	  && (pr = PREDICT_EXPR_PREDICTOR (ft)) == PREDICT_EXPR_PREDICTOR (fe)
	  && (pr == PRED_HOT_LABEL || pr == PRED_COLD_LABEL))
	{
	  gcc_rich_location richloc (EXPR_LOC_OR_LOC (ft, locus));
	  richloc.add_range (EXPR_LOC_OR_LOC (fe, locus));
	  warning_at (&richloc, OPT_Wattributes,
		      "both branches of %<if%> statement marked as %qs",
		      pr == PRED_HOT_LABEL ? "likely" : "unlikely");
	}
    }

  if (!then_)
    then_ = build_empty_stmt (locus);
  if (!else_)
    else_ = build_empty_stmt (locus);

  if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
    stmt = then_;
  else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
    stmt = else_;
  else
    stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
  protected_set_expr_location_if_unset (stmt, locus);
  *stmt_p = stmt;
}
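
/* Editorial example, not part of the original sources: after
   genericize_if_stmt,

     if (x) f (); else g ();

   becomes the GENERIC expression

     COND_EXPR <x, f (), g ()>

   and when the condition folds to a constant and the dead arm has no
   side effects, only the live arm survives.  */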

/* Build a generic representation of one of the C loop forms.  COND is the
   loop condition or NULL_TREE.  BODY is the (possibly compound) statement
   controlled by the loop.  INCR is the increment expression of a for-loop,
   or NULL_TREE.  COND_IS_FIRST indicates whether the condition is
   evaluated before the loop body as in while and for loops, or after the
   loop body as in do-while loops.  */

static void
genericize_cp_loop (tree *stmt_p, location_t start_locus, tree cond, tree body,
		    tree incr, bool cond_is_first, int *walk_subtrees,
		    void *data)
{
  tree blab, clab;
  tree exit = NULL;
  tree stmt_list = NULL;
  tree debug_begin = NULL;

  protected_set_expr_location_if_unset (incr, start_locus);

  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&incr, cp_genericize_r, data, NULL);

  blab = begin_bc_block (bc_break, start_locus);
  clab = begin_bc_block (bc_continue, start_locus);

  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  if (MAY_HAVE_DEBUG_MARKER_STMTS
      && (!cond || !integer_zerop (cond)))
    {
      debug_begin = build0 (DEBUG_BEGIN_STMT, void_type_node);
      SET_EXPR_LOCATION (debug_begin, cp_expr_loc_or_loc (cond, start_locus));
    }

  if (cond && TREE_CODE (cond) != INTEGER_CST)
    {
      /* If COND is constant, don't bother building an exit.  If it's false,
	 we won't build a loop.  If it's true, any exits are in the body.  */
      location_t cloc = cp_expr_loc_or_loc (cond, start_locus);
      exit = build1_loc (cloc, GOTO_EXPR, void_type_node,
			 get_bc_label (bc_break));
      exit = fold_build3_loc (cloc, COND_EXPR, void_type_node, cond,
			      build_empty_stmt (cloc), exit);
    }

  if (exit && cond_is_first)
    {
      append_to_statement_list (debug_begin, &stmt_list);
      debug_begin = NULL_TREE;
      append_to_statement_list (exit, &stmt_list);
    }
  append_to_statement_list (body, &stmt_list);
  finish_bc_block (&stmt_list, bc_continue, clab);
  if (incr)
    {
      if (MAY_HAVE_DEBUG_MARKER_STMTS)
	{
	  tree d = build0 (DEBUG_BEGIN_STMT, void_type_node);
	  SET_EXPR_LOCATION (d, cp_expr_loc_or_loc (incr, start_locus));
	  append_to_statement_list (d, &stmt_list);
	}
      append_to_statement_list (incr, &stmt_list);
    }
  append_to_statement_list (debug_begin, &stmt_list);
  if (exit && !cond_is_first)
    append_to_statement_list (exit, &stmt_list);

  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  tree loop;
  if (cond && integer_zerop (cond))
    {
      if (cond_is_first)
	loop = fold_build3_loc (start_locus, COND_EXPR,
				void_type_node, cond, stmt_list,
				build_empty_stmt (start_locus));
      else
	loop = stmt_list;
    }
  else
    {
      location_t loc = start_locus;
      if (!cond || integer_nonzerop (cond))
	loc = EXPR_LOCATION (expr_first (body));
      if (loc == UNKNOWN_LOCATION)
	loc = start_locus;
      loop = build1_loc (loc, LOOP_EXPR, void_type_node, stmt_list);
    }

  stmt_list = NULL;
  append_to_statement_list (loop, &stmt_list);
  finish_bc_block (&stmt_list, bc_break, blab);
  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  *stmt_p = stmt_list;
}
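
/* Editorial sketch of the result, not from the original file: for

     while (cond) body;

   genericize_cp_loop builds approximately

     LOOP_EXPR:
       COND_EXPR <cond, <empty>, goto blab>
       body
       clab:
     blab:

   with a do-while placing the exit test after the body instead, and a
   constant-true condition dropping the exit test entirely.  */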

/* Genericize a FOR_STMT node *STMT_P.  */

static void
genericize_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree expr = NULL;
  tree loop;
  tree init = FOR_INIT_STMT (stmt);

  if (init)
    {
      cp_walk_tree (&init, cp_genericize_r, data, NULL);
      append_to_statement_list (init, &expr);
    }

  genericize_cp_loop (&loop, EXPR_LOCATION (stmt), FOR_COND (stmt),
		      FOR_BODY (stmt), FOR_EXPR (stmt), 1, walk_subtrees, data);
  append_to_statement_list (loop, &expr);
  if (expr == NULL_TREE)
    expr = loop;
  *stmt_p = expr;
}

/* Genericize a WHILE_STMT node *STMT_P.  */

static void
genericize_while_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), WHILE_COND (stmt),
		      WHILE_BODY (stmt), NULL_TREE, 1, walk_subtrees, data);
}

/* Genericize a DO_STMT node *STMT_P.  */

static void
genericize_do_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), DO_COND (stmt),
		      DO_BODY (stmt), NULL_TREE, 0, walk_subtrees, data);
}

/* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR.  */

static void
genericize_switch_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree break_block, body, cond, type;
  location_t stmt_locus = EXPR_LOCATION (stmt);

  body = SWITCH_STMT_BODY (stmt);
  if (!body)
    body = build_empty_stmt (stmt_locus);
  cond = SWITCH_STMT_COND (stmt);
  type = SWITCH_STMT_TYPE (stmt);

  cp_walk_tree (&cond, cp_genericize_r, data, NULL);

  break_block = begin_bc_block (bc_break, stmt_locus);

  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&type, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  if (TREE_USED (break_block))
    SWITCH_BREAK_LABEL_P (break_block) = 1;
  finish_bc_block (&body, bc_break, break_block);
  *stmt_p = build2_loc (stmt_locus, SWITCH_EXPR, type, cond, body);
  SWITCH_ALL_CASES_P (*stmt_p) = SWITCH_STMT_ALL_CASES_P (stmt);
  gcc_checking_assert (!SWITCH_STMT_NO_BREAK_P (stmt)
		       || !TREE_USED (break_block));
}
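
/* Editorial example, not part of the original sources:

     switch (c) { case 1: f (); break; }

   becomes SWITCH_EXPR <type, c, body>, where the "break" is already a
   goto to the label appended after the body and marked with
   SWITCH_BREAK_LABEL_P.  */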

/* Genericize a CONTINUE_STMT node *STMT_P.  */

static void
genericize_continue_stmt (tree *stmt_p)
{
  tree stmt_list = NULL;
  tree pred = build_predict_expr (PRED_CONTINUE, NOT_TAKEN);
  tree label = get_bc_label (bc_continue);
  location_t location = EXPR_LOCATION (*stmt_p);
  tree jump = build1_loc (location, GOTO_EXPR, void_type_node, label);
  append_to_statement_list_force (pred, &stmt_list);
  append_to_statement_list (jump, &stmt_list);
  *stmt_p = stmt_list;
}

/* Genericize a BREAK_STMT node *STMT_P.  */

static void
genericize_break_stmt (tree *stmt_p)
{
  tree label = get_bc_label (bc_break);
  location_t location = EXPR_LOCATION (*stmt_p);
  *stmt_p = build1_loc (location, GOTO_EXPR, void_type_node, label);
}

/* Genericize an OMP_FOR node *STMT_P.  */

static void
genericize_omp_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  location_t locus = EXPR_LOCATION (stmt);
  tree clab = begin_bc_block (bc_continue, locus);

  cp_walk_tree (&OMP_FOR_BODY (stmt), cp_genericize_r, data, NULL);
  if (TREE_CODE (stmt) != OMP_TASKLOOP)
    cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INIT (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_COND (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INCR (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  finish_bc_block (&OMP_FOR_BODY (stmt), bc_continue, clab);
}

/* Hook into the middle of gimplifying an OMP_FOR node.  */

static enum gimplify_status
cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  gimple_seq seq = NULL;

  /* Protect ourselves from recursion.  */
  if (OMP_FOR_GIMPLIFYING_P (for_stmt))
    return GS_UNHANDLED;
  OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;

  gimplify_and_add (for_stmt, &seq);
  gimple_seq_add_seq (pre_p, seq);

  OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;

  return GS_ALL_DONE;
}

/* Gimplify an EXPR_STMT node.  */

static void
gimplify_expr_stmt (tree *stmt_p)
{
  tree stmt = EXPR_STMT_EXPR (*stmt_p);

  if (stmt == error_mark_node)
    stmt = NULL;

  /* Gimplification of a statement expression will nullify the
     statement if all its side effects are moved to *PRE_P and *POST_P.

     In this case we will not want to emit the gimplified statement.
     However, we may still want to emit a warning, so we do that before
     gimplification.  */
  if (stmt && warn_unused_value)
    {
      if (!TREE_SIDE_EFFECTS (stmt))
	{
	  if (!IS_EMPTY_STMT (stmt)
	      && !VOID_TYPE_P (TREE_TYPE (stmt))
	      && !TREE_NO_WARNING (stmt))
	    warning (OPT_Wunused_value, "statement with no effect");
	}
      else
	warn_if_unused_value (stmt, input_location);
    }

  if (stmt == NULL_TREE)
    stmt = alloc_stmt_list ();

  *stmt_p = stmt;
}

/* Gimplify initialization from an AGGR_INIT_EXPR.  */

static void
cp_gimplify_init_expr (tree *expr_p)
{
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;

  /* What about code that pulls out the temp and uses it elsewhere?  I
     think that such code never uses the TARGET_EXPR as an initializer.  If
     I'm wrong, we'll abort because the temp won't have any RTL.  In that
     case, I guess we'll need to replace references somehow.  */
  if (TREE_CODE (from) == TARGET_EXPR && TARGET_EXPR_INITIAL (from))
    from = TARGET_EXPR_INITIAL (from);

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  */
  for (t = from; t; )
    {
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
	 replace the slot operand with our target.

	 Should we add a target parm to gimplify_expr instead?  No, as in this
	 case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR
	  || TREE_CODE (sub) == VEC_INIT_EXPR)
	{
	  if (TREE_CODE (sub) == AGGR_INIT_EXPR)
	    AGGR_INIT_EXPR_SLOT (sub) = to;
	  else
	    VEC_INIT_EXPR_SLOT (sub) = to;
	  *expr_p = from;

	  /* The initialization is now a side-effect, so the container can
	     become void.  */
	  if (from != sub)
	    TREE_TYPE (from) = void_type_node;
	}

      /* Handle aggregate NSDMI.  */
      replace_placeholders (sub, to);

      if (t == sub)
	break;
      else
	t = TREE_OPERAND (t, 1);
    }

}

/* Gimplify a MUST_NOT_THROW_EXPR.  */

static enum gimplify_status
gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree stmt = *expr_p;
  tree temp = voidify_wrapper_expr (stmt, NULL);
  tree body = TREE_OPERAND (stmt, 0);
  gimple_seq try_ = NULL;
  gimple_seq catch_ = NULL;
  gimple *mnt;

  gimplify_and_add (body, &try_);
  mnt = gimple_build_eh_must_not_throw (terminate_fn);
  gimple_seq_add_stmt_without_update (&catch_, mnt);
  mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);

  gimple_seq_add_stmt_without_update (pre_p, mnt);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL;
  return GS_ALL_DONE;
}
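
/* Editorial sketch of the emitted GIMPLE, not part of the original
   sources:

     try
       <body>
     catch
       <eh_must_not_throw (terminate)>

   i.e. a GIMPLE_TRY_CATCH whose handler calls std::terminate if anything
   propagates out of BODY.  */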

/* Return TRUE if an operand (OP) of a given TYPE being copied is
   really just an empty class copy.

   Check that the operand has a simple form so that TARGET_EXPRs and
   non-empty CONSTRUCTORs get reduced properly, and we leave the
   return slot optimization alone because it isn't a copy.  */

bool
simple_empty_class_p (tree type, tree op, tree_code code)
{
  if (TREE_CODE (op) == COMPOUND_EXPR)
    return simple_empty_class_p (type, TREE_OPERAND (op, 1), code);
  if (SIMPLE_TARGET_EXPR_P (op)
      && TYPE_HAS_TRIVIAL_DESTRUCTOR (type))
    /* The TARGET_EXPR is itself a simple copy, look through it.  */
    return simple_empty_class_p (type, TARGET_EXPR_INITIAL (op), code);

  if (TREE_CODE (op) == PARM_DECL
      && TREE_ADDRESSABLE (TREE_TYPE (op)))
    {
      tree fn = DECL_CONTEXT (op);
      if (DECL_THUNK_P (fn)
	  || lambda_static_thunk_p (fn))
	/* In a thunk, we pass through invisible reference parms, so this isn't
	   actually a copy.  */
	return false;
    }

  return
    (TREE_CODE (op) == EMPTY_CLASS_EXPR
     || code == MODIFY_EXPR
     || is_gimple_lvalue (op)
     || INDIRECT_REF_P (op)
     || (TREE_CODE (op) == CONSTRUCTOR
	 && CONSTRUCTOR_NELTS (op) == 0)
     || (TREE_CODE (op) == CALL_EXPR
	 && !CALL_EXPR_RETURN_SLOT_OPT (op)))
    && !TREE_CLOBBER_P (op)
    && is_really_empty_class (type, /*ignore_vptr*/true);
}
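
/* Editorial example, not from the original file: given

     struct E {};
     E a, b;
     a = b;

   simple_empty_class_p holds for the RHS, so the assignment is reduced
   below to evaluating the operands for side effects only; no bytes are
   actually copied for the empty class.  */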

/* Returns true if evaluating E as an lvalue has side-effects;
   specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't really
   have side-effects until there is a read or write through it.  */

static bool
lvalue_has_side_effects (tree e)
{
  if (!TREE_SIDE_EFFECTS (e))
    return false;
  while (handled_component_p (e))
    {
      if (TREE_CODE (e) == ARRAY_REF
	  && TREE_SIDE_EFFECTS (TREE_OPERAND (e, 1)))
	return true;
      e = TREE_OPERAND (e, 0);
    }
  if (DECL_P (e))
    /* Just naming a variable has no side-effects.  */
    return false;
  else if (INDIRECT_REF_P (e))
    /* Similarly, indirection has no side-effects.  */
    return TREE_SIDE_EFFECTS (TREE_OPERAND (e, 0));
  else
    /* For anything else, trust TREE_SIDE_EFFECTS.  */
    return TREE_SIDE_EFFECTS (e);
}
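
/* Editorial examples, not part of the original sources: for a volatile
   int v, the lvalue "v" has no side effects until it is read or written,
   and neither does "*p" for a plain pointer p, whereas "a[f ()]" does,
   because evaluating the index expression calls f.  */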

/* Gimplify *EXPR_P as rvalue into an expression that can't be modified
   by expressions with side-effects in other operands.  */

static enum gimplify_status
gimplify_to_rvalue (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
		    bool (*gimple_test_f) (tree))
{
  enum gimplify_status t
    = gimplify_expr (expr_p, pre_p, post_p, gimple_test_f, fb_rvalue);
  if (t == GS_ERROR)
    return GS_ERROR;
  else if (is_gimple_variable (*expr_p) && TREE_CODE (*expr_p) != SSA_NAME)
    *expr_p = get_initialized_tmp_var (*expr_p, pre_p);
  return t;
}

/* Like gimplify_arg, but if ORDERED is set (which should be set if any of
   the arguments that this argument is sequenced before has
   TREE_SIDE_EFFECTS set), make sure expressions with is_gimple_reg_type type
   are gimplified into SSA_NAME or a fresh temporary and for
   non-is_gimple_reg_type we don't optimize away TARGET_EXPRs.  */

static enum gimplify_status
cp_gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
		 bool ordered)
{
  enum gimplify_status t;
  if (ordered
      && !is_gimple_reg_type (TREE_TYPE (*arg_p))
      && TREE_CODE (*arg_p) == TARGET_EXPR)
    {
      /* gimplify_arg would strip away the TARGET_EXPR, but
	 that can mean we don't copy the argument and some following
	 argument with side-effect could modify it.  */
      protected_set_expr_location (*arg_p, call_location);
      return gimplify_expr (arg_p, pre_p, NULL, is_gimple_lvalue, fb_either);
    }
  else
    {
      t = gimplify_arg (arg_p, pre_p, call_location);
      if (t == GS_ERROR)
	return GS_ERROR;
      else if (ordered
	       && is_gimple_reg_type (TREE_TYPE (*arg_p))
	       && is_gimple_variable (*arg_p)
	       && TREE_CODE (*arg_p) != SSA_NAME
	       /* No need to force references into register, references
		  can't be modified.  */
	       && !TYPE_REF_P (TREE_TYPE (*arg_p))
	       /* And this can't be modified either.  */
	       && *arg_p != current_class_ptr)
	*arg_p = get_initialized_tmp_var (*arg_p, pre_p);
      return t;
    }

}

/* Do C++-specific gimplification.  Args are as for gimplify_expr.  */

int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  location_t loc = cp_expr_loc_or_input_loc (*expr_p);
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
	= STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
	location_t loc = input_location;
	tree init = VEC_INIT_EXPR_INIT (*expr_p);
	int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
	gcc_assert (EXPR_HAS_LOCATION (*expr_p));
	input_location = EXPR_LOCATION (*expr_p);
	*expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
				  init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
				  from_array,
				  tf_warning_or_error);
	cp_fold_data data (/*genericize*/true);
	cp_walk_tree (expr_p, cp_fold_r, &data, NULL);
	cp_genericize_tree (expr_p, false);
	copy_if_shared (expr_p);
	ret = GS_OK;
	input_location = loc;
      }
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
	 THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

      /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
	 LHS of an assignment might also be involved in the RHS, as in bug
	 25979.  */
    case INIT_EXPR:
      cp_gimplify_init_expr (expr_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
	return GS_OK;
      /* Fall through.  */
    case MODIFY_EXPR:
    modify_expr_case:
      {
	/* If the back end isn't clever enough to know that the lhs and rhs
	   types are the same, add an explicit conversion.  */
	tree op0 = TREE_OPERAND (*expr_p, 0);
	tree op1 = TREE_OPERAND (*expr_p, 1);

	if (!error_operand_p (op0)
	    && !error_operand_p (op1)
	    && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
		|| TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
	    && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
	  TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
					      TREE_TYPE (op0), op1);

	else if (simple_empty_class_p (TREE_TYPE (op0), op1, code))
	  {
	    while (TREE_CODE (op1) == TARGET_EXPR)
	      /* We're disconnecting the initializer from its target,
		 don't create a temporary.  */
	      op1 = TARGET_EXPR_INITIAL (op1);

	    /* Remove any copies of empty classes.  Also drop volatile
	       variables on the RHS to avoid infinite recursion from
	       gimplify_expr trying to load the value.  */
	    if (TREE_SIDE_EFFECTS (op1))
	      {
		if (TREE_THIS_VOLATILE (op1)
		    && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
		  op1 = build_fold_addr_expr (op1);

		gimplify_and_add (op1, pre_p);
	      }
	    gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			   is_gimple_lvalue, fb_lvalue);
	    *expr_p = TREE_OPERAND (*expr_p, 0);
	    if (code == RETURN_EXPR && REFERENCE_CLASS_P (*expr_p))
	      /* Avoid 'return *<retval>;'  */
	      *expr_p = TREE_OPERAND (*expr_p, 0);
	  }
	/* P0145 says that the RHS is sequenced before the LHS.
	   gimplify_modify_expr gimplifies the RHS before the LHS, but that
	   isn't quite strong enough in two cases:

	   1) gimplify.c wants to leave a CALL_EXPR on the RHS, which would
	   mean it's evaluated after the LHS.

	   2) the value calculation of the RHS is also sequenced before the
	   LHS, so for scalar assignment we need to preevaluate if the
	   RHS could be affected by LHS side-effects even if it has no
	   side-effects of its own.  We don't need this for classes because
	   class assignment takes its RHS by reference.  */
	else if (flag_strong_eval_order > 1
		 && TREE_CODE (*expr_p) == MODIFY_EXPR
		 && lvalue_has_side_effects (op0)
		 && (TREE_CODE (op1) == CALL_EXPR
		     || (SCALAR_TYPE_P (TREE_TYPE (op1))
			 && !TREE_CONSTANT (op1))))
	  TREE_OPERAND (*expr_p, 1) = get_initialized_tmp_var (op1, pre_p);
      }
      ret = GS_OK;
      break;

    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case USING_STMT:
      gcc_unreachable ();

    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
      gcc_unreachable ();

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_LOOP:
    case OMP_TASKLOOP:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;

    case UNARY_PLUS_EXPR:
      {
	tree arg = TREE_OPERAND (*expr_p, 0);
	tree type = TREE_TYPE (*expr_p);
	*expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
					    : arg;
	ret = GS_OK;
      }
      break;

    case CALL_EXPR:
      ret = GS_OK;
      if (flag_strong_eval_order == 2
	  && CALL_EXPR_FN (*expr_p)
	  && cp_get_callee_fndecl_nofold (*expr_p) == NULL_TREE)
	{
	  tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
	  enum gimplify_status t
	    = gimplify_to_rvalue (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
				  is_gimple_call_addr);
	  if (t == GS_ERROR)
	    ret = GS_ERROR;
	  /* GIMPLE considers most pointer conversion useless, but for
	     calls we actually care about the exact function pointer type.  */
	  else if (TREE_TYPE (CALL_EXPR_FN (*expr_p)) != fnptrtype)
	    CALL_EXPR_FN (*expr_p)
	      = build1 (NOP_EXPR, fnptrtype, CALL_EXPR_FN (*expr_p));
	}
      if (!CALL_EXPR_FN (*expr_p))
	/* Internal function call.  */;
      else if (CALL_EXPR_REVERSE_ARGS (*expr_p))
	{
	  /* This is a call to a (compound) assignment operator that used
	     the operator syntax; gimplify the RHS first.  */
	  gcc_assert (call_expr_nargs (*expr_p) == 2);
	  gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p));
	  enum gimplify_status t
	    = cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, 1), pre_p, loc,
			       TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, 0)));
	  if (t == GS_ERROR)
	    ret = GS_ERROR;
	}
      else if (CALL_EXPR_ORDERED_ARGS (*expr_p))
	{
	  /* Leave the last argument for gimplify_call_expr, to avoid problems
	     with __builtin_va_arg_pack().  */
	  int nargs = call_expr_nargs (*expr_p) - 1;
	  int last_side_effects_arg = -1;
	  for (int i = nargs; i > 0; --i)
	    if (TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, i)))
	      {
		last_side_effects_arg = i;
		break;
	      }
	  for (int i = 0; i < nargs; ++i)
	    {
	      enum gimplify_status t
		= cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc,
				   i < last_side_effects_arg);
	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
      else if (flag_strong_eval_order
	       && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p))
	{
	  /* If flag_strong_eval_order, evaluate the object argument first.  */
	  tree fntype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
	  if (INDIRECT_TYPE_P (fntype))
	    fntype = TREE_TYPE (fntype);
	  if (TREE_CODE (fntype) == METHOD_TYPE)
	    {
	      int nargs = call_expr_nargs (*expr_p);
	      bool side_effects = false;
	      for (int i = 1; i < nargs; ++i)
		if (TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, i)))
		  {
		    side_effects = true;
		    break;
		  }
	      enum gimplify_status t
		= cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p, loc,
				   side_effects);
	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
      if (ret != GS_ERROR)
	{
	  tree decl = cp_get_callee_fndecl_nofold (*expr_p);
	  if (decl
	      && fndecl_built_in_p (decl, CP_BUILT_IN_IS_CONSTANT_EVALUATED,
				    BUILT_IN_FRONTEND))
	    *expr_p = boolean_false_node;
	  else if (decl
		   && fndecl_built_in_p (decl, CP_BUILT_IN_SOURCE_LOCATION,
					 BUILT_IN_FRONTEND))
	    *expr_p = fold_builtin_source_location (EXPR_LOCATION (*expr_p));
	}
      break;

    case TARGET_EXPR:
      /* A TARGET_EXPR that expresses direct-initialization should have been
	 elided by cp_gimplify_init_expr.  */
      gcc_checking_assert (!TARGET_EXPR_DIRECT_INIT_P (*expr_p));
      ret = GS_UNHANDLED;
      break;

    case PTRMEM_CST:
      *expr_p = cplus_expand_constant (*expr_p);
      if (TREE_CODE (*expr_p) == PTRMEM_CST)
	ret = GS_ERROR;
      else
	ret = GS_OK;
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (*expr_p, 0)
	  && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
	      || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
	{
	  expr_p = &TREE_OPERAND (*expr_p, 0);
	  /* Avoid going through the INIT_EXPR case, which can
	     degrade INIT_EXPRs into AGGR_INIT_EXPRs.  */
	  goto modify_expr_case;
	}
      /* Fall through.  */

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}

static inline bool
is_invisiref_parm (const_tree t)
{
  return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
	  && DECL_BY_REFERENCE (t));
}
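
/* Editorial note, not from the original file: is_invisiref_parm matches
   parameters and return values of class types that are passed or
   returned by invisible reference, e.g. the parameter in

     struct S { S (const S &); };
     void f (S s);

   Uses of such a decl are rewritten via convert_from_reference in
   cp_genericize_r below.  */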

/* Return true if the UIDs in both int tree maps are equal.  */

bool
cxx_int_tree_map_hasher::equal (cxx_int_tree_map *a, cxx_int_tree_map *b)
{
  return (a->uid == b->uid);
}

/* Hash a UID in a cxx_int_tree_map.  */

unsigned int
cxx_int_tree_map_hasher::hash (cxx_int_tree_map *item)
{
  return item->uid;
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* OpenMP context during genericization.  */

struct cp_genericize_omp_taskreg
{
  bool is_parallel;
  bool default_shared;
  struct cp_genericize_omp_taskreg *outer;
  splay_tree variables;
};

/* Return true if genericization should try to determine if
   DECL is firstprivate or shared within task regions.  */

static bool
omp_var_to_track (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (is_invisiref_parm (decl))
    type = TREE_TYPE (type);
  else if (TYPE_REF_P (type))
    type = TREE_TYPE (type);
  while (TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);
  if (type == error_mark_node || !CLASS_TYPE_P (type))
    return false;
  if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
    return false;
  if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    return false;
  return true;
}

/* Note DECL use in OpenMP region OMP_CTX during genericization.  */

static void
omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
{
  splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
					 (splay_tree_key) decl);
  if (n == NULL)
    {
      int flags = OMP_CLAUSE_DEFAULT_SHARED;
      if (omp_ctx->outer)
	omp_cxx_notice_variable (omp_ctx->outer, decl);
      if (!omp_ctx->default_shared)
	{
	  struct cp_genericize_omp_taskreg *octx;

	  for (octx = omp_ctx->outer; octx; octx = octx->outer)
	    {
	      n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
	      if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
		{
		  flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
		  break;
		}
	      if (octx->is_parallel)
		break;
	    }
	  if (octx == NULL
	      && (TREE_CODE (decl) == PARM_DECL
		  || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
		      && DECL_CONTEXT (decl) == current_function_decl)))
	    flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
	  if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
	    {
	      /* DECL is implicitly determined firstprivate in
		 the current task construct.  Ensure copy ctor and
		 dtor are instantiated, because during gimplification
		 it will be already too late.  */
	      tree type = TREE_TYPE (decl);
	      if (is_invisiref_parm (decl))
		type = TREE_TYPE (type);
	      else if (TYPE_REF_P (type))
		type = TREE_TYPE (type);
	      while (TREE_CODE (type) == ARRAY_TYPE)
		type = TREE_TYPE (type);
	      get_copy_ctor (type, tf_none);
	      get_dtor (type, tf_none);
	    }
	}
      splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
    }
}
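
/* Editorial sketch, not part of the original sources: in

     S s;	// class type, declared in the enclosing function
     #pragma omp task
       use (s);

   S is implicitly determined firstprivate for the task, so the code
   above instantiates S's copy constructor and destructor now; waiting
   until gimplification would be too late.  */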

/* Genericization context.  */

struct cp_genericize_data
{
  hash_set<tree> *p_set;
  vec<tree> bind_expr_stack;
  struct cp_genericize_omp_taskreg *omp_ctx;
  tree try_block;
  bool no_sanitize_p;
  bool handle_invisiref_parm_p;
};

/* Perform any pre-gimplification folding of C++ front end trees to
   GENERIC.
   Note:  The folding of non-OMP cases is something to move into
     the middle-end.  For now most folding is done only on GENERIC
     in fold-const, so we need to perform this before the transformation
     to GIMPLE form.  */

static tree
cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
{
  cp_fold_data *data = (cp_fold_data*)data_;
  tree stmt = *stmt_p;
  enum tree_code code = TREE_CODE (stmt);

  switch (code)
    {
    case VAR_DECL:
      /* In initializers replace anon union artificial VAR_DECLs
	 with their DECL_VALUE_EXPRs, as nothing will do it later.
	 Ditto for structured bindings.  */
      if (!data->genericize
	  && DECL_HAS_VALUE_EXPR_P (stmt)
	  && (DECL_ANON_UNION_VAR_P (stmt)
	      || (DECL_DECOMPOSITION_P (stmt) && DECL_DECOMP_BASE (stmt))))
	{
	  *stmt_p = stmt = unshare_expr (DECL_VALUE_EXPR (stmt));
	  break;
	}
      break;

    default:
      break;
    }

  *stmt_p = stmt = cp_fold (*stmt_p);

  if (data->pset.add (stmt))
    {
      /* Don't walk subtrees of stmts we've already walked once, otherwise
	 we can have exponential complexity with e.g. lots of nested
	 SAVE_EXPRs or TARGET_EXPRs.  cp_fold uses a cache and will return
	 always the same tree, which the first time cp_fold_r has been
	 called on it had the subtrees walked.  */
      *walk_subtrees = 0;
      return NULL;
    }

  code = TREE_CODE (stmt);
  if (code == OMP_FOR || code == OMP_SIMD || code == OMP_DISTRIBUTE
      || code == OMP_LOOP || code == OMP_TASKLOOP || code == OACC_LOOP)
    {
      tree x;
      int i, n;

      cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
      x = OMP_FOR_COND (stmt);
      if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
	{
	  cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
	  cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
	}
      else if (x && TREE_CODE (x) == TREE_VEC)
	{
	  n = TREE_VEC_LENGTH (x);
	  for (i = 0; i < n; i++)
	    {
	      tree o = TREE_VEC_ELT (x, i);
	      if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
		cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
	    }
	}
      x = OMP_FOR_INCR (stmt);
      if (x && TREE_CODE (x) == TREE_VEC)
	{
	  n = TREE_VEC_LENGTH (x);
	  for (i = 0; i < n; i++)
	    {
	      tree o = TREE_VEC_ELT (x, i);
	      if (o && TREE_CODE (o) == MODIFY_EXPR)
		o = TREE_OPERAND (o, 1);
	      if (o && (TREE_CODE (o) == PLUS_EXPR || TREE_CODE (o) == MINUS_EXPR
			|| TREE_CODE (o) == POINTER_PLUS_EXPR))
		{
		  cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
		  cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
		}
	    }
	}
      cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
      *walk_subtrees = 0;
    }

  return NULL;
}

/* Fold ALL the trees!  FIXME we should be able to remove this, but
   apparently that still causes optimization regressions.  */

void
cp_fold_function (tree fndecl)
{
  cp_fold_data data (/*genericize*/true);
  cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);
}

/* Turn SPACESHIP_EXPR EXPR into GENERIC.  */

static tree genericize_spaceship (tree expr)
{
  iloc_sentinel s (cp_expr_location (expr));
  tree type = TREE_TYPE (expr);
  tree op0 = TREE_OPERAND (expr, 0);
  tree op1 = TREE_OPERAND (expr, 1);
  return genericize_spaceship (type, op0, op1);
}

/* If EXPR involves an anonymous VLA type, prepend a DECL_EXPR for that type
   to trigger gimplify_type_sizes; otherwise a cast to pointer-to-VLA confuses
   the middle-end (c++/88256).  */

static tree
predeclare_vla (tree expr)
{
  tree type = TREE_TYPE (expr);
  if (type == error_mark_node)
    return expr;

  /* We need to strip pointers for gimplify_type_sizes.  */
  tree vla = type;
  while (POINTER_TYPE_P (vla))
    {
      if (TYPE_NAME (vla))
	return expr;
      vla = TREE_TYPE (vla);
    }
  if (TYPE_NAME (vla) || !variably_modified_type_p (vla, NULL_TREE))
    return expr;

  tree decl = build_decl (input_location, TYPE_DECL, NULL_TREE, vla);
  DECL_ARTIFICIAL (decl) = 1;
  TYPE_NAME (vla) = decl;
  tree dexp = build_stmt (input_location, DECL_EXPR, decl);
  expr = build2 (COMPOUND_EXPR, type, dexp, expr);
  return expr;
}
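
/* Editorial example, not from the original file: for a cast such as

     (int (*)[n]) p

   where int[n] is an anonymous VLA type, predeclare_vla yields roughly

     COMPOUND_EXPR <DECL_EXPR <TYPE_DECL for int[n]>, (int (*)[n]) p>

   so gimplify_type_sizes sees the VLA type before the cast uses it
   (c++/88256).  */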

/* Perform any pre-gimplification lowering of C++ front end trees to
   GENERIC.  */

static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  hash_set<tree> *p_set = wtd->p_set;

  /* If in an OpenMP context, note var uses.  */
  if (__builtin_expect (wtd->omp_ctx != NULL, 0)
      && (VAR_P (stmt)
	  || TREE_CODE (stmt) == PARM_DECL
	  || TREE_CODE (stmt) == RESULT_DECL)
      && omp_var_to_track (stmt))
    omp_cxx_notice_variable (wtd->omp_ctx, stmt);

  /* Don't dereference parms in a thunk; pass the references through.  */
  if ((TREE_CODE (stmt) == CALL_EXPR && CALL_FROM_THUNK_P (stmt))
      || (TREE_CODE (stmt) == AGGR_INIT_EXPR && AGGR_INIT_FROM_THUNK_P (stmt)))
    {
      *walk_subtrees = 0;
      return NULL;
    }

  /* Dereference invisible reference parms.  */
  if (wtd->handle_invisiref_parm_p && is_invisiref_parm (stmt))
    {
      *stmt_p = convert_from_reference (stmt);
      p_set->add (*stmt_p);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (cp_function_chain->extern_decl_map
      && VAR_OR_FUNCTION_DECL_P (stmt)
      && DECL_EXTERNAL (stmt))
    {
      struct cxx_int_tree_map *h, in;
      in.uid = DECL_UID (stmt);
      h = cp_function_chain->extern_decl_map->find_with_hash (&in, in.uid);
      if (h)
	{
	  *stmt_p = h->to;
	  TREE_USED (h->to) |= TREE_USED (stmt);
	  *walk_subtrees = 0;
	  return NULL;
	}
    }

  if (TREE_CODE (stmt) == INTEGER_CST
      && TYPE_REF_P (TREE_TYPE (stmt))
      && (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
      && !wtd->no_sanitize_p)
    {
      ubsan_maybe_instrument_reference (stmt_p);
      if (*stmt_p != stmt)
	{
	  *walk_subtrees = 0;
	  return NULL_TREE;
	}
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (p_set->contains (stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  switch (TREE_CODE (stmt))
    {
    case ADDR_EXPR:
      if (is_invisiref_parm (TREE_OPERAND (stmt, 0)))
	{
	  /* If in an OpenMP context, note var uses.  */
	  if (__builtin_expect (wtd->omp_ctx != NULL, 0)
	      && omp_var_to_track (TREE_OPERAND (stmt, 0)))
	    omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
	  *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
	  *walk_subtrees = 0;
	}
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (stmt, 0) && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
	/* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
	*walk_subtrees = 0;
      break;

    case OMP_CLAUSE:
      switch (OMP_CLAUSE_CODE (stmt))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Don't dereference an invisiref in OpenMP clauses.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    {
	      *walk_subtrees = 0;
	      if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
		cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
			      cp_genericize_r, data, NULL);
	    }
	  break;
	case OMP_CLAUSE_PRIVATE:
	  /* Don't dereference an invisiref in OpenMP clauses.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    *walk_subtrees = 0;
	  else if (wtd->omp_ctx != NULL)
	    {
	      /* Private clause doesn't cause any references to the
		 var in outer contexts, avoid calling
		 omp_cxx_notice_variable for it.  */
	      struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
	      wtd->omp_ctx = NULL;
	      cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
			    data, NULL);
	      wtd->omp_ctx = old;
	      *walk_subtrees = 0;
	    }
	  break;
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_INCLUSIVE:
	case OMP_CLAUSE_EXCLUSIVE:
	  /* Don't dereference an invisiref in OpenMP clauses.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    *walk_subtrees = 0;
	  break;
	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	case OMP_CLAUSE_TASK_REDUCTION:
	  /* Don't dereference an invisiref in reduction clause's
	     OMP_CLAUSE_DECL either.  OMP_CLAUSE_REDUCTION_{INIT,MERGE}
	     still needs to be genericized.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    {
	      *walk_subtrees = 0;
	      if (OMP_CLAUSE_REDUCTION_INIT (stmt))
		cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
			      cp_genericize_r, data, NULL);
	      if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
		cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
			      cp_genericize_r, data, NULL);
	    }
	  break;
	default:
	  break;
	}
      break;

    /* Due to the way voidify_wrapper_expr is written, we don't get a chance
       to lower this construct before scanning it, so we need to lower these
       before doing anything else.  */
    case CLEANUP_STMT:
      *stmt_p = build2_loc (EXPR_LOCATION (stmt),
			    CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
						   : TRY_FINALLY_EXPR,
			    void_type_node,
			    CLEANUP_BODY (stmt),
			    CLEANUP_EXPR (stmt));
      break;

    case IF_STMT:
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);

    /* COND_EXPR might have incompatible types in branches if one or both
       arms are bitfields.  Fix it up now.  */
    case COND_EXPR:
      {
	tree type_left
	  = (TREE_OPERAND (stmt, 1)
	     ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
	     : NULL_TREE);
	tree type_right
	  = (TREE_OPERAND (stmt, 2)
	     ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
	     : NULL_TREE);
	if (type_left
	    && !useless_type_conversion_p (TREE_TYPE (stmt),
					   TREE_TYPE (TREE_OPERAND (stmt, 1))))
	  {
	    TREE_OPERAND (stmt, 1)
	      = fold_convert (type_left, TREE_OPERAND (stmt, 1));
	    gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						   type_left));
	  }
	if (type_right
	    && !useless_type_conversion_p (TREE_TYPE (stmt),
					   TREE_TYPE (TREE_OPERAND (stmt, 2))))
	  {
	    TREE_OPERAND (stmt, 2)
	      = fold_convert (type_right, TREE_OPERAND (stmt, 2));
	    gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						   type_right));
	  }
      }
      break;

    case BIND_EXPR:
      if (__builtin_expect (wtd->omp_ctx != NULL, 0))
	{
	  tree decl;
	  for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
	    if (VAR_P (decl)
		&& !DECL_EXTERNAL (decl)
		&& omp_var_to_track (decl))
	      {
		splay_tree_node n
		  = splay_tree_lookup (wtd->omp_ctx->variables,
				       (splay_tree_key) decl);
		if (n == NULL)
		  splay_tree_insert (wtd->omp_ctx->variables,
				     (splay_tree_key) decl,
				     TREE_STATIC (decl)
				     ? OMP_CLAUSE_DEFAULT_SHARED
				     : OMP_CLAUSE_DEFAULT_PRIVATE);
	      }
	}
      if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
	{
	  /* The point here is to not sanitize static initializers.  */
	  bool no_sanitize_p = wtd->no_sanitize_p;
	  wtd->no_sanitize_p = true;
	  for (tree decl = BIND_EXPR_VARS (stmt);
	       decl;
	       decl = DECL_CHAIN (decl))
	    if (VAR_P (decl)
		&& TREE_STATIC (decl)
		&& DECL_INITIAL (decl))
	      cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
	  wtd->no_sanitize_p = no_sanitize_p;
	}
      wtd->bind_expr_stack.safe_push (stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
		    cp_genericize_r, data, NULL);
      wtd->bind_expr_stack.pop ();
      break;

    case USING_STMT:
      {
	tree block = NULL_TREE;

	/* Get the innermost enclosing GIMPLE_BIND that has a non-NULL
	   BLOCK, and append an IMPORTED_DECL to its
	   BLOCK_VARS chained list.  */
	if (wtd->bind_expr_stack.exists ())
	  {
	    int i;
	    for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
	      if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
		break;
	  }
	if (block)
	  {
	    tree decl = TREE_OPERAND (stmt, 0);
	    gcc_assert (decl);

	    if (undeduced_auto_decl (decl))
	      /* Omit from the GENERIC, the back-end can't handle it.  */;
	    else
	      {
		tree using_directive = make_node (IMPORTED_DECL);
		TREE_TYPE (using_directive) = void_type_node;
		DECL_SOURCE_LOCATION (using_directive)
		  = cp_expr_loc_or_input_loc (stmt);

		IMPORTED_DECL_ASSOCIATED_DECL (using_directive) = decl;
		DECL_CHAIN (using_directive) = BLOCK_VARS (block);
		BLOCK_VARS (block) = using_directive;
	      }
	  }
	/* The USING_STMT won't appear in GENERIC.  */
	*stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
	*walk_subtrees = 0;
      }
      break;

    case DECL_EXPR:
      if (TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
	{
	  /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
	  *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
	  *walk_subtrees = 0;
	}
      else
	{
	  tree d = DECL_EXPR_DECL (stmt);
	  if (VAR_P (d))
	    gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
	}
      break;

    case OMP_PARALLEL:
    case OMP_TASK:
    case OMP_TASKLOOP:
      {
	struct cp_genericize_omp_taskreg omp_ctx;
	tree c, decl;
	splay_tree_node n;

	*walk_subtrees = 0;
	cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
	omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
	omp_ctx.default_shared = omp_ctx.is_parallel;
	omp_ctx.outer = wtd->omp_ctx;
	omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
	wtd->omp_ctx = &omp_ctx;
	for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	  switch (OMP_CLAUSE_CODE (c))
	    {
	    case OMP_CLAUSE_SHARED:
	    case OMP_CLAUSE_PRIVATE:
	    case OMP_CLAUSE_FIRSTPRIVATE:
	    case OMP_CLAUSE_LASTPRIVATE:
	      decl = OMP_CLAUSE_DECL (c);
	      if (decl == error_mark_node || !omp_var_to_track (decl))
		break;
	      n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
	      if (n != NULL)
		break;
	      splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
				 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
				 ? OMP_CLAUSE_DEFAULT_SHARED
				 : OMP_CLAUSE_DEFAULT_PRIVATE);
	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE && omp_ctx.outer)
		omp_cxx_notice_variable (omp_ctx.outer, decl);
	      break;
	    case OMP_CLAUSE_DEFAULT:
	      if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
		omp_ctx.default_shared = true;
	    default:
	      break;
	    }
	if (TREE_CODE (stmt) == OMP_TASKLOOP)
	  genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
	else
	  cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
	wtd->omp_ctx = omp_ctx.outer;
	splay_tree_delete (omp_ctx.variables);
      }
      break;

    case TRY_BLOCK:
      {
	*walk_subtrees = 0;
	tree try_block = wtd->try_block;
	wtd->try_block = stmt;
	cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
	wtd->try_block = try_block;
	cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
      }
      break;

    case MUST_NOT_THROW_EXPR:
      /* MUST_NOT_THROW_COND might be something else with TM.  */
      if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
	{
	  *walk_subtrees = 0;
	  tree try_block = wtd->try_block;
	  wtd->try_block = stmt;
	  cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
	  wtd->try_block = try_block;
	}
      break;

    case THROW_EXPR:
      {
	location_t loc = location_of (stmt);
	if (TREE_NO_WARNING (stmt))
	  /* Never mind.  */;
	else if (wtd->try_block)
	  {
	    if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR)
	      {
		auto_diagnostic_group d;
		if (warning_at (loc, OPT_Wterminate,
				"%<throw%> will always call %<terminate%>")
		    && cxx_dialect >= cxx11
		    && DECL_DESTRUCTOR_P (current_function_decl))
		  inform (loc, "in C++11 destructors default to %<noexcept%>");
	      }
	  }
	else
	  {
	    if (warn_cxx11_compat && cxx_dialect < cxx11
		&& DECL_DESTRUCTOR_P (current_function_decl)
		&& (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
		    == NULL_TREE)
		&& (get_defaulted_eh_spec (current_function_decl)
		    == empty_except_spec))
	      warning_at (loc, OPT_Wc__11_compat,
1721 			  "in C++11 this %<throw%> will call %<terminate%> "
1722 			  "because destructors default to %<noexcept%>");
1723 	  }
1724       }
1725       break;
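
      /* An illustrative snippet that triggers the -Wterminate warning
	 above: in C++11 and later destructors default to noexcept, so the
	 throw below can never propagate out of the destructor:

	     struct S {
	       ~S () { throw 1; }  // warning: will always call terminate
	     };  */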
1726 
1727     case CONVERT_EXPR:
1728       gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
1729       break;
1730 
1731     case FOR_STMT:
1732       genericize_for_stmt (stmt_p, walk_subtrees, data);
1733       break;
1734 
1735     case WHILE_STMT:
1736       genericize_while_stmt (stmt_p, walk_subtrees, data);
1737       break;
1738 
1739     case DO_STMT:
1740       genericize_do_stmt (stmt_p, walk_subtrees, data);
1741       break;
1742 
1743     case SWITCH_STMT:
1744       genericize_switch_stmt (stmt_p, walk_subtrees, data);
1745       break;
1746 
1747     case CONTINUE_STMT:
1748       genericize_continue_stmt (stmt_p);
1749       break;
1750 
1751     case BREAK_STMT:
1752       genericize_break_stmt (stmt_p);
1753       break;
1754 
1755     case SPACESHIP_EXPR:
1756       *stmt_p = genericize_spaceship (*stmt_p);
1757       break;
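
      /* A sketch of the lowering above (illustrative, not literal output):
	 for int a, b the expression  a <=> b  becomes roughly

	     a == b ? std::strong_ordering::equal
	   : a < b  ? std::strong_ordering::less
	   :          std::strong_ordering::greater

	 where the actual GENERIC uses the comparison category constants.  */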
1758 
1759     case OMP_DISTRIBUTE:
1760       /* We need to explicitly instantiate copy constructors for class
1761 	 iterators of a composite distribute parallel for.  */
1762       if (OMP_FOR_INIT (*stmt_p) == NULL_TREE)
1763 	{
1764 	  tree *data[4] = { NULL, NULL, NULL, NULL };
1765 	  tree inner = walk_tree (&OMP_FOR_BODY (*stmt_p),
1766 				  find_combined_omp_for, data, NULL);
1767 	  if (inner != NULL_TREE
1768 	      && TREE_CODE (inner) == OMP_FOR)
1769 	    {
1770 	      for (int i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner)); i++)
1771 		if (OMP_FOR_ORIG_DECLS (inner)
1772 		    && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner),
1773 				  i)) == TREE_LIST
1774 		    && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner),
1775 				     i)))
1776 		  {
1777 		    tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner), i);
1778 		    /* Class iterators aren't allowed on OMP_SIMD, so the only
1779 		       case we need to solve is distribute parallel for.  */
1780 		    gcc_assert (TREE_CODE (inner) == OMP_FOR
1781 				&& data[1]);
1782 		    tree orig_decl = TREE_PURPOSE (orig);
1783 		    tree c, cl = NULL_TREE;
1784 		    for (c = OMP_FOR_CLAUSES (inner);
1785 			 c; c = OMP_CLAUSE_CHAIN (c))
1786 		      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
1787 			   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
1788 			  && OMP_CLAUSE_DECL (c) == orig_decl)
1789 			{
1790 			  cl = c;
1791 			  break;
1792 			}
1793 		    if (cl == NULL_TREE)
1794 		      {
1795 			for (c = OMP_PARALLEL_CLAUSES (*data[1]);
1796 			     c; c = OMP_CLAUSE_CHAIN (c))
1797 			  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
1798 			      && OMP_CLAUSE_DECL (c) == orig_decl)
1799 			    {
1800 			      cl = c;
1801 			      break;
1802 			    }
1803 		      }
1804 		    if (cl)
1805 		      {
1806 			orig_decl = require_complete_type (orig_decl);
1807 			tree inner_type = TREE_TYPE (orig_decl);
1808 			if (orig_decl == error_mark_node)
1809 			  continue;
1810 			if (TYPE_REF_P (TREE_TYPE (orig_decl)))
1811 			  inner_type = TREE_TYPE (inner_type);
1812 
1813 			while (TREE_CODE (inner_type) == ARRAY_TYPE)
1814 			  inner_type = TREE_TYPE (inner_type);
1815 			get_copy_ctor (inner_type, tf_warning_or_error);
1816 		      }
1817 		}
1818 	    }
1819 	}
1820       /* FALLTHRU */
1821     case OMP_FOR:
1822     case OMP_SIMD:
1823     case OMP_LOOP:
1824     case OACC_LOOP:
1825       genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
1826       break;
1827 
1828     case PTRMEM_CST:
1829       /* By the time we get here we're handing off to the back end, so we don't
1830 	 need or want to preserve PTRMEM_CST anymore.  */
1831       *stmt_p = cplus_expand_constant (stmt);
1832       *walk_subtrees = 0;
1833       break;
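
      /* E.g. (illustrative) a pointer-to-data-member constant such as
	 &S::m is expanded by cplus_expand_constant into its low-level
	 representation, typically the byte offset of m within S.  */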
1834 
1835     case MEM_REF:
1836       /* For MEM_REF, make sure not to sanitize the second operand even
1837 	 if it has reference type.  It is just an offset with a type
1838 	 holding other information.  There is no other processing we
1839 	 need to do for INTEGER_CSTs, so just ignore the second argument
1840 	 unconditionally.  */
1841       cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
1842       *walk_subtrees = 0;
1843       break;
1844 
1845     case NOP_EXPR:
1846       *stmt_p = predeclare_vla (*stmt_p);
1847       if (!wtd->no_sanitize_p
1848 	  && sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT)
1849 	  && TYPE_REF_P (TREE_TYPE (stmt)))
1850 	ubsan_maybe_instrument_reference (stmt_p);
1851       break;
1852 
1853     case CALL_EXPR:
1854       /* Evaluate function concept checks instead of treating them as
1855 	 normal functions.  */
1856       if (concept_check_p (stmt))
1857 	{
1858 	  *stmt_p = evaluate_concept_check (stmt, tf_warning_or_error);
1859 	  *walk_subtrees = 0;
1860 	  break;
1861 	}
1862 
1863       if (!wtd->no_sanitize_p
1864 	  && sanitize_flags_p ((SANITIZE_NULL
1865 				| SANITIZE_ALIGNMENT | SANITIZE_VPTR)))
1866 	{
1867 	  tree fn = CALL_EXPR_FN (stmt);
1868 	  if (fn != NULL_TREE
1869 	      && !error_operand_p (fn)
1870 	      && INDIRECT_TYPE_P (TREE_TYPE (fn))
1871 	      && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
1872 	    {
1873 	      bool is_ctor
1874 		= TREE_CODE (fn) == ADDR_EXPR
1875 		  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
1876 		  && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
1877 	      if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT))
1878 		ubsan_maybe_instrument_member_call (stmt, is_ctor);
1879 	      if (sanitize_flags_p (SANITIZE_VPTR) && !is_ctor)
1880 		cp_ubsan_maybe_instrument_member_call (stmt);
1881 	    }
1882 	  else if (fn == NULL_TREE
1883 		   && CALL_EXPR_IFN (stmt) == IFN_UBSAN_NULL
1884 		   && TREE_CODE (CALL_EXPR_ARG (stmt, 0)) == INTEGER_CST
1885 		   && TYPE_REF_P (TREE_TYPE (CALL_EXPR_ARG (stmt, 0))))
1886 	    *walk_subtrees = 0;
1887 	}
1888       /* Fall through.  */
1889     case AGGR_INIT_EXPR:
1890       /* For calls to a multi-versioned function, overload resolution
1891 	 returns the function with the highest target priority, that is,
1892 	 the version that will be checked for dispatching first.  If this
1893 	 version is inlinable, a direct call to this version can be made;
1894 	 otherwise the call should go through the dispatcher.  */
1895       {
1896 	tree fn = cp_get_callee_fndecl_nofold (stmt);
1897 	if (fn && DECL_FUNCTION_VERSIONED (fn)
1898 	    && (current_function_decl == NULL
1899 		|| !targetm.target_option.can_inline_p (current_function_decl,
1900 							fn)))
1901 	  if (tree dis = get_function_version_dispatcher (fn))
1902 	    {
1903 	      mark_versions_used (dis);
1904 	      dis = build_address (dis);
1905 	      if (TREE_CODE (stmt) == CALL_EXPR)
1906 		CALL_EXPR_FN (stmt) = dis;
1907 	      else
1908 		AGGR_INIT_EXPR_FN (stmt) = dis;
1909 	    }
1910       }
1911       break;
1912 
1913     case TARGET_EXPR:
1914       if (TARGET_EXPR_INITIAL (stmt)
1915 	  && TREE_CODE (TARGET_EXPR_INITIAL (stmt)) == CONSTRUCTOR
1916 	  && CONSTRUCTOR_PLACEHOLDER_BOUNDARY (TARGET_EXPR_INITIAL (stmt)))
1917 	TARGET_EXPR_NO_ELIDE (stmt) = 1;
1918       break;
1919 
1920     case REQUIRES_EXPR:
1921       /* Emit the value of the requires-expression.  */
1922       *stmt_p = constant_boolean_node (constraints_satisfied_p (stmt),
1923 				       boolean_type_node);
1924       *walk_subtrees = 0;
1925       break;
1926 
1927     case TEMPLATE_ID_EXPR:
1928       gcc_assert (concept_check_p (stmt));
1929       /* Emit the value of the concept check.  */
1930       *stmt_p = evaluate_concept_check (stmt, tf_warning_or_error);
1931       *walk_subtrees = 0;
1932       break;
1933 
1934     case STATEMENT_LIST:
1935       if (TREE_SIDE_EFFECTS (stmt))
1936 	{
1937 	  tree_stmt_iterator i;
1938 	  int nondebug_stmts = 0;
1939 	  bool clear_side_effects = true;
1940 	  /* Genericization can clear TREE_SIDE_EFFECTS, e.g. when
1941 	     transforming an IF_STMT into a COND_EXPR.  If such a stmt
1942 	     appears in a STATEMENT_LIST that contains only that stmt and
1943 	     some DEBUG_BEGIN_STMTs, then without -g (where the
1944 	     STATEMENT_LIST wouldn't be present at all) the resulting
1945 	     expression wouldn't have TREE_SIDE_EFFECTS set, so make sure
1946 	     to clear it on the STATEMENT_LIST as well in such cases.  */
1947 	  for (i = tsi_start (stmt); !tsi_end_p (i); tsi_next (&i))
1948 	    {
1949 	      tree t = tsi_stmt (i);
1950 	      if (TREE_CODE (t) != DEBUG_BEGIN_STMT && nondebug_stmts < 2)
1951 		nondebug_stmts++;
1952 	      cp_walk_tree (tsi_stmt_ptr (i), cp_genericize_r, data, NULL);
1953 	      if (TREE_CODE (t) != DEBUG_BEGIN_STMT
1954 		  && (nondebug_stmts > 1 || TREE_SIDE_EFFECTS (tsi_stmt (i))))
1955 		clear_side_effects = false;
1956 	    }
1957 	  if (clear_side_effects)
1958 	    TREE_SIDE_EFFECTS (stmt) = 0;
1959 	  *walk_subtrees = 0;
1960 	}
1961       break;
1962 
1963     default:
1964       if (IS_TYPE_OR_DECL_P (stmt))
1965 	*walk_subtrees = 0;
1966       break;
1967     }
1968 
1969   p_set->add (*stmt_p);
1970 
1971   return NULL;
1972 }
1973 
1974 /* Lower C++ front end trees to GENERIC in T_P.  */
1975 
1976 static void
1977 cp_genericize_tree (tree* t_p, bool handle_invisiref_parm_p)
1978 {
1979   struct cp_genericize_data wtd;
1980 
1981   wtd.p_set = new hash_set<tree>;
1982   wtd.bind_expr_stack.create (0);
1983   wtd.omp_ctx = NULL;
1984   wtd.try_block = NULL_TREE;
1985   wtd.no_sanitize_p = false;
1986   wtd.handle_invisiref_parm_p = handle_invisiref_parm_p;
1987   cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
1988   delete wtd.p_set;
1989   wtd.bind_expr_stack.release ();
1990   if (sanitize_flags_p (SANITIZE_VPTR))
1991     cp_ubsan_instrument_member_accesses (t_p);
1992 }
1993 
1994 /* If a non-void function doesn't obviously end with a return,
1995    add ubsan instrumentation code to verify it at runtime.
1996    If -fsanitize=return is not enabled, instrument
1997    __builtin_unreachable instead.  */
1998 
1999 static void
2000 cp_maybe_instrument_return (tree fndecl)
2001 {
2002   if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
2003       || DECL_CONSTRUCTOR_P (fndecl)
2004       || DECL_DESTRUCTOR_P (fndecl)
2005       || !targetm.warn_func_return (fndecl))
2006     return;
2007 
2008   if (!sanitize_flags_p (SANITIZE_RETURN, fndecl)
2009       /* Don't add __builtin_unreachable () if not optimizing; it will not
2010 	 improve any optimizations in that case, just break code that has UB.
2011 	 Don't add it for -fsanitize=unreachable -fno-sanitize=return either:
2012 	 UBSan covers this with ubsan_instrument_return above, where
2013 	 sufficient information is provided, while the __builtin_unreachable ()
2014 	 below, if return sanitization is disabled, would just result in a
2015 	 hard-to-understand runtime error without a location.  */
2016       && (!optimize
2017 	  || sanitize_flags_p (SANITIZE_UNREACHABLE, fndecl)))
2018     return;
2019 
2020   tree t = DECL_SAVED_TREE (fndecl);
2021   while (t)
2022     {
2023       switch (TREE_CODE (t))
2024 	{
2025 	case BIND_EXPR:
2026 	  t = BIND_EXPR_BODY (t);
2027 	  continue;
2028 	case TRY_FINALLY_EXPR:
2029 	case CLEANUP_POINT_EXPR:
2030 	  t = TREE_OPERAND (t, 0);
2031 	  continue;
2032 	case STATEMENT_LIST:
2033 	  {
2034 	    tree_stmt_iterator i = tsi_last (t);
2035 	    while (!tsi_end_p (i))
2036 	      {
2037 		tree p = tsi_stmt (i);
2038 		if (TREE_CODE (p) != DEBUG_BEGIN_STMT)
2039 		  break;
2040 		tsi_prev (&i);
2041 	      }
2042 	    if (!tsi_end_p (i))
2043 	      {
2044 		t = tsi_stmt (i);
2045 		continue;
2046 	      }
2047 	  }
2048 	  break;
2049 	case RETURN_EXPR:
2050 	  return;
2051 	default:
2052 	  break;
2053 	}
2054       break;
2055     }
2056   if (t == NULL_TREE)
2057     return;
2058   tree *p = &DECL_SAVED_TREE (fndecl);
2059   if (TREE_CODE (*p) == BIND_EXPR)
2060     p = &BIND_EXPR_BODY (*p);
2061 
2062   location_t loc = DECL_SOURCE_LOCATION (fndecl);
2063   if (sanitize_flags_p (SANITIZE_RETURN, fndecl))
2064     t = ubsan_instrument_return (loc);
2065   else
2066     {
2067       tree fndecl = builtin_decl_explicit (BUILT_IN_UNREACHABLE);
2068       t = build_call_expr_loc (BUILTINS_LOCATION, fndecl, 0);
2069     }
2070 
2071   append_to_statement_list (t, p);
2072 }
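
/* An illustrative (hypothetical) function the routine above instruments:

     int f (int x)
     {
       if (x > 0)
	 return 1;
       // control can fall off the end here when x <= 0
     }

   With -fsanitize=return a UBSan check reporting the missing return is
   appended to the body; otherwise, when optimizing, a call to
   __builtin_unreachable () is appended instead.  */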
2073 
2074 void
2075 cp_genericize (tree fndecl)
2076 {
2077   tree t;
2078 
2079   /* Fix up the types of parms passed by invisible reference.  */
2080   for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
2081     if (TREE_ADDRESSABLE (TREE_TYPE (t)))
2082       {
2083 	/* If a function's arguments are copied to create a thunk,
2084 	   then DECL_BY_REFERENCE will be set -- but the type of the
2085 	   argument will be a pointer type, so we will never get
2086 	   here.  */
2087 	gcc_assert (!DECL_BY_REFERENCE (t));
2088 	gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
2089 	TREE_TYPE (t) = DECL_ARG_TYPE (t);
2090 	DECL_BY_REFERENCE (t) = 1;
2091 	TREE_ADDRESSABLE (t) = 0;
2092 	relayout_decl (t);
2093       }
2094 
2095   /* Do the same for the return value.  */
2096   if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
2097     {
2098       t = DECL_RESULT (fndecl);
2099       TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
2100       DECL_BY_REFERENCE (t) = 1;
2101       TREE_ADDRESSABLE (t) = 0;
2102       relayout_decl (t);
2103       if (DECL_NAME (t))
2104 	{
2105 	  /* Adjust DECL_VALUE_EXPR of the original var.  */
2106 	  tree outer = outer_curly_brace_block (current_function_decl);
2107 	  tree var;
2108 
2109 	  if (outer)
2110 	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
2111 	      if (VAR_P (var)
2112 		  && DECL_NAME (t) == DECL_NAME (var)
2113 		  && DECL_HAS_VALUE_EXPR_P (var)
2114 		  && DECL_VALUE_EXPR (var) == t)
2115 		{
2116 		  tree val = convert_from_reference (t);
2117 		  SET_DECL_VALUE_EXPR (var, val);
2118 		  break;
2119 		}
2120 	}
2121     }
2122 
2123   /* If we're a clone, the body is already GIMPLE.  */
2124   if (DECL_CLONED_FUNCTION_P (fndecl))
2125     return;
2126 
2127   /* Allow cp_genericize calls to be nested.  */
2128   tree save_bc_label[2];
2129   save_bc_label[bc_break] = bc_label[bc_break];
2130   save_bc_label[bc_continue] = bc_label[bc_continue];
2131   bc_label[bc_break] = NULL_TREE;
2132   bc_label[bc_continue] = NULL_TREE;
2133 
2134   /* We do want to see every occurrence of the parms, so we can't just use
2135      walk_tree's hash functionality.  */
2136   cp_genericize_tree (&DECL_SAVED_TREE (fndecl), true);
2137 
2138   cp_maybe_instrument_return (fndecl);
2139 
2140   /* Do everything else.  */
2141   c_genericize (fndecl);
2142 
2143   gcc_assert (bc_label[bc_break] == NULL);
2144   gcc_assert (bc_label[bc_continue] == NULL);
2145   bc_label[bc_break] = save_bc_label[bc_break];
2146   bc_label[bc_continue] = save_bc_label[bc_continue];
2147 }
2148 
2149 /* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
2150    NULL if there is in fact nothing to do.  ARG2 may be NULL if FN
2151    actually only takes one argument.  */
2152 
2153 static tree
2154 cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
2155 {
2156   tree defparm, parm, t;
2157   int i = 0;
2158   int nargs;
2159   tree *argarray;
2160 
2161   if (fn == NULL)
2162     return NULL;
2163 
2164   nargs = list_length (DECL_ARGUMENTS (fn));
2165   argarray = XALLOCAVEC (tree, nargs);
2166 
2167   defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
2168   if (arg2)
2169     defparm = TREE_CHAIN (defparm);
2170 
2171   bool is_method = TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE;
2172   if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
2173     {
2174       tree inner_type = TREE_TYPE (arg1);
2175       tree start1, end1, p1;
2176       tree start2 = NULL, p2 = NULL;
2177       tree ret = NULL, lab;
2178 
2179       start1 = arg1;
2180       start2 = arg2;
2181       do
2182 	{
2183 	  inner_type = TREE_TYPE (inner_type);
2184 	  start1 = build4 (ARRAY_REF, inner_type, start1,
2185 			   size_zero_node, NULL, NULL);
2186 	  if (arg2)
2187 	    start2 = build4 (ARRAY_REF, inner_type, start2,
2188 			     size_zero_node, NULL, NULL);
2189 	}
2190       while (TREE_CODE (inner_type) == ARRAY_TYPE);
2191       start1 = build_fold_addr_expr_loc (input_location, start1);
2192       if (arg2)
2193 	start2 = build_fold_addr_expr_loc (input_location, start2);
2194 
2195       end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
2196       end1 = fold_build_pointer_plus (start1, end1);
2197 
2198       p1 = create_tmp_var (TREE_TYPE (start1));
2199       t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
2200       append_to_statement_list (t, &ret);
2201 
2202       if (arg2)
2203 	{
2204 	  p2 = create_tmp_var (TREE_TYPE (start2));
2205 	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
2206 	  append_to_statement_list (t, &ret);
2207 	}
2208 
2209       lab = create_artificial_label (input_location);
2210       t = build1 (LABEL_EXPR, void_type_node, lab);
2211       append_to_statement_list (t, &ret);
2212 
2213       argarray[i++] = p1;
2214       if (arg2)
2215 	argarray[i++] = p2;
2216       /* Handle default arguments.  */
2217       for (parm = defparm; parm && parm != void_list_node;
2218 	   parm = TREE_CHAIN (parm), i++)
2219 	argarray[i] = convert_default_arg (TREE_VALUE (parm),
2220 					   TREE_PURPOSE (parm), fn,
2221 					   i - is_method, tf_warning_or_error);
2222       t = build_call_a (fn, i, argarray);
2223       t = fold_convert (void_type_node, t);
2224       t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
2225       append_to_statement_list (t, &ret);
2226 
2227       t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
2228       t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
2229       append_to_statement_list (t, &ret);
2230 
2231       if (arg2)
2232 	{
2233 	  t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
2234 	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
2235 	  append_to_statement_list (t, &ret);
2236 	}
2237 
2238       t = build2 (NE_EXPR, boolean_type_node, p1, end1);
2239       t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
2240       append_to_statement_list (t, &ret);
2241 
2242       return ret;
2243     }
2244   else
2245     {
2246       argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
2247       if (arg2)
2248 	argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
2249       /* Handle default arguments.  */
2250       for (parm = defparm; parm && parm != void_list_node;
2251 	   parm = TREE_CHAIN (parm), i++)
2252 	argarray[i] = convert_default_arg (TREE_VALUE (parm),
2253 					   TREE_PURPOSE (parm), fn,
2254 					   i - is_method, tf_warning_or_error);
2255       t = build_call_a (fn, i, argarray);
2256       t = fold_convert (void_type_node, t);
2257       return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
2258     }
2259 }
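
/* A sketch of the GENERIC built above for array operands, in C-like
   pseudocode (all names illustrative):

     p1 = &arg1[0]...[0];              // address of the first element
     end1 = p1 + sizeof (arg1);
   lab:
     fn (p1, p2, <default args>);      // apply FN to one element (pair)
     p1 += sizeof (element);
     p2 += sizeof (element);           // only when ARG2 is given
     if (p1 != end1) goto lab;         // iterate over all elements  */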
2260 
2261 /* Return code to initialize DECL with its default constructor, or
2262    NULL if there's nothing to do.  */
2263 
2264 tree
2265 cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
2266 {
2267   tree info = CP_OMP_CLAUSE_INFO (clause);
2268   tree ret = NULL;
2269 
2270   if (info)
2271     ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);
2272 
2273   return ret;
2274 }
2275 
2276 /* Return code to initialize DST with a copy constructor from SRC.  */
2277 
2278 tree
2279 cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
2280 {
2281   tree info = CP_OMP_CLAUSE_INFO (clause);
2282   tree ret = NULL;
2283 
2284   if (info)
2285     ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
2286   if (ret == NULL)
2287     ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
2288 
2289   return ret;
2290 }
2291 
2292 /* Similarly, except use an assignment operator instead.  */
2293 
2294 tree
2295 cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
2296 {
2297   tree info = CP_OMP_CLAUSE_INFO (clause);
2298   tree ret = NULL;
2299 
2300   if (info)
2301     ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
2302   if (ret == NULL)
2303     ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
2304 
2305   return ret;
2306 }
2307 
2308 /* Return code to destroy DECL.  */
2309 
2310 tree
2311 cxx_omp_clause_dtor (tree clause, tree decl)
2312 {
2313   tree info = CP_OMP_CLAUSE_INFO (clause);
2314   tree ret = NULL;
2315 
2316   if (info)
2317     ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);
2318 
2319   return ret;
2320 }
2321 
2322 /* True if OpenMP should privatize what this DECL points to rather
2323    than the DECL itself.  */
2324 
2325 bool
2326 cxx_omp_privatize_by_reference (const_tree decl)
2327 {
2328   return (TYPE_REF_P (TREE_TYPE (decl))
2329 	  || is_invisiref_parm (decl));
2330 }
2331 
2332 /* Return true if DECL is a const-qualified var having no mutable member.  */
2333 bool
2334 cxx_omp_const_qual_no_mutable (tree decl)
2335 {
2336   tree type = TREE_TYPE (decl);
2337   if (TYPE_REF_P (type))
2338     {
2339       if (!is_invisiref_parm (decl))
2340 	return false;
2341       type = TREE_TYPE (type);
2342 
2343       if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
2344 	{
2345 	  /* NVR doesn't preserve const qualification of the
2346 	     variable's type.  */
2347 	  tree outer = outer_curly_brace_block (current_function_decl);
2348 	  tree var;
2349 
2350 	  if (outer)
2351 	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
2352 	      if (VAR_P (var)
2353 		  && DECL_NAME (decl) == DECL_NAME (var)
2354 		  && (TYPE_MAIN_VARIANT (type)
2355 		      == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
2356 		{
2357 		  if (TYPE_READONLY (TREE_TYPE (var)))
2358 		    type = TREE_TYPE (var);
2359 		  break;
2360 		}
2361 	}
2362     }
2363 
2364   if (type == error_mark_node)
2365     return false;
2366 
2367   /* Variables with const-qualified type having no mutable member
2368      are predetermined shared.  */
2369   if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
2370     return true;
2371 
2372   return false;
2373 }
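
/* Illustrative examples for the predicate above (hypothetical decls):

     const int c = 1;              // true: const, no mutable members
     struct S { mutable int m; };
     const S s;                    // false: S has a mutable member  */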
2374 
2375 /* True if OpenMP sharing attribute of DECL is predetermined.  */
2376 
2377 enum omp_clause_default_kind
2378 cxx_omp_predetermined_sharing_1 (tree decl)
2379 {
2380   /* Static data members are predetermined shared.  */
2381   if (TREE_STATIC (decl))
2382     {
2383       tree ctx = CP_DECL_CONTEXT (decl);
2384       if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
2385 	return OMP_CLAUSE_DEFAULT_SHARED;
2386 
2387       if (c_omp_predefined_variable (decl))
2388 	return OMP_CLAUSE_DEFAULT_SHARED;
2389     }
2390 
2391   /* 'this' may not be specified in data-sharing clauses; still, we need
2392      to predetermine it firstprivate.  */
2393   if (decl == current_class_ptr)
2394     return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
2395 
2396   return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
2397 }
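
/* For example (illustrative), in a member function

     #pragma omp parallel
     do_something (this->field);

   'this' cannot be written in a data-sharing clause, so the routine above
   predetermines it firstprivate for the enclosing construct.  */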
2398 
2399 /* Likewise, but also include the artificial vars.  We don't want to
2400    disallow the artificial vars being mentioned in explicit clauses,
2401    disallow artificial vars from being mentioned in explicit clauses,
2402    access iterators other than pointers, but during gimplification
2403    we want to treat them as predetermined.  */
2404 
2405 enum omp_clause_default_kind
2406 cxx_omp_predetermined_sharing (tree decl)
2407 {
2408   enum omp_clause_default_kind ret = cxx_omp_predetermined_sharing_1 (decl);
2409   if (ret != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
2410     return ret;
2411 
2412   /* Predetermine artificial variables holding integral values; those
2413      are usually the result of gimplify_one_sizepos or SAVE_EXPR
2414      gimplification.  */
2415   if (VAR_P (decl)
2416       && DECL_ARTIFICIAL (decl)
2417       && INTEGRAL_TYPE_P (TREE_TYPE (decl))
2418       && !(DECL_LANG_SPECIFIC (decl)
2419 	   && DECL_OMP_PRIVATIZED_MEMBER (decl)))
2420     return OMP_CLAUSE_DEFAULT_SHARED;
2421 
2422   /* Similarly for typeinfo symbols.  */
2423   if (VAR_P (decl) && DECL_ARTIFICIAL (decl) && DECL_TINFO_P (decl))
2424     return OMP_CLAUSE_DEFAULT_SHARED;
2425 
2426   return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
2427 }
2428 
2429 /* Finalize an implicitly determined clause.  */
2430 
2431 void
2432 cxx_omp_finish_clause (tree c, gimple_seq *)
2433 {
2434   tree decl, inner_type;
2435   bool make_shared = false;
2436 
2437   if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE
2438       && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LASTPRIVATE
2439 	  || !OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)))
2440     return;
2441 
2442   decl = OMP_CLAUSE_DECL (c);
2443   decl = require_complete_type (decl);
2444   inner_type = TREE_TYPE (decl);
2445   if (decl == error_mark_node)
2446     make_shared = true;
2447   else if (TYPE_REF_P (TREE_TYPE (decl)))
2448     inner_type = TREE_TYPE (inner_type);
2449 
2450   /* We're interested in the base element, not arrays.  */
2451   while (TREE_CODE (inner_type) == ARRAY_TYPE)
2452     inner_type = TREE_TYPE (inner_type);
2453 
2454   /* Check for special function availability by building a call to one.
2455      Save the results, because later we won't be in the right context
2456      for making these queries.  */
2457   bool first = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE;
2458   if (!make_shared
2459       && CLASS_TYPE_P (inner_type)
2460       && cxx_omp_create_clause_info (c, inner_type, !first, first, !first,
2461 				     true))
2462     make_shared = true;
2463 
2464   if (make_shared)
2465     {
2466       OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
2467       OMP_CLAUSE_SHARED_FIRSTPRIVATE (c) = 0;
2468       OMP_CLAUSE_SHARED_READONLY (c) = 0;
2469     }
2470 }
2471 
2472 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
2473    disregarded in an OpenMP construct, because it is going to be
2474    remapped during OpenMP lowering.  SHARED is true if DECL
2475    is going to be shared, false if it is going to be privatized.  */
2476 
2477 bool
2478 cxx_omp_disregard_value_expr (tree decl, bool shared)
2479 {
2480   if (shared)
2481     return false;
2482   if (VAR_P (decl)
2483       && DECL_HAS_VALUE_EXPR_P (decl)
2484       && DECL_ARTIFICIAL (decl)
2485       && DECL_LANG_SPECIFIC (decl)
2486       && DECL_OMP_PRIVATIZED_MEMBER (decl))
2487     return true;
2488   if (VAR_P (decl) && DECL_CONTEXT (decl) && is_capture_proxy (decl))
2489     return true;
2490   return false;
2491 }
2492 
2493 /* Fold expression X which is used as an rvalue if RVAL is true.  */
2494 
2495 tree
2496 cp_fold_maybe_rvalue (tree x, bool rval)
2497 {
2498   while (true)
2499     {
2500       x = cp_fold (x);
2501       if (rval)
2502 	x = mark_rvalue_use (x);
2503       if (rval && DECL_P (x)
2504 	  && !TYPE_REF_P (TREE_TYPE (x)))
2505 	{
2506 	  tree v = decl_constant_value (x);
2507 	  if (v != x && v != error_mark_node)
2508 	    {
2509 	      x = v;
2510 	      continue;
2511 	    }
2512 	}
2513       break;
2514     }
2515   return x;
2516 }
2517 
2518 /* Fold expression X which is used as an rvalue.  */
2519 
2520 tree
2521 cp_fold_rvalue (tree x)
2522 {
2523   return cp_fold_maybe_rvalue (x, true);
2524 }
2525 
2526 /* Perform folding on expression X.  */
2527 
2528 tree
2529 cp_fully_fold (tree x)
2530 {
2531   if (processing_template_decl)
2532     return x;
2533   /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't
2534      have to call both.  */
2535   if (cxx_dialect >= cxx11)
2536     {
2537       x = maybe_constant_value (x);
2538       /* Sometimes we are given a CONSTRUCTOR but the call above wraps it into
2539 	 a TARGET_EXPR; undo that here.  */
2540       if (TREE_CODE (x) == TARGET_EXPR)
2541 	x = TARGET_EXPR_INITIAL (x);
2542       else if (TREE_CODE (x) == VIEW_CONVERT_EXPR
2543 	       && TREE_CODE (TREE_OPERAND (x, 0)) == CONSTRUCTOR
2544 	       && TREE_TYPE (TREE_OPERAND (x, 0)) == TREE_TYPE (x))
2545 	x = TREE_OPERAND (x, 0);
2546     }
2547   return cp_fold_rvalue (x);
2548 }
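
/* Illustrative (hypothetical) examples of what cp_fully_fold evaluates:

     constexpr int sq (int i) { return i * i; }
     int a = sq (4) + 2;   // maybe_constant_value reduces sq (4) to 16
     int b = 2 + 3 * 4;    // cp_fold_rvalue folds the arithmetic to 14  */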
2549 
2550 /* Likewise, but also fold recursively, which cp_fully_fold doesn't perform
2551    in some cases.  */
2552 
2553 tree
2554 cp_fully_fold_init (tree x)
2555 {
2556   if (processing_template_decl)
2557     return x;
2558   x = cp_fully_fold (x);
2559   cp_fold_data data (/*genericize*/false);
2560   cp_walk_tree (&x, cp_fold_r, &data, NULL);
2561   return x;
2562 }
2563 
2564 /* c-common interface to cp_fold.  If IN_INIT, this is in a static initializer
2565    and certain changes are made to the folding done.  Or should be (FIXME).  We
2566    never touch maybe_const, as it is only used for the C front-end
2567    C_MAYBE_CONST_EXPR.  */
2568 
2569 tree
2570 c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/, bool lval)
2571 {
2572   return cp_fold_maybe_rvalue (x, !lval);
2573 }
2574 
2575 static GTY((deletable)) hash_map<tree, tree> *fold_cache;
2576 
2577 /* Dispose of the whole FOLD_CACHE.  */
2578 
2579 void
2580 clear_fold_cache (void)
2581 {
2582   if (fold_cache != NULL)
2583     fold_cache->empty ();
2584 }
2585 
2586 /* This function tries to fold an expression X.
2587    To avoid combinatorial explosion, folding results are kept in fold_cache.
2588    If X is invalid, we don't fold at all.
2589    For performance reasons we don't cache expressions representing a
2590    declaration or constant.
2591    The function returns X or its folded variant.  */
2592 
2593 static tree
2594 cp_fold (tree x)
2595 {
2596   tree op0, op1, op2, op3;
2597   tree org_x = x, r = NULL_TREE;
2598   enum tree_code code;
2599   location_t loc;
2600   bool rval_ops = true;
2601 
2602   if (!x || x == error_mark_node)
2603     return x;
2604 
2605   if (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node))
2606     return x;
2607 
2608   /* Don't bother to cache DECLs or constants.  */
2609   if (DECL_P (x) || CONSTANT_CLASS_P (x))
2610     return x;
2611 
2612   if (fold_cache == NULL)
2613     fold_cache = hash_map<tree, tree>::create_ggc (101);
2614 
2615   if (tree *cached = fold_cache->get (x))
2616     return *cached;
2617 
2618   code = TREE_CODE (x);
2619   switch (code)
2620     {
2621     case CLEANUP_POINT_EXPR:
2622       /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
2623 	 effects.  */
2624       r = cp_fold_rvalue (TREE_OPERAND (x, 0));
2625       if (!TREE_SIDE_EFFECTS (r))
2626 	x = r;
2627       break;
2628 
2629     case SIZEOF_EXPR:
2630       x = fold_sizeof_expr (x);
2631       break;
2632 
2633     case VIEW_CONVERT_EXPR:
2634       rval_ops = false;
2635       /* FALLTHRU */
2636     case CONVERT_EXPR:
2637     case NOP_EXPR:
2638     case NON_LVALUE_EXPR:
2639 
2640       if (VOID_TYPE_P (TREE_TYPE (x)))
2641 	{
2642 	  /* This is just to make sure we don't end up with casts to
2643 	     void from error_mark_node.  If we just return x, then
2644 	     cp_fold_r might fold the operand into error_mark_node and
2645 	     leave the conversion in the IR.  STRIP_USELESS_TYPE_CONVERSION
2646 	     during gimplification doesn't like such casts.
2647 	     Don't create a new tree if op0 != TREE_OPERAND (x, 0); the
2648 	     folding of the operand should be in the caches, and cp_fold_r,
2649 	     if it is reached, will modify the operand in place.  */
2650 	  op0 = cp_fold (TREE_OPERAND (x, 0));
2651 	  if (op0 == error_mark_node)
2652 	    x = error_mark_node;
2653 	  break;
2654 	}
2655 
2656       loc = EXPR_LOCATION (x);
2657       op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2658 
2659       if (code == CONVERT_EXPR
2660 	  && SCALAR_TYPE_P (TREE_TYPE (x))
2661 	  && op0 != void_node)
2662 	/* During parsing we used convert_to_*_nofold; re-convert now using the
2663 	   folding variants, since fold() doesn't do those transformations.  */
2664 	x = fold (convert (TREE_TYPE (x), op0));
2665       else if (op0 != TREE_OPERAND (x, 0))
2666 	{
2667 	  if (op0 == error_mark_node)
2668 	    x = error_mark_node;
2669 	  else
2670 	    x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2671 	}
2672       else
2673 	x = fold (x);
2674 
2675       /* Conversion of an out-of-range value has implementation-defined
2676 	 behavior; the language considers it different from arithmetic
2677 	 overflow, which is undefined.  */
2678       if (TREE_CODE (op0) == INTEGER_CST
2679 	  && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0))
2680 	TREE_OVERFLOW (x) = false;
2681 
2682       break;
2683 
2684     case INDIRECT_REF:
2685       /* We don't need the decltype(auto) obfuscation anymore.  */
2686       if (REF_PARENTHESIZED_P (x))
2687 	{
2688 	  tree p = maybe_undo_parenthesized_ref (x);
2689 	  return cp_fold (p);
2690 	}
2691       goto unary;
2692 
2693     case ADDR_EXPR:
2694       loc = EXPR_LOCATION (x);
2695       op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), false);
2696 
2697       /* Cope with user tricks that amount to offsetof.  */
2698       if (op0 != error_mark_node
2699 	  && !FUNC_OR_METHOD_TYPE_P (TREE_TYPE (op0)))
2700 	{
2701 	  tree val = get_base_address (op0);
2702 	  if (val
2703 	      && INDIRECT_REF_P (val)
2704 	      && COMPLETE_TYPE_P (TREE_TYPE (val))
2705 	      && TREE_CONSTANT (TREE_OPERAND (val, 0)))
2706 	    {
2707 	      val = TREE_OPERAND (val, 0);
2708 	      STRIP_NOPS (val);
2709 	      val = maybe_constant_value (val);
2710 	      if (TREE_CODE (val) == INTEGER_CST)
2711 		return fold_offsetof (op0, TREE_TYPE (x));
2712 	    }
2713 	}
2714       goto finish_unary;
2715 
2716     case REALPART_EXPR:
2717     case IMAGPART_EXPR:
2718       rval_ops = false;
2719       /* FALLTHRU */
2720     case CONJ_EXPR:
2721     case FIX_TRUNC_EXPR:
2722     case FLOAT_EXPR:
2723     case NEGATE_EXPR:
2724     case ABS_EXPR:
2725     case ABSU_EXPR:
2726     case BIT_NOT_EXPR:
2727     case TRUTH_NOT_EXPR:
2728     case FIXED_CONVERT_EXPR:
2729     unary:
2730 
2731       loc = EXPR_LOCATION (x);
2732       op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2733 
2734     finish_unary:
2735       if (op0 != TREE_OPERAND (x, 0))
2736 	{
2737 	  if (op0 == error_mark_node)
2738 	    x = error_mark_node;
2739 	  else
2740 	    {
2741 	      x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2742 	      if (code == INDIRECT_REF
2743 		  && (INDIRECT_REF_P (x) || TREE_CODE (x) == MEM_REF))
2744 		{
2745 		  TREE_READONLY (x) = TREE_READONLY (org_x);
2746 		  TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
2747 		  TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
2748 		}
2749 	    }
2750 	}
2751       else
2752 	x = fold (x);
2753 
2754       gcc_assert (TREE_CODE (x) != COND_EXPR
2755 		  || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
2756       break;
2757 
2758     case UNARY_PLUS_EXPR:
2759       op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
2760       if (op0 == error_mark_node)
2761 	x = error_mark_node;
2762       else
2763 	x = fold_convert (TREE_TYPE (x), op0);
2764       break;
2765 
2766     case POSTDECREMENT_EXPR:
2767     case POSTINCREMENT_EXPR:
2768     case INIT_EXPR:
2769     case PREDECREMENT_EXPR:
2770     case PREINCREMENT_EXPR:
2771     case COMPOUND_EXPR:
2772     case MODIFY_EXPR:
2773       rval_ops = false;
2774       /* FALLTHRU */
2775     case POINTER_PLUS_EXPR:
2776     case PLUS_EXPR:
2777     case POINTER_DIFF_EXPR:
2778     case MINUS_EXPR:
2779     case MULT_EXPR:
2780     case TRUNC_DIV_EXPR:
2781     case CEIL_DIV_EXPR:
2782     case FLOOR_DIV_EXPR:
2783     case ROUND_DIV_EXPR:
2784     case TRUNC_MOD_EXPR:
2785     case CEIL_MOD_EXPR:
2786     case ROUND_MOD_EXPR:
2787     case RDIV_EXPR:
2788     case EXACT_DIV_EXPR:
2789     case MIN_EXPR:
2790     case MAX_EXPR:
2791     case LSHIFT_EXPR:
2792     case RSHIFT_EXPR:
2793     case LROTATE_EXPR:
2794     case RROTATE_EXPR:
2795     case BIT_AND_EXPR:
2796     case BIT_IOR_EXPR:
2797     case BIT_XOR_EXPR:
2798     case TRUTH_AND_EXPR:
2799     case TRUTH_ANDIF_EXPR:
2800     case TRUTH_OR_EXPR:
2801     case TRUTH_ORIF_EXPR:
2802     case TRUTH_XOR_EXPR:
2803     case LT_EXPR: case LE_EXPR:
2804     case GT_EXPR: case GE_EXPR:
2805     case EQ_EXPR: case NE_EXPR:
2806     case UNORDERED_EXPR: case ORDERED_EXPR:
2807     case UNLT_EXPR: case UNLE_EXPR:
2808     case UNGT_EXPR: case UNGE_EXPR:
2809     case UNEQ_EXPR: case LTGT_EXPR:
2810     case RANGE_EXPR: case COMPLEX_EXPR:
2811 
2812       loc = EXPR_LOCATION (x);
2813       op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2814       op1 = cp_fold_rvalue (TREE_OPERAND (x, 1));
2815 
2816       /* decltype(nullptr) has only one value, so optimize away all comparisons
2817 	 with that type right away; keeping them in the IL causes trouble for
2818 	 various optimizations.  */
2819       if (COMPARISON_CLASS_P (org_x)
2820 	  && TREE_CODE (TREE_TYPE (op0)) == NULLPTR_TYPE
2821 	  && TREE_CODE (TREE_TYPE (op1)) == NULLPTR_TYPE)
2822 	{
2823 	  switch (code)
2824 	    {
2825 	    case EQ_EXPR:
2826 	    case LE_EXPR:
2827 	    case GE_EXPR:
2828 	      x = constant_boolean_node (true, TREE_TYPE (x));
2829 	      break;
2830 	    case NE_EXPR:
2831 	    case LT_EXPR:
2832 	    case GT_EXPR:
2833 	      x = constant_boolean_node (false, TREE_TYPE (x));
2834 	      break;
2835 	    default:
2836 	      gcc_unreachable ();
2837 	    }
2838 	  return omit_two_operands_loc (loc, TREE_TYPE (x), x,
2839 					op0, op1);
2840 	}
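
      /* Illustrative example for the nullptr_t folding above: given
	 decltype(nullptr) p, q;  the comparison  p == q  folds to  true,
	 and omit_two_operands_loc preserves any side effects of evaluating
	 p and q while dropping the comparison itself.  */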
2841 
2842       if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
2843 	{
2844 	  if (op0 == error_mark_node || op1 == error_mark_node)
2845 	    x = error_mark_node;
2846 	  else
2847 	    x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1);
2848 	}
2849       else
2850 	x = fold (x);
2851 
2852       /* This is only needed for -Wnonnull-compare and only if
2853 	 TREE_NO_WARNING (org_x), but to avoid that option affecting code
2854 	 generation, we always do it.  */
2855       if (COMPARISON_CLASS_P (org_x))
2856 	{
2857 	  if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST)
2858 	    ;
2859 	  else if (COMPARISON_CLASS_P (x))
2860 	    {
2861 	      if (TREE_NO_WARNING (org_x) && warn_nonnull_compare)
2862 		TREE_NO_WARNING (x) = 1;
2863 	    }
2864 	  /* Otherwise give up on optimizing these; let the GIMPLE folders
2865 	     optimize them later on.  */
2866 	  else if (op0 != TREE_OPERAND (org_x, 0)
2867 		   || op1 != TREE_OPERAND (org_x, 1))
2868 	    {
2869 	      x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1);
2870 	      if (TREE_NO_WARNING (org_x) && warn_nonnull_compare)
2871 		TREE_NO_WARNING (x) = 1;
2872 	    }
2873 	  else
2874 	    x = org_x;
2875 	}
2876 
2877       break;
2878 
2879     case VEC_COND_EXPR:
2880     case COND_EXPR:
2881       loc = EXPR_LOCATION (x);
2882       op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
2883       op1 = cp_fold (TREE_OPERAND (x, 1));
2884       op2 = cp_fold (TREE_OPERAND (x, 2));
2885 
2886       if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
2887 	{
2888 	  warning_sentinel s (warn_int_in_bool_context);
2889 	  if (!VOID_TYPE_P (TREE_TYPE (op1)))
2890 	    op1 = cp_truthvalue_conversion (op1, tf_warning_or_error);
2891 	  if (!VOID_TYPE_P (TREE_TYPE (op2)))
2892 	    op2 = cp_truthvalue_conversion (op2, tf_warning_or_error);
2893 	}
2894       else if (VOID_TYPE_P (TREE_TYPE (x)))
2895 	{
2896 	  if (TREE_CODE (op0) == INTEGER_CST)
2897 	    {
2898 	      /* If the condition is constant, fold can fold away
2899 		 the COND_EXPR.  If some statement-level uses of COND_EXPR
2900 		 have one of the branches NULL, avoid a folding crash.  */
2901 	      if (!op1)
2902 		op1 = build_empty_stmt (loc);
2903 	      if (!op2)
2904 		op2 = build_empty_stmt (loc);
2905 	    }
2906 	  else
2907 	    {
2908 	      /* Otherwise, don't bother folding a void condition, since
2909 		 it can't produce a constant value.  */
2910 	      if (op0 != TREE_OPERAND (x, 0)
2911 		  || op1 != TREE_OPERAND (x, 1)
2912 		  || op2 != TREE_OPERAND (x, 2))
2913 		x = build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2914 	      break;
2915 	    }
2916 	}
2917 
2918       if (op0 != TREE_OPERAND (x, 0)
2919 	  || op1 != TREE_OPERAND (x, 1)
2920 	  || op2 != TREE_OPERAND (x, 2))
2921 	{
2922 	  if (op0 == error_mark_node
2923 	      || op1 == error_mark_node
2924 	      || op2 == error_mark_node)
2925 	    x = error_mark_node;
2926 	  else
2927 	    x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2928 	}
2929       else
2930 	x = fold (x);
2931 
2932       /* A COND_EXPR might have incompatible types in branches if one or both
2933 	 arms are bitfields.  If folding exposed such a branch, fix it up.  */
2934       if (TREE_CODE (x) != code
2935 	  && x != error_mark_node
2936 	  && !useless_type_conversion_p (TREE_TYPE (org_x), TREE_TYPE (x)))
2937 	x = fold_convert (TREE_TYPE (org_x), x);
2938 
2939       break;
2940 
2941     case CALL_EXPR:
2942       {
2943 	int i, m, sv = optimize, nw = sv, changed = 0;
2944 	tree callee = get_callee_fndecl (x);
2945 
2946 	/* Some built-in function calls will be evaluated at compile-time in
2947 	   fold ().  Set optimize to 1 when folding __builtin_constant_p inside
2948 	   a constexpr function so that fold_builtin_1 doesn't fold it to 0.  */
2949 	if (callee && fndecl_built_in_p (callee) && !optimize
2950 	    && DECL_IS_BUILTIN_CONSTANT_P (callee)
2951 	    && current_function_decl
2952 	    && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
2953 	  nw = 1;
2954 
2955 	/* Defer folding __builtin_is_constant_evaluated.  */
2956 	if (callee
2957 	    && fndecl_built_in_p (callee, CP_BUILT_IN_IS_CONSTANT_EVALUATED,
2958 				  BUILT_IN_FRONTEND))
2959 	  break;
2960 
2961 	if (callee
2962 	    && fndecl_built_in_p (callee, CP_BUILT_IN_SOURCE_LOCATION,
2963 				  BUILT_IN_FRONTEND))
2964 	  {
2965 	    x = fold_builtin_source_location (EXPR_LOCATION (x));
2966 	    break;
2967 	  }
2968 
2969 	x = copy_node (x);
2970 
2971 	m = call_expr_nargs (x);
2972 	for (i = 0; i < m; i++)
2973 	  {
2974 	    r = cp_fold (CALL_EXPR_ARG (x, i));
2975 	    if (r != CALL_EXPR_ARG (x, i))
2976 	      {
2977 		if (r == error_mark_node)
2978 		  {
2979 		    x = error_mark_node;
2980 		    break;
2981 		  }
2982 		changed = 1;
2983 	      }
2984 	    CALL_EXPR_ARG (x, i) = r;
2985 	  }
2986 	if (x == error_mark_node)
2987 	  break;
2988 
2989 	optimize = nw;
2990 	r = fold (x);
2991 	optimize = sv;
2992 
2993 	if (TREE_CODE (r) != CALL_EXPR)
2994 	  {
2995 	    x = cp_fold (r);
2996 	    break;
2997 	  }
2998 
2999 	optimize = nw;
3000 
3001 	/* Invoke maybe_constant_value for functions declared
3002 	   constexpr and not called with AGGR_INIT_EXPRs.
3003 	   TODO:
3004 	   Do constexpr expansion of expressions where the call itself is not
3005 	   constant, but the call followed by an INDIRECT_REF is.  */
3006 	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
3007 	    && !flag_no_inline)
3008 	  r = maybe_constant_value (x);
3009 	optimize = sv;
3010 
3011         if (TREE_CODE (r) != CALL_EXPR)
3012 	  {
3013 	    if (DECL_CONSTRUCTOR_P (callee))
3014 	      {
3015 		loc = EXPR_LOCATION (x);
3016 		tree s = build_fold_indirect_ref_loc (loc,
3017 						      CALL_EXPR_ARG (x, 0));
3018 		r = build2_loc (loc, INIT_EXPR, TREE_TYPE (s), s, r);
3019 	      }
3020 	    x = r;
3021 	    break;
3022 	  }
3023 
3024 	if (!changed)
3025 	  x = org_x;
3026 	break;
3027       }
3028 
3029     case CONSTRUCTOR:
3030       {
3031 	unsigned i;
3032 	constructor_elt *p;
3033 	vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x);
3034 	vec<constructor_elt, va_gc> *nelts = NULL;
3035 	FOR_EACH_VEC_SAFE_ELT (elts, i, p)
3036 	  {
3037 	    tree op = cp_fold (p->value);
3038 	    if (op != p->value)
3039 	      {
3040 		if (op == error_mark_node)
3041 		  {
3042 		    x = error_mark_node;
3043 		    vec_free (nelts);
3044 		    break;
3045 		  }
3046 		if (nelts == NULL)
3047 		  nelts = elts->copy ();
3048 		(*nelts)[i].value = op;
3049 	      }
3050 	  }
3051 	if (nelts)
3052 	  {
3053 	    x = build_constructor (TREE_TYPE (x), nelts);
3054 	    CONSTRUCTOR_PLACEHOLDER_BOUNDARY (x)
3055 	      = CONSTRUCTOR_PLACEHOLDER_BOUNDARY (org_x);
3056 	  }
3057 	if (VECTOR_TYPE_P (TREE_TYPE (x)))
3058 	  x = fold (x);
3059 	break;
3060       }
3061     case TREE_VEC:
3062       {
3063 	bool changed = false;
3064 	releasing_vec vec;
3065 	int i, n = TREE_VEC_LENGTH (x);
3066 	vec_safe_reserve (vec, n);
3067 
3068 	for (i = 0; i < n; i++)
3069 	  {
3070 	    tree op = cp_fold (TREE_VEC_ELT (x, i));
3071 	    vec->quick_push (op);
3072 	    if (op != TREE_VEC_ELT (x, i))
3073 	      changed = true;
3074 	  }
3075 
3076 	if (changed)
3077 	  {
3078 	    r = copy_node (x);
3079 	    for (i = 0; i < n; i++)
3080 	      TREE_VEC_ELT (r, i) = (*vec)[i];
3081 	    x = r;
3082 	  }
3083       }
3084 
3085       break;
3086 
3087     case ARRAY_REF:
3088     case ARRAY_RANGE_REF:
3089 
3090       loc = EXPR_LOCATION (x);
3091       op0 = cp_fold (TREE_OPERAND (x, 0));
3092       op1 = cp_fold (TREE_OPERAND (x, 1));
3093       op2 = cp_fold (TREE_OPERAND (x, 2));
3094       op3 = cp_fold (TREE_OPERAND (x, 3));
3095 
3096       if (op0 != TREE_OPERAND (x, 0)
3097 	  || op1 != TREE_OPERAND (x, 1)
3098 	  || op2 != TREE_OPERAND (x, 2)
3099 	  || op3 != TREE_OPERAND (x, 3))
3100 	{
3101 	  if (op0 == error_mark_node
3102 	      || op1 == error_mark_node
3103 	      || op2 == error_mark_node
3104 	      || op3 == error_mark_node)
3105 	    x = error_mark_node;
3106 	  else
3107 	    {
3108 	      x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3);
3109 	      TREE_READONLY (x) = TREE_READONLY (org_x);
3110 	      TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
3111 	      TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
3112 	    }
3113 	}
3114 
3115       x = fold (x);
3116       break;
3117 
3118     case SAVE_EXPR:
3119       /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
3120 	 folding, evaluates to an invariant.  In that case no need to wrap
3121 	 this folded tree with a SAVE_EXPR.  */
3122       r = cp_fold (TREE_OPERAND (x, 0));
3123       if (tree_invariant_p (r))
3124 	x = r;
3125       break;
3126 
3127     default:
3128       return org_x;
3129     }
3130 
3131   if (EXPR_P (x) && TREE_CODE (x) == code)
3132     {
3133       TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
3134       TREE_NO_WARNING (x) = TREE_NO_WARNING (org_x);
3135     }
3136 
3137   fold_cache->put (org_x, x);
3138   /* Prevent us from trying to fold an already folded result again.  */
3139   if (x != org_x)
3140     fold_cache->put (x, x);
3141 
3142   return x;
3143 }
3144 
3145 /* Look up "hot", "cold", "likely" or "unlikely" in attribute list LIST.  */
3146 
3147 tree
3148 lookup_hotness_attribute (tree list)
3149 {
3150   for (; list; list = TREE_CHAIN (list))
3151     {
3152       tree name = get_attribute_name (list);
3153       if (is_attribute_p ("hot", name)
3154 	  || is_attribute_p ("cold", name)
3155 	  || is_attribute_p ("likely", name)
3156 	  || is_attribute_p ("unlikely", name))
3157 	break;
3158     }
3159   return list;
3160 }
3161 
3162 /* Remove the "hot", "cold", "likely" and "unlikely" attributes from LIST.  */
3163 
3164 static tree
3165 remove_hotness_attribute (tree list)
3166 {
3167   list = remove_attribute ("hot", list);
3168   list = remove_attribute ("cold", list);
3169   list = remove_attribute ("likely", list);
3170   list = remove_attribute ("unlikely", list);
3171   return list;
3172 }
3173 
3174 /* If [[likely]] or [[unlikely]] appears on this statement, turn it into a
3175    PREDICT_EXPR.  */
3176 
3177 tree
3178 process_stmt_hotness_attribute (tree std_attrs, location_t attrs_loc)
3179 {
3180   if (std_attrs == error_mark_node)
3181     return std_attrs;
3182   if (tree attr = lookup_hotness_attribute (std_attrs))
3183     {
3184       tree name = get_attribute_name (attr);
3185       bool hot = (is_attribute_p ("hot", name)
3186 		  || is_attribute_p ("likely", name));
3187       tree pred = build_predict_expr (hot ? PRED_HOT_LABEL : PRED_COLD_LABEL,
3188 				      hot ? TAKEN : NOT_TAKEN);
3189       SET_EXPR_LOCATION (pred, attrs_loc);
3190       add_stmt (pred);
3191       if (tree other = lookup_hotness_attribute (TREE_CHAIN (attr)))
3192 	warning (OPT_Wattributes, "ignoring attribute %qE after earlier %qE",
3193 		 get_attribute_name (other), name);
3194       std_attrs = remove_hotness_attribute (std_attrs);
3195     }
3196   return std_attrs;
3197 }
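
/* For example (illustrative):

     if (cond)
       [[likely]] f ();

   lookup_hotness_attribute finds [[likely]] among the statement's
   attributes, a PREDICT_EXPR (PRED_HOT_LABEL, TAKEN) is emitted before
   the statement, and the hotness attributes are removed from the list.  */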
3198 
3199 /* Helper of fold_builtin_source_location: return the
3200    std::source_location::__impl type after performing verification
3201    on it.  LOC is used for reporting any errors.  */
3202 
3203 static tree
3204 get_source_location_impl_type (location_t loc)
3205 {
3206   tree name = get_identifier ("source_location");
3207   tree decl = lookup_qualified_name (std_node, name);
3208   if (TREE_CODE (decl) != TYPE_DECL)
3209     {
3210       auto_diagnostic_group d;
3211       if (decl == error_mark_node || TREE_CODE (decl) == TREE_LIST)
3212 	qualified_name_lookup_error (std_node, name, decl, loc);
3213       else
3214 	error_at (loc, "%qD is not a type", decl);
3215       return error_mark_node;
3216     }
3217   name = get_identifier ("__impl");
3218   tree type = TREE_TYPE (decl);
3219   decl = lookup_qualified_name (type, name);
3220   if (TREE_CODE (decl) != TYPE_DECL)
3221     {
3222       auto_diagnostic_group d;
3223       if (decl == error_mark_node || TREE_CODE (decl) == TREE_LIST)
3224 	qualified_name_lookup_error (type, name, decl, loc);
3225       else
3226 	error_at (loc, "%qD is not a type", decl);
3227       return error_mark_node;
3228     }
3229   type = TREE_TYPE (decl);
3230   if (TREE_CODE (type) != RECORD_TYPE)
3231     {
3232       error_at (loc, "%qD is not a class type", decl);
3233       return error_mark_node;
3234     }
3235 
3236   int cnt = 0;
3237   for (tree field = TYPE_FIELDS (type);
3238        (field = next_initializable_field (field)) != NULL_TREE;
3239        field = DECL_CHAIN (field))
3240     {
3241       if (DECL_NAME (field) != NULL_TREE)
3242 	{
3243 	  const char *n = IDENTIFIER_POINTER (DECL_NAME (field));
3244 	  if (strcmp (n, "_M_file_name") == 0
3245 	      || strcmp (n, "_M_function_name") == 0)
3246 	    {
3247 	      if (TREE_TYPE (field) != const_string_type_node)
3248 		{
3249 		  error_at (loc, "%qD does not have %<const char *%> type",
3250 			    field);
3251 		  return error_mark_node;
3252 		}
3253 	      cnt++;
3254 	      continue;
3255 	    }
3256 	  else if (strcmp (n, "_M_line") == 0 || strcmp (n, "_M_column") == 0)
3257 	    {
3258 	      if (TREE_CODE (TREE_TYPE (field)) != INTEGER_TYPE)
3259 		{
3260 		  error_at (loc, "%qD does not have integral type", field);
3261 		  return error_mark_node;
3262 		}
3263 	      cnt++;
3264 	      continue;
3265 	    }
3266 	}
3267       cnt = 0;
3268       break;
3269     }
3270   if (cnt != 4)
3271     {
3272       error_at (loc, "%<std::source_location::__impl%> does not contain only "
3273 		     "non-static data members %<_M_file_name%>, "
3274 		     "%<_M_function_name%>, %<_M_line%> and %<_M_column%>");
3275       return error_mark_node;
3276     }
3277   return build_qualified_type (type, TYPE_QUAL_CONST);
3278 }
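
/* The verification above expects a layout like this sketch of the
   libstdc++ class (member order and exact integer types may vary):

     namespace std {
       struct source_location {
	 struct __impl {
	   const char *_M_file_name;
	   const char *_M_function_name;
	   unsigned _M_line;
	   unsigned _M_column;
	 };
       };
     }  */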
3279 
3280 /* Entry type for the source_location_table hash table.  */
3281 struct GTY((for_user)) source_location_table_entry {
3282   location_t loc;
3283   unsigned uid;
3284   tree var;
3285 };
3286 
3287 /* Traits class for the source_location_table hash table below.  */
3288 
3289 struct source_location_table_entry_hash
3290   : ggc_remove <source_location_table_entry>
3291 {
3292   typedef source_location_table_entry value_type;
3293   typedef source_location_table_entry compare_type;
3294 
3295   static hashval_t
3296   hash (const source_location_table_entry &ref)
3297   {
3298     inchash::hash hstate (0);
3299     hstate.add_int (ref.loc);
3300     hstate.add_int (ref.uid);
3301     return hstate.end ();
3302   }
3303 
3304   static bool
3305   equal (const source_location_table_entry &ref1,
3306 	 const source_location_table_entry &ref2)
3307   {
3308     return ref1.loc == ref2.loc && ref1.uid == ref2.uid;
3309   }
3310 
3311   static void
3312   mark_deleted (source_location_table_entry &ref)
3313   {
3314     ref.loc = UNKNOWN_LOCATION;
3315     ref.uid = -1U;
3316     ref.var = NULL_TREE;
3317   }
3318 
3319   static const bool empty_zero_p = true;
3320 
3321   static void
3322   mark_empty (source_location_table_entry &ref)
3323   {
3324     ref.loc = UNKNOWN_LOCATION;
3325     ref.uid = 0;
3326     ref.var = NULL_TREE;
3327   }
3328 
3329   static bool
3330   is_deleted (const source_location_table_entry &ref)
3331   {
3332     return (ref.loc == UNKNOWN_LOCATION
3333 	    && ref.uid == -1U
3334 	    && ref.var == NULL_TREE);
3335   }
3336 
3337   static bool
3338   is_empty (const source_location_table_entry &ref)
3339   {
3340     return (ref.loc == UNKNOWN_LOCATION
3341 	    && ref.uid == 0
3342 	    && ref.var == NULL_TREE);
3343   }
3344 };
3345 
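/* Table of variables already created for __builtin_source_location calls,
   keyed by (location, function uid), and a counter used to give each such
   variable a unique assembler name.  */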
3346 static GTY(()) hash_table <source_location_table_entry_hash>
3347   *source_location_table;
3348 static GTY(()) unsigned int source_location_id;
3349 
3350 /* Fold a __builtin_source_location () call.  LOC is the location of the
3351    call; returns the address of a static __impl object describing LOC.  */
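/* For illustration only: folding a call at line 10, column 5 of "foo.cc"
   inside `int f ()' yields the address of a static constant roughly
   equivalent to

     static const std::source_location::__impl Lsrc_loc0
       = { "foo.cc", "int f()", 10, 5 };

   and later calls at the same location in the same function reuse the
   same variable via source_location_table.  */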
3352 
3353 tree
3354 fold_builtin_source_location (location_t loc)
3355 {
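  /* Lazily look up and validate std::source_location::__impl; if that
     fails, degrade to returning a null pointer instead of cascading
     errors.  */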
3356   if (source_location_impl == NULL_TREE)
3357     {
3358       auto_diagnostic_group d;
3359       source_location_impl = get_source_location_impl_type (loc);
3360       if (source_location_impl == error_mark_node)
3361 	inform (loc, "evaluating %qs", "__builtin_source_location");
3362     }
3363   if (source_location_impl == error_mark_node)
3364     return build_zero_cst (const_ptr_type_node);
3365   if (source_location_table == NULL)
3366     source_location_table
3367       = hash_table <source_location_table_entry_hash>::create_ggc (64);
3368   const line_map_ordinary *map;
3369   source_location_table_entry entry;
3370   entry.loc
3371     = linemap_resolve_location (line_table, loc, LRK_MACRO_EXPANSION_POINT,
3372 				&map);
3373   entry.uid = current_function_decl ? DECL_UID (current_function_decl) : -1;
3374   entry.var = error_mark_node;
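  /* A freshly inserted slot is zero-initialized (empty_zero_p), so a null
     ENTRYP->var below means no variable exists yet for this key.  */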
3375   source_location_table_entry *entryp
3376     = source_location_table->find_slot (entry, INSERT);
3377   tree var;
3378   if (entryp->var)
3379     var = entryp->var;
3380   else
3381     {
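      /* Create a uniquely named, internal, constexpr static variable
	 of the __impl type to hold the location data.  */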
3382       char tmp_name[32];
3383       ASM_GENERATE_INTERNAL_LABEL (tmp_name, "Lsrc_loc", source_location_id++);
3384       var = build_decl (loc, VAR_DECL, get_identifier (tmp_name),
3385 			source_location_impl);
3386       TREE_STATIC (var) = 1;
3387       TREE_PUBLIC (var) = 0;
3388       DECL_ARTIFICIAL (var) = 1;
3389       DECL_IGNORED_P (var) = 1;
3390       DECL_EXTERNAL (var) = 0;
3391       DECL_DECLARED_CONSTEXPR_P (var) = 1;
3392       DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P (var) = 1;
3393       layout_decl (var, 0);
3394 
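      /* Build the constant initializer: one element per __impl field,
	 in declaration order.  */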
3395       vec<constructor_elt, va_gc> *v = NULL;
3396       vec_alloc (v, 4);
3397       for (tree field = TYPE_FIELDS (source_location_impl);
3398 	   (field = next_initializable_field (field)) != NULL_TREE;
3399 	   field = DECL_CHAIN (field))
3400 	{
3401 	  const char *n = IDENTIFIER_POINTER (DECL_NAME (field));
3402 	  tree val = NULL_TREE;
3403 	  if (strcmp (n, "_M_file_name") == 0)
3404 	    {
3405 	      if (const char *fname = LOCATION_FILE (loc))
3406 		{
3407 		  fname = remap_macro_filename (fname);
3408 		  val = build_string_literal (strlen (fname) + 1, fname);
3409 		}
3410 	      else
3411 		val = build_string_literal (1, "");
3412 	    }
3413 	  else if (strcmp (n, "_M_function_name") == 0)
3414 	    {
3415 	      const char *name = "";
3416 
3417 	      if (current_function_decl)
3418 		name = cxx_printable_name (current_function_decl, 0);
3419 
3420 	      val = build_string_literal (strlen (name) + 1, name);
3421 	    }
3422 	  else if (strcmp (n, "_M_line") == 0)
3423 	    val = build_int_cst (TREE_TYPE (field), LOCATION_LINE (loc));
3424 	  else if (strcmp (n, "_M_column") == 0)
3425 	    val = build_int_cst (TREE_TYPE (field), LOCATION_COLUMN (loc));
3426 	  else
3427 	    gcc_unreachable ();
3428 	  CONSTRUCTOR_APPEND_ELT (v, field, val);
3429 	}
3430 
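      /* Wrap the elements in a CONSTRUCTOR, make it the variable's
	 constant initializer, emit the variable, and cache it in the
	 table.  */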
3431       tree ctor = build_constructor (source_location_impl, v);
3432       TREE_CONSTANT (ctor) = 1;
3433       TREE_STATIC (ctor) = 1;
3434       DECL_INITIAL (var) = ctor;
3435       varpool_node::finalize_decl (var);
3436       *entryp = entry;
3437       entryp->var = var;
3438     }
3439 
3440   return build_fold_addr_expr_with_type_loc (loc, var, const_ptr_type_node);
3441 }
3442 
3443 #include "gt-cp-cp-gimplify.h"
3444