xref: /openbsd-src/gnu/usr.bin/gcc/gcc/predict.c (revision c87b03e512fc05ed6e0222f6fb0ae86264b1d05b)
1 /* Branch prediction routines for the GNU compiler.
2    Copyright (C) 2000, 2001, 2002 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 2, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING.  If not, write to the Free
18 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
19 02111-1307, USA.  */
20 
21 /* References:
22 
23    [1] "Branch Prediction for Free"
24        Ball and Larus; PLDI '93.
25    [2] "Static Branch Frequency and Program Profile Analysis"
26        Wu and Larus; MICRO-27.
27    [3] "Corpus-based Static Branch Prediction"
28        Calder, Grunwald, Lindsay, Martin, Mozer, and Zorn; PLDI '95.  */
29 
30 
31 #include "config.h"
32 #include "system.h"
33 #include "tree.h"
34 #include "rtl.h"
35 #include "tm_p.h"
36 #include "hard-reg-set.h"
37 #include "basic-block.h"
38 #include "insn-config.h"
39 #include "regs.h"
40 #include "flags.h"
41 #include "output.h"
42 #include "function.h"
43 #include "except.h"
44 #include "toplev.h"
45 #include "recog.h"
46 #include "expr.h"
47 #include "predict.h"
48 #include "profile.h"
49 #include "real.h"
50 #include "params.h"
51 #include "target.h"
52 #include "loop.h"
53 
54 /* real constants: 0, 1, 1-1/REG_BR_PROB_BASE, REG_BR_PROB_BASE,
55 		   1/REG_BR_PROB_BASE, 0.5, BB_FREQ_MAX.  */
56 static REAL_VALUE_TYPE real_zero, real_one, real_almost_one, real_br_prob_base,
57 		       real_inv_br_prob_base, real_one_half, real_bb_freq_max;
58 
59 /* Random guesstimation given names.  */
60 #define PROB_VERY_UNLIKELY	(REG_BR_PROB_BASE / 10 - 1)
61 #define PROB_EVEN		(REG_BR_PROB_BASE / 2)
62 #define PROB_VERY_LIKELY	(REG_BR_PROB_BASE - PROB_VERY_UNLIKELY)
63 #define PROB_ALWAYS		(REG_BR_PROB_BASE)
64 
65 static bool predicted_by_p		 PARAMS ((basic_block,
66 						  enum br_predictor));
67 static void combine_predictions_for_insn PARAMS ((rtx, basic_block));
68 static void dump_prediction		 PARAMS ((enum br_predictor, int,
69 						  basic_block, int));
70 static void estimate_loops_at_level	 PARAMS ((struct loop *loop));
71 static void propagate_freq		 PARAMS ((struct loop *));
72 static void estimate_bb_frequencies	 PARAMS ((struct loops *));
73 static void counts_to_freqs		 PARAMS ((void));
74 static void process_note_predictions	 PARAMS ((basic_block, int *,
75 						  dominance_info,
76 						  dominance_info));
77 static void process_note_prediction	 PARAMS ((basic_block, int *,
78 						  dominance_info,
79 						  dominance_info, int, int));
80 static bool last_basic_block_p           PARAMS ((basic_block));
81 static void compute_function_frequency	 PARAMS ((void));
82 static void choose_function_section	 PARAMS ((void));
83 static bool can_predict_insn_p		 PARAMS ((rtx));
84 
85 /* Information we hold about each branch predictor.
86    Filled using information from predict.def.  */
87 
88 struct predictor_info
89 {
90   const char *const name;	/* Name used in the debugging dumps.  */
91   const int hitrate;		/* Expected hitrate used by
92 				   predict_insn_def call.  */
93   const int flags;
94 };
95 
/* Use given predictor without Dempster-Shafer theory if it matches
   using first_match heuristics.  */
98 #define PRED_FLAG_FIRST_MATCH 1
99 
100 /* Recompute hitrate in percent to our representation.  */
101 
102 #define HITRATE(VAL) ((int) ((VAL) * REG_BR_PROB_BASE + 50) / 100)
103 
104 #define DEF_PREDICTOR(ENUM, NAME, HITRATE, FLAGS) {NAME, HITRATE, FLAGS},
105 static const struct predictor_info predictor_info[]= {
106 #include "predict.def"
107 
108   /* Upper bound on predictors.  */
109   {NULL, 0, 0}
110 };
111 #undef DEF_PREDICTOR
112 
/* Return true in case BB can be CPU intensive and should be optimized
   for maximal performance.  */
115 
116 bool
maybe_hot_bb_p(bb)117 maybe_hot_bb_p (bb)
118      basic_block bb;
119 {
120   if (profile_info.count_profiles_merged
121       && flag_branch_probabilities
122       && (bb->count
123 	  < profile_info.max_counter_in_program
124 	  / PARAM_VALUE (HOT_BB_COUNT_FRACTION)))
125     return false;
126   if (bb->frequency < BB_FREQ_MAX / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION))
127     return false;
128   return true;
129 }
130 
131 /* Return true in case BB is cold and should be optimized for size.  */
132 
133 bool
probably_cold_bb_p(bb)134 probably_cold_bb_p (bb)
135      basic_block bb;
136 {
137   if (profile_info.count_profiles_merged
138       && flag_branch_probabilities
139       && (bb->count
140 	  < profile_info.max_counter_in_program
141 	  / PARAM_VALUE (HOT_BB_COUNT_FRACTION)))
142     return true;
143   if (bb->frequency < BB_FREQ_MAX / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION))
144     return true;
145   return false;
146 }
147 
148 /* Return true in case BB is probably never executed.  */
149 bool
probably_never_executed_bb_p(bb)150 probably_never_executed_bb_p (bb)
151 	basic_block bb;
152 {
153   if (profile_info.count_profiles_merged
154       && flag_branch_probabilities)
155     return ((bb->count + profile_info.count_profiles_merged / 2)
156 	    / profile_info.count_profiles_merged) == 0;
157   return false;
158 }
159 
160 /* Return true if the one of outgoing edges is already predicted by
161    PREDICTOR.  */
162 
163 static bool
predicted_by_p(bb,predictor)164 predicted_by_p (bb, predictor)
165      basic_block bb;
166      enum br_predictor predictor;
167 {
168   rtx note;
169   if (!INSN_P (bb->end))
170     return false;
171   for (note = REG_NOTES (bb->end); note; note = XEXP (note, 1))
172     if (REG_NOTE_KIND (note) == REG_BR_PRED
173 	&& INTVAL (XEXP (XEXP (note, 0), 0)) == (int)predictor)
174       return true;
175   return false;
176 }
177 
178 void
predict_insn(insn,predictor,probability)179 predict_insn (insn, predictor, probability)
180      rtx insn;
181      int probability;
182      enum br_predictor predictor;
183 {
184   if (!any_condjump_p (insn))
185     abort ();
186   if (!flag_guess_branch_prob)
187     return;
188 
189   REG_NOTES (insn)
190     = gen_rtx_EXPR_LIST (REG_BR_PRED,
191 			 gen_rtx_CONCAT (VOIDmode,
192 					 GEN_INT ((int) predictor),
193 					 GEN_INT ((int) probability)),
194 			 REG_NOTES (insn));
195 }
196 
197 /* Predict insn by given predictor.  */
198 
199 void
predict_insn_def(insn,predictor,taken)200 predict_insn_def (insn, predictor, taken)
201      rtx insn;
202      enum br_predictor predictor;
203      enum prediction taken;
204 {
205    int probability = predictor_info[(int) predictor].hitrate;
206 
207    if (taken != TAKEN)
208      probability = REG_BR_PROB_BASE - probability;
209 
210    predict_insn (insn, predictor, probability);
211 }
212 
213 /* Predict edge E with given probability if possible.  */
214 
215 void
predict_edge(e,predictor,probability)216 predict_edge (e, predictor, probability)
217      edge e;
218      int probability;
219      enum br_predictor predictor;
220 {
221   rtx last_insn;
222   last_insn = e->src->end;
223 
224   /* We can store the branch prediction information only about
225      conditional jumps.  */
226   if (!any_condjump_p (last_insn))
227     return;
228 
229   /* We always store probability of branching.  */
230   if (e->flags & EDGE_FALLTHRU)
231     probability = REG_BR_PROB_BASE - probability;
232 
233   predict_insn (last_insn, predictor, probability);
234 }
235 
236 /* Return true when we can store prediction on insn INSN.
237    At the moment we represent predictions only on conditional
238    jumps, not at computed jump or other complicated cases.  */
239 static bool
can_predict_insn_p(insn)240 can_predict_insn_p (insn)
241 	rtx insn;
242 {
243   return (GET_CODE (insn) == JUMP_INSN
244 	  && any_condjump_p (insn)
245 	  && BLOCK_FOR_INSN (insn)->succ->succ_next);
246 }
247 
248 /* Predict edge E by given predictor if possible.  */
249 
250 void
predict_edge_def(e,predictor,taken)251 predict_edge_def (e, predictor, taken)
252      edge e;
253      enum br_predictor predictor;
254      enum prediction taken;
255 {
256    int probability = predictor_info[(int) predictor].hitrate;
257 
258    if (taken != TAKEN)
259      probability = REG_BR_PROB_BASE - probability;
260 
261    predict_edge (e, predictor, probability);
262 }
263 
264 /* Invert all branch predictions or probability notes in the INSN.  This needs
265    to be done each time we invert the condition used by the jump.  */
266 
267 void
invert_br_probabilities(insn)268 invert_br_probabilities (insn)
269      rtx insn;
270 {
271   rtx note;
272 
273   for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
274     if (REG_NOTE_KIND (note) == REG_BR_PROB)
275       XEXP (note, 0) = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (note, 0)));
276     else if (REG_NOTE_KIND (note) == REG_BR_PRED)
277       XEXP (XEXP (note, 0), 1)
278 	= GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (XEXP (note, 0), 1)));
279 }
280 
281 /* Dump information about the branch prediction to the output file.  */
282 
283 static void
dump_prediction(predictor,probability,bb,used)284 dump_prediction (predictor, probability, bb, used)
285      enum br_predictor predictor;
286      int probability;
287      basic_block bb;
288      int used;
289 {
290   edge e = bb->succ;
291 
292   if (!rtl_dump_file)
293     return;
294 
295   while (e && (e->flags & EDGE_FALLTHRU))
296     e = e->succ_next;
297 
298   fprintf (rtl_dump_file, "  %s heuristics%s: %.1f%%",
299 	   predictor_info[predictor].name,
300 	   used ? "" : " (ignored)", probability * 100.0 / REG_BR_PROB_BASE);
301 
302   if (bb->count)
303     {
304       fprintf (rtl_dump_file, "  exec ");
305       fprintf (rtl_dump_file, HOST_WIDEST_INT_PRINT_DEC, bb->count);
306       if (e)
307 	{
308 	  fprintf (rtl_dump_file, " hit ");
309 	  fprintf (rtl_dump_file, HOST_WIDEST_INT_PRINT_DEC, e->count);
310 	  fprintf (rtl_dump_file, " (%.1f%%)", e->count * 100.0 / bb->count);
311 	}
312     }
313 
314   fprintf (rtl_dump_file, "\n");
315 }
316 
317 /* Combine all REG_BR_PRED notes into single probability and attach REG_BR_PROB
318    note if not already present.  Remove now useless REG_BR_PRED notes.  */
319 
static void
combine_predictions_for_insn (insn, bb)
     rtx insn;
     basic_block bb;
{
  rtx prob_note = find_reg_note (insn, REG_BR_PROB, 0);
  rtx *pnote = &REG_NOTES (insn);
  rtx note;
  int best_probability = PROB_EVEN;
  int best_predictor = END_PREDICTORS;
  int combined_probability = REG_BR_PROB_BASE / 2;
  int d;
  bool first_match = false;
  bool found = false;

  if (rtl_dump_file)
    fprintf (rtl_dump_file, "Predictions for insn %i bb %i\n", INSN_UID (insn),
	     bb->index);

  /* We implement "first match" heuristics and use probability guessed
     by predictor with smallest index.  In parallel, fold every
     prediction into a single Dempster-Shafer combined probability.
     In the future we will use better probability combination
     techniques.  */
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PRED)
      {
	int predictor = INTVAL (XEXP (XEXP (note, 0), 0));
	int probability = INTVAL (XEXP (XEXP (note, 0), 1));

	found = true;
	/* Smaller predictor index wins the first-match candidate.  */
	if (best_predictor > predictor)
	  best_probability = probability, best_predictor = predictor;

	/* Dempster-Shafer denominator: p*q + (1-p)*(1-q), scaled by
	   REG_BR_PROB_BASE^2.  */
	d = (combined_probability * probability
	     + (REG_BR_PROB_BASE - combined_probability)
	     * (REG_BR_PROB_BASE - probability));

	/* Use FP math to avoid overflows of 32bit integers.  */
	if (d == 0)
	  /* If one probability is 0% and one 100%, avoid division by zero.  */
	  combined_probability = REG_BR_PROB_BASE / 2;
	else
	  combined_probability = (((double) combined_probability) * probability
				  * REG_BR_PROB_BASE / d + 0.5);
      }

  /* Decide which heuristic to use.  In case we didn't match anything,
     use no_prediction heuristic, in case we did match, use either
     first match or Dempster-Shafer theory depending on the flags.  */

  if (predictor_info [best_predictor].flags & PRED_FLAG_FIRST_MATCH)
    first_match = true;

  if (!found)
    dump_prediction (PRED_NO_PREDICTION, combined_probability, bb, true);
  else
    {
      dump_prediction (PRED_DS_THEORY, combined_probability, bb, !first_match);
      dump_prediction (PRED_FIRST_MATCH, best_probability, bb, first_match);
    }

  if (first_match)
    combined_probability = best_probability;
  dump_prediction (PRED_COMBINED, combined_probability, bb, true);

  /* Dump and unlink every REG_BR_PRED note; PNOTE stays pointing at the
     link field so deletion splices the list in place.  */
  while (*pnote)
    {
      if (REG_NOTE_KIND (*pnote) == REG_BR_PRED)
	{
	  int predictor = INTVAL (XEXP (XEXP (*pnote, 0), 0));
	  int probability = INTVAL (XEXP (XEXP (*pnote, 0), 1));

	  dump_prediction (predictor, probability, bb,
			   !first_match || best_predictor == predictor);
	  *pnote = XEXP (*pnote, 1);
	}
      else
	pnote = &XEXP (*pnote, 1);
    }

  /* Attach the resulting REG_BR_PROB note unless one already exists.  */
  if (!prob_note)
    {
      REG_NOTES (insn)
	= gen_rtx_EXPR_LIST (REG_BR_PROB,
			     GEN_INT (combined_probability), REG_NOTES (insn));

      /* Save the prediction into CFG in case we are seeing non-degenerated
	 conditional jump.  */
      if (bb->succ->succ_next)
	{
	  BRANCH_EDGE (bb)->probability = combined_probability;
	  FALLTHRU_EDGE (bb)->probability
	    = REG_BR_PROB_BASE - combined_probability;
	}
    }
}
415 
416 /* Statically estimate the probability that a branch will be taken.
417    ??? In the next revision there will be a number of other predictors added
418    from the above references. Further, each heuristic will be factored out
419    into its own function for clarity (and to facilitate the combination of
420    predictions).  */
421 
422 void
estimate_probability(loops_info)423 estimate_probability (loops_info)
424      struct loops *loops_info;
425 {
426   dominance_info dominators, post_dominators;
427   basic_block bb;
428   int i;
429 
430   connect_infinite_loops_to_exit ();
431   dominators = calculate_dominance_info (CDI_DOMINATORS);
432   post_dominators = calculate_dominance_info (CDI_POST_DOMINATORS);
433 
434   /* Try to predict out blocks in a loop that are not part of a
435      natural loop.  */
436   for (i = 1; i < loops_info->num; i++)
437     {
438       basic_block bb, *bbs;
439       int j;
440       int exits;
441       struct loop *loop = loops_info->parray[i];
442 
443       flow_loop_scan (loops_info, loop, LOOP_EXIT_EDGES);
444       exits = loop->num_exits;
445 
446       bbs = get_loop_body (loop);
447       for (j = 0; j < loop->num_nodes; j++)
448 	{
449 	  int header_found = 0;
450 	  edge e;
451 
452 	  bb = bbs[j];
453 
454 	  /* Bypass loop heuristics on continue statement.  These
455 	     statements construct loops via "non-loop" constructs
456 	     in the source language and are better to be handled
457 	     separately.  */
458 	  if (!can_predict_insn_p (bb->end)
459 	      || predicted_by_p (bb, PRED_CONTINUE))
460 	    continue;
461 
462 	  /* Loop branch heuristics - predict an edge back to a
463 	     loop's head as taken.  */
464 	  for (e = bb->succ; e; e = e->succ_next)
465 	    if (e->dest == loop->header
466 		&& e->src == loop->latch)
467 	      {
468 		header_found = 1;
469 		predict_edge_def (e, PRED_LOOP_BRANCH, TAKEN);
470 	      }
471 
472 	  /* Loop exit heuristics - predict an edge exiting the loop if the
473 	     conditinal has no loop header successors as not taken.  */
474 	  if (!header_found)
475 	    for (e = bb->succ; e; e = e->succ_next)
476 	      if (e->dest->index < 0
477 		  || !flow_bb_inside_loop_p (loop, e->dest))
478 		predict_edge
479 		  (e, PRED_LOOP_EXIT,
480 		   (REG_BR_PROB_BASE
481 		    - predictor_info [(int) PRED_LOOP_EXIT].hitrate)
482 		   / exits);
483 	}
484     }
485 
486   /* Attempt to predict conditional jumps using a number of heuristics.  */
487   FOR_EACH_BB (bb)
488     {
489       rtx last_insn = bb->end;
490       rtx cond, earliest;
491       edge e;
492 
493       if (! can_predict_insn_p (last_insn))
494 	continue;
495 
496       for (e = bb->succ; e; e = e->succ_next)
497 	{
498 	  /* Predict early returns to be probable, as we've already taken
499 	     care for error returns and other are often used for fast paths
500 	     trought function.  */
501 	  if ((e->dest == EXIT_BLOCK_PTR
502 	       || (e->dest->succ && !e->dest->succ->succ_next
503 		   && e->dest->succ->dest == EXIT_BLOCK_PTR))
504 	       && !predicted_by_p (bb, PRED_NULL_RETURN)
505 	       && !predicted_by_p (bb, PRED_CONST_RETURN)
506 	       && !predicted_by_p (bb, PRED_NEGATIVE_RETURN)
507 	       && !last_basic_block_p (e->dest))
508 	    predict_edge_def (e, PRED_EARLY_RETURN, TAKEN);
509 
510 	  /* Look for block we are guarding (ie we dominate it,
511 	     but it doesn't postdominate us).  */
512 	  if (e->dest != EXIT_BLOCK_PTR && e->dest != bb
513 	      && dominated_by_p (dominators, e->dest, e->src)
514 	      && !dominated_by_p (post_dominators, e->src, e->dest))
515 	    {
516 	      rtx insn;
517 
518 	      /* The call heuristic claims that a guarded function call
519 		 is improbable.  This is because such calls are often used
520 		 to signal exceptional situations such as printing error
521 		 messages.  */
522 	      for (insn = e->dest->head; insn != NEXT_INSN (e->dest->end);
523 		   insn = NEXT_INSN (insn))
524 		if (GET_CODE (insn) == CALL_INSN
525 		    /* Constant and pure calls are hardly used to signalize
526 		       something exceptional.  */
527 		    && ! CONST_OR_PURE_CALL_P (insn))
528 		  {
529 		    predict_edge_def (e, PRED_CALL, NOT_TAKEN);
530 		    break;
531 		  }
532 	    }
533 	}
534 
535       cond = get_condition (last_insn, &earliest);
536       if (! cond)
537 	continue;
538 
539       /* Try "pointer heuristic."
540 	 A comparison ptr == 0 is predicted as false.
541 	 Similarly, a comparison ptr1 == ptr2 is predicted as false.  */
542       if (GET_RTX_CLASS (GET_CODE (cond)) == '<'
543 	  && ((REG_P (XEXP (cond, 0)) && REG_POINTER (XEXP (cond, 0)))
544 	      || (REG_P (XEXP (cond, 1)) && REG_POINTER (XEXP (cond, 1)))))
545 	{
546 	  if (GET_CODE (cond) == EQ)
547 	    predict_insn_def (last_insn, PRED_POINTER, NOT_TAKEN);
548 	  else if (GET_CODE (cond) == NE)
549 	    predict_insn_def (last_insn, PRED_POINTER, TAKEN);
550 	}
551       else
552 
553       /* Try "opcode heuristic."
554 	 EQ tests are usually false and NE tests are usually true. Also,
555 	 most quantities are positive, so we can make the appropriate guesses
556 	 about signed comparisons against zero.  */
557 	switch (GET_CODE (cond))
558 	  {
559 	  case CONST_INT:
560 	    /* Unconditional branch.  */
561 	    predict_insn_def (last_insn, PRED_UNCONDITIONAL,
562 			      cond == const0_rtx ? NOT_TAKEN : TAKEN);
563 	    break;
564 
565 	  case EQ:
566 	  case UNEQ:
567 	    /* Floating point comparisons appears to behave in a very
568 	       inpredictable way because of special role of = tests in
569 	       FP code.  */
570 	    if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
571 	      ;
572 	    /* Comparisons with 0 are often used for booleans and there is
573 	       nothing usefull to predict about them.  */
574 	    else if (XEXP (cond, 1) == const0_rtx
575 		     || XEXP (cond, 0) == const0_rtx)
576 	      ;
577 	    else
578 	      predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, NOT_TAKEN);
579 	    break;
580 
581 	  case NE:
582 	  case LTGT:
583 	    /* Floating point comparisons appears to behave in a very
584 	       inpredictable way because of special role of = tests in
585 	       FP code.  */
586 	    if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
587 	      ;
588 	    /* Comparisons with 0 are often used for booleans and there is
589 	       nothing usefull to predict about them.  */
590 	    else if (XEXP (cond, 1) == const0_rtx
591 		     || XEXP (cond, 0) == const0_rtx)
592 	      ;
593 	    else
594 	      predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, TAKEN);
595 	    break;
596 
597 	  case ORDERED:
598 	    predict_insn_def (last_insn, PRED_FPOPCODE, TAKEN);
599 	    break;
600 
601 	  case UNORDERED:
602 	    predict_insn_def (last_insn, PRED_FPOPCODE, NOT_TAKEN);
603 	    break;
604 
605 	  case LE:
606 	  case LT:
607 	    if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
608 		|| XEXP (cond, 1) == constm1_rtx)
609 	      predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, NOT_TAKEN);
610 	    break;
611 
612 	  case GE:
613 	  case GT:
614 	    if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
615 		|| XEXP (cond, 1) == constm1_rtx)
616 	      predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, TAKEN);
617 	    break;
618 
619 	  default:
620 	    break;
621 	  }
622     }
623 
624   /* Attach the combined probability to each conditional jump.  */
625   FOR_EACH_BB (bb)
626     if (GET_CODE (bb->end) == JUMP_INSN
627 	&& any_condjump_p (bb->end)
628 	&& bb->succ->succ_next != NULL)
629       combine_predictions_for_insn (bb->end, bb);
630 
631   free_dominance_info (post_dominators);
632   free_dominance_info (dominators);
633 
634   remove_fake_edges ();
635   estimate_bb_frequencies (loops_info);
636 }
637 
638 /* __builtin_expect dropped tokens into the insn stream describing expected
639    values of registers.  Generate branch probabilities based off these
640    values.  */
641 
void
expected_value_to_br_prob ()
{
  rtx insn, cond, ev = NULL_RTX, ev_reg = NULL_RTX;

  /* Walk the whole insn stream, carrying the most recent expected-value
     note (EV, its register EV_REG) forward until it is consumed,
     invalidated, or we cross a label.  */
  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
	{
	case NOTE:
	  /* Look for expected value notes.  */
	  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EXPECTED_VALUE)
	    {
	      ev = NOTE_EXPECTED_VALUE (insn);
	      ev_reg = XEXP (ev, 0);
	      delete_insn (insn);
	    }
	  continue;

	case CODE_LABEL:
	  /* Never propagate across labels.  */
	  ev = NULL_RTX;
	  continue;

	case JUMP_INSN:
	  /* Look for simple conditional branches.  If we haven't got an
	     expected value yet, no point going further.  (The GET_CODE
	     test is redundant in this arm but harmless.)  */
	  if (GET_CODE (insn) != JUMP_INSN || ev == NULL_RTX
	      || ! any_condjump_p (insn))
	    continue;
	  break;

	default:
	  /* Look for insns that clobber the EV register.  */
	  if (ev && reg_set_p (ev_reg, insn))
	    ev = NULL_RTX;
	  continue;
	}

      /* Collect the branch condition, hopefully relative to EV_REG.  */
      /* ???  At present we'll miss things like
		(expected_value (eq r70 0))
		(set r71 -1)
		(set r80 (lt r70 r71))
		(set pc (if_then_else (ne r80 0) ...))
	 as canonicalize_condition will render this to us as
		(lt r70, r71)
	 Could use cselib to try and reduce this further.  */
      cond = XEXP (SET_SRC (pc_set (insn)), 0);
      cond = canonicalize_condition (insn, cond, 0, NULL, ev_reg);
      if (! cond || XEXP (cond, 0) != ev_reg
	  || GET_CODE (XEXP (cond, 1)) != CONST_INT)
	continue;

      /* Substitute and simplify.  Given that the expression we're
	 building involves two constants, we should wind up with either
	 true or false.  */
      cond = gen_rtx_fmt_ee (GET_CODE (cond), VOIDmode,
			     XEXP (ev, 1), XEXP (cond, 1));
      cond = simplify_rtx (cond);

      /* Turn the condition into a scaled branch probability.  */
      if (cond != const_true_rtx && cond != const0_rtx)
	abort ();
      predict_insn_def (insn, PRED_BUILTIN_EXPECT,
		        cond == const_true_rtx ? TAKEN : NOT_TAKEN);
    }
}
710 
/* Check whether this is the last basic block of function.  Commonly there
   is one extra common cleanup block.  */
713 static bool
last_basic_block_p(bb)714 last_basic_block_p (bb)
715      basic_block bb;
716 {
717   if (bb == EXIT_BLOCK_PTR)
718     return false;
719 
720   return (bb->next_bb == EXIT_BLOCK_PTR
721 	  || (bb->next_bb->next_bb == EXIT_BLOCK_PTR
722 	      && bb->succ && !bb->succ->succ_next
723 	      && bb->succ->dest->next_bb == EXIT_BLOCK_PTR));
724 }
725 
726 /* Sets branch probabilities according to PREDiction and FLAGS. HEADS[bb->index]
727    should be index of basic block in that we need to alter branch predictions
728    (i.e. the first of our dominators such that we do not post-dominate it)
729    (but we fill this information on demand, so -1 may be there in case this
730    was not needed yet).  */
731 
static void
process_note_prediction (bb, heads, dominators, post_dominators, pred, flags)
     basic_block bb;
     int *heads;
     dominance_info dominators;
     dominance_info post_dominators;
     int pred;
     int flags;
{
  edge e;
  int y;
  bool taken;

  taken = flags & IS_TAKEN;

  if (heads[bb->index] < 0)
    {
      /* This is first time we need this field in heads array; so
         find first dominator that we do not post-dominate (we are
         using already known members of heads array).  */
      basic_block ai = bb;
      basic_block next_ai = get_immediate_dominator (dominators, bb);
      int head;

      /* Walk up the dominator tree while BB post-dominates, chaining
	 each visited node to its predecessor on the walk.  */
      while (heads[next_ai->index] < 0)
	{
	  if (!dominated_by_p (post_dominators, next_ai, bb))
	    break;
	  heads[next_ai->index] = ai->index;
	  ai = next_ai;
	  next_ai = get_immediate_dominator (dominators, next_ai);
	}
      if (!dominated_by_p (post_dominators, next_ai, bb))
	head = next_ai->index;
      else
	head = heads[next_ai->index];
      /* Path compression: rewrite the whole chain to point directly
	 at the head found above.  */
      while (next_ai != bb)
	{
	  next_ai = ai;
	  if (heads[ai->index] == ENTRY_BLOCK)
	    ai = ENTRY_BLOCK_PTR;
	  else
	    ai = BASIC_BLOCK (heads[ai->index]);
	  heads[next_ai->index] = head;
	}
    }
  y = heads[bb->index];

  /* Now find the edge that leads to our branch and apply the prediction.  */

  if (y == last_basic_block || !can_predict_insn_p (BASIC_BLOCK (y)->end))
    return;
  for (e = BASIC_BLOCK (y)->succ; e; e = e->succ_next)
    if (e->dest->index >= 0
	&& dominated_by_p (post_dominators, e->dest, bb))
      predict_edge_def (e, pred, taken);
}
789 
790 /* Gathers NOTE_INSN_PREDICTIONs in given basic block and turns them
791    into branch probabilities.  For description of heads array, see
792    process_note_prediction.  */
793 
static void
process_note_predictions (bb, heads, dominators, post_dominators)
     basic_block bb;
     int *heads;
     dominance_info dominators;
     dominance_info post_dominators;
{
  rtx insn;
  edge e;

  /* Additionally, we check here for blocks with no successors.  */
  int contained_noreturn_call = 0;
  int was_bb_head = 0;
  int noreturn_block = 1;

  /* Scan the block's insns backwards; stop at the first non-note once
     the block head has been passed.  */
  for (insn = bb->end; insn;
       was_bb_head |= (insn == bb->head), insn = PREV_INSN (insn))
    {
      if (GET_CODE (insn) != NOTE)
	{
	  if (was_bb_head)
	    break;
	  else
	    {
	      /* Noreturn calls cause program to exit, therefore they are
	         always predicted as not taken.  */
	      if (GET_CODE (insn) == CALL_INSN
		  && find_reg_note (insn, REG_NORETURN, NULL))
		contained_noreturn_call = 1;
	      continue;
	    }
	}
      if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PREDICTION)
	{
	  int alg = (int) NOTE_PREDICTION_ALG (insn);
	  /* Process single prediction note.  */
	  process_note_prediction (bb,
				   heads,
				   dominators,
				   post_dominators,
				   alg, (int) NOTE_PREDICTION_FLAGS (insn));
	  delete_insn (insn);
	}
    }
  /* NOTE(review): noreturn_block is computed here but never read
     afterwards — looks vestigial; confirm before removing.  */
  for (e = bb->succ; e; e = e->succ_next)
    if (!(e->flags & EDGE_FAKE))
      noreturn_block = 0;
  if (contained_noreturn_call)
    {
      /* This block ended for reasons other than a return.  If it is
         because of a noreturn call, this should certainly not be taken.
         Otherwise it is probably some error recovery.  */
      process_note_prediction (bb,
			       heads,
			       dominators,
			       post_dominators, PRED_NORETURN, NOT_TAKEN);
    }
}
852 
853 /* Gathers NOTE_INSN_PREDICTIONs and turns them into
854    branch probabilities.  */
855 
void
note_prediction_to_br_prob ()
{
  basic_block bb;
  dominance_info post_dominators, dominators;
  int *heads;

  /* To enable handling of noreturn blocks.  */
  add_noreturn_fake_exit_edges ();
  connect_infinite_loops_to_exit ();

  post_dominators = calculate_dominance_info (CDI_POST_DOMINATORS);
  dominators = calculate_dominance_info (CDI_DOMINATORS);

  /* heads[i] caches, per block, the dominator whose branch the block's
     prediction should be applied to; -1 means "not computed yet", and
     the entry block's successor is anchored at last_basic_block (the
     "no branch to predict" sentinel).  See process_note_prediction.  */
  heads = xmalloc (sizeof (int) * last_basic_block);
  memset (heads, -1, sizeof (int) * last_basic_block);
  heads[ENTRY_BLOCK_PTR->next_bb->index] = last_basic_block;

  /* Process all prediction notes.  */

  FOR_EACH_BB (bb)
    process_note_predictions (bb, heads, dominators, post_dominators);

  free_dominance_info (post_dominators);
  free_dominance_info (dominators);
  free (heads);

  /* Undo the fake edges added above.  */
  remove_fake_edges ();
}
885 
886 /* This is used to carry information about basic blocks.  It is
887    attached to the AUX field of the standard CFG block.  */
888 
typedef struct block_info_def
{
  /* Estimated frequency of execution of basic_block.  */
  REAL_VALUE_TYPE frequency;

  /* To keep queue of basic blocks to process.  */
  basic_block next;

  /* True if block needs to be visited in propagate_freq.  */
  int tovisit:1;

  /* Number of predecessors we need to visit first.  */
  int npredecessors;
} *block_info;
903 
904 /* Similar information for edges.  */
typedef struct edge_info_def
{
  /* In case the edge is a loopback edge, the probability the edge will be
     reached in case the header is.  Estimated number of iterations of the
     loop can be then computed as 1 / (1 - back_edge_prob).  */
  REAL_VALUE_TYPE back_edge_prob;
  /* True if the edge is a loopback edge in the natural loop.  */
  int back_edge:1;
} *edge_info;
914 
915 #define BLOCK_INFO(B)	((block_info) (B)->aux)
916 #define EDGE_INFO(E)	((edge_info) (E)->aux)
917 
/* Helper function for estimate_bb_frequencies.
   Propagate the frequencies for LOOP.  Blocks are processed in
   topological order over forward edges; the contribution of back edges
   into the loop header is folded into cyclic_probability so the header
   frequency is scaled by 1 / (1 - cyclic_probability).  */

static void
propagate_freq (loop)
     struct loop *loop;
{
  basic_block head = loop->header;
  basic_block bb;
  basic_block last;
  edge e;
  basic_block nextbb;

  /* For each basic block we need to visit count number of his predecessors
     we need to visit first.  */
  FOR_EACH_BB (bb)
    {
      if (BLOCK_INFO (bb)->tovisit)
	{
	  int count = 0;

	  for (e = bb->pred; e; e = e->pred_next)
	    if (BLOCK_INFO (e->src)->tovisit && !(e->flags & EDGE_DFS_BACK))
	      count++;
	    else if (BLOCK_INFO (e->src)->tovisit
		     && rtl_dump_file && !EDGE_INFO (e)->back_edge)
	      /* A DFS back edge that is not the natural loop's marked back
		 edge indicates an irreducible region; it is not counted and
		 its contribution is dropped.  */
	      fprintf (rtl_dump_file,
		       "Irreducible region hit, ignoring edge to %i->%i\n",
		       e->src->index, bb->index);
	  BLOCK_INFO (bb)->npredecessors = count;
	}
    }

  /* The header is processed first and is defined to have frequency 1;
     all other frequencies are relative to it.  */
  memcpy (&BLOCK_INFO (head)->frequency, &real_one, sizeof (real_one));
  last = head;
  /* Worklist walk: the NEXT links of block_info chain the queue of blocks
     all of whose counted predecessors have already been processed.  */
  for (bb = head; bb; bb = nextbb)
    {
      REAL_VALUE_TYPE cyclic_probability, frequency;

      memcpy (&cyclic_probability, &real_zero, sizeof (real_zero));
      memcpy (&frequency, &real_zero, sizeof (real_zero));

      nextbb = BLOCK_INFO (bb)->next;
      BLOCK_INFO (bb)->next = NULL;

      /* Compute frequency of basic block.  */
      if (bb != head)
	{
#ifdef ENABLE_CHECKING
	  /* Every non-back-edge predecessor must have been visited by now.  */
	  for (e = bb->pred; e; e = e->pred_next)
	    if (BLOCK_INFO (e->src)->tovisit && !(e->flags & EDGE_DFS_BACK))
	      abort ();
#endif

	  for (e = bb->pred; e; e = e->pred_next)
	    if (EDGE_INFO (e)->back_edge)
	      {
		REAL_ARITHMETIC (cyclic_probability, PLUS_EXPR,
				 cyclic_probability,
				 EDGE_INFO (e)->back_edge_prob);
	      }
	    else if (!(e->flags & EDGE_DFS_BACK))
	      {
		REAL_VALUE_TYPE tmp;

		/*  frequency += (e->probability
				  * BLOCK_INFO (e->src)->frequency /
				  REG_BR_PROB_BASE);  */

		REAL_VALUE_FROM_INT (tmp, e->probability, 0,
				     TYPE_MODE (double_type_node));
		REAL_ARITHMETIC (tmp, MULT_EXPR, tmp,
				 BLOCK_INFO (e->src)->frequency);
		REAL_ARITHMETIC (tmp, MULT_EXPR, tmp, real_inv_br_prob_base);
		REAL_ARITHMETIC (frequency, PLUS_EXPR, frequency, tmp);
	      }

	  if (REAL_VALUES_IDENTICAL (cyclic_probability, real_zero))
	    memcpy (&BLOCK_INFO (bb)->frequency, &frequency, sizeof (frequency));
	  else
	    {
	      /* Clamp the cyclic probability to 1 - 1/REG_BR_PROB_BASE so
		 the division below cannot blow up.  */
	      if (REAL_VALUES_LESS (real_almost_one, cyclic_probability))
		memcpy (&cyclic_probability, &real_almost_one, sizeof (real_zero));

	      /* BLOCK_INFO (bb)->frequency = frequency / (1 - cyclic_probability)
	       */

	      REAL_ARITHMETIC (cyclic_probability, MINUS_EXPR, real_one,
			   cyclic_probability);
	      REAL_ARITHMETIC (BLOCK_INFO (bb)->frequency,
			       RDIV_EXPR, frequency, cyclic_probability);
	    }
	}

      BLOCK_INFO (bb)->tovisit = 0;

      /* Compute back edge frequencies.  */
      for (e = bb->succ; e; e = e->succ_next)
	if (e->dest == head)
	  {
	    REAL_VALUE_TYPE tmp;

	    /* EDGE_INFO (e)->back_edge_prob
		  = ((e->probability * BLOCK_INFO (bb)->frequency)
		     / REG_BR_PROB_BASE); */
	    REAL_VALUE_FROM_INT (tmp, e->probability, 0,
				 TYPE_MODE (double_type_node));
	    REAL_ARITHMETIC (tmp, MULT_EXPR, tmp,
			     BLOCK_INFO (bb)->frequency);
	    REAL_ARITHMETIC (EDGE_INFO (e)->back_edge_prob,
			     MULT_EXPR, tmp, real_inv_br_prob_base);

	  }

      /* Propagate to successor blocks.  */
      for (e = bb->succ; e; e = e->succ_next)
	if (!(e->flags & EDGE_DFS_BACK)
	    && BLOCK_INFO (e->dest)->npredecessors)
	  {
	    BLOCK_INFO (e->dest)->npredecessors--;
	    if (!BLOCK_INFO (e->dest)->npredecessors)
	      {
		/* All predecessors done -- append E->dest to the worklist.  */
		if (!nextbb)
		  nextbb = e->dest;
		else
		  BLOCK_INFO (last)->next = e->dest;

		last = e->dest;
	      }
	   }
    }
}
1050 
1051 /* Estimate probabilities of loopback edges in loops at same nest level.  */
1052 
1053 static void
estimate_loops_at_level(first_loop)1054 estimate_loops_at_level (first_loop)
1055      struct loop *first_loop;
1056 {
1057   struct loop *loop;
1058 
1059   for (loop = first_loop; loop; loop = loop->next)
1060     {
1061       edge e;
1062       basic_block *bbs;
1063       int i;
1064 
1065       estimate_loops_at_level (loop->inner);
1066 
1067       if (loop->latch->succ)  /* Do not do this for dummy function loop.  */
1068 	{
1069 	  /* Find current loop back edge and mark it.  */
1070 	  e = loop_latch_edge (loop);
1071 	  EDGE_INFO (e)->back_edge = 1;
1072        }
1073 
1074       bbs = get_loop_body (loop);
1075       for (i = 0; i < loop->num_nodes; i++)
1076 	BLOCK_INFO (bbs[i])->tovisit = 1;
1077       free (bbs);
1078       propagate_freq (loop);
1079     }
1080 }
1081 
1082 /* Convert counts measured by profile driven feedback to frequencies.  */
1083 
1084 static void
counts_to_freqs()1085 counts_to_freqs ()
1086 {
1087   HOST_WIDEST_INT count_max = 1;
1088   basic_block bb;
1089 
1090   FOR_EACH_BB (bb)
1091     count_max = MAX (bb->count, count_max);
1092 
1093   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
1094     bb->frequency = (bb->count * BB_FREQ_MAX + count_max / 2) / count_max;
1095 }
1096 
1097 /* Return true if function is likely to be expensive, so there is no point to
1098    optimize performance of prologue, epilogue or do inlining at the expense
1099    of code size growth.  THRESHOLD is the limit of number of isntructions
1100    function can execute at average to be still considered not expensive.  */
1101 
1102 bool
expensive_function_p(threshold)1103 expensive_function_p (threshold)
1104 	int threshold;
1105 {
1106   unsigned int sum = 0;
1107   basic_block bb;
1108   unsigned int limit;
1109 
1110   /* We can not compute accurately for large thresholds due to scaled
1111      frequencies.  */
1112   if (threshold > BB_FREQ_MAX)
1113     abort ();
1114 
1115   /* Frequencies are out of range.  This either means that function contains
1116      internal loop executing more than BB_FREQ_MAX times or profile feedback
1117      is available and function has not been executed at all.  */
1118   if (ENTRY_BLOCK_PTR->frequency == 0)
1119     return true;
1120 
1121   /* Maximally BB_FREQ_MAX^2 so overflow won't happen.  */
1122   limit = ENTRY_BLOCK_PTR->frequency * threshold;
1123   FOR_EACH_BB (bb)
1124     {
1125       rtx insn;
1126 
1127       for (insn = bb->head; insn != NEXT_INSN (bb->end);
1128 	   insn = NEXT_INSN (insn))
1129 	if (active_insn_p (insn))
1130 	  {
1131 	    sum += bb->frequency;
1132 	    if (sum > limit)
1133 	      return true;
1134 	}
1135     }
1136 
1137   return false;
1138 }
1139 
/* Estimate basic blocks frequency by given branch probabilities.  */

static void
estimate_bb_frequencies (loops)
     struct loops *loops;
{
  basic_block bb;
  REAL_VALUE_TYPE freq_max;
  enum machine_mode double_mode = TYPE_MODE (double_type_node);

  if (flag_branch_probabilities)
    /* Profile feedback is available; derive frequencies from real counts.  */
    counts_to_freqs ();
  else
    {
      /* Initialize the file-scope real constants (0, 1, 1/2,
	 REG_BR_PROB_BASE and friends) used by propagate_freq and the
	 scaling code below.  */
      REAL_VALUE_FROM_INT (real_zero, 0, 0, double_mode);
      REAL_VALUE_FROM_INT (real_one, 1, 0, double_mode);
      REAL_VALUE_FROM_INT (real_br_prob_base, REG_BR_PROB_BASE, 0, double_mode);
      REAL_VALUE_FROM_INT (real_bb_freq_max, BB_FREQ_MAX, 0, double_mode);
      /* real_one_half = 1 / 2.  */
      REAL_VALUE_FROM_INT (real_one_half, 2, 0, double_mode);
      REAL_ARITHMETIC (real_one_half, RDIV_EXPR, real_one, real_one_half);
      REAL_ARITHMETIC (real_inv_br_prob_base, RDIV_EXPR, real_one, real_br_prob_base);
      REAL_ARITHMETIC (real_almost_one, MINUS_EXPR, real_one, real_inv_br_prob_base);

      mark_dfs_back_edges ();
      /* Fill in the probability values in flowgraph based on the REG_BR_PROB
         notes.  */
      FOR_EACH_BB (bb)
	{
	  rtx last_insn = bb->end;

	  if (!can_predict_insn_p (last_insn))
	    {
	      /* We can predict only conditional jumps at the moment.
	         Expect each edge to be equally probable.
	         ?? In the future we want to make abnormal edges improbable.  */
	      int nedges = 0;
	      edge e;

	      /* Count the outgoing edges; stop early if some edge already
		 carries a nonzero probability.  */
	      for (e = bb->succ; e; e = e->succ_next)
		{
		  nedges++;
		  if (e->probability != 0)
		    break;
		}
	      /* Only distribute probabilities when none were set yet.  */
	      if (!e)
		for (e = bb->succ; e; e = e->succ_next)
		  e->probability = (REG_BR_PROB_BASE + nedges / 2) / nedges;
	    }
	}

      /* The edge leaving the entry block is taken with probability 1.  */
      ENTRY_BLOCK_PTR->succ->probability = REG_BR_PROB_BASE;

      /* Set up block info for each basic block.  */
      alloc_aux_for_blocks (sizeof (struct block_info_def));
      alloc_aux_for_edges (sizeof (struct edge_info_def));
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
	{
	  edge e;

	  BLOCK_INFO (bb)->tovisit = 0;
	  for (e = bb->succ; e; e = e->succ_next)
	    {
	      /* back_edge_prob = e->probability / REG_BR_PROB_BASE.  */
	      REAL_VALUE_FROM_INT (EDGE_INFO (e)->back_edge_prob,
				   e->probability, 0, double_mode);
	      REAL_ARITHMETIC (EDGE_INFO (e)->back_edge_prob,
			       MULT_EXPR, EDGE_INFO (e)->back_edge_prob,
			       real_inv_br_prob_base);
	    }
	}

      /* First compute probabilities locally for each loop from innermost
         to outermost to examine probabilities for back edges.  */
      estimate_loops_at_level (loops->tree_root);

      /* Find the largest estimated frequency...  */
      memcpy (&freq_max, &real_zero, sizeof (real_zero));
      FOR_EACH_BB (bb)
	if (REAL_VALUES_LESS
	    (freq_max, BLOCK_INFO (bb)->frequency))
	  memcpy (&freq_max, &BLOCK_INFO (bb)->frequency,
		  sizeof (freq_max));

      /* ...and compute the scale that maps it to BB_FREQ_MAX.  */
      REAL_ARITHMETIC (freq_max, RDIV_EXPR, real_bb_freq_max, freq_max);

      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
	{
	  REAL_VALUE_TYPE tmp;

	  /* bb->frequency = frequency * scale + 1/2, rounded down --
	     i.e. rounding to the nearest integer.  */
	  REAL_ARITHMETIC (tmp, MULT_EXPR, BLOCK_INFO (bb)->frequency,
			   freq_max);
	  REAL_ARITHMETIC (tmp, PLUS_EXPR, tmp, real_one_half);
	  bb->frequency = REAL_VALUE_UNSIGNED_FIX (tmp);
	}

      free_aux_for_blocks ();
      free_aux_for_edges ();
    }
  compute_function_frequency ();
  if (flag_reorder_functions)
    choose_function_section ();
}
1240 
1241 /* Decide whether function is hot, cold or unlikely executed.  */
1242 static void
compute_function_frequency()1243 compute_function_frequency ()
1244 {
1245   basic_block bb;
1246 
1247   if (!profile_info.count_profiles_merged
1248       || !flag_branch_probabilities)
1249     return;
1250   cfun->function_frequency = FUNCTION_FREQUENCY_UNLIKELY_EXECUTED;
1251   FOR_EACH_BB (bb)
1252     {
1253       if (maybe_hot_bb_p (bb))
1254 	{
1255 	  cfun->function_frequency = FUNCTION_FREQUENCY_HOT;
1256 	  return;
1257 	}
1258       if (!probably_never_executed_bb_p (bb))
1259 	cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
1260     }
1261 }
1262 
1263 /* Choose appropriate section for the function.  */
1264 static void
choose_function_section()1265 choose_function_section ()
1266 {
1267   if (DECL_SECTION_NAME (current_function_decl)
1268       || !targetm.have_named_sections
1269       /* Theoretically we can split the gnu.linkonce text section too,
1270  	 but this requires more work as the frequency needs to match
1271 	 for all generated objects so we need to merge the frequency
1272 	 of all instances.  For now just never set frequency for these.  */
1273       || DECL_ONE_ONLY (current_function_decl))
1274     return;
1275   if (cfun->function_frequency == FUNCTION_FREQUENCY_HOT)
1276     DECL_SECTION_NAME (current_function_decl) =
1277       build_string (strlen (HOT_TEXT_SECTION_NAME), HOT_TEXT_SECTION_NAME);
1278   if (cfun->function_frequency == FUNCTION_FREQUENCY_UNLIKELY_EXECUTED)
1279     DECL_SECTION_NAME (current_function_decl) =
1280       build_string (strlen (UNLIKELY_EXECUTED_TEXT_SECTION_NAME),
1281 		    UNLIKELY_EXECUTED_TEXT_SECTION_NAME);
1282 }
1283