xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/final.c (revision 7330f729ccf0bd976a06f95fad452fe774fc7fd1)
1 /* Convert RTL to assembler code and output it, for GNU compiler.
2    Copyright (C) 1987-2017 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 /* This is the final pass of the compiler.
21    It looks at the rtl code for a function and outputs assembler code.
22 
23    Call `final_start_function' to output the assembler code for function entry,
24    `final' to output assembler code for some RTL code,
25    `final_end_function' to output assembler code for function exit.
26    If a function is compiled in several pieces, each piece is
27    output separately with `final'.
28 
29    Some optimizations are also done at this level.
30    Move instructions that were made unnecessary by good register allocation
31    are detected and omitted from the output.  (Though most of these
32    are removed by the last jump pass.)
33 
34    Instructions to set the condition codes are omitted when it can be
35    seen that the condition codes already had the desired values.
36 
37    In some cases it is sufficient if the inherited condition codes
38    have related values, but this may require the following insn
39    (the one that tests the condition codes) to be modified.
40 
41    The code for the function prologue and epilogue are generated
42    directly in assembler by the target functions function_prologue and
43    function_epilogue.  Those instructions never exist as rtl.  */
44 
45 #include "config.h"
46 #define INCLUDE_ALGORITHM /* reverse */
47 #include "system.h"
48 #include "coretypes.h"
49 #include "backend.h"
50 #include "target.h"
51 #include "rtl.h"
52 #include "tree.h"
53 #include "cfghooks.h"
54 #include "df.h"
55 #include "memmodel.h"
56 #include "tm_p.h"
57 #include "insn-config.h"
58 #include "regs.h"
59 #include "emit-rtl.h"
60 #include "recog.h"
61 #include "cgraph.h"
62 #include "tree-pretty-print.h" /* for dump_function_header */
63 #include "varasm.h"
64 #include "insn-attr.h"
65 #include "conditions.h"
66 #include "flags.h"
67 #include "output.h"
68 #include "except.h"
69 #include "rtl-error.h"
70 #include "toplev.h" /* exact_log2, floor_log2 */
71 #include "reload.h"
72 #include "intl.h"
73 #include "cfgrtl.h"
74 #include "debug.h"
75 #include "tree-pass.h"
76 #include "tree-ssa.h"
77 #include "cfgloop.h"
78 #include "params.h"
79 #include "asan.h"
80 #include "rtl-iter.h"
81 #include "print-rtl.h"
82 
83 #ifdef XCOFF_DEBUGGING_INFO
84 #include "xcoffout.h"		/* Needed for external data declarations.  */
85 #endif
86 
87 #include "dwarf2out.h"
88 
89 #ifdef DBX_DEBUGGING_INFO
90 #include "dbxout.h"
91 #endif
92 
93 #include "sdbout.h"
94 
95 /* Most ports that aren't using cc0 don't need to define CC_STATUS_INIT.
96    So define a null default for it to save conditionalization later.  */
97 #ifndef CC_STATUS_INIT
98 #define CC_STATUS_INIT
99 #endif
100 
101 /* Is the given character a logical line separator for the assembler?  */
102 #ifndef IS_ASM_LOGICAL_LINE_SEPARATOR
103 #define IS_ASM_LOGICAL_LINE_SEPARATOR(C, STR) ((C) == ';')
104 #endif
105 
106 #ifndef JUMP_TABLES_IN_TEXT_SECTION
107 #define JUMP_TABLES_IN_TEXT_SECTION 0
108 #endif
109 
110 /* Bitflags used by final_scan_insn.  */
111 #define SEEN_NOTE	1
112 #define SEEN_EMITTED	2
113 
114 /* Last insn processed by final_scan_insn.  */
115 static rtx_insn *debug_insn;
116 rtx_insn *current_output_insn;
117 
118 /* Line number of last NOTE.  */
119 static int last_linenum;
120 
121 /* Column number of last NOTE.  */
122 static int last_columnnum;
123 
124 /* Last discriminator written to assembly.  */
125 static int last_discriminator;
126 
127 /* Discriminator of current block.  */
128 static int discriminator;
129 
130 /* Highest line number in current block.  */
131 static int high_block_linenum;
132 
133 /* Likewise for function.  */
134 static int high_function_linenum;
135 
136 /* Filename of last NOTE.  */
137 static const char *last_filename;
138 
139 /* Override filename, line and column number.  */
140 static const char *override_filename;
141 static int override_linenum;
142 static int override_columnnum;
143 
144 /* Whether to force emission of a line note before the next insn.  */
145 static bool force_source_line = false;
146 
147 extern const int length_unit_log; /* This is defined in insn-attrtab.c.  */
148 
149 /* Nonzero while outputting an `asm' with operands.
150    This means that inconsistencies are the user's fault, so don't die.
151    The precise value is the insn being output, to pass to error_for_asm.  */
152 const rtx_insn *this_is_asm_operands;
153 
154 /* Number of operands of this insn, for an `asm' with operands.  */
155 static unsigned int insn_noperands;
156 
157 /* Compare optimization flag.  */
158 
159 static rtx last_ignored_compare = 0;
160 
161 /* Assign a unique number to each insn that is output.
162    This can be used to generate unique local labels.  */
163 
164 static int insn_counter = 0;
165 
166 /* This variable contains machine-dependent flags (defined in tm.h)
167    set and examined by output routines
168    that describe how to interpret the condition codes properly.  */
169 
170 CC_STATUS cc_status;
171 
172 /* During output of an insn, this contains a copy of cc_status
173    from before the insn.  */
174 
175 CC_STATUS cc_prev_status;
176 
177 /* Number of unmatched NOTE_INSN_BLOCK_BEG notes we have seen.  */
178 
179 static int block_depth;
180 
181 /* Nonzero if have enabled APP processing of our assembler output.  */
182 
183 static int app_on;
184 
185 /* If we are outputting an insn sequence, this contains the sequence rtx.
186    Zero otherwise.  */
187 
188 rtx_sequence *final_sequence;
189 
190 #ifdef ASSEMBLER_DIALECT
191 
192 /* Number of the assembler dialect to use, starting at 0.  */
193 static int dialect_number;
194 #endif
195 
196 /* Nonnull if the insn currently being emitted was a COND_EXEC pattern.  */
197 rtx current_insn_predicate;
198 
199 /* True if printing into -fdump-final-insns= dump.  */
200 bool final_insns_dump_p;
201 
202 /* True if profile_function should be called, but hasn't been called yet.  */
203 static bool need_profile_function;
204 
205 static int asm_insn_count (rtx);
206 static void profile_function (FILE *);
207 static void profile_after_prologue (FILE *);
208 static bool notice_source_line (rtx_insn *, bool *);
209 static rtx walk_alter_subreg (rtx *, bool *);
210 static void output_asm_name (void);
211 static void output_alternate_entry_point (FILE *, rtx_insn *);
212 static tree get_mem_expr_from_op (rtx, int *);
213 static void output_asm_operand_names (rtx *, int *, int);
214 #ifdef LEAF_REGISTERS
215 static void leaf_renumber_regs (rtx_insn *);
216 #endif
217 #if HAVE_cc0
218 static int alter_cond (rtx);
219 #endif
220 #ifndef ADDR_VEC_ALIGN
221 static int final_addr_vec_align (rtx_insn *);
222 #endif
223 static int align_fuzz (rtx, rtx, int, unsigned);
224 static void collect_fn_hard_reg_usage (void);
225 static tree get_call_fndecl (rtx_insn *);
226 
227 /* Initialize data in final at the beginning of a compilation.  */
228 
229 void
230 init_final (const char *filename ATTRIBUTE_UNUSED)
231 {
232   app_on = 0;
233   final_sequence = 0;
234 
235 #ifdef ASSEMBLER_DIALECT
236   dialect_number = ASSEMBLER_DIALECT;
237 #endif
238 }
239 
240 /* Default target function prologue and epilogue assembler output.
241 
242    If not overridden for epilogue code, then the function body itself
243    contains return instructions wherever needed.  */
244 void
245 default_function_pro_epilogue (FILE *file ATTRIBUTE_UNUSED,
246 			       HOST_WIDE_INT size ATTRIBUTE_UNUSED)
247 {
248 }
249 
250 void
251 default_function_switched_text_sections (FILE *file ATTRIBUTE_UNUSED,
252 					 tree decl ATTRIBUTE_UNUSED,
253 					 bool new_is_cold ATTRIBUTE_UNUSED)
254 {
255 }
256 
257 /* Default target hook that outputs nothing to a stream.  */
258 void
259 no_asm_to_stream (FILE *file ATTRIBUTE_UNUSED)
260 {
261 }
262 
263 /* Enable APP processing of subsequent output.
264    Used before the output from an `asm' statement.  */
265 
266 void
267 app_enable (void)
268 {
269   if (! app_on)
270     {
271       fputs (ASM_APP_ON, asm_out_file);
272       app_on = 1;
273     }
274 }
275 
276 /* Disable APP processing of subsequent output.
277    Called from varasm.c before most kinds of output.  */
278 
279 void
280 app_disable (void)
281 {
282   if (app_on)
283     {
284       fputs (ASM_APP_OFF, asm_out_file);
285       app_on = 0;
286     }
287 }
288 
289 /* Return the number of slots filled in the current
290    delayed branch sequence (we don't count the insn needing the
291    delay slot).   Zero if not in a delayed branch sequence.  */
292 
293 int
294 dbr_sequence_length (void)
295 {
296   if (final_sequence != 0)
297     return XVECLEN (final_sequence, 0) - 1;
298   else
299     return 0;
300 }
301 
302 /* The next two pages contain routines used to compute the length of an insn
303    and to shorten branches.  */
304 
305 /* Arrays for insn lengths, and addresses.  The latter is referenced by
306    `insn_current_length'.  */
307 
308 static int *insn_lengths;
309 
310 vec<int> insn_addresses_;
311 
312 /* Max uid for which the above arrays are valid.  */
313 static int insn_lengths_max_uid;
314 
315 /* Address of insn being processed.  Used by `insn_current_length'.  */
316 int insn_current_address;
317 
318 /* Address of insn being processed in previous iteration.  */
319 int insn_last_address;
320 
321 /* known invariant alignment of insn being processed.  */
322 int insn_current_align;
323 
324 /* After shorten_branches, for any insn, uid_align[INSN_UID (insn)]
325    gives the next following alignment insn that increases the known
326    alignment, or NULL_RTX if there is no such insn.
327    For any alignment obtained this way, we can again index uid_align with
328    its uid to obtain the next following align that in turn increases the
329    alignment, till we reach NULL_RTX; the sequence obtained this way
330    for each insn we'll call the alignment chain of this insn in the following
331    comments.  */
332 
333 struct label_alignment
334 {
335   short alignment;
336   short max_skip;
337 };
338 
339 static rtx *uid_align;
340 static int *uid_shuid;
341 static struct label_alignment *label_align;
342 
343 /* Indicate that branch shortening hasn't yet been done.  */
344 
345 void
346 init_insn_lengths (void)
347 {
348   if (uid_shuid)
349     {
350       free (uid_shuid);
351       uid_shuid = 0;
352     }
353   if (insn_lengths)
354     {
355       free (insn_lengths);
356       insn_lengths = 0;
357       insn_lengths_max_uid = 0;
358     }
359   if (HAVE_ATTR_length)
360     INSN_ADDRESSES_FREE ();
361   if (uid_align)
362     {
363       free (uid_align);
364       uid_align = 0;
365     }
366 }
367 
368 /* Obtain the current length of an insn.  If branch shortening has been done,
369    get its actual length.  Otherwise, use FALLBACK_FN to calculate the
370    length.  */
371 static int
372 get_attr_length_1 (rtx_insn *insn, int (*fallback_fn) (rtx_insn *))
373 {
374   rtx body;
375   int i;
376   int length = 0;
377 
378   if (!HAVE_ATTR_length)
379     return 0;
380 
381   if (insn_lengths_max_uid > INSN_UID (insn))
382     return insn_lengths[INSN_UID (insn)];
383   else
384     switch (GET_CODE (insn))
385       {
386       case NOTE:
387       case BARRIER:
388       case CODE_LABEL:
389       case DEBUG_INSN:
390 	return 0;
391 
392       case CALL_INSN:
393       case JUMP_INSN:
394 	length = fallback_fn (insn);
395 	break;
396 
397       case INSN:
398 	body = PATTERN (insn);
399 	if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
400 	  return 0;
401 
402 	else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
403 	  length = asm_insn_count (body) * fallback_fn (insn);
404 	else if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (body))
405 	  for (i = 0; i < seq->len (); i++)
406 	    length += get_attr_length_1 (seq->insn (i), fallback_fn);
407 	else
408 	  length = fallback_fn (insn);
409 	break;
410 
411       default:
412 	break;
413       }
414 
415 #ifdef ADJUST_INSN_LENGTH
416   ADJUST_INSN_LENGTH (insn, length);
417 #endif
418   return length;
419 }
420 
421 /* Obtain the current length of an insn.  If branch shortening has been done,
422    get its actual length.  Otherwise, get its maximum length.  */
423 int
424 get_attr_length (rtx_insn *insn)
425 {
426   return get_attr_length_1 (insn, insn_default_length);
427 }
428 
429 /* Obtain the current length of an insn.  If branch shortening has been done,
430    get its actual length.  Otherwise, get its minimum length.  */
431 int
432 get_attr_min_length (rtx_insn *insn)
433 {
434   return get_attr_length_1 (insn, insn_min_length);
435 }
436 
437 /* Code to handle alignment inside shorten_branches.  */
438 
439 /* Here is an explanation how the algorithm in align_fuzz can give
440    proper results:
441 
442    Call a sequence of instructions beginning with alignment point X
443    and continuing until the next alignment point `block X'.  When `X'
444    is used in an expression, it means the alignment value of the
445    alignment point.
446 
447    Call the distance between the start of the first insn of block X, and
448    the end of the last insn of block X `IX', for the `inner size of X'.
449    This is clearly the sum of the instruction lengths.
450 
451    Likewise with the next alignment-delimited block following X, which we
452    shall call block Y.
453 
454    Call the distance between the start of the first insn of block X, and
455    the start of the first insn of block Y `OX', for the `outer size of X'.
456 
457    The estimated padding is then OX - IX.
458 
459    OX can be safely estimated as
460 
461            if (X >= Y)
462                    OX = round_up(IX, Y)
463            else
464                    OX = round_up(IX, X) + Y - X
465 
466    Clearly est(IX) >= real(IX), because that only depends on the
467    instruction lengths, and those being overestimated is a given.
468 
469    Clearly round_up(foo, Z) >= round_up(bar, Z) if foo >= bar, so
470    we needn't worry about that when thinking about OX.
471 
472    When X >= Y, the alignment provided by Y adds no uncertainty factor
473    for branch ranges starting before X, so we can just round what we have.
474    But when X < Y, we don't know anything about the, so to speak,
475    `middle bits', so we have to assume the worst when aligning up from an
476    address mod X to one mod Y, which is Y - X.  */
477 
478 #ifndef LABEL_ALIGN
479 #define LABEL_ALIGN(LABEL) align_labels_log
480 #endif
481 
482 #ifndef LOOP_ALIGN
483 #define LOOP_ALIGN(LABEL) align_loops_log
484 #endif
485 
486 #ifndef LABEL_ALIGN_AFTER_BARRIER
487 #define LABEL_ALIGN_AFTER_BARRIER(LABEL) 0
488 #endif
489 
490 #ifndef JUMP_ALIGN
491 #define JUMP_ALIGN(LABEL) align_jumps_log
492 #endif
493 
494 int
495 default_label_align_after_barrier_max_skip (rtx_insn *insn ATTRIBUTE_UNUSED)
496 {
497   return 0;
498 }
499 
500 int
501 default_loop_align_max_skip (rtx_insn *insn ATTRIBUTE_UNUSED)
502 {
503   return align_loops_max_skip;
504 }
505 
506 int
507 default_label_align_max_skip (rtx_insn *insn ATTRIBUTE_UNUSED)
508 {
509   return align_labels_max_skip;
510 }
511 
512 int
513 default_jump_align_max_skip (rtx_insn *insn ATTRIBUTE_UNUSED)
514 {
515   return align_jumps_max_skip;
516 }
517 
518 #ifndef ADDR_VEC_ALIGN
519 static int
520 final_addr_vec_align (rtx_insn *addr_vec)
521 {
522   int align = GET_MODE_SIZE (GET_MODE (PATTERN (addr_vec)));
523 
524   if (align > BIGGEST_ALIGNMENT / BITS_PER_UNIT)
525     align = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
526   return exact_log2 (align);
527 
528 }
529 
530 #define ADDR_VEC_ALIGN(ADDR_VEC) final_addr_vec_align (ADDR_VEC)
531 #endif
532 
533 #ifndef INSN_LENGTH_ALIGNMENT
534 #define INSN_LENGTH_ALIGNMENT(INSN) length_unit_log
535 #endif
536 
537 #define INSN_SHUID(INSN) (uid_shuid[INSN_UID (INSN)])
538 
539 static int min_labelno, max_labelno;
540 
541 #define LABEL_TO_ALIGNMENT(LABEL) \
542   (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].alignment)
543 
544 #define LABEL_TO_MAX_SKIP(LABEL) \
545   (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].max_skip)
546 
547 /* For the benefit of port specific code do this also as a function.  */
548 
549 int
550 label_to_alignment (rtx label)
551 {
552   if (CODE_LABEL_NUMBER (label) <= max_labelno)
553     return LABEL_TO_ALIGNMENT (label);
554   return 0;
555 }
556 
557 int
558 label_to_max_skip (rtx label)
559 {
560   if (CODE_LABEL_NUMBER (label) <= max_labelno)
561     return LABEL_TO_MAX_SKIP (label);
562   return 0;
563 }
564 
565 /* The differences in addresses
566    between a branch and its target might grow or shrink depending on
567    the alignment the start insn of the range (the branch for a forward
568    branch or the label for a backward branch) starts out on; if these
569    differences are used naively, they can even oscillate infinitely.
570    We therefore want to compute a 'worst case' address difference that
571    is independent of the alignment the start insn of the range end
572    up on, and that is at least as large as the actual difference.
573    The function align_fuzz calculates the amount we have to add to the
574    naively computed difference, by traversing the part of the alignment
575    chain of the start insn of the range that is in front of the end insn
576    of the range, and considering for each alignment the maximum amount
577    that it might contribute to a size increase.
578 
579    For casesi tables, we also want to know worst case minimum amounts of
580    address difference, in case a machine description wants to introduce
581    some common offset that is added to all offsets in a table.
582    For this purpose, align_fuzz with a growth argument of 0 computes the
583    appropriate adjustment.  */
584 
585 /* Compute the maximum delta by which the difference of the addresses of
586    START and END might grow / shrink due to a different address for start
587    which changes the size of alignment insns between START and END.
588    KNOWN_ALIGN_LOG is the alignment known for START.
589    GROWTH should be ~0 if the objective is to compute potential code size
590    increase, and 0 if the objective is to compute potential shrink.
591    The return value is undefined for any other value of GROWTH.  */
592 
593 static int
594 align_fuzz (rtx start, rtx end, int known_align_log, unsigned int growth)
595 {
596   int uid = INSN_UID (start);
597   rtx align_label;
598   int known_align = 1 << known_align_log;
599   int end_shuid = INSN_SHUID (end);
600   int fuzz = 0;
601 
602   for (align_label = uid_align[uid]; align_label; align_label = uid_align[uid])
603     {
604       int align_addr, new_align;
605 
606       uid = INSN_UID (align_label);
607       align_addr = INSN_ADDRESSES (uid) - insn_lengths[uid];
608       if (uid_shuid[uid] > end_shuid)
609 	break;
610       known_align_log = LABEL_TO_ALIGNMENT (align_label);
611       new_align = 1 << known_align_log;
612       if (new_align < known_align)
613 	continue;
614       fuzz += (-align_addr ^ growth) & (new_align - known_align);
615       known_align = new_align;
616     }
617   return fuzz;
618 }
619 
620 /* Compute a worst-case reference address of a branch so that it
621    can be safely used in the presence of aligned labels.  Since the
622    size of the branch itself is unknown, the size of the branch is
623    not included in the range.  I.e. for a forward branch, the reference
624    address is the end address of the branch as known from the previous
625    branch shortening pass, minus a value to account for possible size
626    increase due to alignment.  For a backward branch, it is the start
627    address of the branch as known from the current pass, plus a value
628    to account for possible size increase due to alignment.
629    NB.: Therefore, the maximum offset allowed for backward branches needs
630    to exclude the branch size.  */
631 
632 int
633 insn_current_reference_address (rtx_insn *branch)
634 {
635   rtx dest;
636   int seq_uid;
637 
638   if (! INSN_ADDRESSES_SET_P ())
639     return 0;
640 
641   rtx_insn *seq = NEXT_INSN (PREV_INSN (branch));
642   seq_uid = INSN_UID (seq);
643   if (!JUMP_P (branch))
644     /* This can happen for example on the PA; the objective is to know the
645        offset to address something in front of the start of the function.
646        Thus, we can treat it like a backward branch.
647        We assume here that FUNCTION_BOUNDARY / BITS_PER_UNIT is larger than
648        any alignment we'd encounter, so we skip the call to align_fuzz.  */
649     return insn_current_address;
650   dest = JUMP_LABEL (branch);
651 
652   /* BRANCH has no proper alignment chain set, so use SEQ.
653      BRANCH also has no INSN_SHUID.  */
654   if (INSN_SHUID (seq) < INSN_SHUID (dest))
655     {
656       /* Forward branch.  */
657       return (insn_last_address + insn_lengths[seq_uid]
658 	      - align_fuzz (seq, dest, length_unit_log, ~0));
659     }
660   else
661     {
662       /* Backward branch.  */
663       return (insn_current_address
664 	      + align_fuzz (dest, seq, length_unit_log, ~0));
665     }
666 }
667 
668 /* Compute branch alignments based on frequency information in the
669    CFG.  */
670 
671 unsigned int
672 compute_alignments (void)
673 {
674   int log, max_skip, max_log;
675   basic_block bb;
676   int freq_max = 0;
677   int freq_threshold = 0;
678 
679   if (label_align)
680     {
681       free (label_align);
682       label_align = 0;
683     }
684 
685   max_labelno = max_label_num ();
686   min_labelno = get_first_label_num ();
687   label_align = XCNEWVEC (struct label_alignment, max_labelno - min_labelno + 1);
688 
689   /* If not optimizing or optimizing for size, don't assign any alignments.  */
690   if (! optimize || optimize_function_for_size_p (cfun))
691     return 0;
692 
693   if (dump_file)
694     {
695       dump_reg_info (dump_file);
696       dump_flow_info (dump_file, TDF_DETAILS);
697       flow_loops_dump (dump_file, NULL, 1);
698     }
699   loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
700   FOR_EACH_BB_FN (bb, cfun)
701     if (bb->frequency > freq_max)
702       freq_max = bb->frequency;
703   freq_threshold = freq_max / PARAM_VALUE (PARAM_ALIGN_THRESHOLD);
704 
705   if (dump_file)
706     fprintf (dump_file, "freq_max: %i\n",freq_max);
707   FOR_EACH_BB_FN (bb, cfun)
708     {
709       rtx_insn *label = BB_HEAD (bb);
710       int fallthru_frequency = 0, branch_frequency = 0, has_fallthru = 0;
711       edge e;
712       edge_iterator ei;
713 
714       if (!LABEL_P (label)
715 	  || optimize_bb_for_size_p (bb))
716 	{
717 	  if (dump_file)
718 	    fprintf (dump_file,
719 		     "BB %4i freq %4i loop %2i loop_depth %2i skipped.\n",
720 		     bb->index, bb->frequency, bb->loop_father->num,
721 		     bb_loop_depth (bb));
722 	  continue;
723 	}
724       max_log = LABEL_ALIGN (label);
725       max_skip = targetm.asm_out.label_align_max_skip (label);
726 
727       FOR_EACH_EDGE (e, ei, bb->preds)
728 	{
729 	  if (e->flags & EDGE_FALLTHRU)
730 	    has_fallthru = 1, fallthru_frequency += EDGE_FREQUENCY (e);
731 	  else
732 	    branch_frequency += EDGE_FREQUENCY (e);
733 	}
734       if (dump_file)
735 	{
736 	  fprintf (dump_file, "BB %4i freq %4i loop %2i loop_depth"
737 		   " %2i fall %4i branch %4i",
738 		   bb->index, bb->frequency, bb->loop_father->num,
739 		   bb_loop_depth (bb),
740 		   fallthru_frequency, branch_frequency);
741 	  if (!bb->loop_father->inner && bb->loop_father->num)
742 	    fprintf (dump_file, " inner_loop");
743 	  if (bb->loop_father->header == bb)
744 	    fprintf (dump_file, " loop_header");
745 	  fprintf (dump_file, "\n");
746 	}
747 
748       /* There are two purposes to align block with no fallthru incoming edge:
749 	 1) to avoid fetch stalls when branch destination is near cache boundary
750 	 2) to improve cache efficiency in case the previous block is not executed
751 	    (so it does not need to be in the cache).
752 
753 	 We to catch first case, we align frequently executed blocks.
754 	 To catch the second, we align blocks that are executed more frequently
755 	 than the predecessor and the predecessor is likely to not be executed
756 	 when function is called.  */
757 
758       if (!has_fallthru
759 	  && (branch_frequency > freq_threshold
760 	      || (bb->frequency > bb->prev_bb->frequency * 10
761 		  && (bb->prev_bb->frequency
762 		      <= ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency / 2))))
763 	{
764 	  log = JUMP_ALIGN (label);
765 	  if (dump_file)
766 	    fprintf (dump_file, "  jump alignment added.\n");
767 	  if (max_log < log)
768 	    {
769 	      max_log = log;
770 	      max_skip = targetm.asm_out.jump_align_max_skip (label);
771 	    }
772 	}
773       /* In case block is frequent and reached mostly by non-fallthru edge,
774 	 align it.  It is most likely a first block of loop.  */
775       if (has_fallthru
776 	  && !(single_succ_p (bb)
777 	       && single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun))
778 	  && optimize_bb_for_speed_p (bb)
779 	  && branch_frequency + fallthru_frequency > freq_threshold
780 	  && (branch_frequency
781 	      > fallthru_frequency * PARAM_VALUE (PARAM_ALIGN_LOOP_ITERATIONS)))
782 	{
783 	  log = LOOP_ALIGN (label);
784 	  if (dump_file)
785 	    fprintf (dump_file, "  internal loop alignment added.\n");
786 	  if (max_log < log)
787 	    {
788 	      max_log = log;
789 	      max_skip = targetm.asm_out.loop_align_max_skip (label);
790 	    }
791 	}
792       LABEL_TO_ALIGNMENT (label) = max_log;
793       LABEL_TO_MAX_SKIP (label) = max_skip;
794     }
795 
796   loop_optimizer_finalize ();
797   free_dominance_info (CDI_DOMINATORS);
798   return 0;
799 }
800 
801 /* Grow the LABEL_ALIGN array after new labels are created.  */
802 
803 static void
804 grow_label_align (void)
805 {
806   int old = max_labelno;
807   int n_labels;
808   int n_old_labels;
809 
810   max_labelno = max_label_num ();
811 
812   n_labels = max_labelno - min_labelno + 1;
813   n_old_labels = old - min_labelno + 1;
814 
815   label_align = XRESIZEVEC (struct label_alignment, label_align, n_labels);
816 
817   /* Range of labels grows monotonically in the function.  Failing here
818      means that the initialization of array got lost.  */
819   gcc_assert (n_old_labels <= n_labels);
820 
821   memset (label_align + n_old_labels, 0,
822           (n_labels - n_old_labels) * sizeof (struct label_alignment));
823 }
824 
825 /* Update the already computed alignment information.  LABEL_PAIRS is a vector
826    made up of pairs of labels for which the alignment information of the first
827    element will be copied from that of the second element.  */
828 
829 void
830 update_alignments (vec<rtx> &label_pairs)
831 {
832   unsigned int i = 0;
833   rtx iter, label = NULL_RTX;
834 
835   if (max_labelno != max_label_num ())
836     grow_label_align ();
837 
838   FOR_EACH_VEC_ELT (label_pairs, i, iter)
839     if (i & 1)
840       {
841 	LABEL_TO_ALIGNMENT (label) = LABEL_TO_ALIGNMENT (iter);
842 	LABEL_TO_MAX_SKIP (label) = LABEL_TO_MAX_SKIP (iter);
843       }
844     else
845       label = iter;
846 }
847 
848 namespace {
849 
850 const pass_data pass_data_compute_alignments =
851 {
852   RTL_PASS, /* type */
853   "alignments", /* name */
854   OPTGROUP_NONE, /* optinfo_flags */
855   TV_NONE, /* tv_id */
856   0, /* properties_required */
857   0, /* properties_provided */
858   0, /* properties_destroyed */
859   0, /* todo_flags_start */
860   0, /* todo_flags_finish */
861 };
862 
863 class pass_compute_alignments : public rtl_opt_pass
864 {
865 public:
866   pass_compute_alignments (gcc::context *ctxt)
867     : rtl_opt_pass (pass_data_compute_alignments, ctxt)
868   {}
869 
870   /* opt_pass methods: */
871   virtual unsigned int execute (function *) { return compute_alignments (); }
872 
873 }; // class pass_compute_alignments
874 
875 } // anon namespace
876 
877 rtl_opt_pass *
878 make_pass_compute_alignments (gcc::context *ctxt)
879 {
880   return new pass_compute_alignments (ctxt);
881 }
882 
883 
884 /* Make a pass over all insns and compute their actual lengths by shortening
885    any branches of variable length if possible.  */
886 
887 /* shorten_branches might be called multiple times:  for example, the SH
888    port splits out-of-range conditional branches in MACHINE_DEPENDENT_REORG.
889    In order to do this, it needs proper length information, which it obtains
890    by calling shorten_branches.  This cannot be collapsed with
891    shorten_branches itself into a single pass unless we also want to integrate
892    reorg.c, since the branch splitting exposes new instructions with delay
893    slots.  */
894 
895 void
896 shorten_branches (rtx_insn *first)
897 {
898   rtx_insn *insn;
899   int max_uid;
900   int i;
901   int max_log;
902   int max_skip;
903 #define MAX_CODE_ALIGN 16
904   rtx_insn *seq;
905   int something_changed = 1;
906   char *varying_length;
907   rtx body;
908   int uid;
909   rtx align_tab[MAX_CODE_ALIGN + 1];
910 
911   /* Compute maximum UID and allocate label_align / uid_shuid.  */
912   max_uid = get_max_uid ();
913 
914   /* Free uid_shuid before reallocating it.  */
915   free (uid_shuid);
916 
917   uid_shuid = XNEWVEC (int, max_uid);
918 
919   if (max_labelno != max_label_num ())
920     grow_label_align ();
921 
922   /* Initialize label_align and set up uid_shuid to be strictly
923      monotonically rising with insn order.  */
924   /* We use max_log here to keep track of the maximum alignment we want to
925      impose on the next CODE_LABEL (or the current one if we are processing
926      the CODE_LABEL itself).  */
927 
928   max_log = 0;
929   max_skip = 0;
930 
931   for (insn = get_insns (), i = 1; insn; insn = NEXT_INSN (insn))
932     {
933       int log;
934 
935       INSN_SHUID (insn) = i++;
936       if (INSN_P (insn))
937 	continue;
938 
939       if (LABEL_P (insn))
940 	{
941 	  rtx_insn *next;
942 	  bool next_is_jumptable;
943 
944 	  /* Merge in alignments computed by compute_alignments.  */
945 	  log = LABEL_TO_ALIGNMENT (insn);
946 	  if (max_log < log)
947 	    {
948 	      max_log = log;
949 	      max_skip = LABEL_TO_MAX_SKIP (insn);
950 	    }
951 
952 	  next = next_nonnote_insn (insn);
953 	  next_is_jumptable = next && JUMP_TABLE_DATA_P (next);
954 	  if (!next_is_jumptable)
955 	    {
956 	      log = LABEL_ALIGN (insn);
957 	      if (max_log < log)
958 		{
959 		  max_log = log;
960 		  max_skip = targetm.asm_out.label_align_max_skip (insn);
961 		}
962 	    }
963 	  /* ADDR_VECs only take room if read-only data goes into the text
964 	     section.  */
965 	  if ((JUMP_TABLES_IN_TEXT_SECTION
966 	       || readonly_data_section == text_section)
967 	      && next_is_jumptable)
968 	    {
969 	      log = ADDR_VEC_ALIGN (next);
970 	      if (max_log < log)
971 		{
972 		  max_log = log;
973 		  max_skip = targetm.asm_out.label_align_max_skip (insn);
974 		}
975 	    }
976 	  LABEL_TO_ALIGNMENT (insn) = max_log;
977 	  LABEL_TO_MAX_SKIP (insn) = max_skip;
978 	  max_log = 0;
979 	  max_skip = 0;
980 	}
981       else if (BARRIER_P (insn))
982 	{
983 	  rtx_insn *label;
984 
985 	  for (label = insn; label && ! INSN_P (label);
986 	       label = NEXT_INSN (label))
987 	    if (LABEL_P (label))
988 	      {
989 		log = LABEL_ALIGN_AFTER_BARRIER (insn);
990 		if (max_log < log)
991 		  {
992 		    max_log = log;
993 		    max_skip = targetm.asm_out.label_align_after_barrier_max_skip (label);
994 		  }
995 		break;
996 	      }
997 	}
998     }
999   if (!HAVE_ATTR_length)
1000     return;
1001 
1002   /* Allocate the rest of the arrays.  */
1003   insn_lengths = XNEWVEC (int, max_uid);
1004   insn_lengths_max_uid = max_uid;
1005   /* Syntax errors can lead to labels being outside of the main insn stream.
1006      Initialize insn_addresses, so that we get reproducible results.  */
1007   INSN_ADDRESSES_ALLOC (max_uid);
1008 
1009   varying_length = XCNEWVEC (char, max_uid);
1010 
1011   /* Initialize uid_align.  We scan instructions
1012      from end to start, and keep in align_tab[n] the last seen insn
1013      that does an alignment of at least n+1, i.e. the successor
1014      in the alignment chain for an insn that does / has a known
1015      alignment of n.  */
1016   uid_align = XCNEWVEC (rtx, max_uid);
1017 
1018   for (i = MAX_CODE_ALIGN + 1; --i >= 0;)
1019     align_tab[i] = NULL_RTX;
1020   seq = get_last_insn ();
1021   for (; seq; seq = PREV_INSN (seq))
1022     {
1023       int uid = INSN_UID (seq);
1024       int log;
1025       log = (LABEL_P (seq) ? LABEL_TO_ALIGNMENT (seq) : 0);
1026       uid_align[uid] = align_tab[0];
1027       if (log)
1028 	{
1029 	  /* Found an alignment label.  */
1030 	  uid_align[uid] = align_tab[log];
1031 	  for (i = log - 1; i >= 0; i--)
1032 	    align_tab[i] = seq;
1033 	}
1034     }
1035 
1036   /* When optimizing, we start assuming minimum length, and keep increasing
1037      lengths as we find the need for this, till nothing changes.
1038      When not optimizing, we start assuming maximum lengths, and
1039      do a single pass to update the lengths.  */
1040   bool increasing = optimize != 0;
1041 
1042 #ifdef CASE_VECTOR_SHORTEN_MODE
1043   if (optimize)
1044     {
1045       /* Look for ADDR_DIFF_VECs, and initialize their minimum and maximum
1046          label fields.  */
1047 
1048       int min_shuid = INSN_SHUID (get_insns ()) - 1;
1049       int max_shuid = INSN_SHUID (get_last_insn ()) + 1;
1050       int rel;
1051 
1052       for (insn = first; insn != 0; insn = NEXT_INSN (insn))
1053 	{
1054 	  rtx min_lab = NULL_RTX, max_lab = NULL_RTX, pat;
1055 	  int len, i, min, max, insn_shuid;
1056 	  int min_align;
1057 	  addr_diff_vec_flags flags;
1058 
1059 	  if (! JUMP_TABLE_DATA_P (insn)
1060 	      || GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
1061 	    continue;
1062 	  pat = PATTERN (insn);
1063 	  len = XVECLEN (pat, 1);
1064 	  gcc_assert (len > 0);
1065 	  min_align = MAX_CODE_ALIGN;
1066 	  for (min = max_shuid, max = min_shuid, i = len - 1; i >= 0; i--)
1067 	    {
1068 	      rtx lab = XEXP (XVECEXP (pat, 1, i), 0);
1069 	      int shuid = INSN_SHUID (lab);
1070 	      if (shuid < min)
1071 		{
1072 		  min = shuid;
1073 		  min_lab = lab;
1074 		}
1075 	      if (shuid > max)
1076 		{
1077 		  max = shuid;
1078 		  max_lab = lab;
1079 		}
1080 	      if (min_align > LABEL_TO_ALIGNMENT (lab))
1081 		min_align = LABEL_TO_ALIGNMENT (lab);
1082 	    }
1083 	  XEXP (pat, 2) = gen_rtx_LABEL_REF (Pmode, min_lab);
1084 	  XEXP (pat, 3) = gen_rtx_LABEL_REF (Pmode, max_lab);
1085 	  insn_shuid = INSN_SHUID (insn);
1086 	  rel = INSN_SHUID (XEXP (XEXP (pat, 0), 0));
1087 	  memset (&flags, 0, sizeof (flags));
1088 	  flags.min_align = min_align;
1089 	  flags.base_after_vec = rel > insn_shuid;
1090 	  flags.min_after_vec  = min > insn_shuid;
1091 	  flags.max_after_vec  = max > insn_shuid;
1092 	  flags.min_after_base = min > rel;
1093 	  flags.max_after_base = max > rel;
1094 	  ADDR_DIFF_VEC_FLAGS (pat) = flags;
1095 
1096 	  if (increasing)
1097 	    PUT_MODE (pat, CASE_VECTOR_SHORTEN_MODE (0, 0, pat));
1098 	}
1099     }
1100 #endif /* CASE_VECTOR_SHORTEN_MODE */
1101 
1102   /* Compute initial lengths, addresses, and varying flags for each insn.  */
1103   int (*length_fun) (rtx_insn *) = increasing ? insn_min_length : insn_default_length;
1104 
1105   for (insn_current_address = 0, insn = first;
1106        insn != 0;
1107        insn_current_address += insn_lengths[uid], insn = NEXT_INSN (insn))
1108     {
1109       uid = INSN_UID (insn);
1110 
1111       insn_lengths[uid] = 0;
1112 
1113       if (LABEL_P (insn))
1114 	{
1115 	  int log = LABEL_TO_ALIGNMENT (insn);
1116 	  if (log)
1117 	    {
1118 	      int align = 1 << log;
1119 	      int new_address = (insn_current_address + align - 1) & -align;
1120 	      insn_lengths[uid] = new_address - insn_current_address;
1121 	    }
1122 	}
1123 
1124       INSN_ADDRESSES (uid) = insn_current_address + insn_lengths[uid];
1125 
1126       if (NOTE_P (insn) || BARRIER_P (insn)
1127 	  || LABEL_P (insn) || DEBUG_INSN_P (insn))
1128 	continue;
1129       if (insn->deleted ())
1130 	continue;
1131 
1132       body = PATTERN (insn);
1133       if (JUMP_TABLE_DATA_P (insn))
1134 	{
1135 	  /* This only takes room if read-only data goes into the text
1136 	     section.  */
1137 	  if (JUMP_TABLES_IN_TEXT_SECTION
1138 	      || readonly_data_section == text_section)
1139 	    insn_lengths[uid] = (XVECLEN (body,
1140 					  GET_CODE (body) == ADDR_DIFF_VEC)
1141 				 * GET_MODE_SIZE (GET_MODE (body)));
1142 	  /* Alignment is handled by ADDR_VEC_ALIGN.  */
1143 	}
1144       else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
1145 	insn_lengths[uid] = asm_insn_count (body) * insn_default_length (insn);
1146       else if (rtx_sequence *body_seq = dyn_cast <rtx_sequence *> (body))
1147 	{
1148 	  int i;
1149 	  int const_delay_slots;
1150 	  if (DELAY_SLOTS)
1151 	    const_delay_slots = const_num_delay_slots (body_seq->insn (0));
1152 	  else
1153 	    const_delay_slots = 0;
1154 
1155 	  int (*inner_length_fun) (rtx_insn *)
1156 	    = const_delay_slots ? length_fun : insn_default_length;
1157 	  /* Inside a delay slot sequence, we do not do any branch shortening
1158 	     if the shortening could change the number of delay slots
1159 	     of the branch.  */
1160 	  for (i = 0; i < body_seq->len (); i++)
1161 	    {
1162 	      rtx_insn *inner_insn = body_seq->insn (i);
1163 	      int inner_uid = INSN_UID (inner_insn);
1164 	      int inner_length;
1165 
1166 	      if (GET_CODE (PATTERN (inner_insn)) == ASM_INPUT
1167 		  || asm_noperands (PATTERN (inner_insn)) >= 0)
1168 		inner_length = (asm_insn_count (PATTERN (inner_insn))
1169 				* insn_default_length (inner_insn));
1170 	      else
1171 		inner_length = inner_length_fun (inner_insn);
1172 
1173 	      insn_lengths[inner_uid] = inner_length;
1174 	      if (const_delay_slots)
1175 		{
1176 		  if ((varying_length[inner_uid]
1177 		       = insn_variable_length_p (inner_insn)) != 0)
1178 		    varying_length[uid] = 1;
1179 		  INSN_ADDRESSES (inner_uid) = (insn_current_address
1180 						+ insn_lengths[uid]);
1181 		}
1182 	      else
1183 		varying_length[inner_uid] = 0;
1184 	      insn_lengths[uid] += inner_length;
1185 	    }
1186 	}
1187       else if (GET_CODE (body) != USE && GET_CODE (body) != CLOBBER)
1188 	{
1189 	  insn_lengths[uid] = length_fun (insn);
1190 	  varying_length[uid] = insn_variable_length_p (insn);
1191 	}
1192 
1193       /* If needed, do any adjustment.  */
1194 #ifdef ADJUST_INSN_LENGTH
1195       ADJUST_INSN_LENGTH (insn, insn_lengths[uid]);
1196       if (insn_lengths[uid] < 0)
1197 	fatal_insn ("negative insn length", insn);
1198 #endif
1199     }
1200 
1201   /* Now loop over all the insns finding varying length insns.  For each,
1202      get the current insn length.  If it has changed, reflect the change.
1203      When nothing changes for a full pass, we are done.  */
1204 
1205   while (something_changed)
1206     {
1207       something_changed = 0;
1208       insn_current_align = MAX_CODE_ALIGN - 1;
1209       for (insn_current_address = 0, insn = first;
1210 	   insn != 0;
1211 	   insn = NEXT_INSN (insn))
1212 	{
1213 	  int new_length;
1214 #ifdef ADJUST_INSN_LENGTH
1215 	  int tmp_length;
1216 #endif
1217 	  int length_align;
1218 
1219 	  uid = INSN_UID (insn);
1220 
1221 	  if (LABEL_P (insn))
1222 	    {
1223 	      int log = LABEL_TO_ALIGNMENT (insn);
1224 
1225 #ifdef CASE_VECTOR_SHORTEN_MODE
1226 	      /* If the mode of a following jump table was changed, we
1227 		 may need to update the alignment of this label.  */
1228 	      rtx_insn *next;
1229 	      bool next_is_jumptable;
1230 
1231 	      next = next_nonnote_insn (insn);
1232 	      next_is_jumptable = next && JUMP_TABLE_DATA_P (next);
1233 	      if ((JUMP_TABLES_IN_TEXT_SECTION
1234 		   || readonly_data_section == text_section)
1235 		  && next_is_jumptable)
1236 		{
1237 		  int newlog = ADDR_VEC_ALIGN (next);
1238 		  if (newlog != log)
1239 		    {
1240 		      log = newlog;
1241 		      LABEL_TO_ALIGNMENT (insn) = log;
1242 		      something_changed = 1;
1243 		    }
1244 		}
1245 #endif
1246 
1247 	      if (log > insn_current_align)
1248 		{
1249 		  int align = 1 << log;
1250 		  int new_address= (insn_current_address + align - 1) & -align;
1251 		  insn_lengths[uid] = new_address - insn_current_address;
1252 		  insn_current_align = log;
1253 		  insn_current_address = new_address;
1254 		}
1255 	      else
1256 		insn_lengths[uid] = 0;
1257 	      INSN_ADDRESSES (uid) = insn_current_address;
1258 	      continue;
1259 	    }
1260 
1261 	  length_align = INSN_LENGTH_ALIGNMENT (insn);
1262 	  if (length_align < insn_current_align)
1263 	    insn_current_align = length_align;
1264 
1265 	  insn_last_address = INSN_ADDRESSES (uid);
1266 	  INSN_ADDRESSES (uid) = insn_current_address;
1267 
1268 #ifdef CASE_VECTOR_SHORTEN_MODE
1269 	  if (optimize
1270 	      && JUMP_TABLE_DATA_P (insn)
1271 	      && GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
1272 	    {
1273 	      rtx body = PATTERN (insn);
1274 	      int old_length = insn_lengths[uid];
1275 	      rtx_insn *rel_lab =
1276 		safe_as_a <rtx_insn *> (XEXP (XEXP (body, 0), 0));
1277 	      rtx min_lab = XEXP (XEXP (body, 2), 0);
1278 	      rtx max_lab = XEXP (XEXP (body, 3), 0);
1279 	      int rel_addr = INSN_ADDRESSES (INSN_UID (rel_lab));
1280 	      int min_addr = INSN_ADDRESSES (INSN_UID (min_lab));
1281 	      int max_addr = INSN_ADDRESSES (INSN_UID (max_lab));
1282 	      rtx_insn *prev;
1283 	      int rel_align = 0;
1284 	      addr_diff_vec_flags flags;
1285 	      machine_mode vec_mode;
1286 
1287 	      /* Avoid automatic aggregate initialization.  */
1288 	      flags = ADDR_DIFF_VEC_FLAGS (body);
1289 
1290 	      /* Try to find a known alignment for rel_lab.  */
1291 	      for (prev = rel_lab;
1292 		   prev
1293 		   && ! insn_lengths[INSN_UID (prev)]
1294 		   && ! (varying_length[INSN_UID (prev)] & 1);
1295 		   prev = PREV_INSN (prev))
1296 		if (varying_length[INSN_UID (prev)] & 2)
1297 		  {
1298 		    rel_align = LABEL_TO_ALIGNMENT (prev);
1299 		    break;
1300 		  }
1301 
1302 	      /* See the comment on addr_diff_vec_flags in rtl.h for the
1303 		 meaning of the flags values.  base: REL_LAB   vec: INSN  */
1304 	      /* Anything after INSN has still addresses from the last
1305 		 pass; adjust these so that they reflect our current
1306 		 estimate for this pass.  */
1307 	      if (flags.base_after_vec)
1308 		rel_addr += insn_current_address - insn_last_address;
1309 	      if (flags.min_after_vec)
1310 		min_addr += insn_current_address - insn_last_address;
1311 	      if (flags.max_after_vec)
1312 		max_addr += insn_current_address - insn_last_address;
1313 	      /* We want to know the worst case, i.e. lowest possible value
1314 		 for the offset of MIN_LAB.  If MIN_LAB is after REL_LAB,
1315 		 its offset is positive, and we have to be wary of code shrink;
1316 		 otherwise, it is negative, and we have to be vary of code
1317 		 size increase.  */
1318 	      if (flags.min_after_base)
1319 		{
1320 		  /* If INSN is between REL_LAB and MIN_LAB, the size
1321 		     changes we are about to make can change the alignment
1322 		     within the observed offset, therefore we have to break
1323 		     it up into two parts that are independent.  */
1324 		  if (! flags.base_after_vec && flags.min_after_vec)
1325 		    {
1326 		      min_addr -= align_fuzz (rel_lab, insn, rel_align, 0);
1327 		      min_addr -= align_fuzz (insn, min_lab, 0, 0);
1328 		    }
1329 		  else
1330 		    min_addr -= align_fuzz (rel_lab, min_lab, rel_align, 0);
1331 		}
1332 	      else
1333 		{
1334 		  if (flags.base_after_vec && ! flags.min_after_vec)
1335 		    {
1336 		      min_addr -= align_fuzz (min_lab, insn, 0, ~0);
1337 		      min_addr -= align_fuzz (insn, rel_lab, 0, ~0);
1338 		    }
1339 		  else
1340 		    min_addr -= align_fuzz (min_lab, rel_lab, 0, ~0);
1341 		}
1342 	      /* Likewise, determine the highest lowest possible value
1343 		 for the offset of MAX_LAB.  */
1344 	      if (flags.max_after_base)
1345 		{
1346 		  if (! flags.base_after_vec && flags.max_after_vec)
1347 		    {
1348 		      max_addr += align_fuzz (rel_lab, insn, rel_align, ~0);
1349 		      max_addr += align_fuzz (insn, max_lab, 0, ~0);
1350 		    }
1351 		  else
1352 		    max_addr += align_fuzz (rel_lab, max_lab, rel_align, ~0);
1353 		}
1354 	      else
1355 		{
1356 		  if (flags.base_after_vec && ! flags.max_after_vec)
1357 		    {
1358 		      max_addr += align_fuzz (max_lab, insn, 0, 0);
1359 		      max_addr += align_fuzz (insn, rel_lab, 0, 0);
1360 		    }
1361 		  else
1362 		    max_addr += align_fuzz (max_lab, rel_lab, 0, 0);
1363 		}
1364 	      vec_mode = CASE_VECTOR_SHORTEN_MODE (min_addr - rel_addr,
1365 						   max_addr - rel_addr, body);
1366 	      if (!increasing
1367 		  || (GET_MODE_SIZE (vec_mode)
1368 		      >= GET_MODE_SIZE (GET_MODE (body))))
1369 		PUT_MODE (body, vec_mode);
1370 	      if (JUMP_TABLES_IN_TEXT_SECTION
1371 		  || readonly_data_section == text_section)
1372 		{
1373 		  insn_lengths[uid]
1374 		    = (XVECLEN (body, 1) * GET_MODE_SIZE (GET_MODE (body)));
1375 		  insn_current_address += insn_lengths[uid];
1376 		  if (insn_lengths[uid] != old_length)
1377 		    something_changed = 1;
1378 		}
1379 
1380 	      continue;
1381 	    }
1382 #endif /* CASE_VECTOR_SHORTEN_MODE */
1383 
1384 	  if (! (varying_length[uid]))
1385 	    {
1386 	      if (NONJUMP_INSN_P (insn)
1387 		  && GET_CODE (PATTERN (insn)) == SEQUENCE)
1388 		{
1389 		  int i;
1390 
1391 		  body = PATTERN (insn);
1392 		  for (i = 0; i < XVECLEN (body, 0); i++)
1393 		    {
1394 		      rtx inner_insn = XVECEXP (body, 0, i);
1395 		      int inner_uid = INSN_UID (inner_insn);
1396 
1397 		      INSN_ADDRESSES (inner_uid) = insn_current_address;
1398 
1399 		      insn_current_address += insn_lengths[inner_uid];
1400 		    }
1401 		}
1402 	      else
1403 		insn_current_address += insn_lengths[uid];
1404 
1405 	      continue;
1406 	    }
1407 
1408 	  if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
1409 	    {
1410 	      rtx_sequence *seqn = as_a <rtx_sequence *> (PATTERN (insn));
1411 	      int i;
1412 
1413 	      body = PATTERN (insn);
1414 	      new_length = 0;
1415 	      for (i = 0; i < seqn->len (); i++)
1416 		{
1417 		  rtx_insn *inner_insn = seqn->insn (i);
1418 		  int inner_uid = INSN_UID (inner_insn);
1419 		  int inner_length;
1420 
1421 		  INSN_ADDRESSES (inner_uid) = insn_current_address;
1422 
1423 		  /* insn_current_length returns 0 for insns with a
1424 		     non-varying length.  */
1425 		  if (! varying_length[inner_uid])
1426 		    inner_length = insn_lengths[inner_uid];
1427 		  else
1428 		    inner_length = insn_current_length (inner_insn);
1429 
1430 		  if (inner_length != insn_lengths[inner_uid])
1431 		    {
1432 		      if (!increasing || inner_length > insn_lengths[inner_uid])
1433 			{
1434 			  insn_lengths[inner_uid] = inner_length;
1435 			  something_changed = 1;
1436 			}
1437 		      else
1438 			inner_length = insn_lengths[inner_uid];
1439 		    }
1440 		  insn_current_address += inner_length;
1441 		  new_length += inner_length;
1442 		}
1443 	    }
1444 	  else
1445 	    {
1446 	      new_length = insn_current_length (insn);
1447 	      insn_current_address += new_length;
1448 	    }
1449 
1450 #ifdef ADJUST_INSN_LENGTH
1451 	  /* If needed, do any adjustment.  */
1452 	  tmp_length = new_length;
1453 	  ADJUST_INSN_LENGTH (insn, new_length);
1454 	  insn_current_address += (new_length - tmp_length);
1455 #endif
1456 
1457 	  if (new_length != insn_lengths[uid]
1458 	      && (!increasing || new_length > insn_lengths[uid]))
1459 	    {
1460 	      insn_lengths[uid] = new_length;
1461 	      something_changed = 1;
1462 	    }
1463 	  else
1464 	    insn_current_address += insn_lengths[uid] - new_length;
1465 	}
1466       /* For a non-optimizing compile, do only a single pass.  */
1467       if (!increasing)
1468 	break;
1469     }
1470   crtl->max_insn_address = insn_current_address;
1471   free (varying_length);
1472 }
1473 
1474 /* Given the body of an INSN known to be generated by an ASM statement, return
1475    the number of machine instructions likely to be generated for this insn.
1476    This is used to compute its length.  */
1477 
1478 static int
1479 asm_insn_count (rtx body)
1480 {
1481   const char *templ;
1482 
1483   if (GET_CODE (body) == ASM_INPUT)
1484     templ = XSTR (body, 0);
1485   else
1486     templ = decode_asm_operands (body, NULL, NULL, NULL, NULL, NULL);
1487 
1488   return asm_str_count (templ);
1489 }
1490 
1491 /* Return the number of machine instructions likely to be generated for the
1492    inline-asm template. */
1493 int
1494 asm_str_count (const char *templ)
1495 {
1496   int count = 1;
1497 
1498   if (!*templ)
1499     return 0;
1500 
1501   for (; *templ; templ++)
1502     if (IS_ASM_LOGICAL_LINE_SEPARATOR (*templ, templ)
1503 	|| *templ == '\n')
1504       count++;
1505 
1506   return count;
1507 }
1508 
1509 /* ??? This is probably the wrong place for these.  */
1510 /* Structure recording the mapping from source file and directory
1511    names at compile time to those to be embedded in debug
1512    information.  */
1513 struct debug_prefix_map
1514 {
1515   const char *old_prefix;
1516   const char *new_prefix;
1517   size_t old_len;
1518   size_t new_len;
1519   struct debug_prefix_map *next;
1520 };
1521 
1522 /* Linked list of such structures.  */
1523 static debug_prefix_map *debug_prefix_maps;
1524 
1525 
1526 /* Record a debug file prefix mapping.  ARG is the argument to
1527    -fdebug-prefix-map and must be of the form OLD=NEW.  */
1528 
1529 void
1530 add_debug_prefix_map (const char *arg)
1531 {
1532   debug_prefix_map *map;
1533   const char *p;
1534   char *env;
1535   const char *old;
1536   size_t oldlen;
1537 
1538   p = strchr (arg, '=');
1539   if (!p)
1540     {
1541       error ("invalid argument %qs to -fdebug-prefix-map", arg);
1542       return;
1543     }
1544   if (*arg == '$')
1545     {
1546       env = xstrndup (arg+1, p - (arg+1));
1547       old = getenv(env);
1548       if (!old)
1549 	{
1550 	  warning (0, "environment variable %qs not set in argument to "
1551 		   "-fdebug-prefix-map", env);
1552 	  free(env);
1553 	  return;
1554 	}
1555       oldlen = strlen(old);
1556       free(env);
1557     }
1558   else
1559     {
1560       old = xstrndup (arg, p - arg);
1561       oldlen = p - arg;
1562     }
1563 
1564   map = XNEW (debug_prefix_map);
1565   map->old_prefix = old;
1566   map->old_len = oldlen;
1567   p++;
1568   map->new_prefix = xstrdup (p);
1569   map->new_len = strlen (p);
1570   map->next = debug_prefix_maps;
1571   debug_prefix_maps = map;
1572 }
1573 
1574 /* Perform user-specified mapping of debug filename prefixes.  Return
1575    the new name corresponding to FILENAME.  */
1576 
1577 static const char *
1578 remap_debug_prefix_filename (const char *filename)
1579 {
1580   debug_prefix_map *map;
1581   char *s;
1582   const char *name;
1583   size_t name_len;
1584 
1585   for (map = debug_prefix_maps; map; map = map->next)
1586     if (filename_ncmp (filename, map->old_prefix, map->old_len) == 0)
1587       break;
1588   if (!map)
1589     return filename;
1590   name = filename + map->old_len;
1591   name_len = strlen (name) + 1;
1592   s = (char *) alloca (name_len + map->new_len);
1593   memcpy (s, map->new_prefix, map->new_len);
1594   memcpy (s + map->new_len, name, name_len);
1595   return ggc_strdup (s);
1596 }
1597 
1598 #include <regex.h>
1599 
1600 typedef struct debug_regex_map
1601 {
1602   regex_t re;
1603   const char *sub;
1604   struct debug_regex_map *next;
1605 } debug_regex_map;
1606 
1607 /* Linked list of such structures.  */
1608 debug_regex_map *debug_regex_maps;
1609 
1610 
1611 /* Record a debug file regex mapping.  ARG is the argument to
1612    -fdebug-regex-map and must be of the form OLD=NEW.  */
1613 
1614 void
1615 add_debug_regex_map (const char *arg)
1616 {
1617   debug_regex_map *map;
1618   const char *p;
1619   char *old;
1620   char buf[1024];
1621   regex_t re;
1622   int e;
1623 
1624   p = strchr (arg, '=');
1625   if (!p)
1626     {
1627       error ("invalid argument %qs to -fdebug-regex-map", arg);
1628       return;
1629     }
1630 
1631   old = xstrndup (arg, p - arg);
1632   if ((e = regcomp(&re, old, REG_EXTENDED)) != 0)
1633     {
1634       regerror(e, &re, buf, sizeof(buf));
1635       warning (0, "regular expression compilation for %qs in argument to "
1636 	       "-fdebug-regex-map failed: %qs", old, buf);
1637       free(old);
1638       return;
1639     }
1640   free(old);
1641 
1642   map = XNEW (debug_regex_map);
1643   map->re = re;
1644   p++;
1645   map->sub = xstrdup (p);
1646   map->next = debug_regex_maps;
1647   debug_regex_maps = map;
1648 }
1649 
1650 extern "C" ssize_t regasub(char **, const char *,
1651   const regmatch_t *rm, const char *);
1652 
1653 /* Perform user-specified mapping of debug filename regular expressions.  Return
1654    the new name corresponding to FILENAME.  */
1655 
1656 static const char *
1657 remap_debug_regex_filename (const char *filename)
1658 {
1659   debug_regex_map *map;
1660   char *s;
1661   regmatch_t rm[10];
1662 
1663   for (map = debug_regex_maps; map; map = map->next)
1664     if (regexec (&map->re, filename, 10, rm, 0) == 0
1665        && regasub (&s, map->sub, rm, filename) >= 0)
1666       {
1667 	 const char *name = ggc_strdup(s);
1668 	 free(s);
1669 	 return name;
1670       }
1671   return filename;
1672 }
1673 
1674 const char *
1675 remap_debug_filename (const char *filename)
1676 {
1677    return remap_debug_regex_filename (remap_debug_prefix_filename (filename));
1678 }
1679 
1680 /* Return true if DWARF2 debug info can be emitted for DECL.  */
1681 
1682 static bool
1683 dwarf2_debug_info_emitted_p (tree decl)
1684 {
1685   if (write_symbols != DWARF2_DEBUG && write_symbols != VMS_AND_DWARF2_DEBUG)
1686     return false;
1687 
1688   if (DECL_IGNORED_P (decl))
1689     return false;
1690 
1691   return true;
1692 }
1693 
1694 /* Return scope resulting from combination of S1 and S2.  */
1695 static tree
1696 choose_inner_scope (tree s1, tree s2)
1697 {
1698    if (!s1)
1699      return s2;
1700    if (!s2)
1701      return s1;
1702    if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2))
1703      return s1;
1704    return s2;
1705 }
1706 
1707 /* Emit lexical block notes needed to change scope from S1 to S2.  */
1708 
1709 static void
1710 change_scope (rtx_insn *orig_insn, tree s1, tree s2)
1711 {
1712   rtx_insn *insn = orig_insn;
1713   tree com = NULL_TREE;
1714   tree ts1 = s1, ts2 = s2;
1715   tree s;
1716 
1717   while (ts1 != ts2)
1718     {
1719       gcc_assert (ts1 && ts2);
1720       if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
1721 	ts1 = BLOCK_SUPERCONTEXT (ts1);
1722       else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
1723 	ts2 = BLOCK_SUPERCONTEXT (ts2);
1724       else
1725 	{
1726 	  ts1 = BLOCK_SUPERCONTEXT (ts1);
1727 	  ts2 = BLOCK_SUPERCONTEXT (ts2);
1728 	}
1729     }
1730   com = ts1;
1731 
1732   /* Close scopes.  */
1733   s = s1;
1734   while (s != com)
1735     {
1736       rtx_note *note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
1737       NOTE_BLOCK (note) = s;
1738       s = BLOCK_SUPERCONTEXT (s);
1739     }
1740 
1741   /* Open scopes.  */
1742   s = s2;
1743   while (s != com)
1744     {
1745       insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
1746       NOTE_BLOCK (insn) = s;
1747       s = BLOCK_SUPERCONTEXT (s);
1748     }
1749 }
1750 
1751 /* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
1752    on the scope tree and the newly reordered instructions.  */
1753 
1754 static void
1755 reemit_insn_block_notes (void)
1756 {
1757   tree cur_block = DECL_INITIAL (cfun->decl);
1758   rtx_insn *insn;
1759   rtx_note *note;
1760 
1761   insn = get_insns ();
1762   for (; insn; insn = NEXT_INSN (insn))
1763     {
1764       tree this_block;
1765 
1766       /* Prevent lexical blocks from straddling section boundaries.  */
1767       if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
1768         {
1769           for (tree s = cur_block; s != DECL_INITIAL (cfun->decl);
1770                s = BLOCK_SUPERCONTEXT (s))
1771             {
1772               rtx_note *note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
1773               NOTE_BLOCK (note) = s;
1774               note = emit_note_after (NOTE_INSN_BLOCK_BEG, insn);
1775               NOTE_BLOCK (note) = s;
1776             }
1777         }
1778 
1779       if (!active_insn_p (insn))
1780         continue;
1781 
1782       /* Avoid putting scope notes between jump table and its label.  */
1783       if (JUMP_TABLE_DATA_P (insn))
1784 	continue;
1785 
1786       this_block = insn_scope (insn);
1787       /* For sequences compute scope resulting from merging all scopes
1788 	 of instructions nested inside.  */
1789       if (rtx_sequence *body = dyn_cast <rtx_sequence *> (PATTERN (insn)))
1790 	{
1791 	  int i;
1792 
1793 	  this_block = NULL;
1794 	  for (i = 0; i < body->len (); i++)
1795 	    this_block = choose_inner_scope (this_block,
1796 					     insn_scope (body->insn (i)));
1797 	}
1798       if (! this_block)
1799 	{
1800 	  if (INSN_LOCATION (insn) == UNKNOWN_LOCATION)
1801 	    continue;
1802 	  else
1803 	    this_block = DECL_INITIAL (cfun->decl);
1804 	}
1805 
1806       if (this_block != cur_block)
1807 	{
1808 	  change_scope (insn, cur_block, this_block);
1809 	  cur_block = this_block;
1810 	}
1811     }
1812 
1813   /* change_scope emits before the insn, not after.  */
1814   note = emit_note (NOTE_INSN_DELETED);
1815   change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
1816   delete_insn (note);
1817 
1818   reorder_blocks ();
1819 }
1820 
1821 static const char *some_local_dynamic_name;
1822 
1823 /* Locate some local-dynamic symbol still in use by this function
1824    so that we can print its name in local-dynamic base patterns.
1825    Return null if there are no local-dynamic references.  */
1826 
1827 const char *
1828 get_some_local_dynamic_name ()
1829 {
1830   subrtx_iterator::array_type array;
1831   rtx_insn *insn;
1832 
1833   if (some_local_dynamic_name)
1834     return some_local_dynamic_name;
1835 
1836   for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
1837     if (NONDEBUG_INSN_P (insn))
1838       FOR_EACH_SUBRTX (iter, array, PATTERN (insn), ALL)
1839 	{
1840 	  const_rtx x = *iter;
1841 	  if (GET_CODE (x) == SYMBOL_REF)
1842 	    {
1843 	      if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
1844 		return some_local_dynamic_name = XSTR (x, 0);
1845 	      if (CONSTANT_POOL_ADDRESS_P (x))
1846 		iter.substitute (get_pool_constant (x));
1847 	    }
1848 	}
1849 
1850   return 0;
1851 }
1852 
1853 /* Output assembler code for the start of a function,
1854    and initialize some of the variables in this file
1855    for the new function.  The label for the function and associated
1856    assembler pseudo-ops have already been output in `assemble_start_function'.
1857 
1858    FIRST is the first insn of the rtl for the function being compiled.
1859    FILE is the file to write assembler code to.
1860    OPTIMIZE_P is nonzero if we should eliminate redundant
1861      test and compare insns.  */
1862 
1863 void
1864 final_start_function (rtx_insn *first, FILE *file,
1865 		      int optimize_p ATTRIBUTE_UNUSED)
1866 {
1867   block_depth = 0;
1868 
1869   this_is_asm_operands = 0;
1870 
1871   need_profile_function = false;
1872 
1873   last_filename = LOCATION_FILE (prologue_location);
1874   last_linenum = LOCATION_LINE (prologue_location);
1875   last_columnnum = LOCATION_COLUMN (prologue_location);
1876   last_discriminator = discriminator = 0;
1877 
1878   high_block_linenum = high_function_linenum = last_linenum;
1879 
1880   if (flag_sanitize & SANITIZE_ADDRESS)
1881     asan_function_start ();
1882 
1883   if (!DECL_IGNORED_P (current_function_decl))
1884     debug_hooks->begin_prologue (last_linenum, last_columnnum, last_filename);
1885 
1886   if (!dwarf2_debug_info_emitted_p (current_function_decl))
1887     dwarf2out_begin_prologue (0, 0, NULL);
1888 
1889 #ifdef LEAF_REG_REMAP
1890   if (crtl->uses_only_leaf_regs)
1891     leaf_renumber_regs (first);
1892 #endif
1893 
1894   /* The Sun386i and perhaps other machines don't work right
1895      if the profiling code comes after the prologue.  */
1896   if (targetm.profile_before_prologue () && crtl->profile)
1897     {
1898       if (targetm.asm_out.function_prologue == default_function_pro_epilogue
1899 	  && targetm.have_prologue ())
1900 	{
1901 	  rtx_insn *insn;
1902 	  for (insn = first; insn; insn = NEXT_INSN (insn))
1903 	    if (!NOTE_P (insn))
1904 	      {
1905 		insn = NULL;
1906 		break;
1907 	      }
1908 	    else if (NOTE_KIND (insn) == NOTE_INSN_BASIC_BLOCK
1909 		     || NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
1910 	      break;
1911 	    else if (NOTE_KIND (insn) == NOTE_INSN_DELETED
1912 		     || NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION)
1913 	      continue;
1914 	    else
1915 	      {
1916 		insn = NULL;
1917 		break;
1918 	      }
1919 
1920 	  if (insn)
1921 	    need_profile_function = true;
1922 	  else
1923 	    profile_function (file);
1924 	}
1925       else
1926 	profile_function (file);
1927     }
1928 
1929   /* If debugging, assign block numbers to all of the blocks in this
1930      function.  */
1931   if (write_symbols)
1932     {
1933       reemit_insn_block_notes ();
1934       number_blocks (current_function_decl);
1935       /* We never actually put out begin/end notes for the top-level
1936 	 block in the function.  But, conceptually, that block is
1937 	 always needed.  */
1938       TREE_ASM_WRITTEN (DECL_INITIAL (current_function_decl)) = 1;
1939     }
1940 
1941   if (warn_frame_larger_than
1942     && get_frame_size () > frame_larger_than_size)
1943   {
1944       /* Issue a warning */
1945       warning (OPT_Wframe_larger_than_,
1946                "the frame size of %wd bytes is larger than %wd bytes",
1947                get_frame_size (), frame_larger_than_size);
1948   }
1949 
1950   /* First output the function prologue: code to set up the stack frame.  */
1951   targetm.asm_out.function_prologue (file, get_frame_size ());
1952 
1953   /* If the machine represents the prologue as RTL, the profiling code must
1954      be emitted when NOTE_INSN_PROLOGUE_END is scanned.  */
1955   if (! targetm.have_prologue ())
1956     profile_after_prologue (file);
1957 }
1958 
1959 static void
1960 profile_after_prologue (FILE *file ATTRIBUTE_UNUSED)
1961 {
1962   if (!targetm.profile_before_prologue () && crtl->profile)
1963     profile_function (file);
1964 }
1965 
1966 static void
1967 profile_function (FILE *file ATTRIBUTE_UNUSED)
1968 {
1969 #ifndef NO_PROFILE_COUNTERS
1970 # define NO_PROFILE_COUNTERS	0
1971 #endif
1972 #ifdef ASM_OUTPUT_REG_PUSH
1973   rtx sval = NULL, chain = NULL;
1974 
1975   if (cfun->returns_struct)
1976     sval = targetm.calls.struct_value_rtx (TREE_TYPE (current_function_decl),
1977 					   true);
1978   if (cfun->static_chain_decl)
1979     chain = targetm.calls.static_chain (current_function_decl, true);
1980 #endif /* ASM_OUTPUT_REG_PUSH */
1981 
1982   if (! NO_PROFILE_COUNTERS)
1983     {
1984       int align = MIN (BIGGEST_ALIGNMENT, LONG_TYPE_SIZE);
1985       switch_to_section (data_section);
1986       ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
1987       targetm.asm_out.internal_label (file, "LP", current_function_funcdef_no);
1988       assemble_integer (const0_rtx, LONG_TYPE_SIZE / BITS_PER_UNIT, align, 1);
1989     }
1990 
1991   switch_to_section (current_function_section ());
1992 
1993 #ifdef ASM_OUTPUT_REG_PUSH
1994   if (sval && REG_P (sval))
1995     ASM_OUTPUT_REG_PUSH (file, REGNO (sval));
1996   if (chain && REG_P (chain))
1997     ASM_OUTPUT_REG_PUSH (file, REGNO (chain));
1998 #endif
1999 
2000   FUNCTION_PROFILER (file, current_function_funcdef_no);
2001 
2002 #ifdef ASM_OUTPUT_REG_PUSH
2003   if (chain && REG_P (chain))
2004     ASM_OUTPUT_REG_POP (file, REGNO (chain));
2005   if (sval && REG_P (sval))
2006     ASM_OUTPUT_REG_POP (file, REGNO (sval));
2007 #endif
2008 }
2009 
2010 /* Output assembler code for the end of a function.
2011    For clarity, args are same as those of `final_start_function'
2012    even though not all of them are needed.  */
2013 
2014 void
2015 final_end_function (void)
2016 {
2017   app_disable ();
2018 
2019   if (!DECL_IGNORED_P (current_function_decl))
2020     debug_hooks->end_function (high_function_linenum);
2021 
2022   /* Finally, output the function epilogue:
2023      code to restore the stack frame and return to the caller.  */
2024   targetm.asm_out.function_epilogue (asm_out_file, get_frame_size ());
2025 
2026   /* And debug output.  */
2027   if (!DECL_IGNORED_P (current_function_decl))
2028     debug_hooks->end_epilogue (last_linenum, last_filename);
2029 
2030   if (!dwarf2_debug_info_emitted_p (current_function_decl)
2031       && dwarf2out_do_frame ())
2032     dwarf2out_end_epilogue (last_linenum, last_filename);
2033 
2034   some_local_dynamic_name = 0;
2035 }
2036 
2037 
2038 /* Dumper helper for basic block information. FILE is the assembly
2039    output file, and INSN is the instruction being emitted.  */
2040 
2041 static void
2042 dump_basic_block_info (FILE *file, rtx_insn *insn, basic_block *start_to_bb,
2043                        basic_block *end_to_bb, int bb_map_size, int *bb_seqn)
2044 {
2045   basic_block bb;
2046 
2047   if (!flag_debug_asm)
2048     return;
2049 
2050   if (INSN_UID (insn) < bb_map_size
2051       && (bb = start_to_bb[INSN_UID (insn)]) != NULL)
2052     {
2053       edge e;
2054       edge_iterator ei;
2055 
2056       fprintf (file, "%s BLOCK %d", ASM_COMMENT_START, bb->index);
2057       if (bb->frequency)
2058         fprintf (file, " freq:%d", bb->frequency);
2059       if (bb->count)
2060         fprintf (file, " count:%" PRId64,
2061                  bb->count);
2062       fprintf (file, " seq:%d", (*bb_seqn)++);
2063       fprintf (file, "\n%s PRED:", ASM_COMMENT_START);
2064       FOR_EACH_EDGE (e, ei, bb->preds)
2065         {
2066           dump_edge_info (file, e, TDF_DETAILS, 0);
2067         }
2068       fprintf (file, "\n");
2069     }
2070   if (INSN_UID (insn) < bb_map_size
2071       && (bb = end_to_bb[INSN_UID (insn)]) != NULL)
2072     {
2073       edge e;
2074       edge_iterator ei;
2075 
2076       fprintf (asm_out_file, "%s SUCC:", ASM_COMMENT_START);
2077       FOR_EACH_EDGE (e, ei, bb->succs)
2078        {
2079          dump_edge_info (asm_out_file, e, TDF_DETAILS, 1);
2080        }
2081       fprintf (file, "\n");
2082     }
2083 }
2084 
2085 /* Output assembler code for some insns: all or part of a function.
2086    For description of args, see `final_start_function', above.  */
2087 
2088 void
2089 final (rtx_insn *first, FILE *file, int optimize_p)
2090 {
2091   rtx_insn *insn, *next;
2092   int seen = 0;
2093 
2094   /* Used for -dA dump.  */
2095   basic_block *start_to_bb = NULL;
2096   basic_block *end_to_bb = NULL;
2097   int bb_map_size = 0;
2098   int bb_seqn = 0;
2099 
2100   last_ignored_compare = 0;
2101 
2102   if (HAVE_cc0)
2103     for (insn = first; insn; insn = NEXT_INSN (insn))
2104       {
2105 	/* If CC tracking across branches is enabled, record the insn which
2106 	   jumps to each branch only reached from one place.  */
2107 	if (optimize_p && JUMP_P (insn))
2108 	  {
2109 	    rtx lab = JUMP_LABEL (insn);
2110 	    if (lab && LABEL_P (lab) && LABEL_NUSES (lab) == 1)
2111 	      {
2112 		LABEL_REFS (lab) = insn;
2113 	      }
2114 	  }
2115       }
2116 
2117   init_recog ();
2118 
2119   CC_STATUS_INIT;
2120 
2121   if (flag_debug_asm)
2122     {
2123       basic_block bb;
2124 
2125       bb_map_size = get_max_uid () + 1;
2126       start_to_bb = XCNEWVEC (basic_block, bb_map_size);
2127       end_to_bb = XCNEWVEC (basic_block, bb_map_size);
2128 
2129       /* There is no cfg for a thunk.  */
2130       if (!cfun->is_thunk)
2131 	FOR_EACH_BB_REVERSE_FN (bb, cfun)
2132 	  {
2133 	    start_to_bb[INSN_UID (BB_HEAD (bb))] = bb;
2134 	    end_to_bb[INSN_UID (BB_END (bb))] = bb;
2135 	  }
2136     }
2137 
2138   /* Output the insns.  */
2139   for (insn = first; insn;)
2140     {
2141       if (HAVE_ATTR_length)
2142 	{
2143 	  if ((unsigned) INSN_UID (insn) >= INSN_ADDRESSES_SIZE ())
2144 	    {
2145 	      /* This can be triggered by bugs elsewhere in the compiler if
2146 		 new insns are created after init_insn_lengths is called.  */
2147 	      gcc_assert (NOTE_P (insn));
2148 	      insn_current_address = -1;
2149 	    }
2150 	  else
2151 	    insn_current_address = INSN_ADDRESSES (INSN_UID (insn));
2152 	}
2153 
2154       dump_basic_block_info (file, insn, start_to_bb, end_to_bb,
2155                              bb_map_size, &bb_seqn);
2156       insn = final_scan_insn (insn, file, optimize_p, 0, &seen);
2157     }
2158 
2159   if (flag_debug_asm)
2160     {
2161       free (start_to_bb);
2162       free (end_to_bb);
2163     }
2164 
2165   /* Remove CFI notes, to avoid compare-debug failures.  */
2166   for (insn = first; insn; insn = next)
2167     {
2168       next = NEXT_INSN (insn);
2169       if (NOTE_P (insn)
2170 	  && (NOTE_KIND (insn) == NOTE_INSN_CFI
2171 	      || NOTE_KIND (insn) == NOTE_INSN_CFI_LABEL))
2172 	delete_insn (insn);
2173     }
2174 }
2175 
2176 const char *
2177 get_insn_template (int code, rtx insn)
2178 {
2179   switch (insn_data[code].output_format)
2180     {
2181     case INSN_OUTPUT_FORMAT_SINGLE:
2182       return insn_data[code].output.single;
2183     case INSN_OUTPUT_FORMAT_MULTI:
2184       return insn_data[code].output.multi[which_alternative];
2185     case INSN_OUTPUT_FORMAT_FUNCTION:
2186       gcc_assert (insn);
2187       return (*insn_data[code].output.function) (recog_data.operand,
2188 						 as_a <rtx_insn *> (insn));
2189 
2190     default:
2191       gcc_unreachable ();
2192     }
2193 }
2194 
2195 /* Emit the appropriate declaration for an alternate-entry-point
2196    symbol represented by INSN, to FILE.  INSN is a CODE_LABEL with
2197    LABEL_KIND != LABEL_NORMAL.
2198 
2199    The case fall-through in this function is intentional.  */
2200 static void
2201 output_alternate_entry_point (FILE *file, rtx_insn *insn)
2202 {
2203   const char *name = LABEL_NAME (insn);
2204 
2205   switch (LABEL_KIND (insn))
2206     {
2207     case LABEL_WEAK_ENTRY:
2208 #ifdef ASM_WEAKEN_LABEL
2209       ASM_WEAKEN_LABEL (file, name);
2210       gcc_fallthrough ();
2211 #endif
2212     case LABEL_GLOBAL_ENTRY:
2213       targetm.asm_out.globalize_label (file, name);
2214       gcc_fallthrough ();
2215     case LABEL_STATIC_ENTRY:
2216 #ifdef ASM_OUTPUT_TYPE_DIRECTIVE
2217       ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
2218 #endif
2219       ASM_OUTPUT_LABEL (file, name);
2220       break;
2221 
2222     case LABEL_NORMAL:
2223     default:
2224       gcc_unreachable ();
2225     }
2226 }
2227 
2228 /* Given a CALL_INSN, find and return the nested CALL. */
2229 static rtx
2230 call_from_call_insn (rtx_call_insn *insn)
2231 {
2232   rtx x;
2233   gcc_assert (CALL_P (insn));
2234   x = PATTERN (insn);
2235 
2236   while (GET_CODE (x) != CALL)
2237     {
2238       switch (GET_CODE (x))
2239 	{
2240 	default:
2241 	  gcc_unreachable ();
2242 	case COND_EXEC:
2243 	  x = COND_EXEC_CODE (x);
2244 	  break;
2245 	case PARALLEL:
2246 	  x = XVECEXP (x, 0, 0);
2247 	  break;
2248 	case SET:
2249 	  x = XEXP (x, 1);
2250 	  break;
2251 	}
2252     }
2253   return x;
2254 }
2255 
2256 /* Print a comment into the asm showing FILENAME, LINENUM, and the
2257    corresponding source line, if available.  */
2258 
2259 static void
2260 asm_show_source (const char *filename, int linenum)
2261 {
2262   if (!filename)
2263     return;
2264 
2265   int line_size;
2266   const char *line = location_get_source_line (filename, linenum, &line_size);
2267   if (!line)
2268     return;
2269 
2270   fprintf (asm_out_file, "%s %s:%i: ", ASM_COMMENT_START, filename, linenum);
2271   /* "line" is not 0-terminated, so we must use line_size.  */
2272   fwrite (line, 1, line_size, asm_out_file);
2273   fputc ('\n', asm_out_file);
2274 }
2275 
2276 /* The final scan for one insn, INSN.
2277    Args are same as in `final', except that INSN
2278    is the insn being scanned.
2279    Value returned is the next insn to be scanned.
2280 
2281    NOPEEPHOLES is the flag to disallow peephole processing (currently
2282    used for within delayed branch sequence output).
2283 
2284    SEEN is used to track the end of the prologue, for emitting
2285    debug information.  We force the emission of a line note after
2286    both NOTE_INSN_PROLOGUE_END and NOTE_INSN_FUNCTION_BEG.  */
2287 
2288 rtx_insn *
2289 final_scan_insn (rtx_insn *insn, FILE *file, int optimize_p ATTRIBUTE_UNUSED,
2290 		 int nopeepholes ATTRIBUTE_UNUSED, int *seen)
2291 {
2292 #if HAVE_cc0
2293   rtx set;
2294 #endif
2295   rtx_insn *next;
2296 
2297   insn_counter++;
2298 
2299   /* Ignore deleted insns.  These can occur when we split insns (due to a
2300      template of "#") while not optimizing.  */
2301   if (insn->deleted ())
2302     return NEXT_INSN (insn);
2303 
2304   switch (GET_CODE (insn))
2305     {
2306     case NOTE:
2307       switch (NOTE_KIND (insn))
2308 	{
2309 	case NOTE_INSN_DELETED:
2310 	case NOTE_INSN_UPDATE_SJLJ_CONTEXT:
2311 	  break;
2312 
2313 	case NOTE_INSN_SWITCH_TEXT_SECTIONS:
2314 	  in_cold_section_p = !in_cold_section_p;
2315 
2316 	  if (dwarf2out_do_frame ())
2317 	    dwarf2out_switch_text_section ();
2318 	  else if (!DECL_IGNORED_P (current_function_decl))
2319 	    debug_hooks->switch_text_section ();
2320 
2321 	  switch_to_section (current_function_section ());
2322 	  targetm.asm_out.function_switched_text_sections (asm_out_file,
2323 							   current_function_decl,
2324 							   in_cold_section_p);
2325 	  /* Emit a label for the split cold section.  Form label name by
2326 	     suffixing "cold" to the original function's name.  */
2327 	  if (in_cold_section_p)
2328 	    {
2329 	      cold_function_name
2330 		= clone_function_name (current_function_decl, "cold");
2331 #ifdef ASM_DECLARE_COLD_FUNCTION_NAME
2332 	      ASM_DECLARE_COLD_FUNCTION_NAME (asm_out_file,
2333 					      IDENTIFIER_POINTER
2334 					          (cold_function_name),
2335 					      current_function_decl);
2336 #else
2337 	      ASM_OUTPUT_LABEL (asm_out_file,
2338 				IDENTIFIER_POINTER (cold_function_name));
2339 #endif
2340 	      if (dwarf2out_do_frame ()
2341 	          && cfun->fde->dw_fde_second_begin != NULL)
2342 		ASM_OUTPUT_LABEL (asm_out_file, cfun->fde->dw_fde_second_begin);
2343 	    }
2344 	  break;
2345 
2346 	case NOTE_INSN_BASIC_BLOCK:
2347 	  if (need_profile_function)
2348 	    {
2349 	      profile_function (asm_out_file);
2350 	      need_profile_function = false;
2351 	    }
2352 
2353 	  if (targetm.asm_out.unwind_emit)
2354 	    targetm.asm_out.unwind_emit (asm_out_file, insn);
2355 
2356           discriminator = NOTE_BASIC_BLOCK (insn)->discriminator;
2357 
2358 	  break;
2359 
2360 	case NOTE_INSN_EH_REGION_BEG:
2361 	  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHB",
2362 				  NOTE_EH_HANDLER (insn));
2363 	  break;
2364 
2365 	case NOTE_INSN_EH_REGION_END:
2366 	  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHE",
2367 				  NOTE_EH_HANDLER (insn));
2368 	  break;
2369 
2370 	case NOTE_INSN_PROLOGUE_END:
2371 	  targetm.asm_out.function_end_prologue (file);
2372 	  profile_after_prologue (file);
2373 
2374 	  if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
2375 	    {
2376 	      *seen |= SEEN_EMITTED;
2377 	      force_source_line = true;
2378 	    }
2379 	  else
2380 	    *seen |= SEEN_NOTE;
2381 
2382 	  break;
2383 
2384 	case NOTE_INSN_EPILOGUE_BEG:
2385           if (!DECL_IGNORED_P (current_function_decl))
2386             (*debug_hooks->begin_epilogue) (last_linenum, last_filename);
2387 	  targetm.asm_out.function_begin_epilogue (file);
2388 	  break;
2389 
2390 	case NOTE_INSN_CFI:
2391 	  dwarf2out_emit_cfi (NOTE_CFI (insn));
2392 	  break;
2393 
2394 	case NOTE_INSN_CFI_LABEL:
2395 	  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LCFI",
2396 				  NOTE_LABEL_NUMBER (insn));
2397 	  break;
2398 
2399 	case NOTE_INSN_FUNCTION_BEG:
2400 	  if (need_profile_function)
2401 	    {
2402 	      profile_function (asm_out_file);
2403 	      need_profile_function = false;
2404 	    }
2405 
2406 	  app_disable ();
2407 	  if (!DECL_IGNORED_P (current_function_decl))
2408 	    debug_hooks->end_prologue (last_linenum, last_filename);
2409 
2410 	  if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
2411 	    {
2412 	      *seen |= SEEN_EMITTED;
2413 	      force_source_line = true;
2414 	    }
2415 	  else
2416 	    *seen |= SEEN_NOTE;
2417 
2418 	  break;
2419 
2420 	case NOTE_INSN_BLOCK_BEG:
2421 	  if (debug_info_level == DINFO_LEVEL_NORMAL
2422 	      || debug_info_level == DINFO_LEVEL_VERBOSE
2423 	      || write_symbols == DWARF2_DEBUG
2424 	      || write_symbols == VMS_AND_DWARF2_DEBUG
2425 	      || write_symbols == VMS_DEBUG)
2426 	    {
2427 	      int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
2428 
2429 	      app_disable ();
2430 	      ++block_depth;
2431 	      high_block_linenum = last_linenum;
2432 
2433 	      /* Output debugging info about the symbol-block beginning.  */
2434 	      if (!DECL_IGNORED_P (current_function_decl))
2435 		debug_hooks->begin_block (last_linenum, n);
2436 
2437 	      /* Mark this block as output.  */
2438 	      TREE_ASM_WRITTEN (NOTE_BLOCK (insn)) = 1;
2439 	      BLOCK_IN_COLD_SECTION_P (NOTE_BLOCK (insn)) = in_cold_section_p;
2440 	    }
2441 	  if (write_symbols == DBX_DEBUG
2442 	      || write_symbols == SDB_DEBUG)
2443 	    {
2444 	      location_t *locus_ptr
2445 		= block_nonartificial_location (NOTE_BLOCK (insn));
2446 
2447 	      if (locus_ptr != NULL)
2448 		{
2449 		  override_filename = LOCATION_FILE (*locus_ptr);
2450 		  override_linenum = LOCATION_LINE (*locus_ptr);
2451 		  override_columnnum = LOCATION_COLUMN (*locus_ptr);
2452 		}
2453 	    }
2454 	  break;
2455 
2456 	case NOTE_INSN_BLOCK_END:
2457 	  if (debug_info_level == DINFO_LEVEL_NORMAL
2458 	      || debug_info_level == DINFO_LEVEL_VERBOSE
2459 	      || write_symbols == DWARF2_DEBUG
2460 	      || write_symbols == VMS_AND_DWARF2_DEBUG
2461 	      || write_symbols == VMS_DEBUG)
2462 	    {
2463 	      int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
2464 
2465 	      app_disable ();
2466 
2467 	      /* End of a symbol-block.  */
2468 	      --block_depth;
2469 	      gcc_assert (block_depth >= 0);
2470 
2471 	      if (!DECL_IGNORED_P (current_function_decl))
2472 		debug_hooks->end_block (high_block_linenum, n);
2473 	      gcc_assert (BLOCK_IN_COLD_SECTION_P (NOTE_BLOCK (insn))
2474 			  == in_cold_section_p);
2475 	    }
2476 	  if (write_symbols == DBX_DEBUG
2477 	      || write_symbols == SDB_DEBUG)
2478 	    {
2479 	      tree outer_block = BLOCK_SUPERCONTEXT (NOTE_BLOCK (insn));
2480 	      location_t *locus_ptr
2481 		= block_nonartificial_location (outer_block);
2482 
2483 	      if (locus_ptr != NULL)
2484 		{
2485 		  override_filename = LOCATION_FILE (*locus_ptr);
2486 		  override_linenum = LOCATION_LINE (*locus_ptr);
2487 		  override_columnnum = LOCATION_COLUMN (*locus_ptr);
2488 		}
2489 	      else
2490 		{
2491 		  override_filename = NULL;
2492 		  override_linenum = 0;
2493 		  override_columnnum = 0;
2494 		}
2495 	    }
2496 	  break;
2497 
2498 	case NOTE_INSN_DELETED_LABEL:
2499 	  /* Emit the label.  We may have deleted the CODE_LABEL because
2500 	     the label could be proved to be unreachable, though still
2501 	     referenced (in the form of having its address taken.  */
2502 	  ASM_OUTPUT_DEBUG_LABEL (file, "L", CODE_LABEL_NUMBER (insn));
2503 	  break;
2504 
2505 	case NOTE_INSN_DELETED_DEBUG_LABEL:
2506 	  /* Similarly, but need to use different namespace for it.  */
2507 	  if (CODE_LABEL_NUMBER (insn) != -1)
2508 	    ASM_OUTPUT_DEBUG_LABEL (file, "LDL", CODE_LABEL_NUMBER (insn));
2509 	  break;
2510 
2511 	case NOTE_INSN_VAR_LOCATION:
2512 	case NOTE_INSN_CALL_ARG_LOCATION:
2513 	  if (!DECL_IGNORED_P (current_function_decl))
2514 	    debug_hooks->var_location (insn);
2515 	  break;
2516 
2517 	default:
2518 	  gcc_unreachable ();
2519 	  break;
2520 	}
2521       break;
2522 
2523     case BARRIER:
2524       break;
2525 
2526     case CODE_LABEL:
2527       /* The target port might emit labels in the output function for
2528 	 some insn, e.g. sh.c output_branchy_insn.  */
2529       if (CODE_LABEL_NUMBER (insn) <= max_labelno)
2530 	{
2531 	  int align = LABEL_TO_ALIGNMENT (insn);
2532 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
2533 	  int max_skip = LABEL_TO_MAX_SKIP (insn);
2534 #endif
2535 
2536 	  if (align && NEXT_INSN (insn))
2537 	    {
2538 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
2539 	      ASM_OUTPUT_MAX_SKIP_ALIGN (file, align, max_skip);
2540 #else
2541 #ifdef ASM_OUTPUT_ALIGN_WITH_NOP
2542               ASM_OUTPUT_ALIGN_WITH_NOP (file, align);
2543 #else
2544 	      ASM_OUTPUT_ALIGN (file, align);
2545 #endif
2546 #endif
2547 	    }
2548 	}
2549       CC_STATUS_INIT;
2550 
2551       if (!DECL_IGNORED_P (current_function_decl) && LABEL_NAME (insn))
2552 	debug_hooks->label (as_a <rtx_code_label *> (insn));
2553 
2554       app_disable ();
2555 
2556       next = next_nonnote_insn (insn);
2557       /* If this label is followed by a jump-table, make sure we put
2558 	 the label in the read-only section.  Also possibly write the
2559 	 label and jump table together.  */
2560       if (next != 0 && JUMP_TABLE_DATA_P (next))
2561 	{
2562 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
2563 	  /* In this case, the case vector is being moved by the
2564 	     target, so don't output the label at all.  Leave that
2565 	     to the back end macros.  */
2566 #else
2567 	  if (! JUMP_TABLES_IN_TEXT_SECTION)
2568 	    {
2569 	      int log_align;
2570 
2571 	      switch_to_section (targetm.asm_out.function_rodata_section
2572 				 (current_function_decl));
2573 
2574 #ifdef ADDR_VEC_ALIGN
2575 	      log_align = ADDR_VEC_ALIGN (next);
2576 #else
2577 	      log_align = exact_log2 (BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2578 #endif
2579 	      ASM_OUTPUT_ALIGN (file, log_align);
2580 	    }
2581 	  else
2582 	    switch_to_section (current_function_section ());
2583 
2584 #ifdef ASM_OUTPUT_CASE_LABEL
2585 	  ASM_OUTPUT_CASE_LABEL (file, "L", CODE_LABEL_NUMBER (insn),
2586 				 next);
2587 #else
2588 	  targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
2589 #endif
2590 #endif
2591 	  break;
2592 	}
2593       if (LABEL_ALT_ENTRY_P (insn))
2594 	output_alternate_entry_point (file, insn);
2595       else
2596 	targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
2597       break;
2598 
2599     default:
2600       {
2601 	rtx body = PATTERN (insn);
2602 	int insn_code_number;
2603 	const char *templ;
2604 	bool is_stmt;
2605 
2606 	/* Reset this early so it is correct for ASM statements.  */
2607 	current_insn_predicate = NULL_RTX;
2608 
2609 	/* An INSN, JUMP_INSN or CALL_INSN.
2610 	   First check for special kinds that recog doesn't recognize.  */
2611 
2612 	if (GET_CODE (body) == USE /* These are just declarations.  */
2613 	    || GET_CODE (body) == CLOBBER)
2614 	  break;
2615 
2616 #if HAVE_cc0
2617 	{
2618 	  /* If there is a REG_CC_SETTER note on this insn, it means that
2619 	     the setting of the condition code was done in the delay slot
2620 	     of the insn that branched here.  So recover the cc status
2621 	     from the insn that set it.  */
2622 
2623 	  rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
2624 	  if (note)
2625 	    {
2626 	      rtx_insn *other = as_a <rtx_insn *> (XEXP (note, 0));
2627 	      NOTICE_UPDATE_CC (PATTERN (other), other);
2628 	      cc_prev_status = cc_status;
2629 	    }
2630 	}
2631 #endif
2632 
2633 	/* Detect insns that are really jump-tables
2634 	   and output them as such.  */
2635 
2636         if (JUMP_TABLE_DATA_P (insn))
2637 	  {
2638 #if !(defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC))
2639 	    int vlen, idx;
2640 #endif
2641 
2642 	    if (! JUMP_TABLES_IN_TEXT_SECTION)
2643 	      switch_to_section (targetm.asm_out.function_rodata_section
2644 				 (current_function_decl));
2645 	    else
2646 	      switch_to_section (current_function_section ());
2647 
2648 	    app_disable ();
2649 
2650 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
2651 	    if (GET_CODE (body) == ADDR_VEC)
2652 	      {
2653 #ifdef ASM_OUTPUT_ADDR_VEC
2654 		ASM_OUTPUT_ADDR_VEC (PREV_INSN (insn), body);
2655 #else
2656 		gcc_unreachable ();
2657 #endif
2658 	      }
2659 	    else
2660 	      {
2661 #ifdef ASM_OUTPUT_ADDR_DIFF_VEC
2662 		ASM_OUTPUT_ADDR_DIFF_VEC (PREV_INSN (insn), body);
2663 #else
2664 		gcc_unreachable ();
2665 #endif
2666 	      }
2667 #else
2668 	    vlen = XVECLEN (body, GET_CODE (body) == ADDR_DIFF_VEC);
2669 	    for (idx = 0; idx < vlen; idx++)
2670 	      {
2671 		if (GET_CODE (body) == ADDR_VEC)
2672 		  {
2673 #ifdef ASM_OUTPUT_ADDR_VEC_ELT
2674 		    ASM_OUTPUT_ADDR_VEC_ELT
2675 		      (file, CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 0, idx), 0)));
2676 #else
2677 		    gcc_unreachable ();
2678 #endif
2679 		  }
2680 		else
2681 		  {
2682 #ifdef ASM_OUTPUT_ADDR_DIFF_ELT
2683 		    ASM_OUTPUT_ADDR_DIFF_ELT
2684 		      (file,
2685 		       body,
2686 		       CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 1, idx), 0)),
2687 		       CODE_LABEL_NUMBER (XEXP (XEXP (body, 0), 0)));
2688 #else
2689 		    gcc_unreachable ();
2690 #endif
2691 		  }
2692 	      }
2693 #ifdef ASM_OUTPUT_CASE_END
2694 	    ASM_OUTPUT_CASE_END (file,
2695 				 CODE_LABEL_NUMBER (PREV_INSN (insn)),
2696 				 insn);
2697 #endif
2698 #endif
2699 
2700 	    switch_to_section (current_function_section ());
2701 
2702 	    break;
2703 	  }
2704 	/* Output this line note if it is the first or the last line
2705 	   note in a row.  */
2706 	if (!DECL_IGNORED_P (current_function_decl)
2707 	    && notice_source_line (insn, &is_stmt))
2708 	  {
2709 	    if (flag_verbose_asm)
2710 	      asm_show_source (last_filename, last_linenum);
2711 	    (*debug_hooks->source_line) (last_linenum, last_columnnum,
2712 					 last_filename, last_discriminator,
2713 					 is_stmt);
2714 	  }
2715 
2716 	if (GET_CODE (body) == PARALLEL
2717 	    && GET_CODE (XVECEXP (body, 0, 0)) == ASM_INPUT)
2718 	  body = XVECEXP (body, 0, 0);
2719 
2720 	if (GET_CODE (body) == ASM_INPUT)
2721 	  {
2722 	    const char *string = XSTR (body, 0);
2723 
2724 	    /* There's no telling what that did to the condition codes.  */
2725 	    CC_STATUS_INIT;
2726 
2727 	    if (string[0])
2728 	      {
2729 		expanded_location loc;
2730 
2731 		app_enable ();
2732 		loc = expand_location (ASM_INPUT_SOURCE_LOCATION (body));
2733 		if (*loc.file && loc.line)
2734 		  fprintf (asm_out_file, "%s %i \"%s\" 1\n",
2735 			   ASM_COMMENT_START, loc.line, loc.file);
2736 		fprintf (asm_out_file, "\t%s\n", string);
2737 #if HAVE_AS_LINE_ZERO
2738 		if (*loc.file && loc.line)
2739 		  fprintf (asm_out_file, "%s 0 \"\" 2\n", ASM_COMMENT_START);
2740 #endif
2741 	      }
2742 	    break;
2743 	  }
2744 
2745 	/* Detect `asm' construct with operands.  */
2746 	if (asm_noperands (body) >= 0)
2747 	  {
2748 	    unsigned int noperands = asm_noperands (body);
2749 	    rtx *ops = XALLOCAVEC (rtx, noperands);
2750 	    const char *string;
2751 	    location_t loc;
2752 	    expanded_location expanded;
2753 
2754 	    /* There's no telling what that did to the condition codes.  */
2755 	    CC_STATUS_INIT;
2756 
2757 	    /* Get out the operand values.  */
2758 	    string = decode_asm_operands (body, ops, NULL, NULL, NULL, &loc);
2759 	    /* Inhibit dying on what would otherwise be compiler bugs.  */
2760 	    insn_noperands = noperands;
2761 	    this_is_asm_operands = insn;
2762 	    expanded = expand_location (loc);
2763 
2764 #ifdef FINAL_PRESCAN_INSN
2765 	    FINAL_PRESCAN_INSN (insn, ops, insn_noperands);
2766 #endif
2767 
2768 	    /* Output the insn using them.  */
2769 	    if (string[0])
2770 	      {
2771 		app_enable ();
2772 		if (expanded.file && expanded.line)
2773 		  fprintf (asm_out_file, "%s %i \"%s\" 1\n",
2774 			   ASM_COMMENT_START, expanded.line, expanded.file);
2775 	        output_asm_insn (string, ops);
2776 #if HAVE_AS_LINE_ZERO
2777 		if (expanded.file && expanded.line)
2778 		  fprintf (asm_out_file, "%s 0 \"\" 2\n", ASM_COMMENT_START);
2779 #endif
2780 	      }
2781 
2782 	    if (targetm.asm_out.final_postscan_insn)
2783 	      targetm.asm_out.final_postscan_insn (file, insn, ops,
2784 						   insn_noperands);
2785 
2786 	    this_is_asm_operands = 0;
2787 	    break;
2788 	  }
2789 
2790 	app_disable ();
2791 
2792 	if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (body))
2793 	  {
2794 	    /* A delayed-branch sequence */
2795 	    int i;
2796 
2797 	    final_sequence = seq;
2798 
2799 	    /* The first insn in this SEQUENCE might be a JUMP_INSN that will
2800 	       force the restoration of a comparison that was previously
2801 	       thought unnecessary.  If that happens, cancel this sequence
2802 	       and cause that insn to be restored.  */
2803 
2804 	    next = final_scan_insn (seq->insn (0), file, 0, 1, seen);
2805 	    if (next != seq->insn (1))
2806 	      {
2807 		final_sequence = 0;
2808 		return next;
2809 	      }
2810 
2811 	    for (i = 1; i < seq->len (); i++)
2812 	      {
2813 		rtx_insn *insn = seq->insn (i);
2814 		rtx_insn *next = NEXT_INSN (insn);
2815 		/* We loop in case any instruction in a delay slot gets
2816 		   split.  */
2817 		do
2818 		  insn = final_scan_insn (insn, file, 0, 1, seen);
2819 		while (insn != next);
2820 	      }
2821 #ifdef DBR_OUTPUT_SEQEND
2822 	    DBR_OUTPUT_SEQEND (file);
2823 #endif
2824 	    final_sequence = 0;
2825 
2826 	    /* If the insn requiring the delay slot was a CALL_INSN, the
2827 	       insns in the delay slot are actually executed before the
2828 	       called function.  Hence we don't preserve any CC-setting
2829 	       actions in these insns and the CC must be marked as being
2830 	       clobbered by the function.  */
2831 	    if (CALL_P (seq->insn (0)))
2832 	      {
2833 		CC_STATUS_INIT;
2834 	      }
2835 	    break;
2836 	  }
2837 
2838 	/* We have a real machine instruction as rtl.  */
2839 
2840 	body = PATTERN (insn);
2841 
2842 #if HAVE_cc0
2843 	set = single_set (insn);
2844 
2845 	/* Check for redundant test and compare instructions
2846 	   (when the condition codes are already set up as desired).
2847 	   This is done only when optimizing; if not optimizing,
2848 	   it should be possible for the user to alter a variable
2849 	   with the debugger in between statements
2850 	   and the next statement should reexamine the variable
2851 	   to compute the condition codes.  */
2852 
2853 	if (optimize_p)
2854 	  {
2855 	    if (set
2856 		&& GET_CODE (SET_DEST (set)) == CC0
2857 		&& insn != last_ignored_compare)
2858 	      {
2859 		rtx src1, src2;
2860 		if (GET_CODE (SET_SRC (set)) == SUBREG)
2861 		  SET_SRC (set) = alter_subreg (&SET_SRC (set), true);
2862 
2863 		src1 = SET_SRC (set);
2864 		src2 = NULL_RTX;
2865 		if (GET_CODE (SET_SRC (set)) == COMPARE)
2866 		  {
2867 		    if (GET_CODE (XEXP (SET_SRC (set), 0)) == SUBREG)
2868 		      XEXP (SET_SRC (set), 0)
2869 			= alter_subreg (&XEXP (SET_SRC (set), 0), true);
2870 		    if (GET_CODE (XEXP (SET_SRC (set), 1)) == SUBREG)
2871 		      XEXP (SET_SRC (set), 1)
2872 			= alter_subreg (&XEXP (SET_SRC (set), 1), true);
2873 		    if (XEXP (SET_SRC (set), 1)
2874 			== CONST0_RTX (GET_MODE (XEXP (SET_SRC (set), 0))))
2875 		      src2 = XEXP (SET_SRC (set), 0);
2876 		  }
2877 		if ((cc_status.value1 != 0
2878 		     && rtx_equal_p (src1, cc_status.value1))
2879 		    || (cc_status.value2 != 0
2880 			&& rtx_equal_p (src1, cc_status.value2))
2881 		    || (src2 != 0 && cc_status.value1 != 0
2882 		        && rtx_equal_p (src2, cc_status.value1))
2883 		    || (src2 != 0 && cc_status.value2 != 0
2884 			&& rtx_equal_p (src2, cc_status.value2)))
2885 		  {
2886 		    /* Don't delete insn if it has an addressing side-effect.  */
2887 		    if (! FIND_REG_INC_NOTE (insn, NULL_RTX)
2888 			/* or if anything in it is volatile.  */
2889 			&& ! volatile_refs_p (PATTERN (insn)))
2890 		      {
2891 			/* We don't really delete the insn; just ignore it.  */
2892 			last_ignored_compare = insn;
2893 			break;
2894 		      }
2895 		  }
2896 	      }
2897 	  }
2898 
2899 	/* If this is a conditional branch, maybe modify it
2900 	   if the cc's are in a nonstandard state
2901 	   so that it accomplishes the same thing that it would
2902 	   do straightforwardly if the cc's were set up normally.  */
2903 
2904 	if (cc_status.flags != 0
2905 	    && JUMP_P (insn)
2906 	    && GET_CODE (body) == SET
2907 	    && SET_DEST (body) == pc_rtx
2908 	    && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
2909 	    && COMPARISON_P (XEXP (SET_SRC (body), 0))
2910 	    && XEXP (XEXP (SET_SRC (body), 0), 0) == cc0_rtx)
2911 	  {
2912 	    /* This function may alter the contents of its argument
2913 	       and clear some of the cc_status.flags bits.
2914 	       It may also return 1 meaning condition now always true
2915 	       or -1 meaning condition now always false
2916 	       or 2 meaning condition nontrivial but altered.  */
2917 	    int result = alter_cond (XEXP (SET_SRC (body), 0));
2918 	    /* If condition now has fixed value, replace the IF_THEN_ELSE
2919 	       with its then-operand or its else-operand.  */
2920 	    if (result == 1)
2921 	      SET_SRC (body) = XEXP (SET_SRC (body), 1);
2922 	    if (result == -1)
2923 	      SET_SRC (body) = XEXP (SET_SRC (body), 2);
2924 
2925 	    /* The jump is now either unconditional or a no-op.
2926 	       If it has become a no-op, don't try to output it.
2927 	       (It would not be recognized.)  */
2928 	    if (SET_SRC (body) == pc_rtx)
2929 	      {
2930 	        delete_insn (insn);
2931 		break;
2932 	      }
2933 	    else if (ANY_RETURN_P (SET_SRC (body)))
2934 	      /* Replace (set (pc) (return)) with (return).  */
2935 	      PATTERN (insn) = body = SET_SRC (body);
2936 
2937 	    /* Rerecognize the instruction if it has changed.  */
2938 	    if (result != 0)
2939 	      INSN_CODE (insn) = -1;
2940 	  }
2941 
2942 	/* If this is a conditional trap, maybe modify it if the cc's
2943 	   are in a nonstandard state so that it accomplishes the same
2944 	   thing that it would do straightforwardly if the cc's were
2945 	   set up normally.  */
2946 	if (cc_status.flags != 0
2947 	    && NONJUMP_INSN_P (insn)
2948 	    && GET_CODE (body) == TRAP_IF
2949 	    && COMPARISON_P (TRAP_CONDITION (body))
2950 	    && XEXP (TRAP_CONDITION (body), 0) == cc0_rtx)
2951 	  {
2952 	    /* This function may alter the contents of its argument
2953 	       and clear some of the cc_status.flags bits.
2954 	       It may also return 1 meaning condition now always true
2955 	       or -1 meaning condition now always false
2956 	       or 2 meaning condition nontrivial but altered.  */
2957 	    int result = alter_cond (TRAP_CONDITION (body));
2958 
2959 	    /* If TRAP_CONDITION has become always false, delete the
2960 	       instruction.  */
2961 	    if (result == -1)
2962 	      {
2963 		delete_insn (insn);
2964 		break;
2965 	      }
2966 
2967 	    /* If TRAP_CONDITION has become always true, replace
2968 	       TRAP_CONDITION with const_true_rtx.  */
2969 	    if (result == 1)
2970 	      TRAP_CONDITION (body) = const_true_rtx;
2971 
2972 	    /* Rerecognize the instruction if it has changed.  */
2973 	    if (result != 0)
2974 	      INSN_CODE (insn) = -1;
2975 	  }
2976 
2977 	/* Make same adjustments to instructions that examine the
2978 	   condition codes without jumping and instructions that
2979 	   handle conditional moves (if this machine has either one).  */
2980 
2981 	if (cc_status.flags != 0
2982 	    && set != 0)
2983 	  {
2984 	    rtx cond_rtx, then_rtx, else_rtx;
2985 
2986 	    if (!JUMP_P (insn)
2987 		&& GET_CODE (SET_SRC (set)) == IF_THEN_ELSE)
2988 	      {
2989 		cond_rtx = XEXP (SET_SRC (set), 0);
2990 		then_rtx = XEXP (SET_SRC (set), 1);
2991 		else_rtx = XEXP (SET_SRC (set), 2);
2992 	      }
2993 	    else
2994 	      {
2995 		cond_rtx = SET_SRC (set);
2996 		then_rtx = const_true_rtx;
2997 		else_rtx = const0_rtx;
2998 	      }
2999 
3000 	    if (COMPARISON_P (cond_rtx)
3001 		&& XEXP (cond_rtx, 0) == cc0_rtx)
3002 	      {
3003 		int result;
3004 		result = alter_cond (cond_rtx);
3005 		if (result == 1)
3006 		  validate_change (insn, &SET_SRC (set), then_rtx, 0);
3007 		else if (result == -1)
3008 		  validate_change (insn, &SET_SRC (set), else_rtx, 0);
3009 		else if (result == 2)
3010 		  INSN_CODE (insn) = -1;
3011 		if (SET_DEST (set) == SET_SRC (set))
3012 		  delete_insn (insn);
3013 	      }
3014 	  }
3015 
3016 #endif
3017 
3018 	/* Do machine-specific peephole optimizations if desired.  */
3019 
3020 	if (HAVE_peephole && optimize_p && !flag_no_peephole && !nopeepholes)
3021 	  {
3022 	    rtx_insn *next = peephole (insn);
3023 	    /* When peepholing, if there were notes within the peephole,
3024 	       emit them before the peephole.  */
3025 	    if (next != 0 && next != NEXT_INSN (insn))
3026 	      {
3027 		rtx_insn *note, *prev = PREV_INSN (insn);
3028 
3029 		for (note = NEXT_INSN (insn); note != next;
3030 		     note = NEXT_INSN (note))
3031 		  final_scan_insn (note, file, optimize_p, nopeepholes, seen);
3032 
3033 		/* Put the notes in the proper position for a later
3034 		   rescan.  For example, the SH target can do this
3035 		   when generating a far jump in a delayed branch
3036 		   sequence.  */
3037 		note = NEXT_INSN (insn);
3038 		SET_PREV_INSN (note) = prev;
3039 		SET_NEXT_INSN (prev) = note;
3040 		SET_NEXT_INSN (PREV_INSN (next)) = insn;
3041 		SET_PREV_INSN (insn) = PREV_INSN (next);
3042 		SET_NEXT_INSN (insn) = next;
3043 		SET_PREV_INSN (next) = insn;
3044 	      }
3045 
3046 	    /* PEEPHOLE might have changed this.  */
3047 	    body = PATTERN (insn);
3048 	  }
3049 
3050 	/* Try to recognize the instruction.
3051 	   If successful, verify that the operands satisfy the
3052 	   constraints for the instruction.  Crash if they don't,
3053 	   since `reload' should have changed them so that they do.  */
3054 
3055 	insn_code_number = recog_memoized (insn);
3056 	cleanup_subreg_operands (insn);
3057 
3058 	/* Dump the insn in the assembly for debugging (-dAP).
3059 	   If the final dump is requested as slim RTL, dump slim
3060 	   RTL to the assembly file also.  */
3061 	if (flag_dump_rtl_in_asm)
3062 	  {
3063 	    print_rtx_head = ASM_COMMENT_START;
3064 	    if (! (dump_flags & TDF_SLIM))
3065 	      print_rtl_single (asm_out_file, insn);
3066 	    else
3067 	      dump_insn_slim (asm_out_file, insn);
3068 	    print_rtx_head = "";
3069 	  }
3070 
3071 	if (! constrain_operands_cached (insn, 1))
3072 	  fatal_insn_not_found (insn);
3073 
3074 	/* Some target machines need to prescan each insn before
3075 	   it is output.  */
3076 
3077 #ifdef FINAL_PRESCAN_INSN
3078 	FINAL_PRESCAN_INSN (insn, recog_data.operand, recog_data.n_operands);
3079 #endif
3080 
3081 	if (targetm.have_conditional_execution ()
3082 	    && GET_CODE (PATTERN (insn)) == COND_EXEC)
3083 	  current_insn_predicate = COND_EXEC_TEST (PATTERN (insn));
3084 
3085 #if HAVE_cc0
3086 	cc_prev_status = cc_status;
3087 
3088 	/* Update `cc_status' for this instruction.
3089 	   The instruction's output routine may change it further.
3090 	   If the output routine for a jump insn needs to depend
3091 	   on the cc status, it should look at cc_prev_status.  */
3092 
3093 	NOTICE_UPDATE_CC (body, insn);
3094 #endif
3095 
3096 	current_output_insn = debug_insn = insn;
3097 
3098 	/* Find the proper template for this insn.  */
3099 	templ = get_insn_template (insn_code_number, insn);
3100 
3101 	/* If the C code returns 0, it means that it is a jump insn
3102 	   which follows a deleted test insn, and that test insn
3103 	   needs to be reinserted.  */
3104 	if (templ == 0)
3105 	  {
3106 	    rtx_insn *prev;
3107 
3108 	    gcc_assert (prev_nonnote_insn (insn) == last_ignored_compare);
3109 
3110 	    /* We have already processed the notes between the setter and
3111 	       the user.  Make sure we don't process them again, this is
3112 	       particularly important if one of the notes is a block
3113 	       scope note or an EH note.  */
3114 	    for (prev = insn;
3115 		 prev != last_ignored_compare;
3116 		 prev = PREV_INSN (prev))
3117 	      {
3118 		if (NOTE_P (prev))
3119 		  delete_insn (prev);	/* Use delete_note.  */
3120 	      }
3121 
3122 	    return prev;
3123 	  }
3124 
3125 	/* If the template is the string "#", it means that this insn must
3126 	   be split.  */
3127 	if (templ[0] == '#' && templ[1] == '\0')
3128 	  {
3129 	    rtx_insn *new_rtx = try_split (body, insn, 0);
3130 
3131 	    /* If we didn't split the insn, go away.  */
3132 	    if (new_rtx == insn && PATTERN (new_rtx) == body)
3133 	      fatal_insn ("could not split insn", insn);
3134 
3135 	    /* If we have a length attribute, this instruction should have
3136 	       been split in shorten_branches, to ensure that we would have
3137 	       valid length info for the splitees.  */
3138 	    gcc_assert (!HAVE_ATTR_length);
3139 
3140 	    return new_rtx;
3141 	  }
3142 
3143 	/* ??? This will put the directives in the wrong place if
3144 	   get_insn_template outputs assembly directly.  However calling it
3145 	   before get_insn_template breaks if the insns is split.  */
3146 	if (targetm.asm_out.unwind_emit_before_insn
3147 	    && targetm.asm_out.unwind_emit)
3148 	  targetm.asm_out.unwind_emit (asm_out_file, insn);
3149 
3150 	rtx_call_insn *call_insn = dyn_cast <rtx_call_insn *> (insn);
3151 	if (call_insn != NULL)
3152 	  {
3153 	    rtx x = call_from_call_insn (call_insn);
3154 	    x = XEXP (x, 0);
3155 	    if (x && MEM_P (x) && GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
3156 	      {
3157 		tree t;
3158 		x = XEXP (x, 0);
3159 		t = SYMBOL_REF_DECL (x);
3160 		if (t)
3161 		  assemble_external (t);
3162 	      }
3163 	  }
3164 
3165 	/* Output assembler code from the template.  */
3166 	output_asm_insn (templ, recog_data.operand);
3167 
3168 	/* Some target machines need to postscan each insn after
3169 	   it is output.  */
3170 	if (targetm.asm_out.final_postscan_insn)
3171 	  targetm.asm_out.final_postscan_insn (file, insn, recog_data.operand,
3172 					       recog_data.n_operands);
3173 
3174 	if (!targetm.asm_out.unwind_emit_before_insn
3175 	    && targetm.asm_out.unwind_emit)
3176 	  targetm.asm_out.unwind_emit (asm_out_file, insn);
3177 
3178 	/* Let the debug info back-end know about this call.  We do this only
3179 	   after the instruction has been emitted because labels that may be
3180 	   created to reference the call instruction must appear after it.  */
3181 	if (call_insn != NULL && !DECL_IGNORED_P (current_function_decl))
3182 	  debug_hooks->var_location (insn);
3183 
3184 	current_output_insn = debug_insn = 0;
3185       }
3186     }
3187   return NEXT_INSN (insn);
3188 }
3189 
3190 /* Return whether a source line note needs to be emitted before INSN.
3191    Sets IS_STMT to TRUE if the line should be marked as a possible
3192    breakpoint location.  */
3193 
3194 static bool
3195 notice_source_line (rtx_insn *insn, bool *is_stmt)
3196 {
3197   const char *filename;
3198   int linenum, columnnum;
3199 
3200   if (override_filename)
3201     {
3202       filename = override_filename;
3203       linenum = override_linenum;
3204       columnnum = override_columnnum;
3205     }
3206   else if (INSN_HAS_LOCATION (insn))
3207     {
3208       expanded_location xloc = insn_location (insn);
3209       filename = xloc.file;
3210       linenum = xloc.line;
3211       columnnum = xloc.column;
3212     }
3213   else
3214     {
3215       filename = NULL;
3216       linenum = 0;
3217       columnnum = 0;
3218     }
3219 
3220   if (filename == NULL)
3221     return false;
3222 
3223   if (force_source_line
3224       || filename != last_filename
3225       || last_linenum != linenum
3226       || (debug_column_info && last_columnnum != columnnum))
3227     {
3228       force_source_line = false;
3229       last_filename = filename;
3230       last_linenum = linenum;
3231       last_columnnum = columnnum;
3232       last_discriminator = discriminator;
3233       *is_stmt = true;
3234       high_block_linenum = MAX (last_linenum, high_block_linenum);
3235       high_function_linenum = MAX (last_linenum, high_function_linenum);
3236       return true;
3237     }
3238 
3239   if (SUPPORTS_DISCRIMINATOR && last_discriminator != discriminator)
3240     {
3241       /* If the discriminator changed, but the line number did not,
3242          output the line table entry with is_stmt false so the
3243          debugger does not treat this as a breakpoint location.  */
3244       last_discriminator = discriminator;
3245       *is_stmt = false;
3246       return true;
3247     }
3248 
3249   return false;
3250 }
3251 
3252 /* For each operand in INSN, simplify (subreg (reg)) so that it refers
3253    directly to the desired hard register.  */
3254 
3255 void
3256 cleanup_subreg_operands (rtx_insn *insn)
3257 {
3258   int i;
3259   bool changed = false;
3260   extract_insn_cached (insn);
3261   for (i = 0; i < recog_data.n_operands; i++)
3262     {
3263       /* The following test cannot use recog_data.operand when testing
3264 	 for a SUBREG: the underlying object might have been changed
3265 	 already if we are inside a match_operator expression that
3266 	 matches the else clause.  Instead we test the underlying
3267 	 expression directly.  */
3268       if (GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
3269 	{
3270 	  recog_data.operand[i] = alter_subreg (recog_data.operand_loc[i], true);
3271 	  changed = true;
3272 	}
3273       else if (GET_CODE (recog_data.operand[i]) == PLUS
3274 	       || GET_CODE (recog_data.operand[i]) == MULT
3275 	       || MEM_P (recog_data.operand[i]))
3276 	recog_data.operand[i] = walk_alter_subreg (recog_data.operand_loc[i], &changed);
3277     }
3278 
3279   for (i = 0; i < recog_data.n_dups; i++)
3280     {
3281       if (GET_CODE (*recog_data.dup_loc[i]) == SUBREG)
3282 	{
3283 	  *recog_data.dup_loc[i] = alter_subreg (recog_data.dup_loc[i], true);
3284 	  changed = true;
3285 	}
3286       else if (GET_CODE (*recog_data.dup_loc[i]) == PLUS
3287 	       || GET_CODE (*recog_data.dup_loc[i]) == MULT
3288 	       || MEM_P (*recog_data.dup_loc[i]))
3289 	*recog_data.dup_loc[i] = walk_alter_subreg (recog_data.dup_loc[i], &changed);
3290     }
3291   if (changed)
3292     df_insn_rescan (insn);
3293 }
3294 
3295 /* If X is a SUBREG, try to replace it with a REG or a MEM, based on
3296    the thing it is a subreg of.  Do it anyway if FINAL_P.  */
3297 
3298 rtx
3299 alter_subreg (rtx *xp, bool final_p)
3300 {
3301   rtx x = *xp;
3302   rtx y = SUBREG_REG (x);
3303 
3304   /* simplify_subreg does not remove subreg from volatile references.
3305      We are required to.  */
3306   if (MEM_P (y))
3307     {
3308       int offset = SUBREG_BYTE (x);
3309 
3310       /* For paradoxical subregs on big-endian machines, SUBREG_BYTE
3311 	 contains 0 instead of the proper offset.  See simplify_subreg.  */
3312       if (offset == 0
3313 	  && GET_MODE_SIZE (GET_MODE (y)) < GET_MODE_SIZE (GET_MODE (x)))
3314         {
3315           int difference = GET_MODE_SIZE (GET_MODE (y))
3316 			   - GET_MODE_SIZE (GET_MODE (x));
3317           if (WORDS_BIG_ENDIAN)
3318             offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3319           if (BYTES_BIG_ENDIAN)
3320             offset += difference % UNITS_PER_WORD;
3321         }
3322 
3323       if (final_p)
3324 	*xp = adjust_address (y, GET_MODE (x), offset);
3325       else
3326 	*xp = adjust_address_nv (y, GET_MODE (x), offset);
3327     }
3328   else if (REG_P (y) && HARD_REGISTER_P (y))
3329     {
3330       rtx new_rtx = simplify_subreg (GET_MODE (x), y, GET_MODE (y),
3331 				     SUBREG_BYTE (x));
3332 
3333       if (new_rtx != 0)
3334 	*xp = new_rtx;
3335       else if (final_p && REG_P (y))
3336 	{
3337 	  /* Simplify_subreg can't handle some REG cases, but we have to.  */
3338 	  unsigned int regno;
3339 	  HOST_WIDE_INT offset;
3340 
3341 	  regno = subreg_regno (x);
3342 	  if (subreg_lowpart_p (x))
3343 	    offset = byte_lowpart_offset (GET_MODE (x), GET_MODE (y));
3344 	  else
3345 	    offset = SUBREG_BYTE (x);
3346 	  *xp = gen_rtx_REG_offset (y, GET_MODE (x), regno, offset);
3347 	}
3348     }
3349 
3350   return *xp;
3351 }
3352 
3353 /* Do alter_subreg on all the SUBREGs contained in X.  */
3354 
3355 static rtx
3356 walk_alter_subreg (rtx *xp, bool *changed)
3357 {
3358   rtx x = *xp;
3359   switch (GET_CODE (x))
3360     {
3361     case PLUS:
3362     case MULT:
3363     case AND:
3364       XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0), changed);
3365       XEXP (x, 1) = walk_alter_subreg (&XEXP (x, 1), changed);
3366       break;
3367 
3368     case MEM:
3369     case ZERO_EXTEND:
3370       XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0), changed);
3371       break;
3372 
3373     case SUBREG:
3374       *changed = true;
3375       return alter_subreg (xp, true);
3376 
3377     default:
3378       break;
3379     }
3380 
3381   return *xp;
3382 }
3383 
3384 #if HAVE_cc0
3385 
3386 /* Given BODY, the body of a jump instruction, alter the jump condition
3387    as required by the bits that are set in cc_status.flags.
3388    Not all of the bits there can be handled at this level in all cases.
3389 
3390    The value is normally 0.
3391    1 means that the condition has become always true.
3392    -1 means that the condition has become always false.
3393    2 means that COND has been altered.  */
3394 
3395 static int
3396 alter_cond (rtx cond)
3397 {
3398   int value = 0;
3399 
3400   if (cc_status.flags & CC_REVERSED)
3401     {
3402       value = 2;
3403       PUT_CODE (cond, swap_condition (GET_CODE (cond)));
3404     }
3405 
3406   if (cc_status.flags & CC_INVERTED)
3407     {
3408       value = 2;
3409       PUT_CODE (cond, reverse_condition (GET_CODE (cond)));
3410     }
3411 
3412   if (cc_status.flags & CC_NOT_POSITIVE)
3413     switch (GET_CODE (cond))
3414       {
3415       case LE:
3416       case LEU:
3417       case GEU:
3418 	/* Jump becomes unconditional.  */
3419 	return 1;
3420 
3421       case GT:
3422       case GTU:
3423       case LTU:
3424 	/* Jump becomes no-op.  */
3425 	return -1;
3426 
3427       case GE:
3428 	PUT_CODE (cond, EQ);
3429 	value = 2;
3430 	break;
3431 
3432       case LT:
3433 	PUT_CODE (cond, NE);
3434 	value = 2;
3435 	break;
3436 
3437       default:
3438 	break;
3439       }
3440 
3441   if (cc_status.flags & CC_NOT_NEGATIVE)
3442     switch (GET_CODE (cond))
3443       {
3444       case GE:
3445       case GEU:
3446 	/* Jump becomes unconditional.  */
3447 	return 1;
3448 
3449       case LT:
3450       case LTU:
3451 	/* Jump becomes no-op.  */
3452 	return -1;
3453 
3454       case LE:
3455       case LEU:
3456 	PUT_CODE (cond, EQ);
3457 	value = 2;
3458 	break;
3459 
3460       case GT:
3461       case GTU:
3462 	PUT_CODE (cond, NE);
3463 	value = 2;
3464 	break;
3465 
3466       default:
3467 	break;
3468       }
3469 
3470   if (cc_status.flags & CC_NO_OVERFLOW)
3471     switch (GET_CODE (cond))
3472       {
3473       case GEU:
3474 	/* Jump becomes unconditional.  */
3475 	return 1;
3476 
3477       case LEU:
3478 	PUT_CODE (cond, EQ);
3479 	value = 2;
3480 	break;
3481 
3482       case GTU:
3483 	PUT_CODE (cond, NE);
3484 	value = 2;
3485 	break;
3486 
3487       case LTU:
3488 	/* Jump becomes no-op.  */
3489 	return -1;
3490 
3491       default:
3492 	break;
3493       }
3494 
3495   if (cc_status.flags & (CC_Z_IN_NOT_N | CC_Z_IN_N))
3496     switch (GET_CODE (cond))
3497       {
3498       default:
3499 	gcc_unreachable ();
3500 
3501       case NE:
3502 	PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? GE : LT);
3503 	value = 2;
3504 	break;
3505 
3506       case EQ:
3507 	PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? LT : GE);
3508 	value = 2;
3509 	break;
3510       }
3511 
3512   if (cc_status.flags & CC_NOT_SIGNED)
3513     /* The flags are valid if signed condition operators are converted
3514        to unsigned.  */
3515     switch (GET_CODE (cond))
3516       {
3517       case LE:
3518 	PUT_CODE (cond, LEU);
3519 	value = 2;
3520 	break;
3521 
3522       case LT:
3523 	PUT_CODE (cond, LTU);
3524 	value = 2;
3525 	break;
3526 
3527       case GT:
3528 	PUT_CODE (cond, GTU);
3529 	value = 2;
3530 	break;
3531 
3532       case GE:
3533 	PUT_CODE (cond, GEU);
3534 	value = 2;
3535 	break;
3536 
3537       default:
3538 	break;
3539       }
3540 
3541   return value;
3542 }
3543 #endif
3544 
3545 /* Report inconsistency between the assembler template and the operands.
3546    In an `asm', it's the user's fault; otherwise, the compiler's fault.  */
3547 
3548 void
3549 output_operand_lossage (const char *cmsgid, ...)
3550 {
3551   char *fmt_string;
3552   char *new_message;
3553   const char *pfx_str;
3554   va_list ap;
3555 
3556   va_start (ap, cmsgid);
3557 
3558   pfx_str = this_is_asm_operands ? _("invalid 'asm': ") : "output_operand: ";
3559   fmt_string = xasprintf ("%s%s", pfx_str, _(cmsgid));
3560   new_message = xvasprintf (fmt_string, ap);
3561 
3562   if (this_is_asm_operands)
3563     error_for_asm (this_is_asm_operands, "%s", new_message);
3564   else
3565     internal_error ("%s", new_message);
3566 
3567   free (fmt_string);
3568   free (new_message);
3569   va_end (ap);
3570 }
3571 
3572 /* Output of assembler code from a template, and its subroutines.  */
3573 
3574 /* Annotate the assembly with a comment describing the pattern and
3575    alternative used.  */
3576 
3577 static void
3578 output_asm_name (void)
3579 {
3580   if (debug_insn)
3581     {
3582       int num = INSN_CODE (debug_insn);
3583       fprintf (asm_out_file, "\t%s %d\t%s",
3584 	       ASM_COMMENT_START, INSN_UID (debug_insn),
3585 	       insn_data[num].name);
3586       if (insn_data[num].n_alternatives > 1)
3587 	fprintf (asm_out_file, "/%d", which_alternative + 1);
3588 
3589       if (HAVE_ATTR_length)
3590 	fprintf (asm_out_file, "\t[length = %d]",
3591 		 get_attr_length (debug_insn));
3592 
3593       /* Clear this so only the first assembler insn
3594 	 of any rtl insn will get the special comment for -dp.  */
3595       debug_insn = 0;
3596     }
3597 }
3598 
3599 /* If OP is a REG or MEM and we can find a MEM_EXPR corresponding to it
3600    or its address, return that expr .  Set *PADDRESSP to 1 if the expr
3601    corresponds to the address of the object and 0 if to the object.  */
3602 
3603 static tree
3604 get_mem_expr_from_op (rtx op, int *paddressp)
3605 {
3606   tree expr;
3607   int inner_addressp;
3608 
3609   *paddressp = 0;
3610 
3611   if (REG_P (op))
3612     return REG_EXPR (op);
3613   else if (!MEM_P (op))
3614     return 0;
3615 
3616   if (MEM_EXPR (op) != 0)
3617     return MEM_EXPR (op);
3618 
3619   /* Otherwise we have an address, so indicate it and look at the address.  */
3620   *paddressp = 1;
3621   op = XEXP (op, 0);
3622 
3623   /* First check if we have a decl for the address, then look at the right side
3624      if it is a PLUS.  Otherwise, strip off arithmetic and keep looking.
3625      But don't allow the address to itself be indirect.  */
3626   if ((expr = get_mem_expr_from_op (op, &inner_addressp)) && ! inner_addressp)
3627     return expr;
3628   else if (GET_CODE (op) == PLUS
3629 	   && (expr = get_mem_expr_from_op (XEXP (op, 1), &inner_addressp)))
3630     return expr;
3631 
3632   while (UNARY_P (op)
3633 	 || GET_RTX_CLASS (GET_CODE (op)) == RTX_BIN_ARITH)
3634     op = XEXP (op, 0);
3635 
3636   expr = get_mem_expr_from_op (op, &inner_addressp);
3637   return inner_addressp ? 0 : expr;
3638 }
3639 
3640 /* Output operand names for assembler instructions.  OPERANDS is the
3641    operand vector, OPORDER is the order to write the operands, and NOPS
3642    is the number of operands to write.  */
3643 
3644 static void
3645 output_asm_operand_names (rtx *operands, int *oporder, int nops)
3646 {
3647   int wrote = 0;
3648   int i;
3649 
3650   for (i = 0; i < nops; i++)
3651     {
3652       int addressp;
3653       rtx op = operands[oporder[i]];
3654       tree expr = get_mem_expr_from_op (op, &addressp);
3655 
3656       fprintf (asm_out_file, "%c%s",
3657 	       wrote ? ',' : '\t', wrote ? "" : ASM_COMMENT_START);
3658       wrote = 1;
3659       if (expr)
3660 	{
3661 	  fprintf (asm_out_file, "%s",
3662 		   addressp ? "*" : "");
3663 	  print_mem_expr (asm_out_file, expr);
3664 	  wrote = 1;
3665 	}
3666       else if (REG_P (op) && ORIGINAL_REGNO (op)
3667 	       && ORIGINAL_REGNO (op) != REGNO (op))
3668 	fprintf (asm_out_file, " tmp%i", ORIGINAL_REGNO (op));
3669     }
3670 }
3671 
3672 #ifdef ASSEMBLER_DIALECT
3673 /* Helper function to parse assembler dialects in the asm string.
3674    This is called from output_asm_insn and asm_fprintf.  */
3675 static const char *
3676 do_assembler_dialects (const char *p, int *dialect)
3677 {
3678   char c = *(p - 1);
3679 
3680   switch (c)
3681     {
3682     case '{':
3683       {
3684         int i;
3685 
3686         if (*dialect)
3687           output_operand_lossage ("nested assembly dialect alternatives");
3688         else
3689           *dialect = 1;
3690 
3691         /* If we want the first dialect, do nothing.  Otherwise, skip
3692            DIALECT_NUMBER of strings ending with '|'.  */
3693         for (i = 0; i < dialect_number; i++)
3694           {
3695             while (*p && *p != '}')
3696 	      {
3697 		if (*p == '|')
3698 		  {
3699 		    p++;
3700 		    break;
3701 		  }
3702 
3703 		/* Skip over any character after a percent sign.  */
3704 		if (*p == '%')
3705 		  p++;
3706 		if (*p)
3707 		  p++;
3708 	      }
3709 
3710             if (*p == '}')
3711 	      break;
3712           }
3713 
3714         if (*p == '\0')
3715           output_operand_lossage ("unterminated assembly dialect alternative");
3716       }
3717       break;
3718 
3719     case '|':
3720       if (*dialect)
3721         {
3722           /* Skip to close brace.  */
3723           do
3724             {
3725 	      if (*p == '\0')
3726 		{
3727 		  output_operand_lossage ("unterminated assembly dialect alternative");
3728 		  break;
3729 		}
3730 
3731 	      /* Skip over any character after a percent sign.  */
3732 	      if (*p == '%' && p[1])
3733 		{
3734 		  p += 2;
3735 		  continue;
3736 		}
3737 
3738 	      if (*p++ == '}')
3739 		break;
3740             }
3741           while (1);
3742 
3743           *dialect = 0;
3744         }
3745       else
3746         putc (c, asm_out_file);
3747       break;
3748 
3749     case '}':
3750       if (! *dialect)
3751         putc (c, asm_out_file);
3752       *dialect = 0;
3753       break;
3754     default:
3755       gcc_unreachable ();
3756     }
3757 
3758   return p;
3759 }
3760 #endif
3761 
3762 /* Output text from TEMPLATE to the assembler output file,
3763    obeying %-directions to substitute operands taken from
3764    the vector OPERANDS.
3765 
3766    %N (for N a digit) means print operand N in usual manner.
3767    %lN means require operand N to be a CODE_LABEL or LABEL_REF
3768       and print the label name with no punctuation.
3769    %cN means require operand N to be a constant
3770       and print the constant expression with no punctuation.
3771    %aN means expect operand N to be a memory address
3772       (not a memory reference!) and print a reference
3773       to that address.
3774    %nN means expect operand N to be a constant
3775       and print a constant expression for minus the value
3776       of the operand, with no other punctuation.  */
3777 
3778 void
3779 output_asm_insn (const char *templ, rtx *operands)
3780 {
3781   const char *p;
3782   int c;
3783 #ifdef ASSEMBLER_DIALECT
3784   int dialect = 0;
3785 #endif
3786   int oporder[MAX_RECOG_OPERANDS];
3787   char opoutput[MAX_RECOG_OPERANDS];
3788   int ops = 0;
3789 
3790   /* An insn may return a null string template
3791      in a case where no assembler code is needed.  */
3792   if (*templ == 0)
3793     return;
3794 
3795   memset (opoutput, 0, sizeof opoutput);
3796   p = templ;
3797   putc ('\t', asm_out_file);
3798 
3799 #ifdef ASM_OUTPUT_OPCODE
3800   ASM_OUTPUT_OPCODE (asm_out_file, p);
3801 #endif
3802 
3803   while ((c = *p++))
3804     switch (c)
3805       {
3806       case '\n':
3807 	if (flag_verbose_asm)
3808 	  output_asm_operand_names (operands, oporder, ops);
3809 	if (flag_print_asm_name)
3810 	  output_asm_name ();
3811 
3812 	ops = 0;
3813 	memset (opoutput, 0, sizeof opoutput);
3814 
3815 	putc (c, asm_out_file);
3816 #ifdef ASM_OUTPUT_OPCODE
3817 	while ((c = *p) == '\t')
3818 	  {
3819 	    putc (c, asm_out_file);
3820 	    p++;
3821 	  }
3822 	ASM_OUTPUT_OPCODE (asm_out_file, p);
3823 #endif
3824 	break;
3825 
3826 #ifdef ASSEMBLER_DIALECT
3827       case '{':
3828       case '}':
3829       case '|':
3830 	p = do_assembler_dialects (p, &dialect);
3831 	break;
3832 #endif
3833 
3834       case '%':
3835 	/* %% outputs a single %.  %{, %} and %| print {, } and | respectively
3836 	   if ASSEMBLER_DIALECT defined and these characters have a special
3837 	   meaning as dialect delimiters.*/
3838 	if (*p == '%'
3839 #ifdef ASSEMBLER_DIALECT
3840 	    || *p == '{' || *p == '}' || *p == '|'
3841 #endif
3842 	    )
3843 	  {
3844 	    putc (*p, asm_out_file);
3845 	    p++;
3846 	  }
3847 	/* %= outputs a number which is unique to each insn in the entire
3848 	   compilation.  This is useful for making local labels that are
3849 	   referred to more than once in a given insn.  */
3850 	else if (*p == '=')
3851 	  {
3852 	    p++;
3853 	    fprintf (asm_out_file, "%d", insn_counter);
3854 	  }
3855 	/* % followed by a letter and some digits
3856 	   outputs an operand in a special way depending on the letter.
3857 	   Letters `acln' are implemented directly.
3858 	   Other letters are passed to `output_operand' so that
3859 	   the TARGET_PRINT_OPERAND hook can define them.  */
3860 	else if (ISALPHA (*p))
3861 	  {
3862 	    int letter = *p++;
3863 	    unsigned long opnum;
3864 	    char *endptr;
3865 
3866 	    opnum = strtoul (p, &endptr, 10);
3867 
3868 	    if (endptr == p)
3869 	      output_operand_lossage ("operand number missing "
3870 				      "after %%-letter");
3871 	    else if (this_is_asm_operands && opnum >= insn_noperands)
3872 	      output_operand_lossage ("operand number out of range");
3873 	    else if (letter == 'l')
3874 	      output_asm_label (operands[opnum]);
3875 	    else if (letter == 'a')
3876 	      output_address (VOIDmode, operands[opnum]);
3877 	    else if (letter == 'c')
3878 	      {
3879 		if (CONSTANT_ADDRESS_P (operands[opnum]))
3880 		  output_addr_const (asm_out_file, operands[opnum]);
3881 		else
3882 		  output_operand (operands[opnum], 'c');
3883 	      }
3884 	    else if (letter == 'n')
3885 	      {
3886 		if (CONST_INT_P (operands[opnum]))
3887 		  fprintf (asm_out_file, HOST_WIDE_INT_PRINT_DEC,
3888 			   - INTVAL (operands[opnum]));
3889 		else
3890 		  {
3891 		    putc ('-', asm_out_file);
3892 		    output_addr_const (asm_out_file, operands[opnum]);
3893 		  }
3894 	      }
3895 	    else
3896 	      output_operand (operands[opnum], letter);
3897 
3898 	    if (!opoutput[opnum])
3899 	      oporder[ops++] = opnum;
3900 	    opoutput[opnum] = 1;
3901 
3902 	    p = endptr;
3903 	    c = *p;
3904 	  }
3905 	/* % followed by a digit outputs an operand the default way.  */
3906 	else if (ISDIGIT (*p))
3907 	  {
3908 	    unsigned long opnum;
3909 	    char *endptr;
3910 
3911 	    opnum = strtoul (p, &endptr, 10);
3912 	    if (this_is_asm_operands && opnum >= insn_noperands)
3913 	      output_operand_lossage ("operand number out of range");
3914 	    else
3915 	      output_operand (operands[opnum], 0);
3916 
3917 	    if (!opoutput[opnum])
3918 	      oporder[ops++] = opnum;
3919 	    opoutput[opnum] = 1;
3920 
3921 	    p = endptr;
3922 	    c = *p;
3923 	  }
3924 	/* % followed by punctuation: output something for that
3925 	   punctuation character alone, with no operand.  The
3926 	   TARGET_PRINT_OPERAND hook decides what is actually done.  */
3927 	else if (targetm.asm_out.print_operand_punct_valid_p ((unsigned char) *p))
3928 	  output_operand (NULL_RTX, *p++);
3929 	else
3930 	  output_operand_lossage ("invalid %%-code");
3931 	break;
3932 
3933       default:
3934 	putc (c, asm_out_file);
3935       }
3936 
3937   /* Write out the variable names for operands, if we know them.  */
3938   if (flag_verbose_asm)
3939     output_asm_operand_names (operands, oporder, ops);
3940   if (flag_print_asm_name)
3941     output_asm_name ();
3942 
3943   putc ('\n', asm_out_file);
3944 }
3945 
3946 /* Output a LABEL_REF, or a bare CODE_LABEL, as an assembler symbol.  */
3947 
3948 void
3949 output_asm_label (rtx x)
3950 {
3951   char buf[256];
3952 
3953   if (GET_CODE (x) == LABEL_REF)
3954     x = label_ref_label (x);
3955   if (LABEL_P (x)
3956       || (NOTE_P (x)
3957 	  && NOTE_KIND (x) == NOTE_INSN_DELETED_LABEL))
3958     ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3959   else
3960     output_operand_lossage ("'%%l' operand isn't a label");
3961 
3962   assemble_name (asm_out_file, buf);
3963 }
3964 
3965 /* Marks SYMBOL_REFs in x as referenced through use of assemble_external.  */
3966 
3967 void
3968 mark_symbol_refs_as_used (rtx x)
3969 {
3970   subrtx_iterator::array_type array;
3971   FOR_EACH_SUBRTX (iter, array, x, ALL)
3972     {
3973       const_rtx x = *iter;
3974       if (GET_CODE (x) == SYMBOL_REF)
3975 	if (tree t = SYMBOL_REF_DECL (x))
3976 	  assemble_external (t);
3977     }
3978 }
3979 
3980 /* Print operand X using machine-dependent assembler syntax.
3981    CODE is a non-digit that preceded the operand-number in the % spec,
3982    such as 'z' if the spec was `%z3'.  CODE is 0 if there was no char
3983    between the % and the digits.
3984    When CODE is a non-letter, X is 0.
3985 
3986    The meanings of the letters are machine-dependent and controlled
3987    by TARGET_PRINT_OPERAND.  */
3988 
3989 void
3990 output_operand (rtx x, int code ATTRIBUTE_UNUSED)
3991 {
3992   if (x && GET_CODE (x) == SUBREG)
3993     x = alter_subreg (&x, true);
3994 
3995   /* X must not be a pseudo reg.  */
3996   if (!targetm.no_register_allocation)
3997     gcc_assert (!x || !REG_P (x) || REGNO (x) < FIRST_PSEUDO_REGISTER);
3998 
3999   targetm.asm_out.print_operand (asm_out_file, x, code);
4000 
4001   if (x == NULL_RTX)
4002     return;
4003 
4004   mark_symbol_refs_as_used (x);
4005 }
4006 
4007 /* Print a memory reference operand for address X using
4008    machine-dependent assembler syntax.  */
4009 
4010 void
4011 output_address (machine_mode mode, rtx x)
4012 {
4013   bool changed = false;
4014   walk_alter_subreg (&x, &changed);
4015   targetm.asm_out.print_operand_address (asm_out_file, mode, x);
4016 }
4017 
4018 /* Print an integer constant expression in assembler syntax.
4019    Addition and subtraction are the only arithmetic
4020    that may appear in these expressions.  */
4021 
4022 void
4023 output_addr_const (FILE *file, rtx x)
4024 {
4025   char buf[256];
4026 
4027  restart:
4028   switch (GET_CODE (x))
4029     {
4030     case PC:
4031       putc ('.', file);
4032       break;
4033 
4034     case SYMBOL_REF:
4035       if (SYMBOL_REF_DECL (x))
4036 	assemble_external (SYMBOL_REF_DECL (x));
4037 #ifdef ASM_OUTPUT_SYMBOL_REF
4038       ASM_OUTPUT_SYMBOL_REF (file, x);
4039 #else
4040       assemble_name (file, XSTR (x, 0));
4041 #endif
4042       break;
4043 
4044     case LABEL_REF:
4045       x = label_ref_label (x);
4046       /* Fall through.  */
4047     case CODE_LABEL:
4048       ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
4049 #ifdef ASM_OUTPUT_LABEL_REF
4050       ASM_OUTPUT_LABEL_REF (file, buf);
4051 #else
4052       assemble_name (file, buf);
4053 #endif
4054       break;
4055 
4056     case CONST_INT:
4057       fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
4058       break;
4059 
4060     case CONST:
4061       /* This used to output parentheses around the expression,
4062 	 but that does not work on the 386 (either ATT or BSD assembler).  */
4063       output_addr_const (file, XEXP (x, 0));
4064       break;
4065 
4066     case CONST_WIDE_INT:
4067       /* We do not know the mode here so we have to use a round about
4068 	 way to build a wide-int to get it printed properly.  */
4069       {
4070 	wide_int w = wide_int::from_array (&CONST_WIDE_INT_ELT (x, 0),
4071 					   CONST_WIDE_INT_NUNITS (x),
4072 					   CONST_WIDE_INT_NUNITS (x)
4073 					   * HOST_BITS_PER_WIDE_INT,
4074 					   false);
4075 	print_decs (w, file);
4076       }
4077       break;
4078 
4079     case CONST_DOUBLE:
4080       if (CONST_DOUBLE_AS_INT_P (x))
4081 	{
4082 	  /* We can use %d if the number is one word and positive.  */
4083 	  if (CONST_DOUBLE_HIGH (x))
4084 	    fprintf (file, HOST_WIDE_INT_PRINT_DOUBLE_HEX,
4085 		     (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (x),
4086 		     (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x));
4087 	  else if (CONST_DOUBLE_LOW (x) < 0)
4088 	    fprintf (file, HOST_WIDE_INT_PRINT_HEX,
4089 		     (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x));
4090 	  else
4091 	    fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x));
4092 	}
4093       else
4094 	/* We can't handle floating point constants;
4095 	   PRINT_OPERAND must handle them.  */
4096 	output_operand_lossage ("floating constant misused");
4097       break;
4098 
4099     case CONST_FIXED:
4100       fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_FIXED_VALUE_LOW (x));
4101       break;
4102 
4103     case PLUS:
4104       /* Some assemblers need integer constants to appear last (eg masm).  */
4105       if (CONST_INT_P (XEXP (x, 0)))
4106 	{
4107 	  output_addr_const (file, XEXP (x, 1));
4108 	  if (INTVAL (XEXP (x, 0)) >= 0)
4109 	    fprintf (file, "+");
4110 	  output_addr_const (file, XEXP (x, 0));
4111 	}
4112       else
4113 	{
4114 	  output_addr_const (file, XEXP (x, 0));
4115 	  if (!CONST_INT_P (XEXP (x, 1))
4116 	      || INTVAL (XEXP (x, 1)) >= 0)
4117 	    fprintf (file, "+");
4118 	  output_addr_const (file, XEXP (x, 1));
4119 	}
4120       break;
4121 
4122     case MINUS:
4123       /* Avoid outputting things like x-x or x+5-x,
4124 	 since some assemblers can't handle that.  */
4125       x = simplify_subtraction (x);
4126       if (GET_CODE (x) != MINUS)
4127 	goto restart;
4128 
4129       output_addr_const (file, XEXP (x, 0));
4130       fprintf (file, "-");
4131       if ((CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) >= 0)
4132 	  || GET_CODE (XEXP (x, 1)) == PC
4133 	  || GET_CODE (XEXP (x, 1)) == SYMBOL_REF)
4134 	output_addr_const (file, XEXP (x, 1));
4135       else
4136 	{
4137 	  fputs (targetm.asm_out.open_paren, file);
4138 	  output_addr_const (file, XEXP (x, 1));
4139 	  fputs (targetm.asm_out.close_paren, file);
4140 	}
4141       break;
4142 
4143     case ZERO_EXTEND:
4144     case SIGN_EXTEND:
4145     case SUBREG:
4146     case TRUNCATE:
4147       output_addr_const (file, XEXP (x, 0));
4148       break;
4149 
4150     default:
4151       if (targetm.asm_out.output_addr_const_extra (file, x))
4152 	break;
4153 
4154       output_operand_lossage ("invalid expression as operand");
4155     }
4156 }
4157 
4158 /* Output a quoted string.  */
4159 
4160 void
4161 output_quoted_string (FILE *asm_file, const char *string)
4162 {
4163 #ifdef OUTPUT_QUOTED_STRING
4164   OUTPUT_QUOTED_STRING (asm_file, string);
4165 #else
4166   char c;
4167 
4168   putc ('\"', asm_file);
4169   while ((c = *string++) != 0)
4170     {
4171       if (ISPRINT (c))
4172 	{
4173 	  if (c == '\"' || c == '\\')
4174 	    putc ('\\', asm_file);
4175 	  putc (c, asm_file);
4176 	}
4177       else
4178 	fprintf (asm_file, "\\%03o", (unsigned char) c);
4179     }
4180   putc ('\"', asm_file);
4181 #endif
4182 }
4183 
4184 /* Write a HOST_WIDE_INT number in hex form 0x1234, fast. */
4185 
4186 void
4187 fprint_whex (FILE *f, unsigned HOST_WIDE_INT value)
4188 {
4189   char buf[2 + CHAR_BIT * sizeof (value) / 4];
4190   if (value == 0)
4191     putc ('0', f);
4192   else
4193     {
4194       char *p = buf + sizeof (buf);
4195       do
4196         *--p = "0123456789abcdef"[value % 16];
4197       while ((value /= 16) != 0);
4198       *--p = 'x';
4199       *--p = '0';
4200       fwrite (p, 1, buf + sizeof (buf) - p, f);
4201     }
4202 }
4203 
4204 /* Internal function that prints an unsigned long in decimal in reverse.
4205    The output string IS NOT null-terminated. */
4206 
4207 static int
4208 sprint_ul_rev (char *s, unsigned long value)
4209 {
4210   int i = 0;
4211   do
4212     {
4213       s[i] = "0123456789"[value % 10];
4214       value /= 10;
4215       i++;
4216       /* alternate version, without modulo */
4217       /* oldval = value; */
4218       /* value /= 10; */
4219       /* s[i] = "0123456789" [oldval - 10*value]; */
4220       /* i++ */
4221     }
4222   while (value != 0);
4223   return i;
4224 }
4225 
4226 /* Write an unsigned long as decimal to a file, fast. */
4227 
4228 void
4229 fprint_ul (FILE *f, unsigned long value)
4230 {
4231   /* python says: len(str(2**64)) == 20 */
4232   char s[20];
4233   int i;
4234 
4235   i = sprint_ul_rev (s, value);
4236 
4237   /* It's probably too small to bother with string reversal and fputs. */
4238   do
4239     {
4240       i--;
4241       putc (s[i], f);
4242     }
4243   while (i != 0);
4244 }
4245 
4246 /* Write an unsigned long as decimal to a string, fast.
4247    s must be wide enough to not overflow, at least 21 chars.
4248    Returns the length of the string (without terminating '\0'). */
4249 
4250 int
4251 sprint_ul (char *s, unsigned long value)
4252 {
4253   int len = sprint_ul_rev (s, value);
4254   s[len] = '\0';
4255 
4256   std::reverse (s, s + len);
4257   return len;
4258 }
4259 
4260 /* A poor man's fprintf, with the added features of %I, %R, %L, and %U.
4261    %R prints the value of REGISTER_PREFIX.
4262    %L prints the value of LOCAL_LABEL_PREFIX.
4263    %U prints the value of USER_LABEL_PREFIX.
4264    %I prints the value of IMMEDIATE_PREFIX.
4265    %O runs ASM_OUTPUT_OPCODE to transform what follows in the string.
4266    Also supported are %d, %i, %u, %x, %X, %o, %c, %s and %%.
4267 
4268    We handle alternate assembler dialects here, just like output_asm_insn.  */
4269 
4270 void
4271 asm_fprintf (FILE *file, const char *p, ...)
4272 {
4273   char buf[10];
4274   char *q, c;
4275 #ifdef ASSEMBLER_DIALECT
4276   int dialect = 0;
4277 #endif
4278   va_list argptr;
4279 
4280   va_start (argptr, p);
4281 
4282   buf[0] = '%';
4283 
4284   while ((c = *p++))
4285     switch (c)
4286       {
4287 #ifdef ASSEMBLER_DIALECT
4288       case '{':
4289       case '}':
4290       case '|':
4291 	p = do_assembler_dialects (p, &dialect);
4292 	break;
4293 #endif
4294 
4295       case '%':
4296 	c = *p++;
4297 	q = &buf[1];
4298 	while (strchr ("-+ #0", c))
4299 	  {
4300 	    *q++ = c;
4301 	    c = *p++;
4302 	  }
4303 	while (ISDIGIT (c) || c == '.')
4304 	  {
4305 	    *q++ = c;
4306 	    c = *p++;
4307 	  }
4308 	switch (c)
4309 	  {
4310 	  case '%':
4311 	    putc ('%', file);
4312 	    break;
4313 
4314 	  case 'd':  case 'i':  case 'u':
4315 	  case 'x':  case 'X':  case 'o':
4316 	  case 'c':
4317 	    *q++ = c;
4318 	    *q = 0;
4319 	    fprintf (file, buf, va_arg (argptr, int));
4320 	    break;
4321 
4322 	  case 'w':
4323 	    /* This is a prefix to the 'd', 'i', 'u', 'x', 'X', and
4324 	       'o' cases, but we do not check for those cases.  It
4325 	       means that the value is a HOST_WIDE_INT, which may be
4326 	       either `long' or `long long'.  */
4327 	    memcpy (q, HOST_WIDE_INT_PRINT, strlen (HOST_WIDE_INT_PRINT));
4328 	    q += strlen (HOST_WIDE_INT_PRINT);
4329 	    *q++ = *p++;
4330 	    *q = 0;
4331 	    fprintf (file, buf, va_arg (argptr, HOST_WIDE_INT));
4332 	    break;
4333 
4334 	  case 'l':
4335 	    *q++ = c;
4336 #ifdef HAVE_LONG_LONG
4337 	    if (*p == 'l')
4338 	      {
4339 		*q++ = *p++;
4340 		*q++ = *p++;
4341 		*q = 0;
4342 		fprintf (file, buf, va_arg (argptr, long long));
4343 	      }
4344 	    else
4345 #endif
4346 	      {
4347 		*q++ = *p++;
4348 		*q = 0;
4349 		fprintf (file, buf, va_arg (argptr, long));
4350 	      }
4351 
4352 	    break;
4353 
4354 	  case 's':
4355 	    *q++ = c;
4356 	    *q = 0;
4357 	    fprintf (file, buf, va_arg (argptr, char *));
4358 	    break;
4359 
4360 	  case 'O':
4361 #ifdef ASM_OUTPUT_OPCODE
4362 	    ASM_OUTPUT_OPCODE (asm_out_file, p);
4363 #endif
4364 	    break;
4365 
4366 	  case 'R':
4367 #ifdef REGISTER_PREFIX
4368 	    fprintf (file, "%s", REGISTER_PREFIX);
4369 #endif
4370 	    break;
4371 
4372 	  case 'I':
4373 #ifdef IMMEDIATE_PREFIX
4374 	    fprintf (file, "%s", IMMEDIATE_PREFIX);
4375 #endif
4376 	    break;
4377 
4378 	  case 'L':
4379 #ifdef LOCAL_LABEL_PREFIX
4380 	    fprintf (file, "%s", LOCAL_LABEL_PREFIX);
4381 #endif
4382 	    break;
4383 
4384 	  case 'U':
4385 	    fputs (user_label_prefix, file);
4386 	    break;
4387 
4388 #ifdef ASM_FPRINTF_EXTENSIONS
4389 	    /* Uppercase letters are reserved for general use by asm_fprintf
4390 	       and so are not available to target specific code.  In order to
4391 	       prevent the ASM_FPRINTF_EXTENSIONS macro from using them then,
4392 	       they are defined here.  As they get turned into real extensions
4393 	       to asm_fprintf they should be removed from this list.  */
4394 	  case 'A': case 'B': case 'C': case 'D': case 'E':
4395 	  case 'F': case 'G': case 'H': case 'J': case 'K':
4396 	  case 'M': case 'N': case 'P': case 'Q': case 'S':
4397 	  case 'T': case 'V': case 'W': case 'Y': case 'Z':
4398 	    break;
4399 
4400 	  ASM_FPRINTF_EXTENSIONS (file, argptr, p)
4401 #endif
4402 	  default:
4403 	    gcc_unreachable ();
4404 	  }
4405 	break;
4406 
4407       default:
4408 	putc (c, file);
4409       }
4410   va_end (argptr);
4411 }
4412 
4413 /* Return nonzero if this function has no function calls.  */
4414 
4415 int
4416 leaf_function_p (void)
4417 {
4418   rtx_insn *insn;
4419 
4420   /* Some back-ends (e.g. s390) want leaf functions to stay leaf
4421      functions even if they call mcount.  */
4422   if (crtl->profile && !targetm.keep_leaf_when_profiled ())
4423     return 0;
4424 
4425   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4426     {
4427       if (CALL_P (insn)
4428 	  && ! SIBLING_CALL_P (insn))
4429 	return 0;
4430       if (NONJUMP_INSN_P (insn)
4431 	  && GET_CODE (PATTERN (insn)) == SEQUENCE
4432 	  && CALL_P (XVECEXP (PATTERN (insn), 0, 0))
4433 	  && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
4434 	return 0;
4435     }
4436 
4437   return 1;
4438 }
4439 
4440 /* Return 1 if branch is a forward branch.
4441    Uses insn_shuid array, so it works only in the final pass.  May be used by
4442    output templates to customary add branch prediction hints.
4443  */
4444 int
4445 final_forward_branch_p (rtx_insn *insn)
4446 {
4447   int insn_id, label_id;
4448 
4449   gcc_assert (uid_shuid);
4450   insn_id = INSN_SHUID (insn);
4451   label_id = INSN_SHUID (JUMP_LABEL (insn));
4452   /* We've hit some insns that does not have id information available.  */
4453   gcc_assert (insn_id && label_id);
4454   return insn_id < label_id;
4455 }
4456 
4457 /* On some machines, a function with no call insns
4458    can run faster if it doesn't create its own register window.
4459    When output, the leaf function should use only the "output"
4460    registers.  Ordinarily, the function would be compiled to use
4461    the "input" registers to find its arguments; it is a candidate
4462    for leaf treatment if it uses only the "input" registers.
4463    Leaf function treatment means renumbering so the function
4464    uses the "output" registers instead.  */
4465 
4466 #ifdef LEAF_REGISTERS
4467 
4468 /* Return 1 if this function uses only the registers that can be
4469    safely renumbered.  */
4470 
4471 int
4472 only_leaf_regs_used (void)
4473 {
4474   int i;
4475   const char *const permitted_reg_in_leaf_functions = LEAF_REGISTERS;
4476 
4477   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4478     if ((df_regs_ever_live_p (i) || global_regs[i])
4479 	&& ! permitted_reg_in_leaf_functions[i])
4480       return 0;
4481 
4482   if (crtl->uses_pic_offset_table
4483       && pic_offset_table_rtx != 0
4484       && REG_P (pic_offset_table_rtx)
4485       && ! permitted_reg_in_leaf_functions[REGNO (pic_offset_table_rtx)])
4486     return 0;
4487 
4488   return 1;
4489 }
4490 
4491 /* Scan all instructions and renumber all registers into those
4492    available in leaf functions.  */
4493 
4494 static void
4495 leaf_renumber_regs (rtx_insn *first)
4496 {
4497   rtx_insn *insn;
4498 
4499   /* Renumber only the actual patterns.
4500      The reg-notes can contain frame pointer refs,
4501      and renumbering them could crash, and should not be needed.  */
4502   for (insn = first; insn; insn = NEXT_INSN (insn))
4503     if (INSN_P (insn))
4504       leaf_renumber_regs_insn (PATTERN (insn));
4505 }
4506 
4507 /* Scan IN_RTX and its subexpressions, and renumber all regs into those
4508    available in leaf functions.  */
4509 
4510 void
4511 leaf_renumber_regs_insn (rtx in_rtx)
4512 {
4513   int i, j;
4514   const char *format_ptr;
4515 
4516   if (in_rtx == 0)
4517     return;
4518 
4519   /* Renumber all input-registers into output-registers.
4520      renumbered_regs would be 1 for an output-register;
4521      they  */
4522 
4523   if (REG_P (in_rtx))
4524     {
4525       int newreg;
4526 
4527       /* Don't renumber the same reg twice.  */
4528       if (in_rtx->used)
4529 	return;
4530 
4531       newreg = REGNO (in_rtx);
4532       /* Don't try to renumber pseudo regs.  It is possible for a pseudo reg
4533 	 to reach here as part of a REG_NOTE.  */
4534       if (newreg >= FIRST_PSEUDO_REGISTER)
4535 	{
4536 	  in_rtx->used = 1;
4537 	  return;
4538 	}
4539       newreg = LEAF_REG_REMAP (newreg);
4540       gcc_assert (newreg >= 0);
4541       df_set_regs_ever_live (REGNO (in_rtx), false);
4542       df_set_regs_ever_live (newreg, true);
4543       SET_REGNO (in_rtx, newreg);
4544       in_rtx->used = 1;
4545       return;
4546     }
4547 
4548   if (INSN_P (in_rtx))
4549     {
4550       /* Inside a SEQUENCE, we find insns.
4551 	 Renumber just the patterns of these insns,
4552 	 just as we do for the top-level insns.  */
4553       leaf_renumber_regs_insn (PATTERN (in_rtx));
4554       return;
4555     }
4556 
4557   format_ptr = GET_RTX_FORMAT (GET_CODE (in_rtx));
4558 
4559   for (i = 0; i < GET_RTX_LENGTH (GET_CODE (in_rtx)); i++)
4560     switch (*format_ptr++)
4561       {
4562       case 'e':
4563 	leaf_renumber_regs_insn (XEXP (in_rtx, i));
4564 	break;
4565 
4566       case 'E':
4567 	if (NULL != XVEC (in_rtx, i))
4568 	  {
4569 	    for (j = 0; j < XVECLEN (in_rtx, i); j++)
4570 	      leaf_renumber_regs_insn (XVECEXP (in_rtx, i, j));
4571 	  }
4572 	break;
4573 
4574       case 'S':
4575       case 's':
4576       case '0':
4577       case 'i':
4578       case 'w':
4579       case 'n':
4580       case 'u':
4581 	break;
4582 
4583       default:
4584 	gcc_unreachable ();
4585       }
4586 }
4587 #endif
4588 
4589 /* Turn the RTL into assembly.  */
4590 static unsigned int
4591 rest_of_handle_final (void)
4592 {
4593   const char *fnname = get_fnname_from_decl (current_function_decl);
4594 
4595   assemble_start_function (current_function_decl, fnname);
4596   final_start_function (get_insns (), asm_out_file, optimize);
4597   final (get_insns (), asm_out_file, optimize);
4598   if (flag_ipa_ra)
4599     collect_fn_hard_reg_usage ();
4600   final_end_function ();
4601 
4602   /* The IA-64 ".handlerdata" directive must be issued before the ".endp"
4603      directive that closes the procedure descriptor.  Similarly, for x64 SEH.
4604      Otherwise it's not strictly necessary, but it doesn't hurt either.  */
4605   output_function_exception_table (fnname);
4606 
4607   assemble_end_function (current_function_decl, fnname);
4608 
4609   /* Free up reg info memory.  */
4610   free_reg_info ();
4611 
4612   if (! quiet_flag)
4613     fflush (asm_out_file);
4614 
4615   /* Write DBX symbols if requested.  */
4616 
4617   /* Note that for those inline functions where we don't initially
4618      know for certain that we will be generating an out-of-line copy,
4619      the first invocation of this routine (rest_of_compilation) will
4620      skip over this code by doing a `goto exit_rest_of_compilation;'.
4621      Later on, wrapup_global_declarations will (indirectly) call
4622      rest_of_compilation again for those inline functions that need
4623      to have out-of-line copies generated.  During that call, we
4624      *will* be routed past here.  */
4625 
4626   timevar_push (TV_SYMOUT);
4627   if (!DECL_IGNORED_P (current_function_decl))
4628     debug_hooks->function_decl (current_function_decl);
4629   timevar_pop (TV_SYMOUT);
4630 
4631   /* Release the blocks that are linked to DECL_INITIAL() to free the memory.  */
4632   DECL_INITIAL (current_function_decl) = error_mark_node;
4633 
4634   if (DECL_STATIC_CONSTRUCTOR (current_function_decl)
4635       && targetm.have_ctors_dtors)
4636     targetm.asm_out.constructor (XEXP (DECL_RTL (current_function_decl), 0),
4637 				 decl_init_priority_lookup
4638 				   (current_function_decl));
4639   if (DECL_STATIC_DESTRUCTOR (current_function_decl)
4640       && targetm.have_ctors_dtors)
4641     targetm.asm_out.destructor (XEXP (DECL_RTL (current_function_decl), 0),
4642 				decl_fini_priority_lookup
4643 				  (current_function_decl));
4644   return 0;
4645 }
4646 
4647 namespace {
4648 
4649 const pass_data pass_data_final =
4650 {
4651   RTL_PASS, /* type */
4652   "final", /* name */
4653   OPTGROUP_NONE, /* optinfo_flags */
4654   TV_FINAL, /* tv_id */
4655   0, /* properties_required */
4656   0, /* properties_provided */
4657   0, /* properties_destroyed */
4658   0, /* todo_flags_start */
4659   0, /* todo_flags_finish */
4660 };
4661 
4662 class pass_final : public rtl_opt_pass
4663 {
4664 public:
4665   pass_final (gcc::context *ctxt)
4666     : rtl_opt_pass (pass_data_final, ctxt)
4667   {}
4668 
4669   /* opt_pass methods: */
4670   virtual unsigned int execute (function *) { return rest_of_handle_final (); }
4671 
4672 }; // class pass_final
4673 
4674 } // anon namespace
4675 
4676 rtl_opt_pass *
4677 make_pass_final (gcc::context *ctxt)
4678 {
4679   return new pass_final (ctxt);
4680 }
4681 
4682 
4683 static unsigned int
4684 rest_of_handle_shorten_branches (void)
4685 {
4686   /* Shorten branches.  */
4687   shorten_branches (get_insns ());
4688   return 0;
4689 }
4690 
4691 namespace {
4692 
4693 const pass_data pass_data_shorten_branches =
4694 {
4695   RTL_PASS, /* type */
4696   "shorten", /* name */
4697   OPTGROUP_NONE, /* optinfo_flags */
4698   TV_SHORTEN_BRANCH, /* tv_id */
4699   0, /* properties_required */
4700   0, /* properties_provided */
4701   0, /* properties_destroyed */
4702   0, /* todo_flags_start */
4703   0, /* todo_flags_finish */
4704 };
4705 
4706 class pass_shorten_branches : public rtl_opt_pass
4707 {
4708 public:
4709   pass_shorten_branches (gcc::context *ctxt)
4710     : rtl_opt_pass (pass_data_shorten_branches, ctxt)
4711   {}
4712 
4713   /* opt_pass methods: */
4714   virtual unsigned int execute (function *)
4715     {
4716       return rest_of_handle_shorten_branches ();
4717     }
4718 
4719 }; // class pass_shorten_branches
4720 
4721 } // anon namespace
4722 
4723 rtl_opt_pass *
4724 make_pass_shorten_branches (gcc::context *ctxt)
4725 {
4726   return new pass_shorten_branches (ctxt);
4727 }
4728 
4729 
4730 static unsigned int
4731 rest_of_clean_state (void)
4732 {
4733   rtx_insn *insn, *next;
4734   FILE *final_output = NULL;
4735   int save_unnumbered = flag_dump_unnumbered;
4736   int save_noaddr = flag_dump_noaddr;
4737 
4738   if (flag_dump_final_insns)
4739     {
4740       final_output = fopen (flag_dump_final_insns, "a");
4741       if (!final_output)
4742 	{
4743 	  error ("could not open final insn dump file %qs: %m",
4744 		 flag_dump_final_insns);
4745 	  flag_dump_final_insns = NULL;
4746 	}
4747       else
4748 	{
4749 	  flag_dump_noaddr = flag_dump_unnumbered = 1;
4750 	  if (flag_compare_debug_opt || flag_compare_debug)
4751 	    dump_flags |= TDF_NOUID;
4752 	  dump_function_header (final_output, current_function_decl,
4753 				dump_flags);
4754 	  final_insns_dump_p = true;
4755 
4756 	  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4757 	    if (LABEL_P (insn))
4758 	      INSN_UID (insn) = CODE_LABEL_NUMBER (insn);
4759 	    else
4760 	      {
4761 		if (NOTE_P (insn))
4762 		  set_block_for_insn (insn, NULL);
4763 		INSN_UID (insn) = 0;
4764 	      }
4765 	}
4766     }
4767 
4768   /* It is very important to decompose the RTL instruction chain here:
4769      debug information keeps pointing into CODE_LABEL insns inside the function
4770      body.  If these remain pointing to the other insns, we end up preserving
4771      whole RTL chain and attached detailed debug info in memory.  */
4772   for (insn = get_insns (); insn; insn = next)
4773     {
4774       next = NEXT_INSN (insn);
4775       SET_NEXT_INSN (insn) = NULL;
4776       SET_PREV_INSN (insn) = NULL;
4777 
4778       if (final_output
4779 	  && (!NOTE_P (insn) ||
4780 	      (NOTE_KIND (insn) != NOTE_INSN_VAR_LOCATION
4781 	       && NOTE_KIND (insn) != NOTE_INSN_CALL_ARG_LOCATION
4782 	       && NOTE_KIND (insn) != NOTE_INSN_BLOCK_BEG
4783 	       && NOTE_KIND (insn) != NOTE_INSN_BLOCK_END
4784 	       && NOTE_KIND (insn) != NOTE_INSN_DELETED_DEBUG_LABEL)))
4785 	print_rtl_single (final_output, insn);
4786     }
4787 
4788   if (final_output)
4789     {
4790       flag_dump_noaddr = save_noaddr;
4791       flag_dump_unnumbered = save_unnumbered;
4792       final_insns_dump_p = false;
4793 
4794       if (fclose (final_output))
4795 	{
4796 	  error ("could not close final insn dump file %qs: %m",
4797 		 flag_dump_final_insns);
4798 	  flag_dump_final_insns = NULL;
4799 	}
4800     }
4801 
4802   /* In case the function was not output,
4803      don't leave any temporary anonymous types
4804      queued up for sdb output.  */
4805   if (SDB_DEBUGGING_INFO && write_symbols == SDB_DEBUG)
4806     sdbout_types (NULL_TREE);
4807 
4808   flag_rerun_cse_after_global_opts = 0;
4809   reload_completed = 0;
4810   epilogue_completed = 0;
4811 #ifdef STACK_REGS
4812   regstack_completed = 0;
4813 #endif
4814 
4815   /* Clear out the insn_length contents now that they are no
4816      longer valid.  */
4817   init_insn_lengths ();
4818 
4819   /* Show no temporary slots allocated.  */
4820   init_temp_slots ();
4821 
4822   free_bb_for_insn ();
4823 
4824   if (cfun->gimple_df)
4825     delete_tree_ssa (cfun);
4826 
4827   /* We can reduce stack alignment on call site only when we are sure that
4828      the function body just produced will be actually used in the final
4829      executable.  */
4830   if (decl_binds_to_current_def_p (current_function_decl))
4831     {
4832       unsigned int pref = crtl->preferred_stack_boundary;
4833       if (crtl->stack_alignment_needed > crtl->preferred_stack_boundary)
4834         pref = crtl->stack_alignment_needed;
4835       cgraph_node::rtl_info (current_function_decl)
4836 	->preferred_incoming_stack_boundary = pref;
4837     }
4838 
4839   /* Make sure volatile mem refs aren't considered valid operands for
4840      arithmetic insns.  We must call this here if this is a nested inline
4841      function, since the above code leaves us in the init_recog state,
4842      and the function context push/pop code does not save/restore volatile_ok.
4843 
4844      ??? Maybe it isn't necessary for expand_start_function to call this
4845      anymore if we do it here?  */
4846 
4847   init_recog_no_volatile ();
4848 
4849   /* We're done with this function.  Free up memory if we can.  */
4850   free_after_parsing (cfun);
4851   free_after_compilation (cfun);
4852   return 0;
4853 }
4854 
4855 namespace {
4856 
4857 const pass_data pass_data_clean_state =
4858 {
4859   RTL_PASS, /* type */
4860   "*clean_state", /* name */
4861   OPTGROUP_NONE, /* optinfo_flags */
4862   TV_FINAL, /* tv_id */
4863   0, /* properties_required */
4864   0, /* properties_provided */
4865   PROP_rtl, /* properties_destroyed */
4866   0, /* todo_flags_start */
4867   0, /* todo_flags_finish */
4868 };
4869 
4870 class pass_clean_state : public rtl_opt_pass
4871 {
4872 public:
4873   pass_clean_state (gcc::context *ctxt)
4874     : rtl_opt_pass (pass_data_clean_state, ctxt)
4875   {}
4876 
4877   /* opt_pass methods: */
4878   virtual unsigned int execute (function *)
4879     {
4880       return rest_of_clean_state ();
4881     }
4882 
4883 }; // class pass_clean_state
4884 
4885 } // anon namespace
4886 
4887 rtl_opt_pass *
4888 make_pass_clean_state (gcc::context *ctxt)
4889 {
4890   return new pass_clean_state (ctxt);
4891 }
4892 
4893 /* Return true if INSN is a call to the current function.  */
4894 
4895 static bool
4896 self_recursive_call_p (rtx_insn *insn)
4897 {
4898   tree fndecl = get_call_fndecl (insn);
4899   return (fndecl == current_function_decl
4900 	  && decl_binds_to_current_def_p (fndecl));
4901 }
4902 
4903 /* Collect hard register usage for the current function.  */
4904 
4905 static void
4906 collect_fn_hard_reg_usage (void)
4907 {
4908   rtx_insn *insn;
4909 #ifdef STACK_REGS
4910   int i;
4911 #endif
4912   struct cgraph_rtl_info *node;
4913   HARD_REG_SET function_used_regs;
4914 
4915   /* ??? To be removed when all the ports have been fixed.  */
4916   if (!targetm.call_fusage_contains_non_callee_clobbers)
4917     return;
4918 
4919   CLEAR_HARD_REG_SET (function_used_regs);
4920 
4921   for (insn = get_insns (); insn != NULL_RTX; insn = next_insn (insn))
4922     {
4923       HARD_REG_SET insn_used_regs;
4924 
4925       if (!NONDEBUG_INSN_P (insn))
4926 	continue;
4927 
4928       if (CALL_P (insn)
4929 	  && !self_recursive_call_p (insn))
4930 	{
4931 	  if (!get_call_reg_set_usage (insn, &insn_used_regs,
4932 				       call_used_reg_set))
4933 	    return;
4934 
4935 	  IOR_HARD_REG_SET (function_used_regs, insn_used_regs);
4936 	}
4937 
4938       find_all_hard_reg_sets (insn, &insn_used_regs, false);
4939       IOR_HARD_REG_SET (function_used_regs, insn_used_regs);
4940     }
4941 
4942   /* Be conservative - mark fixed and global registers as used.  */
4943   IOR_HARD_REG_SET (function_used_regs, fixed_reg_set);
4944 
4945 #ifdef STACK_REGS
4946   /* Handle STACK_REGS conservatively, since the df-framework does not
4947      provide accurate information for them.  */
4948 
4949   for (i = FIRST_STACK_REG; i <= LAST_STACK_REG; i++)
4950     SET_HARD_REG_BIT (function_used_regs, i);
4951 #endif
4952 
4953   /* The information we have gathered is only interesting if it exposes a
4954      register from the call_used_regs that is not used in this function.  */
4955   if (hard_reg_set_subset_p (call_used_reg_set, function_used_regs))
4956     return;
4957 
4958   node = cgraph_node::rtl_info (current_function_decl);
4959   gcc_assert (node != NULL);
4960 
4961   COPY_HARD_REG_SET (node->function_used_regs, function_used_regs);
4962   node->function_used_regs_valid = 1;
4963 }
4964 
4965 /* Get the declaration of the function called by INSN.  */
4966 
4967 static tree
4968 get_call_fndecl (rtx_insn *insn)
4969 {
4970   rtx note, datum;
4971 
4972   note = find_reg_note (insn, REG_CALL_DECL, NULL_RTX);
4973   if (note == NULL_RTX)
4974     return NULL_TREE;
4975 
4976   datum = XEXP (note, 0);
4977   if (datum != NULL_RTX)
4978     return SYMBOL_REF_DECL (datum);
4979 
4980   return NULL_TREE;
4981 }
4982 
4983 /* Return the cgraph_rtl_info of the function called by INSN.  Returns NULL for
4984    call targets that can be overwritten.  */
4985 
4986 static struct cgraph_rtl_info *
4987 get_call_cgraph_rtl_info (rtx_insn *insn)
4988 {
4989   tree fndecl;
4990 
4991   if (insn == NULL_RTX)
4992     return NULL;
4993 
4994   fndecl = get_call_fndecl (insn);
4995   if (fndecl == NULL_TREE
4996       || !decl_binds_to_current_def_p (fndecl))
4997     return NULL;
4998 
4999   return cgraph_node::rtl_info (fndecl);
5000 }
5001 
5002 /* Find hard registers used by function call instruction INSN, and return them
5003    in REG_SET.  Return DEFAULT_SET in REG_SET if not found.  */
5004 
5005 bool
5006 get_call_reg_set_usage (rtx_insn *insn, HARD_REG_SET *reg_set,
5007 			HARD_REG_SET default_set)
5008 {
5009   if (flag_ipa_ra)
5010     {
5011       struct cgraph_rtl_info *node = get_call_cgraph_rtl_info (insn);
5012       if (node != NULL
5013 	  && node->function_used_regs_valid)
5014 	{
5015 	  COPY_HARD_REG_SET (*reg_set, node->function_used_regs);
5016 	  AND_HARD_REG_SET (*reg_set, default_set);
5017 	  return true;
5018 	}
5019     }
5020 
5021   COPY_HARD_REG_SET (*reg_set, default_set);
5022   return false;
5023 }
5024