1 /* Convert RTL to assembler code and output it, for GNU compiler.
2    Copyright (C) 1987-2022 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 /* This is the final pass of the compiler.
21    It looks at the rtl code for a function and outputs assembler code.
22 
23    Call `final_start_function' to output the assembler code for function entry,
24    `final' to output assembler code for some RTL code,
25    `final_end_function' to output assembler code for function exit.
26    If a function is compiled in several pieces, each piece is
27    output separately with `final'.
28 
29    Some optimizations are also done at this level.
30    Move instructions that were made unnecessary by good register allocation
31    are detected and omitted from the output.  (Though most of these
32    are removed by the last jump pass.)
33 
34    Instructions to set the condition codes are omitted when it can be
35    seen that the condition codes already had the desired values.
36 
37    In some cases it is sufficient if the inherited condition codes
38    have related values, but this may require the following insn
39    (the one that tests the condition codes) to be modified.
40 
41    The code for the function prologue and epilogue is generated
42    directly in assembler by the target functions function_prologue and
43    function_epilogue.  Those instructions never exist as rtl.  */
44 
45 #include "config.h"
46 #define INCLUDE_ALGORITHM /* reverse */
47 #include "system.h"
48 #include "coretypes.h"
49 #include "backend.h"
50 #include "target.h"
51 #include "rtl.h"
52 #include "tree.h"
53 #include "cfghooks.h"
54 #include "df.h"
55 #include "memmodel.h"
56 #include "tm_p.h"
57 #include "insn-config.h"
58 #include "regs.h"
59 #include "emit-rtl.h"
60 #include "recog.h"
61 #include "cgraph.h"
62 #include "tree-pretty-print.h" /* for dump_function_header */
63 #include "varasm.h"
64 #include "insn-attr.h"
65 #include "conditions.h"
66 #include "flags.h"
67 #include "output.h"
68 #include "except.h"
69 #include "rtl-error.h"
70 #include "toplev.h" /* exact_log2, floor_log2 */
71 #include "reload.h"
72 #include "intl.h"
73 #include "cfgrtl.h"
74 #include "debug.h"
75 #include "tree-pass.h"
76 #include "tree-ssa.h"
77 #include "cfgloop.h"
78 #include "stringpool.h"
79 #include "attribs.h"
80 #include "asan.h"
81 #include "rtl-iter.h"
82 #include "print-rtl.h"
83 #include "function-abi.h"
84 #include "common/common-target.h"
85 
86 #ifdef XCOFF_DEBUGGING_INFO
87 #include "xcoffout.h"		/* Needed for external data declarations.  */
88 #endif
89 
90 #include "dwarf2out.h"
91 
92 #ifdef DBX_DEBUGGING_INFO
93 #include "dbxout.h"
94 #endif
95 
96 /* Most ports don't need to define CC_STATUS_INIT.
97    So define a null default for it to save conditionalization later.  */
98 #ifndef CC_STATUS_INIT
99 #define CC_STATUS_INIT
100 #endif
101 
102 /* Is the given character a logical line separator for the assembler?  */
103 #ifndef IS_ASM_LOGICAL_LINE_SEPARATOR
104 #define IS_ASM_LOGICAL_LINE_SEPARATOR(C, STR) ((C) == ';')
105 #endif
106 
107 #ifndef JUMP_TABLES_IN_TEXT_SECTION
108 #define JUMP_TABLES_IN_TEXT_SECTION 0
109 #endif
110 
111 /* Bitflags used by final_scan_insn.  */
112 #define SEEN_NOTE	1
113 #define SEEN_EMITTED	2
114 #define SEEN_NEXT_VIEW	4
115 
116 /* Last insn processed by final_scan_insn.  */
117 static rtx_insn *debug_insn;
118 rtx_insn *current_output_insn;
119 
120 /* Line number of last NOTE.  */
121 static int last_linenum;
122 
123 /* Column number of last NOTE.  */
124 static int last_columnnum;
125 
126 /* Discriminator written to assembly.  */
127 static int last_discriminator;
128 
129 /* Discriminator to be written to assembly for current instruction.
130    Note: actual usage depends on loc_discriminator_kind setting.  */
131 static int discriminator;
132 static inline int compute_discriminator (location_t loc);
133 
134 /* Discriminator identifying current basic block among others sharing
135    the same locus.  */
136 static int bb_discriminator;
137 
138 /* Basic block discriminator for previous instruction.  */
139 static int last_bb_discriminator;
140 
141 /* Highest line number in current block.  */
142 static int high_block_linenum;
143 
144 /* Likewise for function.  */
145 static int high_function_linenum;
146 
147 /* Filename of last NOTE.  */
148 static const char *last_filename;
149 
150 /* Override filename, line and column number.  */
151 static const char *override_filename;
152 static int override_linenum;
153 static int override_columnnum;
154 static int override_discriminator;
155 
156 /* Whether to force emission of a line note before the next insn.  */
157 static bool force_source_line = false;
158 
159 extern const int length_unit_log; /* This is defined in insn-attrtab.cc.  */
160 
161 /* Nonzero while outputting an `asm' with operands.
162    This means that inconsistencies are the user's fault, so don't die.
163    The precise value is the insn being output, to pass to error_for_asm.  */
164 const rtx_insn *this_is_asm_operands;
165 
166 /* Number of operands of this insn, for an `asm' with operands.  */
167 static unsigned int insn_noperands;
168 
169 /* Compare optimization flag.  */
170 
171 static rtx last_ignored_compare = 0;
172 
173 /* Assign a unique number to each insn that is output.
174    This can be used to generate unique local labels.  */
175 
176 static int insn_counter = 0;
177 
178 /* Number of unmatched NOTE_INSN_BLOCK_BEG notes we have seen.  */
179 
180 static int block_depth;
181 
182 /* Nonzero if have enabled APP processing of our assembler output.  */
183 
184 static int app_on;
185 
186 /* If we are outputting an insn sequence, this contains the sequence rtx.
187    Zero otherwise.  */
188 
189 rtx_sequence *final_sequence;
190 
191 #ifdef ASSEMBLER_DIALECT
192 
193 /* Number of the assembler dialect to use, starting at 0.  */
194 static int dialect_number;
195 #endif
196 
197 /* Nonnull if the insn currently being emitted was a COND_EXEC pattern.  */
198 rtx current_insn_predicate;
199 
200 /* True if printing into -fdump-final-insns= dump.  */
201 bool final_insns_dump_p;
202 
203 /* True if profile_function should be called, but hasn't been called yet.  */
204 static bool need_profile_function;
205 
206 static int asm_insn_count (rtx);
207 static void profile_function (FILE *);
208 static void profile_after_prologue (FILE *);
209 static bool notice_source_line (rtx_insn *, bool *);
210 static rtx walk_alter_subreg (rtx *, bool *);
211 static void output_asm_name (void);
212 static void output_alternate_entry_point (FILE *, rtx_insn *);
213 static tree get_mem_expr_from_op (rtx, int *);
214 static void output_asm_operand_names (rtx *, int *, int);
215 #ifdef LEAF_REGISTERS
216 static void leaf_renumber_regs (rtx_insn *);
217 #endif
218 static int align_fuzz (rtx, rtx, int, unsigned);
219 static void collect_fn_hard_reg_usage (void);
220 
221 /* Initialize data in final at the beginning of a compilation.  */
222 
223 void
224 init_final (const char *filename ATTRIBUTE_UNUSED)
225 {
226   app_on = 0;
227   final_sequence = 0;
228 
229 #ifdef ASSEMBLER_DIALECT
230   dialect_number = ASSEMBLER_DIALECT;
231 #endif
232 }
233 
234 /* Default target function prologue and epilogue assembler output.
235 
236    If not overridden for epilogue code, then the function body itself
237    contains return instructions wherever needed.  */
238 void
239 default_function_pro_epilogue (FILE *)
240 {
241 }
242 
243 void
244 default_function_switched_text_sections (FILE *file ATTRIBUTE_UNUSED,
245 					 tree decl ATTRIBUTE_UNUSED,
246 					 bool new_is_cold ATTRIBUTE_UNUSED)
247 {
248 }
249 
250 /* Default target hook that outputs nothing to a stream.  */
251 void
252 no_asm_to_stream (FILE *file ATTRIBUTE_UNUSED)
253 {
254 }
255 
256 /* Enable APP processing of subsequent output.
257    Used before the output from an `asm' statement.  */
258 
259 void
260 app_enable (void)
261 {
262   if (! app_on)
263     {
264       fputs (ASM_APP_ON, asm_out_file);
265       app_on = 1;
266     }
267 }
268 
269 /* Disable APP processing of subsequent output.
270    Called from varasm.cc before most kinds of output.  */
271 
272 void
273 app_disable (void)
274 {
275   if (app_on)
276     {
277       fputs (ASM_APP_OFF, asm_out_file);
278       app_on = 0;
279     }
280 }
281 
282 /* Return the number of slots filled in the current
283    delayed branch sequence (we don't count the insn needing the
284    delay slot).   Zero if not in a delayed branch sequence.  */
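/* For example, a delay SEQUENCE holding the branch plus one delay-slot insn
   has XVECLEN 2, so this returns 1.  */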
285 
286 int
287 dbr_sequence_length (void)
288 {
289   if (final_sequence != 0)
290     return XVECLEN (final_sequence, 0) - 1;
291   else
292     return 0;
293 }
294 
295 /* The next two pages contain routines used to compute the length of an insn
296    and to shorten branches.  */
297 
298 /* Arrays for insn lengths, and addresses.  The latter is referenced by
299    `insn_current_length'.  */
300 
301 static int *insn_lengths;
302 
303 vec<int> insn_addresses_;
304 
305 /* Max uid for which the above arrays are valid.  */
306 static int insn_lengths_max_uid;
307 
308 /* Address of insn being processed.  Used by `insn_current_length'.  */
309 int insn_current_address;
310 
311 /* Address of insn being processed in previous iteration.  */
312 int insn_last_address;
313 
314 /* known invariant alignment of insn being processed.  */
315 int insn_current_align;
316 
317 /* After shorten_branches, for any insn, uid_align[INSN_UID (insn)]
318    gives the next following alignment insn that increases the known
319    alignment, or NULL_RTX if there is no such insn.
320    For any alignment obtained this way, we can again index uid_align with
321    its uid to obtain the next following align that in turn increases the
322    alignment, till we reach NULL_RTX; the sequence obtained this way
323    for each insn we'll call the alignment chain of this insn in the following
324    comments.  */
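/* For instance, if a plain insn I is followed first by a label L1 aligned
   to 8 bytes and later by a label L2 aligned to 16 bytes, then uid_align
   points from I to L1, from L1 to L2, and from L2 to the next label with a
   still larger alignment, or to NULL_RTX if there is none.  */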
325 
326 static rtx *uid_align;
327 static int *uid_shuid;
328 static vec<align_flags> label_align;
329 
330 /* Indicate that branch shortening hasn't yet been done.  */
331 
332 void
333 init_insn_lengths (void)
334 {
335   if (uid_shuid)
336     {
337       free (uid_shuid);
338       uid_shuid = 0;
339     }
340   if (insn_lengths)
341     {
342       free (insn_lengths);
343       insn_lengths = 0;
344       insn_lengths_max_uid = 0;
345     }
346   if (HAVE_ATTR_length)
347     INSN_ADDRESSES_FREE ();
348   if (uid_align)
349     {
350       free (uid_align);
351       uid_align = 0;
352     }
353 }
354 
355 /* Obtain the current length of an insn.  If branch shortening has been done,
356    get its actual length.  Otherwise, use FALLBACK_FN to calculate the
357    length.  */
358 static int
359 get_attr_length_1 (rtx_insn *insn, int (*fallback_fn) (rtx_insn *))
360 {
361   rtx body;
362   int i;
363   int length = 0;
364 
365   if (!HAVE_ATTR_length)
366     return 0;
367 
368   if (insn_lengths_max_uid > INSN_UID (insn))
369     return insn_lengths[INSN_UID (insn)];
370   else
371     switch (GET_CODE (insn))
372       {
373       case NOTE:
374       case BARRIER:
375       case CODE_LABEL:
376       case DEBUG_INSN:
377 	return 0;
378 
379       case CALL_INSN:
380       case JUMP_INSN:
381 	length = fallback_fn (insn);
382 	break;
383 
384       case INSN:
385 	body = PATTERN (insn);
386 	if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
387 	  return 0;
388 
389 	else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
390 	  length = asm_insn_count (body) * fallback_fn (insn);
391 	else if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (body))
392 	  for (i = 0; i < seq->len (); i++)
393 	    length += get_attr_length_1 (seq->insn (i), fallback_fn);
394 	else
395 	  length = fallback_fn (insn);
396 	break;
397 
398       default:
399 	break;
400       }
401 
402 #ifdef ADJUST_INSN_LENGTH
403   ADJUST_INSN_LENGTH (insn, length);
404 #endif
405   return length;
406 }
407 
408 /* Obtain the current length of an insn.  If branch shortening has been done,
409    get its actual length.  Otherwise, get its maximum length.  */
410 int
411 get_attr_length (rtx_insn *insn)
412 {
413   return get_attr_length_1 (insn, insn_default_length);
414 }
415 
416 /* Obtain the current length of an insn.  If branch shortening has been done,
417    get its actual length.  Otherwise, get its minimum length.  */
418 int
419 get_attr_min_length (rtx_insn *insn)
420 {
421   return get_attr_length_1 (insn, insn_min_length);
422 }
423 
424 /* Code to handle alignment inside shorten_branches.  */
425 
426 /* Here is an explanation of how the algorithm in align_fuzz can give
427    proper results:
428 
429    Call a sequence of instructions beginning with alignment point X
430    and continuing until the next alignment point `block X'.  When `X'
431    is used in an expression, it means the alignment value of the
432    alignment point.
433 
434    Call the distance between the start of the first insn of block X, and
435    the end of the last insn of block X `IX', for the `inner size of X'.
436    This is clearly the sum of the instruction lengths.
437 
438    Likewise with the next alignment-delimited block following X, which we
439    shall call block Y.
440 
441    Call the distance between the start of the first insn of block X, and
442    the start of the first insn of block Y `OX', for the `outer size of X'.
443 
444    The estimated padding is then OX - IX.
445 
446    OX can be safely estimated as
447 
448            if (X >= Y)
449                    OX = round_up(IX, Y)
450            else
451                    OX = round_up(IX, X) + Y - X
452 
453    Clearly est(IX) >= real(IX), because that only depends on the
454    instruction lengths, and those being overestimated is a given.
455 
456    Clearly round_up(foo, Z) >= round_up(bar, Z) if foo >= bar, so
457    we needn't worry about that when thinking about OX.
458 
459    When X >= Y, the alignment provided by Y adds no uncertainty factor
460    for branch ranges starting before X, so we can just round what we have.
461    But when X < Y, we don't know anything about the, so to speak,
462    `middle bits', so we have to assume the worst when aligning up from an
463    address mod X to one mod Y, which is Y - X.  */
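/* As a worked example of the X < Y case: with X = 4, Y = 8 and an inner
   size IX = 10, OX = round_up (10, 4) + 8 - 4 = 16, so we allow for up to
   OX - IX = 6 bytes of alignment padding between the two blocks.  */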
464 
465 #ifndef LABEL_ALIGN
466 #define LABEL_ALIGN(LABEL) align_labels
467 #endif
468 
469 #ifndef LOOP_ALIGN
470 #define LOOP_ALIGN(LABEL) align_loops
471 #endif
472 
473 #ifndef LABEL_ALIGN_AFTER_BARRIER
474 #define LABEL_ALIGN_AFTER_BARRIER(LABEL) 0
475 #endif
476 
477 #ifndef JUMP_ALIGN
478 #define JUMP_ALIGN(LABEL) align_jumps
479 #endif
480 
481 #ifndef ADDR_VEC_ALIGN
482 static int
483 final_addr_vec_align (rtx_jump_table_data *addr_vec)
484 {
485   int align = GET_MODE_SIZE (addr_vec->get_data_mode ());
486 
487   if (align > BIGGEST_ALIGNMENT / BITS_PER_UNIT)
488     align = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
489   return exact_log2 (align);
490 
491 }
492 
493 #define ADDR_VEC_ALIGN(ADDR_VEC) final_addr_vec_align (ADDR_VEC)
494 #endif
495 
496 #ifndef INSN_LENGTH_ALIGNMENT
497 #define INSN_LENGTH_ALIGNMENT(INSN) length_unit_log
498 #endif
499 
500 #define INSN_SHUID(INSN) (uid_shuid[INSN_UID (INSN)])
501 
502 static int min_labelno, max_labelno;
503 
504 #define LABEL_TO_ALIGNMENT(LABEL) \
505   (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno])
506 
507 /* For the benefit of port specific code do this also as a function.  */
508 
509 align_flags
510 label_to_alignment (rtx label)
511 {
512   if (CODE_LABEL_NUMBER (label) <= max_labelno)
513     return LABEL_TO_ALIGNMENT (label);
514   return align_flags ();
515 }
516 
517 /* The differences in addresses
518    between a branch and its target might grow or shrink depending on
519    the alignment the start insn of the range (the branch for a forward
520    branch or the label for a backward branch) starts out on; if these
521    differences are used naively, they can even oscillate infinitely.
522    is independent of the alignment the start insn of the range ends
523    is independent of the alignment the start insn of the range end
524    up on, and that is at least as large as the actual difference.
525    The function align_fuzz calculates the amount we have to add to the
526    naively computed difference, by traversing the part of the alignment
527    chain of the start insn of the range that is in front of the end insn
528    of the range, and considering for each alignment the maximum amount
529    that it might contribute to a size increase.
530 
531    For casesi tables, we also want to know worst case minimum amounts of
532    address difference, in case a machine description wants to introduce
533    some common offset that is added to all offsets in a table.
534    For this purpose, align_fuzz with a growth argument of 0 computes the
535    appropriate adjustment.  */
536 
537 /* Compute the maximum delta by which the difference of the addresses of
538    START and END might grow / shrink due to a different address for start
539    which changes the size of alignment insns between START and END.
540    KNOWN_ALIGN_LOG is the alignment known for START.
541    GROWTH should be ~0 if the objective is to compute potential code size
542    increase, and 0 if the objective is to compute potential shrink.
543    The return value is undefined for any other value of GROWTH.  */
544 
545 static int
546 align_fuzz (rtx start, rtx end, int known_align_log, unsigned int growth)
547 {
548   int uid = INSN_UID (start);
549   rtx align_label;
550   int known_align = 1 << known_align_log;
551   int end_shuid = INSN_SHUID (end);
552   int fuzz = 0;
553 
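  /* Walk the part of START's alignment chain that lies in front of END;
     each alignment point that raises the known alignment can contribute
     at most (new_align - known_align) bytes of growth or shrinkage,
     depending on GROWTH.  */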
554   for (align_label = uid_align[uid]; align_label; align_label = uid_align[uid])
555     {
556       int align_addr, new_align;
557 
558       uid = INSN_UID (align_label);
559       align_addr = INSN_ADDRESSES (uid) - insn_lengths[uid];
560       if (uid_shuid[uid] > end_shuid)
561 	break;
562       align_flags alignment = LABEL_TO_ALIGNMENT (align_label);
563       new_align = 1 << alignment.levels[0].log;
564       if (new_align < known_align)
565 	continue;
566       fuzz += (-align_addr ^ growth) & (new_align - known_align);
567       known_align = new_align;
568     }
569   return fuzz;
570 }
571 
572 /* Compute a worst-case reference address of a branch so that it
573    can be safely used in the presence of aligned labels.  Since the
574    size of the branch itself is unknown, the size of the branch is
575    not included in the range.  I.e. for a forward branch, the reference
576    address is the end address of the branch as known from the previous
577    branch shortening pass, minus a value to account for possible size
578    increase due to alignment.  For a backward branch, it is the start
579    address of the branch as known from the current pass, plus a value
580    to account for possible size increase due to alignment.
581    NB.: Therefore, the maximum offset allowed for backward branches needs
582    to exclude the branch size.  */
583 
584 int
585 insn_current_reference_address (rtx_insn *branch)
586 {
587   rtx dest;
588   int seq_uid;
589 
590   if (! INSN_ADDRESSES_SET_P ())
591     return 0;
592 
593   rtx_insn *seq = NEXT_INSN (PREV_INSN (branch));
594   seq_uid = INSN_UID (seq);
595   if (!jump_to_label_p (branch))
596     /* This can happen for example on the PA; the objective is to know the
597        offset to address something in front of the start of the function.
598        Thus, we can treat it like a backward branch.
599        We assume here that FUNCTION_BOUNDARY / BITS_PER_UNIT is larger than
600        any alignment we'd encounter, so we skip the call to align_fuzz.  */
601     return insn_current_address;
602   dest = JUMP_LABEL (branch);
603 
604   /* BRANCH has no proper alignment chain set, so use SEQ.
605      BRANCH also has no INSN_SHUID.  */
606   if (INSN_SHUID (seq) < INSN_SHUID (dest))
607     {
608       /* Forward branch.  */
609       return (insn_last_address + insn_lengths[seq_uid]
610 	      - align_fuzz (seq, dest, length_unit_log, ~0));
611     }
612   else
613     {
614       /* Backward branch.  */
615       return (insn_current_address
616 	      + align_fuzz (dest, seq, length_unit_log, ~0));
617     }
618 }
619 
620 /* Compute branch alignments based on CFG profile.  */
621 
622 unsigned int
623 compute_alignments (void)
624 {
625   basic_block bb;
626   align_flags max_alignment;
627 
628   label_align.truncate (0);
629 
630   max_labelno = max_label_num ();
631   min_labelno = get_first_label_num ();
632   label_align.safe_grow_cleared (max_labelno - min_labelno + 1, true);
633 
634   /* If not optimizing or optimizing for size, don't assign any alignments.  */
635   if (! optimize || optimize_function_for_size_p (cfun))
636     return 0;
637 
638   if (dump_file)
639     {
640       dump_reg_info (dump_file);
641       dump_flow_info (dump_file, TDF_DETAILS);
642       flow_loops_dump (dump_file, NULL, 1);
643     }
644   loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
645   profile_count count_threshold = cfun->cfg->count_max.apply_scale
646 		 (1, param_align_threshold);
647 
648   if (dump_file)
649     {
650       fprintf (dump_file, "count_max: ");
651       cfun->cfg->count_max.dump (dump_file);
652       fprintf (dump_file, "\n");
653     }
654   FOR_EACH_BB_FN (bb, cfun)
655     {
656       rtx_insn *label = BB_HEAD (bb);
657       bool has_fallthru = 0;
658       edge e;
659       edge_iterator ei;
660 
661       if (!LABEL_P (label)
662 	  || optimize_bb_for_size_p (bb))
663 	{
664 	  if (dump_file)
665 	    fprintf (dump_file,
666 		     "BB %4i loop %2i loop_depth %2i skipped.\n",
667 		     bb->index,
668 		     bb->loop_father->num,
669 		     bb_loop_depth (bb));
670 	  continue;
671 	}
672       max_alignment = LABEL_ALIGN (label);
673       profile_count fallthru_count = profile_count::zero ();
674       profile_count branch_count = profile_count::zero ();
675 
676       FOR_EACH_EDGE (e, ei, bb->preds)
677 	{
678 	  if (e->flags & EDGE_FALLTHRU)
679 	    has_fallthru = 1, fallthru_count += e->count ();
680 	  else
681 	    branch_count += e->count ();
682 	}
683       if (dump_file)
684 	{
685 	  fprintf (dump_file, "BB %4i loop %2i loop_depth"
686 		   " %2i fall ",
687 		   bb->index, bb->loop_father->num,
688 		   bb_loop_depth (bb));
689 	  fallthru_count.dump (dump_file);
690 	  fprintf (dump_file, " branch ");
691 	  branch_count.dump (dump_file);
692 	  if (!bb->loop_father->inner && bb->loop_father->num)
693 	    fprintf (dump_file, " inner_loop");
694 	  if (bb->loop_father->header == bb)
695 	    fprintf (dump_file, " loop_header");
696 	  fprintf (dump_file, "\n");
697 	}
698       if (!fallthru_count.initialized_p () || !branch_count.initialized_p ())
699 	continue;
700 
701       /* There are two purposes for aligning a block with no incoming fallthru edge:
702 	 1) to avoid fetch stalls when the branch destination is near a cache boundary;
703 	 2) to improve cache efficiency in case the previous block is not executed
704 	    (so it does not need to be in the cache).
705 
706 	 To catch the first case, we align frequently executed blocks.
707 	 To catch the second, we align blocks that are executed more frequently
708 	 than the predecessor and the predecessor is likely to not be executed
709 	 when the function is called.  */
710 
711       if (!has_fallthru
712 	  && (branch_count > count_threshold
713 	      || (bb->count > bb->prev_bb->count.apply_scale (10, 1)
714 		  && (bb->prev_bb->count
715 		      <= ENTRY_BLOCK_PTR_FOR_FN (cfun)
716 			   ->count.apply_scale (1, 2)))))
717 	{
718 	  align_flags alignment = JUMP_ALIGN (label);
719 	  if (dump_file)
720 	    fprintf (dump_file, "  jump alignment added.\n");
721 	  max_alignment = align_flags::max (max_alignment, alignment);
722 	}
723       /* In case block is frequent and reached mostly by non-fallthru edge,
724 	 align it.  It is most likely a first block of loop.  */
725       if (has_fallthru
726 	  && !(single_succ_p (bb)
727 	       && single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun))
728 	  && optimize_bb_for_speed_p (bb)
729 	  && branch_count + fallthru_count > count_threshold
730 	  && (branch_count
731 	      > fallthru_count.apply_scale
732 		    (param_align_loop_iterations, 1)))
733 	{
734 	  align_flags alignment = LOOP_ALIGN (label);
735 	  if (dump_file)
736 	    fprintf (dump_file, "  internal loop alignment added.\n");
737 	  max_alignment = align_flags::max (max_alignment, alignment);
738 	}
739       LABEL_TO_ALIGNMENT (label) = max_alignment;
740     }
741 
742   loop_optimizer_finalize ();
743   free_dominance_info (CDI_DOMINATORS);
744   return 0;
745 }
746 
747 /* Grow the LABEL_ALIGN array after new labels are created.  */
748 
749 static void
750 grow_label_align (void)
751 {
752   int old = max_labelno;
753   int n_labels;
754   int n_old_labels;
755 
756   max_labelno = max_label_num ();
757 
758   n_labels = max_labelno - min_labelno + 1;
759   n_old_labels = old - min_labelno + 1;
760 
761   label_align.safe_grow_cleared (n_labels, true);
762 
763   /* Range of labels grows monotonically in the function.  Failing here
764      means that the initialization of the array got lost.  */
765   gcc_assert (n_old_labels <= n_labels);
766 }
767 
768 /* Update the already computed alignment information.  LABEL_PAIRS is a vector
769    made up of pairs of labels for which the alignment information of the first
770    element will be copied from that of the second element.  */
771 
772 void
773 update_alignments (vec<rtx> &label_pairs)
774 {
775   unsigned int i = 0;
776   rtx iter, label = NULL_RTX;
777 
778   if (max_labelno != max_label_num ())
779     grow_label_align ();
780 
781   FOR_EACH_VEC_ELT (label_pairs, i, iter)
782     if (i & 1)
783       LABEL_TO_ALIGNMENT (label) = LABEL_TO_ALIGNMENT (iter);
784     else
785       label = iter;
786 }
787 
788 namespace {
789 
790 const pass_data pass_data_compute_alignments =
791 {
792   RTL_PASS, /* type */
793   "alignments", /* name */
794   OPTGROUP_NONE, /* optinfo_flags */
795   TV_NONE, /* tv_id */
796   0, /* properties_required */
797   0, /* properties_provided */
798   0, /* properties_destroyed */
799   0, /* todo_flags_start */
800   0, /* todo_flags_finish */
801 };
802 
803 class pass_compute_alignments : public rtl_opt_pass
804 {
805 public:
806   pass_compute_alignments (gcc::context *ctxt)
807     : rtl_opt_pass (pass_data_compute_alignments, ctxt)
808   {}
809 
810   /* opt_pass methods: */
811   virtual unsigned int execute (function *) { return compute_alignments (); }
812 
813 }; // class pass_compute_alignments
814 
815 } // anon namespace
816 
817 rtl_opt_pass *
818 make_pass_compute_alignments (gcc::context *ctxt)
819 {
820   return new pass_compute_alignments (ctxt);
821 }
822 
823 
824 /* Make a pass over all insns and compute their actual lengths by shortening
825    any branches of variable length if possible.  */
826 
827 /* shorten_branches might be called multiple times:  for example, the SH
828    port splits out-of-range conditional branches in MACHINE_DEPENDENT_REORG.
829    In order to do this, it needs proper length information, which it obtains
830    by calling shorten_branches.  This cannot be collapsed with
831    shorten_branches itself into a single pass unless we also want to integrate
832    reorg.cc, since the branch splitting exposes new instructions with delay
833    slots.  */
834 
835 void
836 shorten_branches (rtx_insn *first)
837 {
838   rtx_insn *insn;
839   int max_uid;
840   int i;
841   rtx_insn *seq;
842   int something_changed = 1;
843   char *varying_length;
844   rtx body;
845   int uid;
846   rtx align_tab[MAX_CODE_ALIGN + 1];
847 
848   /* Compute maximum UID and allocate label_align / uid_shuid.  */
849   max_uid = get_max_uid ();
850 
851   /* Free uid_shuid before reallocating it.  */
852   free (uid_shuid);
853 
854   uid_shuid = XNEWVEC (int, max_uid);
855 
856   if (max_labelno != max_label_num ())
857     grow_label_align ();
858 
859   /* Initialize label_align and set up uid_shuid to be strictly
860      monotonically rising with insn order.  */
861   /* We use alignment here to keep track of the maximum alignment we want to
862      impose on the next CODE_LABEL (or the current one if we are processing
863      the CODE_LABEL itself).  */
864 
865   align_flags max_alignment;
866 
867   for (insn = get_insns (), i = 1; insn; insn = NEXT_INSN (insn))
868     {
869       INSN_SHUID (insn) = i++;
870       if (INSN_P (insn))
871 	continue;
872 
873       if (rtx_code_label *label = dyn_cast <rtx_code_label *> (insn))
874 	{
875 	  /* Merge in alignments computed by compute_alignments.  */
876 	  align_flags alignment = LABEL_TO_ALIGNMENT (label);
877 	  max_alignment = align_flags::max (max_alignment, alignment);
878 
879 	  rtx_jump_table_data *table = jump_table_for_label (label);
880 	  if (!table)
881 	    {
882 	      align_flags alignment = LABEL_ALIGN (label);
883 	      max_alignment = align_flags::max (max_alignment, alignment);
884 	    }
885 	  /* ADDR_VECs only take room if read-only data goes into the text
886 	     section.  */
887 	  if ((JUMP_TABLES_IN_TEXT_SECTION
888 	       || readonly_data_section == text_section)
889 	      && table)
890 	    {
891 	      align_flags alignment = align_flags (ADDR_VEC_ALIGN (table));
892 	      max_alignment = align_flags::max (max_alignment, alignment);
893 	    }
894 	  LABEL_TO_ALIGNMENT (label) = max_alignment;
895 	  max_alignment = align_flags ();
896 	}
897       else if (BARRIER_P (insn))
898 	{
899 	  rtx_insn *label;
900 
901 	  for (label = insn; label && ! INSN_P (label);
902 	       label = NEXT_INSN (label))
903 	    if (LABEL_P (label))
904 	      {
905 		align_flags alignment
906 		  = align_flags (LABEL_ALIGN_AFTER_BARRIER (insn));
907 		max_alignment = align_flags::max (max_alignment, alignment);
908 		break;
909 	      }
910 	}
911     }
912   if (!HAVE_ATTR_length)
913     return;
914 
915   /* Allocate the rest of the arrays.  */
916   insn_lengths = XNEWVEC (int, max_uid);
917   insn_lengths_max_uid = max_uid;
918   /* Syntax errors can lead to labels being outside of the main insn stream.
919      Initialize insn_addresses, so that we get reproducible results.  */
920   INSN_ADDRESSES_ALLOC (max_uid);
921 
922   varying_length = XCNEWVEC (char, max_uid);
923 
924   /* Initialize uid_align.  We scan instructions
925      from end to start, and keep in align_tab[n] the last seen insn
926      that does an alignment of at least n+1, i.e. the successor
927      in the alignment chain for an insn that does / has a known
928      alignment of n.  */
929   uid_align = XCNEWVEC (rtx, max_uid);
930 
931   for (i = MAX_CODE_ALIGN + 1; --i >= 0;)
932     align_tab[i] = NULL_RTX;
933   seq = get_last_insn ();
934   for (; seq; seq = PREV_INSN (seq))
935     {
936       int uid = INSN_UID (seq);
937       int log;
938       log = (LABEL_P (seq) ? LABEL_TO_ALIGNMENT (seq).levels[0].log : 0);
939       uid_align[uid] = align_tab[0];
940       if (log)
941 	{
942 	  /* Found an alignment label.  */
943 	  gcc_checking_assert (log < MAX_CODE_ALIGN + 1);
944 	  uid_align[uid] = align_tab[log];
945 	  for (i = log - 1; i >= 0; i--)
946 	    align_tab[i] = seq;
947 	}
948     }
949 
950   /* When optimizing, we start assuming minimum length, and keep increasing
951      lengths as we find the need for this, till nothing changes.
952      When not optimizing, we start assuming maximum lengths, and
953      do a single pass to update the lengths.  */
954   bool increasing = optimize != 0;
955 
956 #ifdef CASE_VECTOR_SHORTEN_MODE
957   if (optimize)
958     {
959       /* Look for ADDR_DIFF_VECs, and initialize their minimum and maximum
960          label fields.  */
961 
962       int min_shuid = INSN_SHUID (get_insns ()) - 1;
963       int max_shuid = INSN_SHUID (get_last_insn ()) + 1;
964       int rel;
965 
966       for (insn = first; insn != 0; insn = NEXT_INSN (insn))
967 	{
968 	  rtx min_lab = NULL_RTX, max_lab = NULL_RTX, pat;
969 	  int len, i, min, max, insn_shuid;
970 	  int min_align;
971 	  addr_diff_vec_flags flags;
972 
973 	  if (! JUMP_TABLE_DATA_P (insn)
974 	      || GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
975 	    continue;
976 	  pat = PATTERN (insn);
977 	  len = XVECLEN (pat, 1);
978 	  gcc_assert (len > 0);
979 	  min_align = MAX_CODE_ALIGN;
980 	  for (min = max_shuid, max = min_shuid, i = len - 1; i >= 0; i--)
981 	    {
982 	      rtx lab = XEXP (XVECEXP (pat, 1, i), 0);
983 	      int shuid = INSN_SHUID (lab);
984 	      if (shuid < min)
985 		{
986 		  min = shuid;
987 		  min_lab = lab;
988 		}
989 	      if (shuid > max)
990 		{
991 		  max = shuid;
992 		  max_lab = lab;
993 		}
994 
995 	      int label_alignment = LABEL_TO_ALIGNMENT (lab).levels[0].log;
996 	      if (min_align > label_alignment)
997 		min_align = label_alignment;
998 	    }
999 	  XEXP (pat, 2) = gen_rtx_LABEL_REF (Pmode, min_lab);
1000 	  XEXP (pat, 3) = gen_rtx_LABEL_REF (Pmode, max_lab);
1001 	  insn_shuid = INSN_SHUID (insn);
1002 	  rel = INSN_SHUID (XEXP (XEXP (pat, 0), 0));
1003 	  memset (&flags, 0, sizeof (flags));
1004 	  flags.min_align = min_align;
1005 	  flags.base_after_vec = rel > insn_shuid;
1006 	  flags.min_after_vec  = min > insn_shuid;
1007 	  flags.max_after_vec  = max > insn_shuid;
1008 	  flags.min_after_base = min > rel;
1009 	  flags.max_after_base = max > rel;
1010 	  ADDR_DIFF_VEC_FLAGS (pat) = flags;
1011 
1012 	  if (increasing)
1013 	    PUT_MODE (pat, CASE_VECTOR_SHORTEN_MODE (0, 0, pat));
1014 	}
1015     }
1016 #endif /* CASE_VECTOR_SHORTEN_MODE */
1017 
1018   /* Compute initial lengths, addresses, and varying flags for each insn.  */
1019   int (*length_fun) (rtx_insn *) = increasing ? insn_min_length : insn_default_length;
1020 
1021   for (insn_current_address = 0, insn = first;
1022        insn != 0;
1023        insn_current_address += insn_lengths[uid], insn = NEXT_INSN (insn))
1024     {
1025       uid = INSN_UID (insn);
1026 
1027       insn_lengths[uid] = 0;
1028 
1029       if (LABEL_P (insn))
1030 	{
1031 	  int log = LABEL_TO_ALIGNMENT (insn).levels[0].log;
1032 	  if (log)
1033 	    {
1034 	      int align = 1 << log;
1035 	      int new_address = (insn_current_address + align - 1) & -align;
1036 	      insn_lengths[uid] = new_address - insn_current_address;
1037 	    }
1038 	}
1039 
1040       INSN_ADDRESSES (uid) = insn_current_address + insn_lengths[uid];
1041 
1042       if (NOTE_P (insn) || BARRIER_P (insn)
1043 	  || LABEL_P (insn) || DEBUG_INSN_P (insn))
1044 	continue;
1045       if (insn->deleted ())
1046 	continue;
1047 
1048       body = PATTERN (insn);
1049       if (rtx_jump_table_data *table = dyn_cast <rtx_jump_table_data *> (insn))
1050 	{
1051 	  /* This only takes room if read-only data goes into the text
1052 	     section.  */
1053 	  if (JUMP_TABLES_IN_TEXT_SECTION
1054 	      || readonly_data_section == text_section)
1055 	    insn_lengths[uid] = (XVECLEN (body,
1056 					  GET_CODE (body) == ADDR_DIFF_VEC)
1057 				 * GET_MODE_SIZE (table->get_data_mode ()));
1058 	  /* Alignment is handled by ADDR_VEC_ALIGN.  */
1059 	}
1060       else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
1061 	insn_lengths[uid] = asm_insn_count (body) * insn_default_length (insn);
1062       else if (rtx_sequence *body_seq = dyn_cast <rtx_sequence *> (body))
1063 	{
1064 	  int i;
1065 	  int const_delay_slots;
1066 	  if (DELAY_SLOTS)
1067 	    const_delay_slots = const_num_delay_slots (body_seq->insn (0));
1068 	  else
1069 	    const_delay_slots = 0;
1070 
1071 	  int (*inner_length_fun) (rtx_insn *)
1072 	    = const_delay_slots ? length_fun : insn_default_length;
1073 	  /* Inside a delay slot sequence, we do not do any branch shortening
1074 	     if the shortening could change the number of delay slots
1075 	     of the branch.  */
1076 	  for (i = 0; i < body_seq->len (); i++)
1077 	    {
1078 	      rtx_insn *inner_insn = body_seq->insn (i);
1079 	      int inner_uid = INSN_UID (inner_insn);
1080 	      int inner_length;
1081 
1082 	      if (GET_CODE (PATTERN (inner_insn)) == ASM_INPUT
1083 		  || asm_noperands (PATTERN (inner_insn)) >= 0)
1084 		inner_length = (asm_insn_count (PATTERN (inner_insn))
1085 				* insn_default_length (inner_insn));
1086 	      else
1087 		inner_length = inner_length_fun (inner_insn);
1088 
1089 	      insn_lengths[inner_uid] = inner_length;
1090 	      if (const_delay_slots)
1091 		{
1092 		  if ((varying_length[inner_uid]
1093 		       = insn_variable_length_p (inner_insn)) != 0)
1094 		    varying_length[uid] = 1;
1095 		  INSN_ADDRESSES (inner_uid) = (insn_current_address
1096 						+ insn_lengths[uid]);
1097 		}
1098 	      else
1099 		varying_length[inner_uid] = 0;
1100 	      insn_lengths[uid] += inner_length;
1101 	    }
1102 	}
1103       else if (GET_CODE (body) != USE && GET_CODE (body) != CLOBBER)
1104 	{
1105 	  insn_lengths[uid] = length_fun (insn);
1106 	  varying_length[uid] = insn_variable_length_p (insn);
1107 	}
1108 
1109       /* If needed, do any adjustment.  */
1110 #ifdef ADJUST_INSN_LENGTH
1111       ADJUST_INSN_LENGTH (insn, insn_lengths[uid]);
1112       if (insn_lengths[uid] < 0)
1113 	fatal_insn ("negative insn length", insn);
1114 #endif
1115     }
1116 
1117   /* Now loop over all the insns finding varying length insns.  For each,
1118      get the current insn length.  If it has changed, reflect the change.
1119      When nothing changes for a full pass, we are done.  */
1120 
1121   while (something_changed)
1122     {
1123       something_changed = 0;
1124       insn_current_align = MAX_CODE_ALIGN - 1;
1125       for (insn_current_address = 0, insn = first;
1126 	   insn != 0;
1127 	   insn = NEXT_INSN (insn))
1128 	{
1129 	  int new_length;
1130 #ifdef ADJUST_INSN_LENGTH
1131 	  int tmp_length;
1132 #endif
1133 	  int length_align;
1134 
1135 	  uid = INSN_UID (insn);
1136 
1137 	  if (rtx_code_label *label = dyn_cast <rtx_code_label *> (insn))
1138 	    {
1139 	      int log = LABEL_TO_ALIGNMENT (label).levels[0].log;
1140 
1141 #ifdef CASE_VECTOR_SHORTEN_MODE
1142 	      /* If the mode of a following jump table was changed, we
1143 		 may need to update the alignment of this label.  */
1144 
1145 	      if (JUMP_TABLES_IN_TEXT_SECTION
1146 		  || readonly_data_section == text_section)
1147 		{
1148 		  rtx_jump_table_data *table = jump_table_for_label (label);
1149 		  if (table)
1150 		    {
1151 		      int newlog = ADDR_VEC_ALIGN (table);
1152 		      if (newlog != log)
1153 			{
1154 			  log = newlog;
1155 			  LABEL_TO_ALIGNMENT (insn) = log;
1156 			  something_changed = 1;
1157 			}
1158 		    }
1159 		}
1160 #endif
1161 
1162 	      if (log > insn_current_align)
1163 		{
1164 		  int align = 1 << log;
1165 		  int new_address= (insn_current_address + align - 1) & -align;
1166 		  insn_lengths[uid] = new_address - insn_current_address;
1167 		  insn_current_align = log;
1168 		  insn_current_address = new_address;
1169 		}
1170 	      else
1171 		insn_lengths[uid] = 0;
1172 	      INSN_ADDRESSES (uid) = insn_current_address;
1173 	      continue;
1174 	    }
1175 
1176 	  length_align = INSN_LENGTH_ALIGNMENT (insn);
1177 	  if (length_align < insn_current_align)
1178 	    insn_current_align = length_align;
1179 
1180 	  insn_last_address = INSN_ADDRESSES (uid);
1181 	  INSN_ADDRESSES (uid) = insn_current_address;
1182 
1183 #ifdef CASE_VECTOR_SHORTEN_MODE
1184 	  if (optimize
1185 	      && JUMP_TABLE_DATA_P (insn)
1186 	      && GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
1187 	    {
1188 	      rtx_jump_table_data *table = as_a <rtx_jump_table_data *> (insn);
1189 	      rtx body = PATTERN (insn);
1190 	      int old_length = insn_lengths[uid];
1191 	      rtx_insn *rel_lab =
1192 		safe_as_a <rtx_insn *> (XEXP (XEXP (body, 0), 0));
1193 	      rtx min_lab = XEXP (XEXP (body, 2), 0);
1194 	      rtx max_lab = XEXP (XEXP (body, 3), 0);
1195 	      int rel_addr = INSN_ADDRESSES (INSN_UID (rel_lab));
1196 	      int min_addr = INSN_ADDRESSES (INSN_UID (min_lab));
1197 	      int max_addr = INSN_ADDRESSES (INSN_UID (max_lab));
1198 	      rtx_insn *prev;
1199 	      int rel_align = 0;
1200 	      addr_diff_vec_flags flags;
1201 	      scalar_int_mode vec_mode;
1202 
1203 	      /* Avoid automatic aggregate initialization.  */
1204 	      flags = ADDR_DIFF_VEC_FLAGS (body);
1205 
1206 	      /* Try to find a known alignment for rel_lab.  */
1207 	      for (prev = rel_lab;
1208 		   prev
1209 		   && ! insn_lengths[INSN_UID (prev)]
1210 		   && ! (varying_length[INSN_UID (prev)] & 1);
1211 		   prev = PREV_INSN (prev))
1212 		if (varying_length[INSN_UID (prev)] & 2)
1213 		  {
1214 		    rel_align = LABEL_TO_ALIGNMENT (prev).levels[0].log;
1215 		    break;
1216 		  }
1217 
1218 	      /* See the comment on addr_diff_vec_flags in rtl.h for the
1219 		 meaning of the flags values.  base: REL_LAB   vec: INSN  */
1220 	      /* Anything after INSN still has addresses from the last
1221 		 pass; adjust these so that they reflect our current
1222 		 estimate for this pass.  */
1223 	      if (flags.base_after_vec)
1224 		rel_addr += insn_current_address - insn_last_address;
1225 	      if (flags.min_after_vec)
1226 		min_addr += insn_current_address - insn_last_address;
1227 	      if (flags.max_after_vec)
1228 		max_addr += insn_current_address - insn_last_address;
1229 	      /* We want to know the worst case, i.e. lowest possible value
1230 		 for the offset of MIN_LAB.  If MIN_LAB is after REL_LAB,
1231 		 its offset is positive, and we have to be wary of code shrink;
1232 		 otherwise, it is negative, and we have to be wary of code
1233 		 size increase.  */
1234 	      if (flags.min_after_base)
1235 		{
1236 		  /* If INSN is between REL_LAB and MIN_LAB, the size
1237 		     changes we are about to make can change the alignment
1238 		     within the observed offset, therefore we have to break
1239 		     it up into two parts that are independent.  */
1240 		  if (! flags.base_after_vec && flags.min_after_vec)
1241 		    {
1242 		      min_addr -= align_fuzz (rel_lab, insn, rel_align, 0);
1243 		      min_addr -= align_fuzz (insn, min_lab, 0, 0);
1244 		    }
1245 		  else
1246 		    min_addr -= align_fuzz (rel_lab, min_lab, rel_align, 0);
1247 		}
1248 	      else
1249 		{
1250 		  if (flags.base_after_vec && ! flags.min_after_vec)
1251 		    {
1252 		      min_addr -= align_fuzz (min_lab, insn, 0, ~0);
1253 		      min_addr -= align_fuzz (insn, rel_lab, 0, ~0);
1254 		    }
1255 		  else
1256 		    min_addr -= align_fuzz (min_lab, rel_lab, 0, ~0);
1257 		}
1258 	      /* Likewise, determine the worst case, i.e. highest possible
1259 		 value for the offset of MAX_LAB.  */
1260 	      if (flags.max_after_base)
1261 		{
1262 		  if (! flags.base_after_vec && flags.max_after_vec)
1263 		    {
1264 		      max_addr += align_fuzz (rel_lab, insn, rel_align, ~0);
1265 		      max_addr += align_fuzz (insn, max_lab, 0, ~0);
1266 		    }
1267 		  else
1268 		    max_addr += align_fuzz (rel_lab, max_lab, rel_align, ~0);
1269 		}
1270 	      else
1271 		{
1272 		  if (flags.base_after_vec && ! flags.max_after_vec)
1273 		    {
1274 		      max_addr += align_fuzz (max_lab, insn, 0, 0);
1275 		      max_addr += align_fuzz (insn, rel_lab, 0, 0);
1276 		    }
1277 		  else
1278 		    max_addr += align_fuzz (max_lab, rel_lab, 0, 0);
1279 		}
1280 	      vec_mode = CASE_VECTOR_SHORTEN_MODE (min_addr - rel_addr,
1281 						   max_addr - rel_addr, body);
1282 	      if (!increasing
1283 		  || (GET_MODE_SIZE (vec_mode)
1284 		      >= GET_MODE_SIZE (table->get_data_mode ())))
1285 		PUT_MODE (body, vec_mode);
1286 	      if (JUMP_TABLES_IN_TEXT_SECTION
1287 		  || readonly_data_section == text_section)
1288 		{
1289 		  insn_lengths[uid]
1290 		    = (XVECLEN (body, 1)
1291 		       * GET_MODE_SIZE (table->get_data_mode ()));
1292 		  insn_current_address += insn_lengths[uid];
1293 		  if (insn_lengths[uid] != old_length)
1294 		    something_changed = 1;
1295 		}
1296 
1297 	      continue;
1298 	    }
1299 #endif /* CASE_VECTOR_SHORTEN_MODE */
1300 
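	  /* Insns whose length cannot change only need the running address
	     advanced (and, for a SEQUENCE, the addresses of the inner insns
	     recorded); no length recomputation is necessary.  */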
1301 	  if (! (varying_length[uid]))
1302 	    {
1303 	      if (NONJUMP_INSN_P (insn)
1304 		  && GET_CODE (PATTERN (insn)) == SEQUENCE)
1305 		{
1306 		  int i;
1307 
1308 		  body = PATTERN (insn);
1309 		  for (i = 0; i < XVECLEN (body, 0); i++)
1310 		    {
1311 		      rtx inner_insn = XVECEXP (body, 0, i);
1312 		      int inner_uid = INSN_UID (inner_insn);
1313 
1314 		      INSN_ADDRESSES (inner_uid) = insn_current_address;
1315 
1316 		      insn_current_address += insn_lengths[inner_uid];
1317 		    }
1318 		}
1319 	      else
1320 		insn_current_address += insn_lengths[uid];
1321 
1322 	      continue;
1323 	    }
1324 
1325 	  if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
1326 	    {
1327 	      rtx_sequence *seqn = as_a <rtx_sequence *> (PATTERN (insn));
1328 	      int i;
1329 
1330 	      body = PATTERN (insn);
1331 	      new_length = 0;
1332 	      for (i = 0; i < seqn->len (); i++)
1333 		{
1334 		  rtx_insn *inner_insn = seqn->insn (i);
1335 		  int inner_uid = INSN_UID (inner_insn);
1336 		  int inner_length;
1337 
1338 		  INSN_ADDRESSES (inner_uid) = insn_current_address;
1339 
1340 		  /* insn_current_length returns 0 for insns with a
1341 		     non-varying length.  */
1342 		  if (! varying_length[inner_uid])
1343 		    inner_length = insn_lengths[inner_uid];
1344 		  else
1345 		    inner_length = insn_current_length (inner_insn);
1346 
1347 		  if (inner_length != insn_lengths[inner_uid])
1348 		    {
1349 		      if (!increasing || inner_length > insn_lengths[inner_uid])
1350 			{
1351 			  insn_lengths[inner_uid] = inner_length;
1352 			  something_changed = 1;
1353 			}
1354 		      else
1355 			inner_length = insn_lengths[inner_uid];
1356 		    }
1357 		  insn_current_address += inner_length;
1358 		  new_length += inner_length;
1359 		}
1360 	    }
1361 	  else
1362 	    {
1363 	      new_length = insn_current_length (insn);
1364 	      insn_current_address += new_length;
1365 	    }
1366 
1367 #ifdef ADJUST_INSN_LENGTH
1368 	  /* If needed, do any adjustment.  */
1369 	  tmp_length = new_length;
1370 	  ADJUST_INSN_LENGTH (insn, new_length);
1371 	  insn_current_address += (new_length - tmp_length);
1372 #endif
1373 
1374 	  if (new_length != insn_lengths[uid]
1375 	      && (!increasing || new_length > insn_lengths[uid]))
1376 	    {
1377 	      insn_lengths[uid] = new_length;
1378 	      something_changed = 1;
1379 	    }
1380 	  else
1381 	    insn_current_address += insn_lengths[uid] - new_length;
1382 	}
1383       /* For a non-optimizing compile, do only a single pass.  */
1384       if (!increasing)
1385 	break;
1386     }
1387   crtl->max_insn_address = insn_current_address;
1388   free (varying_length);
1389 }
1390 
1391 /* Given the body of an INSN known to be generated by an ASM statement, return
1392    the number of machine instructions likely to be generated for this insn.
1393    This is used to compute its length.  */
1394 
1395 static int
1396 asm_insn_count (rtx body)
1397 {
1398   const char *templ;
1399 
1400   if (GET_CODE (body) == ASM_INPUT)
1401     templ = XSTR (body, 0);
1402   else
1403     templ = decode_asm_operands (body, NULL, NULL, NULL, NULL, NULL);
1404 
1405   return asm_str_count (templ);
1406 }
1407 
1408 /* Return the number of machine instructions likely to be generated for the
1409    inline-asm template. */
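/* For example, with the default IS_ASM_LOGICAL_LINE_SEPARATOR, the template
   "nop; nop\n\tnop" contains one ';' and one '\n', so the count is 3.  */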
1410 int
1411 asm_str_count (const char *templ)
1412 {
1413   int count = 1;
1414 
1415   if (!*templ)
1416     return 0;
1417 
1418   for (; *templ; templ++)
1419     if (IS_ASM_LOGICAL_LINE_SEPARATOR (*templ, templ)
1420 	|| *templ == '\n')
1421       count++;
1422 
1423   return count;
1424 }
1425 
1426 /* Return true if DWARF2 debug info can be emitted for DECL.  */
1427 
1428 static bool
1429 dwarf2_debug_info_emitted_p (tree decl)
1430 {
1431   /* When DWARF2 debug info is not generated internally.  */
1432   if (!dwarf_debuginfo_p () && !dwarf_based_debuginfo_p ())
1433     return false;
1434 
1435   if (DECL_IGNORED_P (decl))
1436     return false;
1437 
1438   return true;
1439 }
1440 
1441 /* Return scope resulting from combination of S1 and S2.  */
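/* For nested scopes this is the inner one, i.e. the one with the larger
   BLOCK_NUMBER; a NULL argument simply yields the other scope.  */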
1442 static tree
1443 choose_inner_scope (tree s1, tree s2)
1444 {
1445    if (!s1)
1446      return s2;
1447    if (!s2)
1448      return s1;
1449    if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2))
1450      return s1;
1451    return s2;
1452 }
1453 
1454 /* Emit lexical block notes needed to change scope from S1 to S2.  */
1455 
1456 static void
1457 change_scope (rtx_insn *orig_insn, tree s1, tree s2)
1458 {
1459   rtx_insn *insn = orig_insn;
1460   tree com = NULL_TREE;
1461   tree ts1 = s1, ts2 = s2;
1462   tree s;
1463 
1464   while (ts1 != ts2)
1465     {
1466       gcc_assert (ts1 && ts2);
1467       if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
1468 	ts1 = BLOCK_SUPERCONTEXT (ts1);
1469       else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
1470 	ts2 = BLOCK_SUPERCONTEXT (ts2);
1471       else
1472 	{
1473 	  ts1 = BLOCK_SUPERCONTEXT (ts1);
1474 	  ts2 = BLOCK_SUPERCONTEXT (ts2);
1475 	}
1476     }
1477   com = ts1;
1478 
1479   /* Close scopes.  */
1480   s = s1;
1481   while (s != com)
1482     {
1483       rtx_note *note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
1484       NOTE_BLOCK (note) = s;
1485       s = BLOCK_SUPERCONTEXT (s);
1486     }
1487 
1488   /* Open scopes.  */
1489   s = s2;
1490   while (s != com)
1491     {
1492       insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
1493       NOTE_BLOCK (insn) = s;
1494       s = BLOCK_SUPERCONTEXT (s);
1495     }
1496 }
1497 
1498 /* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
1499    on the scope tree and the newly reordered instructions.  */
1500 
1501 static void
1502 reemit_insn_block_notes (void)
1503 {
1504   tree cur_block = DECL_INITIAL (cfun->decl);
1505   rtx_insn *insn;
1506 
1507   insn = get_insns ();
1508   for (; insn; insn = NEXT_INSN (insn))
1509     {
1510       tree this_block;
1511 
1512       /* Prevent lexical blocks from straddling section boundaries.  */
1513       if (NOTE_P (insn))
1514 	switch (NOTE_KIND (insn))
1515 	  {
1516 	  case NOTE_INSN_SWITCH_TEXT_SECTIONS:
1517 	    {
1518 	      for (tree s = cur_block; s != DECL_INITIAL (cfun->decl);
1519 		   s = BLOCK_SUPERCONTEXT (s))
1520 		{
1521 		  rtx_note *note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
1522 		  NOTE_BLOCK (note) = s;
1523 		  note = emit_note_after (NOTE_INSN_BLOCK_BEG, insn);
1524 		  NOTE_BLOCK (note) = s;
1525 		}
1526 	    }
1527 	    break;
1528 
1529 	  case NOTE_INSN_BEGIN_STMT:
1530 	  case NOTE_INSN_INLINE_ENTRY:
1531 	    this_block = LOCATION_BLOCK (NOTE_MARKER_LOCATION (insn));
1532 	    goto set_cur_block_to_this_block;
1533 
1534 	  default:
1535 	    continue;
1536 	}
1537 
1538       if (!active_insn_p (insn))
1539         continue;
1540 
1541       /* Avoid putting scope notes between jump table and its label.  */
1542       if (JUMP_TABLE_DATA_P (insn))
1543 	continue;
1544 
1545       this_block = insn_scope (insn);
1546       /* For sequences compute scope resulting from merging all scopes
1547 	 of instructions nested inside.  */
1548       if (rtx_sequence *body = dyn_cast <rtx_sequence *> (PATTERN (insn)))
1549 	{
1550 	  int i;
1551 
1552 	  this_block = NULL;
1553 	  for (i = 0; i < body->len (); i++)
1554 	    this_block = choose_inner_scope (this_block,
1555 					     insn_scope (body->insn (i)));
1556 	}
1557     set_cur_block_to_this_block:
1558       if (! this_block)
1559 	{
1560 	  if (INSN_LOCATION (insn) == UNKNOWN_LOCATION)
1561 	    continue;
1562 	  else
1563 	    this_block = DECL_INITIAL (cfun->decl);
1564 	}
1565 
1566       if (this_block != cur_block)
1567 	{
1568 	  change_scope (insn, cur_block, this_block);
1569 	  cur_block = this_block;
1570 	}
1571     }
1572 
1573   /* change_scope emits before the insn, not after.  */
1574   rtx_note *note = emit_note (NOTE_INSN_DELETED);
1575   change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
1576   delete_insn (note);
1577 
1578   reorder_blocks ();
1579 }
1580 
1581 static const char *some_local_dynamic_name;
1582 
1583 /* Locate some local-dynamic symbol still in use by this function
1584    so that we can print its name in local-dynamic base patterns.
1585    Return null if there are no local-dynamic references.  */
1586 
1587 const char *
1588 get_some_local_dynamic_name ()
1589 {
1590   subrtx_iterator::array_type array;
1591   rtx_insn *insn;
1592 
1593   if (some_local_dynamic_name)
1594     return some_local_dynamic_name;
1595 
1596   for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
1597     if (NONDEBUG_INSN_P (insn))
1598       FOR_EACH_SUBRTX (iter, array, PATTERN (insn), ALL)
1599 	{
1600 	  const_rtx x = *iter;
1601 	  if (GET_CODE (x) == SYMBOL_REF)
1602 	    {
1603 	      if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
1604 		return some_local_dynamic_name = XSTR (x, 0);
1605 	      if (CONSTANT_POOL_ADDRESS_P (x))
1606 		iter.substitute (get_pool_constant (x));
1607 	    }
1608 	}
1609 
1610   return 0;
1611 }
1612 
1613 /* Arrange for us to emit a source location note before any further
1614    real insns or section changes, by setting the SEEN_NEXT_VIEW bit in
1615    *SEEN, as long as we are keeping track of location views.  The bit
1616    indicates we have referenced the next view at the current PC, so we
1617    have to emit it.  This should be called next to the var_location
1618    debug hook.  */
1619 
1620 static inline void
1621 set_next_view_needed (int *seen)
1622 {
1623   if (debug_variable_location_views)
1624     *seen |= SEEN_NEXT_VIEW;
1625 }
1626 
1627 /* Clear the flag in *SEEN indicating we need to emit the next view.
1628    This should be called next to the source_line debug hook.  */
1629 
1630 static inline void
1631 clear_next_view_needed (int *seen)
1632 {
1633   *seen &= ~SEEN_NEXT_VIEW;
1634 }
1635 
1636 /* Test whether we have a pending request to emit the next view in
1637    *SEEN, and emit it if needed, clearing the request bit.  */
1638 
1639 static inline void
1640 maybe_output_next_view (int *seen)
1641 {
1642   if ((*seen & SEEN_NEXT_VIEW) != 0)
1643     {
1644       clear_next_view_needed (seen);
1645       (*debug_hooks->source_line) (last_linenum, last_columnnum,
1646 				   last_filename, last_discriminator,
1647 				   false);
1648     }
1649 }
1650 
1651 /* We want to emit param bindings (before the first begin_stmt) in the
1652    initial view, if we are emitting views.  To that end, we may
1653    consume initial notes in the function, processing them in
1654    final_start_function, before signaling the beginning of the
1655    prologue, rather than in final.
1656 
1657    We don't test whether the DECLs are PARM_DECLs: the assumption is
1658    that there will be a NOTE_INSN_BEGIN_STMT marker before any
1659    non-parameter NOTE_INSN_VAR_LOCATION.  It's ok if the marker is not
1660    there, we'll just have more variable locations bound in the initial
1661    view, which is consistent with their being bound without any code
1662    that would give them a value.  */
1663 
1664 static inline bool
1665 in_initial_view_p (rtx_insn *insn)
1666 {
1667   return (!DECL_IGNORED_P (current_function_decl)
1668 	  && debug_variable_location_views
1669 	  && insn && GET_CODE (insn) == NOTE
1670 	  && (NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
1671 	      || NOTE_KIND (insn) == NOTE_INSN_DELETED));
1672 }
1673 
1674 /* Output assembler code for the start of a function,
1675    and initialize some of the variables in this file
1676    for the new function.  The label for the function and associated
1677    assembler pseudo-ops have already been output in `assemble_start_function'.
1678 
1679    FIRST is the first insn of the rtl for the function being compiled.
1680    FILE is the file to write assembler code to.
1681    SEEN should be initially set to zero, and it may be updated to
1682    indicate we have references to the next location view, that would
1683    require us to emit it at the current PC.
1684    OPTIMIZE_P is nonzero if we should eliminate redundant
1685      test and compare insns.  */
1686 
1687 static void
1688 final_start_function_1 (rtx_insn **firstp, FILE *file, int *seen,
1689 			int optimize_p ATTRIBUTE_UNUSED)
1690 {
1691   block_depth = 0;
1692 
1693   this_is_asm_operands = 0;
1694 
1695   need_profile_function = false;
1696 
1697   last_filename = LOCATION_FILE (prologue_location);
1698   last_linenum = LOCATION_LINE (prologue_location);
1699   last_columnnum = LOCATION_COLUMN (prologue_location);
1700   last_discriminator = discriminator = 0;
1701   last_bb_discriminator = bb_discriminator = 0;
1702   force_source_line = false;
1703 
1704   high_block_linenum = high_function_linenum = last_linenum;
1705 
1706   if (flag_sanitize & SANITIZE_ADDRESS)
1707     asan_function_start ();
1708 
1709   rtx_insn *first = *firstp;
1710   if (in_initial_view_p (first))
1711     {
1712       do
1713 	{
1714 	  final_scan_insn (first, file, 0, 0, seen);
1715 	  first = NEXT_INSN (first);
1716 	}
1717       while (in_initial_view_p (first));
1718       *firstp = first;
1719     }
1720 
1721   if (!DECL_IGNORED_P (current_function_decl))
1722     debug_hooks->begin_prologue (last_linenum, last_columnnum,
1723 				 last_filename);
1724 
1725   if (!dwarf2_debug_info_emitted_p (current_function_decl))
1726     dwarf2out_begin_prologue (0, 0, NULL);
1727 
1728   if (DECL_IGNORED_P (current_function_decl) && last_linenum && last_filename)
1729     debug_hooks->set_ignored_loc (last_linenum, last_columnnum, last_filename);
1730 
1731 #ifdef LEAF_REG_REMAP
1732   if (crtl->uses_only_leaf_regs)
1733     leaf_renumber_regs (first);
1734 #endif
1735 
1736   /* The Sun386i and perhaps other machines don't work right
1737      if the profiling code comes after the prologue.  */
1738   if (targetm.profile_before_prologue () && crtl->profile)
1739     {
1740       if (targetm.asm_out.function_prologue == default_function_pro_epilogue
1741 	  && targetm.have_prologue ())
1742 	{
1743 	  rtx_insn *insn;
1744 	  for (insn = first; insn; insn = NEXT_INSN (insn))
1745 	    if (!NOTE_P (insn))
1746 	      {
1747 		insn = NULL;
1748 		break;
1749 	      }
1750 	    else if (NOTE_KIND (insn) == NOTE_INSN_BASIC_BLOCK
1751 		     || NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
1752 	      break;
1753 	    else if (NOTE_KIND (insn) == NOTE_INSN_DELETED
1754 		     || NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION)
1755 	      continue;
1756 	    else
1757 	      {
1758 		insn = NULL;
1759 		break;
1760 	      }
1761 
1762 	  if (insn)
1763 	    need_profile_function = true;
1764 	  else
1765 	    profile_function (file);
1766 	}
1767       else
1768 	profile_function (file);
1769     }
1770 
1771   /* If debugging, assign block numbers to all of the blocks in this
1772      function.  */
1773   if (write_symbols)
1774     {
1775       reemit_insn_block_notes ();
1776       number_blocks (current_function_decl);
1777       /* We never actually put out begin/end notes for the top-level
1778 	 block in the function.  But, conceptually, that block is
1779 	 always needed.  */
1780       TREE_ASM_WRITTEN (DECL_INITIAL (current_function_decl)) = 1;
1781     }
1782 
1783   unsigned HOST_WIDE_INT min_frame_size
1784     = constant_lower_bound (get_frame_size ());
1785   if (min_frame_size > (unsigned HOST_WIDE_INT) warn_frame_larger_than_size)
1786     {
1787       /* Issue a warning */
1788       warning (OPT_Wframe_larger_than_,
1789 	       "the frame size of %wu bytes is larger than %wu bytes",
1790 	       min_frame_size, warn_frame_larger_than_size);
1791     }
1792 
1793   /* First output the function prologue: code to set up the stack frame.  */
1794   targetm.asm_out.function_prologue (file);
1795 
1796   /* If the machine represents the prologue as RTL, the profiling code must
1797      be emitted when NOTE_INSN_PROLOGUE_END is scanned.  */
1798   if (! targetm.have_prologue ())
1799     profile_after_prologue (file);
1800 }
1801 
1802 /* This is an exported final_start_function_1, callable without SEEN.  */
1803 
1804 void
1805 final_start_function (rtx_insn *first, FILE *file,
1806 		      int optimize_p ATTRIBUTE_UNUSED)
1807 {
1808   int seen = 0;
1809   final_start_function_1 (&first, file, &seen, optimize_p);
1810   gcc_assert (seen == 0);
1811 }
1812 
1813 static void
1814 profile_after_prologue (FILE *file ATTRIBUTE_UNUSED)
1815 {
1816   if (!targetm.profile_before_prologue () && crtl->profile)
1817     profile_function (file);
1818 }
1819 
1820 static void
1821 profile_function (FILE *file ATTRIBUTE_UNUSED)
1822 {
1823 #ifndef NO_PROFILE_COUNTERS
1824 # define NO_PROFILE_COUNTERS	0
1825 #endif
1826 #ifdef ASM_OUTPUT_REG_PUSH
1827   rtx sval = NULL, chain = NULL;
1828 
1829   if (cfun->returns_struct)
1830     sval = targetm.calls.struct_value_rtx (TREE_TYPE (current_function_decl),
1831 					   true);
1832   if (cfun->static_chain_decl)
1833     chain = targetm.calls.static_chain (current_function_decl, true);
1834 #endif /* ASM_OUTPUT_REG_PUSH */
1835 
1836   if (! NO_PROFILE_COUNTERS)
1837     {
1838       int align = MIN (BIGGEST_ALIGNMENT, LONG_TYPE_SIZE);
1839       switch_to_section (data_section);
1840       ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
1841       targetm.asm_out.internal_label (file, "LP", current_function_funcdef_no);
1842       assemble_integer (const0_rtx, LONG_TYPE_SIZE / BITS_PER_UNIT, align, 1);
1843     }
1844 
1845   switch_to_section (current_function_section ());
1846 
1847 #ifdef ASM_OUTPUT_REG_PUSH
1848   if (sval && REG_P (sval))
1849     ASM_OUTPUT_REG_PUSH (file, REGNO (sval));
1850   if (chain && REG_P (chain))
1851     ASM_OUTPUT_REG_PUSH (file, REGNO (chain));
1852 #endif
1853 
1854   FUNCTION_PROFILER (file, current_function_funcdef_no);
1855 
1856 #ifdef ASM_OUTPUT_REG_PUSH
1857   if (chain && REG_P (chain))
1858     ASM_OUTPUT_REG_POP (file, REGNO (chain));
1859   if (sval && REG_P (sval))
1860     ASM_OUTPUT_REG_POP (file, REGNO (sval));
1861 #endif
1862 }
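/* Illustrative sketch of the resulting assembly on a typical ELF
   target (not authoritative; all of it is target-defined): when
   NO_PROFILE_COUNTERS is 0, a zero-initialized, suitably aligned
   counter word is emitted in the data section under the internal
   label "LP<funcdef_no>"; back in the function's section,
   FUNCTION_PROFILER then emits the actual instrumentation, commonly
   a call to mcount or __fentry__, bracketed by pushes/pops of the
   struct-value and static-chain registers when the target provides
   ASM_OUTPUT_REG_PUSH.  */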
1863 
1864 /* Output assembler code for the end of a function.
1865    For clarity, args are same as those of `final_start_function'
1866    even though not all of them are needed.  */
1867 
1868 void
1869 final_end_function (void)
1870 {
1871   app_disable ();
1872 
1873   if (!DECL_IGNORED_P (current_function_decl))
1874     debug_hooks->end_function (high_function_linenum);
1875 
1876   /* Finally, output the function epilogue:
1877      code to restore the stack frame and return to the caller.  */
1878   targetm.asm_out.function_epilogue (asm_out_file);
1879 
1880   /* And debug output.  */
1881   if (!DECL_IGNORED_P (current_function_decl))
1882     debug_hooks->end_epilogue (last_linenum, last_filename);
1883 
1884   if (!dwarf2_debug_info_emitted_p (current_function_decl)
1885       && dwarf2out_do_frame ())
1886     dwarf2out_end_epilogue (last_linenum, last_filename);
1887 
1888   some_local_dynamic_name = 0;
1889 }
1890 
1891 
1892 /* Dumper helper for basic block information. FILE is the assembly
1893    output file, and INSN is the instruction being emitted.  */
1894 
1895 static void
1896 dump_basic_block_info (FILE *file, rtx_insn *insn, basic_block *start_to_bb,
1897                        basic_block *end_to_bb, int bb_map_size, int *bb_seqn)
1898 {
1899   basic_block bb;
1900 
1901   if (!flag_debug_asm)
1902     return;
1903 
1904   if (INSN_UID (insn) < bb_map_size
1905       && (bb = start_to_bb[INSN_UID (insn)]) != NULL)
1906     {
1907       edge e;
1908       edge_iterator ei;
1909 
1910       fprintf (file, "%s BLOCK %d", ASM_COMMENT_START, bb->index);
1911       if (bb->count.initialized_p ())
1912 	{
1913           fprintf (file, ", count:");
1914 	  bb->count.dump (file);
1915 	}
1916       fprintf (file, " seq:%d", (*bb_seqn)++);
1917       fprintf (file, "\n%s PRED:", ASM_COMMENT_START);
1918       FOR_EACH_EDGE (e, ei, bb->preds)
1919         {
1920           dump_edge_info (file, e, TDF_DETAILS, 0);
1921         }
1922       fprintf (file, "\n");
1923     }
1924   if (INSN_UID (insn) < bb_map_size
1925       && (bb = end_to_bb[INSN_UID (insn)]) != NULL)
1926     {
1927       edge e;
1928       edge_iterator ei;
1929 
1930       fprintf (asm_out_file, "%s SUCC:", ASM_COMMENT_START);
1931       FOR_EACH_EDGE (e, ei, bb->succs)
1932        {
1933          dump_edge_info (asm_out_file, e, TDF_DETAILS, 1);
1934        }
1935       fprintf (file, "\n");
1936     }
1937 }
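/* With -dA the annotations produced above look roughly like this in
   the assembly output (illustration only; ASM_COMMENT_START is "#"
   here, and the edge text comes from dump_edge_info):

     # BLOCK 3, count:1073741824 seq:0
     # PRED: 2 (FALLTHRU)
       ...insns of block 3...
     # SUCC: 4 (FALLTHRU)  */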
1938 
1939 /* Output assembler code for some insns: all or part of a function.
1940    For description of args, see `final_start_function', above.  */
1941 
1942 static void
1943 final_1 (rtx_insn *first, FILE *file, int seen, int optimize_p)
1944 {
1945   rtx_insn *insn, *next;
1946 
1947   /* Used for -dA dump.  */
1948   basic_block *start_to_bb = NULL;
1949   basic_block *end_to_bb = NULL;
1950   int bb_map_size = 0;
1951   int bb_seqn = 0;
1952 
1953   last_ignored_compare = 0;
1954 
1955   init_recog ();
1956 
1957   CC_STATUS_INIT;
1958 
1959   if (flag_debug_asm)
1960     {
1961       basic_block bb;
1962 
1963       bb_map_size = get_max_uid () + 1;
1964       start_to_bb = XCNEWVEC (basic_block, bb_map_size);
1965       end_to_bb = XCNEWVEC (basic_block, bb_map_size);
1966 
1967       /* There is no cfg for a thunk.  */
1968       if (!cfun->is_thunk)
1969 	FOR_EACH_BB_REVERSE_FN (bb, cfun)
1970 	  {
1971 	    start_to_bb[INSN_UID (BB_HEAD (bb))] = bb;
1972 	    end_to_bb[INSN_UID (BB_END (bb))] = bb;
1973 	  }
1974     }
1975 
1976   /* Output the insns.  */
1977   for (insn = first; insn;)
1978     {
1979       if (HAVE_ATTR_length)
1980 	{
1981 	  if ((unsigned) INSN_UID (insn) >= INSN_ADDRESSES_SIZE ())
1982 	    {
1983 	      /* This can be triggered by bugs elsewhere in the compiler if
1984 		 new insns are created after init_insn_lengths is called.  */
1985 	      gcc_assert (NOTE_P (insn));
1986 	      insn_current_address = -1;
1987 	    }
1988 	  else
1989 	    insn_current_address = INSN_ADDRESSES (INSN_UID (insn));
1990 	  /* final can be seen as an iteration of shorten_branches that
1991 	     does nothing (since a fixed point has already been reached).  */
1992 	  insn_last_address = insn_current_address;
1993 	}
1994 
1995       dump_basic_block_info (file, insn, start_to_bb, end_to_bb,
1996                              bb_map_size, &bb_seqn);
1997       insn = final_scan_insn (insn, file, optimize_p, 0, &seen);
1998     }
1999 
2000   maybe_output_next_view (&seen);
2001 
2002   if (flag_debug_asm)
2003     {
2004       free (start_to_bb);
2005       free (end_to_bb);
2006     }
2007 
2008   /* Remove CFI notes, to avoid compare-debug failures.  */
2009   for (insn = first; insn; insn = next)
2010     {
2011       next = NEXT_INSN (insn);
2012       if (NOTE_P (insn)
2013 	  && (NOTE_KIND (insn) == NOTE_INSN_CFI
2014 	      || NOTE_KIND (insn) == NOTE_INSN_CFI_LABEL))
2015 	delete_insn (insn);
2016     }
2017 }
2018 
2019 /* This is an exported final_1, callable without SEEN.  */
2020 
2021 void
2022 final (rtx_insn *first, FILE *file, int optimize_p)
2023 {
2024   /* Those that use the internal final_start_function_1/final_1 API
2025      skip initial debug bind notes in final_start_function_1, and pass
2026      the modified FIRST to final_1.  But those that use the public
2027      final_start_function/final APIs, final_start_function can't move
2028      FIRST because it's not passed by reference, so if they were
2029      skipped there, skip them again here.  */
2030   while (in_initial_view_p (first))
2031     first = NEXT_INSN (first);
2032 
2033   final_1 (first, file, 0, optimize_p);
2034 }
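/* A minimal sketch of how these public entry points are meant to be
   driven for one function (illustration only; the pass driver in this
   file uses the internal _1 variants and does additional bookkeeping):

     assemble_start_function (decl, fnname);
     final_start_function (get_insns (), asm_out_file, optimize);
     final (get_insns (), asm_out_file, optimize);
     final_end_function ();
     assemble_end_function (decl, fnname);  */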
2035 
2036 const char *
2037 get_insn_template (int code, rtx_insn *insn)
2038 {
2039   switch (insn_data[code].output_format)
2040     {
2041     case INSN_OUTPUT_FORMAT_SINGLE:
2042       return insn_data[code].output.single;
2043     case INSN_OUTPUT_FORMAT_MULTI:
2044       return insn_data[code].output.multi[which_alternative];
2045     case INSN_OUTPUT_FORMAT_FUNCTION:
2046       gcc_assert (insn);
2047       return (*insn_data[code].output.function) (recog_data.operand, insn);
2048 
2049     default:
2050       gcc_unreachable ();
2051     }
2052 }
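/* Sketch of where the three output formats come from (not generated
   here): a machine-description pattern whose output is a plain string
   such as "mov %1,%0" becomes INSN_OUTPUT_FORMAT_SINGLE; an "@"-style
   template with one line per constraint alternative becomes
   INSN_OUTPUT_FORMAT_MULTI, indexed by which_alternative; a C-block
   output statement is compiled into a function and becomes
   INSN_OUTPUT_FORMAT_FUNCTION, invoked with the recognized operands
   and the insn itself.  */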
2053 
2054 /* Emit the appropriate declaration for an alternate-entry-point
2055    symbol represented by INSN, to FILE.  INSN is a CODE_LABEL with
2056    LABEL_KIND != LABEL_NORMAL.
2057 
2058    The case fall-through in this function is intentional.  */
2059 static void
2060 output_alternate_entry_point (FILE *file, rtx_insn *insn)
2061 {
2062   const char *name = LABEL_NAME (insn);
2063 
2064   switch (LABEL_KIND (insn))
2065     {
2066     case LABEL_WEAK_ENTRY:
2067 #ifdef ASM_WEAKEN_LABEL
2068       ASM_WEAKEN_LABEL (file, name);
2069       gcc_fallthrough ();
2070 #endif
2071     case LABEL_GLOBAL_ENTRY:
2072       targetm.asm_out.globalize_label (file, name);
2073       gcc_fallthrough ();
2074     case LABEL_STATIC_ENTRY:
2075 #ifdef ASM_OUTPUT_TYPE_DIRECTIVE
2076       ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
2077 #endif
2078       ASM_OUTPUT_LABEL (file, name);
2079       break;
2080 
2081     case LABEL_NORMAL:
2082     default:
2083       gcc_unreachable ();
2084     }
2085 }
2086 
2087 /* Given a CALL_INSN, find and return the nested CALL. */
2088 static rtx
2089 call_from_call_insn (rtx_call_insn *insn)
2090 {
2091   rtx x;
2092   gcc_assert (CALL_P (insn));
2093   x = PATTERN (insn);
2094 
2095   while (GET_CODE (x) != CALL)
2096     {
2097       switch (GET_CODE (x))
2098 	{
2099 	default:
2100 	  gcc_unreachable ();
2101 	case COND_EXEC:
2102 	  x = COND_EXEC_CODE (x);
2103 	  break;
2104 	case PARALLEL:
2105 	  x = XVECEXP (x, 0, 0);
2106 	  break;
2107 	case SET:
2108 	  x = XEXP (x, 1);
2109 	  break;
2110 	}
2111     }
2112   return x;
2113 }
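/* Example of the shapes peeled by the loop above (sketch only): a
   value-returning call is typically

     (set (reg:SI 0) (call (mem:QI (symbol_ref "foo")) (const_int 0)))

   possibly wrapped in a PARALLEL with clobbers, or in a COND_EXEC on
   predicated targets; in every case the inner CALL rtx is returned.  */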
2114 
2115 /* Print a comment into the asm showing FILENAME, LINENUM, and the
2116    corresponding source line, if available.  */
2117 
2118 static void
2119 asm_show_source (const char *filename, int linenum)
2120 {
2121   if (!filename)
2122     return;
2123 
2124   char_span line = location_get_source_line (filename, linenum);
2125   if (!line)
2126     return;
2127 
2128   fprintf (asm_out_file, "%s %s:%i: ", ASM_COMMENT_START, filename, linenum);
2129   /* "line" is not 0-terminated, so we must use its length.  */
2130   fwrite (line.get_buffer (), 1, line.length (), asm_out_file);
2131   fputc ('\n', asm_out_file);
2132 }
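/* With -fverbose-asm the comment emitted above looks like
     # foo.c:42:   return x + y;
   (illustration only; the text after the second colon is the verbatim
   source line, which is not NUL-terminated, hence the fwrite).  */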
2133 
2134 /* Judge if an absolute jump table is relocatable.  */
2135 
2136 bool
2137 jumptable_relocatable (void)
2138 {
2139   bool relocatable = false;
2140 
2141   if (!CASE_VECTOR_PC_RELATIVE
2142       && !targetm.asm_out.generate_pic_addr_diff_vec ()
2143       && targetm_common.have_named_sections)
2144      relocatable = targetm.asm_out.reloc_rw_mask ();
2145 
2146   return relocatable;
2147 }
2148 
2149 /* The final scan for one insn, INSN.
2150    Args are same as in `final', except that INSN
2151    is the insn being scanned.
2152    Value returned is the next insn to be scanned.
2153 
2154    NOPEEPHOLES is the flag to disallow peephole processing (currently
2155    used for within delayed branch sequence output).
2156 
2157    SEEN is used to track the end of the prologue, for emitting
2158    debug information.  We force the emission of a line note after
2159    both NOTE_INSN_PROLOGUE_END and NOTE_INSN_FUNCTION_BEG.  */
2160 
2161 static rtx_insn *
2162 final_scan_insn_1 (rtx_insn *insn, FILE *file, int optimize_p ATTRIBUTE_UNUSED,
2163 		   int nopeepholes ATTRIBUTE_UNUSED, int *seen)
2164 {
2165   rtx_insn *next;
2166   rtx_jump_table_data *table;
2167 
2168   insn_counter++;
2169 
2170   /* Ignore deleted insns.  These can occur when we split insns (due to a
2171      template of "#") while not optimizing.  */
2172   if (insn->deleted ())
2173     return NEXT_INSN (insn);
2174 
2175   switch (GET_CODE (insn))
2176     {
2177     case NOTE:
2178       switch (NOTE_KIND (insn))
2179 	{
2180 	case NOTE_INSN_DELETED:
2181 	case NOTE_INSN_UPDATE_SJLJ_CONTEXT:
2182 	  break;
2183 
2184 	case NOTE_INSN_SWITCH_TEXT_SECTIONS:
2185 	  maybe_output_next_view (seen);
2186 
2187 	  output_function_exception_table (0);
2188 
2189 	  if (targetm.asm_out.unwind_emit)
2190 	    targetm.asm_out.unwind_emit (asm_out_file, insn);
2191 
2192 	  in_cold_section_p = !in_cold_section_p;
2193 
2194 	  gcc_checking_assert (in_cold_section_p);
2195 	  if (in_cold_section_p)
2196 	    cold_function_name
2197 	      = clone_function_name (current_function_decl, "cold");
2198 
2199 	  if (dwarf2out_do_frame ())
2200 	    {
2201 	      dwarf2out_switch_text_section ();
2202 	      if (!dwarf2_debug_info_emitted_p (current_function_decl)
2203 		  && !DECL_IGNORED_P (current_function_decl))
2204 		debug_hooks->switch_text_section ();
2205 	    }
2206 	  else if (!DECL_IGNORED_P (current_function_decl))
2207 	    debug_hooks->switch_text_section ();
2208 	  if (DECL_IGNORED_P (current_function_decl) && last_linenum
2209 	      && last_filename)
2210 	    debug_hooks->set_ignored_loc (last_linenum, last_columnnum,
2211 					  last_filename);
2212 
2213 	  switch_to_section (current_function_section ());
2214 	  targetm.asm_out.function_switched_text_sections (asm_out_file,
2215 							   current_function_decl,
2216 							   in_cold_section_p);
2217 	  /* Emit a label for the split cold section.  Form label name by
2218 	     suffixing "cold" to the original function's name.  */
2219 	  if (in_cold_section_p)
2220 	    {
2221 #ifdef ASM_DECLARE_COLD_FUNCTION_NAME
2222 	      ASM_DECLARE_COLD_FUNCTION_NAME (asm_out_file,
2223 					      IDENTIFIER_POINTER
2224 					          (cold_function_name),
2225 					      current_function_decl);
2226 #else
2227 	      ASM_OUTPUT_LABEL (asm_out_file,
2228 				IDENTIFIER_POINTER (cold_function_name));
2229 #endif
2230 	      if (dwarf2out_do_frame ()
2231 	          && cfun->fde->dw_fde_second_begin != NULL)
2232 		ASM_OUTPUT_LABEL (asm_out_file, cfun->fde->dw_fde_second_begin);
2233 	    }
2234 	  break;
2235 
2236 	case NOTE_INSN_BASIC_BLOCK:
2237 	  if (need_profile_function)
2238 	    {
2239 	      profile_function (asm_out_file);
2240 	      need_profile_function = false;
2241 	    }
2242 
2243 	  if (targetm.asm_out.unwind_emit)
2244 	    targetm.asm_out.unwind_emit (asm_out_file, insn);
2245 
2246 	  bb_discriminator = NOTE_BASIC_BLOCK (insn)->discriminator;
2247 	  break;
2248 
2249 	case NOTE_INSN_EH_REGION_BEG:
2250 	  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHB",
2251 				  NOTE_EH_HANDLER (insn));
2252 	  break;
2253 
2254 	case NOTE_INSN_EH_REGION_END:
2255 	  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHE",
2256 				  NOTE_EH_HANDLER (insn));
2257 	  break;
2258 
2259 	case NOTE_INSN_PROLOGUE_END:
2260 	  targetm.asm_out.function_end_prologue (file);
2261 	  profile_after_prologue (file);
2262 
2263 	  if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
2264 	    {
2265 	      *seen |= SEEN_EMITTED;
2266 	      force_source_line = true;
2267 	    }
2268 	  else
2269 	    *seen |= SEEN_NOTE;
2270 
2271 	  break;
2272 
2273 	case NOTE_INSN_EPILOGUE_BEG:
2274           if (!DECL_IGNORED_P (current_function_decl))
2275             (*debug_hooks->begin_epilogue) (last_linenum, last_filename);
2276 	  targetm.asm_out.function_begin_epilogue (file);
2277 	  break;
2278 
2279 	case NOTE_INSN_CFI:
2280 	  dwarf2out_emit_cfi (NOTE_CFI (insn));
2281 	  break;
2282 
2283 	case NOTE_INSN_CFI_LABEL:
2284 	  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LCFI",
2285 				  NOTE_LABEL_NUMBER (insn));
2286 	  break;
2287 
2288 	case NOTE_INSN_FUNCTION_BEG:
2289 	  if (need_profile_function)
2290 	    {
2291 	      profile_function (asm_out_file);
2292 	      need_profile_function = false;
2293 	    }
2294 
2295 	  app_disable ();
2296 	  if (!DECL_IGNORED_P (current_function_decl))
2297 	    debug_hooks->end_prologue (last_linenum, last_filename);
2298 
2299 	  if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
2300 	    {
2301 	      *seen |= SEEN_EMITTED;
2302 	      force_source_line = true;
2303 	    }
2304 	  else
2305 	    *seen |= SEEN_NOTE;
2306 
2307 	  break;
2308 
2309 	case NOTE_INSN_BLOCK_BEG:
2310 	  if (debug_info_level >= DINFO_LEVEL_NORMAL
2311 	      || dwarf_debuginfo_p ()
2312 	      || write_symbols == VMS_DEBUG)
2313 	    {
2314 	      int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
2315 
2316 	      app_disable ();
2317 	      ++block_depth;
2318 	      high_block_linenum = last_linenum;
2319 
2320 	      /* Output debugging info about the symbol-block beginning.  */
2321 	      if (!DECL_IGNORED_P (current_function_decl))
2322 		debug_hooks->begin_block (last_linenum, n);
2323 
2324 	      /* Mark this block as output.  */
2325 	      TREE_ASM_WRITTEN (NOTE_BLOCK (insn)) = 1;
2326 	      BLOCK_IN_COLD_SECTION_P (NOTE_BLOCK (insn)) = in_cold_section_p;
2327 	    }
2328 	  if (write_symbols == DBX_DEBUG)
2329 	    {
2330 	      location_t *locus_ptr
2331 		= block_nonartificial_location (NOTE_BLOCK (insn));
2332 
2333 	      if (locus_ptr != NULL)
2334 		{
2335 		  override_filename = LOCATION_FILE (*locus_ptr);
2336 		  override_linenum = LOCATION_LINE (*locus_ptr);
2337 		  override_columnnum = LOCATION_COLUMN (*locus_ptr);
2338 		  override_discriminator = compute_discriminator (*locus_ptr);
2339 		}
2340 	    }
2341 	  break;
2342 
2343 	case NOTE_INSN_BLOCK_END:
2344 	  maybe_output_next_view (seen);
2345 
2346 	  if (debug_info_level >= DINFO_LEVEL_NORMAL
2347 	      || dwarf_debuginfo_p ()
2348 	      || write_symbols == VMS_DEBUG)
2349 	    {
2350 	      int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
2351 
2352 	      app_disable ();
2353 
2354 	      /* End of a symbol-block.  */
2355 	      --block_depth;
2356 	      gcc_assert (block_depth >= 0);
2357 
2358 	      if (!DECL_IGNORED_P (current_function_decl))
2359 		debug_hooks->end_block (high_block_linenum, n);
2360 	      gcc_assert (BLOCK_IN_COLD_SECTION_P (NOTE_BLOCK (insn))
2361 			  == in_cold_section_p);
2362 	    }
2363 	  if (write_symbols == DBX_DEBUG)
2364 	    {
2365 	      tree outer_block = BLOCK_SUPERCONTEXT (NOTE_BLOCK (insn));
2366 	      location_t *locus_ptr
2367 		= block_nonartificial_location (outer_block);
2368 
2369 	      if (locus_ptr != NULL)
2370 		{
2371 		  override_filename = LOCATION_FILE (*locus_ptr);
2372 		  override_linenum = LOCATION_LINE (*locus_ptr);
2373 		  override_columnnum = LOCATION_COLUMN (*locus_ptr);
2374 		  override_discriminator = compute_discriminator (*locus_ptr);
2375 		}
2376 	      else
2377 		{
2378 		  override_filename = NULL;
2379 		  override_linenum = 0;
2380 		  override_columnnum = 0;
2381 		  override_discriminator = 0;
2382 		}
2383 	    }
2384 	  break;
2385 
2386 	case NOTE_INSN_DELETED_LABEL:
2387 	  /* Emit the label.  We may have deleted the CODE_LABEL because
2388 	     the label could be proved to be unreachable, though still
2389 	     referenced (in the form of having its address taken).  */
2390 	  ASM_OUTPUT_DEBUG_LABEL (file, "L", CODE_LABEL_NUMBER (insn));
2391 	  break;
2392 
2393 	case NOTE_INSN_DELETED_DEBUG_LABEL:
2394 	  /* Similarly, but need to use different namespace for it.  */
2395 	  if (CODE_LABEL_NUMBER (insn) != -1)
2396 	    ASM_OUTPUT_DEBUG_LABEL (file, "LDL", CODE_LABEL_NUMBER (insn));
2397 	  break;
2398 
2399 	case NOTE_INSN_VAR_LOCATION:
2400 	  if (!DECL_IGNORED_P (current_function_decl))
2401 	    {
2402 	      debug_hooks->var_location (insn);
2403 	      set_next_view_needed (seen);
2404 	    }
2405 	  break;
2406 
2407 	case NOTE_INSN_BEGIN_STMT:
2408 	  gcc_checking_assert (cfun->debug_nonbind_markers);
2409 	  if (!DECL_IGNORED_P (current_function_decl)
2410 	      && notice_source_line (insn, NULL))
2411 	    {
2412 	    output_source_line:
2413 	      (*debug_hooks->source_line) (last_linenum, last_columnnum,
2414 					   last_filename, last_discriminator,
2415 					   true);
2416 	      clear_next_view_needed (seen);
2417 	    }
2418 	  break;
2419 
2420 	case NOTE_INSN_INLINE_ENTRY:
2421 	  gcc_checking_assert (cfun->debug_nonbind_markers);
2422 	  if (!DECL_IGNORED_P (current_function_decl)
2423 	      && notice_source_line (insn, NULL))
2424 	    {
2425 	      (*debug_hooks->inline_entry) (LOCATION_BLOCK
2426 					    (NOTE_MARKER_LOCATION (insn)));
2427 	      goto output_source_line;
2428 	    }
2429 	  break;
2430 
2431 	default:
2432 	  gcc_unreachable ();
2433 	  break;
2434 	}
2435       break;
2436 
2437     case BARRIER:
2438       break;
2439 
2440     case CODE_LABEL:
2441       /* The target port might emit labels in the output function for
2442 	 some insn, e.g. sh.cc output_branchy_insn.  */
2443       if (CODE_LABEL_NUMBER (insn) <= max_labelno)
2444 	{
2445 	  align_flags alignment = LABEL_TO_ALIGNMENT (insn);
2446 	  if (alignment.levels[0].log && NEXT_INSN (insn))
2447 	    {
2448 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
2449 	      /* Output both primary and secondary alignment.  */
2450 	      ASM_OUTPUT_MAX_SKIP_ALIGN (file, alignment.levels[0].log,
2451 					 alignment.levels[0].maxskip);
2452 	      ASM_OUTPUT_MAX_SKIP_ALIGN (file, alignment.levels[1].log,
2453 					 alignment.levels[1].maxskip);
2454 #else
2455 #ifdef ASM_OUTPUT_ALIGN_WITH_NOP
2456               ASM_OUTPUT_ALIGN_WITH_NOP (file, alignment.levels[0].log);
2457 #else
2458 	      ASM_OUTPUT_ALIGN (file, alignment.levels[0].log);
2459 #endif
2460 #endif
2461 	    }
2462 	}
2463       CC_STATUS_INIT;
2464 
2465       if (!DECL_IGNORED_P (current_function_decl) && LABEL_NAME (insn))
2466 	debug_hooks->label (as_a <rtx_code_label *> (insn));
2467 
2468       app_disable ();
2469 
2470       /* If this label is followed by a jump-table, make sure we put
2471 	 the label in the read-only section.  Also possibly write the
2472 	 label and jump table together.  */
2473       table = jump_table_for_label (as_a <rtx_code_label *> (insn));
2474       if (table)
2475 	{
2476 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
2477 	  /* In this case, the case vector is being moved by the
2478 	     target, so don't output the label at all.  Leave that
2479 	     to the back end macros.  */
2480 #else
2481 	  if (! JUMP_TABLES_IN_TEXT_SECTION)
2482 	    {
2483 	      int log_align;
2484 
2485 	      switch_to_section (targetm.asm_out.function_rodata_section
2486 				 (current_function_decl,
2487 				  jumptable_relocatable ()));
2488 
2489 #ifdef ADDR_VEC_ALIGN
2490 	      log_align = ADDR_VEC_ALIGN (table);
2491 #else
2492 	      log_align = exact_log2 (BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2493 #endif
2494 	      ASM_OUTPUT_ALIGN (file, log_align);
2495 	    }
2496 	  else
2497 	    switch_to_section (current_function_section ());
2498 
2499 #ifdef ASM_OUTPUT_CASE_LABEL
2500 	  ASM_OUTPUT_CASE_LABEL (file, "L", CODE_LABEL_NUMBER (insn), table);
2501 #else
2502 	  targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
2503 #endif
2504 #endif
2505 	  break;
2506 	}
2507       if (LABEL_ALT_ENTRY_P (insn))
2508 	output_alternate_entry_point (file, insn);
2509       else
2510 	targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
2511       break;
2512 
2513     default:
2514       {
2515 	rtx body = PATTERN (insn);
2516 	int insn_code_number;
2517 	const char *templ;
2518 	bool is_stmt, *is_stmt_p;
2519 
2520 	if (MAY_HAVE_DEBUG_MARKER_INSNS && cfun->debug_nonbind_markers)
2521 	  {
2522 	    is_stmt = false;
2523 	    is_stmt_p = NULL;
2524 	  }
2525 	else
2526 	  is_stmt_p = &is_stmt;
2527 
2528 	/* Reset this early so it is correct for ASM statements.  */
2529 	current_insn_predicate = NULL_RTX;
2530 
2531 	/* An INSN, JUMP_INSN or CALL_INSN.
2532 	   First check for special kinds that recog doesn't recognize.  */
2533 
2534 	if (GET_CODE (body) == USE /* These are just declarations.  */
2535 	    || GET_CODE (body) == CLOBBER)
2536 	  break;
2537 
2538 	/* Detect insns that are really jump-tables
2539 	   and output them as such.  */
2540 
2541         if (JUMP_TABLE_DATA_P (insn))
2542 	  {
2543 #if !(defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC))
2544 	    int vlen, idx;
2545 #endif
2546 
2547 	    if (! JUMP_TABLES_IN_TEXT_SECTION)
2548 	      switch_to_section (targetm.asm_out.function_rodata_section
2549 				 (current_function_decl,
2550 				  jumptable_relocatable ()));
2551 	    else
2552 	      switch_to_section (current_function_section ());
2553 
2554 	    app_disable ();
2555 
2556 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
2557 	    if (GET_CODE (body) == ADDR_VEC)
2558 	      {
2559 #ifdef ASM_OUTPUT_ADDR_VEC
2560 		ASM_OUTPUT_ADDR_VEC (PREV_INSN (insn), body);
2561 #else
2562 		gcc_unreachable ();
2563 #endif
2564 	      }
2565 	    else
2566 	      {
2567 #ifdef ASM_OUTPUT_ADDR_DIFF_VEC
2568 		ASM_OUTPUT_ADDR_DIFF_VEC (PREV_INSN (insn), body);
2569 #else
2570 		gcc_unreachable ();
2571 #endif
2572 	      }
2573 #else
2574 	    vlen = XVECLEN (body, GET_CODE (body) == ADDR_DIFF_VEC);
2575 	    for (idx = 0; idx < vlen; idx++)
2576 	      {
2577 		if (GET_CODE (body) == ADDR_VEC)
2578 		  {
2579 #ifdef ASM_OUTPUT_ADDR_VEC_ELT
2580 		    ASM_OUTPUT_ADDR_VEC_ELT
2581 		      (file, CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 0, idx), 0)));
2582 #else
2583 		    gcc_unreachable ();
2584 #endif
2585 		  }
2586 		else
2587 		  {
2588 #ifdef ASM_OUTPUT_ADDR_DIFF_ELT
2589 		    ASM_OUTPUT_ADDR_DIFF_ELT
2590 		      (file,
2591 		       body,
2592 		       CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 1, idx), 0)),
2593 		       CODE_LABEL_NUMBER (XEXP (XEXP (body, 0), 0)));
2594 #else
2595 		    gcc_unreachable ();
2596 #endif
2597 		  }
2598 	      }
2599 #ifdef ASM_OUTPUT_CASE_END
2600 	    ASM_OUTPUT_CASE_END (file,
2601 				 CODE_LABEL_NUMBER (PREV_INSN (insn)),
2602 				 insn);
2603 #endif
2604 #endif
2605 
2606 	    switch_to_section (current_function_section ());
2607 
2608 	    if (debug_variable_location_views
2609 		&& !DECL_IGNORED_P (current_function_decl))
2610 	      debug_hooks->var_location (insn);
2611 
2612 	    break;
2613 	  }
2614 	/* Output this line note if it is the first or the last line
2615 	   note in a row.  */
2616 	if (!DECL_IGNORED_P (current_function_decl)
2617 	    && notice_source_line (insn, is_stmt_p))
2618 	  {
2619 	    if (flag_verbose_asm)
2620 	      asm_show_source (last_filename, last_linenum);
2621 	    (*debug_hooks->source_line) (last_linenum, last_columnnum,
2622 					 last_filename, last_discriminator,
2623 					 is_stmt);
2624 	    clear_next_view_needed (seen);
2625 	  }
2626 	else
2627 	  maybe_output_next_view (seen);
2628 
2629 	gcc_checking_assert (!DEBUG_INSN_P (insn));
2630 
2631 	if (GET_CODE (body) == PARALLEL
2632 	    && GET_CODE (XVECEXP (body, 0, 0)) == ASM_INPUT)
2633 	  body = XVECEXP (body, 0, 0);
2634 
2635 	if (GET_CODE (body) == ASM_INPUT)
2636 	  {
2637 	    const char *string = XSTR (body, 0);
2638 
2639 	    /* There's no telling what that did to the condition codes.  */
2640 	    CC_STATUS_INIT;
2641 
2642 	    if (string[0])
2643 	      {
2644 		expanded_location loc;
2645 
2646 		app_enable ();
2647 		loc = expand_location (ASM_INPUT_SOURCE_LOCATION (body));
2648 		if (*loc.file && loc.line)
2649 		  fprintf (asm_out_file, "%s %i \"%s\" 1\n",
2650 			   ASM_COMMENT_START, loc.line, loc.file);
2651 		fprintf (asm_out_file, "\t%s\n", string);
2652 #if HAVE_AS_LINE_ZERO
2653 		if (*loc.file && loc.line)
2654 		  fprintf (asm_out_file, "%s 0 \"\" 2\n", ASM_COMMENT_START);
2655 #endif
2656 	      }
2657 	    break;
2658 	  }
2659 
2660 	/* Detect `asm' construct with operands.  */
2661 	if (asm_noperands (body) >= 0)
2662 	  {
2663 	    unsigned int noperands = asm_noperands (body);
2664 	    rtx *ops = XALLOCAVEC (rtx, noperands);
2665 	    const char *string;
2666 	    location_t loc;
2667 	    expanded_location expanded;
2668 
2669 	    /* There's no telling what that did to the condition codes.  */
2670 	    CC_STATUS_INIT;
2671 
2672 	    /* Get out the operand values.  */
2673 	    string = decode_asm_operands (body, ops, NULL, NULL, NULL, &loc);
2674 	    /* Inhibit dying on what would otherwise be compiler bugs.  */
2675 	    insn_noperands = noperands;
2676 	    this_is_asm_operands = insn;
2677 	    expanded = expand_location (loc);
2678 
2679 #ifdef FINAL_PRESCAN_INSN
2680 	    FINAL_PRESCAN_INSN (insn, ops, insn_noperands);
2681 #endif
2682 
2683 	    /* Output the insn using them.  */
2684 	    if (string[0])
2685 	      {
2686 		app_enable ();
2687 		if (expanded.file && expanded.line)
2688 		  fprintf (asm_out_file, "%s %i \"%s\" 1\n",
2689 			   ASM_COMMENT_START, expanded.line, expanded.file);
2690 	        output_asm_insn (string, ops);
2691 #if HAVE_AS_LINE_ZERO
2692 		if (expanded.file && expanded.line)
2693 		  fprintf (asm_out_file, "%s 0 \"\" 2\n", ASM_COMMENT_START);
2694 #endif
2695 	      }
2696 
2697 	    if (targetm.asm_out.final_postscan_insn)
2698 	      targetm.asm_out.final_postscan_insn (file, insn, ops,
2699 						   insn_noperands);
2700 
2701 	    this_is_asm_operands = 0;
2702 	    break;
2703 	  }
2704 
2705 	app_disable ();
2706 
2707 	if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (body))
2708 	  {
2709 	    /* A delayed-branch sequence */
2710 	    int i;
2711 
2712 	    final_sequence = seq;
2713 
2714 	    /* The first insn in this SEQUENCE might be a JUMP_INSN that will
2715 	       force the restoration of a comparison that was previously
2716 	       thought unnecessary.  If that happens, cancel this sequence
2717 	       and cause that insn to be restored.  */
2718 
2719 	    next = final_scan_insn (seq->insn (0), file, 0, 1, seen);
2720 	    if (next != seq->insn (1))
2721 	      {
2722 		final_sequence = 0;
2723 		return next;
2724 	      }
2725 
2726 	    for (i = 1; i < seq->len (); i++)
2727 	      {
2728 		rtx_insn *insn = seq->insn (i);
2729 		rtx_insn *next = NEXT_INSN (insn);
2730 		/* We loop in case any instruction in a delay slot gets
2731 		   split.  */
2732 		do
2733 		  insn = final_scan_insn (insn, file, 0, 1, seen);
2734 		while (insn != next);
2735 	      }
2736 #ifdef DBR_OUTPUT_SEQEND
2737 	    DBR_OUTPUT_SEQEND (file);
2738 #endif
2739 	    final_sequence = 0;
2740 
2741 	    /* If the insn requiring the delay slot was a CALL_INSN, the
2742 	       insns in the delay slot are actually executed before the
2743 	       called function.  Hence we don't preserve any CC-setting
2744 	       actions in these insns and the CC must be marked as being
2745 	       clobbered by the function.  */
2746 	    if (CALL_P (seq->insn (0)))
2747 	      {
2748 		CC_STATUS_INIT;
2749 	      }
2750 	    break;
2751 	  }
2752 
2753 	/* We have a real machine instruction as rtl.  */
2754 
2755 	body = PATTERN (insn);
2756 
2757 	/* Do machine-specific peephole optimizations if desired.  */
2758 
2759 	if (HAVE_peephole && optimize_p && !flag_no_peephole && !nopeepholes)
2760 	  {
2761 	    rtx_insn *next = peephole (insn);
2762 	    /* When peepholing, if there were notes within the peephole,
2763 	       emit them before the peephole.  */
2764 	    if (next != 0 && next != NEXT_INSN (insn))
2765 	      {
2766 		rtx_insn *note, *prev = PREV_INSN (insn);
2767 
2768 		for (note = NEXT_INSN (insn); note != next;
2769 		     note = NEXT_INSN (note))
2770 		  final_scan_insn (note, file, optimize_p, nopeepholes, seen);
2771 
2772 		/* Put the notes in the proper position for a later
2773 		   rescan.  For example, the SH target can do this
2774 		   when generating a far jump in a delayed branch
2775 		   sequence.  */
2776 		note = NEXT_INSN (insn);
2777 		SET_PREV_INSN (note) = prev;
2778 		SET_NEXT_INSN (prev) = note;
2779 		SET_NEXT_INSN (PREV_INSN (next)) = insn;
2780 		SET_PREV_INSN (insn) = PREV_INSN (next);
2781 		SET_NEXT_INSN (insn) = next;
2782 		SET_PREV_INSN (next) = insn;
2783 	      }
2784 
2785 	    /* PEEPHOLE might have changed this.  */
2786 	    body = PATTERN (insn);
2787 	  }
2788 
2789 	/* Try to recognize the instruction.
2790 	   If successful, verify that the operands satisfy the
2791 	   constraints for the instruction.  Crash if they don't,
2792 	   since `reload' should have changed them so that they do.  */
2793 
2794 	insn_code_number = recog_memoized (insn);
2795 	cleanup_subreg_operands (insn);
2796 
2797 	/* Dump the insn in the assembly for debugging (-dAP).
2798 	   If the final dump is requested as slim RTL, dump slim
2799 	   RTL to the assembly file also.  */
2800 	if (flag_dump_rtl_in_asm)
2801 	  {
2802 	    print_rtx_head = ASM_COMMENT_START;
2803 	    if (! (dump_flags & TDF_SLIM))
2804 	      print_rtl_single (asm_out_file, insn);
2805 	    else
2806 	      dump_insn_slim (asm_out_file, insn);
2807 	    print_rtx_head = "";
2808 	  }
2809 
2810 	if (! constrain_operands_cached (insn, 1))
2811 	  fatal_insn_not_found (insn);
2812 
2813 	/* Some target machines need to prescan each insn before
2814 	   it is output.  */
2815 
2816 #ifdef FINAL_PRESCAN_INSN
2817 	FINAL_PRESCAN_INSN (insn, recog_data.operand, recog_data.n_operands);
2818 #endif
2819 
2820 	if (targetm.have_conditional_execution ()
2821 	    && GET_CODE (PATTERN (insn)) == COND_EXEC)
2822 	  current_insn_predicate = COND_EXEC_TEST (PATTERN (insn));
2823 
2824 	current_output_insn = debug_insn = insn;
2825 
2826 	/* Find the proper template for this insn.  */
2827 	templ = get_insn_template (insn_code_number, insn);
2828 
2829 	/* If the C code returns 0, it means that it is a jump insn
2830 	   which follows a deleted test insn, and that test insn
2831 	   needs to be reinserted.  */
2832 	if (templ == 0)
2833 	  {
2834 	    rtx_insn *prev;
2835 
2836 	    gcc_assert (prev_nonnote_insn (insn) == last_ignored_compare);
2837 
2838 	    /* We have already processed the notes between the setter and
2839 	       the user.  Make sure we don't process them again, this is
2840 	       particularly important if one of the notes is a block
2841 	       scope note or an EH note.  */
2842 	    for (prev = insn;
2843 		 prev != last_ignored_compare;
2844 		 prev = PREV_INSN (prev))
2845 	      {
2846 		if (NOTE_P (prev))
2847 		  delete_insn (prev);	/* Use delete_note.  */
2848 	      }
2849 
2850 	    return prev;
2851 	  }
2852 
2853 	/* If the template is the string "#", it means that this insn must
2854 	   be split.  */
2855 	if (templ[0] == '#' && templ[1] == '\0')
2856 	  {
2857 	    rtx_insn *new_rtx = try_split (body, insn, 0);
2858 
2859 	    /* If we didn't split the insn, go away.  */
2860 	    if (new_rtx == insn && PATTERN (new_rtx) == body)
2861 	      fatal_insn ("could not split insn", insn);
2862 
2863 	    /* If we have a length attribute, this instruction should have
2864 	       been split in shorten_branches, to ensure that we would have
2865 	       valid length info for the splitees.  */
2866 	    gcc_assert (!HAVE_ATTR_length);
2867 
2868 	    return new_rtx;
2869 	  }
2870 
2871 	/* ??? This will put the directives in the wrong place if
2872 	   get_insn_template outputs assembly directly.  However calling it
2873 	   before get_insn_template breaks if the insns is split.  */
2874 	if (targetm.asm_out.unwind_emit_before_insn
2875 	    && targetm.asm_out.unwind_emit)
2876 	  targetm.asm_out.unwind_emit (asm_out_file, insn);
2877 
2878 	rtx_call_insn *call_insn = dyn_cast <rtx_call_insn *> (insn);
2879 	if (call_insn != NULL)
2880 	  {
2881 	    rtx x = call_from_call_insn (call_insn);
2882 	    x = XEXP (x, 0);
2883 	    if (x && MEM_P (x) && GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
2884 	      {
2885 		tree t;
2886 		x = XEXP (x, 0);
2887 		t = SYMBOL_REF_DECL (x);
2888 		if (t)
2889 		  assemble_external (t);
2890 	      }
2891 	  }
2892 
2893 	/* Output assembler code from the template.  */
2894 	output_asm_insn (templ, recog_data.operand);
2895 
2896 	/* Some target machines need to postscan each insn after
2897 	   it is output.  */
2898 	if (targetm.asm_out.final_postscan_insn)
2899 	  targetm.asm_out.final_postscan_insn (file, insn, recog_data.operand,
2900 					       recog_data.n_operands);
2901 
2902 	if (!targetm.asm_out.unwind_emit_before_insn
2903 	    && targetm.asm_out.unwind_emit)
2904 	  targetm.asm_out.unwind_emit (asm_out_file, insn);
2905 
2906 	/* Let the debug info back-end know about this call.  We do this only
2907 	   after the instruction has been emitted because labels that may be
2908 	   created to reference the call instruction must appear after it.  */
2909 	if ((debug_variable_location_views || call_insn != NULL)
2910 	    && !DECL_IGNORED_P (current_function_decl))
2911 	  debug_hooks->var_location (insn);
2912 
2913 	current_output_insn = debug_insn = 0;
2914       }
2915     }
2916   return NEXT_INSN (insn);
2917 }
2918 
2919 /* This is a wrapper around final_scan_insn_1 that allows ports to
2920    call it recursively without a known value for SEEN.  The value is
2921    saved at the outermost call, and recovered for recursive calls.
2922    Recursive calls MUST pass NULL, or the same pointer if they can
2923    otherwise get to it.  */
2924 
2925 rtx_insn *
2926 final_scan_insn (rtx_insn *insn, FILE *file, int optimize_p,
2927 		 int nopeepholes, int *seen)
2928 {
2929   static int *enclosing_seen;
2930   static int recursion_counter;
2931 
2932   gcc_assert (seen || recursion_counter);
2933   gcc_assert (!recursion_counter || !seen || seen == enclosing_seen);
2934 
2935   if (!recursion_counter++)
2936     enclosing_seen = seen;
2937   else if (!seen)
2938     seen = enclosing_seen;
2939 
2940   rtx_insn *ret = final_scan_insn_1 (insn, file, optimize_p, nopeepholes, seen);
2941 
2942   if (!--recursion_counter)
2943     enclosing_seen = NULL;
2944 
2945   return ret;
2946 }
2947 
2948 
2949 
2950 /* Map DECLs to instance discriminators.  This is allocated and
2951    defined in ada/gcc-interfaces/trans.cc, when compiling with -gnateS.
2952    Mappings from this table are saved and restored for LTO, so
2953    link-time compilation will have this map set, at least in
2954    partitions containing at least one DECL with an associated instance
2955    discriminator.  */
2956 
2957 decl_to_instance_map_t *decl_to_instance_map;
2958 
2959 /* Return the instance number assigned to DECL.  */
2960 
2961 static inline int
2962 map_decl_to_instance (const_tree decl)
2963 {
2964   int *inst;
2965 
2966   if (!decl_to_instance_map || !decl || !DECL_P (decl))
2967     return 0;
2968 
2969   inst = decl_to_instance_map->get (decl);
2970 
2971   if (!inst)
2972     return 0;
2973 
2974   return *inst;
2975 }
2976 
2977 /* Set DISCRIMINATOR to the appropriate value, possibly derived from LOC.  */
2978 
2979 static inline int
2980 compute_discriminator (location_t loc)
2981 {
2982   int discriminator;
2983 
2984   if (!decl_to_instance_map)
2985     discriminator = bb_discriminator;
2986   else
2987     {
2988       tree block = LOCATION_BLOCK (loc);
2989 
2990       while (block && TREE_CODE (block) == BLOCK
2991 	     && !inlined_function_outer_scope_p (block))
2992 	block = BLOCK_SUPERCONTEXT (block);
2993 
2994       tree decl;
2995 
2996       if (!block)
2997 	decl = current_function_decl;
2998       else if (DECL_P (block))
2999 	decl = block;
3000       else
3001 	decl = block_ultimate_origin (block);
3002 
3003       discriminator = map_decl_to_instance (decl);
3004     }
3005 
3006   return discriminator;
3007 }
3008 
3009 /* Return whether a source line note needs to be emitted before INSN.
3010    Sets IS_STMT to TRUE if the line should be marked as a possible
3011    breakpoint location.  */
3012 
3013 static bool
3014 notice_source_line (rtx_insn *insn, bool *is_stmt)
3015 {
3016   const char *filename;
3017   int linenum, columnnum;
3018 
3019   if (NOTE_MARKER_P (insn))
3020     {
3021       location_t loc = NOTE_MARKER_LOCATION (insn);
3022       expanded_location xloc = expand_location (loc);
3023       if (xloc.line == 0
3024 	  && (LOCATION_LOCUS (loc) == UNKNOWN_LOCATION
3025 	      || LOCATION_LOCUS (loc) == BUILTINS_LOCATION))
3026 	return false;
3027 
3028       filename = xloc.file;
3029       linenum = xloc.line;
3030       columnnum = xloc.column;
3031       discriminator = compute_discriminator (loc);
3032       force_source_line = true;
3033     }
3034   else if (override_filename)
3035     {
3036       filename = override_filename;
3037       linenum = override_linenum;
3038       columnnum = override_columnnum;
3039       discriminator = override_discriminator;
3040     }
3041   else if (INSN_HAS_LOCATION (insn))
3042     {
3043       expanded_location xloc = insn_location (insn);
3044       filename = xloc.file;
3045       linenum = xloc.line;
3046       columnnum = xloc.column;
3047       discriminator = compute_discriminator (INSN_LOCATION (insn));
3048     }
3049   else
3050     {
3051       filename = NULL;
3052       linenum = 0;
3053       columnnum = 0;
3054       discriminator = 0;
3055     }
3056 
3057   if (filename == NULL)
3058     return false;
3059 
3060   if (force_source_line
3061       || filename != last_filename
3062       || last_linenum != linenum
3063       || (debug_column_info && last_columnnum != columnnum))
3064     {
3065       force_source_line = false;
3066       last_filename = filename;
3067       last_linenum = linenum;
3068       last_columnnum = columnnum;
3069       last_discriminator = discriminator;
3070       if (is_stmt)
3071 	*is_stmt = true;
3072       high_block_linenum = MAX (last_linenum, high_block_linenum);
3073       high_function_linenum = MAX (last_linenum, high_function_linenum);
3074       return true;
3075     }
3076 
3077   if (SUPPORTS_DISCRIMINATOR && last_discriminator != discriminator)
3078     {
3079       /* If the discriminator changed, but the line number did not,
3080          output the line table entry with is_stmt false so the
3081          debugger does not treat this as a breakpoint location.  */
3082       last_discriminator = discriminator;
3083       if (is_stmt)
3084 	*is_stmt = false;
3085       return true;
3086     }
3087 
3088   return false;
3089 }
3090 
3091 /* For each operand in INSN, simplify (subreg (reg)) so that it refers
3092    directly to the desired hard register.  */
3093 
3094 void
3095 cleanup_subreg_operands (rtx_insn *insn)
3096 {
3097   int i;
3098   bool changed = false;
3099   extract_insn_cached (insn);
3100   for (i = 0; i < recog_data.n_operands; i++)
3101     {
3102       /* The following test cannot use recog_data.operand when testing
3103 	 for a SUBREG: the underlying object might have been changed
3104 	 already if we are inside a match_operator expression that
3105 	 matches the else clause.  Instead we test the underlying
3106 	 expression directly.  */
3107       if (GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
3108 	{
3109 	  recog_data.operand[i] = alter_subreg (recog_data.operand_loc[i], true);
3110 	  changed = true;
3111 	}
3112       else if (GET_CODE (recog_data.operand[i]) == PLUS
3113 	       || GET_CODE (recog_data.operand[i]) == MULT
3114 	       || MEM_P (recog_data.operand[i]))
3115 	recog_data.operand[i] = walk_alter_subreg (recog_data.operand_loc[i], &changed);
3116     }
3117 
3118   for (i = 0; i < recog_data.n_dups; i++)
3119     {
3120       if (GET_CODE (*recog_data.dup_loc[i]) == SUBREG)
3121 	{
3122 	  *recog_data.dup_loc[i] = alter_subreg (recog_data.dup_loc[i], true);
3123 	  changed = true;
3124 	}
3125       else if (GET_CODE (*recog_data.dup_loc[i]) == PLUS
3126 	       || GET_CODE (*recog_data.dup_loc[i]) == MULT
3127 	       || MEM_P (*recog_data.dup_loc[i]))
3128 	*recog_data.dup_loc[i] = walk_alter_subreg (recog_data.dup_loc[i], &changed);
3129     }
3130   if (changed)
3131     df_insn_rescan (insn);
3132 }
3133 
3134 /* If X is a SUBREG, try to replace it with a REG or a MEM, based on
3135    the thing it is a subreg of.  Do it anyway if FINAL_P.  */
3136 
3137 rtx
3138 alter_subreg (rtx *xp, bool final_p)
3139 {
3140   rtx x = *xp;
3141   rtx y = SUBREG_REG (x);
3142 
3143   /* simplify_subreg does not remove subreg from volatile references.
3144      We are required to.  */
3145   if (MEM_P (y))
3146     {
3147       poly_int64 offset = SUBREG_BYTE (x);
3148 
3149       /* For paradoxical subregs on big-endian machines, SUBREG_BYTE
3150 	 contains 0 instead of the proper offset.  See simplify_subreg.  */
3151       if (paradoxical_subreg_p (x))
3152 	offset = byte_lowpart_offset (GET_MODE (x), GET_MODE (y));
3153 
3154       if (final_p)
3155 	*xp = adjust_address (y, GET_MODE (x), offset);
3156       else
3157 	*xp = adjust_address_nv (y, GET_MODE (x), offset);
3158     }
3159   else if (REG_P (y) && HARD_REGISTER_P (y))
3160     {
3161       rtx new_rtx = simplify_subreg (GET_MODE (x), y, GET_MODE (y),
3162 				     SUBREG_BYTE (x));
3163 
3164       if (new_rtx != 0)
3165 	*xp = new_rtx;
3166       else if (final_p && REG_P (y))
3167 	{
3168 	  /* Simplify_subreg can't handle some REG cases, but we have to.  */
3169 	  unsigned int regno;
3170 	  poly_int64 offset;
3171 
3172 	  regno = subreg_regno (x);
3173 	  if (subreg_lowpart_p (x))
3174 	    offset = byte_lowpart_offset (GET_MODE (x), GET_MODE (y));
3175 	  else
3176 	    offset = SUBREG_BYTE (x);
3177 	  *xp = gen_rtx_REG_offset (y, GET_MODE (x), regno, offset);
3178 	}
3179     }
3180 
3181   return *xp;
3182 }
3183 
3184 /* Do alter_subreg on all the SUBREGs contained in X.  */
3185 
3186 static rtx
3187 walk_alter_subreg (rtx *xp, bool *changed)
3188 {
3189   rtx x = *xp;
3190   switch (GET_CODE (x))
3191     {
3192     case PLUS:
3193     case MULT:
3194     case AND:
3195       XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0), changed);
3196       XEXP (x, 1) = walk_alter_subreg (&XEXP (x, 1), changed);
3197       break;
3198 
3199     case MEM:
3200     case ZERO_EXTEND:
3201       XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0), changed);
3202       break;
3203 
3204     case SUBREG:
3205       *changed = true;
3206       return alter_subreg (xp, true);
3207 
3208     default:
3209       break;
3210     }
3211 
3212   return *xp;
3213 }
3214 
3215 /* Report inconsistency between the assembler template and the operands.
3216    In an `asm', it's the user's fault; otherwise, the compiler's fault.  */
3217 
3218 void
3219 output_operand_lossage (const char *cmsgid, ...)
3220 {
3221   char *fmt_string;
3222   char *new_message;
3223   const char *pfx_str;
3224   va_list ap;
3225 
3226   va_start (ap, cmsgid);
3227 
3228   pfx_str = this_is_asm_operands ? _("invalid 'asm': ") : "output_operand: ";
3229   fmt_string = xasprintf ("%s%s", pfx_str, _(cmsgid));
3230   new_message = xvasprintf (fmt_string, ap);
3231 
3232   if (this_is_asm_operands)
3233     error_for_asm (this_is_asm_operands, "%s", new_message);
3234   else
3235     internal_error ("%s", new_message);
3236 
3237   free (fmt_string);
3238   free (new_message);
3239   va_end (ap);
3240 }
3241 
3242 /* Output of assembler code from a template, and its subroutines.  */
3243 
3244 /* Annotate the assembly with a comment describing the pattern and
3245    alternative used.  */
3246 
3247 static void
3248 output_asm_name (void)
3249 {
3250   if (debug_insn)
3251     {
3252       fprintf (asm_out_file, "\t%s %d\t",
3253 	       ASM_COMMENT_START, INSN_UID (debug_insn));
3254 
3255       fprintf (asm_out_file, "[c=%d",
3256 	       insn_cost (debug_insn, optimize_insn_for_speed_p ()));
3257       if (HAVE_ATTR_length)
3258 	fprintf (asm_out_file, " l=%d",
3259 		 get_attr_length (debug_insn));
3260       fprintf (asm_out_file, "]  ");
3261 
3262       int num = INSN_CODE (debug_insn);
3263       fprintf (asm_out_file, "%s", insn_data[num].name);
3264       if (insn_data[num].n_alternatives > 1)
3265 	fprintf (asm_out_file, "/%d", which_alternative);
3266 
3267       /* Clear this so only the first assembler insn
3268 	 of any rtl insn will get the special comment for -dp.  */
3269       debug_insn = 0;
3270     }
3271 }
3272 
3273 /* If OP is a REG or MEM and we can find a MEM_EXPR corresponding to it
3274    or its address, return that expr .  Set *PADDRESSP to 1 if the expr
3275    or its address, return that expr.  Set *PADDRESSP to 1 if the expr
3276 
3277 static tree
3278 get_mem_expr_from_op (rtx op, int *paddressp)
3279 {
3280   tree expr;
3281   int inner_addressp;
3282 
3283   *paddressp = 0;
3284 
3285   if (REG_P (op))
3286     return REG_EXPR (op);
3287   else if (!MEM_P (op))
3288     return 0;
3289 
3290   if (MEM_EXPR (op) != 0)
3291     return MEM_EXPR (op);
3292 
3293   /* Otherwise we have an address, so indicate it and look at the address.  */
3294   *paddressp = 1;
3295   op = XEXP (op, 0);
3296 
3297   /* First check if we have a decl for the address, then look at the right side
3298      if it is a PLUS.  Otherwise, strip off arithmetic and keep looking.
3299      But don't allow the address to itself be indirect.  */
3300   if ((expr = get_mem_expr_from_op (op, &inner_addressp)) && ! inner_addressp)
3301     return expr;
3302   else if (GET_CODE (op) == PLUS
3303 	   && (expr = get_mem_expr_from_op (XEXP (op, 1), &inner_addressp)))
3304     return expr;
3305 
3306   while (UNARY_P (op)
3307 	 || GET_RTX_CLASS (GET_CODE (op)) == RTX_BIN_ARITH)
3308     op = XEXP (op, 0);
3309 
3310   expr = get_mem_expr_from_op (op, &inner_addressp);
3311   return inner_addressp ? 0 : expr;
3312 }
3313 
3314 /* Output operand names for assembler instructions.  OPERANDS is the
3315    operand vector, OPORDER is the order to write the operands, and NOPS
3316    is the number of operands to write.  */
3317 
3318 static void
3319 output_asm_operand_names (rtx *operands, int *oporder, int nops)
3320 {
3321   int wrote = 0;
3322   int i;
3323 
3324   for (i = 0; i < nops; i++)
3325     {
3326       int addressp;
3327       rtx op = operands[oporder[i]];
3328       tree expr = get_mem_expr_from_op (op, &addressp);
3329 
3330       fprintf (asm_out_file, "%c%s",
3331 	       wrote ? ',' : '\t', wrote ? "" : ASM_COMMENT_START);
3332       wrote = 1;
3333       if (expr)
3334 	{
3335 	  fprintf (asm_out_file, "%s",
3336 		   addressp ? "*" : "");
3337 	  print_mem_expr (asm_out_file, expr);
3338 	  wrote = 1;
3339 	}
3340       else if (REG_P (op) && ORIGINAL_REGNO (op)
3341 	       && ORIGINAL_REGNO (op) != REGNO (op))
3342 	fprintf (asm_out_file, " tmp%i", ORIGINAL_REGNO (op));
3343     }
3344 }
3345 
3346 #ifdef ASSEMBLER_DIALECT
3347 /* Helper function to parse assembler dialects in the asm string.
3348    This is called from output_asm_insn and asm_fprintf.  */
3349 static const char *
3350 do_assembler_dialects (const char *p, int *dialect)
3351 {
3352   char c = *(p - 1);
3353 
3354   switch (c)
3355     {
3356     case '{':
3357       {
3358         int i;
3359 
3360         if (*dialect)
3361           output_operand_lossage ("nested assembly dialect alternatives");
3362         else
3363           *dialect = 1;
3364 
3365         /* If we want the first dialect, do nothing.  Otherwise, skip
3366            DIALECT_NUMBER of strings ending with '|'.  */
3367         for (i = 0; i < dialect_number; i++)
3368           {
3369             while (*p && *p != '}')
3370 	      {
3371 		if (*p == '|')
3372 		  {
3373 		    p++;
3374 		    break;
3375 		  }
3376 
3377 		/* Skip over any character after a percent sign.  */
3378 		if (*p == '%')
3379 		  p++;
3380 		if (*p)
3381 		  p++;
3382 	      }
3383 
3384             if (*p == '}')
3385 	      break;
3386           }
3387 
3388         if (*p == '\0')
3389           output_operand_lossage ("unterminated assembly dialect alternative");
3390       }
3391       break;
3392 
3393     case '|':
3394       if (*dialect)
3395         {
3396           /* Skip to close brace.  */
3397           do
3398             {
3399 	      if (*p == '\0')
3400 		{
3401 		  output_operand_lossage ("unterminated assembly dialect alternative");
3402 		  break;
3403 		}
3404 
3405 	      /* Skip over any character after a percent sign.  */
3406 	      if (*p == '%' && p[1])
3407 		{
3408 		  p += 2;
3409 		  continue;
3410 		}
3411 
3412 	      if (*p++ == '}')
3413 		break;
3414             }
3415           while (1);
3416 
3417           *dialect = 0;
3418         }
3419       else
3420         putc (c, asm_out_file);
3421       break;
3422 
3423     case '}':
3424       if (! *dialect)
3425         putc (c, asm_out_file);
3426       *dialect = 0;
3427       break;
3428     default:
3429       gcc_unreachable ();
3430     }
3431 
3432   return p;
3433 }
3434 #endif
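
/* A hedged sketch of the construct parsed above (the mnemonics are invented):
   when ASSEMBLER_DIALECT is defined, a template fragment such as

	"{movl %1,%0|mov %0,%1}"

   emits "movl %1,%0" if dialect_number is 0 and "mov %0,%1" if it is 1.
   A literal '{', '|' or '}' must then be written as %{, %| or %}.  */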
3435 
3436 /* Output text from TEMPLATE to the assembler output file,
3437    obeying %-directions to substitute operands taken from
3438    the vector OPERANDS.
3439 
3440    %N (for N a digit) means print operand N in usual manner.
3441    %lN means require operand N to be a CODE_LABEL or LABEL_REF
3442       and print the label name with no punctuation.
3443    %cN means require operand N to be a constant
3444       and print the constant expression with no punctuation.
3445    %aN means expect operand N to be a memory address
3446       (not a memory reference!) and print a reference
3447       to that address.
3448    %nN means expect operand N to be a constant
3449       and print a constant expression for minus the value
3450       of the operand, with no other punctuation.  */
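
/* For illustration only (a hypothetical template, not taken from any real
   back end):

	output_asm_insn ("add%=_lbl:\tadd\t%0,%1,%c2\n\tbne\t%l3", operands);

   prints operands 0 and 1 in the usual way, operand 2 as a bare constant,
   operand 3 as a label name, and expands %= (see below) to a number unique
   to this insn, so the generated local label cannot clash between insns.  */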
3451 
3452 void
3453 output_asm_insn (const char *templ, rtx *operands)
3454 {
3455   const char *p;
3456   int c;
3457 #ifdef ASSEMBLER_DIALECT
3458   int dialect = 0;
3459 #endif
3460   int oporder[MAX_RECOG_OPERANDS];
3461   char opoutput[MAX_RECOG_OPERANDS];
3462   int ops = 0;
3463 
3464   /* An insn may return a null string template
3465      in a case where no assembler code is needed.  */
3466   if (*templ == 0)
3467     return;
3468 
3469   memset (opoutput, 0, sizeof opoutput);
3470   p = templ;
3471   putc ('\t', asm_out_file);
3472 
3473 #ifdef ASM_OUTPUT_OPCODE
3474   ASM_OUTPUT_OPCODE (asm_out_file, p);
3475 #endif
3476 
3477   while ((c = *p++))
3478     switch (c)
3479       {
3480       case '\n':
3481 	if (flag_verbose_asm)
3482 	  output_asm_operand_names (operands, oporder, ops);
3483 	if (flag_print_asm_name)
3484 	  output_asm_name ();
3485 
3486 	ops = 0;
3487 	memset (opoutput, 0, sizeof opoutput);
3488 
3489 	putc (c, asm_out_file);
3490 #ifdef ASM_OUTPUT_OPCODE
3491 	while ((c = *p) == '\t')
3492 	  {
3493 	    putc (c, asm_out_file);
3494 	    p++;
3495 	  }
3496 	ASM_OUTPUT_OPCODE (asm_out_file, p);
3497 #endif
3498 	break;
3499 
3500 #ifdef ASSEMBLER_DIALECT
3501       case '{':
3502       case '}':
3503       case '|':
3504 	p = do_assembler_dialects (p, &dialect);
3505 	break;
3506 #endif
3507 
3508       case '%':
3509 	/* %% outputs a single %.  %{, %} and %| print {, } and | respectively
3510 	   if ASSEMBLER_DIALECT is defined and these characters have a special
3511 	   meaning as dialect delimiters.  */
3512 	if (*p == '%'
3513 #ifdef ASSEMBLER_DIALECT
3514 	    || *p == '{' || *p == '}' || *p == '|'
3515 #endif
3516 	    )
3517 	  {
3518 	    putc (*p, asm_out_file);
3519 	    p++;
3520 	  }
3521 	/* %= outputs a number which is unique to each insn in the entire
3522 	   compilation.  This is useful for making local labels that are
3523 	   referred to more than once in a given insn.  */
3524 	else if (*p == '=')
3525 	  {
3526 	    p++;
3527 	    fprintf (asm_out_file, "%d", insn_counter);
3528 	  }
3529 	/* % followed by a letter and some digits
3530 	   outputs an operand in a special way depending on the letter.
3531 	   Letters `acln' are implemented directly.
3532 	   Other letters are passed to `output_operand' so that
3533 	   the TARGET_PRINT_OPERAND hook can define them.  */
3534 	else if (ISALPHA (*p))
3535 	  {
3536 	    int letter = *p++;
3537 	    unsigned long opnum;
3538 	    char *endptr;
3539 
3540 	    opnum = strtoul (p, &endptr, 10);
3541 
3542 	    if (endptr == p)
3543 	      output_operand_lossage ("operand number missing "
3544 				      "after %%-letter");
3545 	    else if (this_is_asm_operands && opnum >= insn_noperands)
3546 	      output_operand_lossage ("operand number out of range");
3547 	    else if (letter == 'l')
3548 	      output_asm_label (operands[opnum]);
3549 	    else if (letter == 'a')
3550 	      output_address (VOIDmode, operands[opnum]);
3551 	    else if (letter == 'c')
3552 	      {
3553 		if (CONSTANT_ADDRESS_P (operands[opnum]))
3554 		  output_addr_const (asm_out_file, operands[opnum]);
3555 		else
3556 		  output_operand (operands[opnum], 'c');
3557 	      }
3558 	    else if (letter == 'n')
3559 	      {
3560 		if (CONST_INT_P (operands[opnum]))
3561 		  fprintf (asm_out_file, HOST_WIDE_INT_PRINT_DEC,
3562 			   - INTVAL (operands[opnum]));
3563 		else
3564 		  {
3565 		    putc ('-', asm_out_file);
3566 		    output_addr_const (asm_out_file, operands[opnum]);
3567 		  }
3568 	      }
3569 	    else
3570 	      output_operand (operands[opnum], letter);
3571 
3572 	    if (!opoutput[opnum])
3573 	      oporder[ops++] = opnum;
3574 	    opoutput[opnum] = 1;
3575 
3576 	    p = endptr;
3577 	    c = *p;
3578 	  }
3579 	/* % followed by a digit outputs an operand the default way.  */
3580 	else if (ISDIGIT (*p))
3581 	  {
3582 	    unsigned long opnum;
3583 	    char *endptr;
3584 
3585 	    opnum = strtoul (p, &endptr, 10);
3586 	    if (this_is_asm_operands && opnum >= insn_noperands)
3587 	      output_operand_lossage ("operand number out of range");
3588 	    else
3589 	      output_operand (operands[opnum], 0);
3590 
3591 	    if (!opoutput[opnum])
3592 	      oporder[ops++] = opnum;
3593 	    opoutput[opnum] = 1;
3594 
3595 	    p = endptr;
3596 	    c = *p;
3597 	  }
3598 	/* % followed by punctuation: output something for that
3599 	   punctuation character alone, with no operand.  The
3600 	   TARGET_PRINT_OPERAND hook decides what is actually done.  */
3601 	else if (targetm.asm_out.print_operand_punct_valid_p ((unsigned char) *p))
3602 	  output_operand (NULL_RTX, *p++);
3603 	else
3604 	  output_operand_lossage ("invalid %%-code");
3605 	break;
3606 
3607       default:
3608 	putc (c, asm_out_file);
3609       }
3610 
3611   /* Try to keep the asm a bit more readable.  */
3612   if ((flag_verbose_asm || flag_print_asm_name) && strlen (templ) < 9)
3613     putc ('\t', asm_out_file);
3614 
3615   /* Write out the variable names for operands, if we know them.  */
3616   if (flag_verbose_asm)
3617     output_asm_operand_names (operands, oporder, ops);
3618   if (flag_print_asm_name)
3619     output_asm_name ();
3620 
3621   putc ('\n', asm_out_file);
3622 }
3623 
3624 /* Output a LABEL_REF, or a bare CODE_LABEL, as an assembler symbol.  */
3625 
3626 void
3627 output_asm_label (rtx x)
3628 {
3629   char buf[256];
3630 
3631   if (GET_CODE (x) == LABEL_REF)
3632     x = label_ref_label (x);
3633   if (LABEL_P (x)
3634       || (NOTE_P (x)
3635 	  && NOTE_KIND (x) == NOTE_INSN_DELETED_LABEL))
3636     ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3637   else
3638     output_operand_lossage ("'%%l' operand isn't a label");
3639 
3640   assemble_name (asm_out_file, buf);
3641 }
3642 
3643 /* Marks SYMBOL_REFs in x as referenced through use of assemble_external.  */
3644 
3645 void
3646 mark_symbol_refs_as_used (rtx x)
3647 {
3648   subrtx_iterator::array_type array;
3649   FOR_EACH_SUBRTX (iter, array, x, ALL)
3650     {
3651       const_rtx x = *iter;
3652       if (GET_CODE (x) == SYMBOL_REF)
3653 	if (tree t = SYMBOL_REF_DECL (x))
3654 	  assemble_external (t);
3655     }
3656 }
3657 
3658 /* Print operand X using machine-dependent assembler syntax.
3659    CODE is a non-digit that preceded the operand-number in the % spec,
3660    such as 'z' if the spec was `%z3'.  CODE is 0 if there was no char
3661    between the % and the digits.
3662    When CODE is a non-letter, X is 0.
3663 
3664    The meanings of the letters are machine-dependent and controlled
3665    by TARGET_PRINT_OPERAND.  */
3666 
3667 void
3668 output_operand (rtx x, int code ATTRIBUTE_UNUSED)
3669 {
3670   if (x && GET_CODE (x) == SUBREG)
3671     x = alter_subreg (&x, true);
3672 
3673   /* X must not be a pseudo reg.  */
3674   if (!targetm.no_register_allocation)
3675     gcc_assert (!x || !REG_P (x) || REGNO (x) < FIRST_PSEUDO_REGISTER);
3676 
3677   targetm.asm_out.print_operand (asm_out_file, x, code);
3678 
3679   if (x == NULL_RTX)
3680     return;
3681 
3682   mark_symbol_refs_as_used (x);
3683 }
3684 
3685 /* Print a memory reference operand for address X using
3686    machine-dependent assembler syntax.  */
3687 
3688 void
3689 output_address (machine_mode mode, rtx x)
3690 {
3691   bool changed = false;
3692   walk_alter_subreg (&x, &changed);
3693   targetm.asm_out.print_operand_address (asm_out_file, mode, x);
3694 }
3695 
3696 /* Print an integer constant expression in assembler syntax.
3697    Addition and subtraction are the only arithmetic
3698    that may appear in these expressions.  */
3699 
3700 void
3701 output_addr_const (FILE *file, rtx x)
3702 {
3703   char buf[256];
3704 
3705  restart:
3706   switch (GET_CODE (x))
3707     {
3708     case PC:
3709       putc ('.', file);
3710       break;
3711 
3712     case SYMBOL_REF:
3713       if (SYMBOL_REF_DECL (x))
3714 	assemble_external (SYMBOL_REF_DECL (x));
3715 #ifdef ASM_OUTPUT_SYMBOL_REF
3716       ASM_OUTPUT_SYMBOL_REF (file, x);
3717 #else
3718       assemble_name (file, XSTR (x, 0));
3719 #endif
3720       break;
3721 
3722     case LABEL_REF:
3723       x = label_ref_label (x);
3724       /* Fall through.  */
3725     case CODE_LABEL:
3726       ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3727 #ifdef ASM_OUTPUT_LABEL_REF
3728       ASM_OUTPUT_LABEL_REF (file, buf);
3729 #else
3730       assemble_name (file, buf);
3731 #endif
3732       break;
3733 
3734     case CONST_INT:
3735       fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
3736       break;
3737 
3738     case CONST:
3739       /* This used to output parentheses around the expression,
3740 	 but that does not work on the 386 (either ATT or BSD assembler).  */
3741       output_addr_const (file, XEXP (x, 0));
3742       break;
3743 
3744     case CONST_WIDE_INT:
3745       /* We do not know the mode here so we have to use a roundabout
3746 	 way to build a wide-int to get it printed properly.  */
3747       {
3748 	wide_int w = wide_int::from_array (&CONST_WIDE_INT_ELT (x, 0),
3749 					   CONST_WIDE_INT_NUNITS (x),
3750 					   CONST_WIDE_INT_NUNITS (x)
3751 					   * HOST_BITS_PER_WIDE_INT,
3752 					   false);
3753 	print_decs (w, file);
3754       }
3755       break;
3756 
3757     case CONST_DOUBLE:
3758       if (CONST_DOUBLE_AS_INT_P (x))
3759 	{
3760 	  /* We can use %d if the number is one word and positive.  */
3761 	  if (CONST_DOUBLE_HIGH (x))
3762 	    fprintf (file, HOST_WIDE_INT_PRINT_DOUBLE_HEX,
3763 		     (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (x),
3764 		     (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x));
3765 	  else if (CONST_DOUBLE_LOW (x) < 0)
3766 	    fprintf (file, HOST_WIDE_INT_PRINT_HEX,
3767 		     (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x));
3768 	  else
3769 	    fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x));
3770 	}
3771       else
3772 	/* We can't handle floating point constants;
3773 	   PRINT_OPERAND must handle them.  */
3774 	output_operand_lossage ("floating constant misused");
3775       break;
3776 
3777     case CONST_FIXED:
3778       fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_FIXED_VALUE_LOW (x));
3779       break;
3780 
3781     case PLUS:
3782       /* Some assemblers need integer constants to appear last (e.g. masm).  */
3783       if (CONST_INT_P (XEXP (x, 0)))
3784 	{
3785 	  output_addr_const (file, XEXP (x, 1));
3786 	  if (INTVAL (XEXP (x, 0)) >= 0)
3787 	    fprintf (file, "+");
3788 	  output_addr_const (file, XEXP (x, 0));
3789 	}
3790       else
3791 	{
3792 	  output_addr_const (file, XEXP (x, 0));
3793 	  if (!CONST_INT_P (XEXP (x, 1))
3794 	      || INTVAL (XEXP (x, 1)) >= 0)
3795 	    fprintf (file, "+");
3796 	  output_addr_const (file, XEXP (x, 1));
3797 	}
3798       break;
3799 
3800     case MINUS:
3801       /* Avoid outputting things like x-x or x+5-x,
3802 	 since some assemblers can't handle that.  */
3803       x = simplify_subtraction (x);
3804       if (GET_CODE (x) != MINUS)
3805 	goto restart;
3806 
3807       output_addr_const (file, XEXP (x, 0));
3808       fprintf (file, "-");
3809       if ((CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) >= 0)
3810 	  || GET_CODE (XEXP (x, 1)) == PC
3811 	  || GET_CODE (XEXP (x, 1)) == SYMBOL_REF)
3812 	output_addr_const (file, XEXP (x, 1));
3813       else
3814 	{
3815 	  fputs (targetm.asm_out.open_paren, file);
3816 	  output_addr_const (file, XEXP (x, 1));
3817 	  fputs (targetm.asm_out.close_paren, file);
3818 	}
3819       break;
3820 
3821     case ZERO_EXTEND:
3822     case SIGN_EXTEND:
3823     case SUBREG:
3824     case TRUNCATE:
3825       output_addr_const (file, XEXP (x, 0));
3826       break;
3827 
3828     default:
3829       if (targetm.asm_out.output_addr_const_extra (file, x))
3830 	break;
3831 
3832       output_operand_lossage ("invalid expression as operand");
3833     }
3834 }
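
/* A small illustrative case (the symbol name is invented): for the rtx

	(const:SI (plus:SI (symbol_ref:SI ("buf")) (const_int 12)))

   the function above emits "buf+12".  For a MINUS whose second operand is
   not a plain constant, PC or SYMBOL_REF, that operand is wrapped in the
   target's open_paren/close_paren strings.  */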
3835 
3836 /* Output a quoted string.  */
3837 
3838 void
3839 output_quoted_string (FILE *asm_file, const char *string)
3840 {
3841 #ifdef OUTPUT_QUOTED_STRING
3842   OUTPUT_QUOTED_STRING (asm_file, string);
3843 #else
3844   char c;
3845 
3846   putc ('\"', asm_file);
3847   while ((c = *string++) != 0)
3848     {
3849       if (ISPRINT (c))
3850 	{
3851 	  if (c == '\"' || c == '\\')
3852 	    putc ('\\', asm_file);
3853 	  putc (c, asm_file);
3854 	}
3855       else
3856 	fprintf (asm_file, "\\%03o", (unsigned char) c);
3857     }
3858   putc ('\"', asm_file);
3859 #endif
3860 }
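
/* Illustrative only (the default path, i.e. without OUTPUT_QUOTED_STRING):
   for an input string containing  a"b  followed by a newline, the function
   above writes

	"a\"b\012"

   escaping quotes and backslashes with a backslash and emitting unprintable
   bytes as three-digit octal escapes.  */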
3861 
3862 /* Write a HOST_WIDE_INT number in hex form 0x1234, fast. */
3863 
3864 void
3865 fprint_whex (FILE *f, unsigned HOST_WIDE_INT value)
3866 {
3867   char buf[2 + CHAR_BIT * sizeof (value) / 4];
3868   if (value == 0)
3869     putc ('0', f);
3870   else
3871     {
3872       char *p = buf + sizeof (buf);
3873       do
3874         *--p = "0123456789abcdef"[value % 16];
3875       while ((value /= 16) != 0);
3876       *--p = 'x';
3877       *--p = '0';
3878       fwrite (p, 1, buf + sizeof (buf) - p, f);
3879     }
3880 }
3881 
3882 /* Internal function that prints an unsigned long in decimal in reverse.
3883    The output string IS NOT null-terminated. */
3884 
3885 static int
3886 sprint_ul_rev (char *s, unsigned long value)
3887 {
3888   int i = 0;
3889   do
3890     {
3891       s[i] = "0123456789"[value % 10];
3892       value /= 10;
3893       i++;
3894       /* alternate version, without modulo */
3895       /* oldval = value; */
3896       /* value /= 10; */
3897       /* s[i] = "0123456789" [oldval - 10*value]; */
3898       /* i++ */
3899     }
3900   while (value != 0);
3901   return i;
3902 }
3903 
3904 /* Write an unsigned long as decimal to a file, fast. */
3905 
3906 void
3907 fprint_ul (FILE *f, unsigned long value)
3908 {
3909   /* python says: len(str(2**64)) == 20 */
3910   char s[20];
3911   int i;
3912 
3913   i = sprint_ul_rev (s, value);
3914 
3915   /* It's probably too small to bother with string reversal and fputs. */
3916   do
3917     {
3918       i--;
3919       putc (s[i], f);
3920     }
3921   while (i != 0);
3922 }
3923 
3924 /* Write an unsigned long as decimal to a string, fast.
3925    s must be wide enough to not overflow, at least 21 chars.
3926    Returns the length of the string (without terminating '\0'). */
3927 
3928 int
3929 sprint_ul (char *s, unsigned long value)
3930 {
3931   int len = sprint_ul_rev (s, value);
3932   s[len] = '\0';
3933 
3934   std::reverse (s, s + len);
3935   return len;
3936 }
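
/* A brief usage sketch of the three helpers above (the buffer name `tmp' is
   invented; sprint_ul needs room for up to 20 digits plus the trailing NUL):

	fprint_whex (asm_out_file, 0x2a);	=> writes "0x2a"
	fprint_ul (asm_out_file, 1234UL);	=> writes "1234"
	char tmp[21];
	int n = sprint_ul (tmp, 1234UL);	=> tmp is "1234", n is 4

   They avoid fprintf overhead on hot assembler-output paths.  */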
3937 
3938 /* A poor man's fprintf, with the added features of %I, %R, %L, and %U.
3939    %R prints the value of REGISTER_PREFIX.
3940    %L prints the value of LOCAL_LABEL_PREFIX.
3941    %U prints the value of USER_LABEL_PREFIX.
3942    %I prints the value of IMMEDIATE_PREFIX.
3943    %O runs ASM_OUTPUT_OPCODE to transform what follows in the string.
3944    Also supported are %d, %i, %u, %x, %X, %o, %c, %s and %%.
3945 
3946    We handle alternate assembler dialects here, just like output_asm_insn.  */
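
/* A hedged example (the mnemonic, register string and value are invented;
   %R and %I expand to REGISTER_PREFIX and IMMEDIATE_PREFIX when the target
   defines them, and to nothing otherwise):

	asm_fprintf (file, "\tmov\t%R%s, %I%wd\n", "r3", (HOST_WIDE_INT) 16);

   would emit "	mov	%r3, $16" on a target whose register prefix is "%"
   and whose immediate prefix is "$".  */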
3947 
3948 void
3949 asm_fprintf (FILE *file, const char *p, ...)
3950 {
3951   char buf[10];
3952   char *q, c;
3953 #ifdef ASSEMBLER_DIALECT
3954   int dialect = 0;
3955 #endif
3956   va_list argptr;
3957 
3958   va_start (argptr, p);
3959 
3960   buf[0] = '%';
3961 
3962   while ((c = *p++))
3963     switch (c)
3964       {
3965 #ifdef ASSEMBLER_DIALECT
3966       case '{':
3967       case '}':
3968       case '|':
3969 	p = do_assembler_dialects (p, &dialect);
3970 	break;
3971 #endif
3972 
3973       case '%':
3974 	c = *p++;
3975 	q = &buf[1];
3976 	while (strchr ("-+ #0", c))
3977 	  {
3978 	    *q++ = c;
3979 	    c = *p++;
3980 	  }
3981 	while (ISDIGIT (c) || c == '.')
3982 	  {
3983 	    *q++ = c;
3984 	    c = *p++;
3985 	  }
3986 	switch (c)
3987 	  {
3988 	  case '%':
3989 	    putc ('%', file);
3990 	    break;
3991 
3992 	  case 'd':  case 'i':  case 'u':
3993 	  case 'x':  case 'X':  case 'o':
3994 	  case 'c':
3995 	    *q++ = c;
3996 	    *q = 0;
3997 	    fprintf (file, buf, va_arg (argptr, int));
3998 	    break;
3999 
4000 	  case 'w':
4001 	    /* This is a prefix to the 'd', 'i', 'u', 'x', 'X', and
4002 	       'o' cases, but we do not check for those cases.  It
4003 	       means that the value is a HOST_WIDE_INT, which may be
4004 	       either `long' or `long long'.  */
4005 	    memcpy (q, HOST_WIDE_INT_PRINT, strlen (HOST_WIDE_INT_PRINT));
4006 	    q += strlen (HOST_WIDE_INT_PRINT);
4007 	    *q++ = *p++;
4008 	    *q = 0;
4009 	    fprintf (file, buf, va_arg (argptr, HOST_WIDE_INT));
4010 	    break;
4011 
4012 	  case 'l':
4013 	    *q++ = c;
4014 #ifdef HAVE_LONG_LONG
4015 	    if (*p == 'l')
4016 	      {
4017 		*q++ = *p++;
4018 		*q++ = *p++;
4019 		*q = 0;
4020 		fprintf (file, buf, va_arg (argptr, long long));
4021 	      }
4022 	    else
4023 #endif
4024 	      {
4025 		*q++ = *p++;
4026 		*q = 0;
4027 		fprintf (file, buf, va_arg (argptr, long));
4028 	      }
4029 
4030 	    break;
4031 
4032 	  case 's':
4033 	    *q++ = c;
4034 	    *q = 0;
4035 	    fprintf (file, buf, va_arg (argptr, char *));
4036 	    break;
4037 
4038 	  case 'O':
4039 #ifdef ASM_OUTPUT_OPCODE
4040 	    ASM_OUTPUT_OPCODE (asm_out_file, p);
4041 #endif
4042 	    break;
4043 
4044 	  case 'R':
4045 #ifdef REGISTER_PREFIX
4046 	    fprintf (file, "%s", REGISTER_PREFIX);
4047 #endif
4048 	    break;
4049 
4050 	  case 'I':
4051 #ifdef IMMEDIATE_PREFIX
4052 	    fprintf (file, "%s", IMMEDIATE_PREFIX);
4053 #endif
4054 	    break;
4055 
4056 	  case 'L':
4057 #ifdef LOCAL_LABEL_PREFIX
4058 	    fprintf (file, "%s", LOCAL_LABEL_PREFIX);
4059 #endif
4060 	    break;
4061 
4062 	  case 'U':
4063 	    fputs (user_label_prefix, file);
4064 	    break;
4065 
4066 #ifdef ASM_FPRINTF_EXTENSIONS
4067 	    /* Uppercase letters are reserved for general use by asm_fprintf
4068 	       and so are not available to target specific code.  In order to
4069 	       prevent the ASM_FPRINTF_EXTENSIONS macro from using them then,
4070 	       they are defined here.  As they get turned into real extensions
4071 	       to asm_fprintf they should be removed from this list.  */
4072 	  case 'A': case 'B': case 'C': case 'D': case 'E':
4073 	  case 'F': case 'G': case 'H': case 'J': case 'K':
4074 	  case 'M': case 'N': case 'P': case 'Q': case 'S':
4075 	  case 'T': case 'V': case 'W': case 'Y': case 'Z':
4076 	    break;
4077 
4078 	  ASM_FPRINTF_EXTENSIONS (file, argptr, p)
4079 #endif
4080 	  default:
4081 	    gcc_unreachable ();
4082 	  }
4083 	break;
4084 
4085       default:
4086 	putc (c, file);
4087       }
4088   va_end (argptr);
4089 }
4090 
4091 /* Return nonzero if this function has no function calls.  */
4092 
4093 int
4094 leaf_function_p (void)
4095 {
4096   rtx_insn *insn;
4097 
4098   /* Ensure we walk the entire function body.  */
4099   gcc_assert (!in_sequence_p ());
4100 
4101   /* Some back-ends (e.g. s390) want leaf functions to stay leaf
4102      functions even if they call mcount.  */
4103   if (crtl->profile && !targetm.keep_leaf_when_profiled ())
4104     return 0;
4105 
4106   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4107     {
4108       if (CALL_P (insn)
4109 	  && ! SIBLING_CALL_P (insn)
4110 	  && ! FAKE_CALL_P (insn))
4111 	return 0;
4112       if (NONJUMP_INSN_P (insn)
4113 	  && GET_CODE (PATTERN (insn)) == SEQUENCE
4114 	  && CALL_P (XVECEXP (PATTERN (insn), 0, 0))
4115 	  && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
4116 	return 0;
4117     }
4118 
4119   return 1;
4120 }
4121 
4122 /* Return 1 if branch is a forward branch.
4123    Uses the insn_shuid array, so it works only in the final pass.  May be used
4124    by output templates to add customary branch prediction hints.
4125  */
4126 int
4127 final_forward_branch_p (rtx_insn *insn)
4128 {
4129   int insn_id, label_id;
4130 
4131   gcc_assert (uid_shuid);
4132   insn_id = INSN_SHUID (insn);
4133   label_id = INSN_SHUID (JUMP_LABEL (insn));
4134   /* We've hit some insns that do not have id information available.  */
4135   gcc_assert (insn_id && label_id);
4136   return insn_id < label_id;
4137 }
4138 
4139 /* On some machines, a function with no call insns
4140    can run faster if it doesn't create its own register window.
4141    When output, the leaf function should use only the "output"
4142    registers.  Ordinarily, the function would be compiled to use
4143    the "input" registers to find its arguments; it is a candidate
4144    for leaf treatment if it uses only the "input" registers.
4145    Leaf function treatment means renumbering so the function
4146    uses the "output" registers instead.  */
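
/* As a hedged example (register names invented, loosely SPARC-like, where
   register windows come from): if LEAF_REG_REMAP maps the incoming register
   %i0 to the outgoing register %o0, a function that makes no calls and uses
   only %i0..%i5 can be rewritten by leaf_renumber_regs below to use
   %o0..%o5, and thus never needs a register window of its own.  */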
4147 
4148 #ifdef LEAF_REGISTERS
4149 
4150 /* Return 1 if this function uses only the registers that can be
4151    safely renumbered.  */
4152 
4153 int
4154 only_leaf_regs_used (void)
4155 {
4156   int i;
4157   const char *const permitted_reg_in_leaf_functions = LEAF_REGISTERS;
4158 
4159   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4160     if ((df_regs_ever_live_p (i) || global_regs[i])
4161 	&& ! permitted_reg_in_leaf_functions[i])
4162       return 0;
4163 
4164   if (crtl->uses_pic_offset_table
4165       && pic_offset_table_rtx != 0
4166       && REG_P (pic_offset_table_rtx)
4167       && ! permitted_reg_in_leaf_functions[REGNO (pic_offset_table_rtx)])
4168     return 0;
4169 
4170   return 1;
4171 }
4172 
4173 /* Scan all instructions and renumber all registers into those
4174    available in leaf functions.  */
4175 
4176 static void
4177 leaf_renumber_regs (rtx_insn *first)
4178 {
4179   rtx_insn *insn;
4180 
4181   /* Renumber only the actual patterns.
4182      The reg-notes can contain frame pointer refs,
4183      and renumbering them could crash; it should not be needed anyway.  */
4184   for (insn = first; insn; insn = NEXT_INSN (insn))
4185     if (INSN_P (insn))
4186       leaf_renumber_regs_insn (PATTERN (insn));
4187 }
4188 
4189 /* Scan IN_RTX and its subexpressions, and renumber all regs into those
4190    available in leaf functions.  */
4191 
4192 void
4193 leaf_renumber_regs_insn (rtx in_rtx)
4194 {
4195   int i, j;
4196   const char *format_ptr;
4197 
4198   if (in_rtx == 0)
4199     return;
4200 
4201   /* Renumber all input-registers into output-registers.
4202      A register that has already been renumbered is marked through its
4203      `used' flag and is not renumbered twice.  */
4204 
4205   if (REG_P (in_rtx))
4206     {
4207       int newreg;
4208 
4209       /* Don't renumber the same reg twice.  */
4210       if (in_rtx->used)
4211 	return;
4212 
4213       newreg = REGNO (in_rtx);
4214       /* Don't try to renumber pseudo regs.  It is possible for a pseudo reg
4215 	 to reach here as part of a REG_NOTE.  */
4216       if (newreg >= FIRST_PSEUDO_REGISTER)
4217 	{
4218 	  in_rtx->used = 1;
4219 	  return;
4220 	}
4221       newreg = LEAF_REG_REMAP (newreg);
4222       gcc_assert (newreg >= 0);
4223       df_set_regs_ever_live (REGNO (in_rtx), false);
4224       df_set_regs_ever_live (newreg, true);
4225       SET_REGNO (in_rtx, newreg);
4226       in_rtx->used = 1;
4227       return;
4228     }
4229 
4230   if (INSN_P (in_rtx))
4231     {
4232       /* Inside a SEQUENCE, we find insns.
4233 	 Renumber just the patterns of these insns,
4234 	 just as we do for the top-level insns.  */
4235       leaf_renumber_regs_insn (PATTERN (in_rtx));
4236       return;
4237     }
4238 
4239   format_ptr = GET_RTX_FORMAT (GET_CODE (in_rtx));
4240 
4241   for (i = 0; i < GET_RTX_LENGTH (GET_CODE (in_rtx)); i++)
4242     switch (*format_ptr++)
4243       {
4244       case 'e':
4245 	leaf_renumber_regs_insn (XEXP (in_rtx, i));
4246 	break;
4247 
4248       case 'E':
4249 	if (XVEC (in_rtx, i) != NULL)
4250 	  for (j = 0; j < XVECLEN (in_rtx, i); j++)
4251 	    leaf_renumber_regs_insn (XVECEXP (in_rtx, i, j));
4252 	break;
4253 
4254       case 'S':
4255       case 's':
4256       case '0':
4257       case 'i':
4258       case 'w':
4259       case 'p':
4260       case 'n':
4261       case 'u':
4262 	break;
4263 
4264       default:
4265 	gcc_unreachable ();
4266       }
4267 }
4268 #endif
4269 
4270 /* Turn the RTL into assembly.  */
4271 static unsigned int
4272 rest_of_handle_final (void)
4273 {
4274   const char *fnname = get_fnname_from_decl (current_function_decl);
4275 
4276   /* Turn debug markers into notes if the var-tracking pass has not
4277      been invoked.  */
4278   if (!flag_var_tracking && MAY_HAVE_DEBUG_MARKER_INSNS)
4279     delete_vta_debug_insns (false);
4280 
4281   assemble_start_function (current_function_decl, fnname);
4282   rtx_insn *first = get_insns ();
4283   int seen = 0;
4284   final_start_function_1 (&first, asm_out_file, &seen, optimize);
4285   final_1 (first, asm_out_file, seen, optimize);
4286   if (flag_ipa_ra
4287       && !lookup_attribute ("noipa", DECL_ATTRIBUTES (current_function_decl))
4288       /* Functions with naked attributes are supported only with basic asm
4289 	 statements in the body, thus for supported use cases the information
4290 	 on clobbered registers is not available.  */
4291       && !lookup_attribute ("naked", DECL_ATTRIBUTES (current_function_decl)))
4292     collect_fn_hard_reg_usage ();
4293   final_end_function ();
4294 
4295   /* The IA-64 ".handlerdata" directive must be issued before the ".endp"
4296      directive that closes the procedure descriptor.  Similarly, for x64 SEH.
4297      Otherwise it's not strictly necessary, but it doesn't hurt either.  */
4298   output_function_exception_table (crtl->has_bb_partition ? 1 : 0);
4299 
4300   assemble_end_function (current_function_decl, fnname);
4301 
4302   /* Free up reg info memory.  */
4303   free_reg_info ();
4304 
4305   if (! quiet_flag)
4306     fflush (asm_out_file);
4307 
4308   /* Write DBX symbols if requested.  */
4309 
4310   /* Note that for those inline functions where we don't initially
4311      know for certain that we will be generating an out-of-line copy,
4312      the first invocation of this routine (rest_of_compilation) will
4313      skip over this code by doing a `goto exit_rest_of_compilation;'.
4314      Later on, wrapup_global_declarations will (indirectly) call
4315      rest_of_compilation again for those inline functions that need
4316      to have out-of-line copies generated.  During that call, we
4317      *will* be routed past here.  */
4318 
4319   timevar_push (TV_SYMOUT);
4320   if (!DECL_IGNORED_P (current_function_decl))
4321     debug_hooks->function_decl (current_function_decl);
4322   timevar_pop (TV_SYMOUT);
4323 
4324   /* Release the blocks that are linked to DECL_INITIAL() to free the memory.  */
4325   DECL_INITIAL (current_function_decl) = error_mark_node;
4326 
4327   if (DECL_STATIC_CONSTRUCTOR (current_function_decl)
4328       && targetm.have_ctors_dtors)
4329     targetm.asm_out.constructor (XEXP (DECL_RTL (current_function_decl), 0),
4330 				 decl_init_priority_lookup
4331 				   (current_function_decl));
4332   if (DECL_STATIC_DESTRUCTOR (current_function_decl)
4333       && targetm.have_ctors_dtors)
4334     targetm.asm_out.destructor (XEXP (DECL_RTL (current_function_decl), 0),
4335 				decl_fini_priority_lookup
4336 				  (current_function_decl));
4337   return 0;
4338 }
4339 
4340 namespace {
4341 
4342 const pass_data pass_data_final =
4343 {
4344   RTL_PASS, /* type */
4345   "final", /* name */
4346   OPTGROUP_NONE, /* optinfo_flags */
4347   TV_FINAL, /* tv_id */
4348   0, /* properties_required */
4349   0, /* properties_provided */
4350   0, /* properties_destroyed */
4351   0, /* todo_flags_start */
4352   0, /* todo_flags_finish */
4353 };
4354 
4355 class pass_final : public rtl_opt_pass
4356 {
4357 public:
4358   pass_final (gcc::context *ctxt)
4359     : rtl_opt_pass (pass_data_final, ctxt)
4360   {}
4361 
4362   /* opt_pass methods: */
4363   virtual unsigned int execute (function *) { return rest_of_handle_final (); }
4364 
4365 }; // class pass_final
4366 
4367 } // anon namespace
4368 
4369 rtl_opt_pass *
4370 make_pass_final (gcc::context *ctxt)
4371 {
4372   return new pass_final (ctxt);
4373 }
4374 
4375 
4376 static unsigned int
4377 rest_of_handle_shorten_branches (void)
4378 {
4379   /* Shorten branches.  */
4380   shorten_branches (get_insns ());
4381   return 0;
4382 }
4383 
4384 namespace {
4385 
4386 const pass_data pass_data_shorten_branches =
4387 {
4388   RTL_PASS, /* type */
4389   "shorten", /* name */
4390   OPTGROUP_NONE, /* optinfo_flags */
4391   TV_SHORTEN_BRANCH, /* tv_id */
4392   0, /* properties_required */
4393   0, /* properties_provided */
4394   0, /* properties_destroyed */
4395   0, /* todo_flags_start */
4396   0, /* todo_flags_finish */
4397 };
4398 
4399 class pass_shorten_branches : public rtl_opt_pass
4400 {
4401 public:
4402   pass_shorten_branches (gcc::context *ctxt)
4403     : rtl_opt_pass (pass_data_shorten_branches, ctxt)
4404   {}
4405 
4406   /* opt_pass methods: */
4407   virtual unsigned int execute (function *)
4408     {
4409       return rest_of_handle_shorten_branches ();
4410     }
4411 
4412 }; // class pass_shorten_branches
4413 
4414 } // anon namespace
4415 
4416 rtl_opt_pass *
4417 make_pass_shorten_branches (gcc::context *ctxt)
4418 {
4419   return new pass_shorten_branches (ctxt);
4420 }
4421 
4422 
4423 static unsigned int
4424 rest_of_clean_state (void)
4425 {
4426   rtx_insn *insn, *next;
4427   FILE *final_output = NULL;
4428   int save_unnumbered = flag_dump_unnumbered;
4429   int save_noaddr = flag_dump_noaddr;
4430 
4431   if (flag_dump_final_insns)
4432     {
4433       final_output = fopen (flag_dump_final_insns, "a");
4434       if (!final_output)
4435 	{
4436 	  error ("could not open final insn dump file %qs: %m",
4437 		 flag_dump_final_insns);
4438 	  flag_dump_final_insns = NULL;
4439 	}
4440       else
4441 	{
4442 	  flag_dump_noaddr = flag_dump_unnumbered = 1;
4443 	  if (flag_compare_debug_opt || flag_compare_debug)
4444 	    dump_flags |= TDF_NOUID | TDF_COMPARE_DEBUG;
4445 	  dump_function_header (final_output, current_function_decl,
4446 				dump_flags);
4447 	  final_insns_dump_p = true;
4448 
4449 	  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4450 	    if (LABEL_P (insn))
4451 	      INSN_UID (insn) = CODE_LABEL_NUMBER (insn);
4452 	    else
4453 	      {
4454 		if (NOTE_P (insn))
4455 		  set_block_for_insn (insn, NULL);
4456 		INSN_UID (insn) = 0;
4457 	      }
4458 	}
4459     }
4460 
4461   /* It is very important to decompose the RTL instruction chain here:
4462      debug information keeps pointing into CODE_LABEL insns inside the function
4463      body.  If these remain pointing to the other insns, we end up preserving
4464      the whole RTL chain and the attached detailed debug info in memory.  */
4465   for (insn = get_insns (); insn; insn = next)
4466     {
4467       next = NEXT_INSN (insn);
4468       SET_NEXT_INSN (insn) = NULL;
4469       SET_PREV_INSN (insn) = NULL;
4470 
4471       rtx_insn *call_insn = insn;
4472       if (NONJUMP_INSN_P (call_insn)
4473 	  && GET_CODE (PATTERN (call_insn)) == SEQUENCE)
4474 	{
4475 	  rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (call_insn));
4476 	  call_insn = seq->insn (0);
4477 	}
4478       if (CALL_P (call_insn))
4479 	{
4480 	  rtx note
4481 	    = find_reg_note (call_insn, REG_CALL_ARG_LOCATION, NULL_RTX);
4482 	  if (note)
4483 	    remove_note (call_insn, note);
4484 	}
4485 
4486       if (final_output
4487 	  && (!NOTE_P (insn)
4488 	      || (NOTE_KIND (insn) != NOTE_INSN_VAR_LOCATION
4489 		  && NOTE_KIND (insn) != NOTE_INSN_BEGIN_STMT
4490 		  && NOTE_KIND (insn) != NOTE_INSN_INLINE_ENTRY
4491 		  && NOTE_KIND (insn) != NOTE_INSN_BLOCK_BEG
4492 		  && NOTE_KIND (insn) != NOTE_INSN_BLOCK_END
4493 		  && NOTE_KIND (insn) != NOTE_INSN_DELETED_DEBUG_LABEL)))
4494 	print_rtl_single (final_output, insn);
4495     }
4496 
4497   if (final_output)
4498     {
4499       flag_dump_noaddr = save_noaddr;
4500       flag_dump_unnumbered = save_unnumbered;
4501       final_insns_dump_p = false;
4502 
4503       if (fclose (final_output))
4504 	{
4505 	  error ("could not close final insn dump file %qs: %m",
4506 		 flag_dump_final_insns);
4507 	  flag_dump_final_insns = NULL;
4508 	}
4509     }
4510 
4511   flag_rerun_cse_after_global_opts = 0;
4512   reload_completed = 0;
4513   epilogue_completed = 0;
4514 #ifdef STACK_REGS
4515   regstack_completed = 0;
4516 #endif
4517 
4518   /* Clear out the insn_length contents now that they are no
4519      longer valid.  */
4520   init_insn_lengths ();
4521 
4522   /* Show no temporary slots allocated.  */
4523   init_temp_slots ();
4524 
4525   free_bb_for_insn ();
4526 
4527   if (cfun->gimple_df)
4528     delete_tree_ssa (cfun);
4529 
4530   /* We can reduce stack alignment at the call site only when we are sure that
4531      the function body just produced will actually be used in the final
4532      executable.  */
4533   if (flag_ipa_stack_alignment
4534       && decl_binds_to_current_def_p (current_function_decl))
4535     {
4536       unsigned int pref = crtl->preferred_stack_boundary;
4537       if (crtl->stack_alignment_needed > crtl->preferred_stack_boundary)
4538         pref = crtl->stack_alignment_needed;
4539       cgraph_node::rtl_info (current_function_decl)
4540 	->preferred_incoming_stack_boundary = pref;
4541     }
4542 
4543   /* Make sure volatile mem refs aren't considered valid operands for
4544      arithmetic insns.  We must call this here if this is a nested inline
4545      function, since the above code leaves us in the init_recog state,
4546      and the function context push/pop code does not save/restore volatile_ok.
4547 
4548      ??? Maybe it isn't necessary for expand_start_function to call this
4549      anymore if we do it here?  */
4550 
4551   init_recog_no_volatile ();
4552 
4553   /* We're done with this function.  Free up memory if we can.  */
4554   free_after_parsing (cfun);
4555   free_after_compilation (cfun);
4556   return 0;
4557 }
4558 
4559 namespace {
4560 
4561 const pass_data pass_data_clean_state =
4562 {
4563   RTL_PASS, /* type */
4564   "*clean_state", /* name */
4565   OPTGROUP_NONE, /* optinfo_flags */
4566   TV_FINAL, /* tv_id */
4567   0, /* properties_required */
4568   0, /* properties_provided */
4569   PROP_rtl, /* properties_destroyed */
4570   0, /* todo_flags_start */
4571   0, /* todo_flags_finish */
4572 };
4573 
4574 class pass_clean_state : public rtl_opt_pass
4575 {
4576 public:
4577   pass_clean_state (gcc::context *ctxt)
4578     : rtl_opt_pass (pass_data_clean_state, ctxt)
4579   {}
4580 
4581   /* opt_pass methods: */
4582   virtual unsigned int execute (function *)
4583     {
4584       return rest_of_clean_state ();
4585     }
4586 
4587 }; // class pass_clean_state
4588 
4589 } // anon namespace
4590 
4591 rtl_opt_pass *
4592 make_pass_clean_state (gcc::context *ctxt)
4593 {
4594   return new pass_clean_state (ctxt);
4595 }
4596 
4597 /* Return true if INSN is a call to the current function.  */
4598 
4599 static bool
4600 self_recursive_call_p (rtx_insn *insn)
4601 {
4602   tree fndecl = get_call_fndecl (insn);
4603   return (fndecl == current_function_decl
4604 	  && decl_binds_to_current_def_p (fndecl));
4605 }
4606 
4607 /* Collect hard register usage for the current function.  */
4608 
4609 static void
4610 collect_fn_hard_reg_usage (void)
4611 {
4612   rtx_insn *insn;
4613 #ifdef STACK_REGS
4614   int i;
4615 #endif
4616   struct cgraph_rtl_info *node;
4617   HARD_REG_SET function_used_regs;
4618 
4619   /* ??? To be removed when all the ports have been fixed.  */
4620   if (!targetm.call_fusage_contains_non_callee_clobbers)
4621     return;
4622 
4623   /* Be conservative - mark fixed and global registers as used.  */
4624   function_used_regs = fixed_reg_set;
4625 
4626 #ifdef STACK_REGS
4627   /* Handle STACK_REGS conservatively, since the df-framework does not
4628      provide accurate information for them.  */
4629 
4630   for (i = FIRST_STACK_REG; i <= LAST_STACK_REG; i++)
4631     SET_HARD_REG_BIT (function_used_regs, i);
4632 #endif
4633 
4634   for (insn = get_insns (); insn != NULL_RTX; insn = next_insn (insn))
4635     {
4636       HARD_REG_SET insn_used_regs;
4637 
4638       if (!NONDEBUG_INSN_P (insn))
4639 	continue;
4640 
4641       if (CALL_P (insn)
4642 	  && !self_recursive_call_p (insn))
4643 	function_used_regs
4644 	  |= insn_callee_abi (insn).full_and_partial_reg_clobbers ();
4645 
4646       find_all_hard_reg_sets (insn, &insn_used_regs, false);
4647       function_used_regs |= insn_used_regs;
4648 
4649       if (hard_reg_set_subset_p (crtl->abi->full_and_partial_reg_clobbers (),
4650 				 function_used_regs))
4651 	return;
4652     }
4653 
4654   /* Mask out fully-saved registers, so that they don't affect equality
4655      comparisons between function_abis.  */
4656   function_used_regs &= crtl->abi->full_and_partial_reg_clobbers ();
4657 
4658   node = cgraph_node::rtl_info (current_function_decl);
4659   gcc_assert (node != NULL);
4660 
4661   node->function_used_regs = function_used_regs;
4662 }
4663