/* Shared code for before and after reload gcse implementations.
   Copyright (C) 1997-2020 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.

It is expected that more hunks of gcse.c and postreload-gcse.c should
migrate into this file.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "df.h"
#include "gcse-common.h"
301debfc3dSmrg
311debfc3dSmrg
321debfc3dSmrg /* Record all of the canonicalized MEMs of record_last_mem_set_info's insn.
331debfc3dSmrg Note we store a pair of elements in the list, so they have to be
341debfc3dSmrg taken off pairwise. */
351debfc3dSmrg
361debfc3dSmrg void
canon_list_insert(rtx dest,const_rtx x ATTRIBUTE_UNUSED,void * data)371debfc3dSmrg canon_list_insert (rtx dest, const_rtx x ATTRIBUTE_UNUSED, void *data)
381debfc3dSmrg {
391debfc3dSmrg rtx dest_addr;
401debfc3dSmrg int bb;
411debfc3dSmrg modify_pair pair;
421debfc3dSmrg
431debfc3dSmrg while (GET_CODE (dest) == SUBREG
441debfc3dSmrg || GET_CODE (dest) == ZERO_EXTRACT
451debfc3dSmrg || GET_CODE (dest) == STRICT_LOW_PART)
461debfc3dSmrg dest = XEXP (dest, 0);
471debfc3dSmrg
481debfc3dSmrg /* If DEST is not a MEM, then it will not conflict with a load. Note
491debfc3dSmrg that function calls are assumed to clobber memory, but are handled
501debfc3dSmrg elsewhere. */
511debfc3dSmrg
521debfc3dSmrg if (! MEM_P (dest))
531debfc3dSmrg return;
541debfc3dSmrg
551debfc3dSmrg dest_addr = get_addr (XEXP (dest, 0));
561debfc3dSmrg dest_addr = canon_rtx (dest_addr);
571debfc3dSmrg rtx_insn *insn = ((struct gcse_note_stores_info *)data)->insn;
581debfc3dSmrg bb = BLOCK_FOR_INSN (insn)->index;
591debfc3dSmrg
601debfc3dSmrg pair.dest = dest;
611debfc3dSmrg pair.dest_addr = dest_addr;
621debfc3dSmrg vec<modify_pair> *canon_mem_list
631debfc3dSmrg = ((struct gcse_note_stores_info *)data)->canon_mem_list;
641debfc3dSmrg canon_mem_list[bb].safe_push (pair);
651debfc3dSmrg }
661debfc3dSmrg
671debfc3dSmrg /* Record memory modification information for INSN. We do not actually care
681debfc3dSmrg about the memory location(s) that are set, or even how they are set (consider
691debfc3dSmrg a CALL_INSN). We merely need to record which insns modify memory. */
701debfc3dSmrg
711debfc3dSmrg void
record_last_mem_set_info_common(rtx_insn * insn,vec<rtx_insn * > * modify_mem_list,vec<modify_pair> * canon_modify_mem_list,bitmap modify_mem_list_set,bitmap blocks_with_calls)721debfc3dSmrg record_last_mem_set_info_common (rtx_insn *insn,
731debfc3dSmrg vec<rtx_insn *> *modify_mem_list,
741debfc3dSmrg vec<modify_pair> *canon_modify_mem_list,
751debfc3dSmrg bitmap modify_mem_list_set,
761debfc3dSmrg bitmap blocks_with_calls)
771debfc3dSmrg
781debfc3dSmrg {
791debfc3dSmrg int bb;
801debfc3dSmrg
811debfc3dSmrg bb = BLOCK_FOR_INSN (insn)->index;
821debfc3dSmrg modify_mem_list[bb].safe_push (insn);
831debfc3dSmrg bitmap_set_bit (modify_mem_list_set, bb);
841debfc3dSmrg
851debfc3dSmrg if (CALL_P (insn))
861debfc3dSmrg bitmap_set_bit (blocks_with_calls, bb);
871debfc3dSmrg else
881debfc3dSmrg {
891debfc3dSmrg struct gcse_note_stores_info data;
901debfc3dSmrg data.insn = insn;
911debfc3dSmrg data.canon_mem_list = canon_modify_mem_list;
92*8feb0f0bSmrg note_stores (insn, canon_list_insert, (void*) &data);
931debfc3dSmrg }
941debfc3dSmrg }
951debfc3dSmrg
961debfc3dSmrg
/* For each block, compute whether X is transparent.  X is either an
   expression or an assignment [though we don't care which, for this context
   an assignment is treated as an expression].  For each block where an
   element of X is modified, reset the INDX bit in BMAP.

   BLOCKS_WITH_CALLS indicates which blocks contain CALL_INSNs which kill
   memory.

   MODIFY_MEM_LIST_SET indicates which blocks have memory stores which might
   kill a particular memory location.

   CANON_MODIFY_MEM_LIST is the canonicalized list of memory locations modified
   for each block.  */

void
compute_transp (const_rtx x, int indx, sbitmap *bmap,
		bitmap blocks_with_calls,
		bitmap modify_mem_list_set,
		vec<modify_pair> *canon_modify_mem_list)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  /* repeat is used to turn tail-recursion into iteration since GCC
     can't do it when there's no return value.  */
 repeat:

  if (x == 0)
    return;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      {
	/* X is killed (not transparent) in every block containing a
	   definition of this register, per the DF def chains.  */
	df_ref def;
	for (def = DF_REG_DEF_CHAIN (REGNO (x));
	     def;
	     def = DF_REF_NEXT_REG (def))
	  bitmap_clear_bit (bmap[DF_REF_BB (def)->index], indx);
      }

      return;

    case MEM:
      if (! MEM_READONLY_P (x))
	{
	  bitmap_iterator bi;
	  unsigned bb_index;
	  rtx x_addr;

	  x_addr = get_addr (XEXP (x, 0));
	  x_addr = canon_rtx (x_addr);

	  /* First handle all the blocks with calls.  We don't need to
	     do any list walking for them.  */
	  EXECUTE_IF_SET_IN_BITMAP (blocks_with_calls, 0, bb_index, bi)
	    {
	      bitmap_clear_bit (bmap[bb_index], indx);
	    }

	  /* Now iterate over the blocks which have memory modifications
	     but which do not have any calls.  */
	  EXECUTE_IF_AND_COMPL_IN_BITMAP (modify_mem_list_set,
					  blocks_with_calls,
					  0, bb_index, bi)
	    {
	      vec<modify_pair> list
		= canon_modify_mem_list[bb_index];
	      modify_pair *pair;
	      unsigned ix;

	      /* The pairs were pushed in insn order; walk them in
		 reverse and stop at the first store that may alias X —
		 one kill is enough to clear transparency for this
		 block.  */
	      FOR_EACH_VEC_ELT_REVERSE (list, ix, pair)
		{
		  rtx dest = pair->dest;
		  rtx dest_addr = pair->dest_addr;

		  if (canon_true_dependence (dest, GET_MODE (dest),
					     dest_addr, x, x_addr))
		    {
		      bitmap_clear_bit (bmap[bb_index], indx);
		      break;
		    }
		}
	    }
	}

      /* Fall through to also scan the MEM's address expression, which
	 may itself reference registers or nested MEMs.  */
      x = XEXP (x, 0);
      goto repeat;

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      /* Constants and labels cannot be modified; X stays transparent
	 everywhere on their account.  */
      return;

    default:
      break;
    }

  /* Generic traversal: recurse into every rtx ('e') and rtvec ('E')
     operand of X.  */
  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  /* If we are about to do the last recursive call
	     needed at this level, change it into iteration.
	     This function is called enough to be worth it.  */
	  if (i == 0)
	    {
	      x = XEXP (x, i);
	      goto repeat;
	    }

	  compute_transp (XEXP (x, i), indx, bmap, blocks_with_calls,
			  modify_mem_list_set, canon_modify_mem_list);
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  compute_transp (XVECEXP (x, i, j), indx, bmap, blocks_with_calls,
			  modify_mem_list_set, canon_modify_mem_list);
    }
}
224