Fix incorrect optimization by cprop_hardreg.
[gcc.git] / gcc / regcprop.c
1 /* Copy propagation on hard registers for the GNU compiler.
2 Copyright (C) 2000-2021 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "rtl.h"
25 #include "df.h"
26 #include "memmodel.h"
27 #include "tm_p.h"
28 #include "insn-config.h"
29 #include "regs.h"
30 #include "emit-rtl.h"
31 #include "recog.h"
32 #include "diagnostic-core.h"
33 #include "addresses.h"
34 #include "tree-pass.h"
35 #include "rtl-iter.h"
36 #include "cfgrtl.h"
37 #include "target.h"
38 #include "function-abi.h"
39
40 /* The following code does forward propagation of hard register copies.
41 The object is to eliminate as many dependencies as possible, so that
42 we have the most scheduling freedom. As a side effect, we also clean
43 up some silly register allocation decisions made by reload. This
44 code may be obsoleted by a new register allocator. */
45
/* DEBUG_INSNs aren't changed right away, as doing so might extend the
   lifetime of a register and get the DEBUG_INSN subsequently reset.
   So they are queued instead, and updated only when the register is
   used in some subsequent real insn before it is set.  */
struct queued_debug_insn_change
{
  /* Next queued change for the same tracked register.  */
  struct queued_debug_insn_change *next;
  /* The DEBUG_INSN to be modified.  */
  rtx_insn *insn;
  /* Location inside INSN whose contents are to be replaced.  */
  rtx *loc;
  /* The replacement to install at *LOC.  */
  rtx new_rtx;
};
57
/* For each register, we have a list of registers that contain the same
   value.  The OLDEST_REGNO field points to the head of the list, and
   the NEXT_REGNO field runs through the list.  The MODE field indicates
   what mode the data is known to be in; this field is VOIDmode when the
   register is not known to contain valid data.  */

struct value_data_entry
{
  /* Mode the register was set in; VOIDmode when no valid value is known.  */
  machine_mode mode;
  /* Head (oldest member) of the value chain this register belongs to;
     equals the register's own number when it is a singleton.  */
  unsigned int oldest_regno;
  /* Next register on the chain, or INVALID_REGNUM at the tail.  */
  unsigned int next_regno;
  /* Queued DEBUG_INSN replacements that would substitute this register.  */
  struct queued_debug_insn_change *debug_insn_changes;
};

struct value_data
{
  /* Per-hard-register tracking state.  */
  struct value_data_entry e[FIRST_PSEUDO_REGISTER];
  /* Largest number of hard registers occupied by any value recorded so
     far; bounds the backward overlap scan in kill_value_regno.  */
  unsigned int max_value_regs;
  /* Total count of entries queued on all debug_insn_changes lists.  */
  unsigned int n_debug_insn_changes;
};
78
/* Pool allocator backing the queued_debug_insn_change lists hung off
   value_data_entry::debug_insn_changes.  */
static object_allocator<queued_debug_insn_change> queued_debug_insn_change_pool
  ("debug insn changes pool");

/* When true, replace_oldest_value_reg leaves DEBUG_INSNs untouched.
   NOTE(review): set outside this chunk — presumably by the pass driver;
   confirm against the rest of the file.  */
static bool skip_debug_insn_p;
83
/* Forward declarations for the mutually referencing helpers below.  */

static void kill_value_one_regno (unsigned, struct value_data *);
static void kill_value_regno (unsigned, unsigned, struct value_data *);
static void kill_value (const_rtx, struct value_data *);
static void set_value_regno (unsigned, machine_mode, struct value_data *);
static void init_value_data (struct value_data *);
static void kill_clobbered_value (rtx, const_rtx, void *);
static void kill_set_value (rtx, const_rtx, void *);
static void copy_value (rtx, rtx, struct value_data *);
static bool mode_change_ok (machine_mode, machine_mode,
			    unsigned int);
static rtx maybe_mode_change (machine_mode, machine_mode,
			      machine_mode, unsigned int, unsigned int);
static rtx find_oldest_value_reg (enum reg_class, rtx, struct value_data *);
static bool replace_oldest_value_reg (rtx *, enum reg_class, rtx_insn *,
				      struct value_data *);
static bool replace_oldest_value_addr (rtx *, enum reg_class,
				       machine_mode, addr_space_t,
				       rtx_insn *, struct value_data *);
static bool replace_oldest_value_mem (rtx, rtx_insn *, struct value_data *);
static bool copyprop_hardreg_forward_1 (basic_block, struct value_data *);
extern void debug_value_data (struct value_data *);
static void validate_value_data (struct value_data *);
106
107 /* Free all queued updates for DEBUG_INSNs that change some reg to
108 register REGNO. */
109
110 static void
111 free_debug_insn_changes (struct value_data *vd, unsigned int regno)
112 {
113 struct queued_debug_insn_change *cur, *next;
114 for (cur = vd->e[regno].debug_insn_changes; cur; cur = next)
115 {
116 next = cur->next;
117 --vd->n_debug_insn_changes;
118 queued_debug_insn_change_pool.remove (cur);
119 }
120 vd->e[regno].debug_insn_changes = NULL;
121 }
122
/* Kill register REGNO.  This involves removing it from any value
   lists, and resetting the value mode to VOIDmode.  This is only a
   helper function; it does not handle any hard registers overlapping
   with REGNO.  */

static void
kill_value_one_regno (unsigned int regno, struct value_data *vd)
{
  unsigned int i, next;

  if (vd->e[regno].oldest_regno != regno)
    {
      /* REGNO is in the middle or at the tail of a chain: walk from
	 the head to find its predecessor, then splice REGNO out.  */
      for (i = vd->e[regno].oldest_regno;
	   vd->e[i].next_regno != regno;
	   i = vd->e[i].next_regno)
	continue;
      vd->e[i].next_regno = vd->e[regno].next_regno;
    }
  else if ((next = vd->e[regno].next_regno) != INVALID_REGNUM)
    {
      /* REGNO heads a non-trivial chain: promote its successor to be
	 the new head of every remaining member.  */
      for (i = next; i != INVALID_REGNUM; i = vd->e[i].next_regno)
	vd->e[i].oldest_regno = next;
    }

  /* Reset REGNO to a singleton chain with no known value, discarding
     any DEBUG_INSN replacements that named it.  */
  vd->e[regno].mode = VOIDmode;
  vd->e[regno].oldest_regno = regno;
  vd->e[regno].next_regno = INVALID_REGNUM;
  if (vd->e[regno].debug_insn_changes)
    free_debug_insn_changes (vd, regno);

  if (flag_checking)
    validate_value_data (vd);
}
156
157 /* Kill the value in register REGNO for NREGS, and any other registers
158 whose values overlap. */
159
160 static void
161 kill_value_regno (unsigned int regno, unsigned int nregs,
162 struct value_data *vd)
163 {
164 unsigned int j;
165
166 /* Kill the value we're told to kill. */
167 for (j = 0; j < nregs; ++j)
168 kill_value_one_regno (regno + j, vd);
169
170 /* Kill everything that overlapped what we're told to kill. */
171 if (regno < vd->max_value_regs)
172 j = 0;
173 else
174 j = regno - vd->max_value_regs;
175 for (; j < regno; ++j)
176 {
177 unsigned int i, n;
178 if (vd->e[j].mode == VOIDmode)
179 continue;
180 n = hard_regno_nregs (j, vd->e[j].mode);
181 if (j + n > regno)
182 for (i = 0; i < n; ++i)
183 kill_value_one_regno (j + i, vd);
184 }
185 }
186
187 /* Kill X. This is a convenience function wrapping kill_value_regno
188 so that we mind the mode the register is in. */
189
190 static void
191 kill_value (const_rtx x, struct value_data *vd)
192 {
193 if (GET_CODE (x) == SUBREG)
194 {
195 rtx tmp = simplify_subreg (GET_MODE (x), SUBREG_REG (x),
196 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
197 x = tmp ? tmp : SUBREG_REG (x);
198 }
199 if (REG_P (x))
200 kill_value_regno (REGNO (x), REG_NREGS (x), vd);
201 }
202
203 /* Remember that REGNO is valid in MODE. */
204
205 static void
206 set_value_regno (unsigned int regno, machine_mode mode,
207 struct value_data *vd)
208 {
209 unsigned int nregs;
210
211 vd->e[regno].mode = mode;
212
213 nregs = hard_regno_nregs (regno, mode);
214 if (nregs > vd->max_value_regs)
215 vd->max_value_regs = nregs;
216 }
217
218 /* Initialize VD such that there are no known relationships between regs. */
219
220 static void
221 init_value_data (struct value_data *vd)
222 {
223 int i;
224 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
225 {
226 vd->e[i].mode = VOIDmode;
227 vd->e[i].oldest_regno = i;
228 vd->e[i].next_regno = INVALID_REGNUM;
229 vd->e[i].debug_insn_changes = NULL;
230 }
231 vd->max_value_regs = 0;
232 vd->n_debug_insn_changes = 0;
233 }
234
235 /* Called through note_stores. If X is clobbered, kill its value. */
236
237 static void
238 kill_clobbered_value (rtx x, const_rtx set, void *data)
239 {
240 struct value_data *const vd = (struct value_data *) data;
241
242 if (GET_CODE (set) == CLOBBER)
243 kill_value (x, vd);
244 }
245
/* A structure passed as data to kill_set_value through note_stores.  */
struct kill_set_value_data
{
  /* The value-tracking state to update.  */
  struct value_data *vd;
  /* A destination register whose set should be left alone, or NULL_RTX.  */
  rtx ignore_set_reg;
};
252
253 /* Called through note_stores. If X is set, not clobbered, kill its
254 current value and install it as the root of its own value list. */
255
256 static void
257 kill_set_value (rtx x, const_rtx set, void *data)
258 {
259 struct kill_set_value_data *ksvd = (struct kill_set_value_data *) data;
260 if (rtx_equal_p (x, ksvd->ignore_set_reg))
261 return;
262
263 if (GET_CODE (set) != CLOBBER)
264 {
265 kill_value (x, ksvd->vd);
266 if (REG_P (x))
267 set_value_regno (REGNO (x), GET_MODE (x), ksvd->vd);
268 }
269 }
270
/* Kill any register used in X as the base of an auto-increment expression,
   and install that register as the root of its own value list.  */

static void
kill_autoinc_value (rtx_insn *insn, struct value_data *vd)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, PATTERN (insn), NONCONST)
    {
      const_rtx x = *iter;
      if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
	{
	  /* Operand 0 of an auto-inc/dec rtx is the register being
	     modified.  Its old value is gone, but the register itself
	     remains live in its own mode.  */
	  x = XEXP (x, 0);
	  kill_value (x, vd);
	  set_value_regno (REGNO (x), GET_MODE (x), vd);
	  /* No need to descend into the auto-inc's operands.  */
	  iter.skip_subrtxes ();
	}
    }
}
290
/* Assert that SRC has been copied to DEST.  Adjust the data structures
   to reflect that SRC contains an older copy of the shared value.  */

static void
copy_value (rtx dest, rtx src, struct value_data *vd)
{
  unsigned int dr = REGNO (dest);
  unsigned int sr = REGNO (src);
  unsigned int dn, sn;
  unsigned int i;

  /* ??? At present, it's possible to see noop sets.  It'd be nice if
     this were cleaned up beforehand...  */
  if (sr == dr)
    return;

  /* Do not propagate copies to the stack pointer, as that can leave
     memory accesses with no scheduling dependency on the stack update.  */
  if (dr == STACK_POINTER_REGNUM)
    return;

  /* Likewise with the frame pointer, if we're using one.  */
  if (frame_pointer_needed && dr == HARD_FRAME_POINTER_REGNUM)
    return;

  /* Do not propagate copies to fixed or global registers: patterns may
     rely on seeing a particular fixed register, and users can expect
     the chosen global register in asm.  */
  if (fixed_regs[dr] || global_regs[dr])
    return;

  /* If SRC and DEST overlap, don't record anything.  */
  dn = REG_NREGS (dest);
  sn = REG_NREGS (src);
  if ((dr > sr && dr < sr + sn)
      || (sr > dr && sr < dr + dn))
    return;

  /* If SRC had no assigned mode (i.e. we didn't know it was live)
     assign it now and assume the value came from an input argument
     or somesuch.  */
  if (vd->e[sr].mode == VOIDmode)
    set_value_regno (sr, vd->e[dr].mode, vd);

  /* If we are narrowing the input to a smaller number of hard regs,
     and it is in big endian, we are really extracting a high part.
     Since we generally associate a low part of a value with the value itself,
     we must not do the same for the high part.
     Note we can still get low parts for the same mode combination through
     a two-step copy involving differently sized hard regs.
     Assume hard regs fr* are 32 bits each, while r* are 64 bits each:
     (set (reg:DI r0) (reg:DI fr0))
     (set (reg:SI fr2) (reg:SI r0))
     loads the low part of (reg:DI fr0) - i.e. fr1 - into fr2, while:
     (set (reg:SI fr2) (reg:SI fr0))
     loads the high part of (reg:DI fr0) into fr2.

     We can't properly represent the latter case in our tables, so don't
     record anything then.  */
  else if (sn < hard_regno_nregs (sr, vd->e[sr].mode)
	   && maybe_ne (subreg_lowpart_offset (GET_MODE (dest),
					       vd->e[sr].mode), 0U))
    return;

  /* If SRC had been assigned a mode narrower than the copy, we can't
     link DEST into the chain, because not all of the pieces of the
     copy came from oldest_regno.  */
  else if (sn > hard_regno_nregs (sr, vd->e[sr].mode))
    return;

  /* It is not safe to link DEST into the chain if SRC was defined in some
     narrower mode M and if M is also narrower than the mode of the first
     register in the chain.  For example:
     (set (reg:DI r1) (reg:DI r0))
     (set (reg:HI r2) (reg:HI r1))
     (set (reg:SI r3) (reg:SI r2)) //Should be a new chain start at r3
     (set (reg:SI r4) (reg:SI r1))
     (set (reg:SI r5) (reg:SI r4))

     the upper part of r3 is undefined.  If we added it to the chain,
     it may be used to replace r5, which has defined upper bits.
     See PR98694 for details.

     [A] partial_subreg_p (vd->e[sr].mode, GET_MODE (src))
     [B] partial_subreg_p (vd->e[sr].mode, vd->e[vd->e[sr].oldest_regno].mode)
     Condition B is added to catch optimization opportunities of

     (set (reg:HI R1) (reg:HI R0))
     (set (reg:SI R2) (reg:SI R1)) // [A]
     (set (reg:DI R3) (reg:DI R2)) // [A]
     (set (reg:SI R4) (reg:SI R[0-3]))
     (set (reg:HI R5) (reg:HI R[0-4]))

     in which all registers have only 16 defined bits.  */
  else if (partial_subreg_p (vd->e[sr].mode, GET_MODE (src))
	   && partial_subreg_p (vd->e[sr].mode,
				vd->e[vd->e[sr].oldest_regno].mode))
    return;

  /* Link DR at the end of the value chain used by SR.  */

  vd->e[dr].oldest_regno = vd->e[sr].oldest_regno;

  /* Walk to the current tail of SR's chain and append DR there.  */
  for (i = sr; vd->e[i].next_regno != INVALID_REGNUM; i = vd->e[i].next_regno)
    continue;
  vd->e[i].next_regno = dr;

  if (flag_checking)
    validate_value_data (vd);
}
401
402 /* Return true if a mode change from ORIG to NEW is allowed for REGNO. */
403
404 static bool
405 mode_change_ok (machine_mode orig_mode, machine_mode new_mode,
406 unsigned int regno ATTRIBUTE_UNUSED)
407 {
408 if (partial_subreg_p (orig_mode, new_mode))
409 return false;
410
411 return REG_CAN_CHANGE_MODE_P (regno, orig_mode, new_mode);
412 }
413
/* Register REGNO was originally set in ORIG_MODE.  It - or a copy of it -
   was copied in COPY_MODE to COPY_REGNO, and then COPY_REGNO was accessed
   in NEW_MODE.
   Return a NEW_MODE rtx for REGNO if that's OK, otherwise return NULL_RTX.  */

static rtx
maybe_mode_change (machine_mode orig_mode, machine_mode copy_mode,
		   machine_mode new_mode, unsigned int regno,
		   unsigned int copy_regno ATTRIBUTE_UNUSED)
{
  /* If the copy was narrower than both the original value and the new
     use, bits the new use needs were never transferred by the copy.  */
  if (partial_subreg_p (copy_mode, orig_mode)
      && partial_subreg_p (copy_mode, new_mode))
    return NULL_RTX;

  /* Avoid creating multiple copies of the stack pointer.  Some ports
     assume there is one and only one stack pointer.

     It's unclear if we need to do the same for other special registers.  */
  if (regno == STACK_POINTER_REGNUM)
    return NULL_RTX;

  if (orig_mode == new_mode)
    return gen_raw_REG (new_mode, regno);
  else if (mode_change_ok (orig_mode, new_mode, regno))
    {
      int copy_nregs = hard_regno_nregs (copy_regno, copy_mode);
      int use_nregs = hard_regno_nregs (copy_regno, new_mode);
      poly_uint64 bytes_per_reg;
      if (!can_div_trunc_p (GET_MODE_SIZE (copy_mode),
			    copy_nregs, &bytes_per_reg))
	return NULL_RTX;
      /* Locate the NEW_MODE-sized lowpart of the copied value within
	 ORIG_MODE, accounting for the trailing registers that the
	 narrower use does not cover.  */
      poly_uint64 copy_offset = bytes_per_reg * (copy_nregs - use_nregs);
      poly_uint64 offset
	= subreg_size_lowpart_offset (GET_MODE_SIZE (new_mode) + copy_offset,
				      GET_MODE_SIZE (orig_mode));
      regno += subreg_regno_offset (regno, orig_mode, offset, new_mode);
      /* The adjusted register must still be valid in NEW_MODE.  */
      if (targetm.hard_regno_mode_ok (regno, new_mode))
	return gen_raw_REG (new_mode, regno);
    }
  return NULL_RTX;
}
455
/* Find the oldest copy of the value contained in REGNO that is in
   register class CL and has mode MODE.  If found, return an rtx
   of that oldest register, otherwise return NULL.  */

static rtx
find_oldest_value_reg (enum reg_class cl, rtx reg, struct value_data *vd)
{
  unsigned int regno = REGNO (reg);
  machine_mode mode = GET_MODE (reg);
  unsigned int i;

  gcc_assert (regno < FIRST_PSEUDO_REGISTER);

  /* If we are accessing REG in some mode other than what we set it in,
     make sure that the replacement is valid.  In particular, consider
	(set (reg:DI r11) (...))
	(set (reg:SI r9) (reg:SI r11))
	(set (reg:SI r10) (...))
	(set (...) (reg:DI r9))
     Replacing r9 with r11 is invalid.  */
  if (mode != vd->e[regno].mode
      && REG_NREGS (reg) > hard_regno_nregs (regno, vd->e[regno].mode))
    return NULL_RTX;

  /* Walk the chain from its oldest member toward REGNO, returning the
     first member that satisfies both the class and mode constraints.  */
  for (i = vd->e[regno].oldest_regno; i != regno; i = vd->e[i].next_regno)
    {
      machine_mode oldmode = vd->e[i].mode;
      rtx new_rtx;

      if (!in_hard_reg_set_p (reg_class_contents[cl], mode, i))
	continue;

      new_rtx = maybe_mode_change (oldmode, vd->e[regno].mode, mode, i, regno);
      if (new_rtx)
	{
	  /* Preserve the identity and attributes of the replaced use.  */
	  ORIGINAL_REGNO (new_rtx) = ORIGINAL_REGNO (reg);
	  REG_ATTRS (new_rtx) = REG_ATTRS (reg);
	  REG_POINTER (new_rtx) = REG_POINTER (reg);
	  return new_rtx;
	}
    }

  return NULL_RTX;
}
500
/* If possible, replace the register at *LOC with the oldest register
   in register class CL.  Return true if successfully replaced.  */

static bool
replace_oldest_value_reg (rtx *loc, enum reg_class cl, rtx_insn *insn,
			  struct value_data *vd)
{
  rtx new_rtx = find_oldest_value_reg (cl, *loc, vd);
  if (new_rtx && (!DEBUG_INSN_P (insn) || !skip_debug_insn_p))
    {
      if (DEBUG_INSN_P (insn))
	{
	  /* Don't touch DEBUG_INSNs directly; queue the change so it is
	     only applied if the new register is later used for real
	     (see the comment on queued_debug_insn_change).  */
	  struct queued_debug_insn_change *change;

	  if (dump_file)
	    fprintf (dump_file, "debug_insn %u: queued replacing reg %u with %u\n",
		     INSN_UID (insn), REGNO (*loc), REGNO (new_rtx));

	  change = queued_debug_insn_change_pool.allocate ();
	  change->next = vd->e[REGNO (new_rtx)].debug_insn_changes;
	  change->insn = insn;
	  change->loc = loc;
	  change->new_rtx = new_rtx;
	  vd->e[REGNO (new_rtx)].debug_insn_changes = change;
	  ++vd->n_debug_insn_changes;
	  return true;
	}
      if (dump_file)
	fprintf (dump_file, "insn %u: replaced reg %u with %u\n",
		 INSN_UID (insn), REGNO (*loc), REGNO (new_rtx));

      /* Queue the change in the current change group (in_group = 1);
	 the caller commits or cancels it via apply_change_group.  */
      validate_change (insn, loc, new_rtx, 1);
      return true;
    }
  return false;
}
537
/* Similar to replace_oldest_value_reg, but *LOC contains an address.
   Adapted from find_reloads_address_1.  CL is INDEX_REG_CLASS or
   BASE_REG_CLASS depending on how the register is being considered.  */

static bool
replace_oldest_value_addr (rtx *loc, enum reg_class cl,
			   machine_mode mode, addr_space_t as,
			   rtx_insn *insn, struct value_data *vd)
{
  rtx x = *loc;
  RTX_CODE code = GET_CODE (x);
  const char *fmt;
  int i, j;
  bool changed = false;

  switch (code)
    {
    case PLUS:
      if (DEBUG_INSN_P (insn))
	break;

      {
	/* Decide which operand of the PLUS is the index register and
	   which is the base, so each gets the right register class.  */
	rtx orig_op0 = XEXP (x, 0);
	rtx orig_op1 = XEXP (x, 1);
	RTX_CODE code0 = GET_CODE (orig_op0);
	RTX_CODE code1 = GET_CODE (orig_op1);
	rtx op0 = orig_op0;
	rtx op1 = orig_op1;
	rtx *locI = NULL;
	rtx *locB = NULL;
	enum rtx_code index_code = SCRATCH;

	/* Classify by the code inside a SUBREG, if any.  */
	if (GET_CODE (op0) == SUBREG)
	  {
	    op0 = SUBREG_REG (op0);
	    code0 = GET_CODE (op0);
	  }

	if (GET_CODE (op1) == SUBREG)
	  {
	    op1 = SUBREG_REG (op1);
	    code1 = GET_CODE (op1);
	  }

	/* A scaled or extended operand is the index; a MEM operand
	   forces the other side to be the index.  */
	if (code0 == MULT || code0 == SIGN_EXTEND || code0 == TRUNCATE
	    || code0 == ZERO_EXTEND || code1 == MEM)
	  {
	    locI = &XEXP (x, 0);
	    locB = &XEXP (x, 1);
	    index_code = GET_CODE (*locI);
	  }
	else if (code1 == MULT || code1 == SIGN_EXTEND || code1 == TRUNCATE
		 || code1 == ZERO_EXTEND || code0 == MEM)
	  {
	    locI = &XEXP (x, 1);
	    locB = &XEXP (x, 0);
	    index_code = GET_CODE (*locI);
	  }
	/* A constant operand leaves only a base register to process.  */
	else if (code0 == CONST_INT || code0 == CONST
		 || code0 == SYMBOL_REF || code0 == LABEL_REF)
	  {
	    locB = &XEXP (x, 1);
	    index_code = GET_CODE (XEXP (x, 0));
	  }
	else if (code1 == CONST_INT || code1 == CONST
		 || code1 == SYMBOL_REF || code1 == LABEL_REF)
	  {
	    locB = &XEXP (x, 0);
	    index_code = GET_CODE (XEXP (x, 1));
	  }
	else if (code0 == REG && code1 == REG)
	  {
	    /* Two registers: let the target's base/index predicates
	       decide which plays which role, defaulting to operand 1
	       as the index when ambiguous.  */
	    int index_op;
	    unsigned regno0 = REGNO (op0), regno1 = REGNO (op1);

	    if (REGNO_OK_FOR_INDEX_P (regno1)
		&& regno_ok_for_base_p (regno0, mode, as, PLUS, REG))
	      index_op = 1;
	    else if (REGNO_OK_FOR_INDEX_P (regno0)
		     && regno_ok_for_base_p (regno1, mode, as, PLUS, REG))
	      index_op = 0;
	    else if (regno_ok_for_base_p (regno0, mode, as, PLUS, REG)
		     || REGNO_OK_FOR_INDEX_P (regno1))
	      index_op = 1;
	    else if (regno_ok_for_base_p (regno1, mode, as, PLUS, REG))
	      index_op = 0;
	    else
	      index_op = 1;

	    locI = &XEXP (x, index_op);
	    locB = &XEXP (x, !index_op);
	    index_code = GET_CODE (*locI);
	  }
	else if (code0 == REG)
	  {
	    locI = &XEXP (x, 0);
	    locB = &XEXP (x, 1);
	    index_code = GET_CODE (*locI);
	  }
	else if (code1 == REG)
	  {
	    locI = &XEXP (x, 1);
	    locB = &XEXP (x, 0);
	    index_code = GET_CODE (*locI);
	  }

	/* Recurse into the index and base slots with their classes.  */
	if (locI)
	  changed |= replace_oldest_value_addr (locI, INDEX_REG_CLASS,
						mode, as, insn, vd);
	if (locB)
	  changed |= replace_oldest_value_addr (locB,
						base_reg_class (mode, as, PLUS,
								index_code),
						mode, as, insn, vd);
	return changed;
      }

    case POST_INC:
    case POST_DEC:
    case POST_MODIFY:
    case PRE_INC:
    case PRE_DEC:
    case PRE_MODIFY:
      /* Auto-modified registers cannot be replaced.  */
      return false;

    case MEM:
      return replace_oldest_value_mem (x, insn, vd);

    case REG:
      return replace_oldest_value_reg (loc, cl, insn, vd);

    default:
      break;
    }

  /* For anything else, recurse into all rtx and rtvec operands.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	changed |= replace_oldest_value_addr (&XEXP (x, i), cl, mode, as,
					      insn, vd);
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  changed |= replace_oldest_value_addr (&XVECEXP (x, i, j), cl,
						mode, as, insn, vd);
    }

  return changed;
}
687
688 /* Similar to replace_oldest_value_reg, but X contains a memory. */
689
690 static bool
691 replace_oldest_value_mem (rtx x, rtx_insn *insn, struct value_data *vd)
692 {
693 enum reg_class cl;
694
695 if (DEBUG_INSN_P (insn))
696 cl = ALL_REGS;
697 else
698 cl = base_reg_class (GET_MODE (x), MEM_ADDR_SPACE (x), MEM, SCRATCH);
699
700 return replace_oldest_value_addr (&XEXP (x, 0), cl,
701 GET_MODE (x), MEM_ADDR_SPACE (x),
702 insn, vd);
703 }
704
/* Apply all queued updates for DEBUG_INSNs that change some reg to
   register REGNO.  */

static void
apply_debug_insn_changes (struct value_data *vd, unsigned int regno)
{
  struct queued_debug_insn_change *change;
  rtx_insn *last_insn = vd->e[regno].debug_insn_changes->insn;

  for (change = vd->e[regno].debug_insn_changes;
       change;
       change = change->next)
    {
      /* Changes for one insn must be committed together; flush the
	 pending group whenever we move on to a different insn.  */
      if (last_insn != change->insn)
	{
	  apply_change_group ();
	  last_insn = change->insn;
	}
      /* Queue the replacement in the current change group.  */
      validate_change (change->insn, change->loc, change->new_rtx, 1);
    }
  /* Commit the group accumulated for the final insn.  */
  apply_change_group ();
}
727
/* Called via note_uses, for all used registers in a real insn
   apply DEBUG_INSN changes that change registers to the used
   registers.  */

static void
cprop_find_used_regs (rtx *loc, void *data)
{
  struct value_data *const vd = (struct value_data *) data;
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, *loc, NONCONST)
    {
      const_rtx x = *iter;
      if (REG_P (x))
	{
	  unsigned int regno = REGNO (x);
	  /* The register is used for real, so the queued DEBUG_INSN
	     replacements naming it can now safely be committed.  */
	  if (vd->e[regno].debug_insn_changes)
	    {
	      apply_debug_insn_changes (vd, regno);
	      free_debug_insn_changes (vd, regno);
	    }
	}
    }
}
751
/* Apply clobbers of INSN in PATTERN and CALL_INSN_FUNCTION_USAGE to
   value_data VD; note_stores visits both for us.  */

static void
kill_clobbered_values (rtx_insn *insn, struct value_data *vd)
{
  note_stores (insn, kill_clobbered_value, vd);
}
759
760 /* Perform the forward copy propagation on basic block BB. */
761
762 static bool
763 copyprop_hardreg_forward_1 (basic_block bb, struct value_data *vd)
764 {
765 bool anything_changed = false;
766 rtx_insn *insn, *next;
767
768 for (insn = BB_HEAD (bb); ; insn = next)
769 {
770 int n_ops, i, predicated;
771 bool is_asm, any_replacements;
772 rtx set;
773 rtx link;
774 bool changed = false;
775 struct kill_set_value_data ksvd;
776
777 next = NEXT_INSN (insn);
778 if (!NONDEBUG_INSN_P (insn))
779 {
780 if (DEBUG_BIND_INSN_P (insn))
781 {
782 rtx loc = INSN_VAR_LOCATION_LOC (insn);
783 if (!VAR_LOC_UNKNOWN_P (loc))
784 replace_oldest_value_addr (&INSN_VAR_LOCATION_LOC (insn),
785 ALL_REGS, GET_MODE (loc),
786 ADDR_SPACE_GENERIC, insn, vd);
787 }
788
789 if (insn == BB_END (bb))
790 break;
791 else
792 continue;
793 }
794
795 set = single_set (insn);
796
797 /* Detect noop sets and remove them before processing side effects. */
798 if (set && REG_P (SET_DEST (set)) && REG_P (SET_SRC (set)))
799 {
800 unsigned int regno = REGNO (SET_SRC (set));
801 rtx r1 = find_oldest_value_reg (REGNO_REG_CLASS (regno),
802 SET_DEST (set), vd);
803 rtx r2 = find_oldest_value_reg (REGNO_REG_CLASS (regno),
804 SET_SRC (set), vd);
805 if (rtx_equal_p (r1 ? r1 : SET_DEST (set), r2 ? r2 : SET_SRC (set)))
806 {
807 bool last = insn == BB_END (bb);
808 delete_insn (insn);
809 if (last)
810 break;
811 continue;
812 }
813 }
814
815 /* Detect obviously dead sets (via REG_UNUSED notes) and remove them. */
816 if (set
817 && !RTX_FRAME_RELATED_P (insn)
818 && !may_trap_p (set)
819 && find_reg_note (insn, REG_UNUSED, SET_DEST (set))
820 && !side_effects_p (SET_SRC (set))
821 && !side_effects_p (SET_DEST (set)))
822 {
823 bool last = insn == BB_END (bb);
824 delete_insn (insn);
825 if (last)
826 break;
827 continue;
828 }
829
830
831 extract_constrain_insn (insn);
832 preprocess_constraints (insn);
833 const operand_alternative *op_alt = which_op_alt ();
834 n_ops = recog_data.n_operands;
835 is_asm = asm_noperands (PATTERN (insn)) >= 0;
836
837 /* Simplify the code below by promoting OP_OUT to OP_INOUT
838 in predicated instructions. */
839
840 predicated = GET_CODE (PATTERN (insn)) == COND_EXEC;
841 for (i = 0; i < n_ops; ++i)
842 {
843 int matches = op_alt[i].matches;
844 if (matches >= 0 || op_alt[i].matched >= 0
845 || (predicated && recog_data.operand_type[i] == OP_OUT))
846 recog_data.operand_type[i] = OP_INOUT;
847 }
848
849 /* Apply changes to earlier DEBUG_INSNs if possible. */
850 if (vd->n_debug_insn_changes)
851 note_uses (&PATTERN (insn), cprop_find_used_regs, vd);
852
853 /* For each earlyclobber operand, zap the value data. */
854 for (i = 0; i < n_ops; i++)
855 if (op_alt[i].earlyclobber)
856 kill_value (recog_data.operand[i], vd);
857
858 /* Within asms, a clobber cannot overlap inputs or outputs.
859 I wouldn't think this were true for regular insns, but
860 scan_rtx treats them like that... */
861 kill_clobbered_values (insn, vd);
862
863 /* Kill all auto-incremented values. */
864 /* ??? REG_INC is useless, since stack pushes aren't done that way. */
865 kill_autoinc_value (insn, vd);
866
867 /* Kill all early-clobbered operands. */
868 for (i = 0; i < n_ops; i++)
869 if (op_alt[i].earlyclobber)
870 kill_value (recog_data.operand[i], vd);
871
872 /* If we have dead sets in the insn, then we need to note these as we
873 would clobbers. */
874 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
875 {
876 if (REG_NOTE_KIND (link) == REG_UNUSED)
877 {
878 kill_value (XEXP (link, 0), vd);
879 /* Furthermore, if the insn looked like a single-set,
880 but the dead store kills the source value of that
881 set, then we can no-longer use the plain move
882 special case below. */
883 if (set
884 && reg_overlap_mentioned_p (XEXP (link, 0), SET_SRC (set)))
885 set = NULL;
886 }
887
888 /* We need to keep CFI info correct, and the same on all paths,
889 so we cannot normally replace the registers REG_CFA_REGISTER
890 refers to. Bail. */
891 if (REG_NOTE_KIND (link) == REG_CFA_REGISTER)
892 goto did_replacement;
893 }
894
895 /* Special-case plain move instructions, since we may well
896 be able to do the move from a different register class. */
897 if (set && REG_P (SET_SRC (set)))
898 {
899 rtx src = SET_SRC (set);
900 unsigned int regno = REGNO (src);
901 machine_mode mode = GET_MODE (src);
902 unsigned int i;
903 rtx new_rtx;
904
905 /* If we are accessing SRC in some mode other that what we
906 set it in, make sure that the replacement is valid. */
907 if (mode != vd->e[regno].mode)
908 {
909 if (REG_NREGS (src)
910 > hard_regno_nregs (regno, vd->e[regno].mode))
911 goto no_move_special_case;
912
913 /* And likewise, if we are narrowing on big endian the transformation
914 is also invalid. */
915 if (REG_NREGS (src) < hard_regno_nregs (regno, vd->e[regno].mode)
916 && maybe_ne (subreg_lowpart_offset (mode,
917 vd->e[regno].mode), 0U))
918 goto no_move_special_case;
919 }
920
921 /* If the destination is also a register, try to find a source
922 register in the same class. */
923 if (REG_P (SET_DEST (set)))
924 {
925 new_rtx = find_oldest_value_reg (REGNO_REG_CLASS (regno),
926 src, vd);
927
928 if (new_rtx && validate_change (insn, &SET_SRC (set), new_rtx, 0))
929 {
930 if (dump_file)
931 fprintf (dump_file,
932 "insn %u: replaced reg %u with %u\n",
933 INSN_UID (insn), regno, REGNO (new_rtx));
934 changed = true;
935 goto did_replacement;
936 }
937 /* We need to re-extract as validate_change clobbers
938 recog_data. */
939 extract_constrain_insn (insn);
940 preprocess_constraints (insn);
941 }
942
943 /* Otherwise, try all valid registers and see if its valid. */
944 for (i = vd->e[regno].oldest_regno; i != regno;
945 i = vd->e[i].next_regno)
946 {
947 new_rtx = maybe_mode_change (vd->e[i].mode, vd->e[regno].mode,
948 mode, i, regno);
949 if (new_rtx != NULL_RTX)
950 {
951 if (validate_change (insn, &SET_SRC (set), new_rtx, 0))
952 {
953 ORIGINAL_REGNO (new_rtx) = ORIGINAL_REGNO (src);
954 REG_ATTRS (new_rtx) = REG_ATTRS (src);
955 REG_POINTER (new_rtx) = REG_POINTER (src);
956 if (dump_file)
957 fprintf (dump_file,
958 "insn %u: replaced reg %u with %u\n",
959 INSN_UID (insn), regno, REGNO (new_rtx));
960 changed = true;
961 goto did_replacement;
962 }
963 /* We need to re-extract as validate_change clobbers
964 recog_data. */
965 extract_constrain_insn (insn);
966 preprocess_constraints (insn);
967 }
968 }
969 }
970 no_move_special_case:
971
972 any_replacements = false;
973
974 /* For each input operand, replace a hard register with the
975 eldest live copy that's in an appropriate register class. */
976 for (i = 0; i < n_ops; i++)
977 {
978 bool replaced = false;
979
980 /* Don't scan match_operand here, since we've no reg class
981 information to pass down. Any operands that we could
982 substitute in will be represented elsewhere. */
983 if (recog_data.constraints[i][0] == '\0')
984 continue;
985
986 /* Don't replace in asms intentionally referencing hard regs. */
987 if (is_asm && REG_P (recog_data.operand[i])
988 && (REGNO (recog_data.operand[i])
989 == ORIGINAL_REGNO (recog_data.operand[i])))
990 continue;
991
992 if (recog_data.operand_type[i] == OP_IN)
993 {
994 if (op_alt[i].is_address)
995 replaced
996 = replace_oldest_value_addr (recog_data.operand_loc[i],
997 alternative_class (op_alt, i),
998 VOIDmode, ADDR_SPACE_GENERIC,
999 insn, vd);
1000 else if (REG_P (recog_data.operand[i]))
1001 replaced
1002 = replace_oldest_value_reg (recog_data.operand_loc[i],
1003 alternative_class (op_alt, i),
1004 insn, vd);
1005 else if (MEM_P (recog_data.operand[i]))
1006 replaced = replace_oldest_value_mem (recog_data.operand[i],
1007 insn, vd);
1008 }
1009 else if (MEM_P (recog_data.operand[i]))
1010 replaced = replace_oldest_value_mem (recog_data.operand[i],
1011 insn, vd);
1012
1013 /* If we performed any replacement, update match_dups. */
1014 if (replaced)
1015 {
1016 int j;
1017 rtx new_rtx;
1018
1019 new_rtx = *recog_data.operand_loc[i];
1020 recog_data.operand[i] = new_rtx;
1021 for (j = 0; j < recog_data.n_dups; j++)
1022 if (recog_data.dup_num[j] == i)
1023 validate_unshare_change (insn, recog_data.dup_loc[j], new_rtx, 1);
1024
1025 any_replacements = true;
1026 }
1027 }
1028
1029 if (any_replacements)
1030 {
1031 if (! apply_change_group ())
1032 {
1033 if (dump_file)
1034 fprintf (dump_file,
1035 "insn %u: reg replacements not verified\n",
1036 INSN_UID (insn));
1037 }
1038 else
1039 changed = true;
1040 }
1041
1042 did_replacement:
1043 if (changed)
1044 {
1045 anything_changed = true;
1046
1047 /* If something changed, perhaps further changes to earlier
1048 DEBUG_INSNs can be applied. */
1049 if (vd->n_debug_insn_changes)
1050 note_uses (&PATTERN (insn), cprop_find_used_regs, vd);
1051 df_insn_rescan (insn);
1052 }
1053
1054 ksvd.vd = vd;
1055 ksvd.ignore_set_reg = NULL_RTX;
1056
1057 /* Clobber call-clobbered registers. */
1058 if (CALL_P (insn))
1059 {
1060 unsigned int set_regno = INVALID_REGNUM;
1061 unsigned int set_nregs = 0;
1062 unsigned int regno;
1063 rtx exp;
1064
1065 for (exp = CALL_INSN_FUNCTION_USAGE (insn); exp; exp = XEXP (exp, 1))
1066 {
1067 rtx x = XEXP (exp, 0);
1068 if (GET_CODE (x) == SET)
1069 {
1070 rtx dest = SET_DEST (x);
1071 kill_value (dest, vd);
1072 set_value_regno (REGNO (dest), GET_MODE (dest), vd);
1073 copy_value (dest, SET_SRC (x), vd);
1074 ksvd.ignore_set_reg = dest;
1075 set_regno = REGNO (dest);
1076 set_nregs = REG_NREGS (dest);
1077 break;
1078 }
1079 }
1080
1081 function_abi callee_abi = insn_callee_abi (insn);
1082 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1083 if (vd->e[regno].mode != VOIDmode
1084 && callee_abi.clobbers_reg_p (vd->e[regno].mode, regno)
1085 && (regno < set_regno || regno >= set_regno + set_nregs))
1086 kill_value_regno (regno, 1, vd);
1087
1088 /* If SET was seen in CALL_INSN_FUNCTION_USAGE, and SET_SRC
1089 of the SET isn't clobbered by CALLEE_ABI, but instead among
1090 CLOBBERs on the CALL_INSN, we could wrongly assume the
1091 value in it is still live. */
1092 if (ksvd.ignore_set_reg)
1093 kill_clobbered_values (insn, vd);
1094 }
1095
1096 bool copy_p = (set
1097 && REG_P (SET_DEST (set))
1098 && REG_P (SET_SRC (set)));
1099 bool noop_p = (copy_p
1100 && rtx_equal_p (SET_DEST (set), SET_SRC (set)));
1101
1102 /* If a noop move is using narrower mode than we have recorded,
1103 we need to either remove the noop move, or kill_set_value. */
1104 if (noop_p
1105 && partial_subreg_p (GET_MODE (SET_DEST (set)),
1106 vd->e[REGNO (SET_DEST (set))].mode))
1107 {
1108 if (noop_move_p (insn))
1109 {
1110 bool last = insn == BB_END (bb);
1111 delete_insn (insn);
1112 if (last)
1113 break;
1114 }
1115 else
1116 noop_p = false;
1117 }
1118
1119 if (!noop_p)
1120 {
1121 /* Notice stores. */
1122 note_stores (insn, kill_set_value, &ksvd);
1123
1124 /* Notice copies. */
1125 if (copy_p)
1126 {
1127 df_insn_rescan (insn);
1128 copy_value (SET_DEST (set), SET_SRC (set), vd);
1129 }
1130 }
1131
1132 if (insn == BB_END (bb))
1133 break;
1134 }
1135
1136 return anything_changed;
1137 }
1138
1139 /* Dump the value chain data to stderr. */
1140
1141 DEBUG_FUNCTION void
1142 debug_value_data (struct value_data *vd)
1143 {
1144 HARD_REG_SET set;
1145 unsigned int i, j;
1146
1147 CLEAR_HARD_REG_SET (set);
1148
1149 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1150 if (vd->e[i].oldest_regno == i)
1151 {
1152 if (vd->e[i].mode == VOIDmode)
1153 {
1154 if (vd->e[i].next_regno != INVALID_REGNUM)
1155 fprintf (stderr, "[%u] Bad next_regno for empty chain (%u)\n",
1156 i, vd->e[i].next_regno);
1157 continue;
1158 }
1159
1160 SET_HARD_REG_BIT (set, i);
1161 fprintf (stderr, "[%u %s] ", i, GET_MODE_NAME (vd->e[i].mode));
1162
1163 for (j = vd->e[i].next_regno;
1164 j != INVALID_REGNUM;
1165 j = vd->e[j].next_regno)
1166 {
1167 if (TEST_HARD_REG_BIT (set, j))
1168 {
1169 fprintf (stderr, "[%u] Loop in regno chain\n", j);
1170 return;
1171 }
1172
1173 if (vd->e[j].oldest_regno != i)
1174 {
1175 fprintf (stderr, "[%u] Bad oldest_regno (%u)\n",
1176 j, vd->e[j].oldest_regno);
1177 return;
1178 }
1179 SET_HARD_REG_BIT (set, j);
1180 fprintf (stderr, "[%u %s] ", j, GET_MODE_NAME (vd->e[j].mode));
1181 }
1182 fputc ('\n', stderr);
1183 }
1184
1185 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1186 if (! TEST_HARD_REG_BIT (set, i)
1187 && (vd->e[i].mode != VOIDmode
1188 || vd->e[i].oldest_regno != i
1189 || vd->e[i].next_regno != INVALID_REGNUM))
1190 fprintf (stderr, "[%u] Non-empty reg in chain (%s %u %i)\n",
1191 i, GET_MODE_NAME (vd->e[i].mode), vd->e[i].oldest_regno,
1192 vd->e[i].next_regno);
1193 }
1194
/* Do copyprop_hardreg_forward_1 for a single basic block BB.
   DEBUG_INSNs are skipped since we do not want to involve DF related
   stuff as how it is handled in function pass_cprop_hardreg::execute.

   NOTE: Currently it is only used for shrink-wrap.  Maybe extend it
   to handle DEBUG_INSN for other uses.  */

void
copyprop_hardreg_forward_bb_without_debug_insn (basic_block bb)
{
  struct value_data *vd;
  vd = XNEWVEC (struct value_data, 1);
  init_value_data (vd);

  /* Tell copyprop_hardreg_forward_1 to ignore DEBUG_INSNs entirely for
     this one block, then restore the default afterwards.  */
  skip_debug_insn_p = true;
  copyprop_hardreg_forward_1 (bb, vd);
  free (vd);
  skip_debug_insn_p = false;
}
1214
1215 static void
1216 validate_value_data (struct value_data *vd)
1217 {
1218 HARD_REG_SET set;
1219 unsigned int i, j;
1220
1221 CLEAR_HARD_REG_SET (set);
1222
1223 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1224 if (vd->e[i].oldest_regno == i)
1225 {
1226 if (vd->e[i].mode == VOIDmode)
1227 {
1228 if (vd->e[i].next_regno != INVALID_REGNUM)
1229 internal_error ("%qs: [%u] bad %<next_regno%> for empty chain (%u)",
1230 __func__, i, vd->e[i].next_regno);
1231 continue;
1232 }
1233
1234 SET_HARD_REG_BIT (set, i);
1235
1236 for (j = vd->e[i].next_regno;
1237 j != INVALID_REGNUM;
1238 j = vd->e[j].next_regno)
1239 {
1240 if (TEST_HARD_REG_BIT (set, j))
1241 internal_error ("%qs: loop in %<next_regno%> chain (%u)",
1242 __func__, j);
1243 if (vd->e[j].oldest_regno != i)
1244 internal_error ("%qs: [%u] bad %<oldest_regno%> (%u)",
1245 __func__, j, vd->e[j].oldest_regno);
1246
1247 SET_HARD_REG_BIT (set, j);
1248 }
1249 }
1250
1251 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1252 if (! TEST_HARD_REG_BIT (set, i)
1253 && (vd->e[i].mode != VOIDmode
1254 || vd->e[i].oldest_regno != i
1255 || vd->e[i].next_regno != INVALID_REGNUM))
1256 internal_error ("%qs: [%u] non-empty register in chain (%s %u %i)",
1257 __func__, i,
1258 GET_MODE_NAME (vd->e[i].mode), vd->e[i].oldest_regno,
1259 vd->e[i].next_regno);
1260 }
1261
1262 \f
1263 namespace {
1264
/* Pass descriptor for the hard-register copy propagation RTL pass;
   registered with the pass manager via make_pass_cprop_hardreg.  */

const pass_data pass_data_cprop_hardreg =
{
  RTL_PASS, /* type */
  "cprop_hardreg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CPROP_REGISTERS, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};
1277
1278 class pass_cprop_hardreg : public rtl_opt_pass
1279 {
1280 public:
1281 pass_cprop_hardreg (gcc::context *ctxt)
1282 : rtl_opt_pass (pass_data_cprop_hardreg, ctxt)
1283 {}
1284
1285 /* opt_pass methods: */
1286 virtual bool gate (function *)
1287 {
1288 return (optimize > 0 && (flag_cprop_registers));
1289 }
1290
1291 virtual unsigned int execute (function *);
1292
1293 }; // class pass_cprop_hardreg
1294
1295 static bool
1296 cprop_hardreg_bb (basic_block bb, struct value_data *all_vd, sbitmap visited)
1297 {
1298 bitmap_set_bit (visited, bb->index);
1299
1300 /* If a block has a single predecessor, that we've already
1301 processed, begin with the value data that was live at
1302 the end of the predecessor block. */
1303 /* ??? Ought to use more intelligent queuing of blocks. */
1304 if (single_pred_p (bb)
1305 && bitmap_bit_p (visited, single_pred (bb)->index)
1306 && ! (single_pred_edge (bb)->flags & (EDGE_ABNORMAL_CALL | EDGE_EH)))
1307 {
1308 all_vd[bb->index] = all_vd[single_pred (bb)->index];
1309 if (all_vd[bb->index].n_debug_insn_changes)
1310 {
1311 unsigned int regno;
1312
1313 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1314 {
1315 if (all_vd[bb->index].e[regno].debug_insn_changes)
1316 {
1317 struct queued_debug_insn_change *cur;
1318 for (cur = all_vd[bb->index].e[regno].debug_insn_changes;
1319 cur; cur = cur->next)
1320 --all_vd[bb->index].n_debug_insn_changes;
1321 all_vd[bb->index].e[regno].debug_insn_changes = NULL;
1322 if (all_vd[bb->index].n_debug_insn_changes == 0)
1323 break;
1324 }
1325 }
1326 }
1327 }
1328 else
1329 init_value_data (all_vd + bb->index);
1330
1331 return copyprop_hardreg_forward_1 (bb, all_vd + bb->index);
1332 }
1333
1334 static void
1335 cprop_hardreg_debug (function *fun, struct value_data *all_vd)
1336 {
1337 basic_block bb;
1338
1339 FOR_EACH_BB_FN (bb, fun)
1340 if (all_vd[bb->index].n_debug_insn_changes)
1341 {
1342 unsigned int regno;
1343 bitmap live;
1344
1345 live = df_get_live_out (bb);
1346 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1347 if (all_vd[bb->index].e[regno].debug_insn_changes)
1348 {
1349 if (REGNO_REG_SET_P (live, regno))
1350 apply_debug_insn_changes (all_vd + bb->index, regno);
1351
1352 struct queued_debug_insn_change *cur;
1353 for (cur = all_vd[bb->index].e[regno].debug_insn_changes;
1354 cur; cur = cur->next)
1355 --all_vd[bb->index].n_debug_insn_changes;
1356 all_vd[bb->index].e[regno].debug_insn_changes = NULL;
1357 if (all_vd[bb->index].n_debug_insn_changes == 0)
1358 break;
1359 }
1360 }
1361
1362 queued_debug_insn_change_pool.release ();
1363 }
1364
/* Main entry point of the pass.  Propagates hard-register copies over
   every basic block of FUN once, then revisits only the blocks where
   the first sweep changed something.  */

unsigned int
pass_cprop_hardreg::execute (function *fun)
{
  struct value_data *all_vd;
  basic_block bb;

  /* One value_data record per basic block.  */
  all_vd = XNEWVEC (struct value_data, last_basic_block_for_fn (fun));

  auto_sbitmap visited (last_basic_block_for_fn (fun));
  bitmap_clear (visited);

  /* Indices of blocks changed by the first sweep, queued for a second
     sweep.  */
  auto_vec<int> worklist;
  bool any_debug_changes = false;

  /* We need accurate notes.  Earlier passes such as if-conversion may
     leave notes in an inconsistent state.  */
  df_note_add_problem ();
  df_analyze ();

  /* It is tempting to set DF_LR_RUN_DCE, but DCE may choose to delete
     an insn and this pass would not have visibility into the removal.
     This pass would then potentially use the source of that
     INSN for propagation purposes, generating invalid code.

     So we just ask for updated notes and handle trivial deletions
     within this pass where we can update this passes internal
     data structures appropriately.  */
  df_set_flags (DF_DEFER_INSN_RESCAN);

  FOR_EACH_BB_FN (bb, fun)
    {
      if (cprop_hardreg_bb (bb, all_vd, visited))
	worklist.safe_push (bb->index);
      if (all_vd[bb->index].n_debug_insn_changes)
	any_debug_changes = true;
    }

  /* We must call df_analyze here unconditionally to ensure that the
     REG_UNUSED and REG_DEAD notes are consistent with and without -g.  */
  df_analyze ();

  if (MAY_HAVE_DEBUG_BIND_INSNS && any_debug_changes)
    cprop_hardreg_debug (fun, all_vd);

  /* Second pass if we've changed anything, only for the bbs where we have
     changed anything though.  */
  if (!worklist.is_empty ())
    {
      unsigned int i;
      int index;

      any_debug_changes = false;
      /* Clearing VISITED means a worklist block inherits its single
	 predecessor's value data only if that predecessor is itself
	 reprocessed in this sweep — stale first-sweep data must not
	 be propagated into a reprocessed block.  */
      bitmap_clear (visited);
      FOR_EACH_VEC_ELT (worklist, i, index)
	{
	  bb = BASIC_BLOCK_FOR_FN (fun, index);
	  cprop_hardreg_bb (bb, all_vd, visited);
	  if (all_vd[bb->index].n_debug_insn_changes)
	    any_debug_changes = true;
	}

      df_analyze ();
      if (MAY_HAVE_DEBUG_BIND_INSNS && any_debug_changes)
	cprop_hardreg_debug (fun, all_vd);
    }

  free (all_vd);
  return 0;
}
1434
1435 } // anon namespace
1436
/* Factory function used by the pass manager to create an instance of
   the hard-register copy propagation pass.  Caller owns the result.  */

rtl_opt_pass *
make_pass_cprop_hardreg (gcc::context *ctxt)
{
  return new pass_cprop_hardreg (ctxt);
}