gcc/jump.c
1 /* Optimize jump instructions, for GNU compiler.
2 Copyright (C) 1987-2021 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This is the pathetic remnant of the once-famous jump-optimization pass
21 of the compiler.  Now it contains basically a set of utility functions to
22 operate on jumps.
23
24 Each CODE_LABEL has a count of the times it is used
25 stored in the LABEL_NUSES internal field, and each JUMP_INSN
26 has one label that it refers to stored in the
27 JUMP_LABEL internal field. With this we can detect labels that
28 become unused because of the deletion of all the jumps that
29 formerly used them. The JUMP_LABEL info is sometimes looked
30 at by later passes. For return insns, it contains either a
31 RETURN or a SIMPLE_RETURN rtx.
32
33 The subroutines redirect_jump and invert_jump are used
34 from other passes as well. */
35
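/* Editorial example (an illustrative sketch, not part of the original file):
   once rebuild_jump_labels below has run, a pass that needs the target of a
   direct branch can rely on the fields described above, e.g.

     if (jump_to_label_p (insn))
       {
	 rtx_insn *target = JUMP_LABEL_AS_INSN (insn);
	 gcc_assert (LABEL_P (target) && LABEL_NUSES (target) > 0);
       }

   jump_to_label_p is defined later in this file; JUMP_LABEL_AS_INSN is the
   rtl.h accessor assumed to be available to such a pass.  */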
36 #include "config.h"
37 #include "system.h"
38 #include "coretypes.h"
39 #include "backend.h"
40 #include "target.h"
41 #include "rtl.h"
42 #include "tree.h"
43 #include "cfghooks.h"
44 #include "tree-pass.h"
45 #include "memmodel.h"
46 #include "tm_p.h"
47 #include "insn-config.h"
48 #include "regs.h"
49 #include "emit-rtl.h"
50 #include "recog.h"
51 #include "cfgrtl.h"
52 #include "rtl-iter.h"
53
54 /* Optimize jump y; x: ... y: jumpif... x?
55 Don't know if it is worth bothering with. */
56 /* Optimize two cases of conditional jump to conditional jump?
57 This can never delete any instruction or make anything dead,
58 or even change what is live at any point.
59 So perhaps let combiner do it. */
60
61 static void init_label_info (rtx_insn *);
62 static void mark_all_labels (rtx_insn *);
63 static void mark_jump_label_1 (rtx, rtx_insn *, bool, bool);
64 static void mark_jump_label_asm (rtx, rtx_insn *);
65 static void redirect_exp_1 (rtx *, rtx, rtx, rtx_insn *);
66 static int invert_exp_1 (rtx, rtx_insn *);
67 \f
68 /* Worker for rebuild_jump_labels and rebuild_jump_labels_chain. */
69 static void
70 rebuild_jump_labels_1 (rtx_insn *f, bool count_forced)
71 {
72 timevar_push (TV_REBUILD_JUMP);
73 init_label_info (f);
74 mark_all_labels (f);
75
76 /* Keep track of labels used from static data; we don't track them
77 closely enough to delete them here, so make sure their reference
78 count doesn't drop to zero. */
79
80 if (count_forced)
81 {
82 rtx_insn *insn;
83 unsigned int i;
84 FOR_EACH_VEC_SAFE_ELT (forced_labels, i, insn)
85 if (LABEL_P (insn))
86 LABEL_NUSES (insn)++;
87 }
88 timevar_pop (TV_REBUILD_JUMP);
89 }
90
91 /* This function rebuilds the JUMP_LABEL field and REG_LABEL_TARGET
92 notes in jumping insns and REG_LABEL_OPERAND notes in non-jumping
93 instructions and jumping insns that have labels as operands
94 (e.g. cbranchsi4). */
95 void
96 rebuild_jump_labels (rtx_insn *f)
97 {
98 rebuild_jump_labels_1 (f, true);
99 }
100
101 /* This function is like rebuild_jump_labels, but doesn't run over
102 forced_labels. It can be used on insn chains that aren't the
103 main function chain. */
104 void
105 rebuild_jump_labels_chain (rtx_insn *chain)
106 {
107 rebuild_jump_labels_1 (chain, false);
108 }
109 \f
110 /* Some old code expects exactly one BARRIER as the NEXT_INSN of a
111 non-fallthru insn. This is not generally true, as multiple barriers
112 may have crept in, or the BARRIER may be separated from the last
113 real insn by one or more NOTEs.
114
115 This simple pass moves barriers and removes duplicates so that the
116 old code is happy.
117 */
118 static unsigned int
119 cleanup_barriers (void)
120 {
121 rtx_insn *insn;
122 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
123 {
124 if (BARRIER_P (insn))
125 {
126 rtx_insn *prev = prev_nonnote_nondebug_insn (insn);
127 if (!prev)
128 continue;
129
130 if (BARRIER_P (prev))
131 delete_insn (insn);
132 else if (prev != PREV_INSN (insn))
133 {
134 basic_block bb = BLOCK_FOR_INSN (prev);
135 rtx_insn *end = PREV_INSN (insn);
136 reorder_insns_nobb (insn, insn, prev);
137 if (bb)
138 {
139 /* If the backend called compute_bb_for_insn in machine reorg
140 and didn't call free_bb_for_insn again, preserve basic block
141 boundaries.  Move the end of the basic block to PREV since
142 it is now followed by a barrier, and clear BLOCK_FOR_INSN
143 on the following notes.
144 ??? Maybe the proper solution for targets that still have a
145 cfg around after machine reorg is not to run the cleanup_barriers
146 pass at all. */
147 BB_END (bb) = prev;
148 do
149 {
150 prev = NEXT_INSN (prev);
151 if (prev != insn && BLOCK_FOR_INSN (prev) == bb)
152 BLOCK_FOR_INSN (prev) = NULL;
153 }
154 while (prev != end);
155 }
156 }
157 }
158 }
159 return 0;
160 }
161
162 namespace {
163
164 const pass_data pass_data_cleanup_barriers =
165 {
166 RTL_PASS, /* type */
167 "barriers", /* name */
168 OPTGROUP_NONE, /* optinfo_flags */
169 TV_NONE, /* tv_id */
170 0, /* properties_required */
171 0, /* properties_provided */
172 0, /* properties_destroyed */
173 0, /* todo_flags_start */
174 0, /* todo_flags_finish */
175 };
176
177 class pass_cleanup_barriers : public rtl_opt_pass
178 {
179 public:
180 pass_cleanup_barriers (gcc::context *ctxt)
181 : rtl_opt_pass (pass_data_cleanup_barriers, ctxt)
182 {}
183
184 /* opt_pass methods: */
185 virtual unsigned int execute (function *) { return cleanup_barriers (); }
186
187 }; // class pass_cleanup_barriers
188
189 } // anon namespace
190
191 rtl_opt_pass *
192 make_pass_cleanup_barriers (gcc::context *ctxt)
193 {
194 return new pass_cleanup_barriers (ctxt);
195 }
196
197 \f
198 /* Initialize LABEL_NUSES and JUMP_LABEL fields, add REG_LABEL_TARGET
199 for remaining targets for JUMP_P. Delete any REG_LABEL_OPERAND
200 notes whose labels don't occur in the insn any more. */
201
202 static void
203 init_label_info (rtx_insn *f)
204 {
205 rtx_insn *insn;
206
207 for (insn = f; insn; insn = NEXT_INSN (insn))
208 {
209 if (LABEL_P (insn))
210 LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
211
212 /* REG_LABEL_TARGET notes (including the JUMP_LABEL field) are
213 sticky and not reset here; that way we won't lose association
214 with a label when e.g. the source for a target register
215 disappears out of reach for targets that may use jump-target
216 registers. Jump transformations are supposed to transform
217 any REG_LABEL_TARGET notes. The target label reference in a
218 branch may disappear from the branch (and from the
219 instruction before it) for other reasons, like register
220 allocation. */
221
222 if (INSN_P (insn))
223 {
224 rtx note, next;
225
226 for (note = REG_NOTES (insn); note; note = next)
227 {
228 next = XEXP (note, 1);
229 if (REG_NOTE_KIND (note) == REG_LABEL_OPERAND
230 && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
231 remove_note (insn, note);
232 }
233 }
234 }
235 }
236
237 /* A subroutine of mark_all_labels. Trivially propagate a simple label
238 load into a jump_insn that uses it. */
239
240 static void
241 maybe_propagate_label_ref (rtx_insn *jump_insn, rtx_insn *prev_nonjump_insn)
242 {
243 rtx label_note, pc, pc_src;
244
245 pc = pc_set (jump_insn);
246 pc_src = pc != NULL ? SET_SRC (pc) : NULL;
247 label_note = find_reg_note (prev_nonjump_insn, REG_LABEL_OPERAND, NULL);
248
249 /* If the previous non-jump insn sets something to a label,
250 something that this jump insn uses, make that label the primary
251 target of this insn if we don't yet have any. That previous
252 insn must be a single_set and not refer to more than one label.
253 The jump insn must not refer to other labels as jump targets
254 and must be a plain (set (pc) ...), maybe in a parallel, and
255 may refer to the item being set only directly or as one of the
256 arms in an IF_THEN_ELSE. */
257
258 if (label_note != NULL && pc_src != NULL)
259 {
260 rtx label_set = single_set (prev_nonjump_insn);
261 rtx label_dest = label_set != NULL ? SET_DEST (label_set) : NULL;
262
263 if (label_set != NULL
264 /* The source must be the direct LABEL_REF, not a
265 PLUS, UNSPEC, IF_THEN_ELSE etc. */
266 && GET_CODE (SET_SRC (label_set)) == LABEL_REF
267 && (rtx_equal_p (label_dest, pc_src)
268 || (GET_CODE (pc_src) == IF_THEN_ELSE
269 && (rtx_equal_p (label_dest, XEXP (pc_src, 1))
270 || rtx_equal_p (label_dest, XEXP (pc_src, 2))))))
271 {
272 /* The CODE_LABEL referred to in the note must be the
273 CODE_LABEL in the LABEL_REF of the "set". We can
274 conveniently use it for the marker function, which
275 requires a LABEL_REF wrapping. */
276 gcc_assert (XEXP (label_note, 0) == label_ref_label (SET_SRC (label_set)));
277
278 mark_jump_label_1 (label_set, jump_insn, false, true);
279
280 gcc_assert (JUMP_LABEL (jump_insn) == XEXP (label_note, 0));
281 }
282 }
283 }
284
285 /* Mark the label each jump jumps to.
286 Combine consecutive labels, and count uses of labels. */
287
288 static void
289 mark_all_labels (rtx_insn *f)
290 {
291 rtx_insn *insn;
292
293 if (current_ir_type () == IR_RTL_CFGLAYOUT)
294 {
295 basic_block bb;
296 FOR_EACH_BB_FN (bb, cfun)
297 {
298 /* In cfglayout mode, we don't bother with trivial next-insn
299 propagation of LABEL_REFs into JUMP_LABEL. This will be
300 handled by other optimizers using better algorithms. */
301 FOR_BB_INSNS (bb, insn)
302 {
303 gcc_assert (! insn->deleted ());
304 if (NONDEBUG_INSN_P (insn))
305 mark_jump_label (PATTERN (insn), insn, 0);
306 }
307
308 /* In cfglayout mode, there may be non-insns between the
309 basic blocks. If those non-insns represent tablejump data,
310 they contain label references that we must record. */
311 for (insn = BB_HEADER (bb); insn; insn = NEXT_INSN (insn))
312 if (JUMP_TABLE_DATA_P (insn))
313 mark_jump_label (PATTERN (insn), insn, 0);
314 for (insn = BB_FOOTER (bb); insn; insn = NEXT_INSN (insn))
315 if (JUMP_TABLE_DATA_P (insn))
316 mark_jump_label (PATTERN (insn), insn, 0);
317 }
318 }
319 else
320 {
321 rtx_insn *prev_nonjump_insn = NULL;
322 for (insn = f; insn; insn = NEXT_INSN (insn))
323 {
324 if (insn->deleted ())
325 ;
326 else if (LABEL_P (insn))
327 prev_nonjump_insn = NULL;
328 else if (JUMP_TABLE_DATA_P (insn))
329 mark_jump_label (PATTERN (insn), insn, 0);
330 else if (NONDEBUG_INSN_P (insn))
331 {
332 mark_jump_label (PATTERN (insn), insn, 0);
333 if (JUMP_P (insn))
334 {
335 if (JUMP_LABEL (insn) == NULL && prev_nonjump_insn != NULL)
336 maybe_propagate_label_ref (insn, prev_nonjump_insn);
337 }
338 else
339 prev_nonjump_insn = insn;
340 }
341 }
342 }
343 }
344 \f
345 /* Given a comparison (CODE ARG0 ARG1), inside an insn, INSN, return the code
346 of the reversed comparison if it is possible to do so.  Otherwise return
347 UNKNOWN.  UNKNOWN may also be returned for a CC_MODE comparison when we don't
348 know whether its source is a floating-point or an integer comparison.  The
349 machine description should define the REVERSIBLE_CC_MODE and REVERSE_CONDITION
350 macros to help this function avoid overhead in these cases. */
351 enum rtx_code
352 reversed_comparison_code_parts (enum rtx_code code, const_rtx arg0,
353 const_rtx arg1, const rtx_insn *insn)
354 {
355 machine_mode mode;
356
357 /* If this is not actually a comparison, we can't reverse it. */
358 if (GET_RTX_CLASS (code) != RTX_COMPARE
359 && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
360 return UNKNOWN;
361
362 mode = GET_MODE (arg0);
363 if (mode == VOIDmode)
364 mode = GET_MODE (arg1);
365
366 /* First see if the machine description supplies us with a way to reverse
367 the comparison.  Give it priority over everything else to allow the
368 machine description to do tricks. */
369 if (GET_MODE_CLASS (mode) == MODE_CC
370 && REVERSIBLE_CC_MODE (mode))
371 return REVERSE_CONDITION (code, mode);
372
373 /* Try a few special cases based on the comparison code. */
374 switch (code)
375 {
376 case GEU:
377 case GTU:
378 case LEU:
379 case LTU:
380 case NE:
381 case EQ:
382 /* It is always safe to reverse EQ and NE, even for the floating
383 point. Similarly the unsigned comparisons are never used for
384 floating point so we can reverse them in the default way. */
385 return reverse_condition (code);
386 case ORDERED:
387 case UNORDERED:
388 case LTGT:
389 case UNEQ:
390 /* If we already see an unordered comparison, we can be sure we are
391 dealing with floating point, so we don't need any more tests. */
392 return reverse_condition_maybe_unordered (code);
393 case UNLT:
394 case UNLE:
395 case UNGT:
396 case UNGE:
397 /* We don't have a safe way to reverse these yet. */
398 return UNKNOWN;
399 default:
400 break;
401 }
402
403 if (GET_MODE_CLASS (mode) == MODE_CC || CC0_P (arg0))
404 {
405 /* Try to search for the comparison to determine the real mode.
406 This code is expensive, but with a sane machine description it
407 will never be used, since REVERSIBLE_CC_MODE will return true
408 in all cases. */
409 if (! insn)
410 return UNKNOWN;
411
412 /* These CONST_CAST's are okay because prev_nonnote_insn just
413 returns its argument and we assign it to a const_rtx
414 variable. */
415 for (rtx_insn *prev = prev_nonnote_insn (const_cast<rtx_insn *> (insn));
416 prev != 0 && !LABEL_P (prev);
417 prev = prev_nonnote_insn (prev))
418 {
419 const_rtx set = set_of (arg0, prev);
420 if (set && GET_CODE (set) == SET
421 && rtx_equal_p (SET_DEST (set), arg0))
422 {
423 rtx src = SET_SRC (set);
424
425 if (GET_CODE (src) == COMPARE)
426 {
427 rtx comparison = src;
428 arg0 = XEXP (src, 0);
429 mode = GET_MODE (arg0);
430 if (mode == VOIDmode)
431 mode = GET_MODE (XEXP (comparison, 1));
432 break;
433 }
434 /* We can get past reg-reg moves.  This may be useful for the model
435 of i387 comparisons that first move the flag registers around. */
436 if (REG_P (src))
437 {
438 arg0 = src;
439 continue;
440 }
441 }
442 /* If the register is clobbered in some way we cannot understand,
443 give up. */
444 if (set)
445 return UNKNOWN;
446 }
447 }
448
449 /* Test for an integer condition, or a floating-point comparison
450 in which NaNs can be ignored. */
451 if (CONST_INT_P (arg0)
452 || (GET_MODE (arg0) != VOIDmode
453 && GET_MODE_CLASS (mode) != MODE_CC
454 && !HONOR_NANS (mode)))
455 return reverse_condition (code);
456
457 return UNKNOWN;
458 }
459
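/* Editorial example (a sketch, not in the original sources): for an integer
   comparison the reversal performed by reversed_comparison_code_parts above
   is unconditional, while for a floating-point comparison with NaNs honored
   GT cannot be reversed to LE, since both are false when an operand is a NaN:

     enum rtx_code rev = reversed_comparison_code_parts (GT, x, y, insn);
     /* rev == LE for integer modes; rev == UNKNOWN if GET_MODE (x) is a
	floating-point mode and HONOR_NANS of that mode is true, because GT
	is not one of the EQ/NE, unsigned, or unordered codes handled by the
	special cases above.  */

   Here x, y and insn stand for the caller's operands and insn.  */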
460 /* A wrapper around the previous function that takes COMPARISON as an
461 rtx expression.  This simplifies many callers. */
462 enum rtx_code
463 reversed_comparison_code (const_rtx comparison, const rtx_insn *insn)
464 {
465 if (!COMPARISON_P (comparison))
466 return UNKNOWN;
467 return reversed_comparison_code_parts (GET_CODE (comparison),
468 XEXP (comparison, 0),
469 XEXP (comparison, 1), insn);
470 }
471
472 /* Return the comparison with the reversed code of EXP.
473 Return NULL_RTX if we fail to do the reversal. */
474 rtx
475 reversed_comparison (const_rtx exp, machine_mode mode)
476 {
477 enum rtx_code reversed_code = reversed_comparison_code (exp, NULL);
478 if (reversed_code == UNKNOWN)
479 return NULL_RTX;
480 else
481 return simplify_gen_relational (reversed_code, mode, VOIDmode,
482 XEXP (exp, 0), XEXP (exp, 1));
483 }
484
485 \f
486 /* Given an rtx-code for a comparison, return the code for the negated
487 comparison. If no such code exists, return UNKNOWN.
488
489 WATCH OUT! reverse_condition is not safe to use on a jump that might
490 be acting on the results of an IEEE floating point comparison, because
491 of the special treatment of non-signaling nans in comparisons.
492 Use reversed_comparison_code instead. */
493
494 enum rtx_code
495 reverse_condition (enum rtx_code code)
496 {
497 switch (code)
498 {
499 case EQ:
500 return NE;
501 case NE:
502 return EQ;
503 case GT:
504 return LE;
505 case GE:
506 return LT;
507 case LT:
508 return GE;
509 case LE:
510 return GT;
511 case GTU:
512 return LEU;
513 case GEU:
514 return LTU;
515 case LTU:
516 return GEU;
517 case LEU:
518 return GTU;
519 case UNORDERED:
520 return ORDERED;
521 case ORDERED:
522 return UNORDERED;
523
524 case UNLT:
525 case UNLE:
526 case UNGT:
527 case UNGE:
528 case UNEQ:
529 case LTGT:
530 return UNKNOWN;
531
532 default:
533 gcc_unreachable ();
534 }
535 }
536
537 /* Similar, but we're allowed to generate unordered comparisons, which
538 makes it safe for IEEE floating-point.  Of course, the caller still has
539 to check that the target supports them... */
540
541 enum rtx_code
542 reverse_condition_maybe_unordered (enum rtx_code code)
543 {
544 switch (code)
545 {
546 case EQ:
547 return NE;
548 case NE:
549 return EQ;
550 case GT:
551 return UNLE;
552 case GE:
553 return UNLT;
554 case LT:
555 return UNGE;
556 case LE:
557 return UNGT;
558 case LTGT:
559 return UNEQ;
560 case UNORDERED:
561 return ORDERED;
562 case ORDERED:
563 return UNORDERED;
564 case UNLT:
565 return GE;
566 case UNLE:
567 return GT;
568 case UNGT:
569 return LE;
570 case UNGE:
571 return LT;
572 case UNEQ:
573 return LTGT;
574
575 default:
576 gcc_unreachable ();
577 }
578 }
579
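/* Editorial example (not part of the original file): under IEEE semantics
   with NaNs honored, the logical negation of (LT x y) is (UNGE x y), not
   (GE x y), because both LT and GE are false when either operand is a NaN.
   Hence reverse_condition (LT) == GE is only a valid negation when NaNs can
   be ignored, while reverse_condition_maybe_unordered (LT) == UNGE is always
   a correct negation, provided the target supports the unordered codes.  */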
580 /* Similar, but return the code when two operands of a comparison are swapped.
581 This IS safe for IEEE floating-point. */
582
583 enum rtx_code
584 swap_condition (enum rtx_code code)
585 {
586 switch (code)
587 {
588 case EQ:
589 case NE:
590 case UNORDERED:
591 case ORDERED:
592 case UNEQ:
593 case LTGT:
594 return code;
595
596 case GT:
597 return LT;
598 case GE:
599 return LE;
600 case LT:
601 return GT;
602 case LE:
603 return GE;
604 case GTU:
605 return LTU;
606 case GEU:
607 return LEU;
608 case LTU:
609 return GTU;
610 case LEU:
611 return GEU;
612 case UNLT:
613 return UNGT;
614 case UNLE:
615 return UNGE;
616 case UNGT:
617 return UNLT;
618 case UNGE:
619 return UNLE;
620
621 default:
622 gcc_unreachable ();
623 }
624 }
625
626 /* Given a comparison CODE, return the corresponding unsigned comparison.
627 If CODE is an equality comparison or already an unsigned comparison,
628 CODE is returned. */
629
630 enum rtx_code
631 unsigned_condition (enum rtx_code code)
632 {
633 switch (code)
634 {
635 case EQ:
636 case NE:
637 case GTU:
638 case GEU:
639 case LTU:
640 case LEU:
641 return code;
642
643 case GT:
644 return GTU;
645 case GE:
646 return GEU;
647 case LT:
648 return LTU;
649 case LE:
650 return LEU;
651
652 default:
653 gcc_unreachable ();
654 }
655 }
656
657 /* Similarly, return the signed version of a comparison. */
658
659 enum rtx_code
660 signed_condition (enum rtx_code code)
661 {
662 switch (code)
663 {
664 case EQ:
665 case NE:
666 case GT:
667 case GE:
668 case LT:
669 case LE:
670 return code;
671
672 case GTU:
673 return GT;
674 case GEU:
675 return GE;
676 case LTU:
677 return LT;
678 case LEU:
679 return LE;
680
681 default:
682 gcc_unreachable ();
683 }
684 }
685 \f
686 /* Return nonzero if CODE1 is more strict than CODE2, i.e., if the
687 truth of CODE1 implies the truth of CODE2. */
688
689 int
690 comparison_dominates_p (enum rtx_code code1, enum rtx_code code2)
691 {
692 /* UNKNOWN comparison codes can happen as a result of trying to reverse
693 comparison codes.
694 They can't match anything, so we have to reject them here. */
695 if (code1 == UNKNOWN || code2 == UNKNOWN)
696 return 0;
697
698 if (code1 == code2)
699 return 1;
700
701 switch (code1)
702 {
703 case UNEQ:
704 if (code2 == UNLE || code2 == UNGE)
705 return 1;
706 break;
707
708 case EQ:
709 if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
710 || code2 == ORDERED)
711 return 1;
712 break;
713
714 case UNLT:
715 if (code2 == UNLE || code2 == NE)
716 return 1;
717 break;
718
719 case LT:
720 if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
721 return 1;
722 break;
723
724 case UNGT:
725 if (code2 == UNGE || code2 == NE)
726 return 1;
727 break;
728
729 case GT:
730 if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
731 return 1;
732 break;
733
734 case GE:
735 case LE:
736 if (code2 == ORDERED)
737 return 1;
738 break;
739
740 case LTGT:
741 if (code2 == NE || code2 == ORDERED)
742 return 1;
743 break;
744
745 case LTU:
746 if (code2 == LEU || code2 == NE)
747 return 1;
748 break;
749
750 case GTU:
751 if (code2 == GEU || code2 == NE)
752 return 1;
753 break;
754
755 case UNORDERED:
756 if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
757 || code2 == UNGE || code2 == UNGT)
758 return 1;
759 break;
760
761 default:
762 break;
763 }
764
765 return 0;
766 }
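/* Editorial example (not in the original file): comparison_dominates_p
   encodes implications between predicates on the same operands, e.g.
   comparison_dominates_p (GT, GE) and comparison_dominates_p (GT, NE) are
   nonzero because a > b implies both a >= b and a != b, whereas
   comparison_dominates_p (GE, GT) is zero since a >= b does not imply
   a > b.  */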
767 \f
768 /* Return 1 if INSN is an unconditional jump and nothing else. */
769
770 int
771 simplejump_p (const rtx_insn *insn)
772 {
773 return (JUMP_P (insn)
774 && GET_CODE (PATTERN (insn)) == SET
775 && GET_CODE (SET_DEST (PATTERN (insn))) == PC
776 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
777 }
778
779 /* Return nonzero if INSN is a (possibly) conditional jump
780 and nothing more.
781
782 Use of this function is deprecated, since we need to support combined
783 branch and compare insns. Use any_condjump_p instead whenever possible. */
784
785 int
786 condjump_p (const rtx_insn *insn)
787 {
788 const_rtx x = PATTERN (insn);
789
790 if (GET_CODE (x) != SET
791 || GET_CODE (SET_DEST (x)) != PC)
792 return 0;
793
794 x = SET_SRC (x);
795 if (GET_CODE (x) == LABEL_REF)
796 return 1;
797 else
798 return (GET_CODE (x) == IF_THEN_ELSE
799 && ((GET_CODE (XEXP (x, 2)) == PC
800 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
801 || ANY_RETURN_P (XEXP (x, 1))))
802 || (GET_CODE (XEXP (x, 1)) == PC
803 && (GET_CODE (XEXP (x, 2)) == LABEL_REF
804 || ANY_RETURN_P (XEXP (x, 2))))));
805 }
806
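/* Editorial example (illustrative, not from the original sources): the
   patterns accepted by condjump_p above are a plain direct jump

     (set (pc) (label_ref 23))

   and a conditional branch of the form

     (set (pc) (if_then_else (eq (reg:CCZ 17) (const_int 0))
			     (label_ref 23)
			     (pc)))

   where either arm may also be a RETURN or SIMPLE_RETURN.  Anything wrapped
   in a PARALLEL is rejected here and handled by condjump_in_parallel_p or
   any_condjump_p below.  The mode and register number in the condition are
   placeholders.  */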
807 /* Return nonzero if INSN is a (possibly) conditional jump inside a
808 PARALLEL.
809
810 Use of this function is deprecated, since we need to support combined
811 branch and compare insns. Use any_condjump_p instead whenever possible. */
812
813 int
814 condjump_in_parallel_p (const rtx_insn *insn)
815 {
816 const_rtx x = PATTERN (insn);
817
818 if (GET_CODE (x) != PARALLEL)
819 return 0;
820 else
821 x = XVECEXP (x, 0, 0);
822
823 if (GET_CODE (x) != SET)
824 return 0;
825 if (GET_CODE (SET_DEST (x)) != PC)
826 return 0;
827 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
828 return 1;
829 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
830 return 0;
831 if (XEXP (SET_SRC (x), 2) == pc_rtx
832 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
833 || ANY_RETURN_P (XEXP (SET_SRC (x), 1))))
834 return 1;
835 if (XEXP (SET_SRC (x), 1) == pc_rtx
836 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
837 || ANY_RETURN_P (XEXP (SET_SRC (x), 2))))
838 return 1;
839 return 0;
840 }
841
842 /* Return the SET of PC in INSN, otherwise NULL. */
843
844 rtx
845 pc_set (const rtx_insn *insn)
846 {
847 rtx pat;
848 if (!JUMP_P (insn))
849 return NULL_RTX;
850 pat = PATTERN (insn);
851
852 /* The set is allowed to appear either as the insn pattern or
853 the first set in a PARALLEL, UNSPEC or UNSPEC_VOLATILE. */
854 switch (GET_CODE (pat))
855 {
856 case PARALLEL:
857 case UNSPEC:
858 case UNSPEC_VOLATILE:
859 pat = XVECEXP (pat, 0, 0);
860 break;
861 default:
862 break;
863 }
864 if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
865 return pat;
866
867 return NULL_RTX;
868 }
869
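/* Editorial example (a sketch, not part of the original file): pc_set also
   looks through a PARALLEL whose first element is the branch, e.g.

     (parallel [(set (pc) (if_then_else ... (label_ref 42) (pc)))
		(clobber (reg:CC 17))])

   for which it returns the inner SET.  A computed jump such as
   (set (pc) (reg:DI 100)) is returned as well, but its SET_SRC is neither a
   LABEL_REF nor an IF_THEN_ELSE, so any_uncondjump_p and any_condjump_p
   below both reject it.  */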
870 /* Return true when insn is an unconditional direct jump,
871 possibly bundled inside a PARALLEL, UNSPEC or UNSPEC_VOLATILE.
872 The instruction may have various other effects so before removing the jump
873 you must verify onlyjump_p. */
874
875 int
876 any_uncondjump_p (const rtx_insn *insn)
877 {
878 const_rtx x = pc_set (insn);
879 if (!x)
880 return 0;
881 if (GET_CODE (SET_SRC (x)) != LABEL_REF)
882 return 0;
883 if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
884 return 0;
885 return 1;
886 }
887
888 /* Return true when insn is a conditional jump. This function works for
889 instructions containing PC sets in PARALLELs, UNSPECs or UNSPEC_VOLATILEs.
890 The instruction may have various other effects so before removing the jump
891 you must verify onlyjump_p.
892
893 Note that unlike condjump_p it returns false for unconditional jumps. */
894
895 int
896 any_condjump_p (const rtx_insn *insn)
897 {
898 const_rtx x = pc_set (insn);
899 enum rtx_code a, b;
900
901 if (!x)
902 return 0;
903 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
904 return 0;
905
906 a = GET_CODE (XEXP (SET_SRC (x), 1));
907 b = GET_CODE (XEXP (SET_SRC (x), 2));
908
909 return ((b == PC && (a == LABEL_REF || a == RETURN || a == SIMPLE_RETURN))
910 || (a == PC
911 && (b == LABEL_REF || b == RETURN || b == SIMPLE_RETURN)));
912 }
913
914 /* Return the label of a conditional jump. */
915
916 rtx
917 condjump_label (const rtx_insn *insn)
918 {
919 rtx x = pc_set (insn);
920
921 if (!x)
922 return NULL_RTX;
923 x = SET_SRC (x);
924 if (GET_CODE (x) == LABEL_REF)
925 return x;
926 if (GET_CODE (x) != IF_THEN_ELSE)
927 return NULL_RTX;
928 if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
929 return XEXP (x, 1);
930 if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
931 return XEXP (x, 2);
932 return NULL_RTX;
933 }
934
935 /* Return TRUE if INSN is a return jump. */
936
937 int
938 returnjump_p (const rtx_insn *insn)
939 {
940 if (JUMP_P (insn))
941 {
942 subrtx_iterator::array_type array;
943 FOR_EACH_SUBRTX (iter, array, PATTERN (insn), NONCONST)
944 {
945 const_rtx x = *iter;
946 switch (GET_CODE (x))
947 {
948 case RETURN:
949 case SIMPLE_RETURN:
950 case EH_RETURN:
951 return true;
952
953 case SET:
954 if (SET_IS_RETURN_P (x))
955 return true;
956 break;
957
958 default:
959 break;
960 }
961 }
962 }
963 return false;
964 }
965
966 /* Return true if INSN is a jump that contains an EH_RETURN rtx. */
967
968 int
969 eh_returnjump_p (rtx_insn *insn)
970 {
971 if (JUMP_P (insn))
972 {
973 subrtx_iterator::array_type array;
974 FOR_EACH_SUBRTX (iter, array, PATTERN (insn), NONCONST)
975 if (GET_CODE (*iter) == EH_RETURN)
976 return true;
977 }
978 return false;
979 }
980
981 /* Return true if INSN is a jump that only transfers control and
982 nothing more. */
983
984 int
985 onlyjump_p (const rtx_insn *insn)
986 {
987 rtx set;
988
989 if (!JUMP_P (insn))
990 return 0;
991
992 set = single_set (insn);
993 if (set == NULL)
994 return 0;
995 if (GET_CODE (SET_DEST (set)) != PC)
996 return 0;
997 if (side_effects_p (SET_SRC (set)))
998 return 0;
999
1000 return 1;
1001 }
1002
1003 /* Return true iff INSN is a jump and its JUMP_LABEL is a label, not
1004 NULL or a return. */
1005 bool
1006 jump_to_label_p (const rtx_insn *insn)
1007 {
1008 return (JUMP_P (insn)
1009 && JUMP_LABEL (insn) != NULL && !ANY_RETURN_P (JUMP_LABEL (insn)));
1010 }
1011
1012 /* Return nonzero if X is an RTX that only sets the condition codes
1013 and has no side effects. */
1014
1015 int
1016 only_sets_cc0_p (const_rtx x)
1017 {
1018 if (! x)
1019 return 0;
1020
1021 if (INSN_P (x))
1022 x = PATTERN (x);
1023
1024 return sets_cc0_p (x) == 1 && ! side_effects_p (x);
1025 }
1026
1027 /* Return 1 if X is an RTX that does nothing but set the condition codes
1028 and CLOBBER or USE registers.
1029 Return -1 if X does explicitly set the condition codes,
1030 but also does other things. */
1031
1032 int
1033 sets_cc0_p (const_rtx x)
1034 {
1035 if (! x)
1036 return 0;
1037
1038 if (INSN_P (x))
1039 x = PATTERN (x);
1040
1041 if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
1042 return 1;
1043 if (GET_CODE (x) == PARALLEL)
1044 {
1045 int i;
1046 int sets_cc0 = 0;
1047 int other_things = 0;
1048 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1049 {
1050 if (GET_CODE (XVECEXP (x, 0, i)) == SET
1051 && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
1052 sets_cc0 = 1;
1053 else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
1054 other_things = 1;
1055 }
1056 return ! sets_cc0 ? 0 : other_things ? -1 : 1;
1057 }
1058 return 0;
1059 }
1060 \f
1061 /* Find all CODE_LABELs referred to in X, and increment their use
1062 counts. If INSN is a JUMP_INSN and there is at least one
1063 CODE_LABEL referenced in INSN as a jump target, then store the last
1064 one in JUMP_LABEL (INSN). For a tablejump, this must be the label
1065 for the ADDR_VEC. Store any other jump targets as REG_LABEL_TARGET
1066 notes. If INSN is an INSN or a CALL_INSN or non-target operands of
1067 a JUMP_INSN, and there is at least one CODE_LABEL referenced in
1068 INSN, add a REG_LABEL_OPERAND note containing that label to INSN.
1069 For returnjumps, the JUMP_LABEL will also be set as appropriate.
1070
1071 Note that two labels separated by a loop-beginning note
1072 must be kept distinct if we have not yet done loop-optimization,
1073 because the gap between them is where loop-optimize
1074 will want to move invariant code to.  CROSS_JUMP tells us
1075 that loop-optimization is finished. */
1076
1077 void
1078 mark_jump_label (rtx x, rtx_insn *insn, int in_mem)
1079 {
1080 rtx asmop = extract_asm_operands (x);
1081 if (asmop)
1082 mark_jump_label_asm (asmop, insn);
1083 else
1084 mark_jump_label_1 (x, insn, in_mem != 0,
1085 (insn != NULL && x == PATTERN (insn) && JUMP_P (insn)));
1086 }
1087
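/* Editorial example (illustrative, not part of the original sources): on a
   typical target a tablejump branch looks roughly like

     (jump_insn (parallel [(set (pc) (mem (plus (reg) (label_ref 30))))
			   (use (label_ref 30))]))

   with the (jump_table_data ... (addr_vec [...])) elsewhere, its CODE_LABEL
   being label 30.  mark_jump_label stores that ADDR_VEC label in JUMP_LABEL
   of the branch, and any additional label uses end up as REG_LABEL_TARGET or
   REG_LABEL_OPERAND notes as described above.  The exact RTL shape of the
   dispatch insn varies by target.  */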
1088 /* Worker function for mark_jump_label. IN_MEM is TRUE when X occurs
1089 within a (MEM ...).  IS_TARGET is TRUE when X is to be treated as a
1090 jump-target, i.e. when the JUMP_LABEL field of INSN should be set or a
1091 REG_LABEL_TARGET note should be added, not a REG_LABEL_OPERAND
1092 note. */
1093
1094 static void
1095 mark_jump_label_1 (rtx x, rtx_insn *insn, bool in_mem, bool is_target)
1096 {
1097 RTX_CODE code = GET_CODE (x);
1098 int i;
1099 const char *fmt;
1100
1101 switch (code)
1102 {
1103 case PC:
1104 case CC0:
1105 case REG:
1106 case CLOBBER:
1107 case CALL:
1108 return;
1109
1110 case RETURN:
1111 case SIMPLE_RETURN:
1112 if (is_target)
1113 {
1114 gcc_assert (JUMP_LABEL (insn) == NULL || JUMP_LABEL (insn) == x);
1115 JUMP_LABEL (insn) = x;
1116 }
1117 return;
1118
1119 case MEM:
1120 in_mem = true;
1121 break;
1122
1123 case SEQUENCE:
1124 {
1125 rtx_sequence *seq = as_a <rtx_sequence *> (x);
1126 for (i = 0; i < seq->len (); i++)
1127 mark_jump_label (PATTERN (seq->insn (i)),
1128 seq->insn (i), 0);
1129 }
1130 return;
1131
1132 case SYMBOL_REF:
1133 if (!in_mem)
1134 return;
1135
1136 /* If this is a constant-pool reference, see if it is a label. */
1137 if (CONSTANT_POOL_ADDRESS_P (x))
1138 mark_jump_label_1 (get_pool_constant (x), insn, in_mem, is_target);
1139 break;
1140
1141 /* Handle operands in the condition of an if-then-else as for a
1142 non-jump insn. */
1143 case IF_THEN_ELSE:
1144 if (!is_target)
1145 break;
1146 mark_jump_label_1 (XEXP (x, 0), insn, in_mem, false);
1147 mark_jump_label_1 (XEXP (x, 1), insn, in_mem, true);
1148 mark_jump_label_1 (XEXP (x, 2), insn, in_mem, true);
1149 return;
1150
1151 case LABEL_REF:
1152 {
1153 rtx_insn *label = label_ref_label (x);
1154
1155 /* Ignore remaining references to unreachable labels that
1156 have been deleted. */
1157 if (NOTE_P (label)
1158 && NOTE_KIND (label) == NOTE_INSN_DELETED_LABEL)
1159 break;
1160
1161 gcc_assert (LABEL_P (label));
1162
1163 /* Ignore references to labels of containing functions. */
1164 if (LABEL_REF_NONLOCAL_P (x))
1165 break;
1166
1167 set_label_ref_label (x, label);
1168 if (! insn || ! insn->deleted ())
1169 ++LABEL_NUSES (label);
1170
1171 if (insn)
1172 {
1173 if (is_target
1174 /* Do not change a previous setting of JUMP_LABEL. If the
1175 JUMP_LABEL slot is occupied by a different label,
1176 create a note for this label. */
1177 && (JUMP_LABEL (insn) == NULL || JUMP_LABEL (insn) == label))
1178 JUMP_LABEL (insn) = label;
1179 else
1180 {
1181 enum reg_note kind
1182 = is_target ? REG_LABEL_TARGET : REG_LABEL_OPERAND;
1183
1184 /* Add a REG_LABEL_OPERAND or REG_LABEL_TARGET note
1185 for LABEL unless there already is one. All uses of
1186 a label, except for the primary target of a jump,
1187 must have such a note. */
1188 if (! find_reg_note (insn, kind, label))
1189 add_reg_note (insn, kind, label);
1190 }
1191 }
1192 return;
1193 }
1194
1195 /* Do walk the labels in a vector, but not the first operand of an
1196 ADDR_DIFF_VEC. Don't set the JUMP_LABEL of a vector. */
1197 case ADDR_VEC:
1198 case ADDR_DIFF_VEC:
1199 if (! insn->deleted ())
1200 {
1201 int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;
1202
1203 for (i = 0; i < XVECLEN (x, eltnum); i++)
1204 mark_jump_label_1 (XVECEXP (x, eltnum, i), NULL, in_mem,
1205 is_target);
1206 }
1207 return;
1208
1209 default:
1210 break;
1211 }
1212
1213 fmt = GET_RTX_FORMAT (code);
1214
1215 /* The primary target of a tablejump is the label of the ADDR_VEC,
1216 which is canonically mentioned *last* in the insn. To get it
1217 marked as JUMP_LABEL, we iterate over items in reverse order. */
1218 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1219 {
1220 if (fmt[i] == 'e')
1221 mark_jump_label_1 (XEXP (x, i), insn, in_mem, is_target);
1222 else if (fmt[i] == 'E')
1223 {
1224 int j;
1225
1226 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1227 mark_jump_label_1 (XVECEXP (x, i, j), insn, in_mem,
1228 is_target);
1229 }
1230 }
1231 }
1232
1233 /* Worker function for mark_jump_label. Handle asm insns specially.
1234 In particular, output operands need not be considered so we can
1235 avoid re-scanning the replicated asm_operand. Also, the asm_labels
1236 need to be considered targets. */
1237
1238 static void
1239 mark_jump_label_asm (rtx asmop, rtx_insn *insn)
1240 {
1241 int i;
1242
1243 for (i = ASM_OPERANDS_INPUT_LENGTH (asmop) - 1; i >= 0; --i)
1244 mark_jump_label_1 (ASM_OPERANDS_INPUT (asmop, i), insn, false, false);
1245
1246 for (i = ASM_OPERANDS_LABEL_LENGTH (asmop) - 1; i >= 0; --i)
1247 mark_jump_label_1 (ASM_OPERANDS_LABEL (asmop, i), insn, false, true);
1248 }
1249 \f
1250 /* Delete insn INSN from the chain of insns and update label ref counts
1251 and delete insns now unreachable.
1252
1253 Returns the first insn after INSN that was not deleted.
1254
1255 Use of this function is deprecated.  Use delete_insn instead, followed by
1256 a cfg_cleanup pass to delete unreachable code if needed. */
1257
1258 rtx_insn *
1259 delete_related_insns (rtx uncast_insn)
1260 {
1261 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
1262 int was_code_label = (LABEL_P (insn));
1263 rtx note;
1264 rtx_insn *next = NEXT_INSN (insn), *prev = PREV_INSN (insn);
1265
1266 while (next && next->deleted ())
1267 next = NEXT_INSN (next);
1268
1269 /* This insn is already deleted => return first following nondeleted. */
1270 if (insn->deleted ())
1271 return next;
1272
1273 delete_insn (insn);
1274
1275 /* If instruction is followed by a barrier,
1276 delete the barrier too. */
1277
1278 if (next != 0 && BARRIER_P (next))
1279 delete_insn (next);
1280
1281 /* If deleting a jump, decrement the count of the label,
1282 and delete the label if it is now unused. */
1283
1284 if (jump_to_label_p (insn))
1285 {
1286 rtx lab = JUMP_LABEL (insn);
1287 rtx_jump_table_data *lab_next;
1288
1289 if (LABEL_NUSES (lab) == 0)
1290 /* This can delete NEXT or PREV,
1291 either directly if NEXT is JUMP_LABEL (INSN),
1292 or indirectly through more levels of jumps. */
1293 delete_related_insns (lab);
1294 else if (tablejump_p (insn, NULL, &lab_next))
1295 {
1296 /* If we're deleting the tablejump, delete the dispatch table.
1297 We may not be able to kill the label immediately preceding
1298 just yet, as it might be referenced in code leading up to
1299 the tablejump. */
1300 delete_related_insns (lab_next);
1301 }
1302 }
1303
1304 /* Likewise if we're deleting a dispatch table. */
1305
1306 if (rtx_jump_table_data *table = dyn_cast <rtx_jump_table_data *> (insn))
1307 {
1308 rtvec labels = table->get_labels ();
1309 int i;
1310 int len = GET_NUM_ELEM (labels);
1311
1312 for (i = 0; i < len; i++)
1313 if (LABEL_NUSES (XEXP (RTVEC_ELT (labels, i), 0)) == 0)
1314 delete_related_insns (XEXP (RTVEC_ELT (labels, i), 0));
1315 while (next && next->deleted ())
1316 next = NEXT_INSN (next);
1317 return next;
1318 }
1319
1320 /* Likewise for any JUMP_P / INSN / CALL_INSN with a
1321 REG_LABEL_OPERAND or REG_LABEL_TARGET note. */
1322 if (INSN_P (insn))
1323 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
1324 if ((REG_NOTE_KIND (note) == REG_LABEL_OPERAND
1325 || REG_NOTE_KIND (note) == REG_LABEL_TARGET)
1326 /* This could also be a NOTE_INSN_DELETED_LABEL note. */
1327 && LABEL_P (XEXP (note, 0)))
1328 if (LABEL_NUSES (XEXP (note, 0)) == 0)
1329 delete_related_insns (XEXP (note, 0));
1330
1331 while (prev && (prev->deleted () || NOTE_P (prev)))
1332 prev = PREV_INSN (prev);
1333
1334 /* If INSN was a label and a dispatch table follows it,
1335 delete the dispatch table. The tablejump must have gone already.
1336 It isn't useful to fall through into a table. */
1337
1338 if (was_code_label
1339 && NEXT_INSN (insn) != 0
1340 && JUMP_TABLE_DATA_P (NEXT_INSN (insn)))
1341 next = delete_related_insns (NEXT_INSN (insn));
1342
1343 /* If INSN was a label, delete insns following it if now unreachable. */
1344
1345 if (was_code_label && prev && BARRIER_P (prev))
1346 {
1347 enum rtx_code code;
1348 while (next)
1349 {
1350 code = GET_CODE (next);
1351 if (code == NOTE)
1352 next = NEXT_INSN (next);
1353 /* Keep going past other deleted labels to delete what follows. */
1354 else if (code == CODE_LABEL && next->deleted ())
1355 next = NEXT_INSN (next);
1356 /* Keep the (use (insn))s created by dbr_schedule, which needs
1357 them in order to track liveness relative to a previous
1358 barrier. */
1359 else if (INSN_P (next)
1360 && GET_CODE (PATTERN (next)) == USE
1361 && INSN_P (XEXP (PATTERN (next), 0)))
1362 next = NEXT_INSN (next);
1363 else if (code == BARRIER || INSN_P (next))
1364 /* Note: if this deletes a jump, it can cause more
1365 deletion of unreachable code, after a different label.
1366 As long as the value from this recursive call is correct,
1367 this invocation functions correctly. */
1368 next = delete_related_insns (next);
1369 else
1370 break;
1371 }
1372 }
1373
1374 /* I feel a little doubtful about this loop,
1375 but I see no clean and sure alternative way
1376 to find the first insn after INSN that is not now deleted.
1377 I hope this works. */
1378 while (next && next->deleted ())
1379 next = NEXT_INSN (next);
1380 return next;
1381 }
1382 \f
1383 /* Delete a range of insns from FROM to TO, inclusive.
1384 This is for the sake of peephole optimization, so assume
1385 that whatever these insns do will still be done by a new
1386 peephole insn that will replace them. */
1387
1388 void
1389 delete_for_peephole (rtx_insn *from, rtx_insn *to)
1390 {
1391 rtx_insn *insn = from;
1392
1393 while (1)
1394 {
1395 rtx_insn *next = NEXT_INSN (insn);
1396 rtx_insn *prev = PREV_INSN (insn);
1397
1398 if (!NOTE_P (insn))
1399 {
1400 insn->set_deleted();
1401
1402 /* Patch this insn out of the chain. */
1403 /* We don't do this all at once, because we
1404 must preserve all NOTEs. */
1405 if (prev)
1406 SET_NEXT_INSN (prev) = next;
1407
1408 if (next)
1409 SET_PREV_INSN (next) = prev;
1410 }
1411
1412 if (insn == to)
1413 break;
1414 insn = next;
1415 }
1416
1417 /* Note that if TO is an unconditional jump
1418 we *do not* delete the BARRIER that follows,
1419 since the peephole that replaces this sequence
1420 is also an unconditional jump in that case. */
1421 }
1422 \f
1423 /* A helper function for redirect_exp_1; examines its input X and returns
1424 either a LABEL_REF around a label, or a RETURN if X was NULL. */
1425 static rtx
1426 redirect_target (rtx x)
1427 {
1428 if (x == NULL_RTX)
1429 return ret_rtx;
1430 if (!ANY_RETURN_P (x))
1431 return gen_rtx_LABEL_REF (Pmode, x);
1432 return x;
1433 }
1434
1435 /* Throughout LOC, redirect OLABEL to NLABEL. Treat null OLABEL or
1436 NLABEL as a return. Accrue modifications into the change group. */
1437
1438 static void
1439 redirect_exp_1 (rtx *loc, rtx olabel, rtx nlabel, rtx_insn *insn)
1440 {
1441 rtx x = *loc;
1442 RTX_CODE code = GET_CODE (x);
1443 int i;
1444 const char *fmt;
1445
1446 if ((code == LABEL_REF && label_ref_label (x) == olabel)
1447 || x == olabel)
1448 {
1449 x = redirect_target (nlabel);
1450 if (GET_CODE (x) == LABEL_REF && loc == &PATTERN (insn))
1451 x = gen_rtx_SET (pc_rtx, x);
1452 validate_change (insn, loc, x, 1);
1453 return;
1454 }
1455
1456 if (code == SET && SET_DEST (x) == pc_rtx
1457 && ANY_RETURN_P (nlabel)
1458 && GET_CODE (SET_SRC (x)) == LABEL_REF
1459 && label_ref_label (SET_SRC (x)) == olabel)
1460 {
1461 validate_change (insn, loc, nlabel, 1);
1462 return;
1463 }
1464
1465 if (code == IF_THEN_ELSE)
1466 {
1467 /* Skip the condition of an IF_THEN_ELSE.  We only want to
1468 change jump destinations, not possible label comparisons. */
1469 redirect_exp_1 (&XEXP (x, 1), olabel, nlabel, insn);
1470 redirect_exp_1 (&XEXP (x, 2), olabel, nlabel, insn);
1471 return;
1472 }
1473
1474 fmt = GET_RTX_FORMAT (code);
1475 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1476 {
1477 if (fmt[i] == 'e')
1478 redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
1479 else if (fmt[i] == 'E')
1480 {
1481 int j;
1482 for (j = 0; j < XVECLEN (x, i); j++)
1483 redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
1484 }
1485 }
1486 }
1487
1488 /* Make JUMP go to NLABEL instead of where it jumps now. Accrue
1489 the modifications into the change group. Return false if we did
1490 not see how to do that. */
1491
1492 int
1493 redirect_jump_1 (rtx_insn *jump, rtx nlabel)
1494 {
1495 int ochanges = num_validated_changes ();
1496 rtx *loc, asmop;
1497
1498 gcc_assert (nlabel != NULL_RTX);
1499 asmop = extract_asm_operands (PATTERN (jump));
1500 if (asmop)
1501 {
1502 if (nlabel == NULL)
1503 return 0;
1504 gcc_assert (ASM_OPERANDS_LABEL_LENGTH (asmop) == 1);
1505 loc = &ASM_OPERANDS_LABEL (asmop, 0);
1506 }
1507 else if (GET_CODE (PATTERN (jump)) == PARALLEL)
1508 loc = &XVECEXP (PATTERN (jump), 0, 0);
1509 else
1510 loc = &PATTERN (jump);
1511
1512 redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
1513 return num_validated_changes () > ochanges;
1514 }
1515
1516 /* Make JUMP go to NLABEL instead of where it jumps now. If the old
1517 jump target label is unused as a result, it and the code following
1518 it may be deleted.
1519
1520 Normally, NLABEL will be a label, but it may also be a RETURN rtx;
1521 in that case we are to turn the jump into a (possibly conditional)
1522 return insn.
1523
1524 The return value will be 1 if the change was made, 0 if it wasn't
1525 (this can only occur when trying to produce return insns). */
1526
1527 int
1528 redirect_jump (rtx_jump_insn *jump, rtx nlabel, int delete_unused)
1529 {
1530 rtx olabel = jump->jump_label ();
1531
1532 if (!nlabel)
1533 {
1534 /* If there is no label, we are asked to redirect to the EXIT block.
1535 Before the epilogue is emitted, return/simple_return cannot be
1536 created, so we return 0 immediately.  After the epilogue is emitted,
1537 we always expect a target, either a non-null label or a
1538 return/simple_return RTX. */
1539
1540 if (!epilogue_completed)
1541 return 0;
1542 gcc_unreachable ();
1543 }
1544
1545 if (nlabel == olabel)
1546 return 1;
1547
1548 if (! redirect_jump_1 (jump, nlabel) || ! apply_change_group ())
1549 return 0;
1550
1551 redirect_jump_2 (jump, olabel, nlabel, delete_unused, 0);
1552 return 1;
1553 }
1554
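/* Editorial usage sketch (not part of the original file): a caller that wants
   to retarget a branch and clean up a now-unused label would typically do

     rtx_jump_insn *jump = as_a <rtx_jump_insn *> (insn);
     if (!redirect_jump (jump, new_label, 1))
       ; /* Could not perform the redirection; leave the jump alone.  */

   whereas a caller batching several tentative changes uses redirect_jump_1
   followed by a single apply_change_group, as redirect_jump itself does.
   insn and new_label are placeholders for the caller's insn and label.  */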
1555 /* Fix up JUMP_LABEL and label ref counts after OLABEL has been replaced with
1556 NLABEL in JUMP.
1557 If DELETE_UNUSED is positive, delete the insns related to OLABEL if its
1558 reference count has dropped to zero. */
1559 void
1560 redirect_jump_2 (rtx_jump_insn *jump, rtx olabel, rtx nlabel, int delete_unused,
1561 int invert)
1562 {
1563 rtx note;
1564
1565 gcc_assert (JUMP_LABEL (jump) == olabel);
1566
1567 /* A negative DELETE_UNUSED used to be used to signal behavior regarding
1568 moving the FUNCTION_END note.  Just sanity check that no user still
1569 worries about this. */
1570 gcc_assert (delete_unused >= 0);
1571 JUMP_LABEL (jump) = nlabel;
1572 if (!ANY_RETURN_P (nlabel))
1573 ++LABEL_NUSES (nlabel);
1574
1575 /* Update labels in any REG_EQUAL note. */
1576 if ((note = find_reg_note (jump, REG_EQUAL, NULL_RTX)) != NULL_RTX)
1577 {
1578 if (ANY_RETURN_P (nlabel)
1579 || (invert && !invert_exp_1 (XEXP (note, 0), jump)))
1580 remove_note (jump, note);
1581 else
1582 {
1583 redirect_exp_1 (&XEXP (note, 0), olabel, nlabel, jump);
1584 confirm_change_group ();
1585 }
1586 }
1587
1588 /* Handle the case where we had a conditional crossing jump to a return
1589 label and are now changing it into a direct conditional return.
1590 The jump is no longer crossing in that case. */
1591 if (ANY_RETURN_P (nlabel))
1592 CROSSING_JUMP_P (jump) = 0;
1593
1594 if (!ANY_RETURN_P (olabel)
1595 && --LABEL_NUSES (olabel) == 0 && delete_unused > 0
1596 /* Undefined labels will remain outside the insn stream. */
1597 && INSN_UID (olabel))
1598 delete_related_insns (olabel);
1599 if (invert)
1600 invert_br_probabilities (jump);
1601 }
1602
1603 /* Invert the jump condition X contained in jump insn INSN. Accrue the
1604 modifications into the change group. Return nonzero for success. */
1605 static int
1606 invert_exp_1 (rtx x, rtx_insn *insn)
1607 {
1608 RTX_CODE code = GET_CODE (x);
1609
1610 if (code == IF_THEN_ELSE)
1611 {
1612 rtx comp = XEXP (x, 0);
1613 rtx tem;
1614 enum rtx_code reversed_code;
1615
1616 /* We can do this in two ways: The preferable way, which can only
1617 be done if this is not an integer comparison, is to reverse
1618 the comparison code. Otherwise, swap the THEN-part and ELSE-part
1619 of the IF_THEN_ELSE. If we can't do either, fail. */
1620
1621 reversed_code = reversed_comparison_code (comp, insn);
1622
1623 if (reversed_code != UNKNOWN)
1624 {
1625 validate_change (insn, &XEXP (x, 0),
1626 gen_rtx_fmt_ee (reversed_code,
1627 GET_MODE (comp), XEXP (comp, 0),
1628 XEXP (comp, 1)),
1629 1);
1630 return 1;
1631 }
1632
1633 tem = XEXP (x, 1);
1634 validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
1635 validate_change (insn, &XEXP (x, 2), tem, 1);
1636 return 1;
1637 }
1638 else
1639 return 0;
1640 }
1641
1642 /* Invert the condition of the jump JUMP, and make it jump to label
1643 NLABEL instead of where it jumps now. Accrue changes into the
1644 change group. Return false if we didn't see how to perform the
1645 inversion and redirection. */
1646
1647 int
1648 invert_jump_1 (rtx_jump_insn *jump, rtx nlabel)
1649 {
1650 rtx x = pc_set (jump);
1651 int ochanges;
1652 int ok;
1653
1654 ochanges = num_validated_changes ();
1655 if (x == NULL)
1656 return 0;
1657 ok = invert_exp_1 (SET_SRC (x), jump);
1658 gcc_assert (ok);
1659
1660 if (num_validated_changes () == ochanges)
1661 return 0;
1662
1663 /* redirect_jump_1 will fail if nlabel == olabel, and the current use is
1664 in Pmode, so checking this is not merely an optimization. */
1665 return nlabel == JUMP_LABEL (jump) || redirect_jump_1 (jump, nlabel);
1666 }
1667
1668 /* Invert the condition of the jump JUMP, and make it jump to label
1669 NLABEL instead of where it jumps now. Return true if successful. */
1670
1671 int
1672 invert_jump (rtx_jump_insn *jump, rtx nlabel, int delete_unused)
1673 {
1674 rtx olabel = JUMP_LABEL (jump);
1675
1676 if (invert_jump_1 (jump, nlabel) && apply_change_group ())
1677 {
1678 redirect_jump_2 (jump, olabel, nlabel, delete_unused, 1);
1679 return 1;
1680 }
1681 cancel_changes (0);
1682 return 0;
1683 }
1684
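/* Editorial usage sketch (not from the original sources): if-conversion style
   callers flip a branch so that the taken and fall-through paths swap, e.g.

     if (invert_jump (as_a <rtx_jump_insn *> (cond_jump), other_label, 0))
       {
	 /* Condition reversed and target changed; branch probabilities were
	    updated by redirect_jump_2 via invert_br_probabilities.  */
       }

   cond_jump and other_label are placeholders for the caller's conditional
   jump insn and the label of the block that previously fell through.  */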
1685 \f
1686 /* Like rtx_equal_p except that it considers two REGs as equal
1687 if they renumber to the same value and considers two commutative
1688 operations to be the same if the order of the operands has been
1689 reversed. */
1690
1691 int
1692 rtx_renumbered_equal_p (const_rtx x, const_rtx y)
1693 {
1694 int i;
1695 const enum rtx_code code = GET_CODE (x);
1696 const char *fmt;
1697
1698 if (x == y)
1699 return 1;
1700
1701 if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
1702 && (REG_P (y) || (GET_CODE (y) == SUBREG
1703 && REG_P (SUBREG_REG (y)))))
1704 {
1705 int reg_x = -1, reg_y = -1;
1706 poly_int64 byte_x = 0, byte_y = 0;
1707 struct subreg_info info;
1708
1709 if (GET_MODE (x) != GET_MODE (y))
1710 return 0;
1711
1712 /* If we haven't done any renumbering, don't
1713 make any assumptions. */
1714 if (reg_renumber == 0)
1715 return rtx_equal_p (x, y);
1716
1717 if (code == SUBREG)
1718 {
1719 reg_x = REGNO (SUBREG_REG (x));
1720 byte_x = SUBREG_BYTE (x);
1721
1722 if (reg_renumber[reg_x] >= 0)
1723 {
1724 subreg_get_info (reg_renumber[reg_x],
1725 GET_MODE (SUBREG_REG (x)), byte_x,
1726 GET_MODE (x), &info);
1727 if (!info.representable_p)
1728 return 0;
1729 reg_x = info.offset;
1730 byte_x = 0;
1731 }
1732 }
1733 else
1734 {
1735 reg_x = REGNO (x);
1736 if (reg_renumber[reg_x] >= 0)
1737 reg_x = reg_renumber[reg_x];
1738 }
1739
1740 if (GET_CODE (y) == SUBREG)
1741 {
1742 reg_y = REGNO (SUBREG_REG (y));
1743 byte_y = SUBREG_BYTE (y);
1744
1745 if (reg_renumber[reg_y] >= 0)
1746 {
1747 subreg_get_info (reg_renumber[reg_y],
1748 GET_MODE (SUBREG_REG (y)), byte_y,
1749 GET_MODE (y), &info);
1750 if (!info.representable_p)
1751 return 0;
1752 reg_y = info.offset;
1753 byte_y = 0;
1754 }
1755 }
1756 else
1757 {
1758 reg_y = REGNO (y);
1759 if (reg_renumber[reg_y] >= 0)
1760 reg_y = reg_renumber[reg_y];
1761 }
1762
1763 return reg_x >= 0 && reg_x == reg_y && known_eq (byte_x, byte_y);
1764 }
1765
1766 /* Now we have disposed of all the cases
1767 in which different rtx codes can match. */
1768 if (code != GET_CODE (y))
1769 return 0;
1770
1771 switch (code)
1772 {
1773 case PC:
1774 case CC0:
1775 case ADDR_VEC:
1776 case ADDR_DIFF_VEC:
1777 CASE_CONST_UNIQUE:
1778 return 0;
1779
1780 case LABEL_REF:
1781 /* We can't assume nonlocal labels have their following insns yet. */
1782 if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
1783 return label_ref_label (x) == label_ref_label (y);
1784
1785 /* Two label-refs are equivalent if they point at labels
1786 in the same position in the instruction stream. */
1787 else
1788 {
1789 rtx_insn *xi = next_nonnote_nondebug_insn (label_ref_label (x));
1790 rtx_insn *yi = next_nonnote_nondebug_insn (label_ref_label (y));
1791 while (xi && LABEL_P (xi))
1792 xi = next_nonnote_nondebug_insn (xi);
1793 while (yi && LABEL_P (yi))
1794 yi = next_nonnote_nondebug_insn (yi);
1795 return xi == yi;
1796 }
1797
1798 case SYMBOL_REF:
1799 return XSTR (x, 0) == XSTR (y, 0);
1800
1801 case CODE_LABEL:
1802 /* If we didn't match EQ equality above, they aren't the same. */
1803 return 0;
1804
1805 default:
1806 break;
1807 }
1808
1809 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
1810
1811 if (GET_MODE (x) != GET_MODE (y))
1812 return 0;
1813
1814 /* MEMs referring to different address spaces are not equivalent. */
1815 if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
1816 return 0;
1817
1818 /* For commutative operations, the RTXs match if the operands match in any
1819 order.  Also handle the simple binary and unary cases without a loop. */
1820 if (targetm.commutative_p (x, UNKNOWN))
1821 return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
1822 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
1823 || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
1824 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
1825 else if (NON_COMMUTATIVE_P (x))
1826 return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
1827 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
1828 else if (UNARY_P (x))
1829 return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));
1830
1831 /* Compare the elements.  If any pair of corresponding elements
1832 fails to match, return 0 for the whole thing. */
1833
1834 fmt = GET_RTX_FORMAT (code);
1835 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1836 {
1837 int j;
1838 switch (fmt[i])
1839 {
1840 case 'w':
1841 if (XWINT (x, i) != XWINT (y, i))
1842 return 0;
1843 break;
1844
1845 case 'i':
1846 if (XINT (x, i) != XINT (y, i))
1847 {
1848 if (((code == ASM_OPERANDS && i == 6)
1849 || (code == ASM_INPUT && i == 1)))
1850 break;
1851 return 0;
1852 }
1853 break;
1854
1855 case 'p':
1856 if (maybe_ne (SUBREG_BYTE (x), SUBREG_BYTE (y)))
1857 return 0;
1858 break;
1859
1860 case 't':
1861 if (XTREE (x, i) != XTREE (y, i))
1862 return 0;
1863 break;
1864
1865 case 's':
1866 if (strcmp (XSTR (x, i), XSTR (y, i)))
1867 return 0;
1868 break;
1869
1870 case 'e':
1871 if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
1872 return 0;
1873 break;
1874
1875 case 'u':
1876 if (XEXP (x, i) != XEXP (y, i))
1877 return 0;
1878 /* Fall through. */
1879 case '0':
1880 break;
1881
1882 case 'E':
1883 if (XVECLEN (x, i) != XVECLEN (y, i))
1884 return 0;
1885 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1886 if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
1887 return 0;
1888 break;
1889
1890 default:
1891 gcc_unreachable ();
1892 }
1893 }
1894 return 1;
1895 }
1896 \f
1897 /* If X is a hard register or equivalent to one or a subregister of one,
1898 return the hard register number. If X is a pseudo register that was not
1899 assigned a hard register, return the pseudo register number. Otherwise,
1900 return -1. Any rtx is valid for X. */
1901
1902 int
1903 true_regnum (const_rtx x)
1904 {
1905 if (REG_P (x))
1906 {
1907 if (REGNO (x) >= FIRST_PSEUDO_REGISTER
1908 && (lra_in_progress || reg_renumber[REGNO (x)] >= 0))
1909 return reg_renumber[REGNO (x)];
1910 return REGNO (x);
1911 }
1912 if (GET_CODE (x) == SUBREG)
1913 {
1914 int base = true_regnum (SUBREG_REG (x));
1915 if (base >= 0
1916 && base < FIRST_PSEUDO_REGISTER)
1917 {
1918 struct subreg_info info;
1919
1920 subreg_get_info (lra_in_progress
1921 ? (unsigned) base : REGNO (SUBREG_REG (x)),
1922 GET_MODE (SUBREG_REG (x)),
1923 SUBREG_BYTE (x), GET_MODE (x), &info);
1924
1925 if (info.representable_p)
1926 return base + info.offset;
1927 }
1928 }
1929 return -1;
1930 }
1931
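/* Editorial example (illustrative, not part of the original file): after
   register allocation, true_regnum of a pseudo that received a hard register
   returns that hard register's number.  For a SUBREG of a hard register it
   folds in the subreg offset when the subreg is representable; e.g. on a
   32-bit target where a DImode value occupies hard registers 0 and 1,
   true_regnum of (subreg:SI (reg:DI 0) 4) is 1.  */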
1932 /* Return regno of the register REG and handle subregs too. */
1933 unsigned int
1934 reg_or_subregno (const_rtx reg)
1935 {
1936 if (GET_CODE (reg) == SUBREG)
1937 reg = SUBREG_REG (reg);
1938 gcc_assert (REG_P (reg));
1939 return REGNO (reg);
1940 }