3 index::{LiveRangeIdx, SSAValIdx},
6 use std::{cmp::Ordering, collections::BTreeSet, iter::FusedIterator, ops::Range};
#[derive(Copy, Clone, Debug)]
/// a Range<ProgPoint> except that any overlapping ranges compare equal
///
/// Half-open by the visible math below: `start` is included, `end` is not
/// (see `len` and `overlaps`).
pub struct ProgRange {
    /// `true` when the range covers no program points.
    /// (Body elided in this view; presumably `self.len() == 0` or
    /// `self.start >= self.end` — confirm.)
    pub const fn is_empty(self) -> bool {
    /// Number of ProgPoints spanned. `saturating_sub` makes an inverted
    /// range (`end < start`) report length 0 instead of wrapping.
    pub const fn len(self) -> usize {
        self.end.as_usize().saturating_sub(self.start.as_usize())
    /// Whether `self` and `other` share at least one program point —
    /// the standard half-open interval intersection test; an empty range
    /// never overlaps anything.
    pub fn overlaps(self, other: Self) -> bool {
        self.start < other.end && other.start < self.end
    /// This range viewed as the equivalent `Range<usize>` over raw
    /// ProgPoint indices.
    pub const fn as_usize_range(self) -> Range<usize> {
        self.start.as_usize()..self.end.as_usize()
    /// Inverse of `as_usize_range`: rebuild a ProgRange from raw indices.
    pub const fn from_usize_range(value: Range<usize>) -> Self {
        start: ProgPoint::from_usize(value.start),
        end: ProgPoint::from_usize(value.end),
    /// Temporarily expose `self` as a mutable `Range<usize>`, run `f` on
    /// it, then write the (possibly advanced) range back into `self`.
    /// This is the hook that lets `ProgRangeIter` below delegate to
    /// `Range<usize>`'s own iterator methods.
    pub fn with_self_as_usize_range<R, F: FnOnce(&mut Range<usize>) -> R>(&mut self, f: F) -> R {
        let mut range = self.as_usize_range();
        let retval = f(&mut range);
        *self = Self::from_usize_range(range);
/// Eq is implemented manually (not derived) so equality matches the
/// overlap-based `Ord` below: overlapping ranges are "equal".
impl Eq for ProgRange {}

impl PartialEq for ProgRange {
    /// Defined via `cmp` so that `eq` can never disagree with the ordering.
    fn eq(&self, other: &Self) -> bool {
        self.cmp(other) == Ordering::Equal
impl PartialOrd for ProgRange {
    /// Body elided in this view; presumably the standard
    /// `Some(self.cmp(other))` delegation to the total `Ord` impl — confirm.
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
impl Ord for ProgRange {
    fn cmp(&self, other: &Self) -> Ordering {
        // Overlapping ranges compare as equal (the elided arm here
        // presumably returns `Ordering::Equal` — confirm); disjoint ranges
        // are ordered lexicographically by (start, end).
        //
        // NOTE(review): overlap-equality is not transitive, so this is not
        // a lawful total order in general. That appears deliberate (the
        // Hash check at the bottom of the file acknowledges the Ord impl
        // is unusual); it is only sound when the ranges being compared are
        // mutually non-overlapping, e.g. as BTreeSet keys used for
        // overlap lookups.
        if self.overlaps(*other) {
        (self.start, self.end).cmp(&(other.start, other.end))
/// Iterating a ProgRange yields every ProgPoint it covers, in order.
impl IntoIterator for ProgRange {
    type Item = ProgPoint;
    type IntoIter = ProgRangeIter;

    fn into_iter(self) -> Self::IntoIter {
        ProgRangeIter { range: self }
#[derive(Clone, Debug)]
/// Iterator over the ProgPoints of a ProgRange; created by
/// `ProgRange::into_iter`. Holds the not-yet-yielded remainder of the
/// range (field elided in this view).
pub struct ProgRangeIter {
impl Iterator for ProgRangeIter {
    type Item = ProgPoint;

    // Every override below delegates to the corresponding Range<usize>
    // method (via `with_self_as_usize_range` / `as_usize_range`) and maps
    // yielded indices back with `ProgPoint::from_usize`. The overrides
    // exist so the cheap O(1) Range specializations are used instead of
    // the default step-by-step Iterator implementations.

    fn next(&mut self) -> Option<Self::Item> {
        // (receiver line elided in this view; presumably `self.range`)
        .with_self_as_usize_range(|range| range.next())
        .map(ProgPoint::from_usize)

    /// Length is known exactly, so both bounds are `len()`.
    fn size_hint(&self) -> (usize, Option<usize>) {
        (self.len(), Some(self.len()))

    /// Delegated count — does not step through the range.
    fn count(self) -> usize {
        self.range.as_usize_range().count()

    fn last(self) -> Option<Self::Item> {
        // (receiver line elided; presumably `self.range.as_usize_range()
        // .last()` — confirm)
        .map(ProgPoint::from_usize)

    fn nth(&mut self, n: usize) -> Option<Self::Item> {
        // (receiver line elided; presumably `self.range`)
        .with_self_as_usize_range(|range| range.nth(n))
        .map(ProgPoint::from_usize)

    /// Forwards fold to Range<usize>, wrapping each index back into a
    /// ProgPoint before handing it to `f`.
    fn fold<B, F>(mut self, init: B, mut f: F) -> B
        F: FnMut(B, Self::Item) -> B,
        self.range.with_self_as_usize_range(|range| {
            range.fold(init, |a, b| f(a, ProgPoint::from_usize(b)))
impl DoubleEndedIterator for ProgRangeIter {
    // Back-end counterparts of the forward methods above: same delegation
    // to Range<usize>, same index-to-ProgPoint mapping.

    fn next_back(&mut self) -> Option<Self::Item> {
        // (receiver line elided in this view; presumably `self.range`)
        .with_self_as_usize_range(|range| range.next_back())
        .map(ProgPoint::from_usize)

    fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
        // (receiver line elided; presumably `self.range`)
        .with_self_as_usize_range(|range| range.nth_back(n))
        .map(ProgPoint::from_usize)

    /// Reverse fold, delegated to Range<usize>::rfold.
    fn rfold<B, F>(mut self, init: B, mut f: F) -> B
        F: FnMut(B, Self::Item) -> B,
        self.range.with_self_as_usize_range(|range| {
            range.rfold(init, |a, b| f(a, ProgPoint::from_usize(b)))
// Sound to mark fused: the underlying Range<usize> iterator keeps
// returning None once exhausted, and this iterator only delegates to it.
impl FusedIterator for ProgRangeIter {}
impl ExactSizeIterator for ProgRangeIter {
    /// Remaining length, taken straight from the underlying index range.
    fn len(&self) -> usize {
        self.range.as_usize_range().len()
/// ensure Hash isn't implemented for ProgRange,
/// since afaict there isn't a way to implement it consistently with Ord
fn _check_prog_range_is_not_hashable() {
    // Compile-time "negative impl" check via method-resolution priority:
    // the inherent `_f` on `S<T>` (available only when `T: Hash`) returns
    // Hashable<true> and shadows the trait method, which returns
    // Hashable<false>. The type ascription at the bottom therefore only
    // compiles while ProgRange does NOT implement Hash.
    struct Hashable<const HASHABLE: bool>;
    trait ProgRangeHashCheck: Sized {
        fn _f(self) -> Hashable<false> {
    // NOTE(review): the definition of `S` is elided in this view;
    // presumably a zero-sized wrapper like `struct S<T>([T; 0]);`, given
    // the `S::<ProgRange>([])` construction below — confirm.
    impl<T: std::hash::Hash> S<T> {
        fn _f(self) -> Hashable<true> {
    impl<T> ProgRangeHashCheck for S<T> {}
    let _: Hashable<false> = S::<ProgRange>([])._f();
#[derive(Clone, Debug)]
/// A contiguous range of program points associated with one SSA value
/// (presumably the span over which that value is live — confirm against
/// the code that constructs these).
pub struct LiveRange {
    // The program-point span covered by this live range.
    pub range: ProgRange,
    // Index of the SSA value this live range belongs to.
    pub ssa_val: SSAValIdx,
/// A group of live ranges sharing a single allocation decision
/// (presumably ranges that can all be assigned the same location —
/// confirm against the allocator that builds bundles).
pub struct LiveRangeBundle {
    // Member live ranges, by index; BTreeSet keeps them sorted and
    // deduplicated.
    pub live_ranges: BTreeSet<LiveRangeIdx>,
    // Location assigned to the whole bundle; `None` presumably means not
    // yet allocated — confirm.
    pub allocation: Option<Loc>,