change serde format for BigUint to use a hex string instead of an array of u32 chunks
register_allocator/src/loc_set.rs
use crate::{
    error::{Error, Result},
    interned::{GlobalState, Intern, InternTarget, Interned},
    loc::{Loc, LocFields, LocKind, Ty},
};
use enum_map::{enum_map, EnumMap};
use num_bigint::BigUint;
use num_traits::{Num, Zero};
use once_cell::race::OnceBox;
use serde::{de, Deserialize, Serialize};
use std::{
    borrow::{Borrow, Cow},
    cell::Cell,
    collections::{
        btree_map::{self, Entry},
        BTreeMap,
    },
    fmt,
    hash::Hash,
    iter::FusedIterator,
    mem,
    num::NonZeroU32,
    ops::{BitAnd, BitAndAssign, BitOr, BitOrAssign, BitXor, BitXorAssign, Range, Sub, SubAssign},
};

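/// lazily-initialized shared zero, returned by `LocSet::starts` for a
/// `reg_len` that has no entry in `starts_map`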
#[inline]
fn zero_biguint<'a>() -> &'a BigUint {
    static ZERO: OnceBox<BigUint> = OnceBox::new();
    ZERO.get_or_init(
        #[cold]
        || BigUint::zero().into(),
    )
}

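/// serde adapter that reads/writes a `BigUint` as a `"0x..."` hex string
/// instead of num-bigint's default array of u32 chunks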
#[derive(Debug, Clone)]
struct BigUintAsHexString<'a>(Cow<'a, BigUint>);

impl<'de, 'a> Deserialize<'de> for BigUintAsHexString<'a> {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        let text = String::deserialize(deserializer)?;
        if let Some(text) = text.strip_prefix("0x") {
            Ok(Self(Cow::Owned(
                BigUint::from_str_radix(text, 0x10).map_err(<D::Error as de::Error>::custom)?,
            )))
        } else {
            Err(<D::Error as de::Error>::custom(
                "expected hex string to start with `0x`",
            ))
        }
    }
}

impl Serialize for BigUintAsHexString<'_> {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        format!("{:#x}", &*self.0).serialize(serializer)
    }
}

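/// proxy type used for serializing and deserializing [`LocSet`]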
#[derive(Deserialize, Serialize)]
struct LocSetSerialized<'a> {
    starts_map: BTreeMap<NonZeroU32, BTreeMap<LocKind, BigUintAsHexString<'a>>>,
}

impl TryFrom<LocSetSerialized<'_>> for LocSet {
    type Error = Error;

    fn try_from(value: LocSetSerialized) -> Result<Self, Self::Error> {
        Self::from_starts_map(
            value
                .starts_map
                .into_iter()
                .map(|(k, mut v)| {
                    let v = enum_map! {
                        k => v.remove(&k).map_or_else(BigUint::zero, |v| v.0.into_owned())
                    };
                    (k, v)
                })
                .collect(),
        )
    }
}

#[derive(Clone, Default, PartialEq, Eq, Hash, Deserialize)]
#[serde(try_from = "LocSetSerialized")]
pub struct LocSet {
    starts_map: BTreeMap<NonZeroU32, EnumMap<LocKind, BigUint>>,
}

impl Serialize for LocSet {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        LocSetSerialized {
            starts_map: self
                .starts_map
                .iter()
                .map(|(&k, v)| {
                    let v = v
                        .iter()
                        .map(|(k, v)| (k, BigUintAsHexString(Cow::Borrowed(v))))
                        .collect::<BTreeMap<_, _>>();
                    (k, v)
                })
                .collect(),
        }
        .serialize(serializer)
    }
}

impl<'a> arbitrary::Arbitrary<'a> for LocSet {
    fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result<Self> {
        u.arbitrary_iter()?.collect()
    }
}

/// computes same value as `a & !b`, but more efficiently
fn and_not<A: Borrow<BigUint>, B>(a: A, b: B) -> BigUint
where
    BigUint: for<'a> BitXor<A, Output = BigUint>,
    B: for<'a> BitAnd<&'a BigUint, Output = BigUint>,
{
    // use logical equivalent that avoids needing to use BigInt
    (b & a.borrow()) ^ a
}

impl From<Loc> for LocSet {
    fn from(value: Loc) -> Self {
        Self::from_iter([value])
    }
}

impl LocSet {
    pub fn arbitrary_with_ty(
        ty: Ty,
        u: &mut arbitrary::Unstructured<'_>,
    ) -> arbitrary::Result<Self> {
        let kinds = ty.base_ty.loc_kinds();
        type Mask = u128;
        let kinds: Vec<_> = if kinds.len() > Mask::BITS as usize {
            let chosen_kinds = kinds
                .iter()
                .zip(u.arbitrary_iter::<bool>()?)
                .filter(|(_, cond)| !matches!(cond, Ok(false)))
                .map(|(&kind, cond)| {
                    cond?;
                    Ok(kind)
                })
                .collect::<arbitrary::Result<Vec<_>>>()?;
            if chosen_kinds.is_empty() {
                vec![*u.choose(kinds)?]
            } else {
                chosen_kinds
            }
        } else {
            let max_mask = Mask::wrapping_shl(1, kinds.len() as u32).wrapping_sub(1);
            let mask = u.int_in_range(1..=max_mask)?; // non-zero
            kinds
                .iter()
                .enumerate()
                .filter_map(|(idx, &kind)| {
                    if mask & (1 << idx) != 0 {
                        Some(kind)
                    } else {
                        None
                    }
                })
                .collect()
        };
        let mut starts = EnumMap::<LocKind, BigUint>::default();
        let mut all_zero = true;
        for kind in kinds {
            let bit_count = Loc::max_start(kind, ty.reg_len)? + 1;
            let byte_count = (bit_count + u8::BITS - 1) / u8::BITS;
            let bytes = u.bytes(byte_count as usize)?;
            starts[kind] = BigUint::from_bytes_le(bytes);
            starts[kind] &= (BigUint::from(1u8) << bit_count) - 1u8;
            all_zero &= starts[kind].is_zero();
        }
        if all_zero {
            Ok(Loc::arbitrary_with_ty(ty, u)?.into())
        } else {
            Ok(Self::from_starts_map_iter_unchecked([(ty.reg_len, starts)]))
        }
    }
    pub fn starts(&self, reg_len: NonZeroU32, kind: LocKind) -> &BigUint {
        self.starts_map
            .get(&reg_len)
            .map(|v| &v[kind])
            .unwrap_or_else(zero_biguint)
    }
    pub fn stops(&self, reg_len: NonZeroU32, kind: LocKind) -> BigUint {
        self.starts(reg_len, kind) << reg_len.get()
    }
    pub fn starts_map(&self) -> &BTreeMap<NonZeroU32, EnumMap<LocKind, BigUint>> {
        &self.starts_map
    }
    pub const fn new() -> Self {
        Self {
            starts_map: BTreeMap::new(),
        }
    }
    /// filters out empty entries, but doesn't do any other checks
    fn from_starts_map_iter_unchecked(
        starts_map: impl IntoIterator<Item = (NonZeroU32, EnumMap<LocKind, BigUint>)>,
    ) -> Self {
        Self {
            starts_map: starts_map
                .into_iter()
                .filter(|(_, starts)| !starts.iter().all(|(_, starts)| starts.is_zero()))
                .collect(),
        }
    }
    fn for_each_reg_len_filtering_out_empty_entries(
        &mut self,
        mut f: impl FnMut(NonZeroU32, &mut EnumMap<LocKind, BigUint>),
    ) {
        self.starts_map.retain(|&reg_len, starts| {
            f(reg_len, starts);
            !Self::is_entry_empty(starts)
        });
    }
    /// helper for binary operations that keeps Locs not present in rhs
    fn bin_op_keep_helper(
        &mut self,
        rhs: &Self,
        mut f: impl FnMut(NonZeroU32, &mut EnumMap<LocKind, BigUint>, &EnumMap<LocKind, BigUint>),
    ) {
        rhs.starts_map.iter().for_each(|(&reg_len, rhs_starts)| {
            match self.starts_map.entry(reg_len) {
                Entry::Vacant(entry) => {
                    let mut lhs_starts = EnumMap::default();
                    f(reg_len, &mut lhs_starts, rhs_starts);
                    if !Self::is_entry_empty(&lhs_starts) {
                        entry.insert(lhs_starts);
                    }
                }
                Entry::Occupied(mut entry) => {
                    f(reg_len, entry.get_mut(), rhs_starts);
                    if Self::is_entry_empty(entry.get()) {
                        entry.remove();
                    }
                }
            }
        });
    }
    fn is_entry_empty(starts: &EnumMap<LocKind, BigUint>) -> bool {
        starts.iter().all(|(_, starts)| starts.is_zero())
    }
    pub fn from_starts_map(
        mut starts_map: BTreeMap<NonZeroU32, EnumMap<LocKind, BigUint>>,
    ) -> Result<Self> {
        let mut error = Ok(());
        starts_map.retain(|&reg_len, starts| {
            if error.is_err() {
                return false;
            }
            let mut any_locs = false;
            for (kind, starts) in starts {
                if !starts.is_zero() {
                    any_locs = true;
                    error = (|| {
                        // bits() is one past max bit set, so use >= rather than >
                        if starts.bits() >= Loc::max_start(kind, reg_len)? as u64 {
                            return Err(Error::StartNotInValidRange);
                        }
                        Ok(())
                    })();
                    if error.is_err() {
                        return false;
                    }
                }
            }
            any_locs
        });
        error?;
        Ok(Self { starts_map })
    }
    pub fn clear(&mut self) {
        self.starts_map.clear();
    }
    pub fn contains_exact(&self, value: Loc) -> bool {
        self.starts(value.reg_len, value.kind).bit(value.start as _)
    }
    pub fn insert(&mut self, value: Loc) -> bool {
        let starts = match self.starts_map.entry(value.reg_len) {
            Entry::Occupied(entry) => entry.into_mut(),
            Entry::Vacant(entry) => {
                entry.insert(Default::default())[value.kind].set_bit(value.start as u64, true);
                return true;
            }
        };
        let starts = &mut starts[value.kind];
        let retval = !starts.bit(value.start as u64);
        starts.set_bit(value.start as u64, true);
        retval
    }
    pub fn remove(&mut self, value: Loc) -> bool {
        let Entry::Occupied(mut entry) = self.starts_map.entry(value.reg_len) else {
            return false;
        };
        let starts = entry.get_mut();
        if starts[value.kind].bit(value.start as u64) {
            starts[value.kind].set_bit(value.start as u64, false);
            if starts.values().all(BigUint::is_zero) {
                entry.remove();
            }
            true
        } else {
            false
        }
    }
    pub fn is_empty(&self) -> bool {
        self.starts_map.is_empty()
    }
    pub fn iter(&self) -> Iter<'_> {
        Iter {
            internals: IterInternals::new(self.starts_map.iter()),
        }
    }
    pub fn len(&self) -> usize {
        let retval: u64 = self
            .starts_map
            .values()
            .map(|starts| starts.values().map(BigUint::count_ones).sum::<u64>())
            .sum();
        retval as usize
    }
    /// computes `self = &other - &self`
    pub fn sub_reverse_assign(&mut self, other: impl Borrow<Self>) {
        // TODO: make more efficient
        let other: &Self = other.borrow();
        *self = other - &*self;
    }
}

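/// iteration state for a single `reg_len` entry of `starts_map`: walks each
/// `LocKind`'s bit set and yields every set start position as a `Loc`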
#[derive(Clone, Debug)]
struct IterInternalsRest<StartsMapValueIter, Starts>
where
    StartsMapValueIter: Iterator<Item = (LocKind, Starts)>,
    Starts: Borrow<BigUint>,
{
    reg_len: NonZeroU32,
    starts_map_value_iter: StartsMapValueIter,
    kind: LocKind,
    starts: Starts,
    start_range: Range<u32>,
}

impl<StartsMapValueIter, Starts> IterInternalsRest<StartsMapValueIter, Starts>
where
    StartsMapValueIter: Iterator<Item = (LocKind, Starts)>,
    Starts: Borrow<BigUint>,
{
    fn new(reg_len: NonZeroU32, mut starts_map_value_iter: StartsMapValueIter) -> Option<Self> {
        loop {
            let (kind, starts) = starts_map_value_iter.next()?;
            let starts_ref: &BigUint = starts.borrow();
            let Some(start) = starts_ref.trailing_zeros() else {
                continue;
            };
            let start = start.try_into().expect("checked by LocSet constructors");
            let end = starts_ref
                .bits()
                .try_into()
                .expect("checked by LocSet constructors");
            return Some(Self {
                reg_len,
                starts_map_value_iter,
                kind,
                starts,
                start_range: start..end,
            });
        }
    }
    fn next(this: &mut Option<Self>) -> Option<Loc> {
        while let Some(Self {
            reg_len,
            starts_map_value_iter: _,
            kind,
            ref starts,
            ref mut start_range,
        }) = *this
        {
            let Some(start) = start_range.next() else {
                *this = Self::new(
                    reg_len,
                    this.take().expect("known to be Some").starts_map_value_iter,
                );
                continue;
            };
            if starts.borrow().bit(start as u64) {
                return Some(
                    Loc::new(LocFields {
                        kind,
                        start,
                        reg_len,
                    })
                    .expect("known to be valid"),
                );
            }
        }
        None
    }
}

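/// iteration state shared by `Iter` and `IntoIter`: walks `starts_map` and
/// delegates each entry to [`IterInternalsRest`]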
#[derive(Clone, Debug)]
struct IterInternals<StartsMapIter, RegLen, StartsMapValue, StartsMapValueIter, Starts>
where
    StartsMapIter: Iterator<Item = (RegLen, StartsMapValue)>,
    RegLen: Borrow<NonZeroU32>,
    StartsMapValue: IntoIterator<IntoIter = StartsMapValueIter>,
    StartsMapValueIter: Iterator<Item = (LocKind, Starts)>,
    Starts: Borrow<BigUint>,
{
    starts_map_iter: StartsMapIter,
    rest: Option<IterInternalsRest<StartsMapValueIter, Starts>>,
}

impl<StartsMapIter, RegLen, StartsMapValue, StartsMapValueIter, Starts>
    IterInternals<StartsMapIter, RegLen, StartsMapValue, StartsMapValueIter, Starts>
where
    StartsMapIter: Iterator<Item = (RegLen, StartsMapValue)>,
    RegLen: Borrow<NonZeroU32>,
    StartsMapValue: IntoIterator<IntoIter = StartsMapValueIter>,
    StartsMapValueIter: Iterator<Item = (LocKind, Starts)>,
    Starts: Borrow<BigUint>,
{
    fn new(starts_map_iter: StartsMapIter) -> Self {
        Self {
            starts_map_iter,
            rest: None,
        }
    }
    fn next(&mut self) -> Option<Loc> {
        loop {
            while self.rest.is_none() {
                let (reg_len, starts_map_value) = self.starts_map_iter.next()?;
                self.rest = IterInternalsRest::new(*reg_len.borrow(), starts_map_value.into_iter());
            }
            if let Some(loc) = IterInternalsRest::next(&mut self.rest) {
                return Some(loc);
            }
        }
    }
}

#[derive(Clone, Debug)]
pub struct Iter<'a> {
    internals: IterInternals<
        btree_map::Iter<'a, NonZeroU32, EnumMap<LocKind, BigUint>>,
        &'a NonZeroU32,
        &'a EnumMap<LocKind, BigUint>,
        enum_map::Iter<'a, LocKind, BigUint>,
        &'a BigUint,
    >,
}

impl Iterator for Iter<'_> {
    type Item = Loc;

    fn next(&mut self) -> Option<Self::Item> {
        self.internals.next()
    }
}

impl FusedIterator for Iter<'_> {}

pub struct IntoIter {
    internals: IterInternals<
        btree_map::IntoIter<NonZeroU32, EnumMap<LocKind, BigUint>>,
        NonZeroU32,
        EnumMap<LocKind, BigUint>,
        enum_map::IntoIter<LocKind, BigUint>,
        BigUint,
    >,
}

impl Iterator for IntoIter {
    type Item = Loc;

    fn next(&mut self) -> Option<Self::Item> {
        self.internals.next()
    }
}

impl FusedIterator for IntoIter {}

impl IntoIterator for LocSet {
    type Item = Loc;
    type IntoIter = IntoIter;

    fn into_iter(self) -> Self::IntoIter {
        IntoIter {
            internals: IterInternals::new(self.starts_map.into_iter()),
        }
    }
}

impl<'a> IntoIterator for &'a LocSet {
    type Item = Loc;
    type IntoIter = Iter<'a>;

    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}

impl Extend<Loc> for LocSet {
    fn extend<T: IntoIterator<Item = Loc>>(&mut self, iter: T) {
        iter.into_iter().for_each(|item| {
            self.insert(item);
        });
    }
}

impl FromIterator<Loc> for LocSet {
    fn from_iter<T: IntoIterator<Item = Loc>>(iter: T) -> Self {
        let mut retval = LocSet::new();
        retval.extend(iter);
        retval
    }
}

struct HexBigUint<'a>(&'a BigUint);

impl fmt::Debug for HexBigUint<'_> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{:#x}", self.0)
    }
}

struct LocSetStarts<'a>(&'a EnumMap<LocKind, BigUint>);

impl fmt::Debug for LocSetStarts<'_> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_map()
            .entries(self.0.iter().map(|(k, v)| (k, HexBigUint(v))))
            .finish()
    }
}

struct LocSetStartsMap<'a>(&'a BTreeMap<NonZeroU32, EnumMap<LocKind, BigUint>>);

impl fmt::Debug for LocSetStartsMap<'_> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_map()
            .entries(self.0.iter().map(|(k, v)| (k, LocSetStarts(v))))
            .finish()
    }
}

impl fmt::Debug for LocSet {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("LocSet")
            .field("starts_map", &LocSetStartsMap(&self.starts_map))
            .finish()
    }
}

macro_rules! forward_bin_op {
    (
        $bin_op:ident::$bin_op_fn:ident(),
        $bin_assign_op:ident::$bin_assign_op_fn:ident(),
        $bin_assign_rev_op_fn:ident(),
    ) => {
        impl $bin_assign_op<LocSet> for LocSet {
            fn $bin_assign_op_fn(&mut self, rhs: LocSet) {
                self.$bin_assign_op_fn(&rhs);
            }
        }

        impl $bin_op<&'_ LocSet> for LocSet {
            type Output = LocSet;

            fn $bin_op_fn(mut self, rhs: &'_ LocSet) -> Self::Output {
                self.$bin_assign_op_fn(rhs);
                self
            }
        }

        impl $bin_op<LocSet> for LocSet {
            type Output = LocSet;

            fn $bin_op_fn(mut self, rhs: LocSet) -> Self::Output {
                self.$bin_assign_op_fn(rhs);
                self
            }
        }

        impl $bin_op<LocSet> for &'_ LocSet {
            type Output = LocSet;

            fn $bin_op_fn(self, mut rhs: LocSet) -> Self::Output {
                rhs.$bin_assign_rev_op_fn(self);
                rhs
            }
        }

        const _: fn() = {
            fn _check<T>()
            where
                for<'a> T: $bin_op<T> + $bin_op<&'a T> + $bin_assign_op<T> + $bin_assign_op<&'a T>,
                for<'a, 'b> &'a T: $bin_op<T> + $bin_op<&'b T>,
            {
            }
            _check::<LocSet>
        };
    };
}

impl BitAnd<&'_ LocSet> for &'_ LocSet {
    type Output = LocSet;

    fn bitand(self, rhs: &'_ LocSet) -> Self::Output {
        LocSet::from_starts_map_iter_unchecked(self.starts_map.iter().map(|(&reg_len, starts)| {
            (
                reg_len,
                enum_map! {kind => (&starts[kind]).bitand(rhs.starts(reg_len, kind))},
            )
        }))
    }
}

impl BitAndAssign<&'_ LocSet> for LocSet {
    fn bitand_assign(&mut self, rhs: &'_ LocSet) {
        self.for_each_reg_len_filtering_out_empty_entries(|reg_len, starts| {
            for (kind, starts) in starts {
                starts.bitand_assign(rhs.starts(reg_len, kind));
            }
        });
    }
}

/// helper for binary operations that keeps Locs not present in rhs
macro_rules! impl_bin_op_keep {
    (
        $bin_op:ident::$bin_op_fn:ident(),
        $bin_assign_op:ident::$bin_assign_op_fn:ident(),
    ) => {
        impl $bin_op<&'_ LocSet> for &'_ LocSet {
            type Output = LocSet;

            fn $bin_op_fn(self, rhs: &'_ LocSet) -> Self::Output {
                let mut retval: LocSet = self.clone();
                retval.$bin_assign_op_fn(rhs);
                retval
            }
        }

        impl $bin_assign_op<&'_ LocSet> for LocSet {
            fn $bin_assign_op_fn(&mut self, rhs: &'_ LocSet) {
                self.bin_op_keep_helper(rhs, |_reg_len, lhs_starts, rhs_starts| {
                    for (kind, rhs_starts) in rhs_starts {
                        lhs_starts[kind].$bin_assign_op_fn(rhs_starts);
                    }
                });
            }
        }
    };
}

forward_bin_op! {
    BitAnd::bitand(),
    BitAndAssign::bitand_assign(),
    bitand_assign(),
}

impl_bin_op_keep! {
    BitOr::bitor(),
    BitOrAssign::bitor_assign(),
}

forward_bin_op! {
    BitOr::bitor(),
    BitOrAssign::bitor_assign(),
    bitor_assign(),
}

impl_bin_op_keep! {
    BitXor::bitxor(),
    BitXorAssign::bitxor_assign(),
}

forward_bin_op! {
    BitXor::bitxor(),
    BitXorAssign::bitxor_assign(),
    bitxor_assign(),
}

impl Sub<&'_ LocSet> for &'_ LocSet {
    type Output = LocSet;

    fn sub(self, rhs: &'_ LocSet) -> Self::Output {
        LocSet::from_starts_map_iter_unchecked(self.starts_map.iter().map(|(&reg_len, starts)| {
            (
                reg_len,
                enum_map! {kind => and_not(&starts[kind], rhs.starts(reg_len, kind))},
            )
        }))
    }
}

impl SubAssign<&'_ LocSet> for LocSet {
    fn sub_assign(&mut self, rhs: &'_ LocSet) {
        self.bin_op_keep_helper(rhs, |_reg_len, lhs_starts, rhs_starts| {
            for (kind, lhs_starts) in lhs_starts {
                let rhs_starts = &rhs_starts[kind];
                if rhs_starts.is_zero() {
                    continue;
                }
                *lhs_starts = and_not(mem::take(lhs_starts), rhs_starts);
            }
        });
    }
}

forward_bin_op! {
    Sub::sub(),
    SubAssign::sub_assign(),
    sub_reverse_assign(),
}

/// the largest number of Locs in `lhs` that a single Loc
/// from `rhs` can conflict with
#[derive(Clone)]
pub struct LocSetMaxConflictsWith<Rhs> {
    lhs: Interned<LocSet>,
    rhs: Rhs,
    // result is not included in equality or hash
    result: Cell<Option<u32>>,
}

impl<Rhs: Hash> Hash for LocSetMaxConflictsWith<Rhs> {
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        self.lhs.hash(state);
        self.rhs.hash(state);
    }
}

impl<Rhs: Eq> Eq for LocSetMaxConflictsWith<Rhs> {}

impl<Rhs: PartialEq> PartialEq for LocSetMaxConflictsWith<Rhs> {
    fn eq(&self, other: &Self) -> bool {
        self.lhs == other.lhs && self.rhs == other.rhs
    }
}

pub trait LocSetMaxConflictsWithTrait: Clone {
    fn intern(
        v: LocSetMaxConflictsWith<Self>,
        global_state: &GlobalState,
    ) -> Interned<LocSetMaxConflictsWith<Self>>;
    fn compute_result(lhs: &Interned<LocSet>, rhs: &Self, global_state: &GlobalState) -> u32;
    #[cfg(feature = "fuzzing")]
    fn reference_compute_result(
        lhs: &Interned<LocSet>,
        rhs: &Self,
        global_state: &GlobalState,
    ) -> u32;
}

impl LocSetMaxConflictsWithTrait for Loc {
    fn compute_result(lhs: &Interned<LocSet>, rhs: &Self, _global_state: &GlobalState) -> u32 {
        // now we do the equivalent of:
        // return lhs.iter().map(|loc| rhs.conflicts(loc) as u32).sum()
        let mut retval = 0;
        for (&lhs_reg_len, lhs_starts) in lhs.starts_map() {
            let lhs_starts = &lhs_starts[rhs.kind];
            if lhs_starts.is_zero() {
                continue;
            }
            // now we do the equivalent of:
            // retval += sum(rhs.start < lhs_start + lhs_reg_len
            //     and lhs_start < rhs.start + rhs.reg_len
            //     for lhs_start in lhs_starts)
            let lhs_stops = lhs_starts << lhs_reg_len.get();

            // find all the bit indexes `i` where `i < rhs.start + 1`
            let lt_rhs_start_plus_1 = (BigUint::from(1u32) << (rhs.start + 1)) - 1u32;

            // find all the bit indexes `i` where
            // `i < rhs.start + rhs.reg_len + lhs_reg_len`
            let lt_rhs_start_plus_rhs_reg_len_plus_reg_len =
                (BigUint::from(1u32) << (rhs.start + rhs.reg_len.get() + lhs_reg_len.get())) - 1u32;
            let lhs_stops_and_lt_rhs_start_plus_1 = &lhs_stops & lt_rhs_start_plus_1;
            let mut included = and_not(lhs_stops, lhs_stops_and_lt_rhs_start_plus_1);
            included &= lt_rhs_start_plus_rhs_reg_len_plus_reg_len;
            retval += included.count_ones() as u32;
        }
        retval
    }

    #[cfg(feature = "fuzzing")]
    fn reference_compute_result(
        lhs: &Interned<LocSet>,
        rhs: &Self,
        _global_state: &GlobalState,
    ) -> u32 {
        lhs.iter().map(|loc| rhs.conflicts(loc) as u32).sum::<u32>()
    }

    fn intern(
        v: LocSetMaxConflictsWith<Self>,
        global_state: &GlobalState,
    ) -> Interned<LocSetMaxConflictsWith<Self>> {
        v.into_interned(global_state)
    }
}

impl LocSetMaxConflictsWithTrait for Interned<LocSet> {
    fn compute_result(lhs: &Interned<LocSet>, rhs: &Self, global_state: &GlobalState) -> u32 {
        rhs.iter()
            .map(|loc| lhs.clone().max_conflicts_with(loc, global_state))
            .max()
            .unwrap_or(0)
    }

    #[cfg(feature = "fuzzing")]
    fn reference_compute_result(
        lhs: &Interned<LocSet>,
        rhs: &Self,
        global_state: &GlobalState,
    ) -> u32 {
        rhs.iter()
            .map(|loc| lhs.clone().reference_max_conflicts_with(loc, global_state))
            .max()
            .unwrap_or(0)
    }

    fn intern(
        v: LocSetMaxConflictsWith<Self>,
        global_state: &GlobalState,
    ) -> Interned<LocSetMaxConflictsWith<Self>> {
        v.into_interned(global_state)
    }
}

impl<Rhs: LocSetMaxConflictsWithTrait> LocSetMaxConflictsWith<Rhs> {
    pub fn lhs(&self) -> &Interned<LocSet> {
        &self.lhs
    }
    pub fn rhs(&self) -> &Rhs {
        &self.rhs
    }
    pub fn result(&self, global_state: &GlobalState) -> u32 {
        match self.result.get() {
            Some(v) => v,
            None => {
                let retval = Rhs::compute_result(&self.lhs, &self.rhs, global_state);
                self.result.set(Some(retval));
                retval
            }
        }
    }
    #[cfg(feature = "fuzzing")]
    pub fn reference_result(&self, global_state: &GlobalState) -> u32 {
        match self.result.get() {
            Some(v) => v,
            None => {
                let retval = Rhs::reference_compute_result(&self.lhs, &self.rhs, global_state);
                self.result.set(Some(retval));
                retval
            }
        }
    }
}

impl Interned<LocSet> {
    pub fn max_conflicts_with<Rhs>(self, rhs: Rhs, global_state: &GlobalState) -> u32
    where
        Rhs: LocSetMaxConflictsWithTrait,
        LocSetMaxConflictsWith<Rhs>: InternTarget,
    {
        LocSetMaxConflictsWithTrait::intern(
            LocSetMaxConflictsWith {
                lhs: self,
                rhs,
                result: Cell::default(),
            },
            global_state,
        )
        .result(global_state)
    }
    #[cfg(feature = "fuzzing")]
    pub fn reference_max_conflicts_with<Rhs>(self, rhs: Rhs, global_state: &GlobalState) -> u32
    where
        Rhs: LocSetMaxConflictsWithTrait,
        LocSetMaxConflictsWith<Rhs>: InternTarget,
    {
        LocSetMaxConflictsWithTrait::intern(
            LocSetMaxConflictsWith {
                lhs: self,
                rhs,
                result: Cell::default(),
            },
            global_state,
        )
        .reference_result(global_state)
    }
    pub fn conflicts_with<Rhs>(self, rhs: Rhs, global_state: &GlobalState) -> bool
    where
        Rhs: LocSetMaxConflictsWithTrait,
        LocSetMaxConflictsWith<Rhs>: InternTarget,
    {
        self.max_conflicts_with(rhs, global_state) != 0
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_and_not() {
        for a in 0..0x10u32 {
            for b in 0..0x10 {
                assert_eq!(
                    and_not(&BigUint::from(a), BigUint::from(b)),
                    (a & !b).into()
                );
            }
        }
    }
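
    // the following round-trip checks illustrate the hex-string serde format
    // introduced by this commit (a `"0x..."` string instead of num-bigint's
    // default array of u32 chunks); they are a sketch only, since they assume
    // `serde_json` is available as a dev-dependency, which this crate may not
    // actually declare
    #[test]
    fn test_biguint_hex_string_round_trip() {
        let value = BigUintAsHexString(Cow::Owned(BigUint::from(0x1234_5678_9abc_def0u64)));
        let text = serde_json::to_string(&value).unwrap();
        assert_eq!(text, r#""0x123456789abcdef0""#);
        let parsed: BigUintAsHexString<'_> = serde_json::from_str(&text).unwrap();
        assert_eq!(*parsed.0, *value.0);
    }

    #[test]
    fn test_empty_loc_set_serde_shape() {
        // an empty `LocSet` shows the overall wire shape of `LocSetSerialized`
        let text = serde_json::to_string(&LocSet::new()).unwrap();
        assert_eq!(text, r#"{"starts_map":{}}"#);
        let parsed: LocSet = serde_json::from_str(&text).unwrap();
        assert!(parsed.is_empty());
    }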
}