use super::liveranges::SpillWeight;
use crate::cfg::{CFGInfo, CFGInfoCtx};
use crate::index::ContainerComparator;
use crate::indexset::IndexSet;
use crate::Vec2;
use crate::{
    define_index, Allocation, Block, Bump, Edit, Function, FxHashMap, FxHashSet, MachineEnv,
    Operand, Output, PReg, ProgPoint, RegClass, VReg,
};
use alloc::collections::BTreeMap;
use alloc::collections::VecDeque;
use alloc::string::String;
use alloc::vec::Vec;
use core::cmp::Ordering;
use core::fmt::Debug;
use core::ops::{Deref, DerefMut};
use smallvec::SmallVec;

/// A range of program points: half-open, including `from` and
/// excluding `to`.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct CodeRange {
    pub from: ProgPoint,
    pub to: ProgPoint,
}

impl CodeRange {
    #[inline(always)]
    pub fn is_empty(&self) -> bool {
        self.from >= self.to
    }
    #[inline(always)]
    pub fn contains(&self, other: &Self) -> bool {
        other.from >= self.from && other.to <= self.to
    }
    #[inline(always)]
    pub fn contains_point(&self, other: ProgPoint) -> bool {
        other >= self.from && other < self.to
    }
    #[inline(always)]
    pub fn overlaps(&self, other: &Self) -> bool {
        other.to > self.from && other.from < self.to
    }
    #[inline(always)]
    pub fn len(&self) -> usize {
        self.to.inst().index() - self.from.inst().index()
    }
    /// A one-point range covering exactly `pos`.
    #[inline(always)]
    pub fn singleton(pos: ProgPoint) -> CodeRange {
        CodeRange {
            from: pos,
            to: pos.next(),
        }
    }

    /// The smallest range covering both `self` and `other`.
    #[inline(always)]
    pub fn join(&self, other: CodeRange) -> Self {
        CodeRange {
            from: self.from.min(other.from),
            to: self.to.max(other.to),
        }
    }
}

impl core::cmp::PartialOrd for CodeRange {
    #[inline(always)]
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
impl core::cmp::Ord for CodeRange {
    // Note: any two overlapping ranges compare as `Equal`, so this is
    // not a strict total order; it exists to support BTree-based
    // lookups of overlapping ranges.
    #[inline(always)]
    fn cmp(&self, other: &Self) -> Ordering {
        if self.to <= other.from {
            Ordering::Less
        } else if self.from >= other.to {
            Ordering::Greater
        } else {
            Ordering::Equal
        }
    }
}
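
// Illustrative example (added; not part of the original source): the
// equal-on-overlap ordering above in action. Assumes `Inst` is
// exported at the crate root, as in regalloc2's public API.
#[cfg(test)]
mod code_range_order_example {
    use super::*;
    use crate::{Inst, ProgPoint};
    use core::cmp::Ordering;

    #[test]
    fn overlap_compares_equal() {
        let r = |from: usize, to: usize| CodeRange {
            from: ProgPoint::before(Inst::new(from)),
            to: ProgPoint::before(Inst::new(to)),
        };
        // Disjoint ranges order by position...
        assert_eq!(r(0, 2).cmp(&r(2, 4)), Ordering::Less);
        assert_eq!(r(2, 4).cmp(&r(0, 2)), Ordering::Greater);
        // ...but any two overlapping ranges compare as equal.
        assert!(r(0, 3).overlaps(&r(2, 4)));
        assert_eq!(r(0, 3).cmp(&r(2, 4)), Ordering::Equal);
    }
}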

define_index!(LiveBundleIndex, LiveBundles, LiveBundle);
define_index!(LiveRangeIndex, LiveRanges, LiveRange);
define_index!(SpillSetIndex, SpillSets, SpillSet);
define_index!(UseIndex);
define_index!(VRegIndex, VRegs, VRegData);
define_index!(PRegIndex);
define_index!(SpillSlotIndex);

pub type LiveBundleVec = Vec<LiveBundleIndex>;

/// An entry in a live-range list: the range bounds are stored inline
/// for fast comparison, alongside the index of the full `LiveRange`.
#[derive(Clone, Copy, Debug)]
pub struct LiveRangeListEntry {
    pub range: CodeRange,
    pub index: LiveRangeIndex,
}

pub type LiveRangeList = Vec2<LiveRangeListEntry, Bump>;
pub type UseList = Vec2<Use, Bump>;

#[derive(Clone, Debug)]
pub struct LiveRange {
    pub range: CodeRange,

    pub vreg: VRegIndex,
    pub bundle: LiveBundleIndex,
    pub uses_spill_weight_and_flags: u32,
    pub(crate) uses: UseList,
}

/// Flags stored in the top bits of
/// `LiveRange::uses_spill_weight_and_flags`.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u32)]
pub enum LiveRangeFlag {
    StartsAtDef = 1,
}

impl LiveRange {
    // `uses_spill_weight_and_flags` packs the flags into the top three
    // bits (bits 31..=29) and a compressed spill weight into the low
    // 29 bits.
    #[inline(always)]
    pub fn set_flag(&mut self, flag: LiveRangeFlag) {
        self.uses_spill_weight_and_flags |= (flag as u32) << 29;
    }
    #[inline(always)]
    pub fn clear_flag(&mut self, flag: LiveRangeFlag) {
        self.uses_spill_weight_and_flags &= !((flag as u32) << 29);
    }
    #[inline(always)]
    pub fn assign_flag(&mut self, flag: LiveRangeFlag, val: bool) {
        // Clear this flag's bit, preserving the spill weight in the
        // low 29 bits, then set the bit if requested.
        let bit = if val { (flag as u32) << 29 } else { 0 };
        self.uses_spill_weight_and_flags &= !((flag as u32) << 29);
        self.uses_spill_weight_and_flags |= bit;
    }
    #[inline(always)]
    pub fn has_flag(&self, flag: LiveRangeFlag) -> bool {
        self.uses_spill_weight_and_flags & ((flag as u32) << 29) != 0
    }
    #[inline(always)]
    pub fn flag_word(&self) -> u32 {
        self.uses_spill_weight_and_flags & 0xe000_0000
    }
    #[inline(always)]
    pub fn merge_flags(&mut self, flag_word: u32) {
        self.uses_spill_weight_and_flags |= flag_word;
    }
    #[inline(always)]
    pub fn uses_spill_weight(&self) -> SpillWeight {
        // The weight is stored shifted right by two; shift back up to
        // reconstitute the `f32` bits.
        let bits = (self.uses_spill_weight_and_flags & 0x1fff_ffff) << 2;
        SpillWeight::from_f32(f32::from_bits(bits))
    }
    #[inline(always)]
    pub fn set_uses_spill_weight(&mut self, weight: SpillWeight) {
        // The weight is a non-negative `f32`; dropping the sign bit
        // and the two lowest mantissa bits compresses it into 29 bits.
        let weight_bits = (weight.to_f32().to_bits() >> 2) & 0x1fff_ffff;
        self.uses_spill_weight_and_flags =
            (self.uses_spill_weight_and_flags & 0xe000_0000) | weight_bits;
    }
}
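
// Illustrative example (added; not part of the original source):
// round-trips the flag/weight packing above. Assumes `Inst` at the
// crate root and `Bump: Default` (implied by `#[derive(Default)]` on
// `Ctx` below).
#[cfg(test)]
mod live_range_pack_example {
    use super::super::liveranges::SpillWeight;
    use super::*;
    use crate::{Bump, Inst, ProgPoint};

    #[test]
    fn flags_and_weight_coexist() {
        let mut lr = LiveRange {
            range: CodeRange::singleton(ProgPoint::before(Inst::new(0))),
            vreg: VRegIndex::invalid(),
            bundle: LiveBundleIndex::invalid(),
            uses_spill_weight_and_flags: 0,
            uses: UseList::new_in(Bump::default()),
        };
        lr.set_flag(LiveRangeFlag::StartsAtDef);
        // 4.0 has zero low mantissa bits, so compression is lossless
        // for this value.
        lr.set_uses_spill_weight(SpillWeight::from_f32(4.0));
        assert!(lr.has_flag(LiveRangeFlag::StartsAtDef));
        assert_eq!(lr.uses_spill_weight().to_f32(), 4.0);
        // Clearing the flag leaves the weight intact.
        lr.assign_flag(LiveRangeFlag::StartsAtDef, false);
        assert!(!lr.has_flag(LiveRangeFlag::StartsAtDef));
        assert_eq!(lr.uses_spill_weight().to_f32(), 4.0);
    }
}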

#[derive(Clone, Copy, Debug)]
pub struct Use {
    pub operand: Operand,
    pub pos: ProgPoint,
    pub slot: u16,
    pub weight: u16,
}

impl Use {
    #[inline(always)]
    pub fn new(operand: Operand, pos: ProgPoint, slot: u16) -> Self {
        Self {
            operand,
            pos,
            slot,
            // The weight is computed and filled in when the use is
            // inserted into a live range.
            weight: 0,
        }
    }
}

#[derive(Clone, Debug)]
pub struct LiveBundle {
    pub(crate) ranges: LiveRangeList,
    pub spillset: SpillSetIndex,
    pub allocation: Allocation,
    pub prio: u32,
    pub spill_weight_and_props: u32,
    pub limit: Option<u8>,
}

pub const BUNDLE_MAX_SPILL_WEIGHT: u32 = (1 << 28) - 1;
pub const MINIMAL_FIXED_BUNDLE_SPILL_WEIGHT: u32 = BUNDLE_MAX_SPILL_WEIGHT;
pub const MINIMAL_BUNDLE_SPILL_WEIGHT: u32 = BUNDLE_MAX_SPILL_WEIGHT - 1;
pub const BUNDLE_MAX_NORMAL_SPILL_WEIGHT: u32 = BUNDLE_MAX_SPILL_WEIGHT - 2;

impl LiveBundle {
    // `spill_weight_and_props` packs the cached spill weight into the
    // low 28 bits and four property flags into bits 31..=28.
    #[inline(always)]
    pub fn set_cached_spill_weight_and_props(
        &mut self,
        spill_weight: u32,
        minimal: bool,
        fixed: bool,
        fixed_def: bool,
        stack: bool,
    ) {
        debug_assert!(spill_weight <= BUNDLE_MAX_SPILL_WEIGHT);
        self.spill_weight_and_props = spill_weight
            | (if minimal { 1 << 31 } else { 0 })
            | (if fixed { 1 << 30 } else { 0 })
            | (if fixed_def { 1 << 29 } else { 0 })
            | (if stack { 1 << 28 } else { 0 });
    }

    #[inline(always)]
    pub fn cached_minimal(&self) -> bool {
        self.spill_weight_and_props & (1 << 31) != 0
    }

    #[inline(always)]
    pub fn cached_fixed(&self) -> bool {
        self.spill_weight_and_props & (1 << 30) != 0
    }

    #[inline(always)]
    pub fn cached_fixed_def(&self) -> bool {
        self.spill_weight_and_props & (1 << 29) != 0
    }

    #[inline(always)]
    pub fn cached_stack(&self) -> bool {
        self.spill_weight_and_props & (1 << 28) != 0
    }

    #[inline(always)]
    pub fn set_cached_fixed(&mut self) {
        self.spill_weight_and_props |= 1 << 30;
    }

    #[inline(always)]
    pub fn set_cached_fixed_def(&mut self) {
        self.spill_weight_and_props |= 1 << 29;
    }

    #[inline(always)]
    pub fn set_cached_stack(&mut self) {
        self.spill_weight_and_props |= 1 << 28;
    }

    #[inline(always)]
    pub fn cached_spill_weight(&self) -> u32 {
        self.spill_weight_and_props & BUNDLE_MAX_SPILL_WEIGHT
    }
}

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct BundleProperties {
    pub minimal: bool,
    pub fixed: bool,
}

/// The largest inline-storage capacity for element type `T` that still
/// fits within two pointer-sized words, so inlining does not make the
/// container any larger than its heap-allocated equivalent.
const fn no_bloat_capacity<T>() -> usize {
    core::mem::size_of::<usize>() * 2 / core::mem::size_of::<T>()
}
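
// Illustrative check (added; not part of the original source): two
// pointer-sized words hold four `u32`s on a 64-bit target.
#[cfg(all(test, target_pointer_width = "64"))]
mod no_bloat_capacity_example {
    #[test]
    fn four_u32s_fit() {
        assert_eq!(super::no_bloat_capacity::<u32>(), 4);
    }
}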

#[derive(Clone, Debug)]
pub struct SpillSet {
    pub slot: SpillSlotIndex,
    pub hint: PReg,
    pub class: RegClass,
    pub spill_bundle: LiveBundleIndex,
    pub required: bool,
    pub splits: u8,

    pub range: CodeRange,
}

pub(crate) const MAX_SPLITS_PER_SPILLSET: u8 = 2;

#[derive(Clone, Debug)]
pub struct VRegData {
    pub(crate) ranges: LiveRangeList,
    pub blockparam: Block,
    pub class: Option<RegClass>,
}

#[derive(Clone, Debug)]
pub struct PRegData {
    pub allocations: LiveRangeSet,
    pub is_stack: bool,
}

/// A fixup for an instruction that constrains the same vreg to more
/// than one fixed register: the value is copied from one operand
/// slot's register to another's.
#[derive(Clone, Debug)]
pub struct MultiFixedRegFixup {
    pub pos: ProgPoint,
    pub from_slot: u16,
    pub to_slot: u16,
    pub level: FixedRegFixupLevel,
    pub to_preg: PRegIndex,
    pub vreg: VRegIndex,
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub enum FixedRegFixupLevel {
    /// Inserted at `InsertMovePrio::MultiFixedRegInitial`, so it runs
    /// before any `Secondary` fixup at the same program point.
    Initial,
    /// Inserted at `InsertMovePrio::MultiFixedRegSecondary`.
    Secondary,
}

/// The "out" side of a blockparam data-flow edge: `from_vreg` leaves
/// `from_block` and becomes `to_vreg`, a blockparam of `to_block`.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(C)]
pub struct BlockparamOut {
    pub to_vreg: VRegIndex,
    pub to_block: Block,
    pub from_block: Block,
    pub from_vreg: VRegIndex,
}
impl BlockparamOut {
    /// Sort key, ordered by (from_vreg, from_block, to_block, to_vreg).
    #[inline(always)]
    pub fn key(&self) -> u128 {
        u128_key(
            self.from_vreg.raw_u32(),
            self.from_block.raw_u32(),
            self.to_block.raw_u32(),
            self.to_vreg.raw_u32(),
        )
    }
}

/// The "in" side of a blockparam data-flow edge: `to_vreg` is a
/// blockparam of `to_block`, receiving a value along the edge from
/// `from_block`.
#[derive(Clone, Debug)]
#[repr(C)]
pub struct BlockparamIn {
    pub from_block: Block,
    pub to_block: Block,
    pub to_vreg: VRegIndex,
}
impl BlockparamIn {
    /// Sort key, ordered by (to_vreg, to_block, from_block).
    #[inline(always)]
    pub fn key(&self) -> u128 {
        u128_key(
            self.to_vreg.raw_u32(),
            self.to_block.raw_u32(),
            self.from_block.raw_u32(),
            0,
        )
    }
}

impl LiveRanges {
    /// Create a new live range over `range`, with no vreg or bundle
    /// assignment and an empty use list.
    pub(crate) fn add(&mut self, range: CodeRange, bump: Bump) -> LiveRangeIndex {
        self.push(LiveRange {
            range,
            vreg: VRegIndex::invalid(),
            bundle: LiveBundleIndex::invalid(),
            uses_spill_weight_and_flags: 0,
            uses: UseList::new_in(bump),
        })
    }
}

impl LiveBundles {
    /// Create a new, empty bundle.
    pub(crate) fn add(&mut self, bump: Bump) -> LiveBundleIndex {
        self.push(LiveBundle {
            allocation: Allocation::none(),
            ranges: LiveRangeList::new_in(bump),
            spillset: SpillSetIndex::invalid(),
            prio: 0,
            spill_weight_and_props: 0,
            limit: None,
        })
    }
}

impl VRegs {
    pub fn add(&mut self, reg: VReg, data: VRegData) -> VRegIndex {
        let idx = self.push(data);
        debug_assert_eq!(reg.vreg(), idx.index());
        idx
    }
}

impl core::ops::Index<VReg> for VRegs {
    type Output = VRegData;

    #[inline(always)]
    fn index(&self, idx: VReg) -> &Self::Output {
        &self.storage[idx.vreg()]
    }
}

impl core::ops::IndexMut<VReg> for VRegs {
    #[inline(always)]
    fn index_mut(&mut self, idx: VReg) -> &mut Self::Output {
        &mut self.storage[idx.vreg()]
    }
}

/// Allocator state that persists across runs so that its backing
/// allocations can be reused.
#[derive(Default)]
pub struct Ctx {
    pub(crate) cfginfo: CFGInfo,
    pub(crate) cfginfo_ctx: CFGInfoCtx,
    pub(crate) liveins: Vec<IndexSet>,
    pub(crate) liveouts: Vec<IndexSet>,
    pub(crate) blockparam_outs: Vec<BlockparamOut>,
    pub(crate) blockparam_ins: Vec<BlockparamIn>,

    pub(crate) ranges: LiveRanges,
    pub(crate) bundles: LiveBundles,
    pub(crate) spillsets: SpillSets,
    pub(crate) vregs: VRegs,
    pub(crate) pregs: Vec<PRegData>,
    pub(crate) allocation_queue: PrioQueue,

    pub(crate) spilled_bundles: Vec<LiveBundleIndex>,
    pub(crate) spillslots: Vec<SpillSlotData>,
    pub(crate) slots_by_class: [SpillSlotList; 3],

    pub(crate) extra_spillslots_by_class: [SmallVec<[Allocation; 2]>; 3],
    pub(crate) preferred_victim_by_class: [PReg; 3],

    pub(crate) multi_fixed_reg_fixups: Vec<MultiFixedRegFixup>,

    pub(crate) allocated_bundle_count: usize,

    pub(crate) debug_annotations: FxHashMap<ProgPoint, Vec<String>>,
    pub(crate) annotations_enabled: bool,

    pub(crate) conflict_set: FxHashSet<LiveBundleIndex>,

    pub output: Output,

    // Scratch structures, kept here so their allocations can be
    // reused from run to run.
    pub(crate) scratch_conflicts: LiveBundleVec,
    pub(crate) scratch_bundle: LiveBundleVec,
    pub(crate) scratch_vreg_ranges: Vec<LiveRangeIndex>,
    pub(crate) scratch_spillset_pool: Vec<SpillSetRanges>,

    pub(crate) scratch_workqueue: VecDeque<Block>,

    pub(crate) scratch_operand_rewrites: FxHashMap<usize, Operand>,
    pub(crate) scratch_removed_lrs: FxHashSet<LiveRangeIndex>,
    pub(crate) scratch_removed_lrs_vregs: FxHashSet<VRegIndex>,
    pub(crate) scratch_workqueue_set: FxHashSet<Block>,

    pub(crate) scratch_bump: Bump,
}

impl Ctx {
    /// Hand out a cheap clone of the shared bump-allocator handle.
    pub(crate) fn bump(&self) -> Bump {
        self.scratch_bump.clone()
    }
}

/// Per-run environment: the function being compiled, the machine
/// environment, and the reusable allocator context.
pub struct Env<'a, F: Function> {
    pub func: &'a F,
    pub env: &'a MachineEnv,
    pub ctx: &'a mut Ctx,
}

impl<'a, F: Function> Deref for Env<'a, F> {
    type Target = Ctx;

    fn deref(&self) -> &Self::Target {
        self.ctx
    }
}

impl<'a, F: Function> DerefMut for Env<'a, F> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.ctx
    }
}

impl<'a, F: Function> Env<'a, F> {
    /// Reconstruct the full `VReg` (index plus class) for a vreg index.
    #[inline]
    pub fn vreg(&self, index: VRegIndex) -> VReg {
        let class = self.vregs[index]
            .class
            .expect("trying to get a VReg before observing its class");
        VReg::new(index.index(), class)
    }

    /// Record a vreg's register class the first time it is seen; a
    /// vreg must be used with a consistent class throughout.
    pub fn observe_vreg_class(&mut self, vreg: VReg) {
        let old_class = self.vregs[vreg].class.replace(vreg.class());
        debug_assert!(old_class == None || old_class == Some(vreg.class()));
    }

    /// A vreg counts as used once its class has been observed.
    pub fn is_vreg_used(&self, index: VRegIndex) -> bool {
        self.vregs[index].class.is_some()
    }
}

#[derive(Clone, Debug, Default)]
pub struct SpillSetRanges {
    pub btree: BTreeMap<LiveRangeKey, SpillSetIndex>,
}

#[derive(Clone, Debug)]
pub struct SpillSlotData {
    pub ranges: SpillSetRanges,
    pub slots: u32,
    pub alloc: Allocation,
}

#[derive(Clone, Debug, Default)]
pub struct SpillSlotList {
    pub slots: SmallVec<[SpillSlotIndex; 32]>,
    pub probe_start: usize,
}

impl SpillSlotList {
    /// Return the index after `index` in the slot list, wrapping
    /// around at the end (circular probing order).
    pub(crate) fn next_index(&self, index: usize) -> usize {
        debug_assert!(index < self.slots.len());
        if index == self.slots.len() - 1 {
            0
        } else {
            index + 1
        }
    }
}
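
// Illustrative example (added; not part of the original source):
// `next_index` walks the slot list circularly, which is what lets a
// probe start at `probe_start` and wrap around. Assumes the
// `define_index!` macro provides a `new` constructor.
#[cfg(test)]
mod spill_slot_list_example {
    use super::*;

    #[test]
    fn next_index_wraps() {
        let mut list = SpillSlotList::default();
        for i in 0..3 {
            list.slots.push(SpillSlotIndex::new(i));
        }
        assert_eq!(list.next_index(0), 1);
        assert_eq!(list.next_index(1), 2);
        assert_eq!(list.next_index(2), 0);
    }
}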

#[derive(Clone, Debug, Default)]
pub struct PrioQueue {
    pub heap: alloc::collections::BinaryHeap<PrioQueueEntry>,
}

/// Entry in the allocation queue. `BinaryHeap` is a max-heap and the
/// derived `Ord` compares `prio` first, so the highest-priority bundle
/// pops first, with ties broken by bundle index and then hint.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub struct PrioQueueEntry {
    pub prio: u32,
    pub bundle: LiveBundleIndex,
    pub hint: PReg,
}

#[derive(Clone, Debug)]
pub struct LiveRangeSet {
    pub btree: BTreeMap<LiveRangeKey, LiveRangeIndex>,
}

/// Key for BTree-based range maps. "Equality" means *overlap*, not
/// identity (see the `PartialEq`/`Ord` impls below), so a lookup with
/// any range finds an entry whose range overlaps it.
#[derive(Clone, Copy, Debug)]
pub struct LiveRangeKey {
    pub from: u32,
    pub to: u32,
}

impl LiveRangeKey {
    #[inline(always)]
    pub fn from_range(range: &CodeRange) -> Self {
        Self {
            from: range.from.to_index(),
            to: range.to.to_index(),
        }
    }

    #[inline(always)]
    pub fn to_range(&self) -> CodeRange {
        CodeRange {
            from: ProgPoint::from_index(self.from),
            to: ProgPoint::from_index(self.to),
        }
    }
}

impl core::cmp::PartialEq for LiveRangeKey {
    // Note: "equal" here means "overlapping". This is deliberately not
    // a lawful equivalence relation (it is not transitive); it is
    // sound for the BTreeMaps in this crate because the keys stored in
    // any one map never overlap each other.
    #[inline(always)]
    fn eq(&self, other: &Self) -> bool {
        self.to > other.from && self.from < other.to
    }
}
impl core::cmp::Eq for LiveRangeKey {}
impl core::cmp::PartialOrd for LiveRangeKey {
    #[inline(always)]
    fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
        Some(self.cmp(other))
    }
}
impl core::cmp::Ord for LiveRangeKey {
    #[inline(always)]
    fn cmp(&self, other: &Self) -> core::cmp::Ordering {
        if self.to <= other.from {
            core::cmp::Ordering::Less
        } else if self.from >= other.to {
            core::cmp::Ordering::Greater
        } else {
            core::cmp::Ordering::Equal
        }
    }
}
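
// Illustrative example (added; not part of the original source):
// because overlapping keys compare as equal, a `BTreeMap` keyed by
// `LiveRangeKey` can be probed with any overlapping query range. This
// is sound only while the keys stored in the map are themselves
// non-overlapping.
#[cfg(test)]
mod live_range_key_example {
    use super::*;
    use alloc::collections::BTreeMap;

    #[test]
    fn overlap_probe() {
        let mut map: BTreeMap<LiveRangeKey, u32> = BTreeMap::new();
        map.insert(LiveRangeKey { from: 0, to: 4 }, 0);
        map.insert(LiveRangeKey { from: 10, to: 14 }, 1);
        // A query overlapping [0, 4) finds the first entry...
        assert_eq!(map.get(&LiveRangeKey { from: 3, to: 5 }), Some(&0));
        // ...and a query in the gap finds nothing.
        assert_eq!(map.get(&LiveRangeKey { from: 4, to: 10 }), None);
    }
}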

pub struct PrioQueueComparator<'a> {
    pub prios: &'a [usize],
}
impl<'a> ContainerComparator for PrioQueueComparator<'a> {
    type Ix = LiveBundleIndex;
    fn compare(&self, a: Self::Ix, b: Self::Ix) -> core::cmp::Ordering {
        self.prios[a.index()].cmp(&self.prios[b.index()])
    }
}

impl PrioQueue {
    #[inline(always)]
    pub fn insert(&mut self, bundle: LiveBundleIndex, prio: usize, hint: PReg) {
        self.heap.push(PrioQueueEntry {
            prio: prio as u32,
            bundle,
            hint,
        });
    }

    #[inline(always)]
    pub fn is_empty(&self) -> bool {
        self.heap.is_empty()
    }

    #[inline(always)]
    pub fn pop(&mut self) -> Option<(LiveBundleIndex, PReg)> {
        self.heap.pop().map(|entry| (entry.bundle, entry.hint))
    }
}
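
// Illustrative example (added; not part of the original source): the
// queue is a max-heap on `prio`, so the highest-priority bundle pops
// first. Assumes `define_index!` provides a `new` constructor.
#[cfg(test)]
mod prio_queue_example {
    use super::*;
    use crate::{PReg, RegClass};

    #[test]
    fn pops_highest_priority_first() {
        let mut q = PrioQueue::default();
        let hint = PReg::new(0, RegClass::Int);
        q.insert(LiveBundleIndex::new(0), 1, hint);
        q.insert(LiveBundleIndex::new(1), 5, hint);
        q.insert(LiveBundleIndex::new(2), 3, hint);
        assert_eq!(q.pop(), Some((LiveBundleIndex::new(1), hint)));
        assert_eq!(q.pop(), Some((LiveBundleIndex::new(2), hint)));
        assert_eq!(q.pop(), Some((LiveBundleIndex::new(0), hint)));
        assert!(q.is_empty());
    }
}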

impl LiveRangeSet {
    pub(crate) fn new() -> Self {
        Self {
            btree: BTreeMap::default(),
        }
    }
}

#[derive(Clone, Debug)]
pub struct InsertedMove {
    pub pos_prio: PosWithPrio,
    pub from_alloc: Allocation,
    pub to_alloc: Allocation,
    pub to_vreg: VReg,
}

/// Priorities for moves inserted at the same program point. Variants
/// are listed in ascending order; the discriminant becomes the `prio`
/// half of the sort key, so e.g. in-edge moves always precede out-edge
/// moves at a given point.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub enum InsertMovePrio {
    InEdgeMoves,
    Regular,
    MultiFixedRegInitial,
    MultiFixedRegSecondary,
    ReusedInput,
    OutEdgeMoves,
}

#[derive(Debug, Default)]
pub struct InsertedMoves {
    pub moves: Vec<InsertedMove>,
}

impl InsertedMoves {
    pub fn push(
        &mut self,
        pos: ProgPoint,
        prio: InsertMovePrio,
        from_alloc: Allocation,
        to_alloc: Allocation,
        to_vreg: VReg,
    ) {
        trace!(
            "insert_move: pos {:?} prio {:?} from_alloc {:?} to_alloc {:?} to_vreg {:?}",
            pos,
            prio,
            from_alloc,
            to_alloc,
            to_vreg
        );
        if from_alloc == to_alloc {
            trace!(" -> skipping move with same source and dest");
            return;
        }
        if let Some(from) = from_alloc.as_reg() {
            debug_assert_eq!(from.class(), to_vreg.class());
        }
        if let Some(to) = to_alloc.as_reg() {
            debug_assert_eq!(to.class(), to_vreg.class());
        }
        self.moves.push(InsertedMove {
            pos_prio: PosWithPrio {
                pos,
                prio: prio as u32,
            },
            from_alloc,
            to_alloc,
            to_vreg,
        });
    }
}
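
// Illustrative example (added; not part of the original source):
// `push` silently drops no-op moves whose source and destination
// allocations are identical.
#[cfg(test)]
mod inserted_moves_example {
    use super::*;
    use crate::{Allocation, Inst, PReg, ProgPoint, RegClass, VReg};

    #[test]
    fn same_alloc_moves_are_dropped() {
        let mut moves = InsertedMoves::default();
        let pos = ProgPoint::before(Inst::new(0));
        let v = VReg::new(0, RegClass::Int);
        let r0 = Allocation::reg(PReg::new(0, RegClass::Int));
        let r1 = Allocation::reg(PReg::new(1, RegClass::Int));
        moves.push(pos, InsertMovePrio::Regular, r0, r0, v);
        assert!(moves.moves.is_empty());
        moves.push(pos, InsertMovePrio::Regular, r0, r1, v);
        assert_eq!(moves.moves.len(), 1);
    }
}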

#[derive(Clone, Debug, Default)]
pub struct Edits {
    edits: Vec<(PosWithPrio, Edit)>,
}

impl Edits {
    #[inline(always)]
    pub fn with_capacity(n: usize) -> Self {
        Self {
            edits: Vec::with_capacity(n),
        }
    }

    #[inline(always)]
    pub fn len(&self) -> usize {
        self.edits.len()
    }

    #[inline(always)]
    pub fn iter(&self) -> impl Iterator<Item = &(PosWithPrio, Edit)> {
        self.edits.iter()
    }

    #[inline(always)]
    pub fn drain_edits(&mut self) -> impl Iterator<Item = (ProgPoint, Edit)> + '_ {
        self.edits.drain(..).map(|(pos, edit)| (pos.pos, edit))
    }

    /// Sort edits by program point, then by priority within a point.
    #[inline(always)]
    pub fn sort(&mut self) {
        self.edits.sort_by_key(|&(pos_prio, _)| pos_prio.key());
    }

    pub fn add(&mut self, pos_prio: PosWithPrio, from: Allocation, to: Allocation) {
        if from != to {
            if from.is_reg() && to.is_reg() {
                debug_assert_eq!(
                    from.as_reg().unwrap().class(),
                    to.as_reg().unwrap().class()
                );
            }
            self.edits.push((pos_prio, Edit::Move { from, to }));
        }
    }
}

/// A program point paired with a move priority; `key()` packs them so
/// that sorting orders by position first, then priority.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(C)]
pub struct PosWithPrio {
    pub prio: u32,
    pub pos: ProgPoint,
}

impl PosWithPrio {
    #[inline]
    pub fn key(self) -> u64 {
        u64_key(self.pos.to_index(), self.prio)
    }
}
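
// Illustrative example (added; not part of the original source): the
// packed key orders by position first and priority second, which is
// exactly the order `Edits::sort` relies on.
#[cfg(test)]
mod pos_with_prio_example {
    use super::*;
    use crate::ProgPoint;

    #[test]
    fn key_orders_by_pos_then_prio() {
        let p = |pos: u32, prio: u32| PosWithPrio {
            pos: ProgPoint::from_index(pos),
            prio,
        };
        assert!(p(1, 9).key() < p(2, 0).key()); // position dominates
        assert!(p(2, 0).key() < p(2, 1).key()); // priority breaks ties
    }
}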

#[derive(Clone, Copy, Debug, Default)]
#[cfg_attr(feature = "enable-serde", derive(serde::Serialize, serde::Deserialize))]
pub struct Stats {
    pub livein_blocks: usize,
    pub livein_iterations: usize,
    pub initial_liverange_count: usize,
    pub merged_bundle_count: usize,
    pub process_bundle_count: usize,
    pub process_bundle_reg_probes_fixed: usize,
    pub process_bundle_reg_success_fixed: usize,
    pub process_bundle_bounding_range_probe_start_any: usize,
    pub process_bundle_bounding_range_probes_any: usize,
    pub process_bundle_bounding_range_success_any: usize,
    pub process_bundle_reg_probe_start_any: usize,
    pub process_bundle_reg_probes_any: usize,
    pub process_bundle_reg_success_any: usize,
    pub evict_bundle_event: usize,
    pub evict_bundle_count: usize,
    pub splits: usize,
    pub splits_clobbers: usize,
    pub splits_hot: usize,
    pub splits_conflicts: usize,
    pub splits_defs: usize,
    pub splits_all: usize,
    pub final_liverange_count: usize,
    pub final_bundle_count: usize,
    pub spill_bundle_count: usize,
    pub spill_bundle_reg_probes: usize,
    pub spill_bundle_reg_success: usize,
    pub blockparam_ins_count: usize,
    pub blockparam_outs_count: usize,
    pub halfmoves_count: usize,
    pub edits_count: usize,
}

/// Pack two `u32`s into a `u64` sort key; the first argument occupies
/// the high bits, making it the primary sort criterion.
#[inline(always)]
pub fn u64_key(b: u32, a: u32) -> u64 {
    a as u64 | (b as u64) << 32
}
/// Pack four `u32`s into a `u128` sort key, most significant first.
#[inline(always)]
pub fn u128_key(d: u32, c: u32, b: u32, a: u32) -> u128 {
    a as u128 | (b as u128) << 32 | (c as u128) << 64 | (d as u128) << 96
}
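
// Illustrative example (added; not part of the original source): the
// first argument of each key helper lands in the most-significant
// bits, making it the primary sort criterion.
#[cfg(test)]
mod key_packing_example {
    use super::{u128_key, u64_key};

    #[test]
    fn first_argument_is_most_significant() {
        assert!(u64_key(1, 0xffff_ffff) < u64_key(2, 0));
        assert!(u128_key(1, 0, 0, 0) > u128_key(0, 9, 9, 9));
    }
}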