regalloc2/
checker.rs

/*
 * The following code is derived from `lib/src/checker.rs` in the
 * regalloc.rs project
 * (https://github.com/bytecodealliance/regalloc.rs). regalloc.rs is
 * also licensed under Apache-2.0 with the LLVM exception, as the rest
 * of regalloc2 is.
 */

//! Checker: verifies that spills/reloads/moves retain equivalent
//! dataflow to original, VReg-based code.
//!
//! The basic idea is that we track symbolic values as they flow
//! through spills and reloads.  The symbolic values represent
//! particular virtual registers in the original function body
//! presented to the register allocator. Any instruction in the
//! original function body (i.e., not added by the allocator)
//! conceptually generates a symbolic value "Vn" when storing to (or
//! modifying) a virtual register.
//!
//! A symbolic value is logically a *set of virtual registers*,
//! representing all virtual registers equal to the value in the given
//! storage slot at a given program point. This representation (as
//! opposed to tracking just one virtual register) is necessary
//! because the regalloc may implement moves in the source program
//! (via move instructions or blockparam assignments on edges) in
//! "intelligent" ways, taking advantage of values that are already in
//! the right place, so we need to know *all* names for a value.
//!
//! These symbolic values are precise but partial: in other words, if
//! a physical register is described as containing a virtual register
//! at a program point, it must actually contain the value of this
//! register (modulo any analysis bugs); but it may describe fewer
//! virtual registers even in cases where one *could* statically prove
//! that it contains a certain register, because the analysis is not
//! perfectly path-sensitive or value-sensitive. However, all
//! assignments *produced by our register allocator* should be
//! analyzed fully precisely. (This last point is important and bears
//! repeating: we only need to verify the programs that we produce,
//! not arbitrary programs.)
//!
//! Operand constraints (fixed register, register, stack, reuse, limit,
//! any) are also checked at each operand.
//!
//! ## Formal Definition
//!
//! The analysis lattice consists of the elements of 𝒫(V), the
//! powerset (set of all subsets) of V (the set of all virtual
//! registers). The ⊤ (top) value in the lattice is V itself, and the
//! ⊥ (bottom) value in the lattice is ∅ (the empty set). The lattice
//! ordering relation is the subset relation: S ≤ U iff S ⊆ U. These
//! definitions imply that the lattice meet-function (greatest lower
//! bound) is set-intersection.
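//!
//! As a small worked example of the meet (the register names here are
//! hypothetical): if one predecessor reaches a join point knowing that
//! some allocation holds { v0, v1 }, and another predecessor knows it
//! holds { v1, v2 }, then the merged state records { v1 } for that
//! allocation, since only v1 is known to be equal to the value on both
//! paths.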
//!
//! (For efficiency, we do not represent ⊤ by actually listing out all
//! virtual registers; we use a dedicated "top" marker instead. The
//! semantics are the same.)
//!
//! The dataflow analysis state at each program point (each point
//! before or after an instruction) is:
//!
//!   - map of: Allocation -> lattice value
//!
//! And the transfer functions for instructions are (where `A` is the
//! above map from allocated physical registers to lattice values):
//!
//!   - `Edit::Move` inserted by RA:       [ alloc_d := alloc_s ]
//!
//!       A' = A[alloc_d → A\[alloc_s\]]
//!
//!   - statement in pre-regalloc function [ V_i := op V_j, V_k, ... ]
//!     with allocated form                [ A_i := op A_j, A_k, ... ]
//!
//!       A' = { A_k → A\[A_k\] \ { V_i } for k ≠ i } ∪
//!            { A_i → { V_i } }
//!
//!     In other words, a statement, even after allocation, generates
//!     a symbol that corresponds to its original virtual-register
//!     def. Simultaneously, that same virtual register symbol is removed
//!     from all other allocs: they no longer carry the current value.
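//!
//!     For example (with hypothetical vregs and allocations): if
//!     `v2 := add v0, v1` is allocated as `r2 := add r0, r1` and the
//!     pre-state is A = { r0 → { v0, v2 }, r1 → { v1 } }, then the
//!     post-state is A' = { r0 → { v0 }, r1 → { v1 }, r2 → { v2 } }:
//!     r2 is redefined to hold exactly v2, and the now-stale v2 label
//!     is removed from every other allocation.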
//!
//!   - Parallel moves or blockparam-assignments in original program
//!                                       [ V_d1 := V_s1, V_d2 := V_s2, ... ]
//!
//!       A' = { A_k → subst(A\[A_k\]) for all k }
//!            where subst(S) removes symbols for overwritten virtual
//!            registers (V_d1 .. V_dn) and then adds V_di whenever
//!            V_si appeared prior to the removals.
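//!
//!     For example (hypothetical allocations and vregs): starting from
//!     A = { r0 → { v0 }, r1 → { v0, v1 } } and applying the parallel
//!     move [ v2 := v0, v0 := v1 ], the overwritten vregs v2 and v0 are
//!     removed from every set, then v2 is added wherever v0 appeared
//!     before the removals and v0 wherever v1 appeared, giving
//!     A' = { r0 → { v2 }, r1 → { v0, v1, v2 } }.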
//!
//! To check correctness, we first find the dataflow fixpoint with the
//! above lattice and transfer/meet functions. Then, at each op, we
//! examine the dataflow solution at the preceding program point, and
//! check that the allocation for each op arg (input/use) contains the
//! symbol corresponding to the original virtual register specified
//! for this arg.

#![allow(dead_code)]

use crate::{
    Allocation, AllocationKind, Block, Edit, Function, FxHashMap, FxHashSet, Inst, InstOrEdit,
    InstPosition, MachineEnv, Operand, OperandConstraint, OperandKind, OperandPos, Output, PReg,
    PRegSet, VReg,
};
use alloc::vec::Vec;
use alloc::{format, vec};
use core::default::Default;
use core::hash::Hash;
use core::ops::Range;
use core::result::Result;
use smallvec::{smallvec, SmallVec};

/// A set of errors detected by the regalloc checker.
#[derive(Clone, Debug)]
pub struct CheckerErrors {
    errors: Vec<CheckerError>,
}

/// A single error detected by the regalloc checker.
#[derive(Clone, Debug)]
pub enum CheckerError {
    MissingAllocation {
        inst: Inst,
        op: Operand,
    },
    UnknownValueInAllocation {
        inst: Inst,
        op: Operand,
        alloc: Allocation,
    },
    ConflictedValueInAllocation {
        inst: Inst,
        op: Operand,
        alloc: Allocation,
    },
    IncorrectValuesInAllocation {
        inst: Inst,
        op: Operand,
        alloc: Allocation,
        actual: FxHashSet<VReg>,
    },
    ConstraintViolated {
        inst: Inst,
        op: Operand,
        alloc: Allocation,
    },
    AllocationIsNotReg {
        inst: Inst,
        op: Operand,
        alloc: Allocation,
    },
    AllocationIsNotFixedReg {
        inst: Inst,
        op: Operand,
        alloc: Allocation,
    },
    AllocationIsNotReuse {
        inst: Inst,
        op: Operand,
        alloc: Allocation,
        expected_alloc: Allocation,
    },
    AllocationIsNotStack {
        inst: Inst,
        op: Operand,
        alloc: Allocation,
    },
    StackToStackMove {
        into: Allocation,
        from: Allocation,
    },
    AllocationOutsideLimit {
        inst: Inst,
        op: Operand,
        alloc: Allocation,
        range: Range<usize>,
    },
}

/// Abstract state for an allocation.
///
/// Equivalent to a set of virtual register names, with the
/// universe-set as top and empty set as bottom lattice element. The
/// meet-function is thus set intersection.
#[derive(Clone, Debug, PartialEq, Eq)]
enum CheckerValue {
    /// The lattice top-value: this value could be equivalent to any
    /// vreg (i.e., the universe set).
    Universe,
    /// The set of VRegs that this value is equal to.
    VRegs(FxHashSet<VReg>),
}

impl CheckerValue {
    fn vregs(&self) -> Option<&FxHashSet<VReg>> {
        match self {
            CheckerValue::Universe => None,
            CheckerValue::VRegs(vregs) => Some(vregs),
        }
    }

    fn vregs_mut(&mut self) -> Option<&mut FxHashSet<VReg>> {
        match self {
            CheckerValue::Universe => None,
            CheckerValue::VRegs(vregs) => Some(vregs),
        }
    }
}

impl Default for CheckerValue {
    fn default() -> CheckerValue {
        CheckerValue::Universe
    }
}

impl CheckerValue {
    /// Meet function of the abstract-interpretation value
    /// lattice, applied to `self` in place.
    fn meet_with(&mut self, other: &CheckerValue) {
        match (self, other) {
            (_, CheckerValue::Universe) => {
                // Nothing.
            }
            (this @ CheckerValue::Universe, _) => {
                *this = other.clone();
            }
            (CheckerValue::VRegs(my_vregs), CheckerValue::VRegs(other_vregs)) => {
                my_vregs.retain(|vreg| other_vregs.contains(vreg));
            }
        }
    }

    fn from_reg(reg: VReg) -> CheckerValue {
        CheckerValue::VRegs(core::iter::once(reg).collect())
    }

    fn remove_vreg(&mut self, reg: VReg) {
        match self {
            CheckerValue::Universe => {
                panic!("Cannot remove VReg from Universe set (we do not have the full list of vregs available)");
            }
            CheckerValue::VRegs(vregs) => {
                vregs.remove(&reg);
            }
        }
    }

    fn copy_vreg(&mut self, src: VReg, dst: VReg) {
        match self {
            CheckerValue::Universe => {
                // Nothing.
            }
            CheckerValue::VRegs(vregs) => {
                if vregs.contains(&src) {
                    vregs.insert(dst);
                }
            }
        }
    }

    fn empty() -> CheckerValue {
        CheckerValue::VRegs(FxHashSet::default())
    }
}

fn visit_all_vregs<F: Function, V: FnMut(VReg)>(f: &F, mut v: V) {
    for block in 0..f.num_blocks() {
        let block = Block::new(block);
        for inst in f.block_insns(block).iter() {
            for op in f.inst_operands(inst) {
                v(op.vreg());
            }
            if f.is_branch(inst) {
                for succ_idx in 0..f.block_succs(block).len() {
                    for &param in f.branch_blockparams(block, inst, succ_idx) {
                        v(param);
                    }
                }
            }
        }
        for &vreg in f.block_params(block) {
            v(vreg);
        }
    }
}

/// State that steps through program points as we scan over the instruction stream.
#[derive(Clone, Debug, PartialEq, Eq)]
enum CheckerState {
    Top,
    Allocations(FxHashMap<Allocation, CheckerValue>),
}

impl CheckerState {
    fn get_value(&self, alloc: &Allocation) -> Option<&CheckerValue> {
        match self {
            CheckerState::Top => None,
            CheckerState::Allocations(allocs) => allocs.get(alloc),
        }
    }

    fn get_values_mut(&mut self) -> impl Iterator<Item = &mut CheckerValue> {
        match self {
            CheckerState::Top => panic!("Cannot get mutable values iterator on Top state"),
            CheckerState::Allocations(allocs) => allocs.values_mut(),
        }
    }

    fn get_mappings(&self) -> impl Iterator<Item = (&Allocation, &CheckerValue)> {
        match self {
            CheckerState::Top => panic!("Cannot get mappings iterator on Top state"),
            CheckerState::Allocations(allocs) => allocs.iter(),
        }
    }

    fn get_mappings_mut(&mut self) -> impl Iterator<Item = (&Allocation, &mut CheckerValue)> {
        match self {
            CheckerState::Top => panic!("Cannot get mutable mappings iterator on Top state"),
            CheckerState::Allocations(allocs) => allocs.iter_mut(),
        }
    }

    /// Transition from a "top" (undefined/unanalyzed) state to an empty set of allocations.
    fn become_defined(&mut self) {
        match self {
            CheckerState::Top => *self = CheckerState::Allocations(FxHashMap::default()),
            _ => {}
        }
    }

    fn set_value(&mut self, alloc: Allocation, value: CheckerValue) {
        match self {
            CheckerState::Top => {
                panic!("Cannot set value on Top state");
            }
            CheckerState::Allocations(allocs) => {
                allocs.insert(alloc, value);
            }
        }
    }

    fn copy_vreg(&mut self, src: VReg, dst: VReg) {
        match self {
            CheckerState::Top => {
                // Nothing.
            }
            CheckerState::Allocations(allocs) => {
                for value in allocs.values_mut() {
                    value.copy_vreg(src, dst);
                }
            }
        }
    }

    fn remove_value(&mut self, alloc: &Allocation) {
        match self {
            CheckerState::Top => {
                panic!("Cannot remove value on Top state");
            }
            CheckerState::Allocations(allocs) => {
                allocs.remove(alloc);
            }
        }
    }

    fn initial() -> Self {
        CheckerState::Allocations(FxHashMap::default())
    }
}

impl Default for CheckerState {
    fn default() -> CheckerState {
        CheckerState::Top
    }
}

impl core::fmt::Display for CheckerValue {
    fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
        match self {
            CheckerValue::Universe => {
                write!(f, "top")
            }
            CheckerValue::VRegs(vregs) => {
                write!(f, "{{ ")?;
                for vreg in vregs {
                    write!(f, "{} ", vreg)?;
                }
                write!(f, "}}")?;
                Ok(())
            }
        }
    }
}

/// Meet function for the analysis map: meet individual values at
/// matching allocations, and intersect the key sets (dropping
/// key-value pairs that are present on only one side). The result is
/// written into `into`.
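///
/// For example (hypothetical allocations and vregs): merging
/// `{ r0 → {v0}, r1 → {v1} }` into `{ r0 → {v0, v2} }` leaves
/// `{ r0 → {v0} }`; r1 is dropped because it is not present on both
/// sides, and the two values for r0 are intersected.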
fn merge_map<K: Copy + Clone + PartialEq + Eq + Hash>(
    into: &mut FxHashMap<K, CheckerValue>,
    from: &FxHashMap<K, CheckerValue>,
) {
    into.retain(|k, _| from.contains_key(k));
    for (k, into_v) in into.iter_mut() {
        let from_v = from.get(k).unwrap();
        into_v.meet_with(from_v);
    }
}

impl CheckerState {
    /// Create a new checker state.
    fn new() -> CheckerState {
        Default::default()
    }

    /// Merge this checker state with another at a CFG join-point.
    fn meet_with(&mut self, other: &CheckerState) {
        match (self, other) {
            (_, CheckerState::Top) => {
                // Nothing.
            }
            (this @ CheckerState::Top, _) => {
                *this = other.clone();
            }
            (
                CheckerState::Allocations(my_allocations),
                CheckerState::Allocations(other_allocations),
            ) => {
                merge_map(my_allocations, other_allocations);
            }
        }
    }

    fn check_val<'a, F: Function>(
        &self,
        inst: Inst,
        op: Operand,
        alloc: Allocation,
        val: &CheckerValue,
        allocs: &[Allocation],
        checker: &Checker<'a, F>,
    ) -> Result<(), CheckerError> {
        if alloc == Allocation::none() {
            return Err(CheckerError::MissingAllocation { inst, op });
        }

        if op.kind() == OperandKind::Use && op.as_fixed_nonallocatable().is_none() {
            match val {
                CheckerValue::Universe => {
                    return Err(CheckerError::UnknownValueInAllocation { inst, op, alloc });
                }
                CheckerValue::VRegs(vregs) if !vregs.contains(&op.vreg()) => {
                    return Err(CheckerError::IncorrectValuesInAllocation {
                        inst,
                        op,
                        alloc,
                        actual: vregs.clone(),
                    });
                }
                _ => {}
            }
        }

        self.check_constraint(inst, op, alloc, allocs, checker)?;

        Ok(())
    }

    /// Check an instruction against this state. This must be called
    /// twice: once with `InstPosition::Before`, and once with
    /// `InstPosition::After` (after updating state with defs).
    fn check<'a, F: Function>(
        &self,
        pos: InstPosition,
        checkinst: &CheckerInst,
        checker: &Checker<'a, F>,
    ) -> Result<(), CheckerError> {
        let default_val = Default::default();
        match checkinst {
            &CheckerInst::Op {
                inst,
                ref operands,
                ref allocs,
                ..
            } => {
                // Skip Use-checks at the After point if there are any
                // reused inputs: the Def which reuses the input
                // happens early.
                let has_reused_input = operands
                    .iter()
                    .any(|op| matches!(op.constraint(), OperandConstraint::Reuse(_)));
                if has_reused_input && pos == InstPosition::After {
                    return Ok(());
                }

                // For each operand, check (i) that the allocation
                // contains the expected vreg, and (ii) that it meets
                // the requirements of the OperandConstraint.
                for (op, alloc) in operands.iter().zip(allocs.iter()) {
                    let is_here = match (op.pos(), pos) {
                        (OperandPos::Early, InstPosition::Before) => true,
                        (OperandPos::Late, InstPosition::After) => true,
                        _ => false,
                    };
                    if !is_here {
                        continue;
                    }

                    let val = self.get_value(alloc).unwrap_or(&default_val);
                    trace!(
                        "checker: checkinst {:?}: op {:?}, alloc {:?}, checker value {:?}",
                        checkinst,
                        op,
                        alloc,
                        val
                    );
                    self.check_val(inst, *op, *alloc, val, allocs, checker)?;
                }
            }
            &CheckerInst::Move { into, from } => {
                // Ensure that the allocator never returns stack-to-stack moves.
                let is_stack = |alloc: Allocation| {
                    if let Some(reg) = alloc.as_reg() {
                        checker.stack_pregs.contains(reg)
                    } else {
                        alloc.is_stack()
                    }
                };
                if is_stack(into) && is_stack(from) {
                    return Err(CheckerError::StackToStackMove { into, from });
                }
            }
            &CheckerInst::ParallelMove { .. } => {
                // This doesn't need verification; we just update
                // according to the move semantics in the step
                // function below.
            }
        }
        Ok(())
    }

    /// Update according to instruction.
    fn update(&mut self, checkinst: &CheckerInst) {
        self.become_defined();

        match checkinst {
            &CheckerInst::Move { into, from } => {
                // The value may not be present if this move is part of
                // the parallel move resolver's fallback sequence that
                // saves a victim register elsewhere. (In other words,
                // that sequence saves an undefined value and restores
                // it, so it has no effect.) The checker needs to avoid
                // putting Universe lattice values into the map.
                if let Some(val) = self.get_value(&from).cloned() {
                    trace!(
                        "checker: checkinst {:?} updating: move {:?} -> {:?} val {:?}",
                        checkinst,
                        from,
                        into,
                        val
                    );
                    self.set_value(into, val);
                }
            }
            &CheckerInst::ParallelMove { ref moves } => {
                // First, build a map of actions for each vreg in an
                // alloc. If an alloc holds vreg V_i before a parallel
                // move, then for each use of V_i as a source (V_i ->
                // V_j), we add V_j wherever V_i appears; and if V_i is
                // used as a dest (at most once), it must first be
                // removed from all allocs' vreg sets.
                let mut additions: FxHashMap<VReg, SmallVec<[VReg; 2]>> = FxHashMap::default();
                let mut deletions: FxHashSet<VReg> = FxHashSet::default();

                for &(dest, src) in moves {
                    deletions.insert(dest);
                    additions
                        .entry(src)
                        .or_insert_with(|| smallvec![])
                        .push(dest);
                }

                // Now process each allocation's set of vreg labels,
                // first deleting those labels that were updated by
                // this parallel move, then adding back labels
                // redefined by the move.
                for value in self.get_values_mut() {
                    if let Some(vregs) = value.vregs_mut() {
                        let mut insertions: SmallVec<[VReg; 2]> = smallvec![];
                        for &vreg in vregs.iter() {
                            if let Some(additions) = additions.get(&vreg) {
                                insertions.extend(additions.iter().cloned());
                            }
                        }
                        for &d in &deletions {
                            vregs.remove(&d);
                        }
                        vregs.extend(insertions);
                    }
                }
            }
            &CheckerInst::Op {
                ref operands,
                ref allocs,
                ref clobbers,
                ..
            } => {
                // For each def, (i) update the def's alloc to reflect
                // the defined vreg (and only that vreg), and (ii)
                // remove this vreg from all other allocs in the
                // checker state: any other alloc labeled with it now
                // holds a stale (pre-def) value.
                for (op, alloc) in operands.iter().zip(allocs.iter()) {
                    if op.kind() != OperandKind::Def {
                        continue;
                    }
                    self.remove_vreg(op.vreg());
                    self.set_value(*alloc, CheckerValue::from_reg(op.vreg()));
                }
                for clobber in clobbers {
                    self.remove_value(&Allocation::reg(*clobber));
                }
            }
        }
    }

    fn remove_vreg(&mut self, vreg: VReg) {
        for (_, value) in self.get_mappings_mut() {
            value.remove_vreg(vreg);
        }
    }

    fn check_constraint<'a, F: Function>(
        &self,
        inst: Inst,
        op: Operand,
        alloc: Allocation,
        allocs: &[Allocation],
        checker: &Checker<'a, F>,
    ) -> Result<(), CheckerError> {
        match op.constraint() {
            OperandConstraint::Any => {}
            OperandConstraint::Reg => {
                if let Some(preg) = alloc.as_reg() {
                    // Reject pregs that represent a fixed stack slot.
                    if !checker.machine_env.fixed_stack_slots.contains(&preg) {
                        return Ok(());
                    }
                }
                return Err(CheckerError::AllocationIsNotReg { inst, op, alloc });
            }
            OperandConstraint::Stack => {
                if alloc.kind() != AllocationKind::Stack {
                    // Accept pregs that represent a fixed stack slot.
                    if let Some(preg) = alloc.as_reg() {
                        if checker.machine_env.fixed_stack_slots.contains(&preg) {
                            return Ok(());
                        }
                    }
                    return Err(CheckerError::AllocationIsNotStack { inst, op, alloc });
                }
            }
            OperandConstraint::FixedReg(preg) => {
                if alloc != Allocation::reg(preg) {
                    return Err(CheckerError::AllocationIsNotFixedReg { inst, op, alloc });
                }
            }
            OperandConstraint::Reuse(idx) => {
                if alloc.kind() != AllocationKind::Reg {
                    return Err(CheckerError::AllocationIsNotReg { inst, op, alloc });
                }
                if alloc != allocs[idx] {
                    return Err(CheckerError::AllocationIsNotReuse {
                        inst,
                        op,
                        alloc,
                        expected_alloc: allocs[idx],
                    });
                }
            }
            OperandConstraint::Limit(max) => {
                if let Some(preg) = alloc.as_reg() {
                    if preg.hw_enc() >= max {
                        return Err(CheckerError::AllocationOutsideLimit {
                            inst,
                            op,
                            alloc,
                            range: (0..max),
                        });
                    }
                }
            }
        }
        Ok(())
    }
}

/// An instruction representation in the checker's BB summary.
#[derive(Clone, Debug)]
pub(crate) enum CheckerInst {
    /// A move between allocations (these could be registers or
    /// spillslots).
    Move { into: Allocation, from: Allocation },

    /// A parallel move in the original program. Simultaneously moves
    /// from all source vregs to all corresponding dest vregs,
    /// permitting overlap in the src and dest sets and doing all
    /// reads before any writes.
    ParallelMove {
        /// Vector of (dest, src) moves.
        moves: Vec<(VReg, VReg)>,
    },

    /// A regular instruction with fixed use and def slots. Contains
    /// both the original operands (as given to the regalloc) and the
    /// allocation results.
    Op {
        inst: Inst,
        operands: Vec<Operand>,
        allocs: Vec<Allocation>,
        clobbers: Vec<PReg>,
    },
}

#[derive(Debug)]
pub struct Checker<'a, F: Function> {
    f: &'a F,
    bb_in: FxHashMap<Block, CheckerState>,
    bb_insts: FxHashMap<Block, Vec<CheckerInst>>,
    edge_insts: FxHashMap<(Block, Block), Vec<CheckerInst>>,
    machine_env: &'a MachineEnv,
    stack_pregs: PRegSet,
}

impl<'a, F: Function> Checker<'a, F> {
    /// Create a new checker for the given function, initializing CFG
    /// info immediately. The client should call `prepare()` with the
    /// allocation results to populate the per-BB abstract
    /// instructions before invoking `run()` to check for errors.
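    ///
    /// A sketch of the intended call sequence (the names `func`, `env`,
    /// and `out` are hypothetical placeholders for a `Function`
    /// implementation, a `MachineEnv`, and the allocator's `Output`):
    ///
    /// ```text
    /// let mut checker = Checker::new(&func, &env);
    /// checker.prepare(&out);
    /// checker.run().expect("allocation should pass the checker");
    /// ```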
    pub fn new(f: &'a F, machine_env: &'a MachineEnv) -> Checker<'a, F> {
        let mut bb_in = FxHashMap::default();
        let mut bb_insts = FxHashMap::default();
        let mut edge_insts = FxHashMap::default();

        for block in 0..f.num_blocks() {
            let block = Block::new(block);
            bb_in.insert(block, Default::default());
            bb_insts.insert(block, vec![]);
            for &succ in f.block_succs(block) {
                edge_insts.insert((block, succ), vec![]);
            }
        }

        bb_in.insert(f.entry_block(), CheckerState::default());

        let mut stack_pregs = PRegSet::empty();
        for &preg in &machine_env.fixed_stack_slots {
            stack_pregs.add(preg);
        }

        Checker {
            f,
            bb_in,
            bb_insts,
            edge_insts,
            machine_env,
            stack_pregs,
        }
    }

    /// Build the list of checker instructions based on the given func
    /// and allocation results.
    pub fn prepare(&mut self, out: &Output) {
        trace!("checker: out = {:?}", out);
        let mut last_inst = None;
        for block in 0..self.f.num_blocks() {
            let block = Block::new(block);
            for inst_or_edit in out.block_insts_and_edits(self.f, block) {
                match inst_or_edit {
                    InstOrEdit::Inst(inst) => {
                        debug_assert!(last_inst.is_none() || inst > last_inst.unwrap());
                        last_inst = Some(inst);
                        self.handle_inst(block, inst, out);
                    }
                    InstOrEdit::Edit(edit) => self.handle_edit(block, edit),
                }
            }
        }
    }

    /// For each original instruction, create an `Op`.
    fn handle_inst(&mut self, block: Block, inst: Inst, out: &Output) {
        // Process uses, defs, and clobbers.
        let operands: Vec<_> = self.f.inst_operands(inst).iter().cloned().collect();
        let allocs: Vec<_> = out.inst_allocs(inst).iter().cloned().collect();
        let clobbers: Vec<_> = self.f.inst_clobbers(inst).into_iter().collect();
        let checkinst = CheckerInst::Op {
            inst,
            operands,
            allocs,
            clobbers,
        };
        trace!("checker: adding inst {:?}", checkinst);
        self.bb_insts.get_mut(&block).unwrap().push(checkinst);

        // If this is a branch, emit a ParallelMove on each outgoing
        // edge as necessary to handle blockparams.
        if self.f.is_branch(inst) {
            for (i, &succ) in self.f.block_succs(block).iter().enumerate() {
                let args = self.f.branch_blockparams(block, inst, i);
                let params = self.f.block_params(succ);
                assert_eq!(
                    args.len(),
                    params.len(),
                    "block{} has succ block{}; gave {} args for {} params",
                    block.index(),
                    succ.index(),
                    args.len(),
                    params.len()
                );
                if args.len() > 0 {
                    let moves = params.iter().cloned().zip(args.iter().cloned()).collect();
                    self.edge_insts
                        .get_mut(&(block, succ))
                        .unwrap()
                        .push(CheckerInst::ParallelMove { moves });
                }
            }
        }
    }

    fn handle_edit(&mut self, block: Block, edit: &Edit) {
        trace!("checker: adding edit {:?}", edit);
        match edit {
            &Edit::Move { from, to } => {
                self.bb_insts
                    .get_mut(&block)
                    .unwrap()
                    .push(CheckerInst::Move { into: to, from });
            }
        }
    }

    /// Perform the dataflow analysis to compute checker state at each BB entry.
    fn analyze(&mut self) {
        let mut queue = Vec::new();
        let mut queue_set = FxHashSet::default();

        // Put every block in the queue to start with, to ensure
        // everything is visited even if the initial state remains
        // `Top` after preds update it.
        //
        // We add blocks in reverse order so that when we process
        // back-to-front below, we do our initial pass in input block
        // order, which is (usually) RPO order or at least a
        // reasonable visit order.
        for block in (0..self.f.num_blocks()).rev() {
            let block = Block::new(block);
            queue.push(block);
            queue_set.insert(block);
        }

        while let Some(block) = queue.pop() {
            queue_set.remove(&block);
            let mut state = self.bb_in.get(&block).cloned().unwrap();
            trace!("analyze: block {} has state {:?}", block.index(), state);
            for inst in self.bb_insts.get(&block).unwrap() {
                state.update(inst);
                trace!("analyze: inst {:?} -> state {:?}", inst, state);
            }

            for &succ in self.f.block_succs(block) {
                let mut new_state = state.clone();
                for edge_inst in self.edge_insts.get(&(block, succ)).unwrap() {
                    new_state.update(edge_inst);
                    trace!(
                        "analyze: succ {:?}: inst {:?} -> state {:?}",
                        succ,
                        edge_inst,
                        new_state
                    );
                }

                let cur_succ_in = self.bb_in.get(&succ).unwrap();
                trace!(
                    "meeting state {:?} for block {} with state {:?} for block {}",
                    new_state,
                    block.index(),
                    cur_succ_in,
                    succ.index()
                );
                new_state.meet_with(cur_succ_in);
                let changed = &new_state != cur_succ_in;
                trace!(" -> {:?}, changed {}", new_state, changed);

                if changed {
                    trace!(
                        "analyze: block {} state changed from {:?} to {:?}; pushing onto queue",
                        succ.index(),
                        cur_succ_in,
                        new_state
                    );
                    self.bb_in.insert(succ, new_state);
                    if queue_set.insert(succ) {
                        queue.push(succ);
                    }
                }
            }
        }
    }

    /// Using BB-start state computed by `analyze()`, step the checker state
    /// through each BB and check each instruction's register allocations
    /// for errors.
    fn find_errors(&self) -> Result<(), CheckerErrors> {
        let mut errors = vec![];
        for (block, input) in &self.bb_in {
            let mut state = input.clone();
            for inst in self.bb_insts.get(block).unwrap() {
                if let Err(e) = state.check(InstPosition::Before, inst, self) {
                    trace!("Checker error: {:?}", e);
                    errors.push(e);
                }
                state.update(inst);
                if let Err(e) = state.check(InstPosition::After, inst, self) {
                    trace!("Checker error: {:?}", e);
                    errors.push(e);
                }
            }
        }

        if errors.is_empty() {
            Ok(())
        } else {
            Err(CheckerErrors { errors })
        }
    }

    /// Find any errors, returning `Err(CheckerErrors)` with all errors found
    /// or `Ok(())` otherwise.
    pub fn run(mut self) -> Result<(), CheckerErrors> {
        self.analyze();
        let result = self.find_errors();

        trace!("=== CHECKER RESULT ===");
        fn print_state(state: &CheckerState) {
            if !trace_enabled!() {
                return;
            }
            if let CheckerState::Allocations(allocs) = state {
                let mut s = vec![];
                for (alloc, state) in allocs {
                    s.push(format!("{} := {}", alloc, state));
                }
                trace!("    {{ {} }}", s.join(", "))
            }
        }
        for bb in 0..self.f.num_blocks() {
            let bb = Block::new(bb);
            trace!("block{}:", bb.index());
            let insts = self.bb_insts.get(&bb).unwrap();
            let mut state = self.bb_in.get(&bb).unwrap().clone();
            print_state(&state);
            for inst in insts {
                match inst {
                    &CheckerInst::Op {
                        inst,
                        ref operands,
                        ref allocs,
                        ref clobbers,
                    } => {
                        trace!(
                            "  inst{}: {:?} ({:?}) clobbers:{:?}",
                            inst.index(),
                            operands,
                            allocs,
                            clobbers
                        );
                    }
                    &CheckerInst::Move { from, into } => {
                        trace!("    {} -> {}", from, into);
                    }
                    &CheckerInst::ParallelMove { .. } => {
                        panic!("unexpected parallel_move in body (non-edge)")
                    }
                }
                state.update(inst);
                print_state(&state);
            }

            for &succ in self.f.block_succs(bb) {
                trace!("  succ {:?}:", succ);
                let mut state = state.clone();
                for edge_inst in self.edge_insts.get(&(bb, succ)).unwrap() {
                    match edge_inst {
                        &CheckerInst::ParallelMove { ref moves } => {
                            let moves = moves
                                .iter()
                                .map(|(dest, src)| format!("{} -> {}", src, dest))
                                .collect::<Vec<_>>();
                            trace!("    parallel_move {}", moves.join(", "));
                        }
                        _ => panic!("unexpected edge_inst: not a parallel move"),
                    }
                    state.update(edge_inst);
                    print_state(&state);
                }
            }
        }

        result
    }
}
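
// A minimal sanity test for the value lattice, added as an illustrative
// sketch: it exercises `CheckerValue::meet_with` (set intersection) only.
// It assumes `RegClass` is exported at the crate root alongside `VReg`,
// as in upstream regalloc2; adjust the import path if this copy differs.
#[cfg(test)]
mod checker_value_tests {
    use super::*;
    use crate::RegClass;

    #[test]
    fn meet_is_set_intersection() {
        let v0 = VReg::new(0, RegClass::Int);
        let v1 = VReg::new(1, RegClass::Int);
        let v2 = VReg::new(2, RegClass::Int);

        // { v0, v1 } meet { v1, v2 } = { v1 }.
        let mut a = CheckerValue::VRegs([v0, v1].into_iter().collect());
        let b = CheckerValue::VRegs([v1, v2].into_iter().collect());
        a.meet_with(&b);
        assert_eq!(a, CheckerValue::from_reg(v1));

        // Meeting with Universe (the top value) leaves the value unchanged.
        let mut c = CheckerValue::from_reg(v0);
        c.meet_with(&CheckerValue::Universe);
        assert_eq!(c, CheckerValue::from_reg(v0));
    }
}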