delimiter $ ( [ { ~ $
$ } ] ) , $;
--| Well formed formulas, or propositions - true or false.
strict provable sort wff;
--| Implication: if p, then q.
term im (p q: wff): wff; infixr im: $->$ prec 25;
--| Negation: p is false.
term not (p: wff): wff; prefix not: $~$ prec 41;
--| Axiom 1 of Lukasiewicz' axioms for classical propositional logic.
axiom ax_1 (a b: wff): $ a -> b -> a $;
--| Axiom 2 of Lukasiewicz' axioms for classical propositional logic.
axiom ax_2 (a b c: wff): $ (a -> b -> c) -> (a -> b) -> a -> c $;
--| Axiom 3 of Lukasiewicz' axioms for classical propositional logic.
axiom ax_3 (a b: wff): $ (~a -> ~b) -> b -> a $;
--| Modus ponens: from `a -> b` and `a`, infer `b`.
axiom ax_mp (a b: wff): $ a -> b $ > $ a $ > $ b $;
delimiter $ ( ) ~ { } , $;
--| Well formed formulas, or propositions - true or false.
strict provable sort wff;
--| Implication: if p, then q.
term im (p q: wff): wff; infixr im: $->$ prec 25;
--| Negation: p is false.
term not (p: wff): wff; prefix not: $~$ prec 41;
--| Axiom 1 of Lukasiewicz' axioms for classical propositional logic.
axiom ax_1 (a b: wff): $ a -> b -> a $;
--| Axiom 2 of Lukasiewicz' axioms for classical propositional logic.
axiom ax_2 (a b c: wff): $ (a -> b -> c) -> (a -> b) -> a -> c $;
--| Axiom 3 of Lukasiewicz' axioms for classical propositional logic.
axiom ax_3 (a b: wff): $ (~a -> ~b) -> b -> a $;
--| Modus ponens: from `a -> b` and `a`, infer `b`.
axiom ax_mp (a b: wff): $ a -> b $ > $ a $ > $ b $;
strict provable sort GG;
term termG1 (p: GG): GG;
term termG2 (p: GG): GG;
strict provable sort GG;
term termG1 (p: GG): GG;
term termG2 (p: GG): GG;
strict provable sort FF;
term termF1 (p: FF): FF;
term termF2 (p: FF): FF;
strict provable sort FF;
term termF1 (p: FF): FF;
term termF2 (p: FF): FF;
strict provable sort EE;
term termE1 (p: EE): EE;
term termE2 (p: EE): EE;
strict provable sort EE;
term termE1 (p: EE): EE;
term termE2 (p: EE): EE;
strict provable sort GG;
term termG1 (p: GG): GG;
term termG2 (p: GG): GG;
strict provable sort GG;
term termG1 (p: GG): GG;
term termG2 (p: GG): GG;
strict provable sort FF;
term termF1 (p: FF): FF;
term termF2 (p: FF): FF;
strict provable sort FF;
term termF1 (p: FF): FF;
term termF2 (p: FF): FF;
strict provable sort EE;
term termE1 (p: EE): EE;
term termE2 (p: EE): EE;
strict provable sort EE;
term termE1 (p: EE): EE;
term termE2 (p: EE): EE;
strict provable sort DD;
term termD1 (p: DD): DD;
term termD2 (p: DD): DD;
strict provable sort DD;
term termD1 (p: DD): DD;
term termD2 (p: DD): DD;
import "e.mm1";
import "f.mm1";
import "g.mm1";
strict provable sort CC;
term termC1 (p: EE): FF;
term termC2 (p: GG): CC;
import "e.mm0";
import "f.mm0";
import "g.mm0";
strict provable sort CC;
term termC1 (p: EE): FF;
term termC2 (p: GG): CC;
import "d.mm1";
import "e.mm1";
delimiter $ ( [ { ~ $
$ } ] ) , $;
strict provable sort BB;
term termB1 (p: DD): BB;
term termB2 (p: EE): BB;
import "d.mm0";
import "e.mm0";
strict provable sort BB;
term termB1 (p: DD): BB;
term termB2 (p: EE): BB;
import "b.mm1";
import "c.mm1";
strict provable sort AA;
term termA1 (p: AA): AA;
term termA2 : AA > BB > CC;
import "b.mm0";
import "c.mm0";
strict provable sort AA;
term termA1 (p: AA): AA;
term termA2 : AA > BB > CC;
strict provable sort DD;
term termD1 (p: DD): DD;
term termD2 (p: DD): DD;
strict provable sort DD;
term termD1 (p: DD): DD;
term termD2 (p: DD): DD;
import "cycleA.mm0";
strict provable sort B;
term termB (p: B): B;
import "cycleB.mm0";
strict provable sort A;
term termA (p: A): A;
import "f.mm1";
import "g.mm1";
strict provable sort CC;
term termC1 (p: FF): CC;
term termC2 (p: GG): CC;
import "f.mm0";
import "g.mm0";
strict provable sort CC;
term termC1 (p: FF): CC;
term termC2 (p: GG): CC;
import "d.mm1";
import "e.mm1";
delimiter $ ( [ { ~ $
$ } ] ) , $;
strict provable sort BB;
term termB1 (p: DD): BB;
term termB2 (p: EE): BB;
import "d.mm0";
import "e.mm0";
strict provable sort BB;
term termB1 (p: DD): BB;
term termB2 (p: EE): BB;
import "b.mm1";
import "c.mm1";
strict provable sort AA;
term termA1 (p: AA): AA;
term termA2 : AA > BB > CC;
import "b.mm0";
import "c.mm0";
strict provable sort AA;
term termA1 (p: AA): AA;
term termA2 : AA > BB > CC;
use std::convert::{ TryFrom, TryInto };
use std::marker::PhantomData;
use std::fmt::{ Debug, Formatter, Result as FmtResult };
use std::sync::atomic::{ AtomicU8, AtomicU32, Ordering::Relaxed };
use crate::mmb::unify::UnifyIter;
use crate::mmb::proof::ProofIter;
use crate::mmb::stmt::StmtCmd;
/// 10000000_11111111_11111111_11111111_11111111_11111111_11111111_11111111
const TYPE_SORT_MASK: u64 = (1 << 63) | ((1 << 56) - 1);
#[macro_export]
macro_rules! localize {
( $e:expr ) => {
$e.map_err(|e| VerifErr::Local(file!(), line!(), Box::new(e)))
}
}
#[macro_export]
macro_rules! io_err {
( $e:expr ) => {
$e.map_err(|e| VerifErr::IoErr(file!(), line!(), Box::new(e)))
}
}
#[macro_export]
macro_rules! none_err {
( $e:expr ) => {
$e.ok_or(VerifErr::NoneErr(file!(), line!()))
}
}
#[macro_export]
macro_rules! conv_err {
( $e:expr ) => {
$e.map_err(|_| VerifErr::ConvErr(file!(), line!()))
}
}
#[macro_export]
macro_rules! make_sure {
( $e:expr ) => {
if !($e) {
return Err(VerifErr::MakeSure(file!(), line!()))
}
}
}
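// A minimal illustrative test for the error macros above (not from the original
// test suite; the name and values are made up): `none_err!` turns an `Option`
// into a `Res`, recording the file and line where the `None` was observed.
#[test]
fn none_err_example() {
let present: Res<u8> = none_err!(Some(1u8));
assert_eq!(present.unwrap(), 1);
let missing: Res<u8> = none_err!(None::<u8>);
assert!(missing.is_err());
}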
macro_rules! bin_parser {
( $(($name: ident, $t:ident))* ) => {
$(
pub fn $name(source: &[u8]) -> Res<($t, &[u8])> {
let int_bytes =
source
.get(0..std::mem::size_of::<$t>())
.ok_or_else(|| {
VerifErr::Msg(format!("binary parser ran out of input @ {}: {}", file!(), line!()))
})?;
let new_pos = std::mem::size_of::<$t>();
Ok(
($t::from_le_bytes(int_bytes.try_into().unwrap()), &source[new_pos..])
)
}
)*
};
}
bin_parser! {
(parse_u8, u8)
(parse_u16, u16)
(parse_u32, u32)
(parse_u64, u64)
}
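// A minimal illustrative test (name and bytes are arbitrary): the generated
// `parse_u32` reads a little-endian prefix and returns the remaining input.
#[test]
fn bin_parser_example() {
let bytes: [u8; 5] = [1, 0, 0, 0, 0xFF];
let (n, rest) = parse_u32(&bytes).unwrap();
assert_eq!(n, 1);
assert_eq!(rest, &[0xFFu8]);
}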
macro_rules! bitwise_inner {
( $($t:ident),* ) => {
$(
impl std::ops::BitAnd<$t> for $t {
type Output = Self;
fn bitand(self, rhs: Self) -> Self::Output {
$t { inner: self.inner & rhs.inner }
}
}
impl std::ops::BitAndAssign<$t> for $t {
fn bitand_assign(&mut self, other: Self) {
self.inner &= other.inner
}
}
impl std::ops::BitOr<$t> for $t {
type Output = Self;
fn bitor(self, rhs: Self) -> Self::Output {
$t { inner: self.inner | rhs.inner }
}
}
impl std::ops::BitOrAssign<$t> for $t {
fn bitor_assign(&mut self, other: Self) {
self.inner |= other.inner
}
}
impl std::ops::Not for $t {
type Output = $t;
fn not(self) -> Self::Output {
$t { inner: !self.inner }
}
}
)*
}
}
bitwise_inner!(Type, Mods);
// These type aliases exist just so the reader can see the intended meaning
// at the call-site.
pub type SortNum = u8;
pub type TermNum = u32;
pub type AssertNum = u32;
pub type SortIdent<'a> = Str<'a>;
pub type TermIdent<'a> = Str<'a>;
pub type AssertIdent<'a> = Str<'a>;
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Ptr<'a, A>(pub u32, pub PhantomData<&'a A>);
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub struct Str<'a>(pub &'a [u8]);
impl<'b, 'a: 'b> Str<'a> {
pub fn as_bytes(&self) -> &'a [u8] {
self.0
}
}
impl<'a> Debug for Str<'a> {
fn fmt(&self, f: &mut Formatter) -> FmtResult {
for byte in self.0.iter() {
write!(f, "{}", *byte as char)?;
}
Ok(())
}
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum Either<A, B> {
L(A),
R(B),
}
pub type Arg = Type;
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub struct Type {
pub inner: u64
}
impl std::default::Default for Type {
fn default() -> Self {
Type { inner: 0 }
}
}
impl Type {
pub fn high_bit(self) -> Self {
Type { inner: self.inner & (!crate::mmb::TYPE_DEPS_MASK) }
}
pub fn disjoint(self, other: Self) -> bool {
(self.inner & other.inner) == 0
}
pub fn new_bound() -> Self {
Type { inner: 1 << 63 }
}
pub fn new(bound: bool) -> Self {
if bound {
Type { inner: 1 << 63 }
} else {
Type { inner: 0 }
}
}
pub fn new_with_sort(sort_num: u8) -> Self {
Type { inner: (sort_num as u64) << 56 }
}
#[inline]
pub fn is_bound(self) -> bool {
self.inner & (1 << 63) != 0
}
pub fn add_sort(&mut self, sort_id: u8) {
// clear existing sort if any;
self.inner &= TYPE_SORT_MASK;
// Add new sort
self.inner |= ((sort_id as u64) << 56);
}
/// Add a dependency on bv_idx
pub fn add_dep(&mut self, bv_idx: u64) {
self.inner |= 1 << bv_idx.checked_sub(1).expect("No 0th bv to depend on")
}
pub fn sort(self) -> u8 {
u8::try_from((self.inner >> 56) & 0x7F).unwrap()
}
pub fn depends_on_(self, j: u64) -> bool {
self.inner & (1u64 << j.checked_sub(1).expect("No 0th to depend on")) != 0
}
/// If this is the type of a bound variable, return a u64
/// whose only activated bit is the bit indicating which bv
/// it is.
pub fn bound_digit(self) -> Res<u64> {
if self.is_bound() {
Ok(self.inner & !(0xFF << 56))
} else {
Err(VerifErr::Msg(format!("Not bound")))
}
}
/// The POSITION (1 - 56) of this bound variable, NOT the literal u64.
pub fn bound_pos(self) -> Res<u64> {
if self.is_bound() {
for i in 0..56 {
if ((1 << i) & self.inner) != 0 {
return Ok(i + 1)
}
}
Err(VerifErr::Msg(format!("should be unreachable")))
} else {
Err(VerifErr::Msg(format!("Not bound")))
}
}
pub fn has_deps(self) -> bool {
if self.is_bound() {
false
} else {
(self.inner & crate::mmb::TYPE_DEPS_MASK) > 0
}
}
/// If this is a regular/not-bound variable, return a u64
/// whose only activated bits are the ones marking the bvs
/// on which it depends.
pub fn deps(self) -> Res<u64> {
if !self.is_bound() {
Ok(self.inner & !(0xFF << 56))
} else {
Err(VerifErr::Msg(format!("No deps")))
}
}
}
#[test]
fn bound1() {
let mut t1 = Type::new_bound();
t1 |= Type { inner: (1 << 15) };
assert_eq!(t1.bound_pos().unwrap(), 16);
}
#[test]
fn bound2() {
let mut t1 = Type::new_bound();
t1 |= Type { inner: (1 << 0) };
assert_eq!(t1.bound_pos().unwrap(), 1);
}
#[test]
fn bound3() {
let mut t1 = Type::new_bound();
t1 |= Type { inner: (1 << 55) };
assert_eq!(t1.bound_pos().unwrap(), 56);
}
#[test]
fn bound_err1() {
let t1 = Type::new_bound();
assert!(t1.bound_pos().is_err())
}
#[test]
fn bound_err2() {
let t1 = Type::new(false);
assert!(t1.bound_pos().is_err())
}
#[test]
fn deps1() {
let t0 = Type { inner: 0 };
let mut t1_v = Type { inner: 0 };
let mut t1 = Type { inner: 0 };
let t2 = Type { inner: 1 };
let t3 = Type::new_bound();
assert!(!t0.has_deps());
assert!(!t1.has_deps());
assert!(t2.has_deps());
assert!(!t3.has_deps());
assert_eq!(t2.deps().unwrap(), 1);
t1.add_dep(1);
t1.add_dep(2);
t1.add_dep(4);
assert!(t1.has_deps());
assert_eq!(t1.deps().unwrap(), 11);
t1_v.add_dep(4);
t1_v.add_dep(1);
t1_v.add_dep(2);
assert!(t1_v.has_deps());
assert_eq!(t1_v.deps().unwrap(), 11);
println!("t3: {:?}", t3);
let bound_1 = Type { inner: Type::new_bound().inner | (1 << 8) };
println!("bound idx: {:?}", bound_1.bound_digit());
assert!(bound_1.is_bound());
}
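// An added illustrative test in the same style as the ones above (the name and
// values are arbitrary): `new_with_sort`/`add_sort` place the sort id in bits
// 56..63 and `sort()` reads it back; adding a dep leaves the sort byte alone.
#[test]
fn sort_roundtrip_example() {
let mut t = Type::new_with_sort(3);
assert_eq!(t.sort(), 3);
t.add_sort(5);
assert_eq!(t.sort(), 5);
t.add_dep(1);
assert_eq!(t.sort(), 5);
assert_eq!(t.deps().unwrap(), 1);
}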
pub type Res<A> = Result<A, VerifErr>;
//#[derive(Clone)]
pub enum VerifErr {
MakeSure(&'static str, u32),
NoneErr(&'static str, u32),
ConvErr(&'static str, u32),
Msg(String),
// Create a rough backtrace; use the `localize!` macro to make this.
Local(&'static str, u32, Box<VerifErr>),
Unreachable(&'static str, u32),
IoErr(&'static str, u32, Box<std::io::Error>)
}
impl Debug for VerifErr {
fn fmt(&self, f: &mut Formatter) -> FmtResult {
match self {
VerifErr::Msg(s) => {
let mut d = f.debug_struct("VerifErr::Msg");
d.field("Msg", &format_args!("{}", s));
d.finish()
},
VerifErr::Local(fi, l, e) => {
let mut d = f.debug_struct("VerifErr::Local");
d.field("file", &fi);
d.field("line", &l);
d.field("Msg", &e);
d.finish()
},
VerifErr::MakeSure(fi, l) => {
let mut d = f.debug_struct("VerifErr::MakeSure");
d.field("file", &fi);
d.field("line", &l);
d.finish()
},
VerifErr::ConvErr(fi, l) => {
let mut d = f.debug_struct("VerifErr::ConvErr");
d.field("file", &fi);
d.field("line", &l);
d.finish()
},
VerifErr::Unreachable(fi, l) => {
let mut d = f.debug_struct("VerifErr::Unreachable");
d.field("file", &fi);
d.field("line", &l);
d.finish()
},
VerifErr::IoErr(fi, l, e) => {
let mut d = f.debug_struct("VerifErr::IoErr");
d.field("file", &fi);
d.field("line", &l);
d.field("err", &format_args!("{}", e));
d.finish()
},
VerifErr::NoneErr(fi, l) => {
let mut d = f.debug_struct("VerifErr::NoneErr");
d.field("file", &fi);
d.field("line", &l);
d.finish()
},
}
}
}
/// A reference to an entry in the term table.
#[derive(Debug, Clone, Copy)]
pub struct Term<'a> {
/// The term's index in the term table.
pub term_num: u32,
/// The return sort of the term (the high bit marks a `def`).
pub sort: u8,
/// The slice of the file holding the term's args, INCLUDING the `ret` type at the end.
pub args_start: &'a [u8],
/// The pointer to the start of the unify stream.
pub unify: UnifyIter<'a>,
}
/// An iterator over the arguments of a term/assertion that doesn't require allocation
/// of a vector, instead reading them from the source file each time.
#[derive(Debug, Clone, Copy)]
pub struct Args<'a> {
source: &'a [u8],
}
impl<'a> DoubleEndedIterator for Args<'a> {
fn next_back(&mut self) -> Option<Self::Item> {
if self.source.is_empty() {
None
} else {
let split = self.source.len().checked_sub(8)?;
let (lhs, rhs) = self.source.split_at(split);
self.source = lhs;
match parse_u64(rhs) {
Err(_) => unreachable!("Should have been guaranteed by check in `prefix_args`"),
Ok((parsed, rest)) => {
assert!(rest.is_empty());
Some(Type { inner: parsed })
}
}
}
}
}
impl<'a> ExactSizeIterator for Args<'a> {
fn len(&self) -> usize {
self.source.len() / std::mem::size_of::<u64>()
}
}
impl<'a> Iterator for Args<'a> {
type Item = Arg;
fn next(&mut self) -> Option<Self::Item> {
if self.source.is_empty() {
return None
}
match parse_u64(self.source) {
Err(_) => unreachable!("Should have been guaranteed by check in `prefix_args`"),
Ok((parsed, rest)) => {
self.source = rest;
Some(Type { inner: parsed })
}
}
}
// Needed for `len` to work correctly.
fn size_hint(&self) -> (usize, Option<usize>) {
(
self.source.len() / std::mem::size_of::<u64>(),
Some(self.source.len() / std::mem::size_of::<u64>())
)
}
}
#[test]
fn args_back1() {
let s1 = &[
10, 11, 12, 13, 14, 15, 16, 17,
18, 19, 20, 21, 22, 23, 24, 25,
26, 27, 28, 29, 30, 31, 32, 33
];
let s2 = &[
10, 11, 12, 13, 14, 15, 16, 17,
18, 19, 20, 21, 22, 23, 24, 25,
];
let s3 = &[
10, 11, 12, 13, 14, 15, 16, 17,
];
let tgt1 = [26, 27, 28, 29, 30, 31, 32, 33];
let tgt2 = [18, 19, 20, 21, 22, 23, 24, 25];
let tgt3 = [10, 11, 12, 13, 14, 15, 16, 17];
let mut args = Args { source: s1 };
let back = args.next_back().unwrap();
assert_eq!(back.inner, u64::from_le_bytes(tgt1));
assert_eq!(args.source, s2);
let back = args.next_back().unwrap();
assert_eq!(back.inner, u64::from_le_bytes(tgt2));
assert_eq!(args.source, s3);
let back = args.next_back().unwrap();
assert_eq!(back.inner, u64::from_le_bytes(tgt3));
assert!(args.source.is_empty());
}
#[test]
fn args_back_err1() {
let s1 = &[10, 11, 12, 13, 14, 15, 16];
let mut args = Args { source: s1 };
let back = args.next_back();
assert!(back.is_none());
}
#[test]
fn args_no_ret1() {
let fake_unify = UnifyIter { buf: &[], pos: 0 };
let s1 = &[
10, 11, 12, 13, 14, 15, 16, 17,
18, 19, 20, 21, 22, 23, 24, 25,
26, 27, 28, 29, 30, 31, 32, 33
];
let t = Term { term_num: 0, sort: 0, args_start: s1, unify: fake_unify };
let s2 = &[
10, 11, 12, 13, 14, 15, 16, 17,
18, 19, 20, 21, 22, 23, 24, 25,
];
let args_no_ret = t.args_no_ret();
assert_eq!(args_no_ret.source, s2);
}
#[test]
fn args_no_ret2() {
let fake_unify = UnifyIter { buf: &[], pos: 0 };
let s1 = &[
10, 11, 12, 13, 14, 15, 16, 17,
];
let t = Term { term_num: 0, sort: 0, args_start: s1, unify: fake_unify };
let args_no_ret = t.args_no_ret();
assert_eq!(args_no_ret.source, &[]);
}
impl<'a> Term<'a> {
/// Returns true if this is a `def`, false for a `term`.
#[inline]
pub fn is_def(&self) -> bool {
self.sort & 0x80 != 0
}
/// The return sort of this term/def.
#[inline]
pub fn sort(&self) -> SortNum {
self.sort & 0x7F
}
/// The args, including the `ret` element at the end.
#[inline]
pub fn args(&self) -> Args<'a> {
Args {
source: self.args_start,
}
}
/// The args without the `ret` element at the end.
/// This will panic if there are fewer than 8 bytes in `source`,
/// which seems fine for now, since that would mean we had an invalid `Args`
/// to start with.
#[inline]
pub fn args_no_ret(&self) -> Args<'a> {
Args {
source: &self.args_start[..(self.args_start.len() - std::mem::size_of::<u64>())],
}
}
pub fn num_args_no_ret(&self) -> u16 {
u16::try_from(self.args().len().checked_sub(1).unwrap()).unwrap()
}
/// The return sort and dependencies.
pub fn ret(&self) -> Type {
self.args().last().unwrap()
}
/// The beginning of the unify stream for the term.
#[inline]
pub fn unify(&self) -> UnifyIter<'_> {
self.unify.clone()
}
}
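// An added illustrative test in the style of `args_no_ret1` (the byte values are
// arbitrary): `ret()` is the final 8-byte argument and `num_args_no_ret()`
// excludes it from the count.
#[test]
fn term_ret_example() {
let fake_unify = UnifyIter { buf: &[], pos: 0 };
let s1: &[u8] = &[
1, 0, 0, 0, 0, 0, 0, 0,
2, 0, 0, 0, 0, 0, 0, 0,
];
let t = Term { term_num: 0, sort: 0, args_start: s1, unify: fake_unify };
assert_eq!(t.num_args_no_ret(), 1);
assert_eq!(t.ret().inner, 2);
}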
/// A reference to an entry in the theorem table.
#[derive(Debug, Clone, Copy)]
pub struct Assert<'a> {
/// The assertion's index in the theorem table.
pub assert_num: u32,
/// The array of arguments (each `Arg` is a u64).
pub args_start: &'a [u8],
/// The pointer to the start of the unify stream.
pub unify: UnifyIter<'a>,
}
impl<'a> Assert<'a> {
/// The list of arguments of this axiom/theorem.
#[inline]
pub fn args(&self) -> Args<'a> {
Args {
source: self.args_start,
}
}
/// The beginning of the unify stream.
#[inline]
pub fn unify(&self) -> UnifyIter<'a> {
self.unify.clone()
}
pub fn num_args(&self) -> u16 {
u16::try_from(self.args().len()).unwrap()
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
#[repr(transparent)]
pub struct Mods {
pub inner: u8,
}
#[test]
fn test_mods1() {
let m = Mods { inner: 1 };
assert!(!m.is_provable());
let m = Mods { inner: 2 };
assert!(!m.is_provable());
let m = Mods { inner: 4 };
assert!(m.is_provable());
let m = Mods { inner: 8 };
assert!(!m.is_provable());
let m = Mods { inner: 3 };
assert!(!m.is_provable());
let m = Mods { inner: 12 };
assert!(m.is_provable());
let m = Mods { inner: 7 };
assert!(m.is_provable());
}
impl Mods {
pub fn new() -> Self {
Mods { inner: 0u8 }
}
pub fn is_provable(self) -> bool {
(self.inner & Mods::provable().inner) != 0
}
pub fn pure() -> Self {
Mods { inner: 1 }
}
pub fn strict() -> Self {
Mods { inner: 2 }
}
pub fn provable() -> Self {
Mods { inner: 4 }
}
pub fn free() -> Self {
Mods { inner: 8 }
}
}
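// An added illustrative test (the name is arbitrary): sort modifiers combine
// with `|` through the `bitwise_inner!`-generated impls, and `is_provable`
// checks only the provable bit.
#[test]
fn mods_bitor_example() {
let m = Mods::strict() | Mods::provable();
assert_eq!(m.inner, 6);
assert!(m.is_provable());
}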
/// An iterator over the declaration stream. This is the main provider of
/// verification obligations for mmb items, giving you both the target (for example,
/// `public term, number 13`) and the proof stream. The unification stream
/// is later looked up using the item number when it's needed.
#[derive(Debug, Clone)]
pub struct DeclIter<'a> {
/// The full source file.
pub mmb: &'a [u8],
/// The index of the current declaration in the file.
/// Starts at `proof_stream_start` from the header file.
pub pos: usize,
pub next_sort_num: u8,
pub next_termdef_num: u32,
pub next_assert_num: u32,
}
/// We pair the item number (sortnum, termnum, assertnum) with the statement
/// so that during parallel checking of mmb declarations, we know which item is
/// which, since they may be checked out of order.
impl<'a> Iterator for DeclIter<'a> {
type Item = Result<(StmtCmd, ProofIter<'a>), VerifErr>;
fn next(&mut self) -> Option<Self::Item> {
match try_next_decl(self.mmb, self.pos)? {
Err(e) => Some(Err(e)),
Ok((stmt, pr, rest)) => {
let stmt = match stmt {
StmtCmd::Sort {..} => {
let num = self.next_sort_num;
self.next_sort_num += 1;
StmtCmd::Sort { num: Some(num) }
}
StmtCmd::TermDef {local, ..} => {
let num = self.next_termdef_num;
self.next_termdef_num += 1;
StmtCmd::TermDef { num: Some(num), local }
}
StmtCmd::Axiom {..} => {
let num = self.next_assert_num;
self.next_assert_num += 1;
StmtCmd::Axiom { num: Some(num) }
}
StmtCmd::Thm {local, ..} => {
let num = self.next_assert_num;
self.next_assert_num += 1;
StmtCmd::Thm { num: Some(num), local }
}
};
self.pos = rest;
Some(Ok((stmt, pr)))
}
}
}
}
/// Only used by `DeclIter::next`.
/// This always gets the full mmb file.
fn try_next_decl(mmb: &[u8], pos: usize) -> Option<Res<(StmtCmd, ProofIter<'_>, usize)>> {
let (cmd, data, rest) = match try_next_cmd(mmb, pos) {
// Means cmd == 0, which is unreachable here
Ok(None) => return None,
// Means there was an error slicing in `try_next_cmd`
Err(e) => return Some(Err(e)),
Ok(Some(((cmd, data), rest))) => (cmd, data, rest)
};
let next2 = pos + data as usize;
let pr = ProofIter {
buf: mmb,
pos: rest,
ends_at: pos + (data as usize)
};
Some(Ok((
StmtCmd::try_from(cmd).ok()?,
pr,
next2
)))
}
// used by proof and unify
// This now always gets the full mmb file.
pub fn try_next_cmd<T>(mmb: &[u8], pos: usize) -> Res<Option<(T, usize)>>
where T: TryFrom<(u8, u32)> {
let (cmd, data, new_pos) = parse_cmd(mmb, pos)?;
if cmd == 0 {
return Ok(None);
}
Ok(Some((
T::try_from((cmd, data)).map_err(|_| VerifErr::Msg(format!("try_next_cmd failed @ pos {}", pos)))?,
new_pos
)))
}
/// Constants used in the MMB specification.
pub mod cmd {
/// `DATA_8 = 0x40`, used as a command mask for an 8 bit data field
pub const DATA_8: u8 = 0x40;
/// `DATA_16 = 0x80`, used as a command mask for a 16 bit data field
pub const DATA_16: u8 = 0x80;
/// `DATA_32 = 0xC0`, used as a command mask for a 32 bit data field
pub const DATA_32: u8 = 0xC0;
/// `DATA_MASK = 0xC0`, selects one of `DATA_8`, `DATA_16`, or `DATA_32` for data size
pub const DATA_MASK: u8 = 0xC0;
}
fn parse_cmd(mmb: &[u8], start_at: usize) -> Res<(u8, u32, usize)> {
match mmb.get(start_at..) {
None | Some([]) => Err(VerifErr::Msg("Parse cmd exhausted".to_string())),
Some([n, tl @ ..]) => {
let cmd_kind = n & !cmd::DATA_MASK;
let cmd_size = n & cmd::DATA_MASK;
match cmd_size {
0 => Ok((cmd_kind, 0, mmb.len() - tl.len())),
cmd::DATA_8 => {
let (data, rest) = parse_u8(tl)?;
Ok((cmd_kind, data as u32, mmb.len() - rest.len()))
}
cmd::DATA_16 => {
let (data, rest) = parse_u16(tl)?;
Ok((cmd_kind, data as u32, mmb.len() - rest.len()))
}
cmd::DATA_32 => {
let (data, rest) = parse_u32(tl)?;
Ok((cmd_kind, data, mmb.len() - rest.len()))
}
_ => Err(VerifErr::Msg("Parse cmd got unknown size".to_string())),
}
}
}
}
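// An added illustrative test (the bytes are made up): `parse_cmd` splits the
// leading byte into a command kind (low 6 bits) and a data-size selector
// (high 2 bits), then reads a little-endian data field of that size.
#[test]
fn parse_cmd_example() {
// 0x45 = cmd::DATA_8 (0x40) | kind 0x05, followed by one data byte.
let mmb: [u8; 3] = [0x45, 0x07, 0xAA];
let (kind, data, next) = parse_cmd(&mmb, 0).unwrap();
assert_eq!(kind, 0x05);
assert_eq!(data, 7);
assert_eq!(next, 2);
}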
/// There's a better name for this; it just hasn't come to me yet.
pub struct Outline<'a> {
pub file_data: &'a crate::fs::FileData,
pub header: crate::mmb::Header,
pub index: crate::mmb::index::Index<'a>,
/// The declarations collected from the proof stream: each statement command
/// paired with its proof iterator.
pub declarations: Vec<(StmtCmd, ProofIter<'a>)>,
mmb_num_sorts_done: AtomicU8,
mmb_num_termdefs_done: AtomicU32,
mmb_num_asserts_done: AtomicU32,
}
impl<'a> Outline<'a> {
pub fn new_from(file_data: &'a crate::fs::FileData) -> Res<Self> {
let header = crate::mmb::parse_header(file_data.mmb_file.as_slice())?;
let index = crate::mmb::index::parse_index(file_data.mmb_file.as_slice(), header)?;
let declars = DeclIter {
mmb: file_data.mmb_file.as_slice(),
pos: header.proof_stream_start as usize,
next_sort_num: 0,
next_termdef_num: 0,
next_assert_num: 0,
};
let declarations: Vec<(StmtCmd, ProofIter)> = declars.collect::<Result<Vec<(StmtCmd, ProofIter)>, VerifErr>>()?;
Ok(Outline {
file_data,
header,
index,
declarations,
mmb_num_sorts_done: AtomicU8::new(0),
mmb_num_termdefs_done: AtomicU32::new(0),
mmb_num_asserts_done: AtomicU32::new(0),
})
}
pub fn add_declar(&self, stmt: StmtCmd) {
match stmt {
StmtCmd::Sort {..} => { self.next_sort(); },
StmtCmd::TermDef {..} => { self.next_termdef(); },
StmtCmd::Axiom {..} | StmtCmd::Thm {..} => { self.next_assert(); },
}
}
pub fn assert_mmz_done(&self, mmz: &crate::mmz::MmzMem<'a>, errs: &mut Vec<VerifErr>) {
if mmz.num_sorts_done() != self.header.num_sorts {
errs.push(VerifErr::Msg(format!(
"mmz verified fewer sorts than specified in the header. Verified {}, header specified {}",
mmz.num_sorts_done(),
self.header.num_sorts
)))
}
if mmz.num_termdefs_done() != self.header.num_terms {
errs.push(VerifErr::Msg(format!(
"mmz verified fewer terms than specified in the header. Verified {}, header specified {}",
mmz.num_termdefs_done(),
self.header.num_terms
)))
}
if mmz.num_asserts_done() != self.header.num_thms {
errs.push(VerifErr::Msg(format!(
"mmz verified fewer assertions than specified in the header. Verified {}, header specified {}",
mmz.num_asserts_done(),
self.header.num_thms
)))
}
}
pub fn assert_mmb_done(&self, errs: &mut Vec<VerifErr>) {
if self.mmb_num_sorts_done() != self.header.num_sorts {
errs.push(VerifErr::Msg(format!(
"mmb verified fewer sorts than specified in the header. Verified {}, header specified {}",
self.mmb_num_sorts_done(),
self.header.num_sorts
)))
}
if self.mmb_num_termdefs_done() != self.header.num_terms {
errs.push(VerifErr::Msg(format!(
"mmb verified fewer terms than specified in the header. Verified {}, header specified {}",
self.mmb_num_termdefs_done(),
self.header.num_terms
)))
}
if self.mmb_num_asserts_done() != self.header.num_thms {
errs.push(VerifErr::Msg(format!(
"mmb verified fewer assertions than specified in the header. Verified {}, header specified {}",
self.mmb_num_asserts_done(),
self.header.num_thms
)))
}
}
}
impl<'a> Outline<'a> {
pub fn mmb(&self) -> &'a [u8] {
self.file_data.mmb_file.as_slice()
}
pub fn mmb_num_sorts_done(&self) -> u8 {
self.mmb_num_sorts_done.load(Relaxed)
}
pub fn mmb_num_termdefs_done(&self) -> u32 {
self.mmb_num_termdefs_done.load(Relaxed)
}
pub fn mmb_num_asserts_done(&self) -> u32 {
self.mmb_num_asserts_done.load(Relaxed)
}
fn next_sort(&self) -> u8 {
self.mmb_num_sorts_done.fetch_add(1, Relaxed)
}
fn next_termdef(&self) -> u32 {
self.mmb_num_termdefs_done.fetch_add(1, Relaxed)
}
fn next_assert(&self) -> u32 {
self.mmb_num_asserts_done.fetch_add(1, Relaxed)
}
pub fn get_sort_mods(&self, n: usize) -> Res<Mods> {
none_err! {
self
.mmb()
.get(self.header.sort_data_start as usize + n)
.map(|byte| Mods { inner: *byte })
}
}
/// Get a term (by number) from the mmb file
pub fn get_term_by_num(&self, term_num: u32) -> Res<Term<'a>> {
let start_point = (self.header.terms_start as usize) + ((term_num as usize) * 8);
let source = none_err!(self.mmb().get(start_point..))?;
let (num_args, source) = parse_u16(source)?;
let (sort, source) = parse_u8(source)?;
let (_reserved, source) = parse_u8(source)?;
let (term_args_start, _) = parse_u32(source)?;
let args_start = none_err!(self.mmb().get(term_args_start as usize..))?;
let split_point = none_err!{ std::mem::size_of::<u64>().checked_mul((num_args + 1) as usize) }?;
let (args_start, unify_start) = args_start.split_at(split_point);
let unify = UnifyIter {
buf: self.mmb(),
pos: self.mmb().len() - unify_start.len(),
};
Ok(Term { term_num, sort, args_start, unify })
}
/// Get an assertion (by number) from the mmb file
pub fn get_assert_by_num(&self, assert_num: u32) -> Res<Assert<'a>> {
let thm_start = (self.header.thms_start + (assert_num * 8)) as usize;
let source = self.mmb().get(thm_start..).ok_or(VerifErr::Msg(format!("Bad index")))?;
let (num_args, source) = parse_u16(source)?;
let (_reserved, source) = parse_u16(source)?;
let (args_start, _) = parse_u32(source)?;
let args_slice = none_err!(self.mmb().get(args_start as usize..))?;
let split_point = none_err! { std::mem::size_of::<u64>().checked_mul(num_args as usize) }?;
let (args_start, unify_start) = args_slice.split_at(split_point);
let unify = UnifyIter {
buf: self.mmb(),
pos: self.mmb().len() - unify_start.len(),
};
Ok(Assert { assert_num, args_start, unify })
}
}
/// These are helper functions for rendering unsigned integers
/// as a sequence of bits. For example, `view64(TYPE_SORT_MASK)` renders as:
///
/// 10000000_11111111_11111111_11111111_11111111_11111111_11111111_11111111
///
/// They're not efficient, but they're for debugging and I'm too lazy to write efficient
/// ones right now.
pub fn view64(n: u64) -> String {
let s = format!("{:#066b}", n);
let mut acc = String::new();
for (idx, c) in s.chars().skip(2).enumerate() {
if idx % 8 == 0 && idx != 0 {
acc.push('_');
}
acc.push(c);
}
acc
}
pub fn view32(n: u32) -> String {
let s = format!("{:#034b}", n);
let mut acc = String::new();
for (idx, c) in s.chars().skip(2).enumerate() {
if idx % 8 == 0 && idx != 0 {
acc.push('_');
}
acc.push(c);
}
acc
}
pub fn view16(n: u16) -> String {
let s = format!("{:#018b}", n);
let mut acc = String::new();
for (idx, c) in s.chars().skip(2).enumerate() {
if idx % 8 == 0 && idx != 0 {
acc.push('_');
}
acc.push(c);
}
acc
}
pub fn view8(n: u8) -> String {
let s = format!("{:#010b}", n);
let mut acc = String::new();
for (idx, c) in s.chars().skip(2).enumerate() {
if idx % 8 == 0 && idx != 0 {
acc.push('_');
}
acc.push(c);
}
acc
}
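// An added illustrative test (not part of the original set): the `view*` helpers
// render the bits in 8-bit groups, matching the pattern quoted in the doc
// comment for `view64`.
#[test]
fn view_example() {
assert_eq!(view8(0b1010_0001), "10100001");
assert_eq!(
view64(TYPE_SORT_MASK),
"10000000_11111111_11111111_11111111_11111111_11111111_11111111_11111111"
);
}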
impl Debug for Type {
fn fmt(&self, f: &mut Formatter) -> FmtResult {
write!(f, "{}", view64(self.inner))
}
}
use std::sync::Arc;
use std::collections::HashMap;
use bumpalo::collections::Vec as BumpVec;
use crate::mmb::stmt::StmtCmd;
use crate::mmz::{
MmzState,
MmzMem,
DelimKind,
MmzExpr,
Prec,
Fix,
NotationInfo,
NotationLit,
Coe,
MmzVar,
MmzHyp,
};
use crate::util::{
Mods,
Str,
Res,
VerifErr,
SortNum,
Type,
Either,
Term,
Assert,
Either::*,
Args,
};
use crate::make_sure;
use crate::localize;
use crate::none_err;
use crate::mmb::unify::{ UnifyCmd, UnifyIter, UMode };
fn bump<'a>(yes: bool, _: Str<'a>, p: Prec) -> Prec {
if !yes {
p
} else if let Prec::Num(num) = p {
if let Some(i) = num.checked_add(1) {
Prec::Num(i)
} else {
panic!("bump prec out of range")
}
} else {
panic!("infix constants cannot have prec max")
}
}
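// An added illustrative test (name and token are arbitrary): `bump` increments
// a numeric precedence when `yes` is set and leaves it alone otherwise.
#[test]
fn bump_example() {
assert_eq!(bump(false, Str(b"+"), Prec::Num(10)), Prec::Num(10));
assert_eq!(bump(true, Str(b"+"), Prec::Num(10)), Prec::Num(11));
}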
pub fn wc(c: u8) -> bool {
c == b' ' || c == b'\n'
}
/// return true iff a given character is an acceptable ident starter.
pub fn ident_start(c: u8) -> bool {
(b'a'..=b'z').contains(&c) || (b'A'..=b'Z').contains(&c) || c == b'_'
}
/// return true iff a given character is an acceptable ident character.
pub fn ident_rest(c: u8) -> bool {
ident_start(c) || (b'0'..=b'9').contains(&c)
}
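// An added illustrative test: idents must start with a letter or `_` and may
// continue with digits.
#[test]
fn ident_chars_example() {
assert!(ident_start(b'_') && ident_start(b'A'));
assert!(!ident_start(b'3'));
assert!(ident_rest(b'3') && ident_rest(b'z'));
assert!(!ident_rest(b' '));
}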
/// Remove the leading and trailing whitespace from a byte slice.
pub fn trim(mut bytes: &[u8]) -> &[u8] {
while let [hd, tl @ ..] = bytes {
if wc(*hd) {
bytes = tl;
} else {
break
}
}
while let [lhs @ .., last] = bytes {
if wc(*last) {
bytes = lhs
} else {
break
}
}
bytes
}
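// An added illustrative test (the input is arbitrary): `trim` strips the
// leading/trailing whitespace bytes recognized by `wc` (space and newline).
#[test]
fn trim_example() {
assert_eq!(trim(b"  term foo;\n"), b"term foo;");
}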
impl<'b, 'a: 'b> MmzMem<'a> {
pub fn is_empty(&self) -> bool {
self.mmz_pos == self.mmz.len()
}
pub fn cur(&self) -> Option<u8> {
self.mmz.get(self.mmz_pos).copied()
}
pub fn cur_slice(&self) -> &'a [u8] {
self.mmz.get(self.mmz_pos..).unwrap_or_else(|| &[])
}
pub fn advance(&mut self, n: usize) {
self.mmz_pos += n;
}
pub fn parse_until(
&mut self,
bump: &mut bumpalo::Bump,
stmt_cmd: StmtCmd,
item: Option<Either<Term<'a>, Assert<'a>>>
) -> Res<()> {
'outer: loop {
'inner: loop {
let mut mmz_st = MmzState::new_from(&mut *self, bump);
if mmz_st.cur().is_none() {
break 'inner
}
match { mmz_st.skip_ws(); mmz_st.peek_word() } {
b"provable" | b"strict" | b"free" | b"pure" | b"sort" => return mmz_st.parse_sort(),
b"term" | b"def" => {
if let Some(L(t)) = item {
return mmz_st.parse_termdef(t)
} else {
return Err(VerifErr::Unreachable(file!(), line!()));
}
}
b"axiom" | b"theorem" => {
make_sure!(matches!(stmt_cmd, StmtCmd::Axiom {..}) || matches!(stmt_cmd, StmtCmd::Thm {..}));
if let Some(R(assert)) = item {
return mmz_st.parse_assert(assert)
} else {
return Err(VerifErr::Unreachable(file!(), line!()));
}
},
b"delimiter" => mmz_st.delims()?,
b"prefix" | b"infixl" | b"infixr" => mmz_st.simple_notation()?,
b"notation" => mmz_st.gen_notation()?,
b"coercion" => mmz_st.coercion()?,
b"import" => mmz_st.parse_import()?,
b"input" => return Err(VerifErr::Msg(format!("`input` statements are not currently suppprted"))),
b"output" => return Err(VerifErr::Msg(format!("Output statements are not currently supported"))),
_ => break 'inner,
}
}
if let Ok(()) = self.next_mmz_file() {
continue 'outer
} else {
return Err(VerifErr::Msg(
format!("`parse_until` ran out of input with remaining verification obligations.")
))
}
}
}
}
impl<'b, 'a: 'b> MmzState<'b, 'a> {
pub fn skip_ws(&mut self) {
while !self.is_empty() {
match self.cur_slice() {
[b'-', b'-', ..] => {
self.advance(2);
// skip the rest of the line;
while self.cur().map(|c| c != b'\n').unwrap_or(false) {
self.advance(1)
}
},
[hd, ..] if wc(*hd) => self.advance(1),
_ => break
}
}
}
pub fn guard(&mut self, c: u8) -> Res<u8> {
self.skip_ws();
if self.cur() == Some(c) {
self.advance(1);
Ok(c)
} else {
Err(VerifErr::Msg(
format!("guard failed: expected `{}`, got {:?} @ {}", c as char, self.cur(), self.mem.mmz_pos)
))
}
}
/// Given an expression `(e: x)` and a target sort `z`, try to coerce it to an expression `(e: z)`.
pub fn coerce(&mut self, e_x: MmzExpr<'b>, z: SortNum) -> Res<MmzExpr<'b>> {
// If the task is coercion of `(e: x)` to `(e: z)` where `x == z`, just return `e`.
if e_x.sort() == z {
return Ok(e_x)
}
// If `z` is the provable sort that `x` coerces to (via `coe_prov`), return `(e: x)` unchanged
if let Some(tgt_id) = self.mem.coe_prov.get(&e_x.sort()) {
if z == *tgt_id {
return Ok(e_x)
}
}
// Find the existing coercion `x > z`
let coe = none_err!(self.mem.coes.get(&e_x.sort()).and_then(|m| m.get(&z)).cloned())?;
match coe {
// If `my_coe: x > z` is a straight-shot, just return `(my_coe (e: x)) : z`
Coe::Single { term_num } => {
Ok(MmzExpr::App {
term_num,
num_args: 1,
args: self.alloc(bumpalo::vec![in self.bump; e_x]),
sort: z
})
},
// If this is a transitive coercion `x > y > z`, return `(y_to_z (x_to_y (e: x)))`
Coe::Trans { middleman_sort: y, .. } => {
self.coerce(e_x, y)
.and_then(|y_expr| self.coerce(y_expr, z))
},
}
}
fn kw(&mut self, k: &[u8]) -> Option<&'a [u8]> {
let rollback = self.mem.mmz_pos;
self.skip_ws();
if self.cur_slice().starts_with(k) {
let taken_kw = &self.cur_slice()[0..k.len()];
self.advance(k.len());
Some(taken_kw)
} else {
self.mem.mmz_pos = rollback;
None
}
}
// Kind of a lot of effort to avoid allocation.
fn delim_kind(&mut self) -> Res<DelimKind> {
let rollback = self.mem.mmz_pos;
self.skip_ws();
let out = match self.skip_formula() {
Err(_) => Err(VerifErr::Msg(format!("Bad delim stmt"))),
Ok(_) => match self.skip_formula() {
Err(_) => Ok(DelimKind::Both),
Ok(_) => match self.skip_formula() {
Err(_) => Ok(DelimKind::Left),
Ok(_) => Err(VerifErr::Msg(format!("too many formulas in delim stmt (3+)"))),
}
}
};
self.mem.mmz_pos = rollback;
out
}
// Parse, then insert the actual delimiter tokens.
fn delim_aux(&mut self, kind: DelimKind) -> Res<()> {
localize!(self.guard(b'$'))?;
while let Some([c]) = self.math_tok().map(|s| s.as_bytes()) {
if self.mem.delims.iter().any(|(k, _)| k == c) {
return Err(VerifErr::Msg(format!("Cannot redeclare delimiter: {}", *c as char)))
} else {
self.mem.delims.push((*c, kind))
}
}
if kind == DelimKind::Both || kind == DelimKind::Right {
localize!(self.guard(b';'))?;
}
Ok(())
}
// 1. Find the `delimiter` keyword
// 2. Determine whether we have a Both or Left/Right kind of delimiter statement
// 3. Use `delim_aux` to parse and insert the actual delimiter tokens.
pub fn delims(&mut self) -> Res<()> {
self.skip_ws();
if !self.kw(b"delimiter ").is_some() {
let _x = String::from_utf8_lossy(self.peek_word());
return Err(VerifErr::Msg(
format!("Bad delim stmt; keyword `delimiter` failed. Word is: {}", _x)
))
}
let kind = self.delim_kind()?;
self.delim_aux(kind)?;
if kind == DelimKind::Left {
self.delim_aux(DelimKind::Right)?;
}
Ok(())
}
/// Returns the next whitespace-delimited word.
/// Doesn't advance past the word.
fn peek_word(&mut self) -> &'a [u8] {
let rollback = self.mem.mmz_pos;
self.skip_ws();
let start = self.mem.mmz_pos;
loop {
match self.cur() {
None => break,
Some(c) if wc(c) => break,
Some(_) => self.advance(1),
}
}
let out = &self.mem.mmz[start..self.mem.mmz_pos];
self.mem.mmz_pos = rollback;
out
}
/// EITHER a dummy ident (prefixed with `.`) OR a regular `ident_`
fn dummy_ident(&mut self) -> Res<Str<'a>> {
let rollback = self.mem.mmz_pos;
self.skip_ws();
let start = self.mem.mmz_pos;
if let Some(b'.') = self.cur() {
self.advance(1);
}
match self.cur() {
Some(c) if ident_start(c) => self.advance(1),
_ => {
self.mem.mmz_pos = rollback;
return Err(VerifErr::Msg(
format!("ident_ failed; word: {}", String::from_utf8_lossy(self.peek_word()))
))
}
}
while let Some(c) = self.cur() {
if ident_rest(c) {
self.advance(1)
} else {
break
}
}
Ok(Str(&self.mem.mmz[start..self.mem.mmz_pos]))
}
fn ident(&mut self) -> Res<Str<'a>> {
let rollback = self.mem.mmz_pos;
let ident = self.ident_()?;
if let [b'_', ..] = ident.as_bytes() {
self.mem.mmz_pos = rollback;
return Err(VerifErr::Msg(format!("ident got underscore")))
}
Ok(ident)
}
fn ident_(&mut self) -> Res<Str<'a>> {
let rollback = self.mem.mmz_pos;
self.skip_ws();
let start = self.mem.mmz_pos;
match self.cur() {
Some(c) if ident_start(c) => self.advance(1),
_ => {
self.mem.mmz_pos = rollback;
return Err(VerifErr::Msg(
format!("ident_ failed; word: {}", String::from_utf8_lossy(self.peek_word()))
))
}
}
while let Some(c) = self.cur() {
if ident_rest(c) {
self.advance(1)
} else {
break
}
}
Ok(Str(&self.mem.mmz[start..self.mem.mmz_pos]))
}
pub fn parse_u32(&mut self) -> Option<u32> {
self.skip_ws();
let mut acc = None;
while let Some(c) = self.cur() {
if c.is_ascii_digit() {
self.advance(1);
acc = Some((acc.unwrap_or(0) * 10) + ((c as u32) - 48))
} else {
break
}
}
acc
}
}
fn add_nota_info<'a>(
m: &mut HashMap<Str<'a>, NotationInfo<'a>>,
tok: Str<'a>,
n: NotationInfo<'a>,
) -> Res<()> {
if let Some(e) = m.insert(tok, n.clone()) {
if e != n {
return Err(VerifErr::Msg(format!("Incompatible error in add_nota_info")))
}
}
Ok(())
}
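// An added illustrative test (the token, idents, and notation contents are made
// up): `add_nota_info` accepts re-registration of an identical entry for a
// token but rejects a conflicting one.
#[test]
fn add_nota_info_example() {
let mut m = HashMap::new();
let tok = Str(b"+");
let info = NotationInfo {
term_num: 0,
term_ident: Str(b"add"),
rassoc: false,
lits: Arc::from(vec![NotationLit::Const(Str(b"+"))]),
};
assert!(add_nota_info(&mut m, tok, info.clone()).is_ok());
assert!(add_nota_info(&mut m, tok, info.clone()).is_ok());
let conflicting = NotationInfo { rassoc: true, ..info };
assert!(add_nota_info(&mut m, tok, conflicting).is_err());
}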
impl<'b, 'a: 'b> MmzState<'b, 'a> {
pub fn prec_lvl(&mut self) -> Option<Prec> {
if self.kw(b"max").is_some() {
Some(Prec::Max)
} else {
self.parse_u32().map(|n| Prec::Num(n))
}
}
fn gen_notation(&mut self) -> Res<()> {
none_err!(self.kw(b"notation "))?;
let ident = self.ident()?;
let term = self.mem.get_term_by_ident(&ident)?;
let _binders = self.binders(term.args(), "notation")?;
localize!(self.guard(b'='))?;
let prec_const = self.prec_const()?;
let mut lits = vec![L(prec_const)];
loop {
self.skip_ws();
let rollback = self.mem.mmz_pos;
match self.prec_const() {
Ok((s, p)) => {
lits.push(L((s, p)));
},
Err(_) => {
self.mem.mmz_pos = rollback;
match self.ident() {
Ok(y) => {
lits.push(R(y));
},
Err(_) => break,
}
}
}
}
// notation_literal
self.elab_gen_nota(
term.term_num,
ident,
term.num_args_no_ret(),
lits
)?;
localize!(self.guard(b';'))?;
Ok(())
}
fn add_const(&mut self, tk: Str<'a>, prec: Prec) -> Res<()> {
if tk.as_bytes() == b"(" || tk.as_bytes() == b")" {
return Err(VerifErr::Msg(format!("Parens not allowed as notation consts")));
}
match self.mem.consts.insert(tk, prec) {
Some(already) if already != prec => {
Err(VerifErr::Msg(format!("Cannot redeclare const {:?} with new prec", tk)))
},
_ => Ok(())
}
}
fn elab_gen_nota(
&mut self,
term_num: u32,
term_ident: Str<'a>,
term_num_args: u16,
lits: Vec<Either<(Str<'a>, Prec), Str<'a>>>,
) -> Res<()> {
let num_args_nota = self.vars_done.len();
assert_eq!(num_args_nota, term_num_args as usize);
let mut vars_used = BumpVec::new_in(self.bump);
let mut it = lits.iter().peekable();
let (mut lits2, mut right_associative, tok, prec) = match it.next() {
None => panic!("Notation requires at least one literal"),
Some(L((tok, prec))) => (Vec::<NotationLit>::new(), true, tok, prec),
Some(R(_)) => panic!("generalized infix not allowed in mm0")
};
self.add_const(*tok, prec.clone())?;
if it.peek().is_none() {
right_associative = false;
}
while let Some(lit) = it.next() {
match *lit {
L((tok, prec)) => {
lits2.push(NotationLit::Const(tok));
self.add_const(tok, prec)?
}
R(var_ident) => {
let prec = match it.peek() {
None => bump(right_associative, *tok, *prec),
Some(L((tok2, prec2))) => bump(true, *tok2, *prec2),
Some(R(_)) => Prec::Max,
};
vars_used.push(var_ident);
let pos = self.vars_done.iter().position(|v| v.ident == Some(var_ident)).unwrap();
lits2.push(NotationLit::Var { pos, prec })
}
}
}
for sig_var in self.vars_done.iter() {
if let Some(ident) = sig_var.ident {
// Dummy variables not allowed in `notation` declarations.
make_sure!(!sig_var.is_dummy);
// Each ident mentioned on the LHS of the colon must be used in the notation.
make_sure!(vars_used.iter().any(|used_ident| *used_ident == ident));
} else {
return Err(VerifErr::Msg(format!("anonymous vars not allowed in general notation declarations!")))
}
}
let info = NotationInfo {
term_num,
term_ident,
rassoc: right_associative,
lits: Arc::from(lits2),
};
//self
//.declared_notations
//.entry(term_ident)
//.or_default()
//.consts
//.push((*tok, None));
add_nota_info(&mut self.mem.prefixes, *tok, info)
}
fn prec_const(&mut self) -> Res<(Str<'a>, Prec)> {
localize!(self.guard(b'('))?;
let constant = self.constant()?;
localize!(self.guard(b':'))?;
let prec = none_err!(self.prec_lvl())?;
localize!(self.guard(b')'))?;
Ok((constant, prec))
}
// For all coercions from `x` to some sort `y`, if `y` is provable,
// insert `x |-> y` into `provs`.
// If a pair `x |-> _` already existed, throw an error.
// For example, if `x` coerces to two provable sorts, so that we get both
// x |-> y1
// x |-> y2
// you have a diamond.
fn update_provs(&mut self) -> Res<()> {
let mut provs = HashMap::with_hasher(Default::default());
for (s1, m) in self.mem.coes.iter() {
for s2 in m.keys() {
if self.mem.outline.get_sort_mods(*s2 as usize)?.is_provable() {
if let Some(_s2) = provs.insert(*s1, *s2) {
println!("Coercion diamond to provable detected");
panic!();
}
}
}
}
Ok(self.mem.coe_prov = provs)
}
fn add_coe(
&mut self,
term: Term,
_term_ident: Str<'a>,
from: u8,
to: u8,
) -> Res<()> {
self.add_coe_raw(term, from, to)?;
self.update_provs()?;
//self
//.declared_notations
//.entry(term_ident)
//.or_default()
//.has_coe = true;
Ok(())
}
/// Suppose we want to add some coercion `my_coe: x > y;`
///
/// In doing so, we need to consider the transitive coercions on either side.
/// That is, coercions from a sort `w` to `x`, and coercions from `y` to a sort `z`.
/// Specifically, we need to:
/// 1. Account for new transitive coercions that will result from the addition
/// 2. Make sure we didn't create any cycles
/// 3. Make sure we didn't create any diamonds.
fn add_coe_raw(
&mut self,
term: Term,
x: u8,
y: u8,
) -> Res<()> {
// If the coercion `x > y` already exists, do nothing, otherwise continue
// knowing that `x > y` is unique.
match self.mem.coes.get(&x).and_then(|m| m.get(&y)) {
Some(&Coe::Single { term_num }) if term_num == term.term_num => return Ok(()),
_ => {}
}
// Make the new coercion `x > y` with whatever term_num corresponds to `my_coe`.
let coe_x_y = Arc::new(Coe::Single { term_num: term.term_num });
// Holds:
// 1. The original coercion `x > y`
// 2. All transitive coercions `w > y`
// 3. All transitive coercions `x > z`
let mut todo = BumpVec::new_in(self.bump);
// For any coercions already declared that go (either directly or transitively)
// from `w > x`, place (w, y, Trans { c1: w > x, middleman: x, c2: x > y }) in `todo`
for (w, coes_w_xs) in self.mem.coes.iter() {
if let Some(coe_w_x) = coes_w_xs.get(&x) {
todo.push((
*w,
y,
Arc::new(Coe::Trans {
c1: Arc::new(coe_w_x.clone()),
middleman_sort: x,
c2: coe_x_y.clone()
}),
))
}
}
todo.push((x, y, coe_x_y.clone()));
// For any existing coercion `y > z`, push
// (x, z, Trans { x > y, y, y > z })
// onto `todos`
if let Some(m) = self.mem.coes.get(&y) {
for (z, y_to_z) in m {
todo.push((
x,
*z,
Arc::new(Coe::Trans{
c1: coe_x_y.clone(),
middleman_sort: y,
c2: Arc::new(y_to_z.clone())
}),
))
}
}
// If the addition of `x > y` would create a transitive coercion
// such that `w > x > y > w`, we've created a cycle.
for (w, z, coe) in todo {
if w == z {
return Err(VerifErr::Msg(format!("Coercion cycle detected!")));
}
// If, when inserting the transitive coercion `w > z`, we find an existing
// coercion `w > z`, we know there's a diamond being created since the first
// thing we did was confirm that there was no existing coercion `x > y`.
// Therefore, there's some alternate path between `w` and `z`.
if let Some(_) = self.mem.coes.entry(w).or_default().insert(z, coe.as_ref().clone()) {
return Err(VerifErr::Msg(format!("Coercion diamond detected!")));
}
}
Ok(())
}
fn elab_coe(
&mut self,
term: Term,
term_ident: Str<'a>,
from: u8,
to: u8,
) -> Res<()> {
assert_eq!(term.num_args_no_ret(), 1);
self.add_coe(term, term_ident, from, to)
}
fn coercion(&mut self) -> Res<()> {
none_err!(self.kw(b"coercion "))?;
let ident = self.ident()?;
localize!(self.guard(b':'))?;
let from = self.ident()?;
localize!(self.guard(b'>'))?;
let to = self.ident()?;
localize!(self.guard(b';'))?;
let term = self.mem.get_term_by_ident(&ident)?;
let from_num = self.mem.get_sort_num(from)?;
let to_num = self.mem.get_sort_num(to)?;
self.elab_coe(term, ident, from_num, to_num)
}
fn simple_notation(&mut self) -> Res<()> {
let fix = if self.kw(b"prefix ").is_some() {
Fix::Prefix
} else if self.kw(b"infixl ").is_some() {
Fix::Infixl
} else if self.kw(b"infixr ").is_some() {
Fix::Infixr
} else {
return Err(VerifErr::Msg(format!("Expected fix")))
};
let ident = self.ident()?;
let term = self.mem.get_term_by_ident(&ident)?;
localize!(self.guard(b':'))?;
let constant = self.constant()?;
if !self.kw(b"prec").is_some() {
return localize!(Err(VerifErr::Msg(format!("Missing prec keyword"))))
}
let prec = none_err!(self.prec_lvl())?;
localize!(self.guard(b';'))?;
match fix {
Fix::Prefix => self.elab_simple_prefix(ident, term, constant, prec),
Fix::Infixl => self.elab_simple_infix(ident, term, constant, prec, false),
Fix::Infixr => self.elab_simple_infix(ident, term, constant, prec, true),
}
}
fn elab_simple_prefix(
&mut self,
term_ident: Str<'a>,
term: Term<'a>,
constant: Str<'a>,
prec: Prec,
) -> Res<()> {
let mut lits = Vec::new();
// `n` here is all_args - 2 (i.e. `num_args_no_ret() - 1`): the first `n` args get `Prec::Max`, the last one gets the declared prec.
if let Some(n) = term.num_args_no_ret().checked_sub(1) {
for i in 0..n {
lits.push(NotationLit::Var {
pos: i as usize,
prec: Prec::Max,
})
}
lits.push(NotationLit::Var { pos: n as usize, prec });
}
self.add_const(constant, prec)?;
let info = NotationInfo {
term_num: term.term_num,
term_ident,
rassoc: true,
lits: Arc::from(lits),
};
//self
//.declared_notations
//.entry(term_ident)
//.or_default()
//.consts
//.push((constant, Some(Fix::Prefix)));
add_nota_info(
&mut self.mem.prefixes,
constant,
info
)
}
fn elab_simple_infix(
&mut self,
term_ident: Str<'a>,
term: Term<'a>,
constant: Str<'a>,
prec: Prec,
is_infixr: bool,
) -> Res<()> {
match prec {
Prec::Max => return Err(VerifErr::Msg(format!("Max not allowed for simple infix notations"))),
Prec::Num(n) => {
let i2 = none_err!(n.checked_add(1))?;
if term.num_args_no_ret() != 2 {
return localize!(Err(VerifErr::Msg(
format!("infix notations must be for terms with 2 arguments. {:?} had {}", term_ident, term.num_args_no_ret())
)))
}
let lits = if is_infixr {
vec![
NotationLit::Var { pos: 0, prec: Prec::Num(i2) },
NotationLit::Const(constant),
NotationLit::Var { pos: 1, prec },
]
} else {
vec![
NotationLit::Var { pos: 0, prec },
NotationLit::Const(constant),
NotationLit::Var { pos: 1, prec: Prec::Num(i2) }
]
};
self.add_const(constant, prec)?;
let info = NotationInfo {
term_num: term.term_num,
term_ident,
rassoc: is_infixr,
lits: Arc::from(lits),
};
//self
//.declared_notations
//.entry(term_ident)
//.or_default()
//.consts
//.push((constant, Some(if is_infixr { Fix::Infixr } else { Fix::Infixl })));
add_nota_info(&mut self.mem.infixes, constant, info)
}
}
}
fn sort(&mut self) -> Res<(Str<'a>, Mods)> {
let mut mods = Mods::new();
while let Some(_) = self.cur() {
if self.kw(b"provable ").is_some() {
mods |= Mods::provable();
} else if self.kw(b"strict ").is_some() {
mods |= Mods::strict();
} else if self.kw(b"pure ").is_some() {
mods |= Mods::pure();
} else if self.kw(b"free ").is_some() {
mods |= Mods::free();
} else if self.kw(b"sort ").is_some() {
break;
} else {
return Err(VerifErr::Msg(format!("Bad sort parse")))
}
}
let ident = self.ident()?;
localize!(self.guard(b';'))?;
Ok((ident, mods))
}
pub fn parse_sort(&mut self) -> Res<()> {
let (ident, mods) = self.sort()?;
let mmb_mods = self.mem.outline.get_sort_mods(self.mem.num_sorts_done() as usize)?;
make_sure!(mods == mmb_mods);
self.mem.add_sort(ident);
Ok(())
}
pub fn parse_termdef(
&mut self,
term: Term<'a>,
) -> Res<()> {
make_sure!(self.kw(b"term ").or(self.kw(b"def ")).is_some());
let ident = self.ident()?;
let mode = if term.is_def() { "def" } else { "term" };
self.binders(term.args(), mode)?;
if term.is_def() && self.peek_word() == b"=" {
localize!(self.guard(b'='))?;
let rhs_e = self.expr_()?;
let rhs_e = self.coerce(rhs_e, self.mem.coe_prov_or_else(rhs_e.sort()))?;
self.check_expr(term.unify(), rhs_e, UMode::UDef)?;
}
localize!(self.guard(b';'))?;
self.mem.add_termdef(ident);
Ok(())
}
pub fn parse_assert(
&mut self,
assert: Assert<'a>,
) -> Res<()> {
make_sure!(self.kw(b"axiom ").or(self.kw(b"theorem ")).is_some());
let ident = self.ident().unwrap();
let _binders = self.binders(assert.args(), "assert")?;
let tgt = self.hyps.pop().unwrap().expr;
self.check_expr(
assert.unify(),
tgt,
UMode::UThm
)?;
localize!(self.guard(b';'))?;
self.mem.add_assert(ident);
Ok(())
}
pub fn parse_import(&mut self) -> Res<()> {
make_sure!(self.kw(b"import ").is_some());
localize!(self.guard(b'"'))?;
while let Some(d) = self.cur() {
if d == b'"' {
self.advance(1);
localize!(self.guard(b';'))?;
self.skip_ws();
break
} else {
self.advance(1);
}
}
Ok(())
}
fn binders(
&mut self,
args: Args<'a>,
mode: &str,
) -> Res<()> {
// This rolling back of the binders on failure is just to make eventual error-reporting
// nicer. If part of the binder group fails, we want to be able to fail the whole group.
let (mut todo_len, mut done_len) = (0, 0);
// Loop over 'getting a binder group, like
// iter1.. {x: nat}
// iter_n.. (ph ps: wff x)
while let Some(_) = self.cur() {
// The portion of the declaration's arguments that we haven't yet parsed.
let args_todo = args.skip(self.non_dummy_vars().count());
match self.binder_group(args_todo, mode) {
Ok(_) => {
todo_len = self.vars_todo.len();
done_len = self.vars_done.len();
continue
}
Err(_) => {
self.vars_todo.truncate(todo_len);
self.vars_done.truncate(done_len);
break
}
}
}
localize!(self.guard(b':'))?;
let args_todo = args.skip(self.non_dummy_vars().count());
self.return_ty(args_todo, mode)
}
// Parse the variable names on the left hand side of the colon.
// The (x y) in `(x y : A)`, or the `a` and `b` in {a .b: T}
fn binder_idents(&mut self, mode: &str) -> Res<()> {
while let Ok(var_ident) = if mode == "def" { self.dummy_ident() } else { self.ident_() } {
match var_ident.as_bytes() {
[b'.', tl @ ..] => {
make_sure!(mode == "def");
self.vars_todo.push((Str(tl), true))
},
_ => self.vars_todo.push((var_ident, false))
}
}
Ok(())
}
fn binder_group(&mut self, args_todo: std::iter::Skip<Args<'a>>, mode: &str) -> Res<()> {
let opener = localize!(self.guard(b'(').or(self.guard(b'{')))?;
let num_non_dummy_before = self.non_dummy_vars().count();
// Get the variable idents/names, put them in `vars_todo` until we know their type
self.binder_idents(mode)?;
// Find and skip past the type separator `:`
localize!(self.guard(b':'))?;
// Try to parse a formula or dep_type as appropriate.
// Both of these functions then transfer the new vars added to `vars_todo`
// into `vars_done`, adding the appropriate type info.
self.skip_ws();
if let Some(b'$') = self.cur() {
self.binder_fml(opener == b'{', mode)?;
} else {
self.binder_dep_ty(opener == b'{')?;
}
// Type-check the new variables we parsed. For bound (curly-brace) variables, make sure the high byte (bound bit and sort)
// matches what the mmb file's args have (since the low 7 bytes are just the bound var index, which may differ).
//
// For regular variables, assert that the entire type is equal.
make_sure!(args_todo.len() >= self.non_dummy_vars().skip(num_non_dummy_before).count());
for (l, r) in self.non_dummy_vars().skip(num_non_dummy_before).zip(args_todo) {
if opener == b'(' {
make_sure!(l.ty == r)
} else {
make_sure!(l.ty.high_bit() == r.high_bit())
}
}
match opener {
b'(' => { localize!(self.guard(b')'))?; }
b'{' => { localize!(self.guard(b'}'))?; }
_ => return Err(VerifErr::Msg(format!("Binder group has mismatched braces")))
}
Ok(())
}
/// This is only used in one spot, so it expects the parser to already be at the leading `$`
fn binder_fml(&mut self, bound: bool, mode: &str) -> Res<()> {
make_sure!(mode == "assert");
// No bound hyps
make_sure!(!bound);
// Has to be either an axiom or a theorem to have hyps
let math_expr = self.expr_()?;
let math_expr = self.coerce(math_expr, self.mem.coe_prov_or_else(math_expr.sort()))?;
for (ident, dummy) in self.vars_todo.iter().skip(self.vars_done.len()) {
make_sure!(!dummy);
let pos = self.vars_done.len();
self.hyps.push(MmzHyp {
ident: Some(*ident),
pos: Some(pos),
expr: math_expr
})
}
Ok(())
}
/// Parse the `wff x y` from `(ph: wff x y)`
/// where `x` and `y` are previously declared dependencies.
fn binder_dep_ty(&mut self, bound: bool) -> Res<()> {
let sort_ident = self.ident()?;
let sort_num = self.mem.get_sort_num(sort_ident)?;
let mut ty_accum = Type::new(bound);
ty_accum.add_sort(sort_num);
if bound {
ty_accum.inner |= self.take_next_bv();
} else {
while let Ok(dep_ident) = self.ident() {
make_sure!(!bound);
let dep_pos = 1 + none_err!(self.potential_deps().position(|v| v.ident == Some(dep_ident)))?;
ty_accum.add_dep(dep_pos as u64);
}
}
for (ident, dummy) in self.vars_todo.iter().skip(self.vars_done.len()) {
let pos = self.vars_done.len();
self.vars_done.push(MmzVar {
ident: Some(*ident),
pos,
ty: ty_accum,
is_dummy: *dummy
})
}
Ok(())
}
fn return_ty(&mut self, mut arrow_args: std::iter::Skip<Args<'a>>, mode: &str) -> Res<()> {
while let Some(_) = self.cur() {
// If it's a hypothesis
if let Some(b'$') = { self.skip_ws(); self.cur() } {
make_sure!(mode == "assert");
let math_expr = self.expr_()?;
let math_expr = self.coerce(math_expr, self.mem.coe_prov_or_else(math_expr.sort()))?;
self.hyps.push(MmzHyp { ident: None, pos: None, expr: math_expr });
} else {
// If it's an arrow type.
let sort_ident = self.ident()?;
let sort_num = self.mem.get_sort_num(sort_ident)?;
let mut ty_accum = Type::new_with_sort(sort_num);
while let Ok(dep_ident) = self.ident() {
let dep_pos = 1 + none_err!(self.potential_deps().position(|v| v.ident == Some(dep_ident)))?;
ty_accum.add_dep(dep_pos as u64);
}
// Type-check the return type.
make_sure!(ty_accum == none_err!(arrow_args.next())?);
}
if let Some(b'>') = { self.skip_ws(); self.cur() } {
self.advance(1);
} else {
break
}
}
Ok(())
}
pub fn check_expr(
&mut self,
u: UnifyIter,
tgt: MmzExpr<'b>,
mode: UMode,
) -> Res<()> {
self.ustack.push(tgt);
for v in self.vars_done.iter().filter(|v| !v.is_dummy) {
self.uheap.push(MmzExpr::Var(*v))
}
for maybe_cmd in u {
match maybe_cmd? {
UnifyCmd::Ref(i) => {
let heap_elem = *&self.uheap[i as usize];
let stack_elem = none_err!(self.ustack.pop())?;
if heap_elem != stack_elem {
return Err(VerifErr::Msg(format!("check_expr Ref eq test went bad")))
}
}
UnifyCmd::Term { term_num, save } => {
let p = none_err!(self.ustack.pop())?;
if let MmzExpr::App { term_num:n2, args, .. } = p {
make_sure!(term_num == n2);
self.ustack.extend(args.into_iter().rev());
if save {
self.uheap.push(p);
}
} else {
return Err(VerifErr::Msg(format!("UnifyCmd expected term")))
}
},
UnifyCmd::Dummy { sort_id } => {
make_sure!(mode == UMode::UDef);
let p = none_err!(self.ustack.pop())?;
if let MmzExpr::Var(MmzVar { ty, is_dummy, .. }) = p {
make_sure!(sort_id == ty.sort());
make_sure!(is_dummy);
self.uheap.push(p)
} else {
return localize!(Err(VerifErr::Msg(format!("check_expr failed at dummy"))))
}
}
UnifyCmd::Hyp => self.ustack.push(none_err!(self.hyps.pop())?.expr),
}
}
Ok(self.uheap.clear())
}
}
pub mod parse;
pub mod math_parser;
use std::convert::TryFrom;
use std::collections::HashMap;
use std::sync::Arc;
use bumpalo::Bump;
use bumpalo::collections::Vec as BumpVec;
use crate::none_err;
use crate::conv_err;
use crate::Outline;
use crate::util::{
Either::*,
Str,
SortNum,
TermNum,
AssertNum,
SortIdent,
TermIdent,
AssertIdent,
VerifErr,
Res,
Type,
Term
};
use crate::mmb::stmt::StmtCmd;
pub type MathStr<'b, 'a> = BumpVec<'b, Str<'a>>;
// Stack item
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum MmzExpr<'b> {
Var(MmzVar<'b>),
App {
term_num: u32,
num_args: u16,
args: &'b [MmzExpr<'b>],
sort: SortNum,
},
}
impl<'b> MmzExpr<'b> {
fn sort(self) -> SortNum {
match self {
MmzExpr::Var(v) => v.sort(),
MmzExpr::App { sort, .. } => sort
}
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct MmzHyp<'a> {
pub ident: Option<Str<'a>>,
pub pos: Option<usize>,
pub expr: MmzExpr<'a>,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct MmzVar<'a> {
pub ident: Option<Str<'a>>,
pub pos: usize,
pub ty: Type,
pub is_dummy: bool,
}
impl<'a> MmzVar<'a> {
pub fn is_bound(self) -> bool {
self.ty.is_bound()
}
pub fn sort(self) -> u8 {
self.ty.sort()
}
}
//#[derive(Debug)]
//pub struct DeclaredNotation<'a> {
// pub has_coe: bool,
// pub consts: Vec<(Str<'a>, Option<Fix>)>
//}
//impl<'a> std::default::Default for DeclaredNotation<'a> {
// fn default() -> Self {
// DeclaredNotation {
// has_coe: false,
// consts: Vec::new()
// }
// }
//}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum DelimKind {
Left,
Right,
Both,
}
/// Operator precedence for user-declared notation.
#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum Prec {
Num(u32),
Max,
}
/// Information about a user-declared notation: the term it denotes, its associativity, and its literal pattern.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct NotationInfo<'a> {
pub term_num: u32,
pub term_ident: Str<'a>,
pub rassoc: bool,
pub lits: Arc<[NotationLit<'a>]>,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum Fix {
Infixl,
Infixr,
Prefix,
}
/// Inside of a general notation declaration, you can either have variables (`Var`) or notation symbols.
/// For example, in the notation for `peano.mm0/sb` which represents variable substitution,
///```text
/// notation sb (a: nat) {x: nat} (p: wff x): wff =
/// ($[$:41) a ($/$:0) x ($]$:0) p;
///```
/// You have three variables [ a, x, p ]
///
/// and three `Const` items, [ $[$, $/$, $]$ ]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum NotationLit<'a> {
Var { pos: usize, prec: Prec },
Const(Str<'a>),
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum Coe {
// term_id
Single {
term_num: TermNum,
},
// transitive
// `Trans(c1, m, c2)` asserts that `c1: s1 -> m` and `c2: m -> s2` (so we get a transitive
// coercion from `s1` to `s2`).
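// For example (with hypothetical sorts), if `c1` coerces `nat -> int` and `c2` coerces
// `int -> real`, then `Trans { c1, middleman_sort: int, c2 }` is the composite
// coercion `nat -> real`.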
Trans {
// (sA -> sB)
c1: Arc<Coe>,
// sB
middleman_sort: SortNum,
// (sB -> sC)
c2: Arc<Coe>
},
}
pub struct MmzMem<'a> {
pub outline: &'a Outline<'a>,
pub mmz: &'a [u8],
pub mmz_files: &'a [String],
pub mmz_file_num: usize,
pub mmz_pos: usize,
pub delims: Vec<(u8, DelimKind)>,
/// Maps the string representing a user-declared notation to its associated precedence value.
/// For example, `/\` |-> `Prec::Num(34)`.
pub consts: HashMap<Str<'a>, Prec>,
/// A map containing prefix notations.
pub prefixes: HashMap<Str<'a>, NotationInfo<'a>>,
/// A map containing infix notations.
pub infixes: HashMap<Str<'a>, NotationInfo<'a>>,
// unneeded
//pub declared_notations: FxIndexMap<Str<'a>, DeclaredNotation<'a>>,
pub nonlocal_termdefs: HashMap<TermIdent<'a>, TermNum>,
pub nonlocal_asserts: HashMap<AssertIdent<'a>, AssertNum>,
sorts: Vec<SortIdent<'a>>,
pub provs: HashMap<SortNum, bool>,
/// `sort_id |-> [sort_id |-> coe]`
///
/// A map of sort pairs `s1/from, s2/to` to the coercion `c: s1 -> s2`.
pub coes: HashMap<SortNum, HashMap<SortNum, Coe>>,
/// A map of sorts `s` to some sort `t` such that `t` is provable and `c: s -> t` is in `coes`,
/// if one exists.
pub coe_prov: HashMap<SortNum, SortNum>,
num_sorts: u8,
num_termdefs: u32,
num_asserts: u32,
}
impl<'a> MmzMem<'a> {
//fn new_for_test() -> Res<Self> {
//}
/// The main constructor for `State`.
pub fn new_from(outline: &'a Outline<'a>) -> Res<Self> {
Ok(MmzMem {
outline,
mmz: none_err!(outline.file_data.mmz_files.first())?.as_bytes(),
mmz_files: outline.file_data.mmz_files.as_slice(),
mmz_pos: 0usize,
mmz_file_num: 0usize,
delims: Vec::new(),
consts: HashMap::new(),
prefixes: HashMap::new(),
infixes: HashMap::new(),
//declared_notations: FxIndexMap::with_hasher(Default::default()),
nonlocal_termdefs: HashMap::new(),
nonlocal_asserts: HashMap::new(),
sorts: Vec::new(),
provs: HashMap::new(),
coes: HashMap::new(),
coe_prov: HashMap::new(),
num_sorts: 0,
num_termdefs: 0,
num_asserts: 0,
})
}
pub fn verify1(&mut self, bump: &mut Bump, stmt: StmtCmd) -> Res<()> {
if !stmt.is_local() {
let item = match stmt {
StmtCmd::Sort {..} => None,
StmtCmd::TermDef {..} => {
let term = self.outline.get_term_by_num(self.num_termdefs_done())?;
Some(L(term))
}
StmtCmd::Axiom {..} | StmtCmd::Thm {..} => {
let assert = self.outline.get_assert_by_num(self.num_asserts_done())?;
Some(R(assert))
}
};
self.parse_until(bump, stmt, item)?;
}
Ok(self.add_declar(stmt))
}
pub fn next_mmz_file(&mut self) -> Res<()> {
self.mmz_file_num += 1;
self.mmz_pos = 0;
Ok(self.mmz = none_err!(self.mmz_files.get(self.mmz_file_num))?.as_bytes())
}
pub fn coe_prov_or_else(&self, from: SortNum) -> SortNum {
self.coe_prov.get(&from).copied().unwrap_or(from)
}
pub fn add_termdef(&mut self, ident: Str<'a>) -> TermNum {
let idx = self.num_termdefs;
assert!(self.nonlocal_termdefs.insert(ident, idx).is_none());
idx
}
pub fn add_assert(&mut self, ident: Str<'a>) -> AssertNum {
let idx = self.num_asserts;
assert!(self.nonlocal_asserts.insert(ident, idx).is_none());
idx
}
pub fn get_sort_num(&self, ident: Str<'a>) -> Res<u8> {
let idx = none_err!(self.sorts.iter().position(|s| *s == ident))?;
conv_err!(u8::try_from(idx))
}
pub fn add_sort(&mut self, ident: Str<'a>) -> SortNum {
let idx = self.sorts.len();
assert!(!self.sorts.iter().any(|x| *x == ident));
self.sorts.push(ident);
let sort_num = u8::try_from(idx).unwrap();
let mods = self.outline.get_sort_mods(idx).unwrap();
assert!(self.provs.insert(sort_num, mods.is_provable()).is_none());
sort_num
}
pub fn num_sorts_done(&self) -> u8{
self.num_sorts
}
pub fn num_termdefs_done(&self) -> u32 {
self.num_termdefs
}
pub fn num_asserts_done(&self) -> u32 {
self.num_asserts
}
pub fn add_declar(&mut self, stmt_cmd: StmtCmd) {
match stmt_cmd {
StmtCmd::Sort {..} => self.num_sorts += 1,
StmtCmd::TermDef {..} => self.num_termdefs += 1,
StmtCmd::Axiom {..} | StmtCmd::Thm {..} => self.num_asserts += 1,
}
}
pub fn get_term_by_ident(&self, ident: &Str<'a>) -> Res<Term<'a>> {
let term_num = none_err!(self.nonlocal_termdefs.get(ident).copied())?;
self.outline.get_term_by_num(term_num)
}
}
//#[derive(Debug)]
pub struct MmzState<'b, 'a: 'b> {
pub mem: &'b mut MmzMem<'a>,
bump: &'b Bump,
vars_todo: BumpVec<'b, (Str<'a>, bool)>,
vars_done: BumpVec<'b, MmzVar<'b>>,
hyps: BumpVec<'b, MmzHyp<'b>>,
ustack: BumpVec<'b, MmzExpr<'b>>,
uheap: BumpVec<'b, MmzExpr<'b>>,
pub next_bv: u64
}
impl<'b, 'a: 'b> MmzState<'b, 'a> {
pub fn new_from(mem: &'b mut MmzMem<'a>, bump: &'b mut Bump) -> MmzState<'b, 'a> {
bump.reset();
MmzState {
mem,
bump,
vars_todo: BumpVec::new_in(bump),
vars_done: BumpVec::new_in(bump),
hyps: BumpVec::new_in(bump),
ustack: BumpVec::new_in(bump),
uheap: BumpVec::new_in(bump),
next_bv: 1u64
}
}
pub fn alloc<A>(&self, item: A) -> &'b A {
&*self.bump.alloc(item)
}
pub fn take_next_bv(&mut self) -> u64 {
let outgoing = self.next_bv;
// Assert we're under the limit of 55 bound variables.
assert!(outgoing >> 56 == 0);
self.next_bv *= 2;
outgoing
}
/// Produce the list of variables that have been parsed and are not dummy variables
/// (they may be either bound or regular).
pub fn non_dummy_vars(&self) -> impl Iterator<Item = MmzVar> {
self.vars_done.iter().filter(|v| !v.is_dummy).copied()
}
/// Produce the list of potential dependencies (bound variables that are not dummies)
pub fn potential_deps(&self) -> impl Iterator<Item = MmzVar> {
self.non_dummy_vars().filter(|v| v.is_bound())
}
/// Produce an iterator over the same elements that the corresponding mmb `Args` would contain (the non-dummy variables).
pub fn args_iter(&self) -> impl Iterator<Item = MmzVar> {
self.vars_done.iter().copied().filter(|v| !v.is_dummy)
}
pub fn is_empty(&self) -> bool {
self.mem.mmz_pos == self.mem.mmz.len()
}
pub fn cur(&self) -> Option<u8> {
self.mem.mmz.get(self.mem.mmz_pos).copied()
}
pub fn cur_slice(&self) -> &'a [u8] {
self.mem.mmz.get(self.mem.mmz_pos..).unwrap_or_else(|| &[])
}
pub fn advance(&mut self, n: usize) {
self.mem.mmz_pos += n;
}
}
use bumpalo::collections::Vec as BumpVec;
use crate::mmz::parse::{ wc, trim };
use crate::none_err;
use crate::localize;
use crate::make_sure;
use crate::mmz::{
MmzState,
NotationLit,
NotationInfo,
MathStr,
MmzExpr,
Prec,
DelimKind,
};
use crate::util::{
Res,
VerifErr,
Str,
Term,
};
const APP_PREC: u32 = 1024;
impl<'b, 'a: 'b> MmzState<'b, 'a> {
/// move past non-comment whitespace
fn skip_math_ws(&mut self) {
while self.cur().map(|c| wc(c)).unwrap_or(false) {
self.advance(1);
}
}
/// Parse a sequence of math tokens delimited by `$` characters.
fn math_string(&mut self) -> Res<MathStr<'b, 'a>> {
localize!(self.guard(b'$'))?;
let mut acc = BumpVec::new_in(self.bump);
while let Some(tok) = self.math_tok() {
acc.push(tok);
}
Ok(acc)
}
/// Try to parse a specific character. When skipping whitespace, don't look for comments,
/// since we're working inside a math string.
fn guard_math(&mut self, c: u8) -> Res<u8> {
self.skip_math_ws();
if self.cur() == Some(c) {
self.advance(1);
Ok(c)
} else {
localize!(Err(VerifErr::Msg(format!("guard_math failed: expected {}", c as char))))
}
}
// Math-strings are tokenized by
// FIRST: splitting on wc
// THEN: splitting AFTER `Left` delimiters,
// THEN: splitting BEFORE `Right` delimiters
// THEN: splitting BEFORE AND AFTER `Both` delimiters
//
// This is slightly more involved than it would seem.
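// For example, assuming a hypothetical delimiter table where `(` is `Left`, `)` is `Right`,
// and `,` is `Both`, the math string `$ (a, b) -> c $` tokenizes as
// `(`, `a`, `,`, `b`, `)`, `->`, `c`.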
pub fn math_tok(&mut self) -> Option<Str<'a>> {
self.skip_math_ws();
if self.cur() == Some(b'$') {
self.advance(1);
return None
}
let start = self.mem.mmz_pos;
while let Some(c) = self.cur() {
match self.mem.delims.iter().find(|(d, _)| *d == c).map(|(_, kind)| kind) {
// If this is the first character, we want to return it even though
// we're breaking before and after `Both` delims
Some(DelimKind::Both) if start == self.mem.mmz_pos => {
self.advance(1);
break
},
// If this is NOT the first character, don't consume it; it has to be
// a standalone token.
Some(DelimKind::Both) => break,
// If this is the first character, keep going
Some(DelimKind::Right) if start == self.mem.mmz_pos => self.advance(1),
Some(DelimKind::Right) => break,
Some(DelimKind::Left) => {
self.advance(1);
break
}
None if c == b'$' => break,
None if wc(c) => break,
None => self.advance(1)
}
}
let out = &self.mem.mmz[start..self.mem.mmz_pos];
if out.is_empty() {
None
} else if let [lhs @ .., b'$'] = out {
Some(Str(trim(lhs)))
} else {
Some(Str(trim(out)))
}
}
fn peek_math_tok(&mut self) -> Option<Str<'a>> {
let rollback = self.mem.mmz_pos;
let next = self.math_tok();
self.mem.mmz_pos = rollback;
next
}
pub fn skip_formula(&mut self) -> Res<()> {
localize!(self.guard(b'$'))?;
while let Some(c) = self.cur() {
self.advance(1);
if c == b'$' {
return Ok(())
}
}
Err(VerifErr::Msg(format!("unclosed formula in `skip_formula`")))
}
/// Parse a math-string while asserting that it consists
/// of only a single token.
pub fn constant(&mut self) -> Res<Str<'a>> {
let s = self.math_string()?;
if let [tk] = s.as_slice() {
Ok(*tk)
} else {
Err(VerifErr::Msg(format!("constant can only be used for math strings comprised of 1 token")))
}
}
/// For initial calls to `expr`; expects the parser to have something
/// surrounded by `$` delimiters.
pub fn expr_(&mut self) -> Res<MmzExpr<'b>> {
localize!(self.guard(b'$'))?;
let out_e = self.expr(Prec::Num(0))?;
self.skip_math_ws();
if let Some(b'$') = self.cur() {
self.advance(1)
}
Ok(out_e)
}
/// For recursive calls to `expr`; doesn't demand a leading `$` token
fn expr(&mut self, p: Prec) -> Res<MmzExpr<'b>> {
let lhs_e = self.prefix(p)?;
self.lhs(p, lhs_e)
}
fn term_app(&mut self, this_prec: Prec, tok: &Str<'a>) -> Res<MmzExpr<'b>> {
let term = self.mem.get_term_by_ident(&tok)?;
if term.num_args_no_ret() == 0 {
Ok(MmzExpr::App {
term_num: term.term_num,
num_args: 0,
args: self.alloc(BumpVec::new_in(self.bump)),
sort: term.sort()
})
} else {
make_sure!(this_prec <= Prec::Num(APP_PREC));
let mut sig_args = BumpVec::new_in(self.bump);
let mut restore = self.mem.mmz_pos;
for arg in term.args() {
if let Ok(e_) = self.expr(Prec::Max) {
sig_args.push(self.coerce(e_, arg.sort())?);
restore = self.mem.mmz_pos;
} else {
self.mem.mmz_pos = restore;
break
}
}
Ok(MmzExpr::App {
term_num: term.term_num,
num_args: term.num_args_no_ret(),
args: self.alloc(sig_args).as_slice(),
sort: term.sort()
})
}
}
fn prefix(&mut self, this_prec: Prec) -> Res<MmzExpr<'b>> {
self.skip_math_ws();
if let Some(b'(') = self.cur() {
self.advance(1);
let e = self.expr(Prec::Num(0))?;
self.guard_math(b')')?;
return Ok(e)
}
let tok = none_err!(self.math_tok())?;
if let Some(q) = self.mem.consts.get(&tok).copied() {
self.prefix_const(this_prec, &tok, q)
} else if let Some(mmz_var) = self.vars_done.iter().find(|v| v.ident == Some(tok)) {
Ok(MmzExpr::Var(*mmz_var))
} else {
self.term_app(this_prec, &tok)
}
}
fn prefix_const(&mut self, last_prec: Prec, tok: &Str<'a>, next_prec: Prec) -> Res<MmzExpr<'b>> {
if next_prec >= last_prec {
if let Some(pfx_info) = self.mem.prefixes.get(tok).cloned() {
let term_num = pfx_info.term_num;
let term = self.mem.outline.get_term_by_num(term_num)?;
let args = self.literals(pfx_info.lits.as_ref(), term)?;
return Ok(MmzExpr::App {
term_num,
num_args: term.num_args_no_ret(),
args: self.alloc(args),
sort: term.sort(),
})
}
}
Err(VerifErr::Msg(format!("bad prefix const")))
}
fn literals(
&mut self,
lits: &[NotationLit<'a>],
term: Term<'a>,
) -> Res<BumpVec<'b, MmzExpr<'b>>> {
let mut math_args = BumpVec::new_in(self.bump);
for lit in lits {
if let NotationLit::Var {..} = lit {
math_args.push(None)
}
}
for lit in lits {
match lit {
NotationLit::Const(fml) => {
let tk = none_err!(self.math_tok())?;
make_sure!(tk == *fml);
}
NotationLit::Var { pos, prec } => {
let e = self.expr(*prec)?;
let nth_arg = none_err!(term.args().nth(*pos))?;
let tgt_sort = nth_arg.sort();
let coerced = self.coerce(e, tgt_sort)?;
match math_args.get_mut(*pos) {
Some(x @ None) => *x = Some(coerced),
_ => return Err(VerifErr::Msg(format!("misplaced variable in math_parser::literals")))
}
}
}
}
let mut out = BumpVec::new_in(self.bump);
for arg in math_args {
match arg {
Some(x) => out.push(x),
None => return Err(VerifErr::Msg(format!("incomplete sequence of args in math_parser::literals")))
}
}
Ok(out)
}
/// If the next peeked token is an *infix* operator with a precedence that is greater than or
/// equal to the last precedence, return (notation token, Prec, notation info)
fn peek_stronger_infix(&mut self, last_prec: Prec) -> Option<(Str<'a>, Prec, NotationInfo<'a>)> {
let peeked = self.peek_math_tok()?;
let next_prec = self.mem.consts.get(&peeked).copied()?;
if next_prec >= last_prec {
if let Some(notation_info) = self.mem.infixes.get(&peeked).cloned() {
Some((peeked, next_prec, notation_info))
} else {
None
}
} else {
None
}
}
/// If the next token is an infix operator with precedence >= the last
/// precedence, return the term it represents, and its precedence, bumping the precedence
/// by 1 if it's left-associative so that the `>=` test will fail if the next operator
/// is the same.
///
/// DOES advance the parser.
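///
/// For example (hypothetical notation): with a left-associative `+` declared at
/// `Prec::Num(10)`, consuming one `+` returns `Prec::Num(11)`, so a second `+` at
/// precedence 10 fails the `>=` test and the expression associates to the left.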
fn take_ge_infix(&mut self, last_prec: Prec) -> Option<Res<(Term<'a>, Prec)>> {
let (_, next_prec, notation_info) = self.peek_stronger_infix(last_prec)?;
let infix_term = self.mem.outline.get_term_by_num(notation_info.term_num);
self.math_tok().unwrap();
Some(
if notation_info.rassoc {
infix_term.map(|t| (t, next_prec))
} else if let Prec::Num(n) = next_prec {
infix_term.map(|t| (t, Prec::Num(n + 1)))
} else {
Err(VerifErr::Msg(format!("bad lhs")))
}
)
}
/// lhs and lhs2 are mutually recursive functions that form the part of the Pratt parser
/// that deals with the presence of infix tokens.
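/// As a rough illustration (with hypothetical notation): for a right-associative infix `->`,
/// `prefix` reads `a`, `lhs` sees the first `->`, parses the right-hand side recursively,
/// and parsing `a -> b -> c` groups as `a -> (b -> c)`.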
fn lhs(&mut self, p: Prec, lhs_e: MmzExpr<'b>) -> Res<MmzExpr<'b>> {
if let Some((ge_infix_term, ge_infix_prec)) = self.take_ge_infix(p).transpose()? {
let rhs_e = self.prefix(ge_infix_prec)?;
let new_lhs_e = self.lhs2(lhs_e, (ge_infix_term, ge_infix_prec), rhs_e)?;
self.lhs(p, new_lhs_e)
} else {
Ok(lhs_e)
}
}
fn lhs2(
&mut self,
lhs_e: MmzExpr<'b>,
(infix_term, infix_prec): (Term<'a>, Prec),
rhs_e: MmzExpr<'b>,
) -> Res<MmzExpr<'b>> {
if let Some((_, next_prec, _)) = self.peek_stronger_infix(infix_prec) {
let new_rhs_e = self.lhs(next_prec, rhs_e)?;
self.lhs2(lhs_e, (infix_term, infix_prec), new_rhs_e)
} else {
let mut args = infix_term.args();
if let (Some(fst), Some(snd), Some(_), None) = (args.next(), args.next(), args.next(), args.next()) {
let end_args = bumpalo::vec![
in self.bump;
self.coerce(lhs_e, fst.sort())?,
self.coerce(rhs_e, snd.sort())?
];
Ok(MmzExpr::App {
term_num: infix_term.term_num,
num_args: infix_term.num_args_no_ret(),
args: self.alloc(end_args),
sort: infix_term.sort()
})
} else {
panic!("lhs2: infix term did not have the expected number of arguments")
}
}
}
}
use bumpalo::collections::Vec as BumpVec;
use crate::util::{
VerifErr,
Res,
};
use crate::mmb::{
MmbItem,
MmbState,
MmbExpr
};
use crate::util::try_next_cmd;
use crate::none_err;
use crate::make_sure;
/// `UNIFY_TERM = 0x30`: See [`UnifyCmd`](super::UnifyCmd).
pub const UNIFY_TERM: u8 = 0x30;
/// `UNIFY_TERM_SAVE = 0x31`: See [`UnifyCmd`](super::UnifyCmd).
pub const UNIFY_TERM_SAVE: u8 = 0x31;
/// `UNIFY_REF = 0x32`: See [`UnifyCmd`](super::UnifyCmd).
pub const UNIFY_REF: u8 = 0x32;
/// `UNIFY_DUMMY = 0x33`: See [`UnifyCmd`](super::UnifyCmd).
pub const UNIFY_DUMMY: u8 = 0x33;
/// `UNIFY_HYP = 0x36`: See [`UnifyCmd`](super::UnifyCmd).
pub const UNIFY_HYP: u8 = 0x36;
/// Unify commands appear in the header data for a `def` or `axiom`/`theorem`.
/// They are executed by the [`ProofCmd::Thm`] command in order to perform
/// substitutions. The state of the unify stack machine is:
///
/// * `MS: Stack<StackEl>`: The main stack, called `S` in the [`ProofCmd`]
/// documentation.
/// Since a unification is called as a subroutine during a proof command,
/// the main stack is available, but most operations don't use it.
/// * `S: Stack<Expr>`: The unify stack, which contains expressions
/// from the target context that are being destructured.
/// * `H: Vec<Expr>`: The unify heap, also known as the substitution. This is
/// initialized with the expressions that the target context would like to
/// substitute for the variable names in the theorem being applied, but
/// it can be extended in order to support substitutions with sharing
/// as well as dummy variables.
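///
/// As a worked sketch (a hypothetical command stream, not taken from a real file),
/// unifying the target `(imp p q)` with unify heap `H = [p, q]` against the stream
/// `UTerm imp; URef 0; URef 1` proceeds as:
/// ```text
/// H; S, (imp p q)  --UTerm imp-->  H; S, q, p
/// H; S, q, p       --URef 0-->     H; S, q        (H0 = p matches)
/// H; S, q          --URef 1-->     H; S           (H1 = q matches)
/// ```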
#[derive(Debug, Clone, Copy)]
pub enum UnifyCmd {
/// ```text
/// UTerm t: S, (t e1 ... en) --> S, en, ..., e1
/// USave: H; S, e --> H, e; S, e
/// UTermSave t = USave; UTerm t:
/// H; S, (t e1 ... en) --> H, (t e1 ... en); S, en, ..., e1
/// ```
/// Pop an element from the stack, ensure that the head is `t`, then
/// push the `n` arguments to the term (in reverse order, so that they
/// are dealt with in the correct order in the command stream).
/// `UTermSave` does the same thing but saves the term to the unify heap
/// before the destructuring.
Term {
/** The term that should be at the head */
term_num: u32,
/** True if we want to recall this substitution for later */
save: bool,
},
/// ```text
/// URef i: H; S, Hi --> H; S
/// ```
/// Get the `i`-th element from the unify heap (the substitution),
/// and match it against the head of the unify stack.
Ref(u32),
/// ```text
/// UDummy s: H; S, x --> H, x; S (where x:s)
/// ```
/// Pop a variable from the unify stack (ensure that it is a name of
/// the appropriate sort) and push it to the heap (ensure that it is
/// distinct from everything else in the substitution).
Dummy { sort_id: u8 },
/// ```text
/// UHyp (UThm mode): MS, |- e; S --> MS; S, e
/// UHyp (UThmEnd mode): HS, e; S --> HS; S, e
/// ```
/// `UHyp` is a command that is used in theorem declarations to indicate that
/// we are about to read a hypothesis. There are two contexts where we read
/// this: when we are first declaring the theorem and checking its statement (`UThmEnd` mode),
/// and later when we are applying the theorem and have to apply a substitution (`UThm` mode).
/// When we are applying the theorem, we have `|- e` on the main stack, and we
/// pop that and load the expression onto the unify stack.
/// When we are checking a theorem, we have been pushing hypotheses onto the
/// hypothesis stack, so we pop it from there instead.
Hyp,
}
impl std::convert::TryFrom<(u8, u32)> for UnifyCmd {
type Error = VerifErr;
fn try_from((cmd, data): (u8, u32)) -> Result<Self, VerifErr> {
Ok(match cmd {
UNIFY_TERM => UnifyCmd::Term {
term_num: data,
save: false,
},
UNIFY_TERM_SAVE => UnifyCmd::Term {
term_num: data,
save: true,
},
UNIFY_REF => UnifyCmd::Ref(data),
UNIFY_DUMMY => {
let sort_id = u8::try_from(data).map_err(|_| {
VerifErr::Msg("Failure to shrink u32 to u8 in proof_cmd try from".to_string())
})?;
UnifyCmd::Dummy { sort_id }
}
UNIFY_HYP => UnifyCmd::Hyp,
_ => return Err(VerifErr::Msg("Unrecognized try_form in try from unifycmd".to_string())),
})
}
}
/// An iterator over a unify command stream.
#[derive(Debug, Clone, Copy)]
pub struct UnifyIter<'a> {
/// The full source file.
pub buf: &'a [u8],
/// The index of the current declaration in the file.
pub pos: usize,
}
impl<'a> Iterator for UnifyIter<'a> {
type Item = Result<UnifyCmd, VerifErr>;
fn next(&mut self) -> Option<Self::Item> {
match try_next_cmd(self.buf, self.pos) {
// err
Err(e) => Some(Err(e)),
// Exhausted
Ok(None) => None,
// next
Ok(Some((stmt, rest))) => {
self.pos = rest;
Some(Ok(stmt))
}
}
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum UMode {
UThm,
UDef,
UThmEnd
}
impl<'b, 'a: 'b> MmbState<'b, 'a> {
pub fn run_unify(
&mut self,
mode: UMode,
unify: UnifyIter,
tgt: &'b MmbItem<'b>,
) -> Res<()> {
self.ustack.push(tgt);
for maybe_cmd in unify {
match maybe_cmd? {
UnifyCmd::Ref(i) => self.unify_ref(i)?,
UnifyCmd::Term { term_num, save } => self.unify_term(term_num, save)?,
UnifyCmd::Dummy { sort_id } => self.unify_dummy(mode, sort_id)?,
UnifyCmd::Hyp => self.unify_hyp(mode)?,
}
}
make_sure!(self.ustack.is_empty());
if mode == UMode::UThmEnd {
make_sure!(self.hstack.is_empty());
}
Ok(self.uheap.clear())
}
fn unify_ref(&mut self, i: u32) -> Res<()> {
let heap_elem = none_err!(self.uheap.get(i as usize).copied())?;
let ustack_elem = none_err!(self.ustack.pop())?;
if heap_elem != ustack_elem {
Err(VerifErr::Msg(format!("Bad unify ref")))
} else {
Ok(())
}
}
pub fn push_ustack_rev_mmb(&mut self, items: &'b BumpVec<'b, &'b MmbItem<'b>>) {
for elem in items.iter().rev() {
self.ustack.push(elem)
}
}
fn unify_term(
&mut self,
term_num: u32,
save: bool
) -> Res<()> {
let p = none_err!(self.ustack.pop())?;
if let MmbItem::Expr(MmbExpr::App { term_num:id2, args, .. }) = p {
make_sure!(term_num == *id2);
for arg in args.iter().rev() {
self.ustack.push(arg)
}
if save {
self.uheap.push(p)
}
Ok(())
} else {
return Err(VerifErr::Unreachable(file!(), line!()));
}
}
fn unify_dummy(
&mut self,
mode: UMode,
sort_id: u8,
) -> Res<()> {
make_sure!(mode == UMode::UDef);
let p = self.ustack.pop().unwrap();
if let MmbItem::Expr(MmbExpr::Var { ty, .. }) = p {
make_sure!(sort_id == ty.sort());
// assert that ty is bound, and get its bv idx (0-55);
let bound_idx = ty.bound_digit()?;
// Ensure the dummy is distinct: nothing already on the unify heap may depend on this bound variable.
for heap_elem in self.uheap.iter() {
let ty = heap_elem.get_ty().unwrap();
make_sure!(ty.inner & bound_idx == 0);
}
Ok(self.uheap.push(p))
} else {
return Err(VerifErr::Unreachable(file!(), line!()));
}
}
fn unify_hyp(&mut self, mode: UMode) -> Res<()> {
if let UMode::UThm = mode {
let proof = none_err!(self.stack.pop())?;
if let MmbItem::Proof(e) = proof {
Ok(self.ustack.push(e))
} else {
return Err(VerifErr::Unreachable(file!(), line!()));
}
} else if let UMode::UThmEnd = mode {
make_sure!(self.ustack.is_empty());
let elem = self.hstack.pop().unwrap();
Ok(self.ustack.push(elem))
} else {
return Err(VerifErr::Unreachable(file!(), line!()));
}
}
}
/// `STMT_AXIOM = 0x02`, starts an `axiom` declaration
pub const STMT_AXIOM: u8 = 0x02;
/// `STMT_SORT = 0x04`, starts a `sort` declaration
pub const STMT_SORT: u8 = 0x04;
// `STMT_TERM = 0x05`, starts a `term` declaration
//pub const STMT_TERM: u8 = 0x05;
/// `STMT_DEF = 0x05`, starts a `def` declaration. (This is the same as
/// `STMT_TERM` because the actual indication of whether this is a
/// def is in the term header)
pub const STMT_DEF: u8 = 0x05;
/// `STMT_THM = 0x06`, starts a `theorem` declaration
pub const STMT_THM: u8 = 0x06;
/// `STMT_LOCAL = 0x08`, starts a `local` declaration
/// (a bit mask to be combined with `STMT_THM` or `STMT_DEF`)
pub const STMT_LOCAL: u8 = 0x08;
/// `STMT_LOCAL_DEF = 0x0D`
pub const STMT_LOCAL_DEF: u8 = STMT_LOCAL | STMT_DEF;
/// `STMT_LOCAL_THM = 0x0E`
pub const STMT_LOCAL_THM: u8 = STMT_LOCAL | STMT_THM;
/// The main part of the proof consists of a sequence of declarations,
/// and these commands denote the different kind of declaration that can
/// be introduced.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum StmtCmd {
/// A new sort. Equivalent to `sort foo;`. This is followed by no data,
/// as the sort data is stored in the header.
Sort { num: Option<u8> },
/// A new axiom. Equivalent to `axiom foo ...`. This is followed by a proof
/// sequence, that should unify with the unify sequence in the header.
Axiom { num: Option<u32> },
/// A new term or def. Equivalent to `term/def foo ...`.
/// If `local` is true, then this is `local def foo`. This is followed by
/// no data, as the header contains the unify sequence and can be checked on its own.
TermDef { num: Option<u32>, local: bool },
/// A new theorem. Equivalent to `(pub) theorem foo ...`, where `local` means
/// that the theorem is not `pub`. This is followed by a proof sequence,
/// that will construct the statement and proof, and should unify
/// with the unify sequence in the header.
Thm { num: Option<u32>, local: bool },
}
impl StmtCmd {
pub fn is_local(self) -> bool {
match self {
| StmtCmd::TermDef { local: true, .. }
| StmtCmd::Thm { local: true, .. } => true,
_ => false
}
}
}
impl std::convert::TryFrom<u8> for StmtCmd {
type Error = ();
fn try_from(cmd: u8) -> Result<Self, ()> {
Ok(match cmd {
STMT_SORT => StmtCmd::Sort { num: None },
STMT_AXIOM => StmtCmd::Axiom { num: None },
STMT_DEF => StmtCmd::TermDef { num: None, local: false },
STMT_LOCAL_DEF => StmtCmd::TermDef { num: None, local: true },
STMT_THM => StmtCmd::Thm { num: None, local: false },
STMT_LOCAL_THM => StmtCmd::Thm { num: None, local: true },
_ => return Err(()),
})
}
}
use bumpalo::collections::Vec as BumpVec;
use crate::mmb::sorts_compatible;
use crate::mmb::unify::UMode;
use crate::util::{
VerifErr,
try_next_cmd,
Res,
Type,
};
use crate::mmb::{
MmbState,
MmbItem,
MmbExpr
};
pub const TYPE_BOUND_MASK: u64 = 1 << 63;
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Mode {
Def,
Thm,
}
use crate::none_err;
use crate::localize;
use crate::make_sure;
/// `PROOF_TERM = 0x10`: See [`ProofCmd`](super::ProofCmd).
pub const PROOF_TERM: u8 = 0x10;
/// `PROOF_TERM_SAVE = 0x11`: See [`ProofCmd`](super::ProofCmd).
pub const PROOF_TERM_SAVE: u8 = 0x11;
/// `PROOF_REF = 0x12`: See [`ProofCmd`](super::ProofCmd).
pub const PROOF_REF: u8 = 0x12;
/// `PROOF_DUMMY = 0x13`: See [`ProofCmd`](super::ProofCmd).
pub const PROOF_DUMMY: u8 = 0x13;
/// `PROOF_THM = 0x14`: See [`ProofCmd`](super::ProofCmd).
pub const PROOF_THM: u8 = 0x14;
/// `PROOF_THM_SAVE = 0x15`: See [`ProofCmd`](super::ProofCmd).
pub const PROOF_THM_SAVE: u8 = 0x15;
/// `PROOF_HYP = 0x16`: See [`ProofCmd`](super::ProofCmd).
pub const PROOF_HYP: u8 = 0x16;
/// `PROOF_CONV = 0x17`: See [`ProofCmd`](super::ProofCmd).
pub const PROOF_CONV: u8 = 0x17;
/// `PROOF_REFL = 0x18`: See [`ProofCmd`](super::ProofCmd).
pub const PROOF_REFL: u8 = 0x18;
/// `PROOF_SYMM = 0x19`: See [`ProofCmd`](super::ProofCmd).
pub const PROOF_SYMM: u8 = 0x19;
/// `PROOF_CONG = 0x1A`: See [`ProofCmd`](super::ProofCmd).
pub const PROOF_CONG: u8 = 0x1A;
/// `PROOF_UNFOLD = 0x1B`: See [`ProofCmd`](super::ProofCmd).
pub const PROOF_UNFOLD: u8 = 0x1B;
/// `PROOF_CONV_CUT = 0x1C`: See [`ProofCmd`](super::ProofCmd).
pub const PROOF_CONV_CUT: u8 = 0x1C;
/// `PROOF_CONV_REF = 0x1D`: See [`ProofCmd`](super::ProofCmd).
pub const PROOF_CONV_REF: u8 = 0x1D;
/// `PROOF_CONV_SAVE = 0x1E`: See [`ProofCmd`](super::ProofCmd).
pub const PROOF_CONV_SAVE: u8 = 0x1E;
/// `PROOF_SAVE = 0x1F`: See [`ProofCmd`](super::ProofCmd).
pub const PROOF_SAVE: u8 = 0x1F;
/// A proof command, which acts on a stack machine with the following components:
///
/// * `H: Vec<StackEl>`: a "heap" consisting of indexable elements that can be copied
/// onto the stack using [`Ref`](ProofCmd::Ref)
/// * `S: Stack<StackEl>`: The main stack, which most operations push and pop from.
/// * `HS: Vec<Expr>`: The hypothesis list, which grows only on [`Hyp`](ProofCmd::Hyp)
/// operations and collects the hypotheses of the theorem.
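///
/// As a worked sketch (a hypothetical command stream, not from a real proof): with heap
/// `H = [p, q]`, the stream `Ref 0; Ref 1; Term imp; Hyp` builds the expression
/// `(imp p q)` and assumes it as a hypothesis:
/// ```text
/// H; .               --Ref 0-->     H; p
/// H; p               --Ref 1-->     H; p, q
/// H; p, q            --Term imp-->  H; (imp p q)
/// HS; H; (imp p q)   --Hyp-->       HS, (imp p q); H, |- (imp p q); .
/// ```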
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ProofCmd {
/// ```text
/// Term t: H; S, e1, ..., en --> H; S, (t e1 .. en)
/// Save: H; S, e --> H, e; S, e
/// TermSave t = Term t; Save:
/// H; S, e1, ..., en --> H, (t e1 .. en); S, (t e1 .. en)
/// ```
///
/// Pop `n` elements from the stack and allocate a new term `t` applied to those
/// expressions. When `save` is used, the new term is also saved to the heap
/// (this is used if the term will ever be needed more than once).
Term {
/** The term to construct */
term_num: u32,
/** True if we should save to the heap */
save: bool,
},
/// ```text
/// Ref i: H; S --> H; S, Hi
/// ```
/// Get the `i`-th heap element and push it on the stack.
Ref(u32),
/// ```text
/// Dummy s: H; S --> H, x; S, x alloc(x:s)
/// ```
/// Allocate a new variable `x` of sort `s`, and push it to the stack and the heap.
Dummy { sort_num: u8 },
/// ```text
/// Thm T: H; S, e1, ..., en, e --> H; S', |- e
/// (where Unify(T): S; e1, ... en; e --> S'; H'; .)
/// Save: H; S, |- e --> H, |- e; S, |- e
/// ```
/// Pop `n` elements from the stack and put them on the unify heap, then call the
/// unifier for `T` with `e` as the target. The unifier will pop additional
/// proofs from the stack if the UHyp command is used, and when it is done,
/// the conclusion is pushed as a proven statement.
///
/// When Save is used, the proven statement is also saved to the heap.
Thm {
/** The theorem to apply */
thm_num: u32,
/** True if we should save to the heap */
save: bool,
},
/// ```text
/// Hyp: HS; H; S, e --> HS, e; H, |- e; S
/// ```
/// This command means that we are finished constructing the expression `e`
/// which denotes a statement, and wish to assume it as a hypothesis.
/// Push `e` to the hypothesis stack, and push `|- e` to the heap.
Hyp,
/// ```text
/// Conv: S, e1, |- e2 --> S, |- e1, e1 =?= e2
/// ```
/// Pop `e1` and `|- e2`, and push `|- e1`, guarded by a convertibility obligation
/// `e1 =?= e2`.
Conv,
/// ```text
/// Refl: S, e =?= e --> S
/// ```
/// Pop a convertibility obligation where the two sides are equal.
Refl,
/// ```text
/// Symm: S, e1 =?= e2 --> S, e2 =?= e1
/// ```
/// Swap the direction of a convertibility obligation.
Sym,
/// ```text
/// Cong: S, (t e1 ... en) =?= (t e1' ... en') --> S, en =?= en', ..., e1 =?= e1'
/// ```
/// Pop a convertibility obligation for two term expressions, and
/// push convertibility obligations for all the parts.
/// The parts are pushed in reverse order so that they are dealt with
/// in declaration order in the proof stream.
Cong,
/// ```text
/// Unfold: S, (t e1 ... en) =?= e', (t e1 ... en), e --> S, e =?= e'
/// (where Unify(t): e1, ..., en; e --> H'; .)
/// ```
/// Pop terms `(t e1 ... en)`, `e` from the stack and run the unifier for `t`
/// (which should be a definition) to make sure that `(t e1 ... en)` unfolds to `e`.
/// Then pop `(t e1 ... en) =?= e'` and push `e =?= e'`.
Unfold,
/// ```text
/// ConvCut: S, e1 =?= e2 --> S, e1 = e2, e1 =?= e2
/// ```
/// Pop a convertibility obligation `e1 =?= e2`, and
/// push a convertibility assertion `e1 = e2` guarded by `e1 =?= e2`.
ConvCut,
/// ```text
/// ConvRef i: H; S, e1 =?= e2 --> H; S (where Hi is e1 = e2)
/// ```
/// Pop a convertibility obligation `e1 =?= e2`, where `e1 = e2` is
/// `i`-th on the heap.
ConvRef(u32),
/// ```text
/// ConvSave: H; S, e1 = e2 --> H, e1 = e2; S
/// ```
/// Pop a convertibility proof `e1 = e2` and save it to the heap.
ConvSave,
/// ```text
/// Save: H; S, s --> H, s; S, s
/// ```
/// Save the top of the stack to the heap, without popping it.
Save,
}
impl std::convert::TryFrom<(u8, u32)> for ProofCmd {
type Error = VerifErr;
fn try_from((cmd, data): (u8, u32)) -> Result<Self, Self::Error> {
Ok(match cmd {
PROOF_TERM => ProofCmd::Term {
term_num: data,
save: false,
},
PROOF_TERM_SAVE => ProofCmd::Term {
term_num: data,
save: true,
},
PROOF_REF => ProofCmd::Ref(data),
PROOF_DUMMY => {
let sort_num = u8::try_from(data).map_err(|_| {
VerifErr::Msg("Failure to shrink u32 to u8 in proof_cmd try from".to_string())
})?;
ProofCmd::Dummy { sort_num }
}
PROOF_THM => ProofCmd::Thm {
thm_num: data,
save: false,
},
PROOF_THM_SAVE => ProofCmd::Thm {
thm_num: data,
save: true,
},
PROOF_HYP => ProofCmd::Hyp,
PROOF_CONV => ProofCmd::Conv,
PROOF_REFL => ProofCmd::Refl,
PROOF_SYMM => ProofCmd::Sym,
PROOF_CONG => ProofCmd::Cong,
PROOF_UNFOLD => ProofCmd::Unfold,
PROOF_CONV_CUT => ProofCmd::ConvCut,
PROOF_CONV_REF => ProofCmd::ConvRef(data),
PROOF_CONV_SAVE => ProofCmd::ConvSave,
PROOF_SAVE => ProofCmd::Save,
owise => {
println!("Failed to convert {:?}\n", owise);
return Err(VerifErr::Msg("try_from for ProofCmd failed match".to_string()));
}
})
}
}
/// An iterator over a proof command stream.
#[derive(Debug, Clone, Copy)]
pub struct ProofIter<'a> {
/// The full source file, but trimmed such that the end
/// is the expected end of the proof stream. The final call to `next`
/// will fail if it does not hit the expected end when the
/// proof stream runs out.
pub buf: &'a [u8],
/// The index of the current proof command in the file.
pub pos: usize,
/// Mark `ends_at` instead of giving `ProofIter` a truncated slice just so
/// the behavior with respect to `try_next_cmd` is identical.
pub ends_at: usize,
}
impl<'a> ProofIter<'a> {
/// True if this iterator is "null", meaning that it has zero commands.
/// This is not the same as being empty, which happens when there is one command
/// which is the terminating `CMD_END` command.
pub fn is_null(&self) -> bool {
//self.buf.len() == self.pos
self.pos == self.ends_at
}
}
impl<'a> Iterator for ProofIter<'a> {
type Item = Result<ProofCmd, VerifErr>;
fn next(&mut self) -> Option<Self::Item> {
match try_next_cmd(self.buf, self.pos) {
// An actual error.
Err(e) => Some(Err(e)),
// `try_next_cmd` got `Ok(None)` by receiving a 0 command at the correct position
Ok(None) if self.ends_at == self.pos + 1 => None,
// `try_next_cmd` got `Ok(None)` by receiving a 0 command at the WRONG position
Ok(None) => Some(Err(VerifErr::Msg(format!("Proof iter err @ {}", self.pos)))),
// `try_next_cmd` parsed a new command.
Ok(Some((stmt, rest))) => {
self.pos = rest;
Some(Ok(stmt))
}
}
}
}
impl<'b, 'a: 'b> MmbState<'b, 'a> {
pub fn run_proof(
&mut self,
mode: Mode,
proof: ProofIter
) -> Res<()> {
for maybe_cmd in proof {
match maybe_cmd? {
ProofCmd::Ref(i) => self.proof_ref(i)?,
ProofCmd::Dummy { sort_num } => self.proof_dummy(sort_num)?,
ProofCmd::Term { term_num, save } => self.proof_term(mode, term_num, save)?,
ProofCmd::Thm { thm_num, save } => self.proof_thm(thm_num, save)?,
ProofCmd::Hyp => self.proof_hyp(mode)?,
ProofCmd::Conv => self.proof_conv()?,
ProofCmd::Refl=> self.proof_refl()?,
ProofCmd::Sym => self.proof_sym()?,
ProofCmd::Cong => self.proof_cong()?,
ProofCmd::Unfold => self.proof_unfold()?,
ProofCmd::ConvCut => self.proof_conv_cut()?,
ProofCmd::ConvRef(i) => self.proof_conv_ref(i)?,
ProofCmd::ConvSave => self.proof_conv_save()?,
ProofCmd::Save => self.proof_save()?,
}
}
Ok(())
}
fn proof_ref(&mut self, i: u32) -> Res<()> {
let heap_elem = self.heap[i as usize];
Ok(self.stack.push(heap_elem))
}
fn proof_dummy(&mut self, sort_num: u8) -> Res<()> {
make_sure!(sort_num < self.outline.header.num_sorts);
make_sure!(self.outline.get_sort_mods(sort_num as usize).unwrap().inner & crate::mmb::SORT_STRICT == 0);
// Otherwise, we'd have too many bound variables.
make_sure!(self.next_bv >> 56 == 0);
let ty = Type { inner: TYPE_BOUND_MASK | ((sort_num as u64) << 56) | self.take_next_bv() };
let e = self.alloc(MmbItem::Expr(self.alloc(MmbExpr::Var { idx: self.heap.len(), ty })));
self.stack.push(e);
Ok(self.heap.push(e))
}
fn proof_term(
&mut self,
mode: Mode,
term_num: u32,
save: bool
) -> Res<()> {
make_sure!(term_num < self.outline.header.num_terms);
let termref = self.outline.get_term_by_num(term_num)?;
// The term's arguments (`ebar`), taken from the stack; each is either a variable or an application.
// We don't actually drain the elements from the stack until the end
// in order to avoid an allocation.
let drain_from = self.stack.len() - (termref.num_args_no_ret() as usize);
let stack_args = &self.stack[drain_from..];
// (sig_args, stack_args)
let all_args = || { termref.args_no_ret().zip(stack_args.iter()) };
// Arguments from the stack (and their positions, starting from 1) that the signature demands be bound.
let stack_bound_by_sig = all_args().filter_map(|(sig, stack)| {
if sig.is_bound() {
Some(stack.get_bound_digit())
} else {
None
}
});
// For all of the args, make sure the stack and sig items have compatible sorts.
for (sig_arg, stack_arg) in all_args() {
make_sure!(sorts_compatible(stack_arg.get_ty()?, sig_arg))
}
// Start building the new return type now that we know we have the right sort.
let mut new_type_accum = Type::new_with_sort(termref.sort());
// For the args not bound by the signature...
for (sig_unbound, stack_arg) in all_args().filter(|(sig, _)| !sig.is_bound()) {
let mut stack_lowbits = stack_arg.get_deps().or(stack_arg.get_bound_digit())?;
if mode == Mode::Def {
for (idx, dep) in stack_bound_by_sig.clone().enumerate() {
if sig_unbound.depends_on_((idx + 1) as u64) {
stack_lowbits &= !(dep?);
}
}
}
new_type_accum |= stack_lowbits
}
// For definitions with dependent return types, add the appropriate dependencies
// to the type accumulator.
if mode == Mode::Def && termref.ret().has_deps() {
for (idx, bvar) in stack_bound_by_sig.enumerate() {
if termref.ret().depends_on_((idx + 1) as u64) {
new_type_accum |= bvar?;
}
}
}
// Now actually drain the stack arguments (deferred until here to avoid an extra allocation).
let drain = self.stack.drain((self.stack.len() - (termref.num_args_no_ret() as usize))..);
let mut stack_args_out = BumpVec::new_in(self.bump);
for elem in drain {
stack_args_out.push(elem);
}
let t = self.alloc(MmbItem::Expr(self.alloc(MmbExpr::App {
term_num,
ty: new_type_accum,
args: self.alloc(stack_args_out),
})));
if save {
self.heap.push(t);
}
Ok(self.stack.push(t))
}
fn proof_thm(
&mut self,
thm_num: u32,
save: bool
) -> Res<()> {
make_sure!(thm_num < self.outline.header.num_thms);
let thmref = self.outline.get_assert_by_num(thm_num)?;
let sig_args = thmref.args();
let a = none_err!(self.stack.pop())?;
// Wait to remove these in order to save an allocation.
let drain_from = self.stack.len() - sig_args.len();
let stack_args = &self.stack[drain_from..];
let bound_by_sig = sig_args.zip(stack_args).enumerate().filter(|(_, (sig, _))| sig.is_bound());
self.uheap.extend(stack_args.into_iter());
let mut bound_len = 0usize;
// For each variable bound by the signature...
for (idx, (_, stack_a)) in bound_by_sig.clone() {
bound_len += 1;
for j in 0..idx {
make_sure!(self.uheap[j].get_ty().unwrap().disjoint(stack_a.low_bits()))
}
}
// For the args not bound in the signature
for (sig_a, stack_a) in sig_args.zip(stack_args).filter(|(sig, _)| !sig.is_bound()) {
for j in 0..bound_len {
make_sure!(
!(sig_a.disjoint(Type { inner: 1 << j }))
|| bound_by_sig.clone().nth(j).unwrap().1.1.clone().low_bits().disjoint(stack_a.low_bits())
)
}
}
// Now we actually remove the stack_args from the stack
self.stack.truncate(drain_from);
self.run_unify(UMode::UThm, thmref.unify(), a)?;
let proof = self.alloc(MmbItem::Proof(a));
if save {
self.heap.push(proof);
}
Ok(self.stack.push(proof))
}
fn proof_hyp(
&mut self,
mode: Mode,
) -> Res<()> {
make_sure!(mode != Mode::Def);
let e = none_err!(self.stack.pop())?;
// Assert that `e` is in a provable sort, since it's a hypothesis.
let e_sort_numx = e.get_ty()?.sort();
let e_sort_mods = self.outline.get_sort_mods(e_sort_numx as usize).unwrap().inner;
make_sure!(e_sort_mods & crate::mmb::SORT_PROVABLE != 0);
self.hstack.push(e);
let proof = self.alloc(MmbItem::Proof(e));
Ok(self.heap.push(proof))
}
fn proof_conv(&mut self) -> Res<()> {
let e2proof = none_err!(self.stack.pop())?;
let e1 = none_err!(self.stack.pop())?;
match e2proof {
MmbItem::Proof(conc) => {
let e1proof = self.alloc(MmbItem::Proof(e1));
self.stack.push(e1proof);
let coconv_e1_e2 = self.alloc(MmbItem::CoConv(e1, conc));
Ok(self.stack.push(coconv_e1_e2))
},
_ => return Err(VerifErr::Unreachable(file!(), line!()))
}
}
fn proof_refl(&mut self) -> Res<()> {
let e = none_err!(self.stack.pop())?;
if let MmbItem::CoConv(cc1, cc2) = e {
Ok(make_sure!((*cc1) as *const _ == (*cc2) as *const _))
} else {
return Err(VerifErr::Unreachable(file!(), line!()));
}
}
fn proof_sym(&mut self) -> Res<()> {
let e = none_err!(self.stack.pop())?;
if let MmbItem::CoConv(cc1, cc2) = e {
let swapped = self.alloc(MmbItem::CoConv(cc2, cc1));
Ok(self.stack.push(swapped))
} else {
return Err(VerifErr::Unreachable(file!(), line!()));
}
}
fn proof_cong(&mut self) -> Res<()> {
let e = none_err!(self.stack.pop())?;
if let MmbItem::CoConv(cc1, cc2) = e {
match (cc1, cc2) {
(MmbItem::Expr(MmbExpr::App { term_num: n1, args: as1, .. }), MmbItem::Expr(MmbExpr::App { term_num: n2, args: as2, .. })) => {
make_sure!(n1 == n2);
make_sure!(as1.len() == as2.len());
for (lhs, rhs) in as1.iter().zip(as2.iter()).rev() {
let cc = self.alloc(MmbItem::CoConv(lhs, rhs));
self.stack.push(cc);
}
Ok(())
},
_ => return Err(VerifErr::Unreachable(file!(), line!()))
}
} else {
return Err(VerifErr::Unreachable(file!(), line!()));
}
}
fn proof_unfold(&mut self) -> Res<()> {
let e_prime = none_err!(self.stack.pop())?;
let f_ebar = none_err!(self.stack.pop())?;
let (term_num, ebar) = match f_ebar {
MmbItem::Expr(MmbExpr::App{ term_num, args, .. }) => (term_num, args.clone()),
_ => return Err(VerifErr::Unreachable(file!(), line!()))
};
make_sure!(self.uheap.is_empty());
self.uheap.extend(ebar);
self.run_unify(
crate::mmb::unify::UMode::UDef,
self.outline.get_term_by_num(*term_num)?.unify(),
e_prime,
)?;
let cc = none_err!(self.stack.pop())?;
if let MmbItem::CoConv(f_ebar2, e_doubleprime) = cc {
make_sure!(f_ebar == *f_ebar2);
let coconv = self.alloc(MmbItem::CoConv(e_prime, e_doubleprime));
Ok(self.stack.push(coconv))
} else {
return Err(VerifErr::Unreachable(file!(), line!()));
}
}
fn proof_conv_cut(&mut self) -> Res<()> {
let p = none_err!(self.stack.pop())?;
if let MmbItem::CoConv(cc1, cc2) = p {
let p1 = self.alloc(MmbItem::Conv(cc1, cc2));
self.stack.push(p1);
Ok(self.stack.push(p))
} else {
return Err(VerifErr::Unreachable(file!(), line!()));
}
}
fn proof_conv_ref(&mut self, i: u32) -> Res<()> {
let heap_conv = none_err!(self.heap.get(i as usize).copied())?;
let stack_coconv = none_err!(self.stack.pop())?;
if let (MmbItem::Conv(c1, c2), MmbItem::CoConv(cc1, cc2)) = (heap_conv, stack_coconv) {
make_sure!(c1 == cc1);
Ok(make_sure!(c2 == cc2))
} else {
return Err(VerifErr::Unreachable(file!(), line!()));
}
}
fn proof_conv_save(&mut self) -> Res<()> {
let p = localize!(none_err!(self.stack.pop()))?;
make_sure!(matches!(p, MmbItem::Conv {..}));
Ok(self.heap.push(p))
}
fn proof_save(&mut self) -> Res<()> {
let last = none_err!(self.stack.last().copied())?;
match last {
MmbItem::CoConv {..} => Err(VerifErr::Msg(format!("Can't save co-conv"))),
_ => Ok(self.heap.push(last))
}
}
}
use std::convert::TryFrom;
use bumpalo::Bump;
use bumpalo::collections::Vec as BumpVec;
use crate::make_sure;
use crate::Outline;
use crate::util::{ Res, VerifErr };
use crate::mmb::proof::{ ProofIter };
use crate::util::{
Type,
Term,
Assert,
Args,
parse_u8,
parse_u16,
parse_u32,
parse_u64,
};
use crate::mmb::stmt::StmtCmd;
use crate::conv_err;
use crate::none_err;
pub mod proof;
pub mod unify;
pub mod index;
pub mod stmt;
const MM0B_MAGIC: u32 = 0x42304D4D;
// Each sort has one byte associated to it, which
// contains flags for the sort modifiers.
// The high four bits are unused.
pub const SORT_PURE : u8 = 1;
pub const SORT_STRICT : u8 = 2;
pub const SORT_PROVABLE : u8 = 4;
pub const SORT_FREE : u8 = 8;
/// bound mask: 10000000_00000000_00000000_00000000_00000000_00000000_00000000_00000000
pub const TYPE_BOUND_MASK: u64 = 1 << 63;
/// deps mask: 00000000_11111111_11111111_11111111_11111111_11111111_11111111_11111111
pub const TYPE_DEPS_MASK: u64 = (1 << 56) - 1;
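// For example, `proof_dummy` builds the type of a new bound variable as
// `TYPE_BOUND_MASK | ((sort_num as u64) << 56) | bv_bit`: bit 63 marks it as bound,
// bits 56..=62 hold the sort number, and the low 56 bits hold a one-hot bit identifying
// the bound variable (or, for a regular variable, the set of bound variables it depends on).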
// Returns true if a value with type 'from' can be cast to a value of type 'to'.
// This requires that the sorts be the same, and additionally if 'to' is a
// name then so is 'from'.
pub fn sorts_compatible(from: Type, to: Type) -> bool {
let (from, to) = (from.inner, to.inner);
let diff = from ^ to;
let c1 = || (diff & !TYPE_DEPS_MASK) == 0;
let c2 = || (diff & !TYPE_BOUND_MASK & !TYPE_DEPS_MASK) == 0;
let c3 = || ((from & TYPE_BOUND_MASK) != 0);
c1() || (c2() && c3())
}
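// A hedged, hand-constructed illustration of `sorts_compatible`; the `Type` values below
// are made up for the example rather than read from a real mmb file.
#[cfg(test)]
mod sorts_compatible_example {
use super::{ sorts_compatible, TYPE_BOUND_MASK };
use crate::util::Type;
#[test]
fn bound_from_may_fill_a_regular_slot_but_not_vice_versa() {
// Both types have sort 0; `bound` is a bound variable using bv bit 0,
// while `regular` is a regular variable depending on bv bit 1.
let bound = Type { inner: TYPE_BOUND_MASK | 0b01 };
let regular = Type { inner: 0b10 };
assert!(sorts_compatible(bound, regular));
assert!(!sorts_compatible(regular, bound));
// Types with different sorts are never compatible.
let other_sort = Type { inner: 1u64 << 56 };
assert!(!sorts_compatible(bound, other_sort));
}
}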
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum MmbExpr<'b> {
Var {
idx: usize,
ty: Type
},
App {
term_num: u32,
args: &'b [&'b MmbItem<'b>],
ty: Type,
},
}
// Stack item
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum MmbItem<'b> {
Expr(&'b MmbExpr<'b>),
Proof(&'b MmbItem<'b>),
Conv(&'b MmbItem<'b>, &'b MmbItem<'b>),
CoConv(&'b MmbItem<'b>, &'b MmbItem<'b>)
}
impl<'b> MmbItem<'b> {
pub fn get_ty(&self) -> Res<Type> {
match self {
| MmbItem::Expr(MmbExpr::Var { ty, .. })
| MmbItem::Expr(MmbExpr::App { ty, ..}) => Ok(*ty),
_ => Err(VerifErr::Msg(format!("Can't get type from a non-expr MmbItem")))
}
}
pub fn get_deps(&self) -> Res<Type> {
self.get_ty()
.and_then(|ty| ty.deps())
.map(|deps| Type { inner: deps })
}
pub fn get_bound_digit(&self) -> Res<Type> {
self.get_ty()
.and_then(|ty| ty.bound_digit())
.map(|bound_idx| Type { inner: bound_idx })
}
pub fn low_bits(&self) -> Type {
self.get_deps().or(self.get_bound_digit()).unwrap()
}
}
#[derive(Debug, Clone, Copy)]
pub struct Header {
/// "= MM0B_VERSION"
pub magic: u32,
pub version: u8,
/// Number of declared sorts
pub num_sorts: u8,
pub reserved: u16,
/// Number of terms and defs
pub num_terms: u32,
/// Number of axioms and theorems
pub num_thms: u32,
/// Pointer to start of term table
pub terms_start: u32,
/// Pointer to start of theorem table
pub thms_start: u32,
/// Pointer to start of proof section
pub proof_stream_start: u32,
pub reserved2: u32,
/// Pointer to start of index, or 0
pub index_start: u64,
// Offset to the start of the sort data (one byte of modifier flags per sort).
// The number of sorts is limited to 128 because of the data layout,
// so don't monomorphize too much.
pub sort_data_start: u32,
}
impl std::default::Default for Header {
fn default() -> Header {
Header {
magic: 0,
version: 0,
num_sorts: 0,
reserved: 0,
num_terms: 0,
num_thms: 0,
terms_start: 0,
thms_start: 0,
proof_stream_start: 0,
reserved2: 0,
index_start: 0,
sort_data_start: 0
}
}
}
pub fn parse_header(mmb: &[u8]) -> Res<Header> {
let (magic, source) = parse_u32(mmb)?;
assert_eq!(magic, MM0B_MAGIC);
let (version, source) = parse_u8(source)?;
let (num_sorts, source) = parse_u8(source)?;
let (reserved, source) = parse_u16(source)?;
let (num_terms, source) = parse_u32(source)?;
let (num_thms, source) = parse_u32(source)?;
let (terms_start, source) = parse_u32(source)?;
let (thms_start, source) = parse_u32(source)?;
let (proof_stream_start, source) = parse_u32(source)?;
let (reserved2, source) = parse_u32(source)?;
let (index_start, source) = parse_u64(source)?;
let sort_data_start = conv_err!(u32::try_from(mmb.len() - source.len()))?;
Ok(Header {
magic,
version,
num_sorts,
reserved,
num_terms,
num_thms,
terms_start,
thms_start,
proof_stream_start,
reserved2,
index_start,
sort_data_start
})
}
//#[derive(Debug)]
pub struct MmbState<'b, 'a: 'b> {
pub outline: &'a Outline<'a>,
pub bump: &'b Bump,
pub stack: BumpVec<'b, &'b MmbItem<'b>>,
pub heap: BumpVec<'b, &'b MmbItem<'b>>,
pub ustack: BumpVec<'b, &'b MmbItem<'b>>,
pub uheap: BumpVec<'b, &'b MmbItem<'b>>,
pub hstack: BumpVec<'b, &'b MmbItem<'b>>,
pub next_bv: u64
}
impl<'b, 'a: 'b> MmbState<'b, 'a> {
pub fn new_from(outline: &'a Outline, bump: &'b mut Bump) -> MmbState<'b, 'a> {
bump.reset();
MmbState {
outline,
bump: &*bump,
stack: BumpVec::new_in(&*bump),
heap: BumpVec::new_in(&*bump),
ustack: BumpVec::new_in(&*bump),
uheap: BumpVec::new_in(&*bump),
hstack: BumpVec::new_in(&*bump),
next_bv: 1u64
}
}
pub fn verify1(outline: &'a Outline<'a>, bump: &mut Bump, stmt: StmtCmd, proof: ProofIter<'a>) -> Res<()> {
match stmt {
StmtCmd::Sort {..} => {
if !proof.is_null() {
return Err(VerifErr::Msg(format!("mmb sorts must have null proof iterators")));
}
},
StmtCmd::TermDef { num, .. } => {
let term = outline.get_term_by_num(num.unwrap())?;
if !term.is_def() && !proof.is_null() {
return Err(VerifErr::Msg(format!("mmb terms must have null proof iterators")));
}
MmbState::new_from(outline, bump).verify_termdef(stmt, term, proof)?;
}
StmtCmd::Axiom { num } | StmtCmd::Thm { num, .. } => {
let assert = outline.get_assert_by_num(num.unwrap())?;
MmbState::new_from(outline, bump).verify_assert(stmt, assert, proof)?;
}
}
Ok(outline.add_declar(stmt))
}
pub fn alloc<A>(&self, item: A) -> &'b A {
&*self.bump.alloc(item)
}
}
impl<'b, 'a: 'b> MmbState<'b, 'a> {
pub fn take_next_bv(&mut self) -> u64 {
let outgoing = self.next_bv;
// Assert we're under the limit of 55 bound variables.
assert!(outgoing >> 56 == 0);
self.next_bv *= 2;
outgoing
}
fn load_args(&mut self, args: Args<'a>, stmt: StmtCmd) -> Res<()> {
make_sure!(self.heap.len() == 0);
make_sure!(self.next_bv == 1);
for (idx, arg) in args.enumerate() {
if arg.is_bound() {
// Because we have a bound var, assert the arg's sort is not strict.
make_sure!(self.outline.get_sort_mods(arg.sort() as usize).unwrap().inner & SORT_STRICT == 0);
// increment the bv counter/checker
let this_bv = self.take_next_bv();
// assert that the mmb file has the right/sequential bv idx for this bound var
make_sure!(arg.bound_digit()? == this_bv);
} else {
// assert that this doesn't have any dependencies with a bit pos/idx greater
// than the number of bvs that have been declared/seen.
make_sure!(0 == (arg.deps().unwrap() & !(self.next_bv - 1)));
}
self.heap.push(self.alloc(MmbItem::Expr(self.alloc(MmbExpr::Var { idx, ty: arg }))));
}
// For termdefs, pop the last item (which is the return type) off the heap.
if let StmtCmd::TermDef {..} = stmt {
self.heap.pop();
}
Ok(())
}
pub fn verify_termdef(
&mut self,
stmt: StmtCmd,
term: Term<'a>,
proof: ProofIter,
) -> Res<()> {
self.load_args(term.args(), stmt)?;
if term.is_def() {
self.run_proof(crate::mmb::proof::Mode::Def, proof)?;
let final_val = none_err!(self.stack.pop())?;
let ty = final_val.get_ty()?;
make_sure!(self.stack.is_empty());
make_sure!(sorts_compatible(ty, term.ret()));
make_sure!(self.uheap.is_empty());
for arg in self.heap.iter().take(term.num_args_no_ret() as usize) {
self.uheap.push(*arg);
}
self.run_unify(crate::mmb::unify::UMode::UDef, term.unify(), final_val)?;
}
Ok(())
}
pub fn verify_assert(
&mut self,
stmt: StmtCmd,
assert: Assert<'a>,
proof: ProofIter,
) -> Res<()> {
self.load_args(assert.args(), stmt)?;
self.run_proof(crate::mmb::proof::Mode::Thm, proof)?;
let final_val = match none_err!(self.stack.pop())? {
MmbItem::Proof(p) if matches!(stmt, StmtCmd::Thm {..}) => p,
owise if matches!(stmt, StmtCmd::Axiom {..}) => owise,
owise => return Err(VerifErr::Msg(format!("Expected a proof; got {:?}", owise)))
};
make_sure!(self.stack.is_empty());
make_sure!(self.uheap.is_empty());
for arg in self.heap.iter().take(assert.args().len()) {
self.uheap.push(*arg);
}
self.run_unify(crate::mmb::unify::UMode::UThmEnd, assert.unify(), final_val)
}
}
use std::fmt::{ Debug, Formatter, Result as FmtResult };
use crate::mmb::Header;
use crate::util::{
parse_u8,
parse_u32,
parse_u64,
Res,
VerifErr
};
use crate::none_err;
/// A parsed `MMB` file index.
pub struct Index<'a> {
/// The full file
pub mmb: &'a [u8],
/// A pointer to the root of the binary search tree, for searching based on name.
pub root: u64,
/// Pointers to the index entries for the sorts
pub sorts: Vec<u64>,
/// Pointers to the index entries for the terms
// The u64 you get back is to an `indexentry`.
pub terms: Vec<u64>,
/// Pointers to the index entries for the theorems
pub thms: Vec<u64>,
}
impl<'a> Debug for Index<'a> {
fn fmt(&self, f: &mut Formatter) -> FmtResult {
let mut d = f.debug_struct("Index");
d.field("root", &self.root);
d.field("sorts", &self.sorts);
d.field("terms", &self.terms);
d.field("thms", &self.thms);
d.finish()
}
}
impl<'a> std::default::Default for Index<'a> {
fn default() -> Index<'a> {
Index {
mmb: b"",
root: 0,
sorts: Vec::new(),
terms: Vec::new(),
thms: Vec::new(),
}
}
}
#[derive(Debug, Clone)]
pub struct IndexEntry<'a> {
// pointer to left subchild (for binary searching by strings)
pub left: u64,
// pointer to right subchild
pub right: u64,
// For locating in the source file
pub row: u32,
// For locating in the source file
pub col: u32,
// pointer to the command that declares this item
pub proof: u64,
// Index of the object in the relevant table
pub ix: u32,
// sort, term, thm, var
pub kind: u8,
// zero-terminated char* buffer
pub charbuff: &'a [u8],
}
use crate::Outline;
impl<'a> Outline<'a> {
#[inline]
pub fn term_index_entry(&self, term_num: u32) -> Option<IndexEntry<'a>> {
let entry = self.index.terms[term_num as usize];
self.index_entry(entry as usize)
}
#[inline]
pub fn assert_index_entry(&self, assert_num: u32) -> Option<IndexEntry<'a>> {
let entry = self.index.thms[assert_num as usize];
self.index_entry(entry as usize)
}
#[inline]
pub fn index_entry(&self, start_at: usize) -> Option<IndexEntry<'a>> {
let (left, rest) = parse_u64(&self.mmb()[start_at..]).unwrap();
let (right, rest) = parse_u64(rest).unwrap();
let (row, rest) = parse_u32(rest).unwrap();
let (col, rest) = parse_u32(rest).unwrap();
let (proof, rest) = parse_u64(rest).unwrap();
let (ix, rest) = parse_u32(rest).unwrap();
let (kind, rest) = parse_u8(rest).unwrap();
let charbuff = parse_cstr(rest)?;
Some(IndexEntry {
left,
right,
row,
col,
proof,
ix,
kind,
charbuff,
})
}
}
fn parse_cstr<'a>(source: &'a [u8]) -> Option<&'a [u8]> {
let null = source.iter().position(|c| *c == 0)?;
let (s, _) = source.split_at(null);
Some(s)
}
pub fn parse_index<'a>(mmb: &'a [u8], header: Header) -> Res<Index<'a>> {
let (root, rest) = parse_u64(&mmb[header.index_start as usize..])
.expect("Failed to get u64 for index root");
let (sorts, rest) = prefix_u64(rest, header.num_sorts as usize)
.expect("Failed to get sorts in index");
let (terms, rest) = prefix_u64(rest, header.num_terms as usize)
.expect("Failed to get num terms");
let (thms, _) = prefix_u64(rest, header.num_thms as usize)
.expect("Failed to get num thms in index");
Ok(Index {
mmb,
root,
sorts,
terms,
thms,
})
}
#[inline]
pub fn prefix_u64(mut bytes: &[u8], num_elems: usize) -> Res<(Vec<u64>, &[u8])> {
let split_point = none_err!{ std::mem::size_of::<u64>().checked_mul(num_elems) }?;
if split_point <= bytes.len() {
let mut v = Vec::with_capacity(num_elems);
for _ in 0..num_elems {
let (inner, rest_) = parse_u64(bytes)?;
bytes = rest_;
v.push(inner);
}
Ok((v, bytes))
} else {
Err(VerifErr::Msg(format!("Bad prefix arg")))
}
}
#![forbid(unsafe_code)]
#![forbid(unreachable_patterns)]
#![forbid(unused_mut)]
#![forbid(unused_variables)]
#![forbid(unused_must_use)]
#![forbid(unused_imports)]
#![allow(unused_parens)]
// Temporary.
#![allow(dead_code)]
mod util;
mod mmb;
mod mmz;
mod fs;
use std::path::PathBuf;
use std::sync::atomic::{ AtomicUsize, Ordering::Relaxed };
use bumpalo::Bump;
use clap::{ Arg, App };
use crossbeam_utils::thread;
use crate::mmz::MmzMem;
use crate::util::VerifErr;
use crate::fs::FileData;
use crate::util::Outline;
fn main() {
let start = std::time::Instant::now();
let matches = App::new("Verifier")
.version("0.1")
.author("ammkrn@tuta.io")
.about("A metamath zero verifier")
.arg(
Arg::with_name("num_threads")
.short("t")
.long("threads")
.help("specify the number of threads to use")
.takes_value(true)
)
.arg(
Arg::with_name("mmb_file")
.value_name("mmb file")
.required(true)
.takes_value(true)
)
.arg(
Arg::with_name("mmz_file")
.value_name("mmz file")
.required(false)
.takes_value(true)
)
.get_matches();
let num_threads = match matches.value_of("num_threads") {
None => 1,
Some(s) => match s.parse::<usize>() {
Ok(0) => 1,
Ok(n) => n,
Err(_) => panic!("The number of threads must be a natural number. got {}", s)
}
};
// Safe to unwrap since this is required by the clap app.
let mmb_path = matches.value_of("mmb_file").map(|s| PathBuf::from(s)).unwrap();
let mmz_path = matches.value_of("mmz_file").map(|s| PathBuf::from(s));
let file_data = FileData::new_from(mmb_path, mmz_path).unwrap();
let outline = Outline::new_from(&file_data).unwrap();
// Now that all the file IO is done, we can confidently begin verification.
let errs = if num_threads == 1 {
verify_serial(&outline)
} else {
verify_par(&outline, num_threads)
};
if let Some((e, es)) = errs.split_last() {
println!("verification was unsuccessful. Terminated with error {:?}\n + {} other errors", e, es.len());
} else {
println!(
"\n* verified {} sorts, {} terms, and {} assertions in {}ms",
outline.header.num_sorts,
outline.header.num_terms,
outline.header.num_thms,
start.elapsed().as_millis()
);
}
}
fn verify_serial<'a>(outline: &'a Outline<'a>) -> Vec<VerifErr> {
let task_counter = AtomicUsize::new(0);
let mut errs = verify_mmz(outline);
let mut mmb_errs = verify_mmb(outline, &task_counter);
errs.append(&mut mmb_errs);
outline.assert_mmb_done(&mut errs);
errs
}
fn verify_par<'a>(outline: &'a Outline<'a>, num_threads: usize) -> Vec<VerifErr> {
let task_counter = AtomicUsize::new(0);
thread::scope(|sco| {
let mut mmb_tasks = Vec::new();
for _ in 0..num_threads {
mmb_tasks.push(sco.spawn(|_| verify_mmb(outline, &task_counter)));
}
let mut errs = match sco.spawn(|_| verify_mmz(outline)).join() {
Err(_) => vec![VerifErr::Msg(format!("mmz thread panicked!"))],
Ok(errs) => errs,
};
for (idx, mmb_task) in mmb_tasks.into_iter().enumerate() {
match mmb_task.join() {
Err(_) => { errs.push(VerifErr::Msg(format!("mmb thread {} panicked", idx))); },
Ok(mut mmb_errs) => errs.append(&mut mmb_errs),
}
}
errs
}).unwrap()
}
// Parsing/verifying the contents of the mmz file is done in serial
fn verify_mmz<'a>(outline: &'a Outline<'a>) -> Vec<VerifErr> {
let mut mem = MmzMem::new_from(outline).unwrap();
let mut bump = Bump::new();
let mut errs = Vec::new();
for (stmt, _proof) in outline.declarations.iter() {
if let Err(e) = mem.verify1(&mut bump, *stmt) {
errs.push(e);
}
}
outline.assert_mmz_done(&mem, &mut errs);
errs
}
fn verify_mmb<'a>(outline: &'a Outline<'a>, task_counter: &AtomicUsize) -> Vec<VerifErr> {
let mut bump = Bump::new();
let mut errs = Vec::new();
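// `fetch_add` hands each declaration index to exactly one thread, so the workers
// share the list dynamically without any further coordination.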
while let Some((stmt, proof)) = outline.declarations.get(task_counter.fetch_add(1, Relaxed)) {
if let Err(e) = crate::mmb::MmbState::verify1(outline, &mut bump, *stmt, *proof) {
errs.push(e);
}
}
errs
}
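// A minimal sketch (not part of the verifier proper) of the shared-counter scheme
// used by `verify_par`/`verify_mmb`: each worker repeatedly claims the next unclaimed
// index with `fetch_add`, so the work is split dynamically rather than into fixed
// chunks. The item list below is hypothetical stand-in data.
#[cfg(test)]
mod counter_demo {
    use std::sync::atomic::{ AtomicUsize, Ordering::Relaxed };
    use crossbeam_utils::thread;

    #[test]
    fn shared_counter_splits_work() {
        let items: Vec<usize> = (0..100).collect();
        let counter = AtomicUsize::new(0);
        let claimed: usize = thread::scope(|sco| {
            let mut handles = Vec::new();
            for _ in 0..4 {
                handles.push(sco.spawn(|_| {
                    let mut n: usize = 0;
                    // Each `fetch_add` yields a distinct index exactly once across all workers.
                    while items.get(counter.fetch_add(1, Relaxed)).is_some() {
                        n += 1;
                    }
                    n
                }));
            }
            handles.into_iter().map(|h| h.join().unwrap()).sum()
        }).unwrap();
        // Every item was claimed by exactly one worker.
        assert_eq!(claimed, items.len());
    }
}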
use std::path::PathBuf;
use crate::util::Res;
use crate::util::VerifErr;
use std::fmt::{ Debug, Formatter, Result as FmtResult };
#[derive(Debug, PartialEq, Eq)]
pub enum ImportGraph {
NoImports(PathBuf),
HasImports(PathBuf, Vec<ImportGraph>),
}
pub struct FileData {
pub mmb_file: Vec<u8>,
/// From left to right, a post-order depth-first walk of the import graph:
/// every imported file appears before the file that imports it (see the
/// `post_order_sketch` module after this struct). For example, given
///```text
///       a
///      / \
///     b   c
///    / \ / \
///   d  e f  g
///```
/// the vec contains [d, e, b, f, g, c, a].
pub mmz_files: Vec<String>,
// The structure of the import hierarchy. Might be useful to let users
// inspect/view this as part of the output.
pub mmz_hierarchy: ImportGraph,
root_mmz_path: PathBuf,
// Files currently being expanded (the DFS path); used to detect import cycles.
todos: Vec<PathBuf>,
// Files that have already been fully processed; used to skip duplicates
// introduced by diamond-shaped imports.
done: Vec<PathBuf>,
}
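// A minimal sketch (not used by the verifier) of the ordering documented on
// `FileData::mmz_files`. `flatten_post_order` is a hypothetical helper added
// only for illustration: a post-order walk pushes every import before the file
// that imports it, reproducing the [d, e, b, f, g, c, a] order from the doc comment.
#[cfg(test)]
mod post_order_sketch {
    use super::ImportGraph;
    use std::path::PathBuf;

    fn flatten_post_order(g: &ImportGraph, out: &mut Vec<PathBuf>) {
        match g {
            ImportGraph::NoImports(p) => out.push(p.clone()),
            ImportGraph::HasImports(p, imports) => {
                // Imports (children) first, then the importing file itself.
                for child in imports {
                    flatten_post_order(child, out);
                }
                out.push(p.clone());
            }
        }
    }

    #[test]
    fn doc_example_order() {
        // The tree from the `mmz_files` doc comment: a imports (b, c), b imports (d, e), c imports (f, g).
        let leaf = |s: &str| ImportGraph::NoImports(PathBuf::from(s));
        let b = ImportGraph::HasImports(PathBuf::from("b"), vec![leaf("d"), leaf("e")]);
        let c = ImportGraph::HasImports(PathBuf::from("c"), vec![leaf("f"), leaf("g")]);
        let a = ImportGraph::HasImports(PathBuf::from("a"), vec![b, c]);
        let mut order = Vec::new();
        flatten_post_order(&a, &mut order);
        let expected: Vec<PathBuf> = ["d", "e", "b", "f", "g", "c", "a"].iter().map(|s| PathBuf::from(*s)).collect();
        assert_eq!(order, expected);
    }
}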
impl std::default::Default for FileData {
fn default() -> Self {
FileData {
mmb_file: Vec::new(),
mmz_files: Vec::new(),
root_mmz_path: PathBuf::from(""),
mmz_hierarchy: ImportGraph::NoImports(PathBuf::from("")),
todos: Vec::new(),
done: Vec::new(),
}
}
}
use std::fs::OpenOptions;
use std::io::Read;
impl FileData {
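/// Resolve `filename` relative to the directory containing the root mmz file by
/// swapping out the file-name component of `root_mmz_path`. Note that this means
/// imports in nested files are also looked up next to the root file.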
pub fn w_filename(&self, filename: impl AsRef<std::ffi::OsStr>) -> PathBuf {
let mut new_path = self.root_mmz_path.clone();
new_path.set_file_name(filename);
new_path
}
pub fn new_from(mmb_path: impl Into<PathBuf>, root_mmz_path: Option<impl Into<PathBuf>>) -> Res<Self> {
let mmb_path = mmb_path.into().canonicalize().unwrap();
let root_mmz_path = match root_mmz_path {
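// No mmz path was given: default to the mmb path with its extension swapped to `.mm0`.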
None => {
let mut base = mmb_path.clone();
base.set_extension("mm0");
base
},
Some(p) => p.into().canonicalize().unwrap()
};
let mut mmb_handle = OpenOptions::new()
.read(true)
.truncate(false)
.open(&mmb_path)
.map_err(|_| VerifErr::Msg(format!("IO error when opening the mmb file {:?}", mmb_path)))
.unwrap();
let mut mmb_file = Vec::<u8>::with_capacity(mmb_handle.metadata().unwrap().len() as usize);
mmb_handle.read_to_end(&mut mmb_file).unwrap();
let mut data = FileData::default();
data.mmb_file = mmb_file;
data.root_mmz_path = root_mmz_path.clone();
data.mmz_hierarchy = data.add_mmz_aux(root_mmz_path)?;
Ok(data)
}
/// Form the import graph by mutual recursion with `find_imports`:
/// open the specified file and look for any import statements therein.
/// If there are any, do the same thing with each imported file.
fn add_mmz_aux(&mut self, mmz_path: impl Into<PathBuf>) -> Res<ImportGraph> {
let mmz_path = mmz_path.into();
self.todos.push(mmz_path.clone());
let s = std::fs::read_to_string(&mmz_path).unwrap();
let imports = self.find_imports(s.split_whitespace())?;
self.mmz_files.push(s);
let popped = self.todos.pop().unwrap();
debug_assert_eq!(popped, mmz_path);
self.done.push(popped);
if imports.is_empty() {
Ok(ImportGraph::NoImports(mmz_path))
} else {
Ok(ImportGraph::HasImports(mmz_path, imports))
}
}
/// Form the import graph by mutual recursion with `add_mmz_aux`.
fn find_imports(&mut self, mut ws: std::str::SplitWhitespace) -> Res<Vec<ImportGraph>> {
let mut acc = Vec::<ImportGraph>::new();
while let Some(fst) = ws.next() {
if fst == "import" {
match ws.next() {
Some(path) if path.starts_with("\"") => {
let end_pos = path.chars().skip(1).position(|c| c == '"').unwrap();
let pathbuf = PathBuf::from(&path[1..end_pos + 1]);
// This is part of a diamond that's already been added, so just skip this loop iteration.
if self.done.iter().any(|done| done == &self.w_filename(&pathbuf)) {
continue
} else if self.todos.iter().any(|todo| todo == &self.w_filename(&pathbuf)) {
return Err(VerifErr::Msg(format!("Import cycle detected at {:?}", pathbuf)))
}
let hierarchy = self.add_mmz_aux(self.w_filename(pathbuf))?;
acc.push(hierarchy);
},
_ => continue
}
}
}
Ok(acc)
}
}
impl Debug for FileData {
fn fmt(&self, f: &mut Formatter) -> FmtResult {
let mut d = f.debug_struct("FileData");
d.field("mmz_files", &"<omitted>");
d.field("mmz_hierarchy", &self.mmz_hierarchy);
d.field("todos", &self.todos);
d.field("done", &self.done);
d.field("root_mmz_path", &self.root_mmz_path);
d.finish()
}
}
#[test]
fn import_test1() {
let file_data = FileData::new_from("./test_resources/a.mmb", Some("./test_resources/a.mm0")).unwrap();
let file_data2 = FileData::new_from("./test_resources/a.mmb", None::<String>).unwrap();
assert_eq!(file_data.mmz_hierarchy, file_data2.mmz_hierarchy);
let d = ImportGraph::NoImports(PathBuf::from("./test_resources/d.mm0").canonicalize().unwrap());
let e = ImportGraph::NoImports(PathBuf::from("./test_resources/e.mm0").canonicalize().unwrap());
let f = ImportGraph::NoImports(PathBuf::from("./test_resources/f.mm0").canonicalize().unwrap());
let g = ImportGraph::NoImports(PathBuf::from("./test_resources/g.mm0").canonicalize().unwrap());
let b = ImportGraph::HasImports(PathBuf::from("./test_resources/b.mm0").canonicalize().unwrap(), vec![d, e]);
let c = ImportGraph::HasImports(PathBuf::from("./test_resources/c.mm0").canonicalize().unwrap(), vec![f, g]);
let a = ImportGraph::HasImports(PathBuf::from("./test_resources/a.mm0").canonicalize().unwrap(), vec![b, c]);
assert_eq!(file_data.mmz_hierarchy, a);
}
#[test]
fn import_test_diamond1() {
let file_data = FileData::new_from("./test_resources/diamond/a.mmb", Some("./test_resources/diamond/a.mm0")).unwrap();
let d = ImportGraph::NoImports(PathBuf::from("test_resources/diamond/d.mm0").canonicalize().unwrap());
let e = ImportGraph::NoImports(PathBuf::from("test_resources/diamond/e.mm0").canonicalize().unwrap());
let f = ImportGraph::NoImports(PathBuf::from("test_resources/diamond/f.mm0").canonicalize().unwrap());
let g = ImportGraph::NoImports(PathBuf::from("test_resources/diamond/g.mm0").canonicalize().unwrap());
let b = ImportGraph::HasImports(PathBuf::from("test_resources/diamond/b.mm0").canonicalize().unwrap(), vec![d, e]);
let c = ImportGraph::HasImports(PathBuf::from("test_resources/diamond/c.mm0").canonicalize().unwrap(), vec![f, g]);
let a = ImportGraph::HasImports(PathBuf::from("test_resources/diamond/a.mm0").canonicalize().unwrap(), vec![b, c]);
assert_eq!(file_data.mmz_hierarchy, a);
}
#[test]
#[should_panic]
fn import_test_cycle1() {
let _ = FileData::new_from("./test_resources/cycle/cycle.mmb", Some("./test_resources/cycle/cycleA.mm0")).unwrap();
}