doughlang: "fix" semi elision, add "fully qualified" paths, add proper pattern prec parsing.
This actually gets some old code parsing!
samples/usable.do (new file, 66 lines added)
@@ -0,0 +1,66 @@
+//! Sample doughlang code
+
+struct Expr {
+    Atom (f64),
+    Op (char, [Expr]),
+}
+
+fn execute(expr: Expr) -> f64 {
+    match expr {
+        ExprAtom(value) => value,
+        ExprOp('*', [lhs, rhs]) => execute(lhs) * execute(rhs),
+        ExprOp('/', [lhs, rhs]) => execute(lhs) / execute(rhs),
+        ExprOp('%', [lhs, rhs]) => execute(lhs) % execute(rhs),
+        ExprOp('+', [lhs, rhs]) => execute(lhs) + execute(rhs),
+        ExprOp('-', [lhs, rhs]) => execute(lhs) - execute(rhs),
+        // ExprOp('>', [lhs, rhs]) => (execute(lhs) as u64 >> execute(rhs) as u64) as f64,
+        // ExprOp('<', [lhs, rhs]) => (execute(lhs) as u64 << execute(rhs) as u64) as f64,
+        ExprOp('-', [lhs]) => - execute(lhs),
+        other => {
+            panic("Unknown operation: " + fmt(other))
+        }
+    }
+}
+
+/// Formats an expression to a string
+fn fmt_expr(expr: Expr) -> str {
+    match expr {
+        ExprAtom(value) => fmt(value),
+        ExprOp(operator, [lhs, rhs]) => fmt('(', fmt_expr(lhs), ' ', operator, ' ', fmt_expr(rhs), ')'),
+        ExprOp(operator, [rhs]) => fmt(operator, fmt_expr(rhs)),
+        _ => println("Unexpected expr: ", expr),
+    }
+}
+
+/// Prints an expression
+fn print_expr(expr: Expr) {
+    println(fmt_expr(expr))
+}
+
+/// Parses expressions
+fn parse(line: [char], power: i32) -> (Expr, [char]) {
+    fn map((expr, line): (Expr, [char]), f: fn(Expr) -> Expr) -> (Expr, [char]) {
+        (f(expr), line)
+    }
+
+    line = space(line);
+
+    let (lhs, line) = match line {
+        ['0'..'9', ..] => number(line),
+        ['(', ..rest] => match parse(rest, Power::None) {
+            (expr, [')', ..rest]) => (expr, rest),
+            (expr, rest) => panic(fmt("Expected ')', got ", expr, ", ", rest)),
+        },
+        [op, ..rest] => parse(rest, pre_bp(op)).map(|lhs| Expr::Op(op, [lhs])),
+        _ => panic("Unexpected end of input"),
+    };
+
+    while let [op, ..rest] = space(line) {
+        let (before, after) = inf_bp(op);
+        if before < power {
+            break;
+        };
+        (lhs, line) = parse(rest, after).map(|rhs| Expr::Op(op, [lhs, rhs]));
+    };
+    (lhs, line)
+}
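Aside (not part of the commit): the sample's parse function is a binding-power (Pratt) loop — pre_bp/inf_bp assign each operator a power, and a subexpression only keeps extending while the next operator binds at least as tightly as the power it was called with. Below is a minimal, self-contained Rust sketch of that same loop shape; the binding-power values and single-digit atoms are illustrative assumptions, not anything defined by this repository.

fn inf_bp(op: char) -> Option<(u8, u8)> {
    // (left, right) binding powers; made-up values for the sketch
    Some(match op {
        '+' | '-' => (1, 2),
        '*' | '/' | '%' => (3, 4),
        _ => return None,
    })
}

fn parse(src: &[char], power: u8) -> (f64, usize) {
    let mut i = 0;
    // prefix position: a single digit or a parenthesized group
    let mut lhs = match src[i] {
        '(' => {
            let (v, used) = parse(&src[i + 1..], 0);
            i += used + 2; // skip '(', the body, and the ')' (assumes well-formed input)
            v
        }
        c => {
            i += 1;
            c.to_digit(10).expect("digit") as f64
        }
    };
    // infix position: keep folding while the operator binds at least as tightly as `power`
    while i < src.len() {
        let op = src[i];
        let Some((before, after)) = inf_bp(op) else { break };
        if before < power {
            break;
        }
        let (rhs, used) = parse(&src[i + 1..], after);
        i += used + 1;
        lhs = match op {
            '+' => lhs + rhs,
            '-' => lhs - rhs,
            '*' => lhs * rhs,
            '/' => lhs / rhs,
            '%' => lhs % rhs,
            _ => unreachable!(),
        };
    }
    (lhs, i)
}

fn main() {
    let src: Vec<char> = "1+2*(3-4)".chars().collect();
    assert_eq!(parse(&src, 0).0, -1.0);
}

The same level-threshold check (the sample's `if before < power { break }`) is essentially what the pattern parser below adopts with its `level <= prec` test over PPrec.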
src/ast.rs (125 lines changed)
@@ -18,9 +18,19 @@ impl<T: Annotation, A: Annotation> std::fmt::Debug for Anno<T, A> {
 pub trait Annotation: Clone + std::fmt::Display + std::fmt::Debug + PartialEq + Eq {}
 impl<T: Clone + std::fmt::Debug + std::fmt::Display + PartialEq + Eq> Annotation for T {}
 
-//
-// TODO: Identifier interning
-//
+/// A qualified identifier
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct FqPath {
+    // TODO: Identifier interning
+    pub parts: Vec<String>,
+    // TODO:
+}
+
+impl From<&str> for FqPath {
+    fn from(value: &str) -> Self {
+        Self { parts: vec![value.to_owned()] }
+    }
+}
 
 /// A literal value (boolean, character, integer, string)
 #[derive(Clone, Debug, PartialEq, Eq)]
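Aside (illustrative only; the crate's own Display for FqPath goes through its f.list helper, shown further down): the new FqPath amounts to a list of ::-separated segments, displayed by joining with "::", and the FqPath parser later in this diff pushes an empty leading segment to represent a rooted path such as ::std::fmt. A standalone Rust sketch of that behavior:

#[derive(Clone, Debug, PartialEq, Eq)]
struct FqPath {
    parts: Vec<String>,
}

impl std::fmt::Display for FqPath {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Join segments with "::"; an empty first segment yields a leading "::"
        write!(f, "{}", self.parts.join("::"))
    }
}

fn main() {
    let p = FqPath { parts: vec!["Expr".into(), "Op".into()] };
    assert_eq!(p.to_string(), "Expr::Op");
    // A leading "" segment stands for the root, as the parser's comment calls it:
    let rooted = FqPath { parts: vec!["".into(), "std".into(), "fmt".into()] };
    assert_eq!(rooted.to_string(), "::std::fmt");
}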
@@ -40,26 +50,37 @@ pub enum Literal {
 pub enum Pat {
     /// Matches anything without binding
     Ignore,
-    /// Matches nothing; used for macro substitution.
+    /// Matches nothing; used for macro substitution
     MetId(String),
     /// Matches anything, and binds it to a name
     Name(String),
+    /// Matches against a named const value
+    Path(FqPath),
     /// Matches a Struct Expression `Ident { Pat }`
-    Struct(String, Box<Pat>),
+    Struct(FqPath, Box<Pat>),
     /// Matches a Tuple Struct Expression `Ident ( Pat )`
-    TupStruct(String, Box<Pat>),
-    /// Matches a partial decomposition (`..rest`) or upper-bounded range (`..100`).
-    Rest(Option<Box<Pat>>),
+    TupStruct(FqPath, Box<Pat>),
     /// Matches a literal value by equality comparison
     Lit(Literal),
-    /// Matches the elements of a tuple
-    Tuple(Vec<Pat>),
-    /// Matches the elements
-    Slice(Vec<Pat>),
-    /// Matches one of the provided alternates
-    Alt(Vec<Pat>),
     /// Matches a typed pattern
     Typed(Box<Pat>, Ty),
+    /// Matches a compound pattern
+    Op(PatOp, Vec<Pat>),
+}
+
+/// Operators on lists of patterns
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub enum PatOp {
+    /// Matches a partial decomposition (`..rest`) or upper-bounded range (`..100`)
+    Rest,
+    /// Matches an exclusive bounded range (`0..100`)
+    RangeEx,
+    /// Matches the elements of a tuple
+    Tuple,
+    /// Matches the elements of a slice or array
+    Slice,
+    /// Matches one of a list of alternatives
+    Alt,
 }
 
 /// In-universe types
@@ -68,7 +89,7 @@ pub enum Ty {
     /// `_`
     Infer,
     /// `(Identifier :: )* Identifier`
-    Named(String),
+    Named(FqPath),
     /// `(..Tys)`
     Tuple(Vec<Ty>),
     /// `[Ty]`
@@ -78,11 +99,6 @@ pub enum Ty {
     /// `[..Args, Rety]`
     Fn(Vec<Ty>),
 }
-impl Default for Ty {
-    fn default() -> Self {
-        Self::Tuple(vec![])
-    }
-}
 
 /// A `let` binding
 /// ```ignore
@@ -148,7 +164,7 @@ pub struct Struct(pub Pat);
 #[derive(Clone, Debug, PartialEq, Eq)]
 pub enum Expr<A: Annotation = Span> {
     /// An identifier
-    Id(String),
+    Id(FqPath),
     /// A meta-identifier
     MetId(String),
     /// A literal bool, string, char, or int
@@ -197,11 +213,7 @@ impl<A: Annotation> Expr<A> {
     pub fn is_place(&self) -> bool {
         matches!(
             self,
-            Self::Id(_)
-                | Self::Op(Op::Index, _)
-                | Self::Op(Op::Dot, _)
-                | Self::Op(Op::Path, _)
-                | Self::Op(Op::Deref, _)
+            Self::Id(_) | Self::Op(Op::Index, _) | Self::Op(Op::Dot, _) | Self::Op(Op::Deref, _)
         )
     }
 
@@ -237,8 +249,7 @@ pub enum Op {
     Break, // break Expr
     Return, // return Expr
 
     Dot, // Expr . Expr
-    Path, // Expr :: Expr
 
     RangeEx, // Expr? ..Expr
     RangeIn, // Expr? ..=Expr
@@ -290,6 +301,13 @@ impl Display for Literal {
     }
 }
 
+impl Display for FqPath {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        let Self { parts } = self;
+        f.list(parts, "::")
+    }
+}
+
 impl<T: Display + Annotation, A: Annotation> Display for Anno<T, A> {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         write!(f, "{}", self.0)
@@ -351,7 +369,7 @@ impl Display for Struct {
         let Self(pat) = self;
         match pat {
             Pat::Struct(name, bind) => match bind.as_ref() {
-                Pat::Tuple(parts) => f
+                Pat::Op(PatOp::Tuple, parts) => f
                     .delimit_indented(fmt!("struct {name} {{"), "}")
                     .list_wrap("\n", parts, ",\n", ",\n"),
                 other => write!(f, "{name} {{ {other} }}"),
@@ -431,7 +449,6 @@ impl Display for Op {
             Op::Break => "break ".fmt(f),
             Op::Return => "return ".fmt(f),
             Op::Dot => ".".fmt(f),
-            Op::Path => "::".fmt(f),
             Op::RangeEx => "..".fmt(f),
             Op::RangeIn => "..=".fmt(f),
             Op::Neg => "-".fmt(f),
@@ -486,17 +503,23 @@ impl Display for Pat {
             Self::Lit(literal) => literal.fmt(f),
             Self::MetId(name) => write!(f, "`{name}"),
             Self::Name(name) => name.fmt(f),
+            Self::Path(path) => path.fmt(f),
             Self::Struct(name, bind) => match bind.as_ref() {
-                Pat::Tuple(parts) => f.delimit(fmt!("{name} {{"), "}").list(parts, ", "),
+                Pat::Op(PatOp::Tuple, parts) => f.delimit(fmt!("{name} {{"), "}").list(parts, ", "),
                 other => write!(f, "{name} {{ {other} }}"),
             },
             Self::TupStruct(name, bind) => write!(f, "{name} {bind}"),
-            Self::Rest(Some(rest)) => write!(f, "..{rest}"),
-            Self::Rest(None) => write!(f, ".."),
-            Self::Tuple(pats) => f.delimit("(", ")").list(pats, ", "),
-            Self::Slice(pats) => f.delimit("[", "]").list(pats, ", "),
-            Self::Alt(pats) => f.delimit("<", ">").list(pats, " | "),
             Self::Typed(pat, ty) => write!(f, "{pat}: {ty}"),
+            Self::Op(PatOp::Rest, pats) => match pats.as_slice() {
+                [] => write!(f, ".."),
+                [rest] => write!(f, "..{rest}"),
+                [from, to] => write!(f, "{from}..{to}"),
+                _ => f.list(pats, "<..>"),
+            },
+            Self::Op(PatOp::RangeEx, pats) => f.delimit("(", ")").list(pats, ".."),
+            Self::Op(PatOp::Tuple, pats) => f.delimit("(", ")").list(pats, ", "),
+            Self::Op(PatOp::Slice, pats) => f.delimit("[", "]").list(pats, ", "),
+            Self::Op(PatOp::Alt, pats) => f.delimit("<", ">").list(pats, " | "),
         }
     }
 }
@@ -524,21 +547,35 @@ impl<A: Annotation> TryFrom<Expr<A>> for Pat {
 
     fn try_from(value: Expr<A>) -> Result<Self, Self::Error> {
         Ok(match value {
-            Expr::Id(name) if name == "_" => Self::Ignore,
-            Expr::Id(name) => Self::Name(name),
+            Expr::Id(FqPath { mut parts }) if parts.len() == 1 => {
+                match parts.pop().expect("parts should have len 1") {
+                    ig if ig == "_" => Self::Ignore,
+                    name => Self::Name(name),
+                }
+            }
+            Expr::Id(path) => Self::Path(path),
             Expr::MetId(name) => Self::MetId(name),
             Expr::Lit(literal) => Self::Lit(literal),
-            Expr::Op(Op::RangeEx, exprs) if exprs.is_empty() => Self::Rest(None),
-            Expr::Op(Op::RangeEx, mut exprs) if exprs.len() == 1 => {
-                Self::Rest(Some(Box::new(Self::try_from(exprs.remove(0))?)))
-            }
-            Expr::Op(Op::Tuple, exprs) => Self::Tuple(
+            Expr::Op(Op::RangeEx, exprs) => Self::Op(
+                if exprs.len() > 1 {
+                    PatOp::RangeEx
+                } else {
+                    PatOp::Rest
+                },
+                exprs
+                    .into_iter()
+                    .map(Self::try_from)
+                    .collect::<Result<Vec<_>, Expr<A>>>()?,
+            ),
+            Expr::Op(Op::Tuple, exprs) => Self::Op(
+                PatOp::Tuple,
                 exprs
                     .into_iter()
                     .map(Self::try_from)
                     .collect::<Result<_, _>>()?,
             ),
-            Expr::Op(Op::Array, exprs) => Self::Slice(
+            Expr::Op(Op::Array, exprs) => Self::Op(
+                PatOp::Slice,
                 exprs
                     .into_iter()
                     .map(Self::try_from)
@@ -260,28 +260,24 @@ impl<A: Annotation> Match<A> for Pat {
             (Pat::Ignore, _) => false,
             (Pat::Name(pat), Pat::Name(expr)) => pat == expr,
             (Pat::Name(_), _) => false,
+            (Pat::Path(_), Pat::Path(_)) => true,
+            (Pat::Path(_), _) => false,
             (Pat::Struct(_, pat), Pat::Struct(_, expr)) => Match::recurse(sub, pat, expr),
             (Pat::Struct(..), _) => false,
             (Pat::TupStruct(_, pat), Pat::TupStruct(_, expr)) => Match::recurse(sub, pat, expr),
             (Pat::TupStruct(..), _) => false,
-            (Pat::Rest(pat), Pat::Rest(expr)) => Match::recurse(sub, pat, expr),
-            (Pat::Rest(_), _) => false,
             (Pat::Lit(pat), Pat::Lit(expr)) => pat == expr,
             (Pat::Lit(_), _) => false,
-            (Pat::Tuple(pat), Pat::Tuple(expr)) => Match::recurse(sub, pat, expr),
-            (Pat::Tuple(_), _) => false,
-            (Pat::Slice(pat), Pat::Slice(expr)) => Match::recurse(sub, pat, expr),
-            (Pat::Slice(_), _) => false,
-            (Pat::Alt(pat), Pat::Alt(expr)) => Match::recurse(sub, pat, expr),
-            (Pat::Alt(_), _) => false,
             (Pat::Typed(pat, _), Pat::Typed(expr, _)) => Match::recurse(sub, pat, expr),
             (Pat::Typed(..), _) => false,
+            (Pat::Op(_, pat), Pat::Op(_, expr)) => Match::recurse(sub, pat, expr),
+            (Pat::Op(..), _) => false,
         }
     }
 
     fn apply(&mut self, sub: &Subst<A>) {
         match self {
-            Pat::Ignore | Pat::Name(_) | Pat::Lit(_) => {}
+            Pat::Ignore | Pat::Name(_) | Pat::Path(_) | Pat::Lit(_) => {}
             Pat::MetId(id) => {
                 if let Some(expr) = sub.pat.get(id) {
                     *self = expr.clone()
@@ -289,14 +285,11 @@ impl<A: Annotation> Match<A> for Pat {
             }
             Pat::Struct(_, expr) => expr.apply(sub),
             Pat::TupStruct(_, expr) => expr.apply(sub),
-            Pat::Rest(pat) => pat.apply(sub),
-            Pat::Tuple(pats) => pats.apply(sub),
-            Pat::Slice(pats) => pats.apply(sub),
-            Pat::Alt(pats) => pats.apply(sub),
             Pat::Typed(pat, ty) => {
                 pat.apply(sub);
                 ty.apply(sub);
             }
+            Pat::Op(_, pats) => pats.apply(sub),
         }
     }
 }
@@ -119,7 +119,7 @@ fn pats() -> Result<(), Box<dyn Error>> {
             return Ok(Response::Break);
         }
         loop {
-            match parser.parse::<Pat>(PPrec::Max) {
+            match parser.parse::<Pat>(PPrec::Min) {
                 Err(ParseError::FromLexer(LexError { res: "EOF", .. })) => {
                     break Ok(Response::Accept);
                 }
src/parser.rs (274 lines changed)
@@ -43,12 +43,13 @@ pub struct Parser<'t> {
     pub lexer: Lexer<'t>,
     pub next_tok: Option<Token>,
     pub last_loc: Span,
+    pub elide_do: bool,
 }
 
 impl<'t> Parser<'t> {
     /// Constructs a new Parser
     pub fn new(lexer: Lexer<'t>) -> Self {
-        Self { lexer, next_tok: None, last_loc: Span::default() }
+        Self { lexer, next_tok: None, last_loc: Span::default(), elide_do: false }
     }
 
     /// The identity function. This exists to make production chaining easier.
@@ -86,7 +87,9 @@ impl<'t> Parser<'t> {
 
     /// Consumes and returns the currently-peeked [Token].
     pub fn take(&mut self) -> Option<Token> {
-        self.next_tok.take()
+        let tok = self.next_tok.take();
+        self.elide_do = matches!(tok, Some(Token { kind: TKind::RCurly, .. }));
+        tok
     }
 
     /// Consumes the currently-peeked [Token], returning its lexeme without cloning.
@@ -123,7 +126,7 @@ impl<'t> Parser<'t> {
     ) -> PResult<Vec<P>> {
         // TODO: This loses lexer errors
         while self.peek_if(end).is_none() {
-            elems.push(self.parse(level)?);
+            elems.push(self.parse(level.clone())?);
             if self.next_if(sep).is_err() {
                 break;
             }
@@ -143,7 +146,7 @@ impl<'t> Parser<'t> {
         sep: TKind,
     ) -> PResult<Vec<P>> {
         loop {
-            elems.push(self.parse(level)?);
+            elems.push(self.parse(level.clone())?);
             if self.next_if(sep).is_err() {
                 break Ok(elems);
             }
@@ -181,11 +184,36 @@ impl<'t> Parser<'t> {
 }
 
 pub trait Parse<'t> {
-    type Prec: Copy;
+    type Prec: Clone;
     fn parse(p: &mut Parser<'t>, _level: Self::Prec) -> PResult<Self>
     where Self: Sized;
 }
 
+impl<'t> Parse<'t> for FqPath {
+    // ugly hack: provide a partial path to parse()
+    type Prec = ();
+
+    fn parse(p: &mut Parser<'t>, _level: Self::Prec) -> PResult<Self> {
+        let mut parts = vec![];
+        if p.next_if(TKind::ColonColon).is_ok() {
+            parts.push("".into()); // the "root"
+        }
+        loop {
+            parts.push(
+                p.next_if(TKind::Identifier)?
+                    .lexeme
+                    .string()
+                    .expect("Identifier should have String"),
+            );
+            if p.next_if(TKind::ColonColon).is_err() {
+                break;
+            }
+        }
+
+        Ok(FqPath { parts })
+    }
+}
+
 impl<'t> Parse<'t> for Literal {
     type Prec = ();
     fn parse(p: &mut Parser<'t>, _level: ()) -> PResult<Self> {
@@ -220,24 +248,41 @@ impl<'t> Parse<'t> for Literal {
 #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
 pub enum PPrec {
     Min,
-    Typed,
-    Tuple,
     Alt,
+    Tuple,
+    Typed,
+    Range,
     Max,
 }
 
 impl PPrec {
     fn next(self) -> Self {
         match self {
-            Self::Min => Self::Min,
-            Self::Typed => Self::Min,
-            Self::Tuple => Self::Typed,
+            Self::Min => Self::Alt,
             Self::Alt => Self::Tuple,
-            Self::Max => Self::Alt,
+            Self::Tuple => Self::Typed,
+            Self::Typed => Self::Range,
+            Self::Range => Self::Max,
+            Self::Max => Self::Max,
         }
     }
 }
 
+enum PatPs {
+    Typed,
+    Op(PatOp),
+}
+
+fn pat_from_infix(token: &Token) -> Option<(PatPs, PPrec)> {
+    Some(match token.kind {
+        TKind::DotDot => (PatPs::Op(PatOp::RangeEx), PPrec::Range),
+        TKind::Colon => (PatPs::Typed, PPrec::Typed),
+        TKind::Comma => (PatPs::Op(PatOp::Tuple), PPrec::Tuple),
+        TKind::Bar => (PatPs::Op(PatOp::Alt), PPrec::Alt),
+        _ => None?,
+    })
+}
+
 impl<'t> Parse<'t> for Pat {
     type Prec = PPrec;
     fn parse(p: &mut Parser<'t>, level: PPrec) -> PResult<Self> {
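Aside (a standalone restatement, not the crate's code): the reworked PPrec ladder orders pattern operators loosest-to-tightest as Alt < Tuple < Typed < Range, and the pattern infix loop only extends the current pattern while `level <= prec`, taking `prec` from pat_from_infix. The sketch below just re-derives that ordering so the check is easy to read in isolation:

#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
enum PPrec { Min, Alt, Tuple, Typed, Range, Max }

impl PPrec {
    // Same ladder as the diff: each level hands the next-tighter level onward
    fn next(self) -> Self {
        match self {
            Self::Min => Self::Alt,
            Self::Alt => Self::Tuple,
            Self::Tuple => Self::Typed,
            Self::Typed => Self::Range,
            Self::Range => Self::Max,
            Self::Max => Self::Max,
        }
    }
}

fn main() {
    // At PPrec::Tuple, a `|` (mapped to PPrec::Alt) no longer passes the
    // `level <= prec` check, but `:` (Typed) and `..` (Range) still do.
    assert!(PPrec::Tuple > PPrec::Alt);
    assert!(PPrec::Tuple <= PPrec::Typed);
    assert_eq!(PPrec::Min.next(), PPrec::Alt);
}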
@@ -249,63 +294,75 @@ impl<'t> Parse<'t> for Pat {
             TKind::True | TKind::False | TKind::Character | TKind::Integer | TKind::String => {
                 Pat::Lit(p.parse(())?)
             }
+            TKind::Bar => p.consume().parse(level)?,
             TKind::Identifier => match tok.lexeme.str() {
                 Some("_") => p.consume().then(Pat::Ignore),
                 _ => {
-                    let name = p
-                        .take_lexeme()
-                        .expect("should have Token")
-                        .string()
-                        .expect("Identifier token should have String");
-                    match p.peek().map(|t| t.kind)? {
-                        TKind::LParen => Pat::TupStruct(name, p.parse(PPrec::Tuple)?),
-                        TKind::LCurly => Pat::Struct(
-                            name,
+                    let mut path: FqPath = p.parse(())?;
+                    // TODO: make these postfix.
+                    match p.peek().map(|t| t.kind) {
+                        Ok(TKind::LParen) => Pat::TupStruct(path, p.parse(PPrec::Typed)?),
+                        Ok(TKind::LCurly) => Pat::Struct(
+                            path,
                             p.consume()
-                                .opt(PPrec::Tuple, TKind::RCurly)?
-                                .unwrap_or_else(|| Box::new(Pat::Tuple(vec![]))),
+                                .opt(PPrec::Alt, TKind::RCurly)?
+                                .unwrap_or_else(|| Box::new(Pat::Op(PatOp::Tuple, vec![]))),
                         ),
-                        _ => Pat::Name(name),
+                        Ok(_) | Err(ParseError::FromLexer(LexError { pos: _, res: "EOF" })) => {
+                            match path.parts.len() {
+                                1 => Self::Name(path.parts.pop().expect("name has 1 part")),
+                                _ => Self::Path(path),
+                            }
+                        }
+                        Err(e) => Err(e)?,
                     }
                 }
             },
             TKind::Grave => Pat::MetId(p.consume().next()?.lexeme.to_string()),
-            TKind::DotDot => Pat::Rest(match p.consume().peek_if(TKind::Identifier) {
-                Some(_) => Some(p.parse(level)?),
-                None => None,
-            }),
-            TKind::LParen => {
-                Pat::Tuple(
-                    p.consume()
-                        .list(vec![], PPrec::Typed, TKind::Comma, TKind::RParen)?,
-                )
-            }
-            TKind::LBrack => {
-                Pat::Slice(
-                    p.consume()
-                        .list(vec![], PPrec::Typed, TKind::Comma, TKind::RBrack)?,
-                )
-            }
+            TKind::DotDot => Pat::Op(
+                PatOp::Rest,
+                // Identifier in Rest position always becomes binder
+                match p.consume().peek()?.kind {
+                    TKind::Identifier => vec![Pat::Name(
+                        p.take_lexeme()
+                            .expect("should have lexeme")
+                            .string()
+                            .expect("should be string"),
+                    )],
+                    TKind::Grave | TKind::Integer | TKind::Character => vec![p.parse(level)?],
+                    _ => vec![],
+                },
+            ),
+            TKind::LParen => Pat::Op(
+                PatOp::Tuple,
+                p.consume()
+                    .list(vec![], PPrec::Typed, TKind::Comma, TKind::RParen)?,
+            ),
+            TKind::LBrack => Pat::Op(
+                PatOp::Slice,
+                p.consume()
+                    .list(vec![], PPrec::Typed, TKind::Comma, TKind::RBrack)?,
+            ),
             _ => Err(ParseError::NotPattern(tok.kind, tok.span))?,
         };
 
-        // Infix
-        while let Ok(tok) = p.peek() {
+        while let Ok(tok) = p.peek()
+            && let Some((op, prec)) = pat_from_infix(tok)
+            && level <= prec
+        {
             let kind = tok.kind;
-            head = match kind {
-                TKind::Colon if level >= PPrec::Typed => {
-                    Pat::Typed(head.into(), p.consume().parse(())?)
-                }
-                TKind::Comma if level >= PPrec::Tuple => Pat::Tuple(p.consume().list_bare(
-                    vec![head],
-                    PPrec::Tuple.next(),
-                    kind,
-                )?),
-                TKind::Bar if level >= PPrec::Alt => {
-                    Pat::Alt(p.consume().list_bare(vec![head], PPrec::Alt.next(), kind)?)
-                }
-                _ => break,
+            head = match op {
+                PatPs::Typed => Pat::Typed(head.into(), p.consume().parse(())?),
+                PatPs::Op(op @ PatOp::RangeEx) => Pat::Op(
+                    op,
+                    match p.consume().peek().map(|t| t.kind) {
+                        Ok(TKind::Integer | TKind::Character | TKind::Identifier) => {
+                            vec![head, p.parse(prec.next())?]
+                        }
+                        _ => vec![head],
+                    },
+                ),
+                PatPs::Op(op) => Pat::Op(op, p.consume().list_bare(vec![head], prec.next(), kind)?),
             }
         }
 
@@ -323,12 +380,7 @@ impl<'t> Parse<'t> for Ty {
         let head = match tok.kind {
             TKind::Identifier => match tok.lexeme.str() {
                 Some("_") => p.consume().then(Ty::Infer),
-                _ => Ty::Named(
-                    p.take_lexeme()
-                        .expect("should have Token")
-                        .string()
-                        .expect("Identifier token should have String"),
-                ),
+                _ => Ty::Named(p.parse(())?),
             },
             TKind::LBrack => {
                 let ty = p.consume().parse(level)?;
@@ -346,13 +398,11 @@ impl<'t> Parse<'t> for Ty {
                 p.consume().consume_if(TKind::LParen)?;
 
                 let mut tys = p.list(vec![], (), TKind::Comma, TKind::RParen)?;
-                match p.next_if(TKind::Arrow) {
-                    Ok(_) => {
-                        tys.push(p.parse(())?);
-                        Ty::Fn(tys)
-                    }
-                    _ => Ty::Tuple(tys),
-                }
+                tys.push(match p.next_if(TKind::Arrow) {
+                    Ok(_) => p.parse(())?,
+                    _ => Ty::Tuple(vec![]),
+                });
+                Ty::Fn(tys)
             }
             TKind::LParen => {
                 let mut tys = p.consume().list(vec![], (), TKind::Comma, TKind::RParen)?;
@@ -452,6 +502,7 @@ pub enum Ps {
     Match, // match Expr { MatchArm,* }
     Mod, // mod Ty Expr
     ImplicitDo, // An implicit semicolon
+    ExplicitDo, // An explicit leading semicolon
     End, // Produces an empty value.
     Op(Op), // A normal [ast::Op]
 }
@@ -459,10 +510,10 @@ pub enum Ps {
 fn from_prefix(token: &Token) -> PResult<(Ps, Prec)> {
     Ok(match token.kind {
         TKind::Do => (Ps::Op(Op::Do), Prec::Do),
+        TKind::Semi => (Ps::ExplicitDo, Prec::Do),
 
-        TKind::Identifier => (Ps::Id, Prec::Max),
+        TKind::Identifier | TKind::ColonColon => (Ps::Id, Prec::Max),
         TKind::Grave => (Ps::Mid, Prec::Max),
-        TKind::ColonColon => (Ps::Op(Op::Path), Prec::Max),
         TKind::True | TKind::False | TKind::Character | TKind::Integer | TKind::String => {
             (Ps::Lit, Prec::Max)
         }
@@ -510,7 +561,6 @@ fn from_infix(token: &Token) -> PResult<(Ps, Prec)> {
         TKind::As => (Ps::Op(Op::As), Prec::Body),
         TKind::Comma => (Ps::Op(Op::Tuple), Prec::Tuple),
         TKind::Dot => (Ps::Op(Op::Dot), Prec::Project),
-        TKind::ColonColon => (Ps::Op(Op::Path), Prec::Max),
         TKind::AmpAmp => (Ps::Op(Op::LogAnd), Prec::LogAnd),
         TKind::BarBar => (Ps::Op(Op::LogOr), Prec::LogOr),
         TKind::Question => (Ps::Op(Op::Try), Prec::Unary),
@@ -539,22 +589,7 @@ fn from_infix(token: &Token) -> PResult<(Ps, Prec)> {
         TKind::Slash => (Ps::Op(Op::Div), Prec::Term),
         TKind::Rem => (Ps::Op(Op::Rem), Prec::Term),
 
-        TKind::True
-        | TKind::False
-        | TKind::Character
-        | TKind::Integer
-        | TKind::String
-        | TKind::Identifier
-        | TKind::Public
-        | TKind::Module
-        | TKind::Fn
-        | TKind::Do
-        | TKind::While
-        | TKind::If
-        | TKind::For
-        | TKind::Break
-        | TKind::Return => (Ps::ImplicitDo, Prec::Do),
-        kind => Err(ParseError::NotInfix(kind, token.span))?,
+        _ => (Ps::ImplicitDo, Prec::Do),
     })
 }
 
@@ -563,7 +598,7 @@ impl<'t> Parse<'t> for Const {
 
     fn parse(p: &mut Parser<'t>, _level: Self::Prec) -> PResult<Self> {
         Ok(Self(
-            p.consume().parse(PPrec::Alt)?,
+            p.consume().parse(PPrec::Tuple)?,
             p.consume_if(TKind::Eq)?.parse(Prec::Tuple.value())?,
         ))
     }
@@ -573,7 +608,7 @@ impl<'t> Parse<'t> for Struct {
     type Prec = ();
 
     fn parse(p: &mut Parser<'t>, _level: Self::Prec) -> PResult<Self> {
-        let value = p.consume().parse(PPrec::Tuple)?;
+        let value = p.consume().parse(PPrec::Min)?;
         Ok(Self(value))
     }
 }
@@ -585,19 +620,22 @@ impl<'t> Parse<'t> for Fn {
         match p.consume().next_if(TKind::Identifier) {
             Ok(Token { lexeme, .. }) => Ok(Self(
                 lexeme.string(),
-                p.parse(PPrec::Typed)?,
-                p.opt_if((), TKind::Arrow)?.unwrap_or_default(),
+                p.parse(PPrec::Tuple)?,
+                p.opt_if((), TKind::Arrow)?.unwrap_or(Ty::Tuple(vec![])),
                 p.parse(Prec::Body.next())?,
             )),
             _ => Ok(Self(
                 None,
-                Pat::Tuple(p.consume_if(TKind::LParen)?.list(
-                    vec![],
-                    PPrec::Tuple,
-                    TKind::Comma,
-                    TKind::RParen,
-                )?),
-                p.opt_if((), TKind::Arrow)?.unwrap_or_default(),
+                Pat::Op(
+                    PatOp::Tuple,
+                    p.consume_if(TKind::LParen)?.list(
+                        vec![],
+                        PPrec::Tuple,
+                        TKind::Comma,
+                        TKind::RParen,
+                    )?,
+                ),
+                p.opt_if((), TKind::Arrow)?.unwrap_or(Ty::Tuple(vec![])),
                 p.parse(Prec::Body.next())?,
             )),
         }
@@ -637,7 +675,7 @@ impl<'t> Parse<'t> for MatchArm {
     fn parse(p: &mut Parser<'t>, level: usize) -> PResult<Self> {
         p.next_if(TKind::Bar).ok();
         Ok(MatchArm(
-            p.parse(PPrec::Max)?,
+            p.parse(PPrec::Min)?,
             p.consume_if(TKind::FatArrow)?.parse(level)?,
         ))
     }
@@ -751,7 +789,10 @@ fn parse_for<'t>(p: &mut Parser<'t>, _level: ()) -> PResult<Expr> {
                 Expr::Op(Op::Break, vec![fail]).anno(fspan),
             ),
             MatchArm(
-                Pat::TupStruct("Some".into(), Box::new(Pat::Tuple(vec![pat]))),
+                Pat::TupStruct(
+                    "Some".into(),
+                    Box::new(Pat::Op(PatOp::Tuple, vec![pat])),
+                ),
                 pass,
             ),
         ],
@@ -784,7 +825,12 @@ impl<'t> Parse<'t> for Expr {
             Ps::End if level == prec.next() => Expr::Op(Op::Tuple, vec![]),
             Ps::End => Err(ParseError::NotPrefix(tok.kind, span))?,
 
-            Ps::Id => Expr::Id(p.take_lexeme().expect("should have ident").to_string()),
+            Ps::ExplicitDo => {
+                p.consume();
+                Expr::Op(Op::Tuple, vec![])
+            }
+
+            Ps::Id => Expr::Id(p.parse(())?),
             Ps::Mid => Expr::MetId(p.consume().next()?.lexeme.to_string()),
             Ps::Lit => Expr::Lit(p.parse(())?),
             Ps::Let => Expr::Let(p.parse(())?),
@@ -820,14 +866,14 @@ impl<'t> Parse<'t> for Expr {
                 None,
                 p.consume()
                     .opt(PPrec::Tuple, TKind::Bar)?
-                    .unwrap_or(Pat::Tuple(vec![])),
-                p.opt_if((), TKind::Arrow)?.unwrap_or_default(),
+                    .unwrap_or(Pat::Op(PatOp::Tuple, vec![])),
+                p.opt_if((), TKind::Arrow)?.unwrap_or(Ty::Infer),
                 p.parse(Prec::Body.next())?,
             ))),
             Ps::Lambda0 => Expr::Fn(Box::new(Fn(
                 None,
-                Pat::Tuple(vec![]),
-                p.consume().opt_if((), TKind::Arrow)?.unwrap_or_default(),
+                Pat::Op(PatOp::Tuple, vec![]),
+                p.consume().opt_if((), TKind::Arrow)?.unwrap_or(Ty::Infer),
                 p.parse(Prec::Body.next())?,
             ))),
             Ps::DoubleRef => p.consume().parse(prec.next()).map(|Anno(expr, span)| {
@@ -853,16 +899,16 @@ impl<'t> Parse<'t> for Expr {
             head = match op {
                 // Make (structor expressions) are context-sensitive
                 Ps::Make => match &head {
-                    Expr::Op(Op::Path, _) | Expr::Id(_) | Expr::MetId(_) => {
-                        Expr::Make(Box::new(Make(
-                            head.anno(span),
-                            p.consume().list(vec![], (), TKind::Comma, TKind::RCurly)?,
-                        )))
-                    }
+                    Expr::Id(_) | Expr::MetId(_) => Expr::Make(Box::new(Make(
+                        head.anno(span),
+                        p.consume().list(vec![], (), TKind::Comma, TKind::RCurly)?,
+                    ))),
                     _ => break,
                 },
+                // As is ImplicitDo (semicolon elision)
+                Ps::ImplicitDo if p.elide_do => head.and_do(span, p.parse(prec.next())?),
+                Ps::ImplicitDo => break,
                 Ps::Op(Op::Do) => head.and_do(span, p.consume().parse(prec.next())?),
-                Ps::ImplicitDo => head.and_do(span, p.parse(prec.next())?),
                 Ps::Op(Op::Index) => Expr::Op(
                     Op::Index,
                     p.consume()
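Aside (illustrative names, not the crate's API): the semicolon-elision rule wired up across these hunks is that take() remembers whether the token it just consumed was `}`, and a token that is not a real infix operator (Ps::ImplicitDo) only joins the next expression with an implicit do when that flag is set; otherwise the infix loop breaks. A minimal sketch of the flag by itself:

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum TKind {
    RCurly,
    Identifier,
}

struct Parser {
    elide_do: bool,
}

impl Parser {
    // Consuming a token records whether it was a closing brace
    fn take(&mut self, tok: TKind) -> TKind {
        self.elide_do = matches!(tok, TKind::RCurly);
        tok
    }

    // Would an expression starting at `next` be joined to the previous one
    // with an implicit `do`?
    fn implicit_do(&self, next: TKind) -> bool {
        next == TKind::Identifier && self.elide_do
    }
}

fn main() {
    let mut p = Parser { elide_do: false };
    p.take(TKind::RCurly);
    assert!(p.implicit_do(TKind::Identifier)); // `} foo` elides the `;`
    p.take(TKind::Identifier);
    assert!(!p.implicit_do(TKind::Identifier)); // `foo bar` does not
}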
@@ -873,7 +919,7 @@ impl<'t> Parse<'t> for Expr {
                     p.consume()
                         .list(vec![head.anno(span)], 0, TKind::Comma, TKind::RParen)?,
                 ),
-                Ps::Op(op @ (Op::Tuple | Op::Dot | Op::Path | Op::LogAnd | Op::LogOr)) => Expr::Op(
+                Ps::Op(op @ (Op::Tuple | Op::Dot | Op::LogAnd | Op::LogOr)) => Expr::Op(
                     op,
                     p.consume()
                         .list_bare(vec![head.anno(span)], prec.next(), kind)?,
@@ -883,7 +929,7 @@ impl<'t> Parse<'t> for Expr {
                     Expr::Op(op, vec![head.anno(span)])
                 }
                 Ps::Op(op) => Expr::Op(op, vec![head.anno(span), p.consume().parse(prec.next())?]),
-                _ => unimplemented!("infix {op:?}"),
+                _ => Err(ParseError::NotInfix(kind, span))?,
             }
         }
 