fix: lexer, parser align, create parser example

Artemy Egorov 2024-08-12 13:20:46 +03:00
parent 9592a8ffba
commit 50f5c7fe19
7 changed files with 132 additions and 32 deletions


@@ -96,8 +96,8 @@ row [
this is codeblock
}
# {# Text} Text after "`# " not modified
code "markdown" {# this is codeblock}
# {# Text} Text after "{#" not modified
code "markdown" {# this is codeblock}
]]
[[
@@ -125,14 +125,14 @@ row [
# Element
# Description
# ]
table {
+| Tag | Description |
| h | Heading |
| p | Paragraph |
| img | Image |
| link | Link |
| btn | Button |
| ul | Unordered list |
| br | Line break |
+| quantity | 7 |
}
# table {
# +| Tag | Description |
# | h | Heading |
# | p | Paragraph |
# | img | Image |
# | link | Link |
# | btn | Button |
# | ul | Unordered list |
# | br | Line break |
# +| quantity | 7 |
# }


@@ -15,7 +15,7 @@ fn main() {
}
Err(e) => e.into_iter().for_each(|e| {
Report::build(ReportKind::Error, src_file, e.span().start)
.with_code("Compiler")
.with_code("Lexer")
.with_message(e.to_string().clone())
.with_label(
Label::new((src_file, e.span().into_range()))

examples/daleth_parser.rs Normal file

@@ -0,0 +1,32 @@
use ariadne::{Color, Label, Report, ReportKind, Source};
use chumsky::{input::Input, Parser};
use dalet::daleth::{lexer::lexer, parser::parser};
fn main() {
let src_file = "daleth.dlth";
let src = include_str!("./daleth.dlth");
let lexed = lexer().parse(src).unwrap();
let parsed = parser().parse(lexed.as_slice().spanned((0..src.len()).into()));
match parsed.into_result() {
Ok(t) => {
println!("{:#?}", t);
// println!("{}", format(&t));
}
Err(e) => e.into_iter().for_each(|e| {
// println!("{:#}", )
Report::build(ReportKind::Error, src_file, e.span().start)
.with_code("Parser")
.with_message(e.to_string())
.with_label(
Label::new((src_file, e.span().into_range()))
.with_message(e.to_string())
.with_color(Color::Red),
)
.finish()
.print((src_file, Source::from(&src)))
.unwrap()
}),
};
}
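Assuming the standard Cargo examples layout, this example should be runnable with cargo run --example daleth_parser from the crate root; the include_str!("./daleth.dlth") path implies daleth.dlth sits next to it in examples/.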


@@ -10,8 +10,8 @@ pub fn lexer<'src>(
let token = choice((symbol(), tag(), argument(), textual()));
token
.padded_by(comment().padded().repeated())
.padded()
.padded_by(comment())
.map_with(|t, e| (t, e.span()))
.repeated()
.collect()


@@ -1,3 +1,5 @@
use core::fmt;
#[derive(Clone, Debug, PartialEq)]
pub enum Token<'src> {
// Symbols
@@ -64,3 +66,55 @@ pub enum Token<'src> {
Pre,
Meta,
}
impl<'src> fmt::Display for Token<'src> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Token::LSquare => write!(f, "["),
Token::RSquare => write!(f, "]"),
Token::ElOpen => write!(f, "[["),
Token::ElClose => write!(f, "]]"),
Token::NumberArgument(n) => write!(f, "{}", n),
Token::TextArgument(t) => write!(f, "{}", t),
Token::TextBody(_) => write!(f, "text body"),
Token::MLText(_) => write!(f, "text body"),
Token::MLMSText(_, _) => write!(f, "text body"),
Token::MLRText(_) => write!(f, "text body"),
Token::TextTag(_) => write!(f, "text tag"),
Token::Paragraph(_) => write!(f, "paragraph"),
Token::Comment(_) => write!(f, "comment"),
Token::EmptyLine => write!(f, "empty line"),
Token::El => write!(f, "el"),
Token::H => write!(f, "h"),
Token::P => write!(f, "p"),
Token::Br => write!(f, "br"),
Token::Ul => write!(f, "ul"),
Token::Ol => write!(f, "ol"),
Token::Row => write!(f, "row"),
Token::Link => write!(f, "link"),
Token::Navlink => write!(f, "navlink"),
Token::Btn => write!(f, "btn"),
Token::Navbtn => write!(f, "navbtn"),
Token::Img => write!(f, "img"),
Token::Table => write!(f, "table"),
Token::Tcol => write!(f, "tcol"),
Token::Tpcol => write!(f, "tpcol"),
Token::Hr => write!(f, "hr"),
Token::B => write!(f, "b"),
Token::I => write!(f, "i"),
Token::Bq => write!(f, "bq"),
Token::Footlnk => write!(f, "footlnk"),
Token::Footn => write!(f, "footn"),
Token::A => write!(f, "a"),
Token::S => write!(f, "s"),
Token::Sup => write!(f, "sup"),
Token::Sub => write!(f, "sub"),
Token::Disc => write!(f, "disc"),
Token::Block => write!(f, "block"),
Token::Carousel => write!(f, "carousel"),
Token::Code => write!(f, "code"),
Token::Pre => write!(f, "pre"),
Token::Meta => write!(f, "meta"),
}
}
}
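A minimal sketch of what the new Display impl enables, assuming the enum is reachable as dalet::daleth::lexer::types::Token (inferred from the crate imports above, not stated in this diff): parser diagnostics can now name tokens as they appear in source rather than via their Debug form.

use dalet::daleth::lexer::types::Token;

fn main() {
    // Assumed public module path; with Display implemented, Rich parser
    // errors can render expected/found tokens by their source-level names.
    assert_eq!(Token::ElOpen.to_string(), "[[");
    assert_eq!(Token::Ul.to_string(), "ul");
    println!("found {} while expecting {}", Token::RSquare, Token::El);
}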


@@ -2,19 +2,25 @@ pub mod types;
use super::{
lexer::types::Token,
types::{Span, Spanned},
utils::{set_indent, trim_indent},
types::Span,
utils::{set_spaces, trim_indent},
};
use crate::typed::{
AlignArg, Body, Hl, NNArg, NNBody, Page, TNullArg,
Tag::{self, *},
};
use crate::typed::{AlignArg, Body, Hl, NNArg, NNBody, Page, TNullArg, Tag::*};
use chumsky::prelude::*;
use types::*;
pub fn parser<'tokens, 'src: 'tokens>() -> impl Parser<
'tokens,
ParserInput<'tokens, 'src>,
Spanned<Page>,
extra::Err<Rich<'tokens, Token<'src>, Span>>,
> {
pub fn parser<'tokens, 'src: 'tokens>(
) -> impl Parser<'tokens, ParserInput<'tokens, 'src>, Page, extra::Err<Rich<'tokens, Token<'src>, Span>>>
{
tag().repeated().collect().map(|t| (Page { data: t }))
}
pub fn tag<'tokens, 'src: 'tokens>(
) -> impl Parser<'tokens, ParserInput<'tokens, 'src>, Tag, extra::Err<Rich<'tokens, Token<'src>, Span>>>
{
recursive(|tag| {
let tags_body = tag
.clone()
@@ -25,7 +31,7 @@ pub fn parser<'tokens, 'src: 'tokens>() -> impl Parser<
let text_body = select! {
Token::TextBody(t) => t.to_owned(),
Token::MLText(t) => trim_indent(t).to_owned(),
Token::MLMSText(n, t) => set_indent(t, n).to_owned(),
Token::MLMSText(n, t) => set_spaces(t, n).to_owned(),
Token::MLRText(t) => t.to_owned()
};
@@ -53,8 +59,11 @@ pub fn parser<'tokens, 'src: 'tokens>() -> impl Parser<
.or_not()
.map(|v| v.unwrap_or(TNullArg::Null));
let hlarg = num_arg.try_map(|n, s| Hl::try_from(n).map_err(|e| Rich::custom(s, e)));
let alignarg =
num_arg.try_map(|n, s| AlignArg::try_from(n).map_err(|e| Rich::custom(s, e)));
let alignarg = choice((
just(Token::TextArgument("start")).to(AlignArg::Start),
just(Token::TextArgument("center")).to(AlignArg::Start),
just(Token::TextArgument("end")).to(AlignArg::Start),
));
let el = just(Token::El).ignore_then(nnbody.clone()).map(El);
let h = just(Token::H)
@@ -66,7 +75,7 @@ pub fn parser<'tokens, 'src: 'tokens>() -> impl Parser<
let ul = just(Token::Ul).ignore_then(tags_body.clone()).map(Ul);
let ol = just(Token::Ol).ignore_then(tags_body.clone()).map(Ol);
let row = just(Token::Row)
.ignore_then(alignarg.or_not())
.ignore_then(alignarg.clone().or_not())
.then(tags_body.clone())
.map(|(arg, body)| Row(body, arg.unwrap_or(AlignArg::Start)));
let link = just(Token::Link)
@@ -141,7 +150,4 @@ pub fn parser<'tokens, 'src: 'tokens>() -> impl Parser<
.or(choice((block, carousel, code, pre, meta)))
.or(choice((el_text, el_tags, paragraph)))
})
.repeated()
.collect()
.map_with(|t, e| (Page { data: t }, e.span()))
}


@@ -28,6 +28,10 @@ pub fn set_indent(input: &str, indent: usize) -> String {
prepend_indent(&trim_indent(input), indent)
}
pub fn set_spaces(input: &str, spaces: usize) -> String {
prepend_spaces(&trim_indent(input), spaces)
}
fn trim_unused<'a>(s: &'a str) -> &'a str {
let mut trim_start = 0;
let mut been_newlines = false;
@@ -50,7 +54,11 @@ fn trim_unused<'a>(s: &'a str) -> &'a str {
}
pub fn prepend_indent(input: &str, indent: usize) -> String {
let indent = &" ".repeat(indent);
prepend_spaces(input, indent * 4)
}
fn prepend_spaces(input: &str, spaces: usize) -> String {
let indent = &" ".repeat(spaces);
let lines: Vec<String> = input
.lines()
.map(|line| format!("{}{}", indent, line))
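For context, a standalone sketch of the behavior these helpers aim for; the collect/join tail of prepend_spaces lies outside this hunk, so that part is an assumption.

// Hedged sketch, not the crate's exact code.
fn prepend_spaces_sketch(input: &str, spaces: usize) -> String {
    // Pad every line with the requested number of spaces.
    let pad = " ".repeat(spaces);
    input
        .lines()
        .map(|line| format!("{}{}", pad, line))
        .collect::<Vec<_>>()
        .join("\n")
}

fn main() {
    // prepend_indent(input, 1) now delegates to prepend_spaces(input, 1 * 4),
    // i.e. one indent level equals four spaces.
    assert_eq!(prepend_spaces_sketch("a\nb", 4), "    a\n    b");
}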