Allow using proc_macro tokens

And report errors
This commit is contained in:
Olivier Goffart 2020-05-11 22:24:28 +02:00
parent dc33cbbcda
commit b9cd725f78
9 changed files with 198 additions and 32 deletions

View file

@ -10,4 +10,5 @@ path = "lib.rs"
[dependencies]
quote = "1.0"
sixtyfps_compiler = { path = "../../../sixtyfps_compiler", features = ["proc_macro_span"] }

View file

@ -1,9 +1,104 @@
extern crate proc_macro;
use proc_macro::TokenStream;
use quote::quote;
use sixtyfps_compiler::*;
/// Recursively flatten a `proc_macro::TokenStream` into the flat list of
/// `parser::Token`s consumed by the sixtyfps compiler, preserving each
/// token's `proc_macro::Span` so errors can later be reported at the right
/// source location via `quote_spanned!`.
///
/// NOTE(review): the punctuation and literal mappings are deliberately
/// partial at this stage; anything unrecognized becomes `SyntaxKind::Error`
/// and is reported by the parser.
fn fill_token_vec(stream: TokenStream, vec: &mut Vec<parser::Token>) {
for t in stream {
use parser::SyntaxKind;
use proc_macro::TokenTree;
match t {
TokenTree::Ident(i) => {
vec.push(parser::Token {
kind: SyntaxKind::Identifier,
text: i.to_string().into(),
span: Some(i.span()),
..Default::default()
});
}
TokenTree::Punct(p) => {
// Only the punctuation the language currently uses is mapped.
let kind = match p.as_char() {
':' => SyntaxKind::Colon,
'=' => SyntaxKind::Equal,
';' => SyntaxKind::Semicolon,
_ => SyntaxKind::Error,
};
vec.push(parser::Token {
kind,
text: p.to_string().into(),
span: Some(p.span()),
..Default::default()
});
}
TokenTree::Literal(l) => {
let s = l.to_string();
// The proc_macro API does not expose the literal's kind, so guess
// it from the first character of its textual form.
let f = s.chars().next().unwrap();
let kind = if f == '"' {
SyntaxKind::StringLiteral
} else if f.is_digit(10) {
SyntaxKind::NumberLiteral
} else {
SyntaxKind::Error
};
vec.push(parser::Token {
kind,
text: s.into(),
span: Some(l.span()),
..Default::default()
});
}
TokenTree::Group(g) => {
use proc_macro::Delimiter::*;
use SyntaxKind::*;
// A group is re-emitted as explicit open/close delimiter tokens
// around its recursively-converted contents, because the
// compiler's token stream is flat.
let (l, r, sl, sr) = match g.delimiter() {
Parenthesis => (LParent, RParent, "(", ")"),
Brace => (LBrace, RBrace, "{", "}"),
Bracket => todo!(),
None => todo!(),
};
vec.push(parser::Token {
kind: l,
text: sl.into(),
span: Some(g.span()), // span_open is not stable
..Default::default()
});
fill_token_vec(g.stream(), vec);
vec.push(parser::Token {
kind: r,
text: sr.into(),
span: Some(g.span()), // span_close is not stable
..Default::default()
});
}
}
}
}
#[proc_macro]
pub fn sixtyfps(_item: TokenStream) -> TokenStream {
pub fn sixtyfps(stream: TokenStream) -> TokenStream {
let mut tokens = vec![];
fill_token_vec(stream, &mut tokens);
let (syntax_node, mut diag) = parser::parse_tokens(tokens);
//println!("{:#?}", syntax_node);
let tr = typeregister::TypeRegister::builtin();
let tree = object_tree::Document::from_node(syntax_node, &mut diag, &tr);
//println!("{:#?}", tree);
if !diag.inner.is_empty() {
let diags: Vec<_> = diag
.into_iter()
.map(|diagnostics::CompilerDiagnostic { message, span }| {
quote::quote_spanned!(span.span.unwrap().into() => compile_error!{ #message })
})
.collect();
return quote!(#(#diags)*).into();
}
let l = lower::LoweredComponent::lower(&*tree.root_component);
generator::generate(&l);
quote!(
#[derive(Default)]
struct SuperSimple;

View file

@ -24,5 +24,5 @@ sixtyfps::sixtyfps! {
}
fn main() {
SuperSimple.run();
SuperSimple::default().run();
}

View file

@ -10,6 +10,9 @@ path = "lib.rs"
[features]
# cpp: code generator for C++ code
cpp = []
# support for proc_macro spans in the token
proc_macro_span = []
[dependencies]
m_lexer = "0.0.4" # simple lexer for now

View file

@ -1,7 +1,33 @@
#[derive(Default, Debug, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, Default)]
pub struct Span {
pub offset: usize,
#[cfg(feature = "proc_macro_span")]
pub span: Option<proc_macro::Span>,
}
/// Hand-written equality: two spans compare equal when they have the same
/// byte offset. The optional `proc_macro::Span` field is deliberately
/// ignored (it cannot take part in a derived `PartialEq`).
impl PartialEq for Span {
fn eq(&self, other: &Span) -> bool {
self.offset == other.offset
}
}
impl Span {
/// Create a span from a plain byte offset into the source text,
/// leaving the optional proc_macro span at its default (unset).
pub fn new(offset: usize) -> Self {
Self { offset, ..Default::default() }
}
}
/// Build a `Span` directly from a `proc_macro::Span`; the byte `offset`
/// stays at its default value in that case.
#[cfg(feature = "proc_macro_span")]
impl From<proc_macro::Span> for Span {
fn from(span: proc_macro::Span) -> Self {
Self { span: Some(span), ..Default::default() }
}
}
/// A single compiler error: the message plus the location (`Span`)
/// it should be reported at.
#[derive(Default, Debug, Clone, PartialEq)]
pub struct CompilerDiagnostic {
pub message: String,
// NOTE(review): both the old `offset` field and its `span` replacement
// appear here — looks like a diff artifact; confirm only `span` should remain.
pub offset: usize,
pub span: Span,
}
#[derive(Default, Debug)]
@ -10,11 +36,19 @@ pub struct Diagnostics {
}
impl Diagnostics {
pub fn push_error(&mut self, message: String, offset: usize) {
self.inner.push(CompilerDiagnostic { message, offset });
pub fn push_error(&mut self, message: String, span: Span) {
self.inner.push(CompilerDiagnostic { message, span });
}
pub fn has_error(&self) -> bool {
!self.inner.is_empty()
}
}
/// Iterate over the collected diagnostics by value, consuming the
/// `Diagnostics` (used e.g. to turn each entry into a `compile_error!`).
impl IntoIterator for Diagnostics {
type Item = CompilerDiagnostic;
type IntoIter = <Vec<CompilerDiagnostic> as IntoIterator>::IntoIter;
fn into_iter(self) -> Self::IntoIter {
self.inner.into_iter()
}
}

View file

@ -10,6 +10,9 @@ source code -> parser -> object_tree -> lower -> generator
*/
#[cfg(feature = "proc_macro_span")]
extern crate proc_macro;
pub mod diagnostics;
pub mod generator;
pub mod lower;

View file

@ -76,7 +76,7 @@ impl Element {
r.base_type = if let Some(ty) = tr.lookup(&r.base) {
ty
} else {
diag.push_error(format!("Unkown type {}", r.base), node.text_range().start().into());
diag.push_error(format!("Unkown type {}", r.base), node.span());
return r;
};
for b in node.children().filter(|n| n.kind() == SyntaxKind::Binding) {
@ -88,14 +88,14 @@ impl Element {
if !r.base_type.properties.contains_key(&name) {
diag.push_error(
format!("Unkown property {} in {}", name, r.base),
name_token.text_range().start().into(),
crate::diagnostics::Span::new(name_token.text_range().start().into()),
);
}
if let Some(csn) = b.child_node(SyntaxKind::CodeStatement) {
if r.bindings.insert(name, CodeStatement::from_node(csn, diag)).is_some() {
diag.push_error(
"Duplicated property".into(),
name_token.text_range().start().into(),
crate::diagnostics::Span::new(name_token.text_range().start().into()),
);
}
}
@ -109,10 +109,7 @@ impl Element {
assert!(diag.has_error());
}
} else if se.kind() == SyntaxKind::RepeatedElement {
diag.push_error(
"TODO: for not implemented".to_owned(),
se.text_range().start().into(),
)
diag.push_error("TODO: for not implemented".to_owned(), se.span())
}
}
r

View file

@ -9,6 +9,8 @@ This module has different sub modules with the actual parser functions
*/
use crate::diagnostics::Diagnostics;
pub use rowan::SmolStr;
use std::convert::TryFrom;
mod document;
@ -20,8 +22,6 @@ mod prelude {
pub use parser_test_macro::parser_test;
}
use crate::diagnostics::Diagnostics;
/// This macro is invoked once, to declare all the token and syntax kind.
/// The purpose of this macro is to declare the token with its regexp at the same place
macro_rules! declare_token_kind {
@ -81,9 +81,23 @@ impl From<SyntaxKind> for rowan::SyntaxKind {
#[derive(Clone, Debug)]
pub struct Token {
kind: SyntaxKind,
text: rowan::SmolStr,
offset: usize,
pub kind: SyntaxKind,
pub text: SmolStr,
pub offset: usize,
#[cfg(feature = "proc_macro_span")]
pub span: Option<proc_macro::Span>,
}
/// Hand-written so that a defaulted token is an end-of-file (`Eof`) token
/// rather than a derived field-wise default.
impl Default for Token {
fn default() -> Self {
Token {
kind: SyntaxKind::Eof,
text: Default::default(),
offset: 0,
// Only present when built with proc_macro span support.
#[cfg(feature = "proc_macro_span")]
span: None,
}
}
}
impl Token {
@ -99,6 +113,12 @@ pub struct Parser {
diags: Diagnostics,
}
/// Build a parser from an already-lexed token list, e.g. tokens converted
/// from a `proc_macro::TokenStream` (see `parse_tokens`).
impl From<Vec<Token>> for Parser {
fn from(tokens: Vec<Token>) -> Self {
Self { builder: Default::default(), tokens, cursor: 0, diags: Default::default() }
}
}
/// The return value of `Parser::start_node`. This borrows the parser
/// and finishes the node on Drop
#[derive(derive_more::Deref, derive_more::DerefMut)]
@ -120,16 +140,16 @@ impl Parser {
let s: rowan::SmolStr = source[*start_offset..*start_offset + t.len].into();
let offset = *start_offset;
*start_offset += t.len;
Some(Token { kind: SyntaxKind::try_from(t.kind.0).unwrap(), text: s, offset })
Some(Token {
kind: SyntaxKind::try_from(t.kind.0).unwrap(),
text: s,
offset,
..Default::default()
})
})
.collect()
}
Self {
builder: Default::default(),
tokens: lex(source),
cursor: 0,
diags: Default::default(),
}
Self::from(lex(source))
}
/// Enter a new node. The node is going to be finished when
@ -140,11 +160,7 @@ impl Parser {
}
fn current_token(&self) -> Token {
self.tokens.get(self.cursor).cloned().unwrap_or(Token {
kind: SyntaxKind::Eof,
text: Default::default(),
offset: 0,
})
self.tokens.get(self.cursor).cloned().unwrap_or_default()
}
pub fn peek(&mut self) -> Token {
@ -199,7 +215,13 @@ impl Parser {
/// Reports an error at the current token location
pub fn error(&mut self, e: impl Into<String>) {
self.diags.push_error(e.into(), self.current_token().offset);
let current_token = self.current_token();
let mut span = crate::diagnostics::Span::new(current_token.offset);
#[cfg(feature = "proc_macro_span")]
{
span.span = current_token.span;
}
self.diags.push_error(e.into(), span);
}
/// consume everything until reaching a token of this kind
@ -233,6 +255,7 @@ pub trait SyntaxNodeEx {
fn child_node(&self, kind: SyntaxKind) -> Option<SyntaxNode>;
fn child_token(&self, kind: SyntaxKind) -> Option<SyntaxToken>;
fn child_text(&self, kind: SyntaxKind) -> Option<String>;
fn span(&self) -> crate::diagnostics::Span;
}
impl SyntaxNodeEx for SyntaxNode {
@ -247,6 +270,10 @@ impl SyntaxNodeEx for SyntaxNode {
.find(|n| n.kind() == kind)
.and_then(|x| x.as_token().map(|x| x.text().to_string()))
}
/// Span of this node for error reporting.
/// FIXME: only the in-source byte offset is filled in; the original
/// `proc_macro::Span` is not recoverable here, so this does not work
/// with proc_macro spans.
fn span(&self) -> crate::diagnostics::Span {
crate::diagnostics::Span::new(self.text_range().start().into())
}
}
// Actual parser
@ -255,3 +282,9 @@ pub fn parse(source: &str) -> (SyntaxNode, Diagnostics) {
document::parse_document(&mut p);
(SyntaxNode::new_root(p.builder.finish()), p.diags)
}
pub fn parse_tokens(tokens: Vec<Token>) -> (SyntaxNode, Diagnostics) {
let mut p = Parser::from(tokens);
document::parse_document(&mut p);
(SyntaxNode::new_root(p.builder.finish()), p.diags)
}

View file

@ -55,7 +55,7 @@ fn process_file(path: &std::path::Path) -> std::io::Result<bool> {
};
let offset = source[..line_begin_offset].rfind('\n').unwrap_or(0) + column;
match diag.inner.iter().position(|e| e.offset == offset && r.is_match(&e.message)) {
match diag.inner.iter().position(|e| e.span.offset == offset && r.is_match(&e.message)) {
Some(idx) => {
diag.inner.remove(idx);
}