diff --git a/api/sixtyfps-rs/sixtyfps-rs-macro/Cargo.toml b/api/sixtyfps-rs/sixtyfps-rs-macro/Cargo.toml index aa9b0a6f5..495983d40 100644 --- a/api/sixtyfps-rs/sixtyfps-rs-macro/Cargo.toml +++ b/api/sixtyfps-rs/sixtyfps-rs-macro/Cargo.toml @@ -10,4 +10,5 @@ path = "lib.rs" [dependencies] quote = "1.0" +sixtyfps_compiler = { path = "../../../sixtyfps_compiler", features = ["proc_macro_span"] } diff --git a/api/sixtyfps-rs/sixtyfps-rs-macro/lib.rs b/api/sixtyfps-rs/sixtyfps-rs-macro/lib.rs index 88a9022a7..86c2beeba 100644 --- a/api/sixtyfps-rs/sixtyfps-rs-macro/lib.rs +++ b/api/sixtyfps-rs/sixtyfps-rs-macro/lib.rs @@ -1,9 +1,104 @@ extern crate proc_macro; use proc_macro::TokenStream; use quote::quote; +use sixtyfps_compiler::*; + +fn fill_token_vec(stream: TokenStream, vec: &mut Vec<parser::Token>) { + for t in stream { + use parser::SyntaxKind; + use proc_macro::TokenTree; + + match t { + TokenTree::Ident(i) => { + vec.push(parser::Token { + kind: SyntaxKind::Identifier, + text: i.to_string().into(), + span: Some(i.span()), + ..Default::default() + }); + } + TokenTree::Punct(p) => { + let kind = match p.as_char() { + ':' => SyntaxKind::Colon, + '=' => SyntaxKind::Equal, + ';' => SyntaxKind::Semicolon, + _ => SyntaxKind::Error, + }; + vec.push(parser::Token { + kind, + text: p.to_string().into(), + span: Some(p.span()), + ..Default::default() + }); + } + TokenTree::Literal(l) => { + let s = l.to_string(); + // Why can't the rust API give me the type of the literal + let f = s.chars().next().unwrap(); + let kind = if f == '"' { + SyntaxKind::StringLiteral + } else if f.is_digit(10) { + SyntaxKind::NumberLiteral + } else { + SyntaxKind::Error + }; + vec.push(parser::Token { + kind, + text: s.into(), + span: Some(l.span()), + ..Default::default() + }); + } + TokenTree::Group(g) => { + use proc_macro::Delimiter::*; + use SyntaxKind::*; + let (l, r, sl, sr) = match g.delimiter() { + Parenthesis => (LParent, RParent, "(", ")"), + Brace => (LBrace, RBrace, "{", "}"), + Bracket => 
todo!(), + None => todo!(), + }; + vec.push(parser::Token { + kind: l, + text: sl.into(), + span: Some(g.span()), // span_open is not stable + ..Default::default() + }); + fill_token_vec(g.stream(), vec); + vec.push(parser::Token { + kind: r, + text: sr.into(), + span: Some(g.span()), // span_clone is not stable + ..Default::default() + }); + } + } + } +} #[proc_macro] -pub fn sixtyfps(_item: TokenStream) -> TokenStream { +pub fn sixtyfps(stream: TokenStream) -> TokenStream { + let mut tokens = vec![]; + fill_token_vec(stream, &mut tokens); + + let (syntax_node, mut diag) = parser::parse_tokens(tokens); + //println!("{:#?}", syntax_node); + let tr = typeregister::TypeRegister::builtin(); + let tree = object_tree::Document::from_node(syntax_node, &mut diag, &tr); + //println!("{:#?}", tree); + if !diag.inner.is_empty() { + let diags: Vec<_> = diag + .into_iter() + .map(|diagnostics::CompilerDiagnostic { message, span }| { + quote::quote_spanned!(span.span.unwrap().into() => compile_error!{ #message }) + }) + .collect(); + return quote!(#(#diags)*).into(); + } + + let l = lower::LoweredComponent::lower(&*tree.root_component); + generator::generate(&l); + quote!( #[derive(Default)] struct SuperSimple; diff --git a/examples/rusttest/src/main.rs b/examples/rusttest/src/main.rs index 2bddd176b..b33f2f7b1 100644 --- a/examples/rusttest/src/main.rs +++ b/examples/rusttest/src/main.rs @@ -24,5 +24,5 @@ sixtyfps::sixtyfps! 
{ } fn main() { - SuperSimple.run(); + SuperSimple::default().run(); } diff --git a/sixtyfps_compiler/Cargo.toml b/sixtyfps_compiler/Cargo.toml index a3685e13e..4c7f5adb5 100644 --- a/sixtyfps_compiler/Cargo.toml +++ b/sixtyfps_compiler/Cargo.toml @@ -10,6 +10,9 @@ path = "lib.rs" [features] # cpp: code generator for C++ code cpp = [] +# support for proc_macro spans in the token +proc_macro_span = [] + [dependencies] m_lexer = "0.0.4" # simple lexer for now diff --git a/sixtyfps_compiler/diagnostics.rs b/sixtyfps_compiler/diagnostics.rs index 96a154b54..269b76db3 100644 --- a/sixtyfps_compiler/diagnostics.rs +++ b/sixtyfps_compiler/diagnostics.rs @@ -1,7 +1,33 @@ -#[derive(Default, Debug, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, Default)] +pub struct Span { + pub offset: usize, + #[cfg(feature = "proc_macro_span")] + pub span: Option<proc_macro::Span>, +} + +impl PartialEq for Span { + fn eq(&self, other: &Span) -> bool { + self.offset == other.offset + } +} + +impl Span { + pub fn new(offset: usize) -> Self { + Self { offset, ..Default::default() } + } +} + +#[cfg(feature = "proc_macro_span")] +impl From<proc_macro::Span> for Span { + fn from(span: proc_macro::Span) -> Self { + Self { span: Some(span), ..Default::default() } + } +} + +#[derive(Default, Debug, Clone, PartialEq)] pub struct CompilerDiagnostic { pub message: String, - pub offset: usize, + pub span: Span, } #[derive(Default, Debug)] @@ -10,11 +36,19 @@ pub struct Diagnostics { } impl Diagnostics { - pub fn push_error(&mut self, message: String, offset: usize) { - self.inner.push(CompilerDiagnostic { message, offset }); + pub fn push_error(&mut self, message: String, span: Span) { + self.inner.push(CompilerDiagnostic { message, span }); } pub fn has_error(&self) -> bool { !self.inner.is_empty() } } + +impl IntoIterator for Diagnostics { + type Item = CompilerDiagnostic; + type IntoIter = <Vec<CompilerDiagnostic> as IntoIterator>::IntoIter; + fn into_iter(self) -> Self::IntoIter { + self.inner.into_iter() + } +} diff --git a/sixtyfps_compiler/lib.rs 
b/sixtyfps_compiler/lib.rs index 4403f8c88..601dbad68 100644 --- a/sixtyfps_compiler/lib.rs +++ b/sixtyfps_compiler/lib.rs @@ -10,6 +10,9 @@ source code -> parser -> object_tree -> lower -> generator */ +#[cfg(feature = "proc_macro_span")] +extern crate proc_macro; + pub mod diagnostics; pub mod generator; pub mod lower; diff --git a/sixtyfps_compiler/object_tree.rs b/sixtyfps_compiler/object_tree.rs index 1054998fd..7b5f1f294 100644 --- a/sixtyfps_compiler/object_tree.rs +++ b/sixtyfps_compiler/object_tree.rs @@ -76,7 +76,7 @@ impl Element { r.base_type = if let Some(ty) = tr.lookup(&r.base) { ty } else { - diag.push_error(format!("Unkown type {}", r.base), node.text_range().start().into()); + diag.push_error(format!("Unkown type {}", r.base), node.span()); return r; }; for b in node.children().filter(|n| n.kind() == SyntaxKind::Binding) { @@ -88,14 +88,14 @@ impl Element { if !r.base_type.properties.contains_key(&name) { diag.push_error( format!("Unkown property {} in {}", name, r.base), - name_token.text_range().start().into(), + crate::diagnostics::Span::new(name_token.text_range().start().into()), ); } if let Some(csn) = b.child_node(SyntaxKind::CodeStatement) { if r.bindings.insert(name, CodeStatement::from_node(csn, diag)).is_some() { diag.push_error( "Duplicated property".into(), - name_token.text_range().start().into(), + crate::diagnostics::Span::new(name_token.text_range().start().into()), ); } } @@ -109,10 +109,7 @@ impl Element { assert!(diag.has_error()); } } else if se.kind() == SyntaxKind::RepeatedElement { - diag.push_error( - "TODO: for not implemented".to_owned(), - se.text_range().start().into(), - ) + diag.push_error("TODO: for not implemented".to_owned(), se.span()) } } r diff --git a/sixtyfps_compiler/parser.rs b/sixtyfps_compiler/parser.rs index 0c0cae424..87ee8a09c 100644 --- a/sixtyfps_compiler/parser.rs +++ b/sixtyfps_compiler/parser.rs @@ -9,6 +9,8 @@ This module has different sub modules with the actual parser functions */ +use 
crate::diagnostics::Diagnostics; +pub use rowan::SmolStr; use std::convert::TryFrom; mod document; @@ -20,8 +22,6 @@ mod prelude { pub use parser_test_macro::parser_test; } -use crate::diagnostics::Diagnostics; - /// This macro is invoked once, to declare all the token and syntax kind. /// The purpose of this macro is to declare the token with its regexp at the same place macro_rules! declare_token_kind { @@ -81,9 +81,23 @@ impl From<SyntaxKind> for rowan::SyntaxKind { #[derive(Clone, Debug)] pub struct Token { - kind: SyntaxKind, - text: rowan::SmolStr, - offset: usize, + pub kind: SyntaxKind, + pub text: SmolStr, + pub offset: usize, + #[cfg(feature = "proc_macro_span")] + pub span: Option<proc_macro::Span>, +} + +impl Default for Token { + fn default() -> Self { + Token { + kind: SyntaxKind::Eof, + text: Default::default(), + offset: 0, + #[cfg(feature = "proc_macro_span")] + span: None, + } + } } impl Token { @@ -99,6 +113,12 @@ pub struct Parser { diags: Diagnostics, } +impl From<Vec<Token>> for Parser { + fn from(tokens: Vec<Token>) -> Self { + Self { builder: Default::default(), tokens, cursor: 0, diags: Default::default() } + } +} + /// The return value of `Parser::start_node`. This borrows the parser /// and finishes the node on Drop #[derive(derive_more::Deref, derive_more::DerefMut)] @@ -120,16 +140,16 @@ impl Parser { let s: rowan::SmolStr = source[*start_offset..*start_offset + t.len].into(); let offset = *start_offset; *start_offset += t.len; - Some(Token { kind: SyntaxKind::try_from(t.kind.0).unwrap(), text: s, offset }) + Some(Token { + kind: SyntaxKind::try_from(t.kind.0).unwrap(), + text: s, + offset, + ..Default::default() + }) }) .collect() } - Self { - builder: Default::default(), - tokens: lex(source), - cursor: 0, - diags: Default::default(), - } + Self::from(lex(source)) } /// Enter a new node. 
The node is going to be finished when @@ -140,11 +160,7 @@ impl Parser { } fn current_token(&self) -> Token { - self.tokens.get(self.cursor).cloned().unwrap_or(Token { - kind: SyntaxKind::Eof, - text: Default::default(), - offset: 0, - }) + self.tokens.get(self.cursor).cloned().unwrap_or_default() } pub fn peek(&mut self) -> Token { @@ -199,7 +215,13 @@ impl Parser { /// Reports an error at the current token location pub fn error(&mut self, e: impl Into<String>) { - self.diags.push_error(e.into(), self.current_token().offset); + let current_token = self.current_token(); + let mut span = crate::diagnostics::Span::new(current_token.offset); + #[cfg(feature = "proc_macro_span")] + { + span.span = current_token.span; + } + self.diags.push_error(e.into(), span); } /// consume everyting until reaching a token of this kind @@ -233,6 +255,7 @@ pub trait SyntaxNodeEx { fn child_node(&self, kind: SyntaxKind) -> Option<SyntaxNode>; fn child_token(&self, kind: SyntaxKind) -> Option<SyntaxToken>; fn child_text(&self, kind: SyntaxKind) -> Option<String>; + fn span(&self) -> crate::diagnostics::Span; } impl SyntaxNodeEx for SyntaxNode { @@ -247,6 +270,10 @@ impl SyntaxNodeEx for SyntaxNode { .find(|n| n.kind() == kind) .and_then(|x| x.as_token().map(|x| x.text().to_string())) } + fn span(&self) -> crate::diagnostics::Span { + // FIXME! 
this does not work with proc_macro span + crate::diagnostics::Span::new(self.text_range().start().into()) + } } // Actual parser @@ -255,3 +282,9 @@ pub fn parse(source: &str) -> (SyntaxNode, Diagnostics) { document::parse_document(&mut p); (SyntaxNode::new_root(p.builder.finish()), p.diags) } + +pub fn parse_tokens(tokens: Vec<Token>) -> (SyntaxNode, Diagnostics) { + let mut p = Parser::from(tokens); + document::parse_document(&mut p); + (SyntaxNode::new_root(p.builder.finish()), p.diags) +} diff --git a/sixtyfps_compiler/tests/syntax_tests.rs b/sixtyfps_compiler/tests/syntax_tests.rs index 4128dfdea..5719356aa 100644 --- a/sixtyfps_compiler/tests/syntax_tests.rs +++ b/sixtyfps_compiler/tests/syntax_tests.rs @@ -55,7 +55,7 @@ fn process_file(path: &std::path::Path) -> std::io::Result<bool> { }; let offset = source[..line_begin_offset].rfind('\n').unwrap_or(0) + column; - match diag.inner.iter().position(|e| e.span.offset == offset && r.is_match(&e.message)) { + match diag.inner.iter().position(|e| e.span.offset == offset && r.is_match(&e.message)) { Some(idx) => { diag.inner.remove(idx); }