mirror of https://github.com/erg-lang/erg.git
synced 2025-08-04 10:49:54 +00:00
Merge branch 'main' into fix-match
This commit is contained in:
commit 0f96ee945d
19 changed files with 359 additions and 122 deletions
Cargo.lock (generated, 12 lines changed)

@@ -97,7 +97,7 @@ dependencies = [

 [[package]]
 name = "els"
-version = "0.1.33-nightly.4"
+version = "0.1.33"
 dependencies = [
  "erg_common",
  "erg_compiler",
@@ -110,7 +110,7 @@ dependencies = [

 [[package]]
 name = "erg"
-version = "0.6.21-nightly.4"
+version = "0.6.21"
 dependencies = [
  "els",
  "erg_common",
@@ -120,7 +120,7 @@ dependencies = [

 [[package]]
 name = "erg_common"
-version = "0.6.21-nightly.4"
+version = "0.6.21"
 dependencies = [
  "backtrace-on-stack-overflow",
  "crossterm",
@@ -130,7 +130,7 @@ dependencies = [

 [[package]]
 name = "erg_compiler"
-version = "0.6.21-nightly.4"
+version = "0.6.21"
 dependencies = [
  "erg_common",
  "erg_parser",
@@ -138,7 +138,7 @@ dependencies = [

 [[package]]
 name = "erg_parser"
-version = "0.6.21-nightly.4"
+version = "0.6.21"
 dependencies = [
  "erg_common",
  "unicode-xid",
@@ -146,7 +146,7 @@ dependencies = [

 [[package]]
 name = "erg_proc_macros"
-version = "0.6.21-nightly.4"
+version = "0.6.21"
 dependencies = [
  "erg_common",
  "quote",
Cargo.toml (12 lines changed)

@@ -21,7 +21,7 @@ members = [
 ]

 [workspace.package]
-version = "0.6.21-nightly.4"
+version = "0.6.21"
 authors = ["erg-lang team <moderation.erglang@gmail.com>"]
 license = "MIT OR Apache-2.0"
 edition = "2021"
@@ -65,11 +65,11 @@ full = ["els", "full-repl", "unicode", "pretty"]
 experimental = ["erg_common/experimental", "erg_parser/experimental", "erg_compiler/experimental"]

 [workspace.dependencies]
-erg_common = { version = "0.6.21-nightly.4", path = "./crates/erg_common" }
-erg_parser = { version = "0.6.21-nightly.4", path = "./crates/erg_parser" }
-erg_compiler = { version = "0.6.21-nightly.4", path = "./crates/erg_compiler" }
-els = { version = "0.1.33-nightly.4", path = "./crates/els" }
-erg_proc_macros = { version = "0.6.21-nightly.4", path = "./crates/erg_proc_macros" }
+erg_common = { version = "0.6.21", path = "./crates/erg_common" }
+erg_parser = { version = "0.6.21", path = "./crates/erg_parser" }
+erg_compiler = { version = "0.6.21", path = "./crates/erg_compiler" }
+els = { version = "0.1.33", path = "./crates/els" }
+erg_proc_macros = { version = "0.6.21", path = "./crates/erg_proc_macros" }

 [dependencies]
 erg_common = { workspace = true }

crates/els/Cargo.toml (2 lines changed)

@@ -2,7 +2,7 @@
 name = "els"
 description = "An Erg compiler frontend for IDEs, implements LSP."
 documentation = "http://docs.rs/els"
-version = "0.1.33-nightly.4"
+version = "0.1.33"
 authors.workspace = true
 license.workspace = true
 edition.workspace = true

@@ -3,7 +3,7 @@ use std::str::FromStr;
 use erg_compiler::artifact::BuildRunnable;
 use erg_compiler::erg_parser::parse::Parsable;

-use erg_compiler::hir::Def;
+use erg_compiler::hir::{Accessor, Array, Def, Dict, Expr, KeyValue, Set, Tuple};
 use erg_compiler::varinfo::{AbsLocation, VarInfo};
 use lsp_types::{
     CallHierarchyIncomingCall, CallHierarchyIncomingCallsParams, CallHierarchyItem,
@@ -78,7 +78,114 @@ impl<Checker: BuildRunnable, Parser: Parsable> Server<Checker, Parser> {
         params: CallHierarchyOutgoingCallsParams,
     ) -> ELSResult<Option<Vec<CallHierarchyOutgoingCall>>> {
         _log!(self, "call hierarchy outgoing calls requested: {params:?}");
-        Ok(None)
+        let Some(data) = params.item.data.as_ref().and_then(|d| d.as_str()) else {
+            return Ok(None);
+        };
+        let Ok(loc) = AbsLocation::from_str(data) else {
+            return Ok(None);
+        };
+        let Some(module) = loc.module else {
+            return Ok(None);
+        };
+        let uri = NormalizedUrl::from_file_path(module)?;
+        let Some(pos) = loc_to_pos(loc.loc) else {
+            return Ok(None);
+        };
+        let mut calls = vec![];
+        if let Some(def) = self.get_min::<Def>(&uri, pos) {
+            for chunk in def.body.block.iter() {
+                calls.extend(self.gen_outgoing_call(chunk));
+            }
+        }
+        Ok(Some(calls))
     }
+
+    /// Indirect calls are excluded. For example, calls in an anonymous function.
+    #[allow(clippy::only_used_in_recursion)]
+    fn gen_outgoing_call(&self, expr: &Expr) -> Vec<CallHierarchyOutgoingCall> {
+        let mut calls = vec![];
+        match expr {
+            Expr::Call(call) => {
+                for arg in call.args.pos_args.iter() {
+                    calls.extend(self.gen_outgoing_call(&arg.expr));
+                }
+                if let Some(var) = call.args.var_args.as_ref() {
+                    calls.extend(self.gen_outgoing_call(&var.expr));
+                }
+                for arg in call.args.kw_args.iter() {
+                    calls.extend(self.gen_outgoing_call(&arg.expr));
+                }
+                if let Some(attr) = call.attr_name.as_ref() {
+                    let Some(to) = hierarchy_item(attr.inspect().to_string(), &attr.vi) else {
+                        return calls;
+                    };
+                    let call = CallHierarchyOutgoingCall {
+                        to,
+                        from_ranges: vec![],
+                    };
+                    calls.push(call);
+                } else if let Expr::Accessor(acc) = call.obj.as_ref() {
+                    let Some(to) = hierarchy_item(acc.last_name().to_string(), acc.var_info())
+                    else {
+                        return calls;
+                    };
+                    let call = CallHierarchyOutgoingCall {
+                        to,
+                        from_ranges: vec![],
+                    };
+                    calls.push(call);
+                }
+                calls
+            }
+            Expr::TypeAsc(tasc) => self.gen_outgoing_call(&tasc.expr),
+            Expr::Accessor(Accessor::Attr(attr)) => self.gen_outgoing_call(&attr.obj),
+            Expr::BinOp(binop) => {
+                calls.extend(self.gen_outgoing_call(&binop.lhs));
+                calls.extend(self.gen_outgoing_call(&binop.rhs));
+                calls
+            }
+            Expr::UnaryOp(unop) => self.gen_outgoing_call(&unop.expr),
+            Expr::Array(Array::Normal(arr)) => {
+                for arg in arr.elems.pos_args.iter() {
+                    calls.extend(self.gen_outgoing_call(&arg.expr));
+                }
+                calls
+            }
+            Expr::Dict(Dict::Normal(dict)) => {
+                for KeyValue { key, value } in dict.kvs.iter() {
+                    calls.extend(self.gen_outgoing_call(key));
+                    calls.extend(self.gen_outgoing_call(value));
+                }
+                calls
+            }
+            Expr::Set(Set::Normal(set)) => {
+                for arg in set.elems.pos_args.iter() {
+                    calls.extend(self.gen_outgoing_call(&arg.expr));
+                }
+                calls
+            }
+            Expr::Tuple(Tuple::Normal(tuple)) => {
+                for arg in tuple.elems.pos_args.iter() {
+                    calls.extend(self.gen_outgoing_call(&arg.expr));
+                }
+                calls
+            }
+            Expr::Record(rec) => {
+                for attr in rec.attrs.iter() {
+                    for chunk in attr.body.block.iter() {
+                        calls.extend(self.gen_outgoing_call(chunk));
+                    }
+                }
+                calls
+            }
+            Expr::Def(def) if !def.sig.is_subr() => {
+                for chunk in def.body.block.iter() {
+                    calls.extend(self.gen_outgoing_call(chunk));
+                }
+                calls
+            }
+            _ => calls,
+        }
+    }

     pub(crate) fn handle_call_hierarchy_prepare(
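The new handler above resolves the item's stored AbsLocation back to a definition and then recursively collects the calls made inside its body. As a rough illustration of that recursion, independent of the compiler's HIR, here is a minimal, self-contained sketch; the Expr type and collect_calls function below are invented for this example and are not part of the diff.

#[derive(Debug)]
enum Expr {
    Call { callee: String, args: Vec<Expr> },
    Array(Vec<Expr>),
    BinOp(Box<Expr>, Box<Expr>),
    Lambda(Box<Expr>), // stands in for an anonymous function body
    Lit(i64),
}

// Collect the names of direct calls, descending into arguments and containers
// but not into anonymous-function bodies (mirroring the "indirect calls are
// excluded" rule stated in the doc comment above).
fn collect_calls(expr: &Expr) -> Vec<String> {
    let mut calls = vec![];
    match expr {
        Expr::Call { callee, args } => {
            for arg in args {
                calls.extend(collect_calls(arg));
            }
            calls.push(callee.clone());
        }
        Expr::Array(elems) => {
            for elem in elems {
                calls.extend(collect_calls(elem));
            }
        }
        Expr::BinOp(lhs, rhs) => {
            calls.extend(collect_calls(lhs));
            calls.extend(collect_calls(rhs));
        }
        Expr::Lambda(_) | Expr::Lit(_) => {}
    }
    calls
}

fn main() {
    // f(1) and print!(...) are direct calls; g() sits inside a lambda and is skipped.
    let expr = Expr::Call {
        callee: "print!".into(),
        args: vec![
            Expr::Array(vec![Expr::Call {
                callee: "f".into(),
                args: vec![Expr::Lit(1)],
            }]),
            Expr::Lambda(Box::new(Expr::Call {
                callee: "g".into(),
                args: vec![],
            })),
        ],
    };
    assert_eq!(collect_calls(&expr), vec!["f".to_string(), "print!".to_string()]);
}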

@@ -14,6 +14,7 @@ use erg_common::traits::Stream;
 use erg_common::{fn_name, lsp_log};
 use erg_compiler::artifact::BuildRunnable;
 use erg_compiler::erg_parser::ast::Module;
+use erg_compiler::erg_parser::error::IncompleteArtifact;
 use erg_compiler::erg_parser::parse::Parsable;
 use erg_compiler::error::CompileErrors;

@@ -31,29 +32,56 @@ use crate::server::{DefaultFeatures, ELSResult, RedirectableStdout, Server};
 use crate::server::{ASK_AUTO_SAVE_ID, HEALTH_CHECKER_ID};
 use crate::util::{self, project_root_of, NormalizedUrl};

+#[derive(Debug)]
+pub enum BuildASTError {
+    NoFile,
+    ParseError(IncompleteArtifact),
+}
+
+#[derive(Debug)]
+pub enum ChangeKind {
+    New,
+    NoChange,
+    Valid,
+    Invalid,
+}
+
+impl ChangeKind {
+    pub const fn is_no_change(&self) -> bool {
+        matches!(self, Self::NoChange)
+    }
+}
+
 impl<Checker: BuildRunnable, Parser: Parsable> Server<Checker, Parser> {
-    pub(crate) fn build_ast(&self, uri: &NormalizedUrl) -> Option<Module> {
-        let code = self.file_cache.get_entire_code(uri).ok()?;
-        Parser::parse(code).ok().map(|artifact| artifact.ast)
+    pub(crate) fn build_ast(&self, uri: &NormalizedUrl) -> Result<Module, BuildASTError> {
+        let code = self
+            .file_cache
+            .get_entire_code(uri)
+            .map_err(|_| BuildASTError::NoFile)?;
+        Parser::parse(code)
+            .map(|artifact| artifact.ast)
+            .map_err(BuildASTError::ParseError)
     }

-    pub(crate) fn any_changes(&self, uri: &NormalizedUrl) -> bool {
+    pub(crate) fn change_kind(&self, uri: &NormalizedUrl) -> ChangeKind {
         let deps = self.dependencies_of(uri);
         if deps.is_empty() {
-            return true;
+            return ChangeKind::New;
         }
         for dep in deps {
             let Some(old) = self.get_ast(&dep) else {
-                return true;
+                return ChangeKind::Invalid;
             };
-            if let Some(new) = self.build_ast(&dep) {
+            if let Ok(new) = self.build_ast(&dep) {
                 if !ASTDiff::diff(old, &new).is_nop() {
-                    return true;
+                    return ChangeKind::Valid;
                 }
+            } else {
+                return ChangeKind::Invalid;
             }
         }
         _log!(self, "no changes: {uri}");
-        false
+        ChangeKind::NoChange
     }

     pub(crate) fn recheck_file(
@@ -61,7 +89,7 @@ impl<Checker: BuildRunnable, Parser: Parsable> Server<Checker, Parser> {
         uri: NormalizedUrl,
         code: impl Into<String>,
     ) -> ELSResult<()> {
-        if !self.any_changes(&uri) {
+        if self.change_kind(&uri).is_no_change() {
             _log!(self, "no changes: {uri}");
             return Ok(());
         }
@@ -129,7 +157,7 @@ impl<Checker: BuildRunnable, Parser: Parsable> Server<Checker, Parser> {
                 artifact
             }
         };
-        let ast = self.build_ast(&uri);
+        let ast = self.build_ast(&uri).ok();
         let ctx = checker.pop_context().unwrap();
         if mode == "declare" {
             self.shared
@@ -160,7 +188,7 @@ impl<Checker: BuildRunnable, Parser: Parsable> Server<Checker, Parser> {
             crate::_log!(self, "not found");
             return Ok(());
         };
-        let Some(new) = self.build_ast(&uri) else {
+        let Ok(new) = self.build_ast(&uri) else {
            crate::_log!(self, "not found");
            return Ok(());
         };
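Taken together, these changes split the old "did anything change?" boolean into a four-way ChangeKind and make parse failures visible to callers as BuildASTError instead of a bare None. A minimal, self-contained sketch of how a call site can branch on those signals; everything below uses stub types invented for this example (Module, the toy build_ast, and recheck are not taken from the diff).

#[derive(Debug)]
enum BuildASTError {
    NoFile,
    ParseError(String), // stand-in for IncompleteArtifact
}

#[derive(Debug)]
enum ChangeKind {
    New,
    NoChange,
    Valid,
    Invalid,
}

impl ChangeKind {
    const fn is_no_change(&self) -> bool {
        matches!(self, Self::NoChange)
    }
}

struct Module; // stand-in for the parsed AST

// Toy build_ast: an empty source means "no file", a trailing '+' means a parse error.
fn build_ast(src: &str) -> Result<Module, BuildASTError> {
    if src.is_empty() {
        Err(BuildASTError::NoFile)
    } else if src.trim_end().ends_with('+') {
        Err(BuildASTError::ParseError("expected an expression after `+`".into()))
    } else {
        Ok(Module)
    }
}

// How a caller can branch on the new signals instead of a bare bool/Option.
fn recheck(kind: ChangeKind, src: &str) {
    if kind.is_no_change() {
        println!("no changes, skipping recheck");
        return;
    }
    match build_ast(src) {
        Ok(_module) => println!("{kind:?}: re-running the checker on a fresh AST"),
        Err(err) => println!("{kind:?}: keeping previous diagnostics, parse failed: {err:?}"),
    }
}

fn main() {
    recheck(ChangeKind::NoChange, "print! 1 + 1");
    recheck(ChangeKind::Valid, "print! 1 +");
    recheck(ChangeKind::New, "print! 1 + 1");
}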

@@ -1,3 +1,4 @@
+use std::fmt;
 use std::fs::File;
 use std::io::Read;
 use std::sync::mpsc::Sender;
@@ -63,6 +64,17 @@ impl RedirectableStdout for FileCache {
     }
 }

+impl fmt::Display for FileCache {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        writeln!(f, "FileCache {{")?;
+        for (key, entry) in self.files.borrow().iter() {
+            writeln!(f, "{key}: \"{}\"", entry.code)?;
+        }
+        writeln!(f, "}}")?;
+        Ok(())
+    }
+}
+
 impl FileCache {
     pub fn new(stdout_redirect: Option<Sender<Value>>) -> Self {
         Self {
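The Display impl added above simply dumps every cached file, which is useful when logging the cache state. A self-contained sketch of the same pattern over a plain HashMap; ToyFileCache is a stand-in invented for this example, not the real ELS type.

use std::collections::HashMap;
use std::fmt;

// Stand-in for the ELS FileCache: file name -> source text.
struct ToyFileCache {
    files: HashMap<String, String>,
}

impl fmt::Display for ToyFileCache {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        writeln!(f, "ToyFileCache {{")?;
        for (key, code) in self.files.iter() {
            writeln!(f, "{key}: \"{code}\"")?;
        }
        write!(f, "}}")
    }
}

fn main() {
    let mut files = HashMap::new();
    files.insert("a.er".to_string(), "print! 1".to_string());
    println!("{}", ToyFileCache { files });
}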

@@ -34,7 +34,7 @@ impl<Checker: BuildRunnable, Parser: Parsable> Server<Checker, Parser> {

     fn fold_imports(&self, uri: &NormalizedUrl) -> Vec<FoldingRange> {
         let mut res = vec![];
-        if let Some(module) = self.build_ast(uri) {
+        if let Ok(module) = self.build_ast(uri) {
             let mut ranges = vec![];
             for chunk in module.into_iter() {
                 match chunk {

crates/els/tests/invalid_syntax.er (new file, 1 line)

@@ -0,0 +1 @@
+print! 1 +

@@ -8,6 +8,7 @@ const FILE_A: &str = "tests/a.er";
 const FILE_B: &str = "tests/b.er";
 const FILE_C: &str = "tests/c.er";
 const FILE_IMPORTS: &str = "tests/imports.er";
+const FILE_INVALID_SYNTAX: &str = "tests/invalid_syntax.er";

 use els::{NormalizedUrl, Server};
 use erg_proc_macros::exec_new_thread;
@@ -262,3 +263,30 @@ fn test_dependents_check() -> Result<(), Box<dyn std::error::Error>> {
     );
     Ok(())
 }
+
+#[test]
+fn test_fix_error() -> Result<(), Box<dyn std::error::Error>> {
+    let mut client = Server::bind_fake_client();
+    client.request_initialize()?;
+    client.notify_initialized()?;
+    client.wait_messages(3)?;
+    client.responses.clear();
+    client.notify_open(FILE_INVALID_SYNTAX)?;
+    client.wait_messages(6)?;
+    let msg = client.responses.last().unwrap();
+    let diags = PublishDiagnosticsParams::deserialize(&msg["params"])?;
+    assert_eq!(diags.diagnostics.len(), 1);
+    assert_eq!(
+        diags.diagnostics[0].severity,
+        Some(DiagnosticSeverity::ERROR)
+    );
+    client.responses.clear();
+    let uri = NormalizedUrl::from_file_path(Path::new(FILE_INVALID_SYNTAX).canonicalize()?)?;
+    client.notify_change(uri.clone().raw(), add_char(0, 10, " 1"))?;
+    client.notify_save(uri.clone().raw())?;
+    client.wait_messages(4)?;
+    let msg = client.responses.last().unwrap();
+    let diags = PublishDiagnosticsParams::deserialize(&msg["params"])?;
+    assert_eq!(diags.diagnostics.len(), 0);
+    Ok(())
+}

@@ -4,12 +4,12 @@

 .is_dataclass: (obj: Obj) -> Bool

-_MISSING_TYPE: ClassType
-.MISSING: _MISSING_TYPE
+._MISSING_TYPE: ClassType
+.MISSING: ._MISSING_TYPE

-_KW_ONLY_TYPE: ClassType
+._KW_ONLY_TYPE: ClassType
 .KW_ONLY: _KW_ONLY_TYPE

-.field: (default := _MISSING_TYPE, default_factory := _MISSING_TYPE, init := Bool, repr := Bool, hash := Bool, compare := Bool, metadata := GenericDict, kw_only := _MISSING_TYPE) -> (Obj -> Obj)
+.field: (default := ._MISSING_TYPE, default_factory := ._MISSING_TYPE, init := Bool, repr := Bool, hash := Bool, compare := Bool, metadata := GenericDict, kw_only := ._MISSING_TYPE) -> (Obj -> Obj)

 .Field: ClassType

@@ -60,11 +60,11 @@ The name of the operating system dependent module imported. The following names
 # posix = pyimport "posix"
 # .uname!: () => posix.UnameResult
 .uname!: () => {
-    sysname = Str
-    nodename = Str
-    release = Str
-    version = Str
-    machine = Str
+    sysname = Str;
+    nodename = Str;
+    release = Str;
+    version = Str;
+    machine = Str;
 }

 .getrandom!: (size: Nat) => Bytes

@@ -7,9 +7,9 @@ logging = pyimport "logging"
 .copy2!: (src: PathLike, dst: PathLike,) => NoneType
 .copytree!: (src: PathLike, dst: PathLike,) => NoneType
 .disk_usage!: (path: PathLike,) => NamedTuple {
-    .total = Nat
-    .used = Nat
-    .free = Nat
+    .total = Nat;
+    .used = Nat;
+    .free = Nat;
 }
 .get_archive_formats!: () => [(Str, Str); _]
 .get_unpack_formats!: () => [(Str, [Str; _], Str); _]

@@ -12,49 +12,49 @@ io = pyimport "io"
 .exec_prefix: Str
 .executable: Str
 .flags: NamedTuple {
-    .debug = Nat
-    .inspect = Nat
-    .interactive = Nat
-    .optimize = Nat
-    .dont_write_bytecode = Nat
-    .no_user_site = Nat
-    .no_site = Nat
-    .ignore_environment = Nat
-    .verbose = Nat
-    .bytes_warning = Nat
-    .quiet = Nat
-    .hash_randomization = Nat
-    .isolated = Nat
-    .dev_mode = Nat
-    .utf8_mode = Nat
-    .warn_default_encoding = Nat
-    .safe_path = Bool
-    .int_max_str_digits = Int
+    .debug = Nat;
+    .inspect = Nat;
+    .interactive = Nat;
+    .optimize = Nat;
+    .dont_write_bytecode = Nat;
+    .no_user_site = Nat;
+    .no_site = Nat;
+    .ignore_environment = Nat;
+    .verbose = Nat;
+    .bytes_warning = Nat;
+    .quiet = Nat;
+    .hash_randomization = Nat;
+    .isolated = Nat;
+    .dev_mode = Nat;
+    .utf8_mode = Nat;
+    .warn_default_encoding = Nat;
+    .safe_path = Bool;
+    .int_max_str_digits = Int;
 }
 .float_indo: NamedTuple {
-    .max = Float
-    .max_exp = Nat
-    .max_10_exp = Nat
-    .min = Float
-    .min_exp = Int
-    .min_10_exp = Int
-    .dig = Nat
-    .mant_dig = Nat
-    .epsilon = Float
-    .radix = Nat
-    .rounds = Nat
+    .max = Float;
+    .max_exp = Nat;
+    .max_10_exp = Nat;
+    .min = Float;
+    .min_exp = Int;
+    .min_10_exp = Int;
+    .dig = Nat;
+    .mant_dig = Nat;
+    .epsilon = Float;
+    .radix = Nat;
+    .rounds = Nat;
 }
 .float_repr_style: Str
 .hash_info: NamedTuple {
-    .width = Nat
-    .modulus = Int
-    .inf = Int
-    .nan = Int
-    .imag = Int
-    .algorithm = Str
-    .hash_bits = Nat
-    .seed_bits = Nat
-    .cutoff = Int
+    .width = Nat;
+    .modulus = Int;
+    .inf = Int;
+    .nan = Int;
+    .imag = Int;
+    .algorithm = Str;
+    .hash_bits = Nat;
+    .seed_bits = Nat;
+    .cutoff = Int;
 }
 .path: Array!(Str, _)
 '''
@@ -79,11 +79,11 @@ io = pyimport "io"
 .stdlib_module_names: [Str; _]
 .version: Str
 .version_info: NamedTuple {
-    .major = Nat
-    .minor = Nat
-    .micro = Nat
-    .releaselevel = Str
-    .serial = Nat
+    .major = Nat;
+    .minor = Nat;
+    .micro = Nat;
+    .releaselevel = Str;
+    .serial = Nat;
 }

 .addaudithook!: (hook: (Str, *Obj) => NoneType) => NoneType

@@ -31,25 +31,25 @@ GtForOrd.
 `>`(self, other: Self): Bool = self.cmp(other) == Ordering.Greater

 Add(R := Self) = Trait {
-    .Output = Type
-    .`_+_` = (self: Self, R) -> Self.Output
+    .Output = Type;
+    .`_+_` = (self: Self, R) -> Self.Output;
 }
 Sub(R := Self) = Trait {
-    .Output = Type
-    .`-` = (self: Self, R) -> Self.Output
+    .Output = Type;
+    .`_-_` = (self: Self, R) -> Self.Output;
 }
 Mul(R := Self) = Trait {
-    .Output = Type
+    .Output = Type;
     .`*` = (self: Self, R) -> Self.Output
 }
 Div(R := Self) = Trait {
-    .Output = Type
+    .Output = Type;
     .`/` = (self: Self, R) -> Self.Output or Panic
 }
 Num = Add and Sub and Mul

 Seq T = Trait {
-    .__len__ = (self: Ref(Self)) -> Nat
+    .__len__ = (self: Ref(Self)) -> Nat;
     .get = (self: Ref(Self), Nat) -> T
 }


@@ -167,6 +167,7 @@ pub struct Lexer /*<'a>*/ {
     str_cache: CacheSet<str>,
     chars: Vec<char>,
     indent_stack: Vec<usize>,
+    enclosure_level: usize,
     /// indicates the position in the entire source code
     cursor: usize,
     /// to determine the type of operators, etc.
@@ -185,6 +186,7 @@ impl Lexer /*<'a>*/ {
             str_cache: CacheSet::new(),
             chars: normed.chars().collect::<Vec<char>>(),
             indent_stack: vec![],
+            enclosure_level: 0,
             cursor: 0,
             prev_token: Token::new(TokenKind::BOF, "", 0, 0),
             lineno_token_starts: 0,
@@ -200,6+202,7 @@ impl Lexer /*<'a>*/ {
             str_cache: CacheSet::new(),
             chars: escaped.chars().collect::<Vec<char>>(),
             indent_stack: vec![],
+            enclosure_level: 0,
             cursor: 0,
             prev_token: Token::new(TokenKind::BOF, "", 0, 0),
             lineno_token_starts: 0,
@@ -457,12 +460,12 @@
         let is_linebreak = self.peek_cur_ch() == Some('\n');
         let is_empty = is_space || is_linebreak;
         let is_toplevel = is_line_break_after && !is_empty;
-        if is_toplevel {
+        if is_toplevel && self.enclosure_level == 0 {
             let dedent = self.emit_token(Dedent, "");
             self.indent_stack.pop();
             self.col_token_starts = 0;
             return Some(Ok(dedent));
-        } else if is_linebreak {
+        } else if is_linebreak && self.enclosure_level == 0 {
             self.consume();
             let token = self.emit_token(Newline, "\n");
             self.lineno_token_starts += 1;
@@ -1197,12 +1200,28 @@ impl Iterator for Lexer /*<'a>*/ {
             }
         }
         match self.consume() {
-            Some('(') => self.accept(LParen, "("),
-            Some(')') => self.accept(RParen, ")"),
-            Some('[') => self.accept(LSqBr, "["),
-            Some(']') => self.accept(RSqBr, "]"),
-            Some('{') => self.accept(LBrace, "{"),
+            Some('(') => {
+                self.enclosure_level += 1;
+                self.accept(LParen, "(")
+            }
+            Some(')') => {
+                self.enclosure_level = self.enclosure_level.saturating_sub(1);
+                self.accept(RParen, ")")
+            }
+            Some('[') => {
+                self.enclosure_level += 1;
+                self.accept(LSqBr, "[")
+            }
+            Some(']') => {
+                self.enclosure_level = self.enclosure_level.saturating_sub(1);
+                self.accept(RSqBr, "]")
+            }
+            Some('{') => {
+                self.enclosure_level += 1;
+                self.accept(LBrace, "{")
+            }
             Some('}') => {
+                self.enclosure_level = self.enclosure_level.saturating_sub(1);
                 if self.interpol_stack.last().unwrap().is_in() {
                     Some(self.lex_interpolation_mid())
                 } else {
@@ -1423,10 +1442,16 @@
             // Newline
             // all line break characters are replaced with \n when the Lexer is created
             Some('\n') => {
-                let token = self.emit_token(Newline, "\n");
-                self.lineno_token_starts += 1;
-                self.col_token_starts = 0;
-                Some(Ok(token))
+                if self.enclosure_level > 0 {
+                    self.lineno_token_starts += 1;
+                    self.col_token_starts = 0;
+                    self.next()
+                } else {
+                    let token = self.emit_token(Newline, "\n");
+                    self.lineno_token_starts += 1;
+                    self.col_token_starts = 0;
+                    Some(Ok(token))
+                }
             }
             Some('\t') => {
                 let token = self.emit_token(Illegal, "\t");
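All of the lexer changes above serve one idea: track how deeply the lexer sits inside (), [] or {} and suppress Newline/Dedent emission while that depth is non-zero, so multi-line containers no longer terminate the enclosing expression. A minimal, self-contained sketch of that technique; the Tok enum and lex function are invented for this example and are much simpler than the real Lexer.

// Toy model of the `enclosure_level` technique: count open brackets and
// swallow newline tokens while inside (), [] or {} so that multi-line
// containers do not break the surrounding expression.
#[derive(Debug, PartialEq)]
enum Tok {
    Open(char),
    Close(char),
    Newline,
    Char(char),
}

fn lex(src: &str) -> Vec<Tok> {
    let mut enclosure_level: usize = 0;
    let mut toks = vec![];
    for ch in src.chars() {
        match ch {
            '(' | '[' | '{' => {
                enclosure_level += 1;
                toks.push(Tok::Open(ch));
            }
            ')' | ']' | '}' => {
                enclosure_level = enclosure_level.saturating_sub(1);
                toks.push(Tok::Close(ch));
            }
            '\n' if enclosure_level > 0 => {} // swallow newlines inside brackets
            '\n' => toks.push(Tok::Newline),
            _ => toks.push(Tok::Char(ch)),
        }
    }
    toks
}

fn main() {
    // The newline inside the brackets is dropped, the trailing one is kept.
    let toks = lex("[1,\n2]\n");
    assert!(!toks[..toks.len() - 1].contains(&Tok::Newline));
    assert_eq!(toks.last(), Some(&Tok::Newline));
    println!("{toks:?}");
}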

@@ -943,16 +943,22 @@ impl Parser {
             match self.peek_kind() {
                 Some(Comma) => {
                     self.skip();
-                    if self.cur_is(Comma) {
-                        let err = self.skip_and_throw_invalid_seq_err(
-                            caused_by!(),
-                            line!() as usize,
-                            &["]", "element"],
-                            Comma,
-                        );
-                        self.errs.push(err);
-                        debug_exit_info!(self);
-                        return Err(());
+                    match self.peek_kind() {
+                        Some(Comma) => {
+                            let err = self.skip_and_throw_invalid_seq_err(
+                                caused_by!(),
+                                line!() as usize,
+                                &["]", "element"],
+                                Comma,
+                            );
+                            self.errs.push(err);
+                            debug_exit_info!(self);
+                            return Err(());
+                        }
+                        Some(RParen | RSqBr | RBrace | Dedent) => {
+                            break;
+                        }
+                        _ => {}
                     }
                     elems.push_pos(
                         self.try_reduce_elem()
@@ -2768,10 +2774,6 @@ impl Parser {
     fn try_reduce_brace_container(&mut self) -> ParseResult<BraceContainer> {
         debug_call_info!(self);
         let l_brace = expect_pop!(self, fail_next LBrace);
-        if self.cur_is(Newline) {
-            self.skip();
-            expect_pop!(self, fail_next Indent);
-        }

         // Empty brace literals
         match self.peek_kind() {

@@ -1,3 +1,7 @@
+arr = [
+    1,
+    2
+]
 dict = {
     "a": 1,
     "b": 2
@@ -10,7 +14,37 @@ tuple = (
     1,
     2
 )
+rec = {
+    .a = 1;
+    .b = 2;
+    .c = 3
+}

-print! dict
-print! set
-print! tuple
+print! arr, dict, set, tuple, rec
+
+arr2 = [
+    1,
+    2,
+]
+
+dict2 = {
+    "a": 1,
+    "b": 2,
+}
+
+set2 = {
+    1,
+    2,
+}
+
+tuple2 = (
+    1,
+    2,
+)
+
+rec2 = {
+    .foo = 1;
+    .bar = 2;
+}
+
+print! arr2, dict2, set2, tuple2, rec2

@@ -1,7 +1,7 @@
 # Record is a feature similar to object (literal notation) in JS
 # `.` means the field is public
 john = {
-    .name = "John Smith"
+    .name = "John Smith";
     .age = !27
 }

@@ -15,7 +15,7 @@ assert john.age == 27

 # A record whose values are all types will also behave as a type
 Person! = {
-    .name = Str
+    .name = Str;
     .age = Nat!
 }


@@ -1,6 +1,6 @@
 IO = Trait {
-    .Inp = Type
-    .Output = Type
+    .Inp = Type;
+    .Output = Type;
     .func = (self, x: .Inp) -> .Output
 }
