feat: parallelize the analysis process

Shunsuke Shibayama 2023-06-18 15:58:32 +09:00
parent 1abf812027
commit 75f4c206f6
16 changed files with 427 additions and 136 deletions

Cargo.lock (generated): 17 lines changed
View file

@@ -120,6 +120,7 @@ dependencies = [
  "backtrace-on-stack-overflow",
  "crossterm",
  "parking_lot",
+ "thread_local",
 ]
 
 [[package]]
@@ -262,6 +263,12 @@ dependencies = [
  "memchr",
 ]
 
+[[package]]
+name = "once_cell"
+version = "1.18.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d"
+
 [[package]]
 name = "parking_lot"
 version = "0.12.1"
@@ -425,6 +432,16 @@ dependencies = [
  "unicode-ident",
 ]
 
+[[package]]
+name = "thread_local"
+version = "1.1.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3fdd6f064ccff2d6567adcb3873ca630700f00b5ad3f060c25b5dcfd9a4ce152"
+dependencies = [
+ "cfg-if",
+ "once_cell",
+]
+
 [[package]]
 name = "tinyvec"
 version = "1.6.0"

View file

@@ -30,6 +30,7 @@ backtrace-on-stack-overflow = { version = "0.2.0", optional = true }
 [dependencies]
 crossterm = { optional = true, version = "0.25.0" }
 parking_lot = "0.12"
+thread_local = "1.1"
 
 [lib]
 path = "lib.rs"

View file

@@ -1,12 +1,17 @@
 use std::fmt;
 use std::hash::{Hash, Hasher};
+use std::thread::ThreadId;
 // use std::rc::Rc;
 
 pub use parking_lot::{
     MappedRwLockReadGuard, MappedRwLockWriteGuard, RwLock, RwLockReadGuard, RwLockWriteGuard,
 };
+use std::cell::RefCell;
+use std::ops::Deref;
 use std::sync::Arc;
 use std::time::Duration;
+use thread_local::ThreadLocal;
 
 const TIMEOUT: Duration = Duration::from_secs(2);
 
 #[derive(Debug)]
@@ -45,10 +50,11 @@ impl BorrowInfo {
 #[derive(Debug)]
 pub struct Shared<T: ?Sized> {
     data: Arc<RwLock<T>>,
-    #[cfg(any(debug_assertions, feature = "debug"))]
+    #[cfg(any(feature = "backtrace", feature = "debug"))]
     last_borrowed_at: Arc<RwLock<BorrowInfo>>,
-    #[cfg(any(debug_assertions, feature = "debug"))]
+    #[cfg(any(feature = "backtrace", feature = "debug"))]
     last_mut_borrowed_at: Arc<RwLock<BorrowInfo>>,
+    lock_thread_id: Arc<RwLock<Vec<ThreadId>>>,
 }
 
 impl<T: PartialEq> PartialEq for Shared<T>
@@ -65,10 +71,11 @@ impl<T: ?Sized> Clone for Shared<T> {
     fn clone(&self) -> Shared<T> {
         Self {
             data: Arc::clone(&self.data),
-            #[cfg(any(debug_assertions, feature = "debug"))]
+            #[cfg(any(feature = "backtrace", feature = "debug"))]
             last_borrowed_at: self.last_borrowed_at.clone(),
-            #[cfg(any(debug_assertions, feature = "debug"))]
+            #[cfg(any(feature = "backtrace", feature = "debug"))]
             last_mut_borrowed_at: self.last_mut_borrowed_at.clone(),
+            lock_thread_id: self.lock_thread_id.clone(),
         }
     }
 }
@@ -89,7 +96,7 @@ impl<T: Default> Default for Shared<T> {
 impl<T: fmt::Display> fmt::Display for Shared<T> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(f, "{}", self.borrow())
+        self.borrow().fmt(f)
     }
 }
@@ -97,10 +104,11 @@
     pub fn new(t: T) -> Self {
         Self {
             data: Arc::new(RwLock::new(t)),
-            #[cfg(any(debug_assertions, feature = "debug"))]
+            #[cfg(any(feature = "backtrace", feature = "debug"))]
             last_borrowed_at: Arc::new(RwLock::new(BorrowInfo::new(None))),
-            #[cfg(any(debug_assertions, feature = "debug"))]
+            #[cfg(any(feature = "backtrace", feature = "debug"))]
             last_mut_borrowed_at: Arc::new(RwLock::new(BorrowInfo::new(None))),
+            lock_thread_id: Arc::new(RwLock::new(vec![])),
         }
     }
@@ -115,27 +123,33 @@ impl<T> Shared<T> {
 }
 
 impl<T: ?Sized> Shared<T> {
-    #[inline]
-    pub fn copy(&self) -> Self {
-        Self {
-            data: self.data.clone(),
-            #[cfg(any(debug_assertions, feature = "debug"))]
-            last_borrowed_at: self.last_borrowed_at.clone(),
-            #[cfg(any(debug_assertions, feature = "debug"))]
-            last_mut_borrowed_at: self.last_mut_borrowed_at.clone(),
+    #[track_caller]
+    fn wait_until_unlocked(&self) {
+        let mut timeout = TIMEOUT;
+        loop {
+            let lock_thread = self.lock_thread_id.try_read_for(TIMEOUT).unwrap();
+            if lock_thread.is_empty() || lock_thread.last() == Some(&std::thread::current().id()) {
+                break;
+            }
+            std::thread::sleep(Duration::from_millis(1));
+            timeout -= Duration::from_millis(1);
+            if timeout == Duration::from_secs(0) {
+                panic!("timeout");
+            }
         }
     }
 
     #[inline]
     #[track_caller]
     pub fn borrow(&self) -> RwLockReadGuard<'_, T> {
-        #[cfg(any(debug_assertions, feature = "debug"))]
+        self.wait_until_unlocked();
+        #[cfg(any(feature = "backtrace", feature = "debug"))]
         {
             *self.last_borrowed_at.try_write_for(TIMEOUT).unwrap() =
                 BorrowInfo::new(Some(std::panic::Location::caller()));
         }
         self.data.try_read_for(TIMEOUT).unwrap_or_else(|| {
-            #[cfg(any(debug_assertions, feature = "debug"))]
+            #[cfg(any(feature = "backtrace", feature = "debug"))]
             {
                 panic!(
                     "Shared::borrow: already borrowed at {}, mutably borrowed at {:?}",
@@ -143,7 +157,7 @@ impl<T: ?Sized> Shared<T> {
                     self.last_mut_borrowed_at.try_read_for(TIMEOUT).unwrap()
                 )
             }
-            #[cfg(not(any(debug_assertions, feature = "debug")))]
+            #[cfg(not(any(feature = "backtrace", feature = "debug")))]
             {
                 panic!("Shared::borrow: already borrowed")
             }
@@ -153,7 +167,8 @@
     #[inline]
     #[track_caller]
     pub fn borrow_mut(&self) -> RwLockWriteGuard<'_, T> {
-        #[cfg(any(debug_assertions, feature = "debug"))]
+        self.wait_until_unlocked();
+        #[cfg(any(feature = "backtrace", feature = "debug"))]
         {
             let caller = std::panic::Location::caller();
             *self.last_borrowed_at.try_write_for(TIMEOUT).unwrap() = BorrowInfo::new(Some(caller));
@@ -161,7 +176,7 @@
                 BorrowInfo::new(Some(caller));
         }
         self.data.try_write_for(TIMEOUT).unwrap_or_else(|| {
-            #[cfg(any(debug_assertions, feature = "debug"))]
+            #[cfg(any(feature = "backtrace", feature = "debug"))]
             {
                 panic!(
                     "Shared::borrow_mut: already borrowed at {}, mutably borrowed at {}",
@@ -169,13 +184,58 @@
                     self.last_mut_borrowed_at.try_read_for(TIMEOUT).unwrap()
                 )
             }
-            #[cfg(not(any(debug_assertions, feature = "debug")))]
+            #[cfg(not(any(feature = "backtrace", feature = "debug")))]
             {
                 panic!("Shared::borrow_mut: already borrowed")
             }
         })
     }
 
+    /// Lock the data and deny access from other threads.
+    /// Locking can be done any number of times; the data does not become available again
+    /// until it has been unlocked the same number of times.
+    pub fn inter_thread_lock(&self) {
+        let mut lock_thread = self.lock_thread_id.try_write_for(TIMEOUT).unwrap();
+        loop {
+            if lock_thread.is_empty() || lock_thread.last() == Some(&std::thread::current().id()) {
+                break;
+            }
+            drop(lock_thread);
+            lock_thread = self.lock_thread_id.try_write_for(TIMEOUT).unwrap();
+        }
+        lock_thread.push(std::thread::current().id());
+    }
+
+    #[track_caller]
+    pub fn inter_thread_unlock(&self) {
+        let mut lock_thread = self.lock_thread_id.try_write_for(TIMEOUT).unwrap();
+        loop {
+            if lock_thread.is_empty() {
+                panic!("not locked");
+            } else if lock_thread.last() == Some(&std::thread::current().id()) {
+                break;
+            }
+            drop(lock_thread);
+            lock_thread = self.lock_thread_id.try_write_for(TIMEOUT).unwrap();
+        }
+        lock_thread.pop();
+    }
+
+    pub fn inter_thread_unlock_using_id(&self, id: ThreadId) {
+        let mut lock_thread = self.lock_thread_id.try_write_for(TIMEOUT).unwrap();
+        loop {
+            if lock_thread.is_empty() {
+                panic!("not locked");
+            } else if lock_thread.last() == Some(&id)
+                || lock_thread.last() == Some(&std::thread::current().id())
+            {
+                break;
+            }
+            drop(lock_thread);
+            lock_thread = self.lock_thread_id.try_write_for(TIMEOUT).unwrap();
+        }
+        lock_thread.pop();
+    }
+
     pub fn get_mut(&mut self) -> Option<&mut T> {
         Arc::get_mut(&mut self.data).map(|mutex| mutex.get_mut())
     }
@@ -205,3 +265,54 @@ impl<T: Clone> Shared<T> {
         self.borrow().clone()
     }
 }
+
+/// Thread-local objects that can be shared among threads.
+/// The initial value can be shared globally, but changes made in one thread are not reflected in the others.
+/// Otherwise, this behaves like a `RefCell`.
+#[derive(Clone)]
+pub struct LocalShared<T: Send + Clone> {
+    data: Arc<ThreadLocal<RefCell<T>>>,
+    init: Arc<T>,
+}
+
+impl<T: fmt::Debug + Send + Clone> fmt::Debug for LocalShared<T> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        self.deref().fmt(f)
+    }
+}
+
+impl<T: fmt::Display + Send + Clone> fmt::Display for LocalShared<T>
+where
+    RefCell<T>: fmt::Display,
+{
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        self.deref().fmt(f)
+    }
+}
+
+impl<T: Send + Clone> Deref for LocalShared<T> {
+    type Target = RefCell<T>;
+    fn deref(&self) -> &Self::Target {
+        self.data
+            .get_or(|| RefCell::new(self.init.clone().as_ref().clone()))
+    }
+}
+
+impl<T: Send + Clone> LocalShared<T> {
+    pub fn new(init: T) -> Self {
+        Self {
+            data: Arc::new(ThreadLocal::new()),
+            init: Arc::new(init),
+        }
+    }
+
+    pub fn update_init(&mut self) {
+        let clone = self.clone_inner();
+        // NG: self.init = Arc::new(clone);
+        *self = Self::new(clone);
+    }
+
+    pub fn clone_inner(&self) -> T {
+        self.deref().borrow().clone()
+    }
+}
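
Editor's note: a minimal usage sketch (not part of this commit) of the two additions above — the cooperative inter-thread lock on `Shared` and the new `LocalShared` wrapper. It assumes only the `erg_common::shared` API shown in this diff; the values and assertions are illustrative.

    use erg_common::shared::{LocalShared, Shared};

    fn main() {
        // Shared<T>: one thread can take the inter-thread lock (recursively);
        // borrow()/borrow_mut() from other threads wait in wait_until_unlocked()
        // until the lock has been released the same number of times.
        let counter = Shared::new(0i32);
        counter.inter_thread_lock();
        *counter.borrow_mut() += 1; // allowed: this thread holds the lock
        counter.inter_thread_unlock();

        let cloned = counter.clone();
        std::thread::spawn(move || {
            // would wait here if another thread still held the inter-thread lock
            *cloned.borrow_mut() += 1;
        })
        .join()
        .unwrap();
        assert_eq!(*counter.borrow(), 2);

        // LocalShared<T>: each thread lazily gets its own RefCell seeded from `init`,
        // so mutations made by one thread are invisible to the others.
        let local = LocalShared::new(vec![1, 2, 3]);
        let local2 = local.clone();
        std::thread::spawn(move || local2.borrow_mut().push(4))
            .join()
            .unwrap();
        assert_eq!(local.clone_inner(), vec![1, 2, 3]);
    }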

View file

@@ -1,6 +1,6 @@
 #[cfg(all(unix, any(feature = "debug", feature = "backtrace")))]
 pub use backtrace_on_stack_overflow;
-use std::thread;
+use std::thread::{self, JoinHandle};
 
 #[macro_export]
 macro_rules! enable_overflow_stacktrace {
@@ -37,3 +37,17 @@ where
         run()
     }
 }
+
+pub fn spawn_new_thread<F, T>(run: F, name: &str) -> JoinHandle<T>
+where
+    F: FnOnce() -> T + Send + 'static,
+    T: Send + 'static,
+{
+    enable_overflow_stacktrace!();
+    const STACK_SIZE: usize = 4 * 1024 * 1024;
+    thread::Builder::new()
+        .name(name.to_string())
+        .stack_size(STACK_SIZE)
+        .spawn(run)
+        .unwrap()
+}
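
Editor's note: `spawn_new_thread` is a thin wrapper around `std::thread::Builder` that names the thread and gives it a 4 MiB stack (with the overflow stack-trace hook enabled). A minimal caller-side sketch, not part of this commit:

    use erg_common::spawn::spawn_new_thread;

    fn main() {
        // The closure runs on a named thread with a 4 MiB stack;
        // the returned JoinHandle yields the closure's result.
        let handle = spawn_new_thread(|| 40 + 2, "analysis-worker");
        assert_eq!(handle.join().unwrap(), 42);
    }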

View file

@@ -12,6 +12,7 @@ use erg_parser::ast::VarName;
 use crate::artifact::{CompleteArtifact, ErrorArtifact};
 use crate::context::{Context, ContextProvider};
+use crate::optimize::HIROptimizer;
 use crate::ty::codeobj::CodeObj;
 
 use crate::build_hir::HIRBuilder;
@@ -20,7 +21,7 @@ use crate::desugar_hir::HIRDesugarer;
 use crate::error::{CompileError, CompileErrors, CompileWarnings};
 use crate::hir::Expr;
 use crate::link_hir::HIRLinker;
-use crate::module::{SharedCompilerResource, SharedModuleCache};
+use crate::module::SharedCompilerResource;
 use crate::varinfo::VarInfo;
 
 /// * registered as global -> Global
@@ -113,7 +114,7 @@
 pub struct Compiler {
     pub cfg: ErgConfig,
     builder: HIRBuilder,
-    mod_cache: SharedModuleCache,
+    shared: SharedCompilerResource,
     code_generator: PyCodeGenerator,
 }
@@ -131,7 +132,7 @@
     fn new(cfg: ErgConfig) -> Self {
         let shared = SharedCompilerResource::new(cfg.copy());
         Self {
-            mod_cache: shared.mod_cache.clone(),
+            shared: shared.clone(),
             builder: HIRBuilder::new_with_cache(cfg.copy(), "<module>", shared),
             code_generator: PyCodeGenerator::new(cfg.copy()),
             cfg,
@@ -231,7 +232,7 @@
         mode: &str,
     ) -> Result<CompleteArtifact<CodeObj>, ErrorArtifact> {
         log!(info "the compiling process has started.");
-        let arti = self.build_link_desugar(src, mode)?;
+        let arti = self.build_optimize_link_desugar(src, mode)?;
         let codeobj = self.code_generator.emit(arti.object);
         log!(info "code object:\n{}", codeobj.code_info(Some(self.code_generator.py_version)));
         log!(info "the compiling process has completed");
@@ -244,7 +245,7 @@
         mode: &str,
     ) -> Result<CompleteArtifact<(CodeObj, Option<Expr>)>, ErrorArtifact> {
         log!(info "the compiling process has started.");
-        let arti = self.build_link_desugar(src, mode)?;
+        let arti = self.build_optimize_link_desugar(src, mode)?;
         let last = arti.object.module.last().cloned();
         let codeobj = self.code_generator.emit(arti.object);
         log!(info "code object:\n{}", codeobj.code_info(Some(self.code_generator.py_version)));
@@ -252,14 +253,15 @@
         Ok(CompleteArtifact::new((codeobj, last), arti.warns))
     }
 
-    fn build_link_desugar(
+    fn build_optimize_link_desugar(
         &mut self,
         src: String,
         mode: &str,
     ) -> Result<CompleteArtifact, ErrorArtifact> {
         let artifact = self.builder.build(src, mode)?;
-        let linker = HIRLinker::new(&self.cfg, &self.mod_cache);
-        let hir = linker.link(artifact.object);
+        let optimized = HIROptimizer::optimize(self.shared.clone(), artifact.object);
+        let linker = HIRLinker::new(&self.cfg, &self.shared.mod_cache);
+        let hir = linker.link(optimized);
         let desugared = HIRDesugarer::desugar(hir);
         Ok(CompleteArtifact::new(desugared, artifact.warns))
     }

View file

@@ -266,7 +266,7 @@ impl Generalizer {
     }
 }
 
-    fn generalize_constraint<T: CanbeFree>(&mut self, fv: &Free<T>) -> Constraint {
+    fn generalize_constraint<T: CanbeFree + Send + Clone>(&mut self, fv: &Free<T>) -> Constraint {
         if let Some((sub, sup)) = fv.get_subsup() {
             let sub = self.generalize_t(sub, true);
             let sup = self.generalize_t(sup, true);
@@ -509,9 +509,10 @@ impl<'c, 'q, 'l, L: Locational> Dereferencer<'c, 'q, 'l, L> {
                 let t = fv.unwrap_linked();
                 self.deref_tyvar(t)
             }
-            Type::FreeVar(fv)
+            Type::FreeVar(mut fv)
                 if fv.is_generalized() && self.qnames.contains(&fv.unbound_name().unwrap()) =>
             {
+                fv.update_init();
                 Ok(Type::FreeVar(fv))
             }
             // ?T(:> Nat, <: Int)[n] ==> Nat (self.level <= n)

View file

@@ -49,11 +49,20 @@ pub enum SubstituteResult {
 impl Context {
     pub(crate) fn get_ctx_from_path(&self, path: &Path) -> Option<&Context> {
+        if self.module_path() == Some(path) {
+            return self.get_module();
+        }
         if self.get_module()
             .map_or(false, |ctx| matches!((ctx.cfg.input.unescaped_path().canonicalize(), path.canonicalize()), (Ok(l), Ok(r)) if l == r))
         {
             return Some(self.get_module().unwrap())
         }
+        if self.shared.is_some()
+            && self.promises().is_registered(path)
+            && !self.promises().is_finished(path)
+        {
+            let _result = self.promises().join(path);
+        }
         self.opt_mod_cache()?
             .raw_ref_ctx(path)
             .or_else(|| self.opt_py_mod_cache()?.raw_ref_ctx(path))
@@ -743,7 +752,7 @@
     ) -> Triple<VarInfo, TyCheckError> {
         match t {
             // (obj: Never).foo: Never
-            Type::Never => Triple::Ok(VarInfo::ILLEGAL.clone()),
+            Type::Never => Triple::Ok(VarInfo::ILLEGAL),
             Type::FreeVar(fv) if fv.is_linked() => {
                 self.get_attr_info_from_attributive(&fv.crack(), ident)
             }

View file

@@ -16,6 +16,7 @@ use erg_common::levenshtein::get_similar_name;
 use erg_common::pathutil::{DirKind, FileKind};
 use erg_common::python_util::BUILTIN_PYTHON_MODS;
 use erg_common::set::Set;
+use erg_common::spawn::spawn_new_thread;
 use erg_common::traits::{Locational, Stream};
 use erg_common::triple::Triple;
 use erg_common::{get_hash, log, set, unique_in_place, Str};
@@ -26,6 +27,7 @@ use ast::{
 };
 use erg_parser::ast;
 
+use crate::artifact::ErrorArtifact;
 use crate::ty::constructors::{
     free_var, func, func0, func1, proc, ref_, ref_mut, tp_enum, unknown_len_array_t, v_enum,
 };
@@ -1912,36 +1914,50 @@
         Ok(())
     }
 
+    /// Start a new build process and let it do the module analysis.
+    /// The analysis is currently speculative, so the handles of unused modules may never be joined.
+    /// In that case, the `HIROptimizer` removes the unused imports from the `HIR`.
     fn build_erg_mod(
         &self,
         path: PathBuf,
         __name__: &Str,
         loc: &impl Locational,
     ) -> CompileResult<PathBuf> {
-        let mod_cache = self.mod_cache();
         let mut cfg = self.cfg.inherit(path.clone());
         let src = cfg
             .input
             .try_read()
             .map_err(|_| self.import_err(line!(), __name__, loc))?;
-        let mut builder =
-            HIRBuilder::new_with_cache(cfg, __name__, self.shared.as_ref().unwrap().clone());
-        match builder.build(src, "exec") {
-            Ok(artifact) => {
-                mod_cache.register(
-                    path.clone(),
-                    Some(artifact.object),
-                    builder.pop_mod_ctx().unwrap(),
-                );
-                Ok(path)
-            }
-            Err(artifact) => {
-                if let Some(hir) = artifact.object {
-                    mod_cache.register(path, Some(hir), builder.pop_mod_ctx().unwrap());
-                }
-                Err(artifact.errors)
-            }
-        }
+        let name = __name__.clone();
+        let _path = path.clone();
+        let shared = self.shared.as_ref().unwrap().clone();
+        let run = move || {
+            let mut builder = HIRBuilder::new_with_cache(cfg, name, shared.clone());
+            match builder.build(src, "exec") {
+                Ok(artifact) => {
+                    shared.mod_cache.register(
+                        _path.clone(),
+                        Some(artifact.object),
+                        builder.pop_mod_ctx().unwrap(),
+                    );
+                    // shared.warns.extend(artifact.warns);
+                    Ok(())
+                }
+                Err(artifact) => {
+                    if let Some(hir) = artifact.object {
+                        shared
+                            .mod_cache
+                            .register(_path, Some(hir), builder.pop_mod_ctx().unwrap());
+                    }
+                    // shared.warns.extend(artifact.warns);
+                    // shared.errors.extend(artifact.errors);
+                    Err(ErrorArtifact::new(artifact.errors, artifact.warns))
+                }
+            }
+        };
+        let handle = spawn_new_thread(run, __name__);
+        self.shared().promises.insert(path.clone(), handle);
+        Ok(path)
     }
 
     fn similar_builtin_py_mod_name(&self, name: &Str) -> Option<Str> {
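
Editor's note: `build_erg_mod` now spawns one analysis thread per imported module and registers its `JoinHandle` under the module path (`self.shared().promises.insert(path, handle)`); `get_ctx_from_path` joins that promise before reading the module cache. The sketch below shows the general shape of this pattern using only std types — `Promises` and its methods here are hypothetical stand-ins for the crate's own promise table, not its actual API.

    use std::collections::HashMap;
    use std::path::PathBuf;
    use std::sync::Mutex;
    use std::thread::JoinHandle;

    // Hypothetical stand-in for the `promises` table used above: each imported
    // module gets a handle to its analysis thread, and whoever needs the finished
    // module context joins that handle first.
    struct Promises {
        handles: Mutex<HashMap<PathBuf, JoinHandle<Result<(), String>>>>,
    }

    impl Promises {
        fn insert(&self, path: PathBuf, handle: JoinHandle<Result<(), String>>) {
            self.handles.lock().unwrap().insert(path, handle);
        }

        /// Block until the analysis of `path` (if any) has finished.
        fn join(&self, path: &PathBuf) -> Option<Result<(), String>> {
            let handle = self.handles.lock().unwrap().remove(path)?;
            Some(handle.join().unwrap())
        }
    }

    fn main() {
        let promises = Promises { handles: Mutex::new(HashMap::new()) };
        let path = PathBuf::from("foo.er");

        // build_erg_mod side: kick off the analysis and register the promise.
        let handle = std::thread::spawn(|| -> Result<(), String> {
            // ... build the module, register the result in the shared module cache ...
            Ok(())
        });
        promises.insert(path.clone(), handle);

        // get_ctx_from_path side: join the promise before reading from the cache.
        let _result = promises.join(&path);
    }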

View file

@@ -462,4 +462,77 @@ impl SideEffectChecker {
             | Expr::Dummy(_) => {}
         }
     }
+
+    pub(crate) fn is_impure(expr: &Expr) -> bool {
+        match expr {
+            Expr::Call(call) => {
+                call.ref_t().is_procedure()
+                    || call
+                        .args
+                        .pos_args
+                        .iter()
+                        .any(|parg| Self::is_impure(&parg.expr))
+                    || call
+                        .args
+                        .var_args
+                        .iter()
+                        .any(|varg| Self::is_impure(&varg.expr))
+                    || call
+                        .args
+                        .kw_args
+                        .iter()
+                        .any(|kwarg| Self::is_impure(&kwarg.expr))
+            }
+            Expr::BinOp(bin) => Self::is_impure(&bin.lhs) || Self::is_impure(&bin.rhs),
+            Expr::UnaryOp(unary) => Self::is_impure(&unary.expr),
+            Expr::Array(arr) => match arr {
+                Array::Normal(arr) => arr
+                    .elems
+                    .pos_args
+                    .iter()
+                    .any(|elem| Self::is_impure(&elem.expr)),
+                Array::WithLength(arr) => Self::is_impure(&arr.elem) || Self::is_impure(&arr.len),
+                _ => todo!(),
+            },
+            Expr::Tuple(tup) => match tup {
+                Tuple::Normal(tup) => tup
+                    .elems
+                    .pos_args
+                    .iter()
+                    .any(|elem| Self::is_impure(&elem.expr)),
+            },
+            Expr::Set(set) => match set {
+                Set::Normal(set) => set
+                    .elems
+                    .pos_args
+                    .iter()
+                    .any(|elem| Self::is_impure(&elem.expr)),
+                Set::WithLength(set) => Self::is_impure(&set.elem) || Self::is_impure(&set.len),
+            },
+            Expr::Dict(dict) => match dict {
+                Dict::Normal(dict) => dict
+                    .kvs
+                    .iter()
+                    .any(|kv| Self::is_impure(&kv.key) || Self::is_impure(&kv.value)),
+                _ => todo!(),
+            },
+            Expr::Lambda(lambda) => {
+                lambda.op.is_procedural() || lambda.body.iter().any(Self::is_impure)
+            }
+            Expr::Def(def) => def.sig.is_procedural() || def.body.block.iter().any(Self::is_impure),
+            /*
+            Expr::ClassDef(class_def) => {
+                class_def.methods.iter().any(|def| Self::is_impure(def))
+            }
+            Expr::PatchDef(patch_def) => {
+                patch_def.methods.iter().any(|def| Self::is_impure(def))
+            }*/
+            Expr::Code(block) | Expr::Compound(block) => block.iter().any(Self::is_impure),
+            _ => false,
+        }
+    }
+
+    pub(crate) fn is_pure(expr: &Expr) -> bool {
+        !Self::is_impure(expr)
+    }
 }

View file

@@ -632,7 +632,7 @@ impl ASTLowerer {
             Triple::Ok(vi) => vi,
             Triple::Err(errs) => {
                 self.errs.push(errs);
-                VarInfo::ILLEGAL.clone()
+                VarInfo::ILLEGAL
             }
             Triple::None => {
                 let self_t = obj.t();
@@ -652,7 +652,7 @@
                     similar_info,
                 );
                 self.errs.push(err);
-                VarInfo::ILLEGAL.clone()
+                VarInfo::ILLEGAL
             }
         };
         self.inc_ref(&vi, &attr.ident.name);
@@ -694,7 +694,7 @@
             Triple::Ok(vi) => vi,
             Triple::Err(err) => {
                 self.errs.push(err);
-                VarInfo::ILLEGAL.clone()
+                VarInfo::ILLEGAL
             }
             Triple::None => {
                 let (similar_info, similar_name) = self
@@ -712,7 +712,7 @@
                     similar_info,
                 );
                 self.errs.push(err);
-                VarInfo::ILLEGAL.clone()
+                VarInfo::ILLEGAL
             }
         };
         (
@@ -813,7 +813,7 @@
             .get_binop_t(&bin.op, &args, &self.cfg.input, &self.module.context)
             .unwrap_or_else(|errs| {
                 self.errs.extend(errs);
-                VarInfo::ILLEGAL.clone()
+                VarInfo::ILLEGAL
             });
         if let Some(guard) = guard {
             if let Some(return_t) = vi.t.mut_return_t() {
@@ -852,7 +852,7 @@
             .get_unaryop_t(&unary.op, &args, &self.cfg.input, &self.module.context)
             .unwrap_or_else(|errs| {
                 self.errs.extend(errs);
-                VarInfo::ILLEGAL.clone()
+                VarInfo::ILLEGAL
             });
         let mut args = args.into_iter();
         let expr = args.next().unwrap().expr;
@@ -973,7 +973,7 @@
             Err((vi, es)) => {
                 self.module.context.higher_order_caller.pop();
                 errs.extend(es);
-                vi.unwrap_or(VarInfo::ILLEGAL.clone())
+                vi.unwrap_or(VarInfo::ILLEGAL)
             }
         };
         if let Err(es) = self.module.context.propagate(&mut vi.t, &obj) {
@@ -1102,7 +1102,7 @@
             Ok(vi) => vi,
             Err((vi, errs)) => {
                 self.errs.extend(errs);
-                vi.unwrap_or(VarInfo::ILLEGAL.clone())
+                vi.unwrap_or(VarInfo::ILLEGAL)
             }
         };
         let args = hir::Args::pos_only(args, None);

View file

@@ -1,14 +1,16 @@
-use crate::artifact::CompleteArtifact;
-use crate::error::CompileWarnings;
+use crate::effectcheck::SideEffectChecker;
 use crate::hir::*;
+use crate::module::SharedCompilerResource;
 // use crate::erg_common::traits::Stream;
 
 #[derive(Debug)]
-pub struct HIROptimizer {}
+pub struct HIROptimizer {
+    shared: SharedCompilerResource,
+}
 
 impl HIROptimizer {
-    pub fn optimize(hir: HIR) -> CompleteArtifact {
-        let mut optimizer = HIROptimizer {};
+    pub fn optimize(shared: SharedCompilerResource, hir: HIR) -> HIR {
+        let mut optimizer = HIROptimizer { shared };
         optimizer.eliminate_dead_code(hir)
     }
@@ -16,15 +18,50 @@
         todo!()
     }
 
-    fn _eliminate_unused_variables(&mut self, mut _hir: HIR) -> (HIR, CompileWarnings) {
-        todo!()
-    }
-
-    fn eliminate_dead_code(&mut self, hir: HIR) -> CompleteArtifact {
-        CompleteArtifact::new(
-            self.eliminate_discarded_variables(hir),
-            CompileWarnings::empty(),
-        )
+    fn eliminate_unused_variables(&mut self, mut hir: HIR) -> HIR {
+        for chunk in hir.module.iter_mut() {
+            self.eliminate_unused_def(chunk);
+        }
+        hir
+    }
+
+    fn eliminate_unused_def(&mut self, expr: &mut Expr) {
+        match expr {
+            Expr::Def(def) => {
+                if self
+                    .shared
+                    .index
+                    .get_refs(&def.sig.ident().vi.def_loc)
+                    .unwrap()
+                    .referrers
+                    .is_empty()
+                    && SideEffectChecker::is_pure(expr)
+                {
+                    *expr = Expr::Dummy(Dummy::empty());
+                }
+            }
+            Expr::Call(call) => {
+                for arg in call.args.pos_args.iter_mut() {
+                    self.eliminate_unused_def(&mut arg.expr);
+                }
+            }
+            Expr::Code(block) | Expr::Compound(block) => {
+                for chunk in block.iter_mut() {
+                    self.eliminate_unused_def(chunk);
+                }
+            }
+            Expr::Lambda(lambda) => {
+                for chunk in lambda.body.iter_mut() {
+                    self.eliminate_unused_def(chunk);
+                }
+            }
+            _ => {}
+        }
+    }
+
+    fn eliminate_dead_code(&mut self, hir: HIR) -> HIR {
+        let hir = self.eliminate_discarded_variables(hir);
+        self.eliminate_unused_variables(hir)
     }
 
     /// ```erg
@@ -35,7 +72,7 @@
     /// ```erg
     /// a = 1
     /// ```
-    fn eliminate_discarded_variables(&mut self, mut _hir: HIR) -> HIR {
-        todo!()
+    fn eliminate_discarded_variables(&mut self, hir: HIR) -> HIR {
+        hir
     }
 }

View file

@@ -1,10 +1,10 @@
+use std::cell::{Ref, RefMut};
 use std::fmt;
 use std::hash::{Hash, Hasher};
 use std::mem;
 use std::sync::atomic::AtomicUsize;
 
-use erg_common::shared::{MappedRwLockReadGuard, RwLockReadGuard, RwLockWriteGuard};
-use erg_common::shared::{MappedRwLockWriteGuard, Shared};
+use erg_common::shared::LocalShared;
 use erg_common::traits::{LimitedDisplay, StructuralEq};
 use erg_common::Str;
 use erg_common::{addr_eq, log};
@@ -236,7 +236,7 @@ pub trait CanbeFree {
     fn update_constraint(&self, constraint: Constraint, in_instantiation: bool);
 }
 
-impl<T: CanbeFree> Free<T> {
+impl<T: CanbeFree + Send + Clone> Free<T> {
     pub fn unbound_name(&self) -> Option<Str> {
         self.borrow().unbound_name()
     }
@@ -491,7 +491,7 @@ impl<T> FreeKind<T> {
 }
 
 #[derive(Debug, Clone)]
-pub struct Free<T>(Shared<FreeKind<T>>);
+pub struct Free<T: Send + Clone>(LocalShared<FreeKind<T>>);
 
 impl Hash for Free<Type> {
     fn hash<H: Hasher>(&self, state: &mut H) {
@@ -584,25 +584,25 @@ impl PartialEq for Free<TyParam> {
 impl Eq for Free<Type> {}
 impl Eq for Free<TyParam> {}
 
-impl<T: LimitedDisplay> fmt::Display for Free<T> {
+impl<T: LimitedDisplay + Send + Clone> fmt::Display for Free<T> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         write!(f, "{}", self.0.borrow())
     }
 }
 
-impl<T: LimitedDisplay> LimitedDisplay for Free<T> {
+impl<T: LimitedDisplay + Send + Clone> LimitedDisplay for Free<T> {
     fn limited_fmt(&self, f: &mut fmt::Formatter<'_>, limit: usize) -> fmt::Result {
         self.0.borrow().limited_fmt(f, limit)
     }
 }
 
-impl<T> Free<T> {
+impl<T: Send + Clone> Free<T> {
     #[track_caller]
-    pub fn borrow(&self) -> RwLockReadGuard<'_, FreeKind<T>> {
+    pub fn borrow(&self) -> Ref<'_, FreeKind<T>> {
         self.0.borrow()
     }
     #[track_caller]
-    pub fn borrow_mut(&self) -> RwLockWriteGuard<'_, FreeKind<T>> {
+    pub fn borrow_mut(&self) -> RefMut<'_, FreeKind<T>> {
         self.0.borrow_mut()
     }
     /// very unsafe, use `force_replace` instead whenever possible
@@ -612,12 +612,6 @@ impl<T> Free<T> {
     pub fn forced_as_ref(&self) -> &FreeKind<T> {
         unsafe { self.as_ptr().as_ref() }.unwrap()
     }
-    pub fn try_borrow(&self) -> Option<RwLockReadGuard<'_, FreeKind<T>>> {
-        self.0.try_borrow()
-    }
-    pub fn try_borrow_mut(&self) -> Option<RwLockWriteGuard<'_, FreeKind<T>>> {
-        self.0.try_borrow_mut()
-    }
 }
 
 impl Free<Type> {
@@ -665,7 +659,9 @@ impl Free<TyParam> {
     }
 }
 
-impl<T: StructuralEq + CanbeFree + Clone + Default + fmt::Debug> StructuralEq for Free<T> {
+impl<T: StructuralEq + CanbeFree + Clone + Default + fmt::Debug + Send + Sync + 'static>
+    StructuralEq for Free<T>
+{
     fn structural_eq(&self, other: &Self) -> bool {
         if let (Some((l, r)), Some((l2, r2))) = (self.get_subsup(), other.get_subsup()) {
             self.dummy_link();
@@ -680,10 +676,14 @@ impl<T: StructuralEq + CanbeFree + Clone + Default + fmt::Debug> StructuralEq fo
     }
 }
 
-impl<T: Clone> Free<T> {
+impl<T: Send + Clone> Free<T> {
     pub fn clone_inner(&self) -> FreeKind<T> {
         self.0.clone_inner()
     }
+
+    pub fn update_init(&mut self) {
+        self.0.update_init();
+    }
 }
 
 impl HasLevel for Free<Type> {
@@ -743,14 +743,14 @@ impl HasLevel for Free<TyParam> {
     }
 }
 
-impl<T> Free<T> {
+impl<T: Send + Clone> Free<T> {
     pub fn new(f: FreeKind<T>) -> Self {
-        Self(Shared::new(f))
+        Self(LocalShared::new(f))
     }
     pub fn new_unbound(level: Level, constraint: Constraint) -> Self {
         UNBOUND_ID.fetch_add(1, std::sync::atomic::Ordering::SeqCst);
-        Self(Shared::new(FreeKind::unbound(
+        Self(LocalShared::new(FreeKind::unbound(
             UNBOUND_ID.load(std::sync::atomic::Ordering::SeqCst),
             level,
             constraint,
@@ -758,29 +758,20 @@ impl<T> Free<T> {
     }
     pub fn new_named_unbound(name: Str, level: Level, constraint: Constraint) -> Self {
-        Self(Shared::new(FreeKind::named_unbound(
+        Self(LocalShared::new(FreeKind::named_unbound(
             name, level, constraint,
         )))
     }
     pub fn new_linked(t: T) -> Self {
-        Self(Shared::new(FreeKind::Linked(t)))
-    }
-    #[track_caller]
-    pub fn replace(&self, to: FreeKind<T>) {
-        // prevent linking to self
-        if self.is_linked() && addr_eq!(*self.borrow(), to) {
-            return;
-        }
-        *self.borrow_mut() = to;
+        Self(LocalShared::new(FreeKind::Linked(t)))
     }
     /// returns linked type (panic if self is unbounded)
     /// NOTE: check by `.is_linked` before call
     #[track_caller]
-    pub fn crack(&self) -> MappedRwLockReadGuard<'_, T> {
-        RwLockReadGuard::map(self.borrow(), |f| match f {
+    pub fn crack(&self) -> Ref<'_, T> {
+        Ref::map(self.borrow(), |f| match f {
             FreeKind::Linked(t) | FreeKind::UndoableLinked { t, .. } => t,
             FreeKind::Unbound { .. } | FreeKind::NamedUnbound { .. } => {
                 panic!("the value is unbounded")
@@ -789,8 +780,8 @@ impl<T> Free<T> {
     }
     #[track_caller]
-    pub fn crack_constraint(&self) -> MappedRwLockReadGuard<'_, Constraint> {
-        RwLockReadGuard::map(self.borrow(), |f| match f {
+    pub fn crack_constraint(&self) -> Ref<'_, Constraint> {
+        Ref::map(self.borrow(), |f| match f {
             FreeKind::Linked(_) | FreeKind::UndoableLinked { .. } => panic!("the value is linked"),
             FreeKind::Unbound { constraint, .. } | FreeKind::NamedUnbound { constraint, .. } => {
                 constraint
@@ -798,22 +789,6 @@ impl<T> Free<T> {
         })
     }
-    pub fn is_linked(&self) -> bool {
-        self.borrow().linked().is_some()
-    }
-    pub fn is_undoable_linked(&self) -> bool {
-        self.borrow().is_undoable_linked()
-    }
-    pub fn is_named_unbound(&self) -> bool {
-        self.borrow().is_named_unbound()
-    }
-    pub fn is_unnamed_unbound(&self) -> bool {
-        self.borrow().is_unnamed_unbound()
-    }
     pub fn unsafe_crack(&self) -> &T {
         match unsafe { self.as_ptr().as_ref().unwrap() } {
             FreeKind::Linked(t) | FreeKind::UndoableLinked { t, .. } => t,
@@ -828,7 +803,34 @@ impl<T> Free<T> {
         }
     }
 }
 
-impl<T: Clone + fmt::Debug> Free<T> {
+impl<T: Send + Sync + 'static + Clone> Free<T> {
+    pub fn is_linked(&self) -> bool {
+        self.borrow().linked().is_some()
+    }
+
+    pub fn is_undoable_linked(&self) -> bool {
+        self.borrow().is_undoable_linked()
+    }
+
+    pub fn is_named_unbound(&self) -> bool {
+        self.borrow().is_named_unbound()
+    }
+
+    pub fn is_unnamed_unbound(&self) -> bool {
+        self.borrow().is_unnamed_unbound()
+    }
+
+    #[track_caller]
+    pub fn replace(&self, to: FreeKind<T>) {
+        // prevent linking to self
+        if self.is_linked() && addr_eq!(*self.borrow(), to) {
+            return;
+        }
+        *self.borrow_mut() = to;
+    }
+}
+
+impl<T: Clone + fmt::Debug + Send + Sync + 'static> Free<T> {
     /// SAFETY: use `Type/TyParam::link` instead of this.
     /// This method may cause circular references.
     #[track_caller]
@@ -893,11 +895,11 @@ impl<T: Clone + fmt::Debug> Free<T> {
     }
     #[track_caller]
-    pub fn get_linked_ref(&self) -> Option<MappedRwLockReadGuard<T>> {
+    pub fn get_linked_ref(&self) -> Option<Ref<T>> {
         if !self.is_linked() {
             None
         } else {
-            let mapped = RwLockReadGuard::map(self.borrow(), |f| match f {
+            let mapped = Ref::map(self.borrow(), |f| match f {
                 FreeKind::Linked(t) | FreeKind::UndoableLinked { t, .. } => t,
                 FreeKind::Unbound { .. } | FreeKind::NamedUnbound { .. } => unreachable!(),
             });
@@ -906,11 +908,11 @@ impl<T: Clone + fmt::Debug> Free<T> {
     }
     #[track_caller]
-    pub fn get_linked_refmut(&self) -> Option<MappedRwLockWriteGuard<T>> {
+    pub fn get_linked_refmut(&self) -> Option<RefMut<T>> {
         if !self.is_linked() {
             None
         } else {
-            let mapped = RwLockWriteGuard::map(self.borrow_mut(), |f| match f {
+            let mapped = RefMut::map(self.borrow_mut(), |f| match f {
                 FreeKind::Linked(t) | FreeKind::UndoableLinked { t, .. } => t,
                 FreeKind::Unbound { .. } | FreeKind::NamedUnbound { .. } => unreachable!(),
             });
@@ -919,11 +921,11 @@ impl<T: Clone + fmt::Debug> Free<T> {
     }
     #[track_caller]
-    pub fn get_previous(&self) -> Option<MappedRwLockReadGuard<Box<FreeKind<T>>>> {
+    pub fn get_previous(&self) -> Option<Ref<Box<FreeKind<T>>>> {
         if !self.is_undoable_linked() {
             None
         } else {
-            let mapped = RwLockReadGuard::map(self.borrow(), |f| match f {
+            let mapped = Ref::map(self.borrow(), |f| match f {
                 FreeKind::UndoableLinked { previous, .. } => previous,
                 _ => unreachable!(),
             });
@@ -939,13 +941,14 @@ impl<T: Clone + fmt::Debug> Free<T> {
     }
 }
 
-impl<T: Default + Clone + fmt::Debug> Free<T> {
+impl<T: Default + Clone + fmt::Debug + Send + Sync + 'static> Free<T> {
+    #[track_caller]
     pub fn dummy_link(&self) {
         self.undoable_link(&T::default());
     }
 }
 
-impl<T: CanbeFree> Free<T> {
+impl<T: CanbeFree + Send + Clone> Free<T> {
     pub fn get_type(&self) -> Option<Type> {
         self.constraint().and_then(|c| c.get_type().cloned())
     }

View file

@@ -13,6 +13,7 @@ pub mod typaram;
 pub mod value;
 pub mod vis;
 
+use std::cell::RefMut;
 use std::fmt;
 use std::ops::{BitAnd, BitOr, Deref, Not, Range, RangeInclusive};
 use std::path::PathBuf;
@@ -23,7 +24,6 @@ use erg_common::fresh::FRESH_GEN;
 #[allow(unused_imports)]
 use erg_common::log;
 use erg_common::set::Set;
-use erg_common::shared::{MappedRwLockWriteGuard, RwLockWriteGuard};
 use erg_common::traits::{LimitedDisplay, Locational, StructuralEq};
 use erg_common::{enum_unwrap, fmt_option, ref_addr_eq, set, Str};
 
@@ -2782,12 +2782,12 @@ impl Type {
         }
     }
 
-    pub fn tyvar_mut_return_t(&mut self) -> Option<MappedRwLockWriteGuard<Type>> {
+    pub fn tyvar_mut_return_t(&mut self) -> Option<RefMut<Type>> {
         match self {
             Self::FreeVar(fv)
                 if fv.is_linked() && fv.get_linked().unwrap().return_t().is_some() =>
             {
-                Some(RwLockWriteGuard::map(fv.borrow_mut(), |fk| {
+                Some(RefMut::map(fv.borrow_mut(), |fk| {
                     fk.linked_mut().unwrap().mut_return_t().unwrap()
                 }))
             }

View file

@@ -236,7 +236,7 @@ impl Default for VarInfo {
 }
 
 impl VarInfo {
-    pub const ILLEGAL: &'static Self = &Self::const_default_private();
+    pub const ILLEGAL: Self = Self::const_default_private();
 
     pub const fn const_default_private() -> Self {
         Self::new(

View file

@@ -0,0 +1,2 @@
+a = import "a"
+b = import "b"

View file

@@ -243,6 +243,11 @@ fn exec_unpack() -> Result<(), ()> {
     expect_success("examples/unpack.er", 0)
 }
 
+#[test]
+fn exec_unused_import() -> Result<(), ()> {
+    expect_success("tests/should_ok/many_import/unused_import.er", 2)
+}
+
 #[test]
 fn exec_use_py() -> Result<(), ()> {
     expect_success("examples/use_py.er", 0)