Mirror of https://github.com/erg-lang/erg.git (synced 2025-10-03 05:54:33 +00:00)

Commit 44e3d6f343: Merge branch 'main' into torch_d

10 changed files with 96 additions and 72 deletions
build.rs (4 changed lines)

@@ -1,7 +1,9 @@
 use erg_common::python_util::{env_magic_number, env_python_version};

 fn main() -> std::io::Result<()> {
-    let version = env_python_version();
+    let Some(version) = env_python_version() else {
+        panic!("Failed to get python version");
+    };
     if version.major != 3 {
         panic!("Python 3 is required");
     }
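Note: env_python_version() now returns an Option instead of panicking internally, so build.rs binds it with let-else. A standalone sketch of the same pattern, with a stand-in probe function rather than the real erg_common API:

    // Standalone illustration of the let-else pattern used in build.rs above.
    // `probe_python_version` is a stand-in, not the erg_common API.
    fn probe_python_version() -> Option<(u32, u32)> {
        let s = std::env::var("PYTHON_VERSION").ok()?; // e.g. "3.11"
        let mut it = s.split('.');
        Some((it.next()?.parse().ok()?, it.next()?.parse().ok()?))
    }

    fn main() {
        // The else branch of `let ... else` must diverge (panic!, return, ...).
        let Some((major, minor)) = probe_python_version() else {
            panic!("Failed to get python version");
        };
        if major != 3 {
            panic!("Python 3 is required");
        }
        println!("building against Python {major}.{minor}");
    }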
@@ -136,33 +136,27 @@ impl SendChannels {
     }

     pub(crate) fn close(&self) {
-        self.completion.send(WorkerMessage::Kill).unwrap();
-        self.resolve_completion.send(WorkerMessage::Kill).unwrap();
-        self.goto_definition.send(WorkerMessage::Kill).unwrap();
-        self.semantic_tokens_full.send(WorkerMessage::Kill).unwrap();
-        self.inlay_hint.send(WorkerMessage::Kill).unwrap();
-        self.inlay_hint_resolve.send(WorkerMessage::Kill).unwrap();
-        self.hover.send(WorkerMessage::Kill).unwrap();
-        self.references.send(WorkerMessage::Kill).unwrap();
-        self.code_lens.send(WorkerMessage::Kill).unwrap();
-        self.code_action.send(WorkerMessage::Kill).unwrap();
-        self.code_action_resolve.send(WorkerMessage::Kill).unwrap();
-        self.signature_help.send(WorkerMessage::Kill).unwrap();
-        self.will_rename_files.send(WorkerMessage::Kill).unwrap();
-        self.execute_command.send(WorkerMessage::Kill).unwrap();
-        self.workspace_symbol.send(WorkerMessage::Kill).unwrap();
-        self.document_symbol.send(WorkerMessage::Kill).unwrap();
-        self.call_hierarchy_prepare
-            .send(WorkerMessage::Kill)
-            .unwrap();
-        self.call_hierarchy_incoming
-            .send(WorkerMessage::Kill)
-            .unwrap();
-        self.call_hierarchy_outgoing
-            .send(WorkerMessage::Kill)
-            .unwrap();
-        self.folding_range.send(WorkerMessage::Kill).unwrap();
-        self.health_check.send(WorkerMessage::Kill).unwrap();
+        let _ = self.completion.send(WorkerMessage::Kill);
+        let _ = self.resolve_completion.send(WorkerMessage::Kill);
+        let _ = self.goto_definition.send(WorkerMessage::Kill);
+        let _ = self.semantic_tokens_full.send(WorkerMessage::Kill);
+        let _ = self.inlay_hint.send(WorkerMessage::Kill);
+        let _ = self.inlay_hint_resolve.send(WorkerMessage::Kill);
+        let _ = self.hover.send(WorkerMessage::Kill);
+        let _ = self.references.send(WorkerMessage::Kill);
+        let _ = self.code_lens.send(WorkerMessage::Kill);
+        let _ = self.code_action.send(WorkerMessage::Kill);
+        let _ = self.code_action_resolve.send(WorkerMessage::Kill);
+        let _ = self.signature_help.send(WorkerMessage::Kill);
+        let _ = self.will_rename_files.send(WorkerMessage::Kill);
+        let _ = self.execute_command.send(WorkerMessage::Kill);
+        let _ = self.workspace_symbol.send(WorkerMessage::Kill);
+        let _ = self.document_symbol.send(WorkerMessage::Kill);
+        let _ = self.call_hierarchy_prepare.send(WorkerMessage::Kill);
+        let _ = self.call_hierarchy_incoming.send(WorkerMessage::Kill);
+        let _ = self.call_hierarchy_outgoing.send(WorkerMessage::Kill);
+        let _ = self.folding_range.send(WorkerMessage::Kill);
+        let _ = self.health_check.send(WorkerMessage::Kill);
     }
 }

@@ -195,7 +189,11 @@ pub struct ReceiveChannels {
 }

 pub trait Sendable<R: lsp_types::request::Request + 'static> {
-    fn send(&self, id: i64, params: R::Params);
+    fn send(
+        &self,
+        id: i64,
+        params: R::Params,
+    ) -> Result<(), mpsc::SendError<WorkerMessage<R::Params>>>;
 }

 macro_rules! impl_sendable {
@@ -203,13 +201,16 @@ macro_rules! impl_sendable {
         impl<Checker: BuildRunnable, Parser: Parsable> Sendable<$Request>
             for Server<Checker, Parser>
         {
-            fn send(&self, id: i64, params: $Params) {
+            fn send(
+                &self,
+                id: i64,
+                params: $Params,
+            ) -> Result<(), mpsc::SendError<WorkerMessage<$Params>>> {
                 self.channels
                     .as_ref()
                     .unwrap()
                     .$receiver
                     .send($crate::channels::WorkerMessage::Request(id, params))
-                    .unwrap();
             }
         }
     };
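Note: SendChannels::close now ignores SendError with `let _ =`, so closing channels whose workers have already exited cannot panic, and Sendable::send hands the error back to its caller instead of unwrapping. A self-contained sketch of both halves using a plain std mpsc channel; the message and helper names here are illustrative, not the ELS types:

    use std::sync::mpsc;
    use std::thread;

    // Illustrative stand-in for the ELS worker messages.
    enum WorkerMessage {
        Kill,
        Request(i64, String),
    }

    // Like the reworked Sendable::send: return the SendError to the caller.
    fn send_request(
        tx: &mpsc::Sender<WorkerMessage>,
        id: i64,
        params: String,
    ) -> Result<(), mpsc::SendError<WorkerMessage>> {
        tx.send(WorkerMessage::Request(id, params))
    }

    fn main() {
        let (tx, rx) = mpsc::channel::<WorkerMessage>();
        let worker = thread::spawn(move || {
            while let Ok(msg) = rx.recv() {
                match msg {
                    WorkerMessage::Kill => break,
                    WorkerMessage::Request(id, params) => println!("request {id}: {params}"),
                }
            }
        });

        send_request(&tx, 1, "hello".to_string()).expect("worker is alive");
        // Like the reworked close(): ignore the result, a dead worker is fine here.
        let _ = tx.send(WorkerMessage::Kill);
        worker.join().unwrap();
        // The receiver is gone now; this send fails, but no longer panics.
        let _ = tx.send(WorkerMessage::Kill);
    }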
@@ -29,6 +29,7 @@ use crate::util::{self, NormalizedUrl};
 impl<Checker: BuildRunnable, Parser: Parsable> Server<Checker, Parser> {
     pub(crate) fn rename(&mut self, msg: &Value) -> ELSResult<()> {
         let params = RenameParams::deserialize(&msg["params"])?;
+        let id = msg["id"].as_i64().unwrap();
         self.send_log(format!("rename request: {params:?}"))?;
         let uri = NormalizedUrl::new(params.text_document_position.text_document.uri);
         let pos = params.text_document_position.position;
@@ -66,9 +67,7 @@ impl<Checker: BuildRunnable, Parser: Parsable> Server<Checker, Parser> {
                 _ => format!("this {kind} cannot be renamed"),
             };
             let edit = WorkspaceEdit::new(changes);
-            self.send_stdout(
-                &json!({ "jsonrpc": "2.0", "id": msg["id"].as_i64().unwrap(), "result": edit }),
-            )?;
+            self.send_stdout(&json!({ "jsonrpc": "2.0", "id": id, "result": edit }))?;
             return self.send_error_info(error_reason);
         }
         Self::commit_change(&mut changes, &vi.def_loc, params.new_name.clone());
@@ -88,9 +87,7 @@ impl<Checker: BuildRunnable, Parser: Parsable> Server<Checker, Parser> {
         }
         let timestamps = self.get_timestamps(changes.keys());
         let edit = WorkspaceEdit::new(changes);
-        self.send_stdout(
-            &json!({ "jsonrpc": "2.0", "id": msg["id"].as_i64().unwrap(), "result": edit }),
-        )?;
+        self.send_stdout(&json!({ "jsonrpc": "2.0", "id": id, "result": edit }))?;
         for _ in 0..20 {
             self.send_log("waiting for file to be modified...")?;
             if self.all_changed(&timestamps) {
@@ -108,9 +105,7 @@ impl<Checker: BuildRunnable, Parser: Parsable> Server<Checker, Parser> {
                 return Ok(());
             }
         }
-        self.send_stdout(
-            &json!({ "jsonrpc": "2.0", "id": msg["id"].as_i64().unwrap(), "result": Value::Null }),
-        )
+        self.send_stdout(&json!({ "jsonrpc": "2.0", "id": id, "result": Value::Null }))
     }

     fn commit_change(
@@ -169,7 +164,11 @@ impl<Checker: BuildRunnable, Parser: Parsable> Server<Checker, Parser> {
             .ref_inner()
             .iter()
             .filter(|node| node.id == path || self_node.depends_on(&node.id))
-            .map(|node| NormalizedUrl::new(Url::from_file_path(node.id.to_path_buf()).unwrap()))
+            .filter_map(|node| {
+                Some(NormalizedUrl::new(
+                    Url::from_file_path(node.id.to_path_buf()).ok()?,
+                ))
+            })
             .collect()
     }

@@ -181,7 +180,11 @@ impl<Checker: BuildRunnable, Parser: Parsable> Server<Checker, Parser> {
             .ref_inner()
             .iter()
             .filter(|node| node.depends_on(&path))
-            .map(|node| NormalizedUrl::new(Url::from_file_path(node.id.to_path_buf()).unwrap()))
+            .filter_map(|node| {
+                Some(NormalizedUrl::new(
+                    Url::from_file_path(node.id.to_path_buf()).ok()?,
+                ))
+            })
             .collect()
     }
 }
@@ -192,26 +195,29 @@ impl<Checker: BuildRunnable, Parser: Parsable> Server<Checker, Parser> {
         old_uri: &NormalizedUrl,
         new_uri: &NormalizedUrl,
     ) -> HashMap<Url, Vec<TextEdit>> {
+        let mut changes = HashMap::new();
         let old_path = util::uri_to_path(old_uri)
             .file_stem()
-            .unwrap()
+            .unwrap_or_default()
             .to_string_lossy()
             .to_string();
         let old_path = old_path.trim_end_matches(".d");
         let new_path = util::uri_to_path(new_uri)
             .file_stem()
-            .unwrap()
+            .unwrap_or_default()
             .to_string_lossy()
             .to_string();
+        if old_path.is_empty() || new_path.is_empty() {
+            return changes;
+        }
         let new_path = new_path.trim_end_matches(".d");
-        let mut changes = HashMap::new();
         for dep in self.dependents_of(old_uri) {
             let imports = self.search_imports(&dep, old_path);
-            let edits = imports.iter().map(|lit| {
-                TextEdit::new(
-                    util::loc_to_range(lit.loc()).unwrap(),
+            let edits = imports.iter().filter_map(|lit| {
+                Some(TextEdit::new(
+                    util::loc_to_range(lit.loc())?,
                     lit.token.content.replace(old_path, new_path),
-                )
+                ))
             });
             changes.insert(dep.raw(), edits.collect());
         }
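Note: the hunks above replace map(...).unwrap() with filter_map and `?` inside a closure returning Option, so a single unconvertible element is skipped instead of panicking the whole request. The idiom in isolation (the paths are just examples):

    use std::path::{Path, PathBuf};

    // Canonicalize a list of paths, silently dropping the ones that fail,
    // instead of unwrapping and panicking on the first bad entry.
    fn canonical_paths(paths: &[&str]) -> Vec<PathBuf> {
        paths
            .iter()
            .copied()
            .filter_map(|p| {
                // `?` in a closure returning Option skips this element on failure,
                // mirroring `Url::from_file_path(..).ok()?` in the hunks above.
                Some(Path::new(p).canonicalize().ok()?)
            })
            .collect()
    }

    fn main() {
        let found = canonical_paths(&[".", "definitely/not/a/real/path"]);
        println!("{} of 2 paths resolved", found.len());
    }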
@@ -43,12 +43,12 @@ use lsp_types::request::{
 use lsp_types::{
     CallHierarchyServerCapability, CodeActionKind, CodeActionOptions, CodeActionProviderCapability,
     CodeLensOptions, CompletionOptions, ConfigurationItem, ConfigurationParams,
-    DidChangeTextDocumentParams, DidOpenTextDocumentParams, ExecuteCommandOptions,
-    FoldingRangeProviderCapability, HoverProviderCapability, ImplementationProviderCapability,
-    InitializeParams, InitializeResult, InlayHintOptions, InlayHintServerCapabilities, OneOf,
-    Position, SemanticTokenType, SemanticTokensFullOptions, SemanticTokensLegend,
-    SemanticTokensOptions, SemanticTokensServerCapabilities, ServerCapabilities,
-    SignatureHelpOptions, WorkDoneProgressOptions,
+    DidChangeTextDocumentParams, DidOpenTextDocumentParams, DidSaveTextDocumentParams,
+    ExecuteCommandOptions, FoldingRangeProviderCapability, HoverProviderCapability,
+    ImplementationProviderCapability, InitializeParams, InitializeResult, InlayHintOptions,
+    InlayHintServerCapabilities, OneOf, Position, SemanticTokenType, SemanticTokensFullOptions,
+    SemanticTokensLegend, SemanticTokensOptions, SemanticTokensServerCapabilities,
+    ServerCapabilities, SignatureHelpOptions, WorkDoneProgressOptions,
 };

 use serde::{Deserialize, Serialize};
@@ -325,6 +325,9 @@ impl<Checker: BuildRunnable, Parser: Parsable> Server<Checker, Parser> {
                 let msg = _self.read_message().unwrap();
                 if let Err(err) = _self.dispatch(msg) {
                     lsp_log!("error: {err}");
+                    if err.to_string().contains("sending on a closed channel") {
+                        _self.restart();
+                    };
                 }
             },
             fn_name!(),
@@ -333,7 +336,7 @@ impl<Checker: BuildRunnable, Parser: Parsable> Server<Checker, Parser> {
         // recover from crash
         if handle.is_finished() {
             self.send_error_info("The compiler has crashed. Restarting...")
-                .unwrap();
+                .expect("failed to send error info to client");
             self.restart();
             let mut _self = self.clone();
             handle = spawn_new_thread(
@@ -341,6 +344,9 @@ impl<Checker: BuildRunnable, Parser: Parsable> Server<Checker, Parser> {
                     let msg = _self.read_message().unwrap();
                     if let Err(err) = _self.dispatch(msg) {
                         lsp_log!("error: {err}");
+                        if err.to_string().contains("sending on a closed channel") {
+                            _self.restart();
+                        };
                     }
                 },
                 fn_name!(),
@@ -600,8 +606,10 @@ impl<Checker: BuildRunnable, Parser: Parsable> Server<Checker, Parser> {
         }))
     }

+    /// Restart the server. Clear caches and close & reopen channels.
     #[allow(unused)]
     pub(crate) fn restart(&mut self) {
+        lsp_log!("restarting ELS");
         self.file_cache.clear();
         self.comp_cache.clear();
         self.channels.as_ref().unwrap().close();
@@ -696,7 +704,7 @@ impl<Checker: BuildRunnable, Parser: Parsable> Server<Checker, Parser> {
         Server<Checker, Parser>: Sendable<R>,
     {
         let params = R::Params::deserialize(&msg["params"])?;
-        self.send(id, params);
+        self.send(id, params)?;
         Ok(())
     }

@@ -801,8 +809,8 @@ impl<Checker: BuildRunnable, Parser: Parsable> Server<Checker, Parser> {
                 self.check_file(uri, code)
             }
             "textDocument/didSave" => {
-                let uri =
-                    NormalizedUrl::parse(msg["params"]["textDocument"]["uri"].as_str().unwrap())?;
+                let params = DidSaveTextDocumentParams::deserialize(msg["params"].clone())?;
+                let uri = NormalizedUrl::new(params.text_document.uri);
                 self.send_log(format!("{method}: {uri}"))?;
                 let code = self.file_cache.get_entire_code(&uri)?;
                 self.recheck_file(uri, code)
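Note: the dispatch loops above restart the server when the error text contains "sending on a closed channel", which is the Display message of std::sync::mpsc::SendError. A minimal demonstration of that check; matching on an error's message is exactly what the diff does, so the sketch does the same:

    use std::sync::mpsc;

    fn main() {
        let (tx, rx) = mpsc::channel::<i32>();
        drop(rx); // simulate a worker whose receiving end has already shut down

        if let Err(err) = tx.send(42) {
            // SendError displays as "sending on a closed channel".
            let msg = err.to_string();
            println!("send failed: {msg}");
            if msg.contains("sending on a closed channel") {
                println!("a real server would call restart() here");
            }
        }
    }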
@@ -163,7 +163,9 @@ pub(crate) fn get_token_from_stream(
 }

 pub(crate) fn get_metadata_from_uri(uri: &Url) -> ELSResult<Metadata> {
-    let path = uri.to_file_path().unwrap();
+    let path = uri
+        .to_file_path()
+        .map_err(|_| "failed to convert uri to path")?;
     Ok(metadata(path)?)
 }

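Note: get_metadata_from_uri now converts the unit error from to_file_path() into a string error and propagates it with `?`. A std-only sketch of that shape; ELSResult's real error type is not visible on this page, so Box<dyn Error> stands in for it, and file_uri_to_path is a stand-in for Url::to_file_path:

    use std::error::Error;
    use std::fs::{metadata, Metadata};
    use std::path::PathBuf;

    // Stand-in for the ELSResult alias used above.
    type SketchResult<T> = Result<T, Box<dyn Error>>;

    // Stand-in for Url::to_file_path(), which also returns Result<PathBuf, ()>.
    fn file_uri_to_path(uri: &str) -> Result<PathBuf, ()> {
        uri.strip_prefix("file:///").map(PathBuf::from).ok_or(())
    }

    fn metadata_from_uri(uri: &str) -> SketchResult<Metadata> {
        let path = file_uri_to_path(uri)
            .map_err(|_| "failed to convert uri to path")?; // &str -> Box<dyn Error>
        Ok(metadata(path)?)
    }

    fn main() {
        let err = metadata_from_uri("not-a-file-uri").unwrap_err();
        println!("{err}");
    }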
@@ -376,7 +376,7 @@ impl ErgConfig
                     .parse::<String>()
                     .expect("the value of `-py-command` is not a valid Python command");
                 cfg.py_magic_num = Some(detect_magic_number(&py_command));
-                cfg.target_version = Some(get_python_version(&py_command));
+                cfg.target_version = get_python_version(&py_command);
                 cfg.py_command = Some(Box::leak(py_command.into_boxed_str()));
             }
             "--hex-py-magic-num" | "--hex-python-magic-number" => {
@@ -693,7 +693,7 @@ impl std::str::FromStr for PythonVersion {
     }
 }

-pub fn get_python_version(py_command: &str) -> PythonVersion {
+pub fn get_python_version(py_command: &str) -> Option<PythonVersion> {
     let out = if cfg!(windows) {
         Command::new("cmd")
             .arg("/C")
@@ -710,19 +710,19 @@ pub fn get_python_version(py_command: &str) -> PythonVersion {
             .expect("cannot get the python version")
     };
     let s_version = String::from_utf8(out.stdout).unwrap();
-    let mut iter = s_version.split(' ');
-    let mut iter = iter.nth(1).unwrap().split('.');
+    let iter = s_version.split(' ').nth(1)?;
+    let mut iter = iter.split('.');
     let major = iter.next().and_then(|i| i.parse().ok()).unwrap_or(3);
     let minor = iter.next().and_then(|i| i.parse().ok());
     let micro = iter.next().and_then(|i| i.trim_end().parse().ok());
-    PythonVersion {
+    Some(PythonVersion {
         major,
         minor,
         micro,
-    }
+    })
 }

-pub fn env_python_version() -> PythonVersion {
+pub fn env_python_version() -> Option<PythonVersion> {
     get_python_version(&which_python())
 }

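Note: get_python_version and env_python_version now return Option<PythonVersion>, bailing out with `?` when the `python --version` output has no second whitespace-separated field. The parsing portion as a standalone function (PythonVersion is redefined locally just for this sketch):

    #[derive(Debug)]
    struct PythonVersion {
        major: u32,
        minor: Option<u32>,
        micro: Option<u32>,
    }

    // Parse output like "Python 3.11.4\n"; None when the version field is missing.
    fn parse_python_version(s_version: &str) -> Option<PythonVersion> {
        let iter = s_version.split(' ').nth(1)?; // e.g. "3.11.4\n"
        let mut iter = iter.split('.');
        let major = iter.next().and_then(|i| i.parse().ok()).unwrap_or(3);
        let minor = iter.next().and_then(|i| i.parse().ok());
        let micro = iter.next().and_then(|i| i.trim_end().parse().ok());
        Some(PythonVersion { major, minor, micro })
    }

    fn main() {
        println!("{:?}", parse_python_version("Python 3.11.4\n"));
        println!("{:?}", parse_python_version("Python"));
    }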
@@ -228,7 +228,12 @@ pub struct PyCodeGenerator {
 impl PyCodeGenerator {
     pub fn new(cfg: ErgConfig) -> Self {
         Self {
-            py_version: cfg.target_version.unwrap_or_else(env_python_version),
+            py_version: cfg.target_version.unwrap_or_else(|| {
+                let Some(version) = env_python_version() else {
+                    panic!("Failed to get python version");
+                };
+                version
+            }),
             cfg,
             str_cache: CacheSet::new(),
             prelude_loaded: false,
@@ -69,7 +69,7 @@ while! do! c < 7, do!:

 #[test]
 fn test_transpiler_embedding3() -> Result<(), ()> {
-    if env_python_version().minor < Some(10) {
+    if env_python_version().unwrap().minor < Some(10) {
         println!("skipped: {}", fn_name!());
         return Ok(());
     }
@@ -96,7 +96,7 @@ print!(i, end:=\"\")

 #[test]
 fn test_transpiler_embedding4() -> Result<(), ()> {
-    if env_python_version().minor < Some(10) {
+    if env_python_version().unwrap().minor < Some(10) {
         println!("skipped: {}", fn_name!());
         return Ok(());
     }
@@ -172,7 +172,7 @@ fn exec_fib() -> Result<(), ()> {
 #[test]
 fn exec_helloworld() -> Result<(), ()> {
     // HACK: When running the test with Windows, the exit code is 1 (the cause is unknown)
-    if cfg!(windows) && env_python_version().minor >= Some(8) {
+    if cfg!(windows) && env_python_version().unwrap().minor >= Some(8) {
         expect_end_with("examples/helloworld.er", 0, 1)
     } else {
         expect_success("examples/helloworld.er", 0)
@@ -323,7 +323,7 @@ fn exec_pattern() -> Result<(), ()> {
 #[test]
 fn exec_pyimport_test() -> Result<(), ()> {
     // HACK: When running the test with Windows, the exit code is 1 (the cause is unknown)
-    if cfg!(windows) && env_python_version().minor < Some(8) {
+    if cfg!(windows) && env_python_version().unwrap().minor < Some(8) {
         expect_end_with("tests/should_ok/pyimport.er", 2, 1)
     } else {
         expect_success("tests/should_ok/pyimport.er", 2)