Mirror of https://github.com/astral-sh/ruff.git, synced 2025-07-30 08:23:53 +00:00

Create `ruff_notebook` crate (#7039)
## Summary

This PR moves `ruff/jupyter` into its own `ruff_notebook` crate. Beyond the move itself, there were a few challenges:

1. `ruff_notebook` relies on the source-map abstraction. I've moved the source map into `ruff_diagnostics`, since it has no dependencies of its own and is used alongside diagnostics.
2. `ruff_notebook` has a couple of tests for end-to-end linting and autofixing. I had to leave these tests in `ruff` itself.
3. We had code in `ruff/jupyter` that relied on Python lexing in order to provide a more targeted error message when a user saves a `.py` file with an `.ipynb` extension. I removed this to avoid a dependency on the parser; it didn't feel worth retaining the dependency just for that.

## Test Plan

`cargo test`
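To make the first point concrete, here is a minimal sketch of the source-map surface as it lands in `ruff_diagnostics` in this diff: `SourceMarker` is now constructed through `new` and read through accessors rather than public fields. The wrapping `main` function and the literal offsets are illustrative only.

```rust
use ruff_diagnostics::{SourceMap, SourceMarker};
use ruff_text_size::TextSize;

fn main() {
    // A marker maps a position in the original source to the corresponding
    // position in the transformed (autofixed) source.
    let marker = SourceMarker::new(TextSize::from(10u32), TextSize::from(21u32));
    assert_eq!(marker.source(), TextSize::from(10u32));
    assert_eq!(marker.dest(), TextSize::from(21u32));

    // A `SourceMap` is an ordered collection of such markers; it is filled in
    // while fixes are applied and later used to shift notebook cell offsets.
    let map = SourceMap::default();
    assert!(map.markers().is_empty());
}
```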
This commit is contained in:
parent 08e246764f
commit afcd00da56

48 changed files with 274 additions and 253 deletions
@@ -129,6 +129,7 @@ At time of writing, the repository includes the following crates:
   intermediate representation. The backend for `ruff_python_formatter`.
 - `crates/ruff_index`: library crate inspired by `rustc_index`.
 - `crates/ruff_macros`: proc macro crate containing macros used by Ruff.
+- `crates/ruff_notebook`: library crate for parsing and manipulating Jupyter notebooks.
 - `crates/ruff_python_ast`: library crate containing Python-specific AST types and utilities.
 - `crates/ruff_python_codegen`: library crate containing utilities for generating Python source code.
 - `crates/ruff_python_formatter`: library crate implementing the Python formatter. Emits an
Cargo.lock (generated, 24 changed lines)

@@ -2088,6 +2088,7 @@ dependencies = [
  "ruff_diagnostics",
  "ruff_index",
  "ruff_macros",
+ "ruff_notebook",
  "ruff_python_ast",
  "ruff_python_codegen",
  "ruff_python_index",
@@ -2103,7 +2104,6 @@ dependencies = [
  "semver",
  "serde",
  "serde_json",
- "serde_with",
  "similar",
  "smallvec",
  "strum",
@@ -2115,7 +2115,6 @@ dependencies = [
  "typed-arena",
  "unicode-width",
  "unicode_names2",
- "uuid",
  "wsl",
 ]
 
@@ -2185,6 +2184,7 @@ dependencies = [
  "ruff_diagnostics",
  "ruff_formatter",
  "ruff_macros",
+ "ruff_notebook",
  "ruff_python_ast",
  "ruff_python_formatter",
  "ruff_python_stdlib",
@@ -2227,6 +2227,7 @@ dependencies = [
  "ruff_cli",
  "ruff_diagnostics",
  "ruff_formatter",
+ "ruff_notebook",
  "ruff_python_ast",
  "ruff_python_codegen",
  "ruff_python_formatter",
@@ -2292,6 +2293,25 @@ dependencies = [
  "syn 2.0.29",
 ]
 
+[[package]]
+name = "ruff_notebook"
+version = "0.0.0"
+dependencies = [
+ "anyhow",
+ "insta",
+ "itertools",
+ "once_cell",
+ "ruff_diagnostics",
+ "ruff_source_file",
+ "ruff_text_size",
+ "serde",
+ "serde_json",
+ "serde_with",
+ "test-case",
+ "thiserror",
+ "uuid",
+]
+
 [[package]]
 name = "ruff_python_ast"
 version = "0.0.0"
@@ -18,6 +18,7 @@ name = "ruff"
 ruff_cache = { path = "../ruff_cache" }
 ruff_diagnostics = { path = "../ruff_diagnostics", features = ["serde"] }
 ruff_index = { path = "../ruff_index" }
+ruff_notebook = { path = "../ruff_notebook" }
 ruff_macros = { path = "../ruff_macros" }
 ruff_python_ast = { path = "../ruff_python_ast", features = ["serde"] }
 ruff_python_codegen = { path = "../ruff_python_codegen" }
@@ -64,17 +65,15 @@ schemars = { workspace = true, optional = true }
 semver = { version = "1.0.16" }
 serde = { workspace = true }
 serde_json = { workspace = true }
-serde_with = { version = "3.0.0" }
 similar = { workspace = true }
 smallvec = { workspace = true }
 strum = { workspace = true }
 strum_macros = { workspace = true }
-thiserror = { version = "1.0.43" }
+thiserror = { workspace = true }
 toml = { workspace = true }
 typed-arena = { version = "2.0.2" }
 unicode-width = { workspace = true }
 unicode_names2 = { version = "0.6.0", git = "https://github.com/youknowone/unicode_names2.git", rev = "4ce16aa85cbcdd9cc830410f1a72ef9a235f2fde" }
-uuid = { workspace = true, features = ["v4", "fast-rng", "macro-diagnostics", "js"] }
 wsl = { version = "0.1.0" }
 
 [dev-dependencies]
@@ -4,17 +4,15 @@ use std::collections::BTreeSet;
 use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};
 use rustc_hash::{FxHashMap, FxHashSet};
 
-use ruff_diagnostics::{Diagnostic, Edit, Fix, IsolationLevel};
+use ruff_diagnostics::{Diagnostic, Edit, Fix, IsolationLevel, SourceMap};
 use ruff_source_file::Locator;
 
-use crate::autofix::source_map::SourceMap;
 use crate::linter::FixTable;
 use crate::registry::{AsRule, Rule};
 
 pub(crate) mod codemods;
 pub(crate) mod edits;
 pub(crate) mod snippet;
-pub(crate) mod source_map;
 
 pub(crate) struct FixResult {
     /// The resulting source code, after applying all fixes.
@@ -140,10 +138,9 @@ fn cmp_fix(rule1: Rule, rule2: Rule, fix1: &Fix, fix2: &Fix) -> std::cmp::Orderi
 mod tests {
     use ruff_text_size::{Ranged, TextSize};
 
-    use ruff_diagnostics::{Diagnostic, Edit, Fix};
+    use ruff_diagnostics::{Diagnostic, Edit, Fix, SourceMarker};
     use ruff_source_file::Locator;
 
-    use crate::autofix::source_map::SourceMarker;
     use crate::autofix::{apply_fixes, FixResult};
     use crate::rules::pycodestyle::rules::MissingNewlineAtEndOfFile;
 
@@ -207,14 +204,8 @@ print("hello world")
         assert_eq!(
             source_map.markers(),
             &[
-                SourceMarker {
-                    source: 10.into(),
-                    dest: 10.into(),
-                },
-                SourceMarker {
-                    source: 10.into(),
-                    dest: 21.into(),
-                },
+                SourceMarker::new(10.into(), 10.into(),),
+                SourceMarker::new(10.into(), 21.into(),),
             ]
         );
     }
@@ -250,14 +241,8 @@ class A(Bar):
         assert_eq!(
             source_map.markers(),
             &[
-                SourceMarker {
-                    source: 8.into(),
-                    dest: 8.into(),
-                },
-                SourceMarker {
-                    source: 14.into(),
-                    dest: 11.into(),
-                },
+                SourceMarker::new(8.into(), 8.into(),),
+                SourceMarker::new(14.into(), 11.into(),),
             ]
         );
     }
@@ -289,14 +274,8 @@ class A:
         assert_eq!(
             source_map.markers(),
             &[
-                SourceMarker {
-                    source: 7.into(),
-                    dest: 7.into()
-                },
-                SourceMarker {
-                    source: 15.into(),
-                    dest: 7.into()
-                }
+                SourceMarker::new(7.into(), 7.into()),
+                SourceMarker::new(15.into(), 7.into()),
             ]
         );
     }
@@ -332,22 +311,10 @@ class A(object):
         assert_eq!(
             source_map.markers(),
             &[
-                SourceMarker {
-                    source: 8.into(),
-                    dest: 8.into()
-                },
-                SourceMarker {
-                    source: 16.into(),
-                    dest: 8.into()
-                },
-                SourceMarker {
-                    source: 22.into(),
-                    dest: 14.into(),
-                },
-                SourceMarker {
-                    source: 30.into(),
-                    dest: 14.into(),
-                }
+                SourceMarker::new(8.into(), 8.into()),
+                SourceMarker::new(16.into(), 8.into()),
+                SourceMarker::new(22.into(), 14.into(),),
+                SourceMarker::new(30.into(), 14.into(),),
             ]
         );
     }
@@ -382,14 +349,8 @@ class A:
         assert_eq!(
             source_map.markers(),
             &[
-                SourceMarker {
-                    source: 7.into(),
-                    dest: 7.into(),
-                },
-                SourceMarker {
-                    source: 15.into(),
-                    dest: 7.into(),
-                }
+                SourceMarker::new(7.into(), 7.into(),),
+                SourceMarker::new(15.into(), 7.into(),),
             ]
         );
     }
@@ -20,7 +20,6 @@ mod doc_lines;
 mod docstrings;
 pub mod fs;
 mod importer;
-pub mod jupyter;
 mod lex;
 pub mod line_width;
 pub mod linter;
@@ -6,8 +6,6 @@ use anyhow::{anyhow, Result};
 use colored::Colorize;
 use itertools::Itertools;
 use log::error;
-use ruff_python_parser::lexer::LexResult;
-use ruff_python_parser::{AsMode, ParseError};
 use rustc_hash::FxHashMap;
 
 use ruff_diagnostics::Diagnostic;
@@ -15,7 +13,8 @@ use ruff_python_ast::imports::ImportMap;
 use ruff_python_ast::PySourceType;
 use ruff_python_codegen::Stylist;
 use ruff_python_index::Indexer;
+use ruff_python_parser::lexer::LexResult;
+use ruff_python_parser::{AsMode, ParseError};
 use ruff_source_file::{Locator, SourceFileBuilder};
 use ruff_text_size::Ranged;
 
@@ -609,3 +608,133 @@ This indicates a bug in `{}`. If you could open an issue at:
         );
     }
 }
+
+#[cfg(test)]
+mod tests {
+    use std::path::Path;
+
+    use anyhow::Result;
+    use test_case::test_case;
+
+    use ruff_notebook::{Notebook, NotebookError};
+
+    use crate::registry::Rule;
+    use crate::source_kind::SourceKind;
+    use crate::test::{test_contents, test_notebook_path, TestedNotebook};
+    use crate::{assert_messages, settings};
+
+    /// Construct a path to a Jupyter notebook in the `resources/test/fixtures/jupyter` directory.
+    fn notebook_path(path: impl AsRef<Path>) -> std::path::PathBuf {
+        Path::new("../ruff_notebook/resources/test/fixtures/jupyter").join(path)
+    }
+
+    #[test]
+    fn test_import_sorting() -> Result<(), NotebookError> {
+        let actual = notebook_path("isort.ipynb");
+        let expected = notebook_path("isort_expected.ipynb");
+        let TestedNotebook {
+            messages,
+            source_notebook,
+            ..
+        } = test_notebook_path(
+            &actual,
+            expected,
+            &settings::Settings::for_rule(Rule::UnsortedImports),
+        )?;
+        assert_messages!(messages, actual, source_notebook);
+        Ok(())
+    }
+
+    #[test]
+    fn test_ipy_escape_command() -> Result<(), NotebookError> {
+        let actual = notebook_path("ipy_escape_command.ipynb");
+        let expected = notebook_path("ipy_escape_command_expected.ipynb");
+        let TestedNotebook {
+            messages,
+            source_notebook,
+            ..
+        } = test_notebook_path(
+            &actual,
+            expected,
+            &settings::Settings::for_rule(Rule::UnusedImport),
+        )?;
+        assert_messages!(messages, actual, source_notebook);
+        Ok(())
+    }
+
+    #[test]
+    fn test_unused_variable() -> Result<(), NotebookError> {
+        let actual = notebook_path("unused_variable.ipynb");
+        let expected = notebook_path("unused_variable_expected.ipynb");
+        let TestedNotebook {
+            messages,
+            source_notebook,
+            ..
+        } = test_notebook_path(
+            &actual,
+            expected,
+            &settings::Settings::for_rule(Rule::UnusedVariable),
+        )?;
+        assert_messages!(messages, actual, source_notebook);
+        Ok(())
+    }
+
+    #[test]
+    fn test_json_consistency() -> Result<()> {
+        let actual_path = notebook_path("before_fix.ipynb");
+        let expected_path = notebook_path("after_fix.ipynb");
+
+        let TestedNotebook {
+            linted_notebook: fixed_notebook,
+            ..
+        } = test_notebook_path(
+            actual_path,
+            &expected_path,
+            &settings::Settings::for_rule(Rule::UnusedImport),
+        )?;
+        let mut writer = Vec::new();
+        fixed_notebook.write(&mut writer)?;
+        let actual = String::from_utf8(writer)?;
+        let expected = std::fs::read_to_string(expected_path)?;
+        assert_eq!(actual, expected);
+        Ok(())
+    }
+
+    #[test_case(Path::new("before_fix.ipynb"), true; "trailing_newline")]
+    #[test_case(Path::new("no_trailing_newline.ipynb"), false; "no_trailing_newline")]
+    fn test_trailing_newline(path: &Path, trailing_newline: bool) -> Result<()> {
+        let notebook = Notebook::from_path(&notebook_path(path))?;
+        assert_eq!(notebook.trailing_newline(), trailing_newline);
+
+        let mut writer = Vec::new();
+        notebook.write(&mut writer)?;
+        let string = String::from_utf8(writer)?;
+        assert_eq!(string.ends_with('\n'), trailing_newline);
+
+        Ok(())
+    }
+
+    // Version <4.5, don't emit cell ids
+    #[test_case(Path::new("no_cell_id.ipynb"), false; "no_cell_id")]
+    // Version 4.5, cell ids are missing and need to be added
+    #[test_case(Path::new("add_missing_cell_id.ipynb"), true; "add_missing_cell_id")]
+    fn test_cell_id(path: &Path, has_id: bool) -> Result<()> {
+        let source_notebook = Notebook::from_path(&notebook_path(path))?;
+        let source_kind = SourceKind::IpyNotebook(source_notebook);
+        let (_, transformed) = test_contents(
+            &source_kind,
+            path,
+            &settings::Settings::for_rule(Rule::UnusedImport),
+        );
+        let linted_notebook = transformed.into_owned().expect_ipy_notebook();
+        let mut writer = Vec::new();
+        linted_notebook.write(&mut writer)?;
+        let actual = String::from_utf8(writer)?;
+        if has_id {
+            assert!(actual.contains(r#""id": ""#));
+        } else {
+            assert!(!actual.contains(r#""id":"#));
+        }
+        Ok(())
+    }
+}
@@ -12,8 +12,8 @@ use ruff_python_parser::{ParseError, ParseErrorType};
 use ruff_source_file::{OneIndexed, SourceCode, SourceLocation};
 
 use crate::fs;
-use crate::jupyter::Notebook;
 use crate::source_kind::SourceKind;
+use ruff_notebook::Notebook;
 
 pub static WARNINGS: Lazy<Mutex<Vec<&'static str>>> = Lazy::new(Mutex::default);
 
@@ -4,10 +4,10 @@ use std::num::NonZeroUsize;
 
 use colored::Colorize;
 
+use ruff_notebook::{Notebook, NotebookIndex};
 use ruff_source_file::OneIndexed;
 
 use crate::fs::relativize_path;
-use crate::jupyter::{Notebook, NotebookIndex};
 use crate::message::diff::calculate_print_width;
 use crate::message::text::{MessageCodeFrame, RuleCodeAndBody};
 use crate::message::{
@@ -14,12 +14,11 @@ pub use json_lines::JsonLinesEmitter;
 pub use junit::JunitEmitter;
 pub use pylint::PylintEmitter;
 use ruff_diagnostics::{Diagnostic, DiagnosticKind, Fix};
+use ruff_notebook::Notebook;
 use ruff_source_file::{SourceFile, SourceLocation};
 use ruff_text_size::{Ranged, TextRange, TextSize};
 pub use text::TextEmitter;
 
-use crate::jupyter::Notebook;
-
 mod azure;
 mod diff;
 mod github;
@@ -7,11 +7,11 @@ use annotate_snippets::snippet::{Annotation, AnnotationType, Slice, Snippet, Sou
 use bitflags::bitflags;
 use colored::Colorize;
 
+use ruff_notebook::{Notebook, NotebookIndex};
 use ruff_source_file::{OneIndexed, SourceLocation};
 use ruff_text_size::{Ranged, TextRange, TextSize};
 
 use crate::fs::relativize_path;
-use crate::jupyter::{Notebook, NotebookIndex};
 use crate::line_width::{LineWidthBuilder, TabSize};
 use crate::message::diff::Diff;
 use crate::message::{Emitter, EmitterContext, Message};
@@ -1,13 +1,13 @@
-use ruff_python_ast::{self as ast, ElifElseClause, ExceptHandler, MatchCase, Stmt};
-use ruff_text_size::{Ranged, TextRange, TextSize};
 use std::iter::Peekable;
 use std::slice;
 
+use ruff_notebook::Notebook;
 use ruff_python_ast::statement_visitor::StatementVisitor;
+use ruff_python_ast::{self as ast, ElifElseClause, ExceptHandler, MatchCase, Stmt};
 use ruff_source_file::Locator;
+use ruff_text_size::{Ranged, TextRange, TextSize};
 
 use crate::directives::IsortDirectives;
-use crate::jupyter::Notebook;
 use crate::rules::isort::helpers;
 use crate::source_kind::SourceKind;
 
@@ -1,5 +1,5 @@
 ---
-source: crates/ruff/src/jupyter/notebook.rs
+source: crates/ruff/src/linter.rs
 ---
 isort.ipynb:cell 1:1:1: I001 [*] Import block is un-sorted or un-formatted
@@ -1,5 +1,5 @@
 ---
-source: crates/ruff/src/jupyter/notebook.rs
+source: crates/ruff/src/linter.rs
 ---
 ipy_escape_command.ipynb:cell 1:5:8: F401 [*] `os` imported but unused
@@ -1,5 +1,5 @@
 ---
-source: crates/ruff/src/jupyter/notebook.rs
+source: crates/ruff/src/linter.rs
 ---
 unused_variable.ipynb:cell 1:2:5: F841 [*] Local variable `foo1` is assigned to but never used
@@ -1,5 +1,5 @@
-use crate::autofix::source_map::SourceMap;
-use crate::jupyter::Notebook;
+use ruff_diagnostics::SourceMap;
+use ruff_notebook::Notebook;
 
 #[derive(Clone, Debug, PartialEq, is_macro::Is)]
 pub enum SourceKind {
@@ -21,7 +21,6 @@ use ruff_text_size::Ranged;
 
 use crate::autofix::{fix_file, FixResult};
 use crate::directives;
-use crate::jupyter::{Notebook, NotebookError};
 use crate::linter::{check_path, LinterResult};
 use crate::message::{Emitter, EmitterContext, Message, TextEmitter};
 use crate::packaging::detect_package_root;
@@ -29,6 +28,7 @@ use crate::registry::AsRule;
 use crate::rules::pycodestyle::rules::syntax_error;
 use crate::settings::{flags, Settings};
 use crate::source_kind::SourceKind;
+use ruff_notebook::{Notebook, NotebookError};
 
 #[cfg(not(fuzzing))]
 pub(crate) fn test_resource_path(path: impl AsRef<Path>) -> std::path::PathBuf {
@@ -25,6 +25,7 @@ ruff = { path = "../ruff", features = ["clap"] }
 ruff_cache = { path = "../ruff_cache" }
 ruff_diagnostics = { path = "../ruff_diagnostics" }
 ruff_formatter = { path = "../ruff_formatter" }
+ruff_notebook = { path = "../ruff_notebook" }
 ruff_macros = { path = "../ruff_macros" }
 ruff_python_ast = { path = "../ruff_python_ast" }
 ruff_python_formatter = { path = "../ruff_python_formatter" }
@@ -1,8 +1,8 @@
 #![cfg_attr(target_family = "wasm", allow(dead_code))]
 
-use std::fs::write;
+use std::fs::{write, File};
 use std::io;
-use std::io::Write;
+use std::io::{BufWriter, Write};
 use std::ops::AddAssign;
 #[cfg(unix)]
 use std::os::unix::fs::PermissionsExt;
@@ -16,7 +16,6 @@ use rustc_hash::FxHashMap;
 use similar::TextDiff;
 use thiserror::Error;
 
-use ruff::jupyter::{Cell, Notebook, NotebookError};
 use ruff::linter::{lint_fix, lint_only, FixTable, FixerResult, LinterResult};
 use ruff::logging::DisplayParseError;
 use ruff::message::Message;
@@ -27,6 +26,7 @@ use ruff::source_kind::SourceKind;
 use ruff::{fs, IOError, SyntaxError};
 use ruff_diagnostics::Diagnostic;
 use ruff_macros::CacheKey;
+use ruff_notebook::{Cell, Notebook, NotebookError};
 use ruff_python_ast::imports::ImportMap;
 use ruff_python_ast::{PySourceType, SourceType, TomlSourceType};
 use ruff_source_file::{LineIndex, SourceCode, SourceFileBuilder};
@@ -243,7 +243,8 @@ pub(crate) fn lint_path(
                     write(path, transformed.as_bytes())?;
                 }
                 SourceKind::IpyNotebook(notebook) => {
-                    notebook.write(path)?;
+                    let mut writer = BufWriter::new(File::create(path)?);
+                    notebook.write(&mut writer)?;
                 }
             },
             flags::FixMode::Diff => {
@@ -565,8 +566,7 @@ impl From<&SourceExtractionError> for Diagnostic {
             }
             // Syntax errors.
             SourceExtractionError::Notebook(
-                NotebookError::PythonSource(_)
-                | NotebookError::InvalidJson(_)
+                NotebookError::InvalidJson(_)
                 | NotebookError::InvalidSchema(_)
                 | NotebookError::InvalidFormat(_),
             ) => Diagnostic::new(
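The `lint_path` hunk above is the caller-side half of the `Notebook::write` change: the method no longer takes a path, so the CLI opens the file itself and hands the notebook a writer. A hedged sketch of the same pattern outside the CLI, with a hypothetical `save_notebook` helper and `anyhow` for error handling:

```rust
use std::fs::File;
use std::io::BufWriter;
use std::path::Path;

use ruff_notebook::Notebook;

// Hypothetical helper mirroring what `lint_path` now does: the caller owns
// the file handle, and the notebook only serializes into the writer.
fn save_notebook(notebook: &Notebook, path: &Path) -> anyhow::Result<()> {
    let mut writer = BufWriter::new(File::create(path)?);
    notebook.write(&mut writer)?;
    Ok(())
}
```

Taking any writer also makes it trivial to serialize into an in-memory `Vec<u8>` in tests, which is exactly what the relocated notebook tests do.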
@@ -18,6 +18,7 @@ ruff_formatter = { path = "../ruff_formatter" }
 ruff_python_ast = { path = "../ruff_python_ast" }
 ruff_python_codegen = { path = "../ruff_python_codegen" }
 ruff_python_formatter = { path = "../ruff_python_formatter" }
+ruff_notebook = { path = "../ruff_notebook" }
 ruff_python_literal = { path = "../ruff_python_literal" }
 ruff_python_parser = { path = "../ruff_python_parser" }
 ruff_python_stdlib = { path = "../ruff_python_stdlib" }
@@ -6,7 +6,6 @@ use std::path::PathBuf;
 
 use anyhow::Result;
 
-use ruff::jupyter;
 use ruff_python_codegen::round_trip;
 use ruff_python_stdlib::path::is_jupyter_notebook;
 
@@ -20,7 +19,7 @@ pub(crate) struct Args {
 pub(crate) fn main(args: &Args) -> Result<()> {
     let path = args.file.as_path();
     if is_jupyter_notebook(path) {
-        println!("{}", jupyter::round_trip(path)?);
+        println!("{}", ruff_notebook::round_trip(path)?);
     } else {
         let contents = fs::read_to_string(&args.file)?;
         println!("{}", round_trip(&contents, &args.file.to_string_lossy())?);
@@ -1,9 +1,11 @@
 pub use diagnostic::{Diagnostic, DiagnosticKind};
 pub use edit::Edit;
 pub use fix::{Applicability, Fix, IsolationLevel};
+pub use source_map::{SourceMap, SourceMarker};
 pub use violation::{AlwaysAutofixableViolation, AutofixKind, Violation};
 
 mod diagnostic;
 mod edit;
 mod fix;
+mod source_map;
 mod violation;
@@ -1,15 +1,29 @@
 use ruff_text_size::{Ranged, TextSize};
 
-use ruff_diagnostics::Edit;
+use crate::Edit;
 
 /// Lightweight sourcemap marker representing the source and destination
 /// position for an [`Edit`].
 #[derive(Debug, PartialEq, Eq)]
-pub(crate) struct SourceMarker {
+pub struct SourceMarker {
     /// Position of the marker in the original source.
-    pub(crate) source: TextSize,
+    source: TextSize,
     /// Position of the marker in the transformed code.
-    pub(crate) dest: TextSize,
+    dest: TextSize,
+}
+
+impl SourceMarker {
+    pub fn new(source: TextSize, dest: TextSize) -> Self {
+        Self { source, dest }
+    }
+
+    pub const fn source(&self) -> TextSize {
+        self.source
+    }
+
+    pub const fn dest(&self) -> TextSize {
+        self.dest
+    }
 }
 
 /// A collection of [`SourceMarker`].
@@ -18,12 +32,12 @@ pub(crate) struct SourceMarker {
 /// the transformed code. Here, only the boundaries of edits are tracked instead
 /// of every single character.
 #[derive(Default, PartialEq, Eq)]
-pub(crate) struct SourceMap(Vec<SourceMarker>);
+pub struct SourceMap(Vec<SourceMarker>);
 
 impl SourceMap {
     /// Returns a slice of all the markers in the sourcemap in the order they
     /// were added.
-    pub(crate) fn markers(&self) -> &[SourceMarker] {
+    pub fn markers(&self) -> &[SourceMarker] {
         &self.0
     }
 
@@ -31,7 +45,7 @@ impl SourceMap {
     ///
     /// The `output_length` is the length of the transformed string before the
     /// edit is applied.
-    pub(crate) fn push_start_marker(&mut self, edit: &Edit, output_length: TextSize) {
+    pub fn push_start_marker(&mut self, edit: &Edit, output_length: TextSize) {
         self.0.push(SourceMarker {
             source: edit.start(),
             dest: output_length,
@@ -42,7 +56,7 @@ impl SourceMap {
     ///
    /// The `output_length` is the length of the transformed string after the
     /// edit has been applied.
-    pub(crate) fn push_end_marker(&mut self, edit: &Edit, output_length: TextSize) {
+    pub fn push_end_marker(&mut self, edit: &Edit, output_length: TextSize) {
         if edit.is_insertion() {
             self.0.push(SourceMarker {
                 source: edit.start(),
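Since `SourceMap` and its `push_*_marker` methods are now `pub`, other crates (notably `ruff_notebook`) can both build and consume source maps. A rough sketch of how a fix-application loop records markers, loosely modeled on `apply_fixes` in `crates/ruff` (the `record_edit` helper and its arguments are illustrative):

```rust
use ruff_diagnostics::{Edit, SourceMap};
use ruff_text_size::TextSize;

// Illustrative helper: append one edit's replacement text to the output
// buffer while recording where the edit starts and ends in the new text.
fn record_edit(map: &mut SourceMap, edit: &Edit, output: &mut String, replacement: &str) {
    // Marker for the output position just before the edit is applied...
    map.push_start_marker(edit, TextSize::of(output.as_str()));
    output.push_str(replacement);
    // ...and just after it has been applied.
    map.push_end_marker(edit, TextSize::of(output.as_str()));
}
```

The notebook side then consumes the finished map through `Notebook::update`, reading the markers via the new accessors to shift its cell offsets, as the cell-offset hunk in the notebook diff below shows.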
crates/ruff_notebook/Cargo.toml (new file, 31 lines)

@@ -0,0 +1,31 @@
+[package]
+name = "ruff_notebook"
+version = "0.0.0"
+publish = false
+authors = { workspace = true }
+edition = { workspace = true }
+rust-version = { workspace = true }
+homepage = { workspace = true }
+documentation = { workspace = true }
+repository = { workspace = true }
+license = { workspace = true }
+
+[lib]
+
+[dependencies]
+ruff_diagnostics = { path = "../ruff_diagnostics" }
+ruff_source_file = { path = "../ruff_source_file" }
+ruff_text_size = { path = "../ruff_text_size" }
+
+anyhow = { workspace = true }
+itertools = { workspace = true }
+once_cell = { workspace = true }
+serde = { workspace = true }
+serde_json = { workspace = true }
+serde_with = { version = "3.0.0" }
+thiserror = { workspace = true }
+uuid = { workspace = true }
+
+[dev-dependencies]
+insta = { workspace = true }
+test-case = { workspace = true }
@@ -1,7 +1,7 @@
 use std::cmp::Ordering;
 use std::fmt::Display;
 use std::fs::File;
-use std::io::{BufReader, BufWriter, Cursor, Read, Seek, SeekFrom, Write};
+use std::io::{BufReader, Cursor, Read, Seek, SeekFrom, Write};
 use std::path::Path;
 use std::{io, iter};
 
@@ -12,14 +12,12 @@ use serde_json::error::Category;
 use thiserror::Error;
 use uuid::Uuid;
 
-use ruff_python_parser::lexer::lex;
-use ruff_python_parser::Mode;
+use ruff_diagnostics::{SourceMap, SourceMarker};
 use ruff_source_file::{NewlineWithTrailingNewline, UniversalNewlineIterator};
 use ruff_text_size::TextSize;
 
-use crate::autofix::source_map::{SourceMap, SourceMarker};
-use crate::jupyter::index::NotebookIndex;
-use crate::jupyter::schema::{Cell, RawNotebook, SortAlphabetically, SourceValue};
+use crate::index::NotebookIndex;
+use crate::schema::{Cell, RawNotebook, SortAlphabetically, SourceValue};
 
 /// Run round-trip source code generation on a given Jupyter notebook file path.
 pub fn round_trip(path: &Path) -> anyhow::Result<String> {
@@ -33,7 +31,7 @@ pub fn round_trip(path: &Path) -> anyhow::Result<String> {
     let code = notebook.source_code().to_string();
     notebook.update_cell_content(&code);
     let mut writer = Vec::new();
-    notebook.write_inner(&mut writer)?;
+    notebook.write(&mut writer)?;
     Ok(String::from_utf8(writer)?)
 }
 
@@ -99,8 +97,6 @@ pub enum NotebookError {
     Io(#[from] io::Error),
     #[error(transparent)]
     Json(serde_json::Error),
-    #[error("Expected a Jupyter Notebook, which must be internally stored as JSON, but found a Python source file: {0}")]
-    PythonSource(serde_json::Error),
     #[error("Expected a Jupyter Notebook, which must be internally stored as JSON, but this file isn't valid JSON: {0}")]
     InvalidJson(serde_json::Error),
     #[error("This file does not match the schema expected of Jupyter Notebooks: {0}")]
@@ -162,24 +158,10 @@ impl Notebook {
             // Translate the error into a diagnostic
             return Err(match err.classify() {
                 Category::Io => NotebookError::Json(err),
-                Category::Syntax | Category::Eof => {
-                    // Maybe someone saved the python sources (those with the `# %%` separator)
-                    // as jupyter notebook instead. Let's help them.
-                    let mut contents = String::new();
-                    reader
-                        .rewind()
-                        .and_then(|_| reader.read_to_string(&mut contents))?;
-
-                    // Check if tokenizing was successful and the file is non-empty
-                    if lex(&contents, Mode::Module).any(|result| result.is_err()) {
-                        NotebookError::InvalidJson(err)
-                    } else {
-                        NotebookError::PythonSource(err)
-                    }
-                }
+                Category::Syntax | Category::Eof => NotebookError::InvalidJson(err),
                 Category::Data => {
                     // We could try to read the schema version here but if this fails it's
-                    // a bug anyway
+                    // a bug anyway.
                     NotebookError::InvalidSchema(err)
                 }
             });
@@ -256,13 +238,13 @@ impl Notebook {
         // The first offset is always going to be at 0, so skip it.
         for offset in self.cell_offsets.iter_mut().skip(1).rev() {
            let closest_marker = match last_marker {
-                Some(marker) if marker.source <= *offset => marker,
+                Some(marker) if marker.source() <= *offset => marker,
                 _ => {
                     let Some(marker) = source_map
                         .markers()
                         .iter()
                         .rev()
-                        .find(|m| m.source <= *offset)
+                        .find(|marker| marker.source() <= *offset)
                     else {
                         // There are no markers above the current offset, so we can
                         // stop here.
@@ -273,9 +255,9 @@ impl Notebook {
                 }
             };
 
-            match closest_marker.source.cmp(&closest_marker.dest) {
-                Ordering::Less => *offset += closest_marker.dest - closest_marker.source,
-                Ordering::Greater => *offset -= closest_marker.source - closest_marker.dest,
+            match closest_marker.source().cmp(&closest_marker.dest()) {
+                Ordering::Less => *offset += closest_marker.dest() - closest_marker.source(),
+                Ordering::Greater => *offset -= closest_marker.source() - closest_marker.dest(),
                 Ordering::Equal => (),
             }
         }
@@ -383,18 +365,23 @@ impl Notebook {
     /// The index is built only once when required. This is only used to
     /// report diagnostics, so by that time all of the autofixes must have
     /// been applied if `--fix` was passed.
-    pub(crate) fn index(&self) -> &NotebookIndex {
+    pub fn index(&self) -> &NotebookIndex {
         self.index.get_or_init(|| self.build_index())
     }
 
     /// Return the cell offsets for the concatenated source code corresponding
     /// the Jupyter notebook.
-    pub(crate) fn cell_offsets(&self) -> &[TextSize] {
+    pub fn cell_offsets(&self) -> &[TextSize] {
         &self.cell_offsets
     }
 
+    /// Return `true` if the notebook has a trailing newline, `false` otherwise.
+    pub fn trailing_newline(&self) -> bool {
+        self.trailing_newline
+    }
+
     /// Update the notebook with the given sourcemap and transformed content.
-    pub(crate) fn update(&mut self, source_map: &SourceMap, transformed: String) {
+    pub fn update(&mut self, source_map: &SourceMap, transformed: String) {
         // Cell offsets must be updated before updating the cell content as
         // it depends on the offsets to extract the cell content.
         self.index.take();
@@ -417,7 +404,8 @@ impl Notebook {
             .map_or(true, |language| language.name == "python")
     }
 
-    fn write_inner(&self, writer: &mut impl Write) -> anyhow::Result<()> {
+    /// Write the notebook back to the given [`Write`] implementor.
+    pub fn write(&self, writer: &mut dyn Write) -> anyhow::Result<()> {
         // https://github.com/psf/black/blob/69ca0a4c7a365c5f5eea519a90980bab72cab764/src/black/__init__.py#LL1041
         let formatter = serde_json::ser::PrettyFormatter::with_indent(b" ");
         let mut serializer = serde_json::Serializer::with_formatter(writer, formatter);
@@ -427,13 +415,6 @@ impl Notebook {
         }
         Ok(())
     }
-
-    /// Write back with an indent of 1, just like black
-    pub fn write(&self, path: &Path) -> anyhow::Result<()> {
-        let mut writer = BufWriter::new(File::create(path)?);
-        self.write_inner(&mut writer)?;
-        Ok(())
-    }
 }
 
 #[cfg(test)]
@@ -443,17 +424,11 @@ mod tests {
     use anyhow::Result;
     use test_case::test_case;
 
-    use crate::jupyter::index::NotebookIndex;
-    use crate::jupyter::schema::Cell;
-    use crate::jupyter::{Notebook, NotebookError};
-    use crate::registry::Rule;
-    use crate::source_kind::SourceKind;
-    use crate::test::{test_contents, test_notebook_path, test_resource_path, TestedNotebook};
-    use crate::{assert_messages, settings};
+    use crate::{Cell, Notebook, NotebookError, NotebookIndex};
 
     /// Construct a path to a Jupyter notebook in the `resources/test/fixtures/jupyter` directory.
     fn notebook_path(path: impl AsRef<Path>) -> std::path::PathBuf {
-        test_resource_path("fixtures/jupyter").join(path)
+        Path::new("./resources/test/fixtures/jupyter").join(path)
     }
 
     #[test]
@@ -474,7 +449,7 @@ mod tests {
     fn test_invalid() {
         assert!(matches!(
             Notebook::from_path(&notebook_path("invalid_extension.ipynb")),
-            Err(NotebookError::PythonSource(_))
+            Err(NotebookError::InvalidJson(_))
         ));
         assert!(matches!(
             Notebook::from_path(&notebook_path("not_json.ipynb")),
@@ -545,114 +520,4 @@ print("after empty cells")
         );
         Ok(())
     }
-
-    #[test]
-    fn test_import_sorting() -> Result<(), NotebookError> {
-        let actual = notebook_path("isort.ipynb");
-        let expected = notebook_path("isort_expected.ipynb");
-        let TestedNotebook {
-            messages,
-            source_notebook,
-            ..
-        } = test_notebook_path(
-            &actual,
-            expected,
-            &settings::Settings::for_rule(Rule::UnsortedImports),
-        )?;
-        assert_messages!(messages, actual, source_notebook);
-        Ok(())
-    }
-
-    #[test]
-    fn test_ipy_escape_command() -> Result<(), NotebookError> {
-        let actual = notebook_path("ipy_escape_command.ipynb");
-        let expected = notebook_path("ipy_escape_command_expected.ipynb");
-        let TestedNotebook {
-            messages,
-            source_notebook,
-            ..
-        } = test_notebook_path(
-            &actual,
-            expected,
-            &settings::Settings::for_rule(Rule::UnusedImport),
-        )?;
-        assert_messages!(messages, actual, source_notebook);
-        Ok(())
-    }
-
-    #[test]
-    fn test_unused_variable() -> Result<(), NotebookError> {
-        let actual = notebook_path("unused_variable.ipynb");
-        let expected = notebook_path("unused_variable_expected.ipynb");
-        let TestedNotebook {
-            messages,
-            source_notebook,
-            ..
-        } = test_notebook_path(
-            &actual,
-            expected,
-            &settings::Settings::for_rule(Rule::UnusedVariable),
-        )?;
-        assert_messages!(messages, actual, source_notebook);
-        Ok(())
-    }
-
-    #[test]
-    fn test_json_consistency() -> Result<()> {
-        let actual_path = notebook_path("before_fix.ipynb");
-        let expected_path = notebook_path("after_fix.ipynb");
-
-        let TestedNotebook {
-            linted_notebook: fixed_notebook,
-            ..
-        } = test_notebook_path(
-            actual_path,
-            &expected_path,
-            &settings::Settings::for_rule(Rule::UnusedImport),
-        )?;
-        let mut writer = Vec::new();
-        fixed_notebook.write_inner(&mut writer)?;
-        let actual = String::from_utf8(writer)?;
-        let expected = std::fs::read_to_string(expected_path)?;
-        assert_eq!(actual, expected);
-        Ok(())
-    }
-
-    #[test_case(Path::new("before_fix.ipynb"), true; "trailing_newline")]
-    #[test_case(Path::new("no_trailing_newline.ipynb"), false; "no_trailing_newline")]
-    fn test_trailing_newline(path: &Path, trailing_newline: bool) -> Result<()> {
-        let notebook = Notebook::from_path(&notebook_path(path))?;
-        assert_eq!(notebook.trailing_newline, trailing_newline);
-
-        let mut writer = Vec::new();
-        notebook.write_inner(&mut writer)?;
-        let string = String::from_utf8(writer)?;
-        assert_eq!(string.ends_with('\n'), trailing_newline);
-
-        Ok(())
-    }
-
-    // Version <4.5, don't emit cell ids
-    #[test_case(Path::new("no_cell_id.ipynb"), false; "no_cell_id")]
-    // Version 4.5, cell ids are missing and need to be added
-    #[test_case(Path::new("add_missing_cell_id.ipynb"), true; "add_missing_cell_id")]
-    fn test_cell_id(path: &Path, has_id: bool) -> Result<()> {
-        let source_notebook = Notebook::from_path(&notebook_path(path))?;
-        let source_kind = SourceKind::IpyNotebook(source_notebook);
-        let (_, transformed) = test_contents(
-            &source_kind,
-            path,
-            &settings::Settings::for_rule(Rule::UnusedImport),
-        );
-        let linted_notebook = transformed.into_owned().expect_ipy_notebook();
-        let mut writer = Vec::new();
-        linted_notebook.write_inner(&mut writer)?;
-        let actual = String::from_utf8(writer)?;
-        if has_id {
-            assert!(actual.contains(r#""id": ""#));
-        } else {
-            assert!(!actual.contains(r#""id":"#));
-        }
-        Ok(())
-    }
 }
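One behavioral consequence of the move, noted in the summary: with the lexer check gone, a `.py` file saved under an `.ipynb` extension now surfaces as `NotebookError::InvalidJson` rather than the removed `PythonSource` variant. A hedged sketch of how a caller might map the remaining variants to messages (the `describe` function and its strings are illustrative, and the match assumes the variant set shown in this diff):

```rust
use ruff_notebook::NotebookError;

// Illustrative only: map each error variant shown in the diff to a short
// human-readable description.
fn describe(err: &NotebookError) -> &'static str {
    match err {
        NotebookError::InvalidJson(_) => {
            "not valid JSON (possibly a Python source file renamed to .ipynb)"
        }
        NotebookError::InvalidSchema(_) => "valid JSON, but not a Jupyter notebook schema",
        NotebookError::InvalidFormat(_) => "unsupported notebook format version",
        NotebookError::Json(_) | NotebookError::Io(_) => "I/O or serialization failure",
        // Guard against any variants not visible in this diff.
        _ => "other notebook error",
    }
}
```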
@@ -46,7 +46,7 @@ fn sort_alphabetically<T: Serialize, S: serde::Serializer>(
 ///
 /// use serde::Serialize;
 ///
-/// use ruff::jupyter::SortAlphabetically;
+/// use ruff_notebook::SortAlphabetically;
 ///
 /// #[derive(Serialize)]
 /// struct MyStruct {