ruff server: Support Jupyter Notebook (*.ipynb) files (#11206)

## Summary

Closes https://github.com/astral-sh/ruff/issues/10858.

`ruff server` now supports `*.ipynb` (aka Jupyter Notebook) files.
Extensive internal changes have been made to facilitate this, which I've
contextualized with documentation and a pre-review that highlights
notable sections of the code.

`*.ipynb` cells should behave similarly to `*.py` documents, with one
major exception: the format command `ruff.applyFormat` only applies to
the currently selected notebook cell. If you want to format an entire
notebook document, use `Format Notebook` from the VS Code context menu.

## Test Plan

The VS Code extension does not yet have Jupyter Notebook support
enabled, so you'll first need to enable it manually. To do this, check
out the `pre-release` branch and modify `src/common/server.ts` as
follows:

Before:
![Screenshot 2024-05-13 at 10 59 06 PM](c6a3c604-c405-4968-b8a2-5d670de89172)

After:
![Screenshot 2024-05-13 at 10 58 24 PM](94ab2e3d-0609-448d-9c8c-cd07c69a513b)
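
In case the screenshots above don't render, here is a rough, illustrative sketch of the general pattern involved (assuming the extension uses `vscode-languageclient`'s `documentSelector`); it is not the literal diff to `src/common/server.ts`:

```typescript
// Illustrative only - not the actual contents of src/common/server.ts.
// The general pattern for letting a vscode-languageclient-based extension
// talk to the server about notebook cells is to include the notebook-cell
// URI scheme in the document selector passed to the language client.
import { LanguageClientOptions } from "vscode-languageclient/node";

const clientOptions: LanguageClientOptions = {
  documentSelector: [
    { scheme: "file", language: "python" },
    // Hypothetical addition: notebook cells use a dedicated URI scheme.
    { scheme: "vscode-notebook-cell", language: "python" },
  ],
};
```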

I recommend testing this PR with large, complicated notebook files. I
used notebook files from [this popular
repository](https://github.com/jakevdp/PythonDataScienceHandbook/tree/master/notebooks)
in my preliminary testing.

The main thing to test is ensuring that notebook cells behave the same
as Python documents, aside from the aforementioned difference in
`ruff.applyFormat`. You should also test adding and deleting cells (in
particular, deleting all the code cells and ensuring that doesn't break
anything), changing the kind of a cell (e.g. from markup to code or vice
versa), and creating a new notebook file from scratch. Finally, you
should also test that source actions work as expected (and across the
entire notebook).

Note: `ruff.applyAutofix` and `ruff.applyOrganizeImports` are currently
broken for notebook files, and I suspect it has something to do with
https://github.com/astral-sh/ruff/issues/11248. Once this is fixed, I
will update the test plan accordingly.

---------

Co-authored-by: nolan <nolan.king90@gmail.com>
Jane Lewis 2024-05-21 15:29:30 -07:00 committed by GitHub
parent 84531d1644
commit b0731ef9cb
39 changed files with 1584 additions and 622 deletions

Cargo.lock generated

@@ -1300,8 +1300,7 @@ dependencies = [
 [[package]]
 name = "lsp-types"
 version = "0.95.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8e34d33a8e9b006cd3fc4fe69a921affa097bae4bb65f76271f4644f9a334365"
+source = "git+https://github.com/astral-sh/lsp-types.git?rev=3512a9f#3512a9f33eadc5402cfab1b8f7340824c8ca1439"
 dependencies = [
  "bitflags 1.3.2",
  "serde",
@@ -2377,6 +2376,7 @@ dependencies = [
  "ruff_diagnostics",
  "ruff_formatter",
  "ruff_linter",
+ "ruff_notebook",
  "ruff_python_ast",
  "ruff_python_codegen",
  "ruff_python_formatter",


@@ -81,7 +81,7 @@ libc = { version = "0.2.153" }
 libcst = { version = "1.1.0", default-features = false }
 log = { version = "0.4.17" }
 lsp-server = { version = "0.7.6" }
-lsp-types = { version = "0.95.0", features = ["proposed"] }
+lsp-types = { git="https://github.com/astral-sh/lsp-types.git", rev = "3512a9f", features = ["proposed"] }
 matchit = { version = "0.8.1" }
 memchr = { version = "2.7.1" }
 mimalloc = { version = "0.1.39" }


@@ -23,7 +23,7 @@ impl fmt::Display for SourceValue {
 impl Cell {
     /// Return the [`SourceValue`] of the cell.
-    pub(crate) fn source(&self) -> &SourceValue {
+    pub fn source(&self) -> &SourceValue {
         match self {
             Cell::Code(cell) => &cell.source,
             Cell::Markdown(cell) => &cell.source,


@@ -98,7 +98,7 @@ impl Notebook {
             reader.read_exact(&mut buf).is_ok_and(|()| buf[0] == b'\n')
         });
         reader.rewind()?;
-        let mut raw_notebook: RawNotebook = match serde_json::from_reader(reader.by_ref()) {
+        let raw_notebook: RawNotebook = match serde_json::from_reader(reader.by_ref()) {
             Ok(notebook) => notebook,
             Err(err) => {
                 // Translate the error into a diagnostic
@@ -113,7 +113,13 @@ impl Notebook {
                 });
             }
         };
+        Self::from_raw_notebook(raw_notebook, trailing_newline)
+    }
+
+    pub fn from_raw_notebook(
+        mut raw_notebook: RawNotebook,
+        trailing_newline: bool,
+    ) -> Result<Self, NotebookError> {
         // v4 is what everybody uses
         if raw_notebook.nbformat != 4 {
             // bail because we should have already failed at the json schema stage


@@ -21,6 +21,7 @@ ruff_python_codegen = { workspace = true }
 ruff_python_formatter = { workspace = true }
 ruff_python_index = { workspace = true }
 ruff_python_parser = { workspace = true }
+ruff_notebook = { path = "../ruff_notebook" }
 ruff_source_file = { workspace = true }
 ruff_text_size = { workspace = true }
 ruff_workspace = { workspace = true }


@@ -1,18 +1,20 @@
 //! Types and utilities for working with text, modifying source files, and `Ruff <-> LSP` type conversion.

 mod document;
+mod notebook;
 mod range;
 mod replacement;

-use std::collections::HashMap;
+use std::{collections::HashMap, path::PathBuf};

-pub use document::Document;
 pub(crate) use document::DocumentVersion;
+pub use document::TextDocument;
 use lsp_types::PositionEncodingKind;
-pub(crate) use range::{RangeExt, ToRangeExt};
+pub(crate) use notebook::NotebookDocument;
+pub(crate) use range::{NotebookRange, RangeExt, ToRangeExt};
 pub(crate) use replacement::Replacement;

-use crate::session::ResolvedClientCapabilities;
+use crate::{fix::Fixes, session::ResolvedClientCapabilities};

 /// A convenient enumeration for supported text encodings. Can be converted to [`lsp_types::PositionEncodingKind`].
 // Please maintain the order from least to greatest priority for the derived `Ord` impl.
@@ -29,6 +31,37 @@ pub enum PositionEncoding {
     UTF8,
 }

+/// A unique document ID, derived from a URL passed as part of an LSP request.
+/// This document ID can point to either be a standalone Python file, a full notebook, or a cell within a notebook.
+#[derive(Clone, Debug)]
+pub(crate) enum DocumentKey {
+    Notebook(PathBuf),
+    NotebookCell(lsp_types::Url),
+    Text(PathBuf),
+}
+
+impl DocumentKey {
+    /// Converts the key back into its original URL.
+    pub(crate) fn into_url(self) -> lsp_types::Url {
+        match self {
+            DocumentKey::NotebookCell(url) => url,
+            DocumentKey::Notebook(path) | DocumentKey::Text(path) => {
+                lsp_types::Url::from_file_path(path)
+                    .expect("file path originally from URL should convert back to URL")
+            }
+        }
+    }
+}
+
+impl std::fmt::Display for DocumentKey {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        match self {
+            Self::NotebookCell(url) => url.fmt(f),
+            Self::Notebook(path) | Self::Text(path) => path.display().fmt(f),
+        }
+    }
+}
+
 /// Tracks multi-document edits to eventually merge into a `WorkspaceEdit`.
 /// Compatible with clients that don't support `workspace.workspaceEdit.documentChanges`.
 #[derive(Debug)]
@@ -72,13 +105,25 @@ impl WorkspaceEditTracker {
         }
     }

+    /// Sets a series of [`Fixes`] for a text or notebook document.
+    pub(crate) fn set_fixes_for_document(
+        &mut self,
+        fixes: Fixes,
+        version: DocumentVersion,
+    ) -> crate::Result<()> {
+        for (uri, edits) in fixes {
+            self.set_edits_for_document(uri, version, edits)?;
+        }
+        Ok(())
+    }
+
     /// Sets the edits made to a specific document. This should only be called
     /// once for each document `uri`, and will fail if this is called for the same `uri`
     /// multiple times.
     pub(crate) fn set_edits_for_document(
         &mut self,
         uri: lsp_types::Url,
-        version: DocumentVersion,
+        _version: DocumentVersion,
         edits: Vec<lsp_types::TextEdit>,
     ) -> crate::Result<()> {
         match self {
@@ -94,7 +139,8 @@ impl WorkspaceEditTracker {
                 document_edits.push(lsp_types::TextDocumentEdit {
                     text_document: lsp_types::OptionalVersionedTextDocumentIdentifier {
                         uri,
-                        version: Some(version),
+                        // TODO(jane): Re-enable versioned edits after investigating whether it could work with notebook cells
+                        version: None,
                     },
                     edits: edits.into_iter().map(lsp_types::OneOf::Left).collect(),
                 });


@@ -7,10 +7,10 @@ use super::RangeExt;
 pub(crate) type DocumentVersion = i32;

-/// The state for an individual document in the server. Stays up-to-date
+/// The state of an individual document in the server. Stays up-to-date
 /// with changes made by the user, including unsaved changes.
 #[derive(Debug, Clone)]
-pub struct Document {
+pub struct TextDocument {
     /// The string contents of the document.
     contents: String,
     /// A computed line index for the document. This should always reflect
@@ -22,7 +22,7 @@ pub struct Document {
     version: DocumentVersion,
 }

-impl Document {
+impl TextDocument {
     pub fn new(contents: String, version: DocumentVersion) -> Self {
         let index = LineIndex::from_source_text(&contents);
         Self {


@ -0,0 +1,202 @@
use std::{collections::HashMap, hash::BuildHasherDefault};
use anyhow::Ok;
use lsp_types::{NotebookCellKind, Url};
use rustc_hash::FxHashMap;
use crate::{PositionEncoding, TextDocument};
use super::DocumentVersion;
pub(super) type CellId = usize;
/// The state of a notebook document in the server. Contains an array of cells whose
/// contents are internally represented by [`TextDocument`]s.
#[derive(Clone, Debug)]
pub(crate) struct NotebookDocument {
cells: Vec<NotebookCell>,
metadata: ruff_notebook::RawNotebookMetadata,
version: DocumentVersion,
// Used to quickly find the index of a cell for a given URL.
cell_index: FxHashMap<lsp_types::Url, CellId>,
}
/// A single cell within a notebook, which has text contents represented as a `TextDocument`.
#[derive(Clone, Debug)]
struct NotebookCell {
url: Url,
kind: NotebookCellKind,
document: TextDocument,
}
impl NotebookDocument {
pub(crate) fn new(
version: DocumentVersion,
cells: Vec<lsp_types::NotebookCell>,
metadata: serde_json::Map<String, serde_json::Value>,
cell_documents: Vec<lsp_types::TextDocumentItem>,
) -> crate::Result<Self> {
let mut cell_contents: FxHashMap<_, _> = cell_documents
.into_iter()
.map(|document| (document.uri, document.text))
.collect();
let cells: Vec<_> = cells
.into_iter()
.map(|cell| {
let contents = cell_contents.remove(&cell.document).unwrap_or_default();
NotebookCell::new(cell, contents, version)
})
.collect();
Ok(Self {
version,
cell_index: Self::make_cell_index(cells.as_slice()),
metadata: serde_json::from_value(serde_json::Value::Object(metadata))?,
cells,
})
}
/// Generates a pseudo-representation of a notebook that lacks per-cell metadata and contextual information
/// but should still work with Ruff's linter.
pub(crate) fn make_ruff_notebook(&self) -> ruff_notebook::Notebook {
let cells = self
.cells
.iter()
.map(|cell| match cell.kind {
NotebookCellKind::Code => ruff_notebook::Cell::Code(ruff_notebook::CodeCell {
execution_count: None,
id: None,
metadata: serde_json::Value::Null,
outputs: vec![],
source: ruff_notebook::SourceValue::String(
cell.document.contents().to_string(),
),
}),
NotebookCellKind::Markup => {
ruff_notebook::Cell::Markdown(ruff_notebook::MarkdownCell {
attachments: None,
id: None,
metadata: serde_json::Value::Null,
source: ruff_notebook::SourceValue::String(
cell.document.contents().to_string(),
),
})
}
})
.collect();
let raw_notebook = ruff_notebook::RawNotebook {
cells,
metadata: self.metadata.clone(),
nbformat: 4,
nbformat_minor: 5,
};
ruff_notebook::Notebook::from_raw_notebook(raw_notebook, false)
.unwrap_or_else(|err| panic!("Server notebook document could not be converted to Ruff's notebook document format: {err}"))
}
pub(crate) fn update(
&mut self,
cells: Option<lsp_types::NotebookDocumentCellChange>,
metadata_change: Option<serde_json::Map<String, serde_json::Value>>,
version: DocumentVersion,
encoding: PositionEncoding,
) -> crate::Result<()> {
self.version = version;
if let Some(lsp_types::NotebookDocumentCellChange {
structure,
data,
text_content,
}) = cells
{
if let Some(structure) = structure {
let start = structure.array.start as usize;
let delete = structure.array.delete_count as usize;
if delete > 0 {
for cell in self.cells.drain(start..start + delete) {
self.cell_index.remove(&cell.url);
}
}
for cell in structure.array.cells.into_iter().flatten().rev() {
self.cells
.insert(start, NotebookCell::new(cell, String::new(), version));
}
// register any new cells in the index and update existing ones that came after the insertion
for (i, cell) in self.cells.iter().enumerate().skip(start) {
self.cell_index.insert(cell.url.clone(), i);
}
}
if let Some(cell_data) = data {
for cell in cell_data {
if let Some(existing_cell) = self.cell_by_uri_mut(&cell.document) {
existing_cell.kind = cell.kind;
}
}
}
if let Some(content_changes) = text_content {
for content_change in content_changes {
if let Some(cell) = self.cell_by_uri_mut(&content_change.document.uri) {
cell.document
.apply_changes(content_change.changes, version, encoding);
}
}
}
}
if let Some(metadata_change) = metadata_change {
self.metadata = serde_json::from_value(serde_json::Value::Object(metadata_change))?;
}
Ok(())
}
/// Get the current version of the notebook document.
pub(crate) fn version(&self) -> DocumentVersion {
self.version
}
/// Get the URI for a cell by its index within the cell array.
pub(crate) fn cell_uri_by_index(&self, index: CellId) -> Option<&lsp_types::Url> {
self.cells.get(index).map(|cell| &cell.url)
}
/// Get the text document representing the contents of a cell by the cell URI.
pub(crate) fn cell_document_by_uri(&self, uri: &lsp_types::Url) -> Option<&TextDocument> {
self.cells
.get(*self.cell_index.get(uri)?)
.map(|cell| &cell.document)
}
/// Returns a list of cell URIs in the order they appear in the array.
pub(crate) fn urls(&self) -> impl Iterator<Item = &lsp_types::Url> {
self.cells.iter().map(|cell| &cell.url)
}
fn cell_by_uri_mut(&mut self, uri: &lsp_types::Url) -> Option<&mut NotebookCell> {
self.cells.get_mut(*self.cell_index.get(uri)?)
}
fn make_cell_index(cells: &[NotebookCell]) -> FxHashMap<lsp_types::Url, CellId> {
let mut index =
HashMap::with_capacity_and_hasher(cells.len(), BuildHasherDefault::default());
for (i, cell) in cells.iter().enumerate() {
index.insert(cell.url.clone(), i);
}
index
}
}
impl NotebookCell {
pub(crate) fn new(
cell: lsp_types::NotebookCell,
contents: String,
version: DocumentVersion,
) -> Self {
Self {
url: cell.document,
kind: cell.kind,
document: TextDocument::new(contents, version),
}
}
}


@@ -1,9 +1,16 @@
+use super::notebook;
 use super::PositionEncoding;
 use lsp_types as types;
+use ruff_notebook::NotebookIndex;
 use ruff_source_file::OneIndexed;
 use ruff_source_file::{LineIndex, SourceLocation};
 use ruff_text_size::{TextRange, TextSize};

+pub(crate) struct NotebookRange {
+    pub(crate) cell: notebook::CellId,
+    pub(crate) range: types::Range,
+}
+
 pub(crate) trait RangeExt {
     fn to_text_range(&self, text: &str, index: &LineIndex, encoding: PositionEncoding)
         -> TextRange;
@@ -11,6 +18,13 @@ pub(crate) trait RangeExt {

 pub(crate) trait ToRangeExt {
     fn to_range(&self, text: &str, index: &LineIndex, encoding: PositionEncoding) -> types::Range;
+    fn to_notebook_range(
+        &self,
+        text: &str,
+        source_index: &LineIndex,
+        notebook_index: &NotebookIndex,
+        encoding: PositionEncoding,
+    ) -> NotebookRange;
 }

 fn u32_index_to_usize(index: u32) -> usize {
@@ -83,8 +97,54 @@ impl RangeExt for lsp_types::Range {
 impl ToRangeExt for TextRange {
     fn to_range(&self, text: &str, index: &LineIndex, encoding: PositionEncoding) -> types::Range {
         types::Range {
-            start: offset_to_position(self.start(), text, index, encoding),
-            end: offset_to_position(self.end(), text, index, encoding),
+            start: source_location_to_position(&offset_to_source_location(
+                self.start(),
+                text,
+                index,
+                encoding,
+            )),
+            end: source_location_to_position(&offset_to_source_location(
+                self.end(),
+                text,
+                index,
+                encoding,
+            )),
         }
     }
+
+    fn to_notebook_range(
+        &self,
+        text: &str,
+        source_index: &LineIndex,
+        notebook_index: &NotebookIndex,
+        encoding: PositionEncoding,
+    ) -> NotebookRange {
+        let start = offset_to_source_location(self.start(), text, source_index, encoding);
+        let mut end = offset_to_source_location(self.end(), text, source_index, encoding);
+        let starting_cell = notebook_index.cell(start.row);
+        // weird edge case here - if the end of the range is where the newline after the cell got added (making it 'out of bounds')
+        // we need to move it one character back (which should place it at the end of the last line).
+        // we test this by checking if the ending offset is in a different (or nonexistent) cell compared to the cell of the starting offset.
+        if notebook_index.cell(end.row) != starting_cell {
+            end.row = end.row.saturating_sub(1);
+            end.column = offset_to_source_location(
+                self.end().checked_sub(1.into()).unwrap_or_default(),
+                text,
+                source_index,
+                encoding,
+            )
+            .column;
+        }
+        let start = source_location_to_position(&notebook_index.translate_location(&start));
+        let end = source_location_to_position(&notebook_index.translate_location(&end));
+        NotebookRange {
+            cell: starting_cell
+                .map(OneIndexed::to_zero_indexed)
+                .unwrap_or_default(),
+            range: types::Range { start, end },
+        }
+    }
 }
@@ -111,13 +171,13 @@ fn utf8_column_offset(utf16_code_unit_offset: u32, line: &str) -> TextSize {
     utf8_code_unit_offset
 }

-fn offset_to_position(
+fn offset_to_source_location(
     offset: TextSize,
     text: &str,
     index: &LineIndex,
     encoding: PositionEncoding,
-) -> types::Position {
-    let location = match encoding {
+) -> SourceLocation {
+    match encoding {
         PositionEncoding::UTF8 => {
             let row = index.line_index(offset);
             let column = offset - index.line_start(row, text);
@@ -143,8 +203,10 @@ fn offset_to_position(
             }
         }
         PositionEncoding::UTF32 => index.source_location(offset, text),
-    };
+    }
+}

+fn source_location_to_position(location: &SourceLocation) -> types::Position {
     types::Position {
         line: u32::try_from(location.row.to_zero_indexed()).expect("row usize fits in u32"),
         character: u32::try_from(location.column.to_zero_indexed())


@@ -2,28 +2,29 @@ use ruff_linter::{
     linter::{FixerResult, LinterResult},
     packaging::detect_package_root,
     settings::{flags, types::UnsafeFixes, LinterSettings},
-    source_kind::SourceKind,
 };
-use ruff_python_ast::PySourceType;
+use ruff_notebook::SourceValue;
 use ruff_source_file::LineIndex;
+use rustc_hash::FxHashMap;
 use std::borrow::Cow;

 use crate::{
     edit::{Replacement, ToRangeExt},
+    session::DocumentQuery,
     PositionEncoding,
 };

+/// A simultaneous fix made across a single text document or among an arbitrary
+/// number of notebook cells.
+pub(crate) type Fixes = FxHashMap<lsp_types::Url, Vec<lsp_types::TextEdit>>;
+
 pub(crate) fn fix_all(
-    document: &crate::edit::Document,
-    document_url: &lsp_types::Url,
+    query: &DocumentQuery,
     linter_settings: &LinterSettings,
     encoding: PositionEncoding,
-) -> crate::Result<Vec<lsp_types::TextEdit>> {
-    let source = document.contents();
-
-    let document_path = document_url
-        .to_file_path()
-        .expect("document URL should be a valid file path");
+) -> crate::Result<Fixes> {
+    let document_path = query.file_path();
+    let source_kind = query.make_source_kind();

     let package = detect_package_root(
         document_path
@@ -32,10 +33,7 @@ pub(crate) fn fix_all(
         &linter_settings.namespace_packages,
     );

-    let source_type = PySourceType::default();
-
-    // TODO(jane): Support Jupyter Notebooks
-    let source_kind = SourceKind::Python(source.to_string());
+    let source_type = query.source_type();

     // We need to iteratively apply all safe fixes onto a single file and then
     // create a diff between the modified file and the original source to use as a single workspace
@@ -48,7 +46,7 @@ pub(crate) fn fix_all(
         result: LinterResult { error, .. },
         ..
     } = ruff_linter::linter::lint_fix(
-        &document_path,
+        document_path,
         package,
         flags::Noqa::Enabled,
         UnsafeFixes::Disabled,
@@ -66,27 +64,79 @@ pub(crate) fn fix_all(
     // fast path: if `transformed` is still borrowed, no changes were made and we can return early
     if let Cow::Borrowed(_) = transformed {
-        return Ok(vec![]);
+        return Ok(Fixes::default());
     }

-    let modified = transformed.source_code();
-    let modified_index = LineIndex::from_source_text(modified);
-
-    let source_index = document.index();
-
-    let Replacement {
-        source_range,
-        modified_range,
-    } = Replacement::between(
-        source,
-        source_index.line_starts(),
-        modified,
-        modified_index.line_starts(),
-    );
-
-    Ok(vec![lsp_types::TextEdit {
-        range: source_range.to_range(source, source_index, encoding),
-        new_text: modified[modified_range].to_owned(),
-    }])
+    if let (Some(source_notebook), Some(modified_notebook)) =
+        (source_kind.as_ipy_notebook(), transformed.as_ipy_notebook())
+    {
+        fn cell_source(cell: &ruff_notebook::Cell) -> String {
+            match cell.source() {
+                SourceValue::String(string) => string.clone(),
+                SourceValue::StringArray(array) => array.join(""),
+            }
+        }
+
+        let Some(notebook) = query.as_notebook() else {
+            anyhow::bail!("Notebook document expected from notebook source kind");
+        };
+
+        let mut fixes = Fixes::default();
+
+        for ((source, modified), url) in source_notebook
+            .cells()
+            .iter()
+            .map(cell_source)
+            .zip(modified_notebook.cells().iter().map(cell_source))
+            .zip(notebook.urls())
+        {
+            let source_index = LineIndex::from_source_text(&source);
+            let modified_index = LineIndex::from_source_text(&modified);
+
+            let Replacement {
+                source_range,
+                modified_range,
+            } = Replacement::between(
+                &source,
+                source_index.line_starts(),
+                &modified,
+                modified_index.line_starts(),
+            );
+
+            fixes.insert(
+                url.clone(),
+                vec![lsp_types::TextEdit {
+                    range: source_range.to_range(
+                        source_kind.source_code(),
+                        &source_index,
+                        encoding,
+                    ),
+                    new_text: modified[modified_range].to_owned(),
+                }],
+            );
+        }
+
+        Ok(fixes)
+    } else {
+        let source_index = LineIndex::from_source_text(source_kind.source_code());
+
+        let modified = transformed.source_code();
+        let modified_index = LineIndex::from_source_text(modified);
+
+        let Replacement {
+            source_range,
+            modified_range,
+        } = Replacement::between(
+            source_kind.source_code(),
+            source_index.line_starts(),
+            modified,
+            modified_index.line_starts(),
+        );
+
+        Ok([(
+            query.make_key().into_url(),
+            vec![lsp_types::TextEdit {
+                range: source_range.to_range(source_kind.source_code(), &source_index, encoding),
+                new_text: modified[modified_range].to_owned(),
+            }],
+        )]
+        .into_iter()
+        .collect())
+    }
 }


@@ -1,29 +1,28 @@
 use ruff_formatter::PrintedRange;
+use ruff_python_ast::PySourceType;
 use ruff_python_formatter::format_module_source;
 use ruff_text_size::TextRange;
 use ruff_workspace::FormatterSettings;

-use crate::edit::Document;
+use crate::edit::TextDocument;

 pub(crate) fn format(
-    document: &Document,
+    document: &TextDocument,
+    source_type: PySourceType,
     formatter_settings: &FormatterSettings,
 ) -> crate::Result<String> {
-    // TODO(jane): support Jupyter Notebook
-    let format_options = formatter_settings
-        .to_format_options(ruff_python_ast::PySourceType::Python, document.contents());
+    let format_options = formatter_settings.to_format_options(source_type, document.contents());
     let formatted = format_module_source(document.contents(), format_options)?;
     Ok(formatted.into_code())
 }

 pub(crate) fn format_range(
-    document: &Document,
+    document: &TextDocument,
+    source_type: PySourceType,
     formatter_settings: &FormatterSettings,
     range: TextRange,
 ) -> crate::Result<PrintedRange> {
-    // TODO(jane): support Jupyter Notebook
-    let format_options = formatter_settings
-        .to_format_options(ruff_python_ast::PySourceType::Python, document.contents());
+    let format_options = formatter_settings.to_format_options(source_type, document.contents());
     Ok(ruff_python_formatter::format_range(
         document.contents(),


@@ -1,6 +1,6 @@
 //! ## The Ruff Language Server

-pub use edit::{Document, PositionEncoding};
+pub use edit::{PositionEncoding, TextDocument};
 use lsp_types::CodeActionKind;
 pub use server::Server;
@@ -19,6 +19,10 @@ pub(crate) const DIAGNOSTIC_NAME: &str = "Ruff";
 pub(crate) const SOURCE_FIX_ALL_RUFF: CodeActionKind = CodeActionKind::new("source.fixAll.ruff");
 pub(crate) const SOURCE_ORGANIZE_IMPORTS_RUFF: CodeActionKind =
     CodeActionKind::new("source.organizeImports.ruff");
+pub(crate) const NOTEBOOK_SOURCE_FIX_ALL_RUFF: CodeActionKind =
+    CodeActionKind::new("notebook.source.fixAll.ruff");
+pub(crate) const NOTEBOOK_SOURCE_ORGANIZE_IMPORTS_RUFF: CodeActionKind =
+    CodeActionKind::new("notebook.source.organizeImports.ruff");

 /// A common result type used in most cases where a
 /// result type is needed.


@@ -10,26 +10,32 @@ use ruff_linter::{
     settings::{flags, LinterSettings},
     source_kind::SourceKind,
 };
-use ruff_python_ast::PySourceType;
+use ruff_notebook::Notebook;
 use ruff_python_codegen::Stylist;
 use ruff_python_index::Indexer;
 use ruff_python_parser::AsMode;
-use ruff_source_file::Locator;
-use ruff_text_size::Ranged;
+use ruff_source_file::{LineIndex, Locator};
+use ruff_text_size::{Ranged, TextRange};
+use rustc_hash::FxHashMap;
 use serde::{Deserialize, Serialize};

-use crate::{edit::ToRangeExt, PositionEncoding, DIAGNOSTIC_NAME};
+use crate::{
+    edit::{NotebookRange, ToRangeExt},
+    session::DocumentQuery,
+    PositionEncoding, DIAGNOSTIC_NAME,
+};

 /// This is serialized on the diagnostic `data` field.
 #[derive(Serialize, Deserialize, Debug, Clone)]
 pub(crate) struct AssociatedDiagnosticData {
     pub(crate) kind: DiagnosticKind,
-    /// A possible fix for the associated diagnostic.
-    pub(crate) fix: Option<Fix>,
+    /// Edits to fix the diagnostic. If this is empty, a fix
+    /// does not exist.
+    pub(crate) edits: Vec<lsp_types::TextEdit>,
     /// The NOQA code for the diagnostic.
     pub(crate) code: String,
     /// Possible edit to add a `noqa` comment which will disable this diagnostic.
-    pub(crate) noqa_edit: Option<ruff_diagnostics::Edit>,
+    pub(crate) noqa_edit: Option<lsp_types::TextEdit>,
 }

 /// Describes a fix for `fixed_diagnostic` that may have quick fix
@@ -49,18 +55,16 @@ pub(crate) struct DiagnosticFix {
     pub(crate) noqa_edit: Option<lsp_types::TextEdit>,
 }

+/// A series of diagnostics across a single text document or an arbitrary number of notebook cells.
+pub(crate) type Diagnostics = FxHashMap<lsp_types::Url, Vec<lsp_types::Diagnostic>>;
+
 pub(crate) fn check(
-    document: &crate::edit::Document,
-    document_url: &lsp_types::Url,
+    query: &DocumentQuery,
     linter_settings: &LinterSettings,
     encoding: PositionEncoding,
-) -> Vec<lsp_types::Diagnostic> {
-    let contents = document.contents();
-    let index = document.index().clone();
-
-    let document_path = document_url
-        .to_file_path()
-        .expect("document URL should be a valid file path");
+) -> Diagnostics {
+    let document_path = query.file_path();
+    let source_kind = query.make_source_kind();

     let package = detect_package_root(
         document_path
@@ -69,16 +73,15 @@ pub(crate) fn check(
         &linter_settings.namespace_packages,
     );

-    let source_type = PySourceType::default();
-
-    // TODO(jane): Support Jupyter Notebooks
-    let source_kind = SourceKind::Python(contents.to_string());
+    let source_type = query.source_type();

     // Tokenize once.
-    let tokens = ruff_python_parser::tokenize(contents, source_type.as_mode());
+    let tokens = ruff_python_parser::tokenize(source_kind.source_code(), source_type.as_mode());
+    let index = LineIndex::from_source_text(source_kind.source_code());

     // Map row and column locations to byte slices (lazily).
-    let locator = Locator::with_index(contents, index);
+    let locator = Locator::with_index(source_kind.source_code(), index.clone());

     // Detect the current code style (lazily).
     let stylist = Stylist::from_tokens(&tokens, &locator);
@@ -90,10 +93,8 @@ pub(crate) fn check(
     let directives = extract_directives(&tokens, Flags::all(), &locator, &indexer);

     // Generate checks.
-    let LinterResult {
-        data: diagnostics, ..
-    } = check_path(
-        &document_path,
+    let LinterResult { data, .. } = check_path(
+        document_path,
         package,
         &locator,
         &stylist,
@@ -107,8 +108,8 @@ pub(crate) fn check(
     );

     let noqa_edits = generate_noqa_edits(
-        &document_path,
-        diagnostics.as_slice(),
+        document_path,
+        data.as_slice(),
         &locator,
         indexer.comment_ranges(),
         &linter_settings.external,
@@ -116,16 +117,45 @@ pub(crate) fn check(
         stylist.line_ending(),
     );

-    diagnostics
-        .into_iter()
-        .zip(noqa_edits)
-        .map(|(diagnostic, noqa_edit)| to_lsp_diagnostic(diagnostic, noqa_edit, document, encoding))
-        .collect()
-}
+    let mut diagnostics = Diagnostics::default();
+
+    // Populate all cell URLs with an empty diagnostic list.
+    // This ensures that cells without diagnostics still get updated.
+    if let Some(notebook) = query.as_notebook() {
+        for url in notebook.urls() {
+            diagnostics.entry(url.clone()).or_default();
+        }
+    }
+
+    let lsp_diagnostics = data
+        .into_iter()
+        .zip(noqa_edits)
+        .map(|(diagnostic, noqa_edit)| {
+            to_lsp_diagnostic(diagnostic, &noqa_edit, &source_kind, &index, encoding)
+        });
+
+    if let Some(notebook) = query.as_notebook() {
+        for (index, diagnostic) in lsp_diagnostics {
+            let Some(uri) = notebook.cell_uri_by_index(index) else {
+                tracing::warn!("Unable to find notebook cell at index {index}.");
+                continue;
+            };
+            diagnostics.entry(uri.clone()).or_default().push(diagnostic);
+        }
+    } else {
+        for (_, diagnostic) in lsp_diagnostics {
+            diagnostics
+                .entry(query.make_key().into_url())
+                .or_default()
+                .push(diagnostic);
+        }
+    }
+
+    diagnostics
+}

+/// Converts LSP diagnostics to a list of `DiagnosticFix`es by deserializing associated data on each diagnostic.
 pub(crate) fn fixes_for_diagnostics(
-    document: &crate::edit::Document,
-    encoding: PositionEncoding,
     diagnostics: Vec<lsp_types::Diagnostic>,
 ) -> crate::Result<Vec<DiagnosticFix>> {
     diagnostics
@@ -139,36 +169,6 @@ pub(crate) fn fixes_for_diagnostics(
                 serde_json::from_value(data).map_err(|err| {
                     anyhow::anyhow!("failed to deserialize diagnostic data: {err}")
                 })?;
-            let edits = associated_data
-                .fix
-                .map(|fix| {
-                    fix.edits()
-                        .iter()
-                        .map(|edit| lsp_types::TextEdit {
-                            range: edit.range().to_range(
-                                document.contents(),
-                                document.index(),
-                                encoding,
-                            ),
-                            new_text: edit.content().unwrap_or_default().to_string(),
-                        })
-                        .collect()
-                })
-                .unwrap_or_default();
-
-            let noqa_edit =
-                associated_data
-                    .noqa_edit
-                    .as_ref()
-                    .map(|noqa_edit| lsp_types::TextEdit {
-                        range: noqa_edit.range().to_range(
-                            document.contents(),
-                            document.index(),
-                            encoding,
-                        ),
-                        new_text: noqa_edit.content().unwrap_or_default().to_string(),
-                    });
-
             Ok(Some(DiagnosticFix {
                 fixed_diagnostic,
                 code: associated_data.code,
@@ -176,22 +176,28 @@ pub(crate) fn fixes_for_diagnostics(
                     .kind
                     .suggestion
                     .unwrap_or(associated_data.kind.name),
-                edits,
-                noqa_edit,
+                noqa_edit: associated_data.noqa_edit,
+                edits: associated_data.edits,
             }))
         })
        .filter_map(crate::Result::transpose)
        .collect()
 }

+/// Generates an LSP diagnostic with an associated cell index for the diagnostic to go in.
+/// If the source kind is a text document, the cell index will always be `0`.
 fn to_lsp_diagnostic(
     diagnostic: Diagnostic,
-    noqa_edit: Option<Edit>,
-    document: &crate::edit::Document,
+    noqa_edit: &Option<Edit>,
+    source_kind: &SourceKind,
+    index: &LineIndex,
     encoding: PositionEncoding,
-) -> lsp_types::Diagnostic {
+) -> (usize, lsp_types::Diagnostic) {
     let Diagnostic {
-        kind, range, fix, ..
+        kind,
+        range: diagnostic_range,
+        fix,
+        ..
     } = diagnostic;

     let rule = kind.rule();
@@ -200,11 +206,24 @@ fn to_lsp_diagnostic(
     let data = (fix.is_some() || noqa_edit.is_some())
         .then(|| {
-            serde_json::to_value(&AssociatedDiagnosticData {
+            let edits = fix
+                .as_ref()
+                .into_iter()
+                .flat_map(Fix::edits)
+                .map(|edit| lsp_types::TextEdit {
+                    range: diagnostic_edit_range(edit.range(), source_kind, index, encoding),
+                    new_text: edit.content().unwrap_or_default().to_string(),
+                })
+                .collect();
+            let noqa_edit = noqa_edit.as_ref().map(|noqa_edit| lsp_types::TextEdit {
+                range: diagnostic_edit_range(noqa_edit.range(), source_kind, index, encoding),
+                new_text: noqa_edit.content().unwrap_or_default().to_string(),
+            });
+            serde_json::to_value(AssociatedDiagnosticData {
                 kind: kind.clone(),
-                fix,
-                code: rule.noqa_code().to_string(),
                 noqa_edit,
+                edits,
+                code: rule.noqa_code().to_string(),
             })
             .ok()
         })
@@ -212,8 +231,25 @@ fn to_lsp_diagnostic(
     let code = rule.noqa_code().to_string();

+    let range: lsp_types::Range;
+    let cell: usize;
+
+    if let Some(notebook_index) = source_kind.as_ipy_notebook().map(Notebook::index) {
+        NotebookRange { cell, range } = diagnostic_range.to_notebook_range(
+            source_kind.source_code(),
+            index,
+            notebook_index,
+            encoding,
+        );
+    } else {
+        cell = usize::default();
+        range = diagnostic_range.to_range(source_kind.source_code(), index, encoding);
+    }
+
+    (
+        cell,
         lsp_types::Diagnostic {
-        range: range.to_range(document.contents(), document.index(), encoding),
+            range,
             severity: Some(severity(&code)),
             tags: tags(&code),
             code: Some(lsp_types::NumberOrString::String(code)),
@@ -226,6 +262,22 @@ fn to_lsp_diagnostic(
             message: kind.body,
             related_information: None,
             data,
+        },
+    )
+}
+
+fn diagnostic_edit_range(
+    range: TextRange,
+    source_kind: &SourceKind,
+    index: &LineIndex,
+    encoding: PositionEncoding,
+) -> lsp_types::Range {
+    if let Some(notebook_index) = source_kind.as_ipy_notebook().map(Notebook::index) {
+        range
+            .to_notebook_range(source_kind.source_code(), index, notebook_index, encoding)
+            .range
+    } else {
+        range.to_range(source_kind.source_code(), index, encoding)
     }
 }
} }


@@ -1,6 +1,7 @@
 //! Scheduling, I/O, and API endpoints.

 use std::num::NonZeroUsize;
+use std::path::PathBuf;

 use lsp_server as lsp;
 use lsp_types as types;
@@ -10,6 +11,9 @@ use types::CodeActionOptions;
 use types::DiagnosticOptions;
 use types::DidChangeWatchedFilesRegistrationOptions;
 use types::FileSystemWatcher;
+use types::NotebookCellSelector;
+use types::NotebookDocumentSyncOptions;
+use types::NotebookSelector;
 use types::OneOf;
 use types::TextDocumentSyncCapability;
 use types::TextDocumentSyncKind;
@@ -67,26 +71,26 @@ impl Server {
             mut workspace_settings,
         } = AllSettings::from_value(init_params.initialization_options.unwrap_or_default());

-        let mut workspace_for_uri = |uri| {
+        let mut workspace_for_path = |path: PathBuf| {
             let Some(workspace_settings) = workspace_settings.as_mut() else {
-                return (uri, ClientSettings::default());
+                return (path, ClientSettings::default());
             };
-            let settings = workspace_settings.remove(&uri).unwrap_or_else(|| {
-                tracing::warn!("No workspace settings found for {uri}");
+            let settings = workspace_settings.remove(&path).unwrap_or_else(|| {
+                tracing::warn!("No workspace settings found for {}", path.display());
                 ClientSettings::default()
             });
-            (uri, settings)
+            (path, settings)
         };

         let workspaces = init_params
             .workspace_folders
             .map(|folders| folders.into_iter().map(|folder| {
-                workspace_for_uri(folder.uri)
+                workspace_for_path(folder.uri.to_file_path().unwrap())
             }).collect())
             .or_else(|| {
                 tracing::debug!("No workspace(s) were provided during initialization. Using the current working directory as a default workspace...");
                 let uri = types::Url::from_file_path(std::env::current_dir().ok()?).ok()?;
-                Some(vec![workspace_for_uri(uri)])
+                Some(vec![workspace_for_path(uri.to_file_path().unwrap())])
             })
             .ok_or_else(|| {
                 anyhow::anyhow!("Failed to get the current working directory while creating a default workspace.")
@@ -100,7 +104,7 @@ impl Server {
                 position_encoding,
                 global_settings,
                 workspaces,
-            )?,
+            ),
             client_capabilities,
         })
     }
@@ -252,6 +256,16 @@ impl Server {
                 },
             )),
             hover_provider: Some(types::HoverProviderCapability::Simple(true)),
+            notebook_document_sync: Some(types::OneOf::Left(NotebookDocumentSyncOptions {
+                save: Some(false),
+                notebook_selector: [NotebookSelector::ByCells {
+                    notebook: None,
+                    cells: vec![NotebookCellSelector {
+                        language: "python".to_string(),
+                    }],
+                }]
+                .to_vec(),
+            })),
             text_document_sync: Some(TextDocumentSyncCapability::Options(
                 TextDocumentSyncOptions {
                     open_close: Some(true),
@@ -278,8 +292,15 @@ pub(crate) enum SupportedCodeAction {
     SourceFixAll,
     /// Maps to `source.organizeImports` and `source.organizeImports.ruff` code action kinds.
     /// This is a source action that applies import sorting fixes to the currently open document.
-    #[allow(dead_code)] // TODO: remove
     SourceOrganizeImports,
+    /// Maps to the `notebook.source.fixAll` and `notebook.source.fixAll.ruff` code action kinds.
+    /// This is a source action, specifically for notebooks, that applies all safe fixes
+    /// to the currently open document.
+    NotebookSourceFixAll,
+    /// Maps to `source.organizeImports` and `source.organizeImports.ruff` code action kinds.
+    /// This is a source action, specifically for notebooks, that applies import sorting fixes
+    /// to the currently open document.
+    NotebookSourceOrganizeImports,
 }

 impl SupportedCodeAction {
@@ -289,6 +310,8 @@ impl SupportedCodeAction {
             Self::QuickFix => CodeActionKind::QUICKFIX,
             Self::SourceFixAll => crate::SOURCE_FIX_ALL_RUFF,
             Self::SourceOrganizeImports => crate::SOURCE_ORGANIZE_IMPORTS_RUFF,
+            Self::NotebookSourceFixAll => crate::NOTEBOOK_SOURCE_FIX_ALL_RUFF,
+            Self::NotebookSourceOrganizeImports => crate::NOTEBOOK_SOURCE_ORGANIZE_IMPORTS_RUFF,
         }
     }
@@ -304,6 +327,8 @@ impl SupportedCodeAction {
             Self::QuickFix,
             Self::SourceFixAll,
             Self::SourceOrganizeImports,
+            Self::NotebookSourceFixAll,
+            Self::NotebookSourceOrganizeImports,
         ]
         .into_iter()
     }


@@ -84,6 +84,15 @@ pub(super) fn notification<'a>(notif: server::Notification) -> Task<'a> {
         }
         notification::DidClose::METHOD => local_notification_task::<notification::DidClose>(notif),
         notification::DidOpen::METHOD => local_notification_task::<notification::DidOpen>(notif),
+        notification::DidOpenNotebook::METHOD => {
+            local_notification_task::<notification::DidOpenNotebook>(notif)
+        }
+        notification::DidChangeNotebook::METHOD => {
+            local_notification_task::<notification::DidChangeNotebook>(notif)
+        }
+        notification::DidCloseNotebook::METHOD => {
+            local_notification_task::<notification::DidCloseNotebook>(notif)
+        }
         method => {
             tracing::warn!("Received notification {method} which does not have a handler.");
             return Task::nothing();


@@ -1,17 +1,20 @@
-use crate::{server::client::Notifier, session::DocumentSnapshot};
+use crate::{
+    lint::Diagnostics,
+    server::client::Notifier,
+    session::{DocumentQuery, DocumentSnapshot},
+};

 use super::LSPResult;

-pub(super) fn generate_diagnostics(snapshot: &DocumentSnapshot) -> Vec<lsp_types::Diagnostic> {
+pub(super) fn generate_diagnostics(snapshot: &DocumentSnapshot) -> Diagnostics {
     if snapshot.client_settings().lint() {
         crate::lint::check(
-            snapshot.document(),
-            snapshot.url(),
-            snapshot.settings().linter(),
+            snapshot.query(),
+            snapshot.query().settings().linter(),
             snapshot.encoding(),
         )
     } else {
-        vec![]
+        Diagnostics::default()
     }
 }
@@ -19,31 +22,31 @@ pub(super) fn publish_diagnostics_for_document(
     snapshot: &DocumentSnapshot,
     notifier: &Notifier,
 ) -> crate::server::Result<()> {
-    let diagnostics = generate_diagnostics(snapshot);
+    for (uri, diagnostics) in generate_diagnostics(snapshot) {
         notifier
             .notify::<lsp_types::notification::PublishDiagnostics>(
                 lsp_types::PublishDiagnosticsParams {
-                    uri: snapshot.url().clone(),
+                    uri,
                     diagnostics,
-                    version: Some(snapshot.document().version()),
+                    version: Some(snapshot.query().version()),
                 },
             )
             .with_failure_code(lsp_server::ErrorCode::InternalError)?;
+    }

     Ok(())
 }

 pub(super) fn clear_diagnostics_for_document(
-    snapshot: &DocumentSnapshot,
+    query: &DocumentQuery,
     notifier: &Notifier,
 ) -> crate::server::Result<()> {
     notifier
         .notify::<lsp_types::notification::PublishDiagnostics>(
             lsp_types::PublishDiagnosticsParams {
-                uri: snapshot.url().clone(),
+                uri: query.make_key().into_url(),
                 diagnostics: vec![],
-                version: Some(snapshot.document().version()),
+                version: Some(query.version()),
             },
         )
         .with_failure_code(lsp_server::ErrorCode::InternalError)?;


@@ -1,16 +1,22 @@
 mod cancel;
 mod did_change;
 mod did_change_configuration;
+mod did_change_notebook;
 mod did_change_watched_files;
 mod did_change_workspace;
 mod did_close;
+mod did_close_notebook;
 mod did_open;
+mod did_open_notebook;

 use super::traits::{NotificationHandler, SyncNotificationHandler};

 pub(super) use cancel::Cancel;
 pub(super) use did_change::DidChange;
 pub(super) use did_change_configuration::DidChangeConfiguration;
+pub(super) use did_change_notebook::DidChangeNotebook;
 pub(super) use did_change_watched_files::DidChangeWatchedFiles;
 pub(super) use did_change_workspace::DidChangeWorkspace;
 pub(super) use did_close::DidClose;
+pub(super) use did_close_notebook::DidCloseNotebook;
 pub(super) use did_open::DidOpen;
+pub(super) use did_open_notebook::DidOpenNotebook;


@@ -11,7 +11,6 @@ impl super::NotificationHandler for Cancel {
 }

 impl super::SyncNotificationHandler for Cancel {
-    #[tracing::instrument(skip_all)]
     fn run(
         _session: &mut Session,
         _notifier: Notifier,


@@ -3,6 +3,7 @@ use crate::server::api::LSPResult;
 use crate::server::client::{Notifier, Requester};
 use crate::server::Result;
 use crate::session::Session;
+use lsp_server::ErrorCode;
 use lsp_types as types;
 use lsp_types::notification as notif;
@@ -13,7 +14,6 @@ impl super::NotificationHandler for DidChange {
 }

 impl super::SyncNotificationHandler for DidChange {
-    #[tracing::instrument(skip_all, fields(file=%uri))]
     fn run(
         session: &mut Session,
         notifier: Notifier,
@@ -27,19 +27,13 @@ impl super::SyncNotificationHandler for DidChange {
             content_changes,
         }: types::DidChangeTextDocumentParams,
     ) -> Result<()> {
-        let encoding = session.encoding();
-        let document = session
-            .document_controller(&uri)
-            .with_failure_code(lsp_server::ErrorCode::InvalidParams)?;
+        let key = session
+            .key_from_url(&uri)
+            .with_failure_code(ErrorCode::InternalError)?;

-        if content_changes.is_empty() {
-            document.make_mut().update_version(new_version);
-            return Ok(());
-        }
-
-        document
-            .make_mut()
-            .apply_changes(content_changes, new_version, encoding);
+        session
+            .update_text_document(&key, content_changes, new_version)
+            .with_failure_code(ErrorCode::InternalError)?;

         // Publish diagnostics if the client doesn't support pull diagnostics
         if !session.resolved_client_capabilities().pull_diagnostics {


@ -0,0 +1,41 @@
use crate::server::api::diagnostics::publish_diagnostics_for_document;
use crate::server::api::LSPResult;
use crate::server::client::{Notifier, Requester};
use crate::server::Result;
use crate::session::Session;
use lsp_server::ErrorCode;
use lsp_types as types;
use lsp_types::notification as notif;
pub(crate) struct DidChangeNotebook;
impl super::NotificationHandler for DidChangeNotebook {
type NotificationType = notif::DidChangeNotebookDocument;
}
impl super::SyncNotificationHandler for DidChangeNotebook {
fn run(
session: &mut Session,
notifier: Notifier,
_requester: &mut Requester,
types::DidChangeNotebookDocumentParams {
notebook_document: types::VersionedNotebookDocumentIdentifier { uri, version },
change: types::NotebookDocumentChangeEvent { cells, metadata },
}: types::DidChangeNotebookDocumentParams,
) -> Result<()> {
let key = session
.key_from_url(&uri)
.with_failure_code(ErrorCode::InternalError)?;
session
.update_notebook_document(&key, cells, metadata, version)
.with_failure_code(ErrorCode::InternalError)?;
// publish new diagnostics
let snapshot = session
.take_snapshot(&uri)
.expect("snapshot should be available");
publish_diagnostics_for_document(&snapshot, &notifier)?;
Ok(())
}
}


@@ -20,9 +20,7 @@ impl super::SyncNotificationHandler for DidChangeWatchedFiles {
         params: types::DidChangeWatchedFilesParams,
     ) -> Result<()> {
         for change in &params.changes {
-            session
-                .reload_settings(&change.uri)
-                .with_failure_code(lsp_server::ErrorCode::InternalError)?;
+            session.reload_settings(&change.uri.to_file_path().unwrap());
         }

         if session.resolved_client_capabilities().workspace_refresh && !params.changes.is_empty() {


@@ -2,6 +2,8 @@ use crate::server::api::LSPResult;
 use crate::server::client::{Notifier, Requester};
 use crate::server::Result;
 use crate::session::Session;
+use anyhow::anyhow;
+use lsp_server::ErrorCode;
 use lsp_types as types;
 use lsp_types::notification as notif;
@@ -18,14 +20,21 @@ impl super::SyncNotificationHandler for DidChangeWorkspace {
         _requester: &mut Requester,
         params: types::DidChangeWorkspaceFoldersParams,
     ) -> Result<()> {
-        for new in params.event.added {
-            session
-                .open_workspace_folder(&new.uri)
-                .with_failure_code(lsp_server::ErrorCode::InvalidParams)?;
+        for types::WorkspaceFolder { ref uri, .. } in params.event.added {
+            let workspace_path = uri
+                .to_file_path()
+                .map_err(|()| anyhow!("expected document URI {uri} to be a valid file path"))
+                .with_failure_code(ErrorCode::InvalidParams)?;
+            session.open_workspace_folder(workspace_path);
         }
-        for removed in params.event.removed {
+        for types::WorkspaceFolder { ref uri, .. } in params.event.removed {
+            let workspace_path = uri
+                .to_file_path()
+                .map_err(|()| anyhow!("expected document URI {uri} to be a valid file path"))
+                .with_failure_code(ErrorCode::InvalidParams)?;
             session
-                .close_workspace_folder(&removed.uri)
+                .close_workspace_folder(&workspace_path)
                 .with_failure_code(lsp_server::ErrorCode::InvalidParams)?;
         }
         Ok(())


@@ -1,3 +1,4 @@
+use crate::edit::DocumentKey;
use crate::server::api::diagnostics::clear_diagnostics_for_document;
use crate::server::api::LSPResult;
use crate::server::client::{Notifier, Requester};
@@ -13,7 +14,6 @@ impl super::NotificationHandler for DidClose {
}
impl super::SyncNotificationHandler for DidClose {
-    #[tracing::instrument(skip_all, fields(file=%uri))]
    fn run(
        session: &mut Session,
        notifier: Notifier,
@@ -22,20 +22,24 @@ impl super::SyncNotificationHandler for DidClose {
            text_document: types::TextDocumentIdentifier { uri },
        }: types::DidCloseTextDocumentParams,
    ) -> Result<()> {
-        // Publish an empty diagnostic report for the document if the client does not support pull diagnostics.
-        // This will de-register any existing diagnostics.
-        if !session.resolved_client_capabilities().pull_diagnostics {
-            let snapshot = session
-                .take_snapshot(&uri)
-                .ok_or_else(|| {
-                    anyhow::anyhow!("Unable to take snapshot for document with URL {uri}")
-                })
-                .with_failure_code(lsp_server::ErrorCode::InternalError)?;
-            clear_diagnostics_for_document(&snapshot, &notifier)?;
+        // Publish an empty diagnostic report for the document. This will de-register any existing diagnostics.
+        let snapshot = session
+            .take_snapshot(&uri)
+            .ok_or_else(|| anyhow::anyhow!("Unable to take snapshot for document with URL {uri}"))
+            .with_failure_code(lsp_server::ErrorCode::InternalError)?;
+        clear_diagnostics_for_document(snapshot.query(), &notifier)?;
+        let key = snapshot.query().make_key();
+        // Notebook cells will go through the `textDocument/didClose` path.
+        // We still want to publish empty diagnostics for them, but we
+        // shouldn't call `session.close_document` on them.
+        if matches!(key, DocumentKey::NotebookCell(_)) {
+            return Ok(());
        }
        session
-            .close_document(&uri)
+            .close_document(&key)
            .with_failure_code(lsp_server::ErrorCode::InternalError)
    }
}
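
Note: the early return above hinges entirely on the key type. A minimal sketch of that decision, assuming the `DocumentKey` shape introduced later in this commit (cells keyed by their opaque URL, notebooks and text files keyed by path); `should_close` is a hypothetical helper, not part of the commit:

```rust
use std::path::PathBuf;

// Hypothetical stand-in for crate::edit::DocumentKey as used above.
enum DocumentKey {
    Notebook(PathBuf),
    NotebookCell(lsp_types::Url),
    Text(PathBuf),
}

// Cells are de-registered when their parent notebook closes, so a
// `textDocument/didClose` for a cell only publishes empty diagnostics.
fn should_close(key: &DocumentKey) -> bool {
    !matches!(key, DocumentKey::NotebookCell(_))
}
```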


@@ -0,0 +1,35 @@
use crate::server::api::LSPResult;
use crate::server::client::{Notifier, Requester};
use crate::server::Result;
use crate::session::Session;
use lsp_server::ErrorCode;
use lsp_types as types;
use lsp_types::notification as notif;
pub(crate) struct DidCloseNotebook;
impl super::NotificationHandler for DidCloseNotebook {
type NotificationType = notif::DidCloseNotebookDocument;
}
impl super::SyncNotificationHandler for DidCloseNotebook {
fn run(
session: &mut Session,
_notifier: Notifier,
_requester: &mut Requester,
types::DidCloseNotebookDocumentParams {
notebook_document: types::NotebookDocumentIdentifier { uri },
..
}: types::DidCloseNotebookDocumentParams,
) -> Result<()> {
let key = session
.key_from_url(&uri)
.with_failure_code(ErrorCode::InternalError)?;
session
.close_document(&key)
.with_failure_code(ErrorCode::InternalError)?;
Ok(())
}
}


@@ -3,6 +3,9 @@ use crate::server::api::LSPResult;
use crate::server::client::{Notifier, Requester};
use crate::server::Result;
use crate::session::Session;
+use crate::TextDocument;
+use anyhow::anyhow;
+use lsp_server::ErrorCode;
use lsp_types as types;
use lsp_types::notification as notif;
@@ -13,7 +16,6 @@ impl super::NotificationHandler for DidOpen {
}
impl super::SyncNotificationHandler for DidOpen {
-    #[tracing::instrument(skip_all, fields(file=%url))]
    fn run(
        session: &mut Session,
        notifier: Notifier,
@@ -21,21 +23,28 @@ impl super::SyncNotificationHandler for DidOpen {
        types::DidOpenTextDocumentParams {
            text_document:
                types::TextDocumentItem {
-                    uri: ref url,
+                    ref uri,
                    text,
                    version,
                    ..
                },
        }: types::DidOpenTextDocumentParams,
    ) -> Result<()> {
-        session.open_document(url, text, version);
+        let document_path: std::path::PathBuf = uri
+            .to_file_path()
+            .map_err(|()| anyhow!("expected document URI {uri} to be a valid file path"))
+            .with_failure_code(ErrorCode::InvalidParams)?;
+        let document = TextDocument::new(text, version);
+        session.open_text_document(document_path, document);
        // Publish diagnostics if the client doesn't support pull diagnostics
        if !session.resolved_client_capabilities().pull_diagnostics {
            let snapshot = session
-                .take_snapshot(url)
+                .take_snapshot(uri)
                .ok_or_else(|| {
-                    anyhow::anyhow!("Unable to take snapshot for document with URL {url}")
+                    anyhow::anyhow!("Unable to take snapshot for document with URL {uri}")
                })
                .with_failure_code(lsp_server::ErrorCode::InternalError)?;
            publish_diagnostics_for_document(&snapshot, &notifier)?;


@@ -0,0 +1,58 @@
use crate::edit::NotebookDocument;
use crate::server::api::diagnostics::publish_diagnostics_for_document;
use crate::server::api::LSPResult;
use crate::server::client::{Notifier, Requester};
use crate::server::Result;
use crate::session::Session;
use anyhow::anyhow;
use lsp_server::ErrorCode;
use lsp_types as types;
use lsp_types::notification as notif;
pub(crate) struct DidOpenNotebook;
impl super::NotificationHandler for DidOpenNotebook {
type NotificationType = notif::DidOpenNotebookDocument;
}
impl super::SyncNotificationHandler for DidOpenNotebook {
fn run(
session: &mut Session,
notifier: Notifier,
_requester: &mut Requester,
types::DidOpenNotebookDocumentParams {
notebook_document:
types::NotebookDocument {
uri,
version,
cells,
metadata,
..
},
cell_text_documents,
}: types::DidOpenNotebookDocumentParams,
) -> Result<()> {
let notebook = NotebookDocument::new(
version,
cells,
metadata.unwrap_or_default(),
cell_text_documents,
)
.with_failure_code(ErrorCode::InternalError)?;
let notebook_path = uri
.to_file_path()
.map_err(|()| anyhow!("expected notebook URI {uri} to be a valid file path"))
.with_failure_code(ErrorCode::InvalidParams)?;
session.open_notebook_document(notebook_path, notebook);
// publish diagnostics
let snapshot = session
.take_snapshot(&uri)
.expect("snapshot should be available");
publish_diagnostics_for_document(&snapshot, &notifier)?;
Ok(())
}
}


@@ -29,11 +29,7 @@ impl super::BackgroundDocumentRequestHandler for CodeActions {
    let supported_code_actions = supported_code_actions(params.context.only.clone());
-    let fixes = fixes_for_diagnostics(
-        snapshot.document(),
-        snapshot.encoding(),
-        params.context.diagnostics,
-    )
+    let fixes = fixes_for_diagnostics(params.context.diagnostics)
        .with_failure_code(ErrorCode::InternalError)?;
    if snapshot.client_settings().fix_violation()
@@ -61,6 +57,20 @@ impl super::BackgroundDocumentRequestHandler for CodeActions {
        response.push(organize_imports(&snapshot).with_failure_code(ErrorCode::InternalError)?);
    }
+    if snapshot.client_settings().fix_all()
+        && supported_code_actions.contains(&SupportedCodeAction::NotebookSourceFixAll)
+    {
+        response.push(notebook_fix_all(&snapshot).with_failure_code(ErrorCode::InternalError)?);
+    }
+    if snapshot.client_settings().organize_imports()
+        && supported_code_actions.contains(&SupportedCodeAction::NotebookSourceOrganizeImports)
+    {
+        response.push(
+            notebook_organize_imports(&snapshot).with_failure_code(ErrorCode::InternalError)?,
+        );
+    }
    Ok(Some(response))
    }
}
@@ -69,7 +79,8 @@ fn quick_fix(
    snapshot: &DocumentSnapshot,
    fixes: &[DiagnosticFix],
) -> crate::Result<Vec<CodeActionOrCommand>> {
-    let document = snapshot.document();
+    let document = snapshot.query();
    fixes
        .iter()
        .filter(|fix| !fix.edits.is_empty())
@@ -77,7 +88,7 @@ fn quick_fix(
    let mut tracker = WorkspaceEditTracker::new(snapshot.resolved_client_capabilities());
    tracker.set_edits_for_document(
-        snapshot.url().clone(),
+        snapshot.query().make_key().into_url(),
        document.version(),
        fix.edits.clone(),
    )?;
@@ -88,7 +99,8 @@ fn quick_fix(
    edit: Some(tracker.into_workspace_edit()),
    diagnostics: Some(vec![fix.fixed_diagnostic.clone()]),
    data: Some(
-        serde_json::to_value(snapshot.url()).expect("document url to serialize"),
+        serde_json::to_value(snapshot.query().make_key().into_url())
+            .expect("document url should serialize"),
    ),
    ..Default::default()
}))
@@ -106,8 +118,8 @@ fn noqa_comments(snapshot: &DocumentSnapshot, fixes: &[DiagnosticFix]) -> Vec<Co
    tracker
        .set_edits_for_document(
-            snapshot.url().clone(),
-            snapshot.document().version(),
+            snapshot.query().make_key().into_url(),
+            snapshot.query().version(),
            vec![edit],
        )
        .ok()?;
@@ -118,7 +130,8 @@ fn noqa_comments(snapshot: &DocumentSnapshot, fixes: &[DiagnosticFix]) -> Vec<Co
    edit: Some(tracker.into_workspace_edit()),
    diagnostics: Some(vec![fix.fixed_diagnostic.clone()]),
    data: Some(
-        serde_json::to_value(snapshot.url()).expect("document url to serialize"),
+        serde_json::to_value(snapshot.query().make_key().into_url())
+            .expect("document url should serialize"),
    ),
    ..Default::default()
}))
@@ -127,7 +140,7 @@ fn noqa_comments(snapshot: &DocumentSnapshot, fixes: &[DiagnosticFix]) -> Vec<Co
}
fn fix_all(snapshot: &DocumentSnapshot) -> crate::Result<CodeActionOrCommand> {
-    let document = snapshot.document();
+    let document = snapshot.query();
    let (edit, data) = if snapshot
        .resolved_client_capabilities()
@@ -136,17 +149,18 @@ fn fix_all(snapshot: &DocumentSnapshot) -> crate::Result<CodeActionOrCommand> {
        // The editor will request the edit in a `CodeActionsResolve` request
        (
            None,
-            Some(serde_json::to_value(snapshot.url()).expect("document url to serialize")),
+            Some(
+                serde_json::to_value(snapshot.query().make_key().into_url())
+                    .expect("document url should serialize"),
+            ),
        )
    } else {
        (
            Some(resolve_edit_for_fix_all(
                document,
                snapshot.resolved_client_capabilities(),
-                snapshot.url(),
-                snapshot.settings().linter(),
+                snapshot.query().settings().linter(),
                snapshot.encoding(),
-                document.version(),
            )?),
            None,
        )
@@ -161,8 +175,44 @@ fn fix_all(snapshot: &DocumentSnapshot) -> crate::Result<CodeActionOrCommand> {
    }))
}
+fn notebook_fix_all(snapshot: &DocumentSnapshot) -> crate::Result<CodeActionOrCommand> {
+    let document = snapshot.query();
+    let (edit, data) = if snapshot
+        .resolved_client_capabilities()
+        .code_action_deferred_edit_resolution
+    {
+        // The editor will request the edit in a `CodeActionsResolve` request
+        (
+            None,
+            Some(
+                serde_json::to_value(snapshot.query().make_key().into_url())
+                    .expect("document url should serialize"),
+            ),
+        )
+    } else {
+        (
+            Some(resolve_edit_for_fix_all(
+                document,
+                snapshot.resolved_client_capabilities(),
+                snapshot.query().settings().linter(),
+                snapshot.encoding(),
+            )?),
+            None,
+        )
+    };
+    Ok(CodeActionOrCommand::CodeAction(types::CodeAction {
+        title: format!("{DIAGNOSTIC_NAME}: Fix all auto-fixable problems"),
+        kind: Some(crate::NOTEBOOK_SOURCE_FIX_ALL_RUFF),
+        edit,
+        data,
+        ..Default::default()
+    }))
+}
fn organize_imports(snapshot: &DocumentSnapshot) -> crate::Result<CodeActionOrCommand> {
-    let document = snapshot.document();
+    let document = snapshot.query();
    let (edit, data) = if snapshot
        .resolved_client_capabilities()
@@ -171,17 +221,18 @@ fn organize_imports(snapshot: &DocumentSnapshot) -> crate::Result<CodeActionOrCo
        // The edit will be resolved later in the `CodeActionsResolve` request
        (
            None,
-            Some(serde_json::to_value(snapshot.url()).expect("document url to serialize")),
+            Some(
+                serde_json::to_value(snapshot.query().make_key().into_url())
+                    .expect("document url should serialize"),
+            ),
        )
    } else {
        (
            Some(resolve_edit_for_organize_imports(
                document,
                snapshot.resolved_client_capabilities(),
-                snapshot.url(),
-                snapshot.settings().linter(),
+                snapshot.query().settings().linter(),
                snapshot.encoding(),
-                document.version(),
            )?),
            None,
        )
@@ -196,6 +247,42 @@ fn organize_imports(snapshot: &DocumentSnapshot) -> crate::Result<CodeActionOrCo
    }))
}
+fn notebook_organize_imports(snapshot: &DocumentSnapshot) -> crate::Result<CodeActionOrCommand> {
+    let document = snapshot.query();
+    let (edit, data) = if snapshot
+        .resolved_client_capabilities()
+        .code_action_deferred_edit_resolution
+    {
+        // The edit will be resolved later in the `CodeActionsResolve` request
+        (
+            None,
+            Some(
+                serde_json::to_value(snapshot.query().make_key().into_url())
+                    .expect("document url should serialize"),
+            ),
+        )
+    } else {
+        (
+            Some(resolve_edit_for_organize_imports(
+                document,
+                snapshot.resolved_client_capabilities(),
+                snapshot.query().settings().linter(),
+                snapshot.encoding(),
+            )?),
+            None,
+        )
+    };
+    Ok(CodeActionOrCommand::CodeAction(types::CodeAction {
+        title: format!("{DIAGNOSTIC_NAME}: Organize imports"),
+        kind: Some(crate::NOTEBOOK_SOURCE_ORGANIZE_IMPORTS_RUFF),
+        edit,
+        data,
+        ..Default::default()
+    }))
+}
/// If `action_filter` is `None`, this returns [`SupportedCodeActionKind::all()`]. Otherwise,
/// the list is filtered.
fn supported_code_actions(
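
The notebook variants above mirror `fix_all` and `organize_imports`, differing only in the code action kind they advertise. The deferred-edit pattern they all share is small enough to sketch on its own; the snippet below is illustrative only (the helper name and title string are not from this commit) and assumes the client supports `codeAction/resolve`:

```rust
use lsp_types::{CodeAction, CodeActionKind, Url};

// When deferred edit resolution is supported, the action is returned without
// an `edit`; only the document URL is stashed in `data`, so the expensive fix
// can be computed later, in the `codeAction/resolve` request.
fn deferred_fix_all_action(url: &Url) -> CodeAction {
    CodeAction {
        title: "Ruff: Fix all auto-fixable problems".to_string(),
        kind: Some(CodeActionKind::SOURCE_FIX_ALL),
        data: Some(serde_json::to_value(url).expect("document url should serialize")),
        ..Default::default()
    }
}
```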


@@ -1,10 +1,11 @@
use std::borrow::Cow;
-use crate::edit::{DocumentVersion, WorkspaceEditTracker};
+use crate::edit::WorkspaceEditTracker;
+use crate::fix::Fixes;
use crate::server::api::LSPResult;
use crate::server::SupportedCodeAction;
use crate::server::{client::Notifier, Result};
-use crate::session::{DocumentSnapshot, ResolvedClientCapabilities};
+use crate::session::{DocumentQuery, DocumentSnapshot, ResolvedClientCapabilities};
use crate::PositionEncoding;
use lsp_server::ErrorCode;
use lsp_types::{self as types, request as req};
@@ -28,7 +29,7 @@ impl super::BackgroundDocumentRequestHandler for CodeActionResolve {
        _notifier: Notifier,
        mut action: types::CodeAction,
    ) -> Result<types::CodeAction> {
-        let document = snapshot.document();
+        let query = snapshot.query();
        let code_actions = SupportedCodeAction::from_kind(
            action
@@ -49,25 +50,22 @@ impl super::BackgroundDocumentRequestHandler for CodeActionResolve {
        };
        action.edit = match action_kind {
-            SupportedCodeAction::SourceFixAll => Some(
+            SupportedCodeAction::SourceFixAll | SupportedCodeAction::NotebookSourceFixAll => Some(
                resolve_edit_for_fix_all(
-                    document,
+                    query,
                    snapshot.resolved_client_capabilities(),
-                    snapshot.url(),
-                    snapshot.settings().linter(),
+                    query.settings().linter(),
                    snapshot.encoding(),
-                    document.version(),
                )
                .with_failure_code(ErrorCode::InternalError)?,
            ),
-            SupportedCodeAction::SourceOrganizeImports => Some(
+            SupportedCodeAction::SourceOrganizeImports
+            | SupportedCodeAction::NotebookSourceOrganizeImports => Some(
                resolve_edit_for_organize_imports(
-                    document,
+                    query,
                    snapshot.resolved_client_capabilities(),
-                    snapshot.url(),
-                    snapshot.settings().linter(),
+                    query.settings().linter(),
                    snapshot.encoding(),
-                    document.version(),
                )
                .with_failure_code(ErrorCode::InternalError)?,
            ),
@@ -84,54 +82,46 @@ impl super::BackgroundDocumentRequestHandler for CodeActionResolve {
}
pub(super) fn resolve_edit_for_fix_all(
-    document: &crate::edit::Document,
+    query: &DocumentQuery,
    client_capabilities: &ResolvedClientCapabilities,
-    url: &types::Url,
    linter_settings: &LinterSettings,
    encoding: PositionEncoding,
-    version: DocumentVersion,
) -> crate::Result<types::WorkspaceEdit> {
    let mut tracker = WorkspaceEditTracker::new(client_capabilities);
-    tracker.set_edits_for_document(
-        url.clone(),
-        version,
-        fix_all_edit(document, url, linter_settings, encoding)?,
+    tracker.set_fixes_for_document(
+        fix_all_edit(query, linter_settings, encoding)?,
+        query.version(),
    )?;
    Ok(tracker.into_workspace_edit())
}
pub(super) fn fix_all_edit(
-    document: &crate::edit::Document,
-    document_url: &types::Url,
+    query: &DocumentQuery,
    linter_settings: &LinterSettings,
    encoding: PositionEncoding,
-) -> crate::Result<Vec<types::TextEdit>> {
-    crate::fix::fix_all(document, document_url, linter_settings, encoding)
+) -> crate::Result<Fixes> {
+    crate::fix::fix_all(query, linter_settings, encoding)
}
pub(super) fn resolve_edit_for_organize_imports(
-    document: &crate::edit::Document,
+    query: &DocumentQuery,
    client_capabilities: &ResolvedClientCapabilities,
-    url: &types::Url,
    linter_settings: &ruff_linter::settings::LinterSettings,
    encoding: PositionEncoding,
-    version: DocumentVersion,
) -> crate::Result<types::WorkspaceEdit> {
    let mut tracker = WorkspaceEditTracker::new(client_capabilities);
-    tracker.set_edits_for_document(
-        url.clone(),
-        version,
-        organize_imports_edit(document, url, linter_settings, encoding)?,
+    tracker.set_fixes_for_document(
+        organize_imports_edit(query, linter_settings, encoding)?,
+        query.version(),
    )?;
    Ok(tracker.into_workspace_edit())
}
pub(super) fn organize_imports_edit(
-    document: &crate::edit::Document,
-    document_url: &types::Url,
+    query: &DocumentQuery,
    linter_settings: &LinterSettings,
    encoding: PositionEncoding,
-) -> crate::Result<Vec<types::TextEdit>> {
+) -> crate::Result<Fixes> {
    let mut linter_settings = linter_settings.clone();
    linter_settings.rules = [
        Rule::UnsortedImports, // I001
@@ -140,5 +130,5 @@ pub(super) fn organize_imports_edit(
        .into_iter()
        .collect();
-    crate::fix::fix_all(document, document_url, &linter_settings, encoding)
+    crate::fix::fix_all(query, &linter_settings, encoding)
}


@@ -26,7 +26,13 @@ impl super::BackgroundDocumentRequestHandler for DocumentDiagnostic {
    full_document_diagnostic_report: FullDocumentDiagnosticReport {
        // TODO(jane): eventually this will be important for caching diagnostic information.
        result_id: None,
-        items: generate_diagnostics(&snapshot),
+        // Pull diagnostic requests are only called for text documents.
+        // Since diagnostic requests generate
+        items: generate_diagnostics(&snapshot)
+            .into_iter()
+            .next()
+            .map(|(_, diagnostics)| diagnostics)
+            .unwrap_or_default(),
    },
}),
))


@@ -21,7 +21,7 @@ enum Command {
pub(crate) struct ExecuteCommand;
#[derive(Deserialize)]
-struct TextDocumentArgument {
+struct Argument {
    uri: types::Url,
    version: DocumentVersion,
}
@@ -45,7 +45,7 @@ impl super::SyncRequestHandler for ExecuteCommand {
        return Err(anyhow::anyhow!("Cannot execute the '{}' command: the client does not support `workspace/applyEdit`", command.label())).with_failure_code(ErrorCode::InternalError);
    }
-    let mut arguments: Vec<TextDocumentArgument> = params
+    let mut arguments: Vec<Argument> = params
        .arguments
        .into_iter()
        .map(|value| serde_json::from_value(value).with_failure_code(ErrorCode::InvalidParams))
@@ -55,22 +55,21 @@ impl super::SyncRequestHandler for ExecuteCommand {
    arguments.dedup_by(|a, b| a.uri == b.uri);
    let mut edit_tracker = WorkspaceEditTracker::new(session.resolved_client_capabilities());
-    for TextDocumentArgument { uri, version } in arguments {
+    for Argument { uri, version } in arguments {
        let snapshot = session
            .take_snapshot(&uri)
            .ok_or(anyhow::anyhow!("Document snapshot not available for {uri}",))
            .with_failure_code(ErrorCode::InternalError)?;
        match command {
            Command::FixAll => {
-                let edits = super::code_action_resolve::fix_all_edit(
-                    snapshot.document(),
-                    snapshot.url(),
-                    snapshot.settings().linter(),
+                let fixes = super::code_action_resolve::fix_all_edit(
+                    snapshot.query(),
+                    snapshot.query().settings().linter(),
                    snapshot.encoding(),
                )
                .with_failure_code(ErrorCode::InternalError)?;
                edit_tracker
-                    .set_edits_for_document(uri, version, edits)
+                    .set_fixes_for_document(fixes, snapshot.query().version())
                    .with_failure_code(ErrorCode::InternalError)?;
            }
            Command::Format => {
@@ -82,15 +81,14 @@ impl super::SyncRequestHandler for ExecuteCommand {
                }
            }
            Command::OrganizeImports => {
-                let edits = super::code_action_resolve::organize_imports_edit(
-                    snapshot.document(),
-                    snapshot.url(),
-                    snapshot.settings().linter(),
+                let fixes = super::code_action_resolve::organize_imports_edit(
+                    snapshot.query(),
+                    snapshot.query().settings().linter(),
                    snapshot.encoding(),
                )
                .with_failure_code(ErrorCode::InternalError)?;
                edit_tracker
-                    .set_edits_for_document(uri, version, edits)
+                    .set_fixes_for_document(fixes, snapshot.query().version())
                    .with_failure_code(ErrorCode::InternalError)?;
            }
        }


@@ -24,14 +24,37 @@ impl super::BackgroundDocumentRequestHandler for Format {
}
pub(super) fn format_document(snapshot: &DocumentSnapshot) -> Result<super::FormatResponse> {
-    let doc = snapshot.document();
+    let doc = snapshot
+        .query()
+        .as_single_document()
+        .expect("format should only be called on text documents or notebook cells");
    let source = doc.contents();
-    let formatted = crate::format::format(doc, snapshot.settings().formatter())
+    let mut formatted = crate::format::format(
+        doc,
+        snapshot.query().source_type(),
+        snapshot.query().settings().formatter(),
+    )
    .with_failure_code(lsp_server::ErrorCode::InternalError)?;
    // fast path - if the code is the same, return early
    if formatted == source {
        return Ok(None);
    }
+    // special case - avoid adding a newline to a notebook cell if it didn't already exist
+    if snapshot.query().as_notebook().is_some() {
+        let mut trimmed = formatted.as_str();
+        if !source.ends_with("\r\n") {
+            trimmed = trimmed.trim_end_matches("\r\n");
+        }
+        if !source.ends_with('\n') {
+            trimmed = trimmed.trim_end_matches('\n');
+        }
+        if !source.ends_with('\r') {
+            trimmed = trimmed.trim_end_matches('\r');
+        }
+        formatted = trimmed.to_string();
+    }
    let formatted_index: LineIndex = LineIndex::from_source_text(&formatted);
    let unformatted_index = doc.index();
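
The notebook special case above exists because the formatter always emits a trailing newline, which would otherwise be inserted into cells that never had one. A standalone sketch of that trimming logic (a hypothetical `trim_added_newline` helper, not the commit's own code):

```rust
fn trim_added_newline(source: &str, formatted: &str) -> String {
    let mut trimmed = formatted;
    // Only strip the terminators that the original cell source lacked.
    if !source.ends_with("\r\n") {
        trimmed = trimmed.trim_end_matches("\r\n");
    }
    if !source.ends_with('\n') {
        trimmed = trimmed.trim_end_matches('\n');
    }
    if !source.ends_with('\r') {
        trimmed = trimmed.trim_end_matches('\r');
    }
    trimmed.to_string()
}

fn main() {
    // A cell without a trailing newline keeps that shape after formatting.
    assert_eq!(trim_added_newline("x=1", "x = 1\n"), "x = 1");
}
```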


@@ -17,12 +17,19 @@ impl super::BackgroundDocumentRequestHandler for FormatRange {
    _notifier: Notifier,
    params: types::DocumentRangeFormattingParams,
) -> Result<super::FormatResponse> {
-    let document = snapshot.document();
+    let document = snapshot
+        .query()
+        .as_single_document()
+        .expect("hover should only be called on text documents or notebook cells");
    let text = document.contents();
    let index = document.index();
    let range = params.range.to_text_range(text, index, snapshot.encoding());
-    let formatted_range =
-        crate::format::format_range(document, snapshot.settings().formatter(), range)
+    let formatted_range = crate::format::format_range(
+        document,
+        snapshot.query().source_type(),
+        snapshot.query().settings().formatter(),
+        range,
+    )
        .with_failure_code(lsp_server::ErrorCode::InternalError)?;
    Ok(Some(vec![types::TextEdit {
        range: formatted_range


@@ -29,7 +29,11 @@ pub(crate) fn hover(
    snapshot: &DocumentSnapshot,
    position: &types::TextDocumentPositionParams,
) -> Option<types::Hover> {
-    let document = snapshot.document();
+    // Hover only operates on text documents or notebook cells
+    let document = snapshot
+        .query()
+        .as_single_document()
+        .expect("hover should only be called on text documents or notebook cells");
    let line_number: usize = position
        .position
        .line


@@ -1,24 +1,26 @@
//! Data model, state management, and configuration resolution.
mod capabilities;
+mod index;
mod settings;
-mod workspace;
+use std::path::PathBuf;
use std::sync::Arc;
use anyhow::anyhow;
-use lsp_types::{ClientCapabilities, Url};
+use lsp_types::{ClientCapabilities, NotebookDocumentCellChange, Url};
-use crate::edit::DocumentVersion;
+use crate::edit::{DocumentKey, DocumentVersion, NotebookDocument};
-use crate::PositionEncoding;
+use crate::{PositionEncoding, TextDocument};
pub(crate) use self::capabilities::ResolvedClientCapabilities;
+pub(crate) use self::index::DocumentQuery;
pub(crate) use self::settings::{AllSettings, ClientSettings};
/// The global state for the LSP
pub(crate) struct Session {
-    /// Workspace folders in the current session, which contain the state of all open files.
-    workspaces: workspace::Workspaces,
+    /// Used to retrieve information about open documents and settings.
+    index: index::Index,
    /// The global position encoding, negotiated during LSP initialization.
    position_encoding: PositionEncoding,
    /// Global settings provided by the client.
@@ -32,9 +34,8 @@ pub(crate) struct Session {
pub(crate) struct DocumentSnapshot {
    resolved_client_capabilities: Arc<ResolvedClientCapabilities>,
    client_settings: settings::ResolvedClientSettings,
-    document_ref: workspace::DocumentRef,
+    document_ref: index::DocumentQuery,
    position_encoding: PositionEncoding,
-    url: Url,
}
impl Session {
@@ -42,58 +43,100 @@ impl Session {
        client_capabilities: &ClientCapabilities,
        position_encoding: PositionEncoding,
        global_settings: ClientSettings,
-        workspaces: Vec<(Url, ClientSettings)>,
-    ) -> crate::Result<Self> {
-        Ok(Self {
+        workspace_folders: Vec<(PathBuf, ClientSettings)>,
+    ) -> Self {
+        Self {
            position_encoding,
-            workspaces: workspace::Workspaces::new(workspaces, &global_settings)?,
+            index: index::Index::new(workspace_folders, &global_settings),
            global_settings,
            resolved_client_capabilities: Arc::new(ResolvedClientCapabilities::new(
                client_capabilities,
            )),
-        })
+        }
    }
+    pub(crate) fn key_from_url(&self, url: &lsp_types::Url) -> crate::Result<DocumentKey> {
+        self.index
+            .key_from_url(url)
+            .ok_or_else(|| anyhow!("No document found for {url}"))
+    }
+    /// Creates a document snapshot with the URL referencing the document to snapshot.
    pub(crate) fn take_snapshot(&self, url: &Url) -> Option<DocumentSnapshot> {
+        let key = self.key_from_url(url).ok()?;
        Some(DocumentSnapshot {
            resolved_client_capabilities: self.resolved_client_capabilities.clone(),
-            client_settings: self.workspaces.client_settings(url, &self.global_settings),
-            document_ref: self.workspaces.snapshot(url)?,
+            client_settings: self.index.client_settings(&key, &self.global_settings),
+            document_ref: self.index.make_document_ref(key)?,
            position_encoding: self.position_encoding,
-            url: url.clone(),
        })
    }
-    pub(crate) fn open_document(&mut self, url: &Url, contents: String, version: DocumentVersion) {
-        self.workspaces.open(url, contents, version);
-    }
-    pub(crate) fn close_document(&mut self, url: &Url) -> crate::Result<()> {
-        self.workspaces.close(url)?;
-        Ok(())
-    }
-    pub(crate) fn document_controller(
-        &mut self,
-        url: &Url,
-    ) -> crate::Result<&mut workspace::DocumentController> {
-        self.workspaces
-            .controller(url)
-            .ok_or_else(|| anyhow!("Tried to open unavailable document `{url}`"))
-    }
-    pub(crate) fn reload_settings(&mut self, url: &Url) -> crate::Result<()> {
-        self.workspaces.reload_settings(url)
-    }
-    pub(crate) fn open_workspace_folder(&mut self, url: &Url) -> crate::Result<()> {
-        self.workspaces
-            .open_workspace_folder(url, &self.global_settings)?;
-        Ok(())
-    }
-    pub(crate) fn close_workspace_folder(&mut self, url: &Url) -> crate::Result<()> {
-        self.workspaces.close_workspace_folder(url)?;
-        Ok(())
-    }
+    /// Updates a text document at the associated `key`.
+    ///
+    /// The document key must point to a text document, or this will throw an error.
+    pub(crate) fn update_text_document(
+        &mut self,
+        key: &DocumentKey,
+        content_changes: Vec<lsp_types::TextDocumentContentChangeEvent>,
+        new_version: DocumentVersion,
+    ) -> crate::Result<()> {
+        let encoding = self.encoding();
+        self.index
+            .update_text_document(key, content_changes, new_version, encoding)
+    }
+    /// Updates a notebook document at the associated `key` with potentially new
+    /// cell, metadata, and version values.
+    ///
+    /// The document key must point to a notebook document or cell, or this will
+    /// throw an error.
+    pub(crate) fn update_notebook_document(
+        &mut self,
+        key: &DocumentKey,
+        cells: Option<NotebookDocumentCellChange>,
+        metadata: Option<serde_json::Map<String, serde_json::Value>>,
+        version: DocumentVersion,
+    ) -> crate::Result<()> {
+        let encoding = self.encoding();
+        self.index
+            .update_notebook_document(key, cells, metadata, version, encoding)
+    }
+    /// Registers a notebook document at the provided `path`.
+    /// If a document is already open here, it will be overwritten.
+    pub(crate) fn open_notebook_document(&mut self, path: PathBuf, document: NotebookDocument) {
+        self.index.open_notebook_document(path, document);
+    }
+    /// Registers a text document at the provided `path`.
+    /// If a document is already open here, it will be overwritten.
+    pub(crate) fn open_text_document(&mut self, path: PathBuf, document: TextDocument) {
+        self.index.open_text_document(path, document);
+    }
+    /// De-registers a document, specified by its key.
+    /// Calling this multiple times for the same document is a logic error.
+    pub(crate) fn close_document(&mut self, key: &DocumentKey) -> crate::Result<()> {
+        self.index.close_document(key)?;
+        Ok(())
+    }
+    /// Reloads the settings index
+    pub(crate) fn reload_settings(&mut self, changed_path: &PathBuf) {
+        self.index.reload_settings(changed_path);
+    }
+    /// Open a workspace folder at the given `path`.
+    pub(crate) fn open_workspace_folder(&mut self, path: PathBuf) {
+        self.index
+            .open_workspace_folder(path, &self.global_settings);
+    }
+    /// Close a workspace folder at the given `path`.
+    pub(crate) fn close_workspace_folder(&mut self, path: &PathBuf) -> crate::Result<()> {
+        self.index.close_workspace_folder(path)?;
+        Ok(())
+    }
@@ -107,10 +150,6 @@ impl Session {
}
impl DocumentSnapshot {
-    pub(crate) fn settings(&self) -> &workspace::RuffSettings {
-        self.document().settings()
-    }
    pub(crate) fn resolved_client_capabilities(&self) -> &ResolvedClientCapabilities {
        &self.resolved_client_capabilities
    }
@@ -119,15 +158,11 @@ impl DocumentSnapshot {
        &self.client_settings
    }
-    pub(crate) fn document(&self) -> &workspace::DocumentRef {
+    pub(crate) fn query(&self) -> &index::DocumentQuery {
        &self.document_ref
    }
    pub(crate) fn encoding(&self) -> PositionEncoding {
        self.position_encoding
    }
-    pub(crate) fn url(&self) -> &Url {
-        &self.url
-    }
}
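
Snapshots stay cheap here because `DocumentSnapshot` holds `Arc`-backed clones of the underlying document, while mutation goes through `Arc::make_mut`, which only copies the data if a snapshot is still alive and pointing at the old version. A minimal sketch of that copy-on-write behavior, using simplified stand-in types rather than the server's own:

```rust
use std::sync::Arc;

#[derive(Clone)]
struct Doc {
    contents: String,
    version: i32,
}

fn main() {
    let mut controller = Arc::new(Doc {
        contents: "x = 1".to_string(),
        version: 1,
    });

    // A background request takes a cheap snapshot.
    let snapshot = Arc::clone(&controller);

    // A `didChange` mutates through `make_mut`, which clones the document
    // because the snapshot still points at the old version.
    let doc = Arc::make_mut(&mut controller);
    doc.contents = "x = 2".to_string();
    doc.version = 2;

    assert_eq!(snapshot.version, 1);
    assert_eq!(controller.version, 2);
}
```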


@@ -0,0 +1,464 @@
use anyhow::anyhow;
use rustc_hash::FxHashMap;
use std::{
collections::BTreeMap,
path::{Path, PathBuf},
sync::Arc,
};
use crate::{
edit::{DocumentKey, DocumentVersion, NotebookDocument},
PositionEncoding, TextDocument,
};
use super::{
settings::{self, ResolvedClientSettings},
ClientSettings,
};
mod ruff_settings;
pub(crate) use ruff_settings::RuffSettings;
type DocumentIndex = FxHashMap<PathBuf, DocumentController>;
type NotebookCellIndex = FxHashMap<lsp_types::Url, PathBuf>;
type SettingsIndex = BTreeMap<PathBuf, WorkspaceSettings>;
/// Stores and tracks all open documents in a session, along with their associated settings.
#[derive(Default)]
pub(crate) struct Index {
/// Maps all document file paths to the associated document controller
documents: DocumentIndex,
/// Maps opaque cell URLs to a notebook path
notebook_cells: NotebookCellIndex,
/// Maps a workspace folder root to its settings.
settings: SettingsIndex,
}
/// Settings associated with a workspace.
struct WorkspaceSettings {
client_settings: ResolvedClientSettings,
workspace_settings_index: ruff_settings::RuffSettingsIndex,
}
/// A mutable handler to an underlying document.
enum DocumentController {
Text(Arc<TextDocument>),
Notebook(Arc<NotebookDocument>),
}
/// A read-only query to an open document.
/// This query can 'select' a text document, full notebook, or a specific notebook cell.
/// It also includes document settings.
#[derive(Clone)]
pub(crate) enum DocumentQuery {
Text {
file_path: PathBuf,
document: Arc<TextDocument>,
settings: Arc<RuffSettings>,
},
Notebook {
/// The selected notebook cell, if it exists.
cell_uri: Option<lsp_types::Url>,
file_path: PathBuf,
notebook: Arc<NotebookDocument>,
settings: Arc<RuffSettings>,
},
}
impl Index {
pub(super) fn new(
workspace_folders: Vec<(PathBuf, ClientSettings)>,
global_settings: &ClientSettings,
) -> Self {
let mut settings_index = BTreeMap::new();
for (path, workspace_settings) in workspace_folders {
Self::register_workspace_settings(
&mut settings_index,
path,
Some(workspace_settings),
global_settings,
);
}
Self {
documents: FxHashMap::default(),
notebook_cells: FxHashMap::default(),
settings: settings_index,
}
}
pub(super) fn update_text_document(
&mut self,
key: &DocumentKey,
content_changes: Vec<lsp_types::TextDocumentContentChangeEvent>,
new_version: DocumentVersion,
encoding: PositionEncoding,
) -> crate::Result<()> {
let controller = self.document_controller_for_key(key)?;
let Some(document) = controller.as_text_mut() else {
anyhow::bail!("Text document URI does not point to a text document");
};
if content_changes.is_empty() {
document.update_version(new_version);
return Ok(());
}
document.apply_changes(content_changes, new_version, encoding);
Ok(())
}
pub(super) fn key_from_url(&self, url: &lsp_types::Url) -> Option<DocumentKey> {
if self.notebook_cells.contains_key(url) {
return Some(DocumentKey::NotebookCell(url.clone()));
}
let path = url.to_file_path().ok()?;
Some(
match path
.extension()
.unwrap_or_default()
.to_str()
.unwrap_or_default()
{
"ipynb" => DocumentKey::Notebook(path),
_ => DocumentKey::Text(path),
},
)
}
pub(super) fn update_notebook_document(
&mut self,
key: &DocumentKey,
cells: Option<lsp_types::NotebookDocumentCellChange>,
metadata: Option<serde_json::Map<String, serde_json::Value>>,
new_version: DocumentVersion,
encoding: PositionEncoding,
) -> crate::Result<()> {
// update notebook cell index
if let Some(lsp_types::NotebookDocumentCellChangeStructure {
did_open,
did_close,
..
}) = cells.as_ref().and_then(|cells| cells.structure.as_ref())
{
let Some(path) = self.path_for_key(key).cloned() else {
anyhow::bail!("Tried to open unavailable document `{key}`");
};
for opened_cell in did_open.iter().flatten() {
self.notebook_cells
.insert(opened_cell.uri.clone(), path.clone());
}
for closed_cell in did_close.iter().flatten() {
if self.notebook_cells.remove(&closed_cell.uri).is_none() {
tracing::warn!(
"Tried to remove a notebook cell that does not exist: {}",
closed_cell.uri
);
}
}
}
let controller = self.document_controller_for_key(key)?;
let Some(notebook) = controller.as_notebook_mut() else {
anyhow::bail!("Notebook document URI does not point to a notebook document");
};
notebook.update(cells, metadata, new_version, encoding)?;
Ok(())
}
pub(super) fn open_workspace_folder(
&mut self,
path: PathBuf,
global_settings: &ClientSettings,
) {
// TODO(jane): Find a way for workspace client settings to be added or changed dynamically.
Self::register_workspace_settings(&mut self.settings, path, None, global_settings);
}
fn register_workspace_settings(
settings_index: &mut SettingsIndex,
workspace_path: PathBuf,
workspace_settings: Option<ClientSettings>,
global_settings: &ClientSettings,
) {
let client_settings = if let Some(workspace_settings) = workspace_settings {
ResolvedClientSettings::with_workspace(&workspace_settings, global_settings)
} else {
ResolvedClientSettings::global(global_settings)
};
let workspace_settings_index = ruff_settings::RuffSettingsIndex::new(
&workspace_path,
client_settings.editor_settings(),
);
settings_index.insert(
workspace_path,
WorkspaceSettings {
client_settings,
workspace_settings_index,
},
);
}
pub(super) fn close_workspace_folder(&mut self, workspace_path: &PathBuf) -> crate::Result<()> {
self.settings.remove(workspace_path).ok_or_else(|| {
anyhow!(
"Tried to remove non-existent folder {}",
workspace_path.display()
)
})?;
// O(n) complexity, which isn't ideal... but this is an uncommon operation.
self.documents
.retain(|path, _| !path.starts_with(workspace_path));
self.notebook_cells
.retain(|_, path| !path.starts_with(workspace_path));
Ok(())
}
pub(super) fn make_document_ref(&self, key: DocumentKey) -> Option<DocumentQuery> {
let path = self.path_for_key(&key)?.clone();
let document_settings = self
.settings_for_path(&path)?
.workspace_settings_index
.get(&path);
let controller = self.documents.get(&path)?;
let cell_uri = match key {
DocumentKey::NotebookCell(uri) => Some(uri),
_ => None,
};
Some(controller.make_ref(cell_uri, path, document_settings))
}
pub(super) fn reload_settings(&mut self, changed_path: &PathBuf) {
for (root, settings) in self
.settings
.iter_mut()
.filter(|(path, _)| path.starts_with(changed_path))
{
settings.workspace_settings_index = ruff_settings::RuffSettingsIndex::new(
root,
settings.client_settings.editor_settings(),
);
}
}
pub(super) fn open_text_document(&mut self, path: PathBuf, document: TextDocument) {
self.documents
.insert(path, DocumentController::new_text(document));
}
pub(super) fn open_notebook_document(&mut self, path: PathBuf, document: NotebookDocument) {
for url in document.urls() {
self.notebook_cells.insert(url.clone(), path.clone());
}
self.documents
.insert(path, DocumentController::new_notebook(document));
}
pub(super) fn close_document(&mut self, key: &DocumentKey) -> crate::Result<()> {
let Some(path) = self.path_for_key(key).cloned() else {
anyhow::bail!("Tried to open unavailable document `{key}`");
};
let Some(controller) = self.documents.remove(&path) else {
anyhow::bail!(
"tried to close document that didn't exist at {}",
path.display()
)
};
if let Some(notebook) = controller.as_notebook() {
for url in notebook.urls() {
self.notebook_cells.remove(url).ok_or_else(|| {
anyhow!("tried to de-register notebook cell with URL {url} that didn't exist")
})?;
}
}
Ok(())
}
pub(super) fn client_settings(
&self,
key: &DocumentKey,
global_settings: &ClientSettings,
) -> settings::ResolvedClientSettings {
let Some(path) = self.path_for_key(key) else {
return ResolvedClientSettings::global(global_settings);
};
let Some(WorkspaceSettings {
client_settings, ..
}) = self.settings_for_path(path)
else {
return ResolvedClientSettings::global(global_settings);
};
client_settings.clone()
}
fn document_controller_for_key(
&mut self,
key: &DocumentKey,
) -> crate::Result<&mut DocumentController> {
let Some(path) = self.path_for_key(key).cloned() else {
anyhow::bail!("Tried to open unavailable document `{key}`");
};
let Some(controller) = self.documents.get_mut(&path) else {
anyhow::bail!("Document controller not available at `{}`", path.display());
};
Ok(controller)
}
fn path_for_key<'a>(&'a self, key: &'a DocumentKey) -> Option<&'a PathBuf> {
match key {
DocumentKey::Notebook(path) | DocumentKey::Text(path) => Some(path),
DocumentKey::NotebookCell(uri) => self.notebook_cells.get(uri),
}
}
fn settings_for_path(&self, path: &Path) -> Option<&WorkspaceSettings> {
self.settings
.range(..path.to_path_buf())
.next_back()
.map(|(_, settings)| settings)
}
}
impl DocumentController {
fn new_text(document: TextDocument) -> Self {
Self::Text(Arc::new(document))
}
fn new_notebook(document: NotebookDocument) -> Self {
Self::Notebook(Arc::new(document))
}
fn make_ref(
&self,
cell_uri: Option<lsp_types::Url>,
file_path: PathBuf,
settings: Arc<RuffSettings>,
) -> DocumentQuery {
match &self {
Self::Notebook(notebook) => DocumentQuery::Notebook {
cell_uri,
file_path,
notebook: notebook.clone(),
settings,
},
Self::Text(document) => DocumentQuery::Text {
file_path,
document: document.clone(),
settings,
},
}
}
pub(crate) fn as_notebook_mut(&mut self) -> Option<&mut NotebookDocument> {
Some(match self {
Self::Notebook(notebook) => Arc::make_mut(notebook),
Self::Text(_) => return None,
})
}
pub(crate) fn as_notebook(&self) -> Option<&NotebookDocument> {
match self {
Self::Notebook(notebook) => Some(notebook),
Self::Text(_) => None,
}
}
#[allow(dead_code)]
pub(crate) fn as_text(&self) -> Option<&TextDocument> {
match self {
Self::Text(document) => Some(document),
Self::Notebook(_) => None,
}
}
pub(crate) fn as_text_mut(&mut self) -> Option<&mut TextDocument> {
Some(match self {
Self::Text(document) => Arc::make_mut(document),
Self::Notebook(_) => return None,
})
}
}
impl DocumentQuery {
/// Retrieve the original key that describes this document query.
pub(crate) fn make_key(&self) -> DocumentKey {
match self {
Self::Text { file_path, .. } => DocumentKey::Text(file_path.clone()),
Self::Notebook {
cell_uri: Some(cell_uri),
..
} => DocumentKey::NotebookCell(cell_uri.clone()),
Self::Notebook { file_path, .. } => DocumentKey::Notebook(file_path.clone()),
}
}
/// Get the document settings associated with this query.
pub(crate) fn settings(&self) -> &RuffSettings {
match self {
Self::Text { settings, .. } | Self::Notebook { settings, .. } => settings,
}
}
/// Generate a source kind used by the linter.
pub(crate) fn make_source_kind(&self) -> ruff_linter::source_kind::SourceKind {
match self {
Self::Text { document, .. } => {
ruff_linter::source_kind::SourceKind::Python(document.contents().to_string())
}
Self::Notebook { notebook, .. } => {
ruff_linter::source_kind::SourceKind::IpyNotebook(notebook.make_ruff_notebook())
}
}
}
/// Attempts to access the underlying notebook document that this query is selecting.
pub(crate) fn as_notebook(&self) -> Option<&NotebookDocument> {
match self {
Self::Notebook { notebook, .. } => Some(notebook),
Self::Text { .. } => None,
}
}
/// Get the source type of the document associated with this query.
pub(crate) fn source_type(&self) -> ruff_python_ast::PySourceType {
match self {
Self::Text { .. } => ruff_python_ast::PySourceType::Python,
Self::Notebook { .. } => ruff_python_ast::PySourceType::Ipynb,
}
}
/// Get the version of document selected by this query.
pub(crate) fn version(&self) -> DocumentVersion {
match self {
Self::Text { document, .. } => document.version(),
Self::Notebook { notebook, .. } => notebook.version(),
}
}
/// Get the underlying file path for the document selected by this query.
pub(crate) fn file_path(&self) -> &PathBuf {
match self {
Self::Text { file_path, .. } | Self::Notebook { file_path, .. } => file_path,
}
}
/// Attempt to access the single inner text document selected by the query.
/// If this query is selecting an entire notebook document, this will return `None`.
pub(crate) fn as_single_document(&self) -> Option<&TextDocument> {
match self {
Self::Text { document, .. } => Some(document),
Self::Notebook {
notebook, cell_uri, ..
} => cell_uri
.as_ref()
.and_then(|cell_uri| notebook.cell_document_by_uri(cell_uri)),
}
}
}
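
One detail worth calling out in `Index`: `settings_for_path` resolves a document to its workspace by taking the closest preceding key in the sorted settings map. A small self-contained sketch of that ancestor lookup (simplified types, not the server's own):

```rust
use std::collections::BTreeMap;
use std::path::{Path, PathBuf};

fn settings_for_path<'a>(index: &'a BTreeMap<PathBuf, &'a str>, path: &Path) -> Option<&'a str> {
    // Workspace roots sort before the files inside them, so the nearest
    // preceding key is treated as the owning workspace.
    index
        .range(..path.to_path_buf())
        .next_back()
        .map(|(_, settings)| *settings)
}

fn main() {
    let mut index = BTreeMap::new();
    index.insert(PathBuf::from("/projects/pandas"), "pandas settings");
    index.insert(PathBuf::from("/projects/scipy"), "scipy settings");

    let found = settings_for_path(&index, Path::new("/projects/pandas/notebooks/demo.ipynb"));
    assert_eq!(found, Some("pandas settings"));
}
```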


@@ -7,7 +7,7 @@ use serde::Deserialize;
use ruff_linter::{line_width::LineLength, RuleSelector};
/// Maps a workspace URI to its associated client settings. Used during server initialization.
-pub(crate) type WorkspaceSettingsMap = FxHashMap<Url, ClientSettings>;
+pub(crate) type WorkspaceSettingsMap = FxHashMap<PathBuf, ClientSettings>;
/// Resolved client settings for a specific document. These settings are meant to be
/// used directly by the server, and are *not* a 1:1 representation with how the client
@@ -169,7 +169,12 @@ impl AllSettings {
            workspace_settings: workspace_settings.map(|workspace_settings| {
                workspace_settings
                    .into_iter()
-                    .map(|settings| (settings.workspace, settings.settings))
+                    .map(|settings| {
+                        (
+                            settings.workspace.to_file_path().unwrap(),
+                            settings.settings,
+                        )
+                    })
                    .collect()
            }),
        }
@@ -360,7 +365,7 @@ mod tests {
        serde_json::from_str(content).expect("test fixture JSON should deserialize")
    }
-    #[test]
+    #[cfg_attr(not(windows), test)]
    fn test_vs_code_init_options_deserialize() {
        let options: InitializationOptions = deserialize_fixture(VS_CODE_INIT_OPTIONS_FIXTURE);
@@ -545,19 +550,19 @@ mod tests {
        "###);
    }
-    #[test]
+    #[cfg_attr(not(windows), test)]
    fn test_vs_code_workspace_settings_resolve() {
        let options = deserialize_fixture(VS_CODE_INIT_OPTIONS_FIXTURE);
        let AllSettings {
            global_settings,
            workspace_settings,
        } = AllSettings::from_init_options(options);
-        let url = Url::parse("file:///Users/test/projects/pandas").expect("url should parse");
+        let path = PathBuf::from_str("/Users/test/projects/pandas").expect("path should be valid");
        let workspace_settings = workspace_settings.expect("workspace settings should exist");
        assert_eq!(
            ResolvedClientSettings::with_workspace(
                workspace_settings
-                    .get(&url)
+                    .get(&path)
                    .expect("workspace setting should exist"),
                &global_settings
            ),
@@ -583,11 +588,11 @@ mod tests {
            }
        }
        );
-        let url = Url::parse("file:///Users/test/projects/scipy").expect("url should parse");
+        let path = PathBuf::from_str("/Users/test/projects/scipy").expect("path should be valid");
        assert_eq!(
            ResolvedClientSettings::with_workspace(
                workspace_settings
-                    .get(&url)
+                    .get(&path)
                    .expect("workspace setting should exist"),
                &global_settings
            ),


@@ -1,269 +0,0 @@
use anyhow::anyhow;
use lsp_types::Url;
use rustc_hash::FxHashMap;
use std::{
collections::BTreeMap,
ops::Deref,
path::{Path, PathBuf},
sync::Arc,
};
use crate::{edit::DocumentVersion, Document};
use self::ruff_settings::RuffSettingsIndex;
use super::{
settings::{self, ResolvedClientSettings, ResolvedEditorSettings},
ClientSettings,
};
mod ruff_settings;
pub(crate) use ruff_settings::RuffSettings;
#[derive(Default)]
pub(crate) struct Workspaces(BTreeMap<PathBuf, Workspace>);
pub(crate) struct Workspace {
open_documents: OpenDocuments,
settings: ResolvedClientSettings,
}
pub(crate) struct OpenDocuments {
documents: FxHashMap<Url, DocumentController>,
settings_index: ruff_settings::RuffSettingsIndex,
}
/// A mutable handler to an underlying document.
/// Handles copy-on-write mutation automatically when
/// calling `deref_mut`.
pub(crate) struct DocumentController {
document: Arc<Document>,
}
/// A read-only reference to a document.
#[derive(Clone)]
pub(crate) struct DocumentRef {
document: Arc<Document>,
settings: Arc<RuffSettings>,
}
impl Workspaces {
pub(super) fn new(
workspaces: Vec<(Url, ClientSettings)>,
global_settings: &ClientSettings,
) -> crate::Result<Self> {
Ok(Self(
workspaces
.into_iter()
.map(|(url, workspace_settings)| {
Workspace::new(&url, &workspace_settings, global_settings)
})
.collect::<crate::Result<_>>()?,
))
}
pub(super) fn open_workspace_folder(
&mut self,
folder_url: &Url,
global_settings: &ClientSettings,
) -> crate::Result<()> {
// TODO(jane): find a way to allow for workspace settings to be updated dynamically
let (path, workspace) =
Workspace::new(folder_url, &ClientSettings::default(), global_settings)?;
self.0.insert(path, workspace);
Ok(())
}
pub(super) fn close_workspace_folder(&mut self, folder_url: &Url) -> crate::Result<()> {
let path = folder_url
.to_file_path()
.map_err(|()| anyhow!("Folder URI was not a proper file path"))?;
self.0
.remove(&path)
.ok_or_else(|| anyhow!("Tried to remove non-existent folder {}", path.display()))?;
Ok(())
}
pub(super) fn snapshot(&self, document_url: &Url) -> Option<DocumentRef> {
self.workspace_for_url(document_url)?
.open_documents
.snapshot(document_url)
}
pub(super) fn controller(&mut self, document_url: &Url) -> Option<&mut DocumentController> {
self.workspace_for_url_mut(document_url)?
.open_documents
.controller(document_url)
}
pub(super) fn reload_settings(&mut self, changed_url: &Url) -> crate::Result<()> {
let (root, workspace) = self
.entry_for_url_mut(changed_url)
.ok_or_else(|| anyhow!("Workspace not found for {changed_url}"))?;
workspace.reload_settings(root);
Ok(())
}
pub(super) fn open(&mut self, url: &Url, contents: String, version: DocumentVersion) {
if let Some(workspace) = self.workspace_for_url_mut(url) {
workspace.open_documents.open(url, contents, version);
}
}
pub(super) fn close(&mut self, url: &Url) -> crate::Result<()> {
self.workspace_for_url_mut(url)
.ok_or_else(|| anyhow!("Workspace not found for {url}"))?
.open_documents
.close(url)
}
pub(super) fn client_settings(
&self,
url: &Url,
global_settings: &ClientSettings,
) -> settings::ResolvedClientSettings {
self.workspace_for_url(url).map_or_else(
|| {
tracing::warn!(
"Workspace not found for {url}. Global settings will be used for this document"
);
settings::ResolvedClientSettings::global(global_settings)
},
|workspace| workspace.settings.clone(),
)
}
fn workspace_for_url(&self, url: &Url) -> Option<&Workspace> {
Some(self.entry_for_url(url)?.1)
}
fn workspace_for_url_mut(&mut self, url: &Url) -> Option<&mut Workspace> {
Some(self.entry_for_url_mut(url)?.1)
}
fn entry_for_url(&self, url: &Url) -> Option<(&Path, &Workspace)> {
let path = url.to_file_path().ok()?;
self.0
.range(..path)
.next_back()
.map(|(path, workspace)| (path.as_path(), workspace))
}
fn entry_for_url_mut(&mut self, url: &Url) -> Option<(&Path, &mut Workspace)> {
let path = url.to_file_path().ok()?;
self.0
.range_mut(..path)
.next_back()
.map(|(path, workspace)| (path.as_path(), workspace))
}
}
impl Workspace {
pub(crate) fn new(
root: &Url,
workspace_settings: &ClientSettings,
global_settings: &ClientSettings,
) -> crate::Result<(PathBuf, Self)> {
let path = root
.to_file_path()
.map_err(|()| anyhow!("workspace URL was not a file path!"))?;
let settings = ResolvedClientSettings::with_workspace(workspace_settings, global_settings);
let workspace = Self {
open_documents: OpenDocuments::new(&path, settings.editor_settings()),
settings,
};
Ok((path, workspace))
}
fn reload_settings(&mut self, root: &Path) {
self.open_documents
.reload_settings(root, self.settings.editor_settings());
}
}
impl OpenDocuments {
fn new(path: &Path, editor_settings: &ResolvedEditorSettings) -> Self {
Self {
documents: FxHashMap::default(),
settings_index: RuffSettingsIndex::new(path, editor_settings),
}
}
fn snapshot(&self, url: &Url) -> Option<DocumentRef> {
let path = url
.to_file_path()
.expect("document URL should convert to file path: {url}");
let document_settings = self.settings_index.get(&path);
Some(self.documents.get(url)?.make_ref(document_settings))
}
fn controller(&mut self, url: &Url) -> Option<&mut DocumentController> {
self.documents.get_mut(url)
}
fn open(&mut self, url: &Url, contents: String, version: DocumentVersion) {
if self
.documents
.insert(url.clone(), DocumentController::new(contents, version))
.is_some()
{
tracing::warn!("Opening document `{url}` that is already open!");
}
}
fn close(&mut self, url: &Url) -> crate::Result<()> {
let Some(_) = self.documents.remove(url) else {
return Err(anyhow!(
"Tried to close document `{url}`, which was not open"
));
};
Ok(())
}
fn reload_settings(&mut self, root: &Path, editor_settings: &ResolvedEditorSettings) {
self.settings_index = RuffSettingsIndex::new(root, editor_settings);
}
}
impl DocumentController {
fn new(contents: String, version: DocumentVersion) -> Self {
Self {
document: Arc::new(Document::new(contents, version)),
}
}
pub(crate) fn make_ref(&self, document_settings: Arc<RuffSettings>) -> DocumentRef {
DocumentRef {
document: self.document.clone(),
settings: document_settings,
}
}
pub(crate) fn make_mut(&mut self) -> &mut Document {
Arc::make_mut(&mut self.document)
}
}
impl Deref for DocumentController {
type Target = Document;
fn deref(&self) -> &Self::Target {
&self.document
}
}
impl Deref for DocumentRef {
type Target = Document;
fn deref(&self) -> &Self::Target {
&self.document
}
}
impl DocumentRef {
pub(crate) fn settings(&self) -> &RuffSettings {
&self.settings
}
}


@@ -1,11 +1,11 @@
const PANDAS_HTML_SRC: &str = include_str!("../resources/test/fixtures/pandas_html.py");
use lsp_types::{Position, Range, TextDocumentContentChangeEvent};
-use ruff_server::{Document, PositionEncoding};
+use ruff_server::{PositionEncoding, TextDocument};
#[test]
fn delete_lines_pandas_html() {
-    let mut document = Document::new(PANDAS_HTML_SRC.to_string(), 1);
+    let mut document = TextDocument::new(PANDAS_HTML_SRC.to_string(), 1);
    let changes = vec![
        TextDocumentContentChangeEvent {