render json, except for notebooks

Repository (mirror): https://github.com/astral-sh/ruff.git
Parent: 69176672d2
Commit: 0021a7e0f1
6 changed files with 260 additions and 198 deletions

Cargo.lock (generated)

@@ -2865,6 +2865,7 @@ dependencies = [
  "salsa",
  "schemars",
  "serde",
+ "serde_json",
  "tempfile",
  "thiserror 2.0.12",
  "tracing",

@@ -36,7 +36,8 @@ path-slash = { workspace = true }
 rustc-hash = { workspace = true }
 salsa = { workspace = true }
 schemars = { workspace = true, optional = true }
-serde = { workspace = true, optional = true }
+serde = { workspace = true }
+serde_json = { workspace = true }
 thiserror = { workspace = true }
 tracing = { workspace = true }
 tracing-subscriber = { workspace = true, optional = true }

@@ -55,6 +56,6 @@ tempfile = { workspace = true }
 [features]
 cache = ["ruff_cache"]
 os = ["ignore", "dep:etcetera"]
-serde = ["dep:serde", "camino/serde1"]
+serde = ["camino/serde1"]
 # Exposes testing utilities.
 testing = ["tracing-subscriber"]

@@ -6,7 +6,7 @@ use ruff_source_file::{LineColumn, SourceCode, SourceFile};
 use ruff_annotate_snippets::Level as AnnotateLevel;
 use ruff_text_size::{Ranged, TextRange, TextSize};

-pub use self::render::{DisplayDiagnostic, FileResolver, Input};
+pub use self::render::{DisplayDiagnostic, DisplayDiagnostics, FileResolver, Input};
 use crate::{Db, files::File};

 mod render;

@@ -1236,6 +1236,8 @@ pub enum DiagnosticFormat {
     ///
     /// [Azure Pipelines]: https://learn.microsoft.com/en-us/azure/devops/pipelines/scripts/logging-commands?view=azure-devops&tabs=bash#logissue-log-an-error-or-warning
     Azure,
+    /// Print diagnostics in JSON format.
+    Json,
 }

 impl DiagnosticFormat {

@@ -1244,7 +1246,7 @@ impl DiagnosticFormat {
     /// This excludes structured formats like JSON and indicates that summary messages like "All
     /// checks passed!" should be suppressed.
     pub fn is_human_readable(self) -> bool {
-        !matches!(self, Self::Azure)
+        !matches!(self, Self::Azure | Self::Json | Self::JsonLines)
     }
 }

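A minimal sketch (not part of the commit) of how a caller opts into the new format. It only uses the `DisplayDiagnosticConfig::default().format(...)` builder call that the reworked emitters below rely on, together with the `is_human_readable` change above; it assumes `ruff_db` is available as a dependency.

    use ruff_db::diagnostic::{DiagnosticFormat, DisplayDiagnosticConfig};

    fn main() {
        // Select the new JSON output; every other rendering option keeps its default.
        let _config = DisplayDiagnosticConfig::default().format(DiagnosticFormat::Json);

        // Structured formats are not "human readable", so callers are expected to
        // suppress summary messages such as "All checks passed!".
        assert!(!DiagnosticFormat::Json.is_human_readable());
    }
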
@@ -1,5 +1,6 @@
 use std::collections::BTreeMap;

+use json::{diagnostics_to_json_value, message_to_json_value};
 use ruff_annotate_snippets::{
     Annotation as AnnotateAnnotation, Level as AnnotateLevel, Message as AnnotateMessage,
     Renderer as AnnotateRenderer, Snippet as AnnotateSnippet,

@@ -20,6 +21,165 @@ use super::{
     SubDiagnostic,
 };

+mod json {
+    use serde::{Serialize, Serializer, ser::SerializeSeq};
+    use serde_json::{Value, json};
+
+    use ruff_diagnostics::Edit;
+    use ruff_notebook::NotebookIndex;
+    use ruff_source_file::{LineColumn, OneIndexed, SourceCode};
+    use ruff_text_size::Ranged;
+
+    use crate::diagnostic::Diagnostic;
+
+    use super::FileResolver;
+
+    pub(super) fn diagnostics_to_json_value(
+        diagnostics: &[Diagnostic],
+        resolver: &dyn FileResolver,
+    ) -> Value {
+        let messages: Vec<_> = diagnostics
+            .iter()
+            .filter_map(|diag| message_to_json_value(diag, resolver))
+            .collect();
+        json!(messages)
+    }
+
+    pub(super) fn message_to_json_value(
+        message: &Diagnostic,
+        resolver: &dyn FileResolver,
+    ) -> Option<Value> {
+        let span = message.primary_span()?;
+        let filename = span.file().path(resolver);
+        let range = span.range()?;
+        let diagnostic_source = span.file().diagnostic_source(resolver);
+        let source_code = diagnostic_source.as_source_code();
+        // Input can be a notebook for ty, but we don't have a good way of retrieving the notebook
+        // index for Ruff. we might just need to pass it in
+        let notebook_index = None; // TODO
+
+        let fix = message.fix().map(|fix| {
+            json!({
+                "applicability": fix.applicability(),
+                "message": message.suggestion(),
+                "edits": &ExpandedEdits { edits: fix.edits(), source_code: &source_code, notebook_index },
+            })
+        });
+
+        let mut start_location = source_code.line_column(range.start());
+        let mut end_location = source_code.line_column(range.end());
+        let mut noqa_location = message
+            .noqa_offset()
+            .map(|offset| source_code.line_column(offset));
+        let mut notebook_cell_index = None;
+
+        if let Some(notebook_index) = notebook_index {
+            notebook_cell_index = Some(
+                notebook_index
+                    .cell(start_location.line)
+                    .unwrap_or(OneIndexed::MIN),
+            );
+            start_location = notebook_index.translate_line_column(&start_location);
+            end_location = notebook_index.translate_line_column(&end_location);
+            noqa_location =
+                noqa_location.map(|location| notebook_index.translate_line_column(&location));
+        }
+
+        Some(json!({
+            "code": message.secondary_code(),
+            "url": message.to_url(),
+            "message": message.body(),
+            "fix": fix,
+            "cell": notebook_cell_index,
+            "location": location_to_json(start_location),
+            "end_location": location_to_json(end_location),
+            "filename": filename,
+            "noqa_row": noqa_location.map(|location| location.line)
+        }))
+    }
+
+    fn location_to_json(location: LineColumn) -> serde_json::Value {
+        json!({
+            "row": location.line,
+            "column": location.column
+        })
+    }
+
+    struct ExpandedEdits<'a> {
+        edits: &'a [Edit],
+        source_code: &'a SourceCode<'a, 'a>,
+        notebook_index: Option<&'a NotebookIndex>,
+    }
+
+    impl Serialize for ExpandedEdits<'_> {
+        fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+        where
+            S: Serializer,
+        {
+            let mut s = serializer.serialize_seq(Some(self.edits.len()))?;
+
+            for edit in self.edits {
+                let mut location = self.source_code.line_column(edit.start());
+                let mut end_location = self.source_code.line_column(edit.end());
+
+                if let Some(notebook_index) = self.notebook_index {
+                    // There exists a newline between each cell's source code in the
+                    // concatenated source code in Ruff. This newline doesn't actually
+                    // exists in the JSON source field.
+                    //
+                    // Now, certain edits may try to remove this newline, which means
+                    // the edit will spill over to the first character of the next cell.
+                    // If it does, we need to translate the end location to the last
+                    // character of the previous cell.
+                    match (
+                        notebook_index.cell(location.line),
+                        notebook_index.cell(end_location.line),
+                    ) {
+                        (Some(start_cell), Some(end_cell)) if start_cell != end_cell => {
+                            debug_assert_eq!(end_location.column.get(), 1);
+
+                            let prev_row = end_location.line.saturating_sub(1);
+                            end_location = LineColumn {
+                                line: notebook_index.cell_row(prev_row).unwrap_or(OneIndexed::MIN),
+                                column: self
+                                    .source_code
+                                    .line_column(self.source_code.line_end_exclusive(prev_row))
+                                    .column,
+                            };
+                        }
+                        (Some(_), None) => {
+                            debug_assert_eq!(end_location.column.get(), 1);
+
+                            let prev_row = end_location.line.saturating_sub(1);
+                            end_location = LineColumn {
+                                line: notebook_index.cell_row(prev_row).unwrap_or(OneIndexed::MIN),
+                                column: self
+                                    .source_code
+                                    .line_column(self.source_code.line_end_exclusive(prev_row))
+                                    .column,
+                            };
+                        }
+                        _ => {
+                            end_location = notebook_index.translate_line_column(&end_location);
+                        }
+                    }
+                    location = notebook_index.translate_line_column(&location);
+                }
+
+                let value = json!({
+                    "content": edit.content().unwrap_or_default(),
+                    "location": location_to_json(location),
+                    "end_location": location_to_json(end_location)
+                });
+
+                s.serialize_element(&value)?;
+            }
+
+            s.end()
+        }
+    }
+}
+
 /// A type that implements `std::fmt::Display` for diagnostic rendering.
 ///
 /// It is created via [`Diagnostic::display`].

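To make the output shape concrete, here is an illustration of the object that `message_to_json_value` above builds for a single diagnostic. The field names mirror the `json!` call in the new module; the concrete values are invented for the example.

    use serde_json::json;

    fn main() {
        // Hypothetical rendering of one diagnostic; values are made up.
        let example = json!({
            "code": "F401",
            "url": "https://docs.astral.sh/ruff/rules/unused-import",
            "message": "`os` imported but unused",
            "fix": {
                "applicability": "safe",
                "message": "Remove unused import: `os`",
                "edits": [
                    {
                        "content": "",
                        "location": { "row": 1, "column": 1 },
                        "end_location": { "row": 2, "column": 1 }
                    }
                ]
            },
            // `cell` stays null until notebook indexes are wired up (see the TODO above).
            "cell": null,
            "location": { "row": 1, "column": 8 },
            "end_location": { "row": 1, "column": 10 },
            "filename": "example.py",
            "noqa_row": 1
        });
        println!("{example:#}");
    }

`diagnostics_to_json_value` collects these objects into a single array, which the `DisplayDiagnostics` type below prints with the pretty `{:#}` format.
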
@@ -59,6 +219,52 @@ impl<'a> DisplayDiagnostic<'a> {
     }
 }

+/// A type that implements `std::fmt::Display` for rendering a collection of diagnostics.
+///
+/// It is intended for collections of diagnostics that need to be serialized together, as is the
+/// case for JSON, for example.
+///
+/// See [`DisplayDiagnostic`] for rendering individual `Diagnostic`s and details about the lifetime
+/// constraints.
+pub struct DisplayDiagnostics<'a> {
+    config: &'a DisplayDiagnosticConfig,
+    resolver: &'a dyn FileResolver,
+    diagnostics: &'a [Diagnostic],
+}
+
+impl<'a> DisplayDiagnostics<'a> {
+    pub fn new(
+        resolver: &'a dyn FileResolver,
+        config: &'a DisplayDiagnosticConfig,
+        diagnostics: &'a [Diagnostic],
+    ) -> DisplayDiagnostics<'a> {
+        DisplayDiagnostics {
+            config,
+            resolver,
+            diagnostics,
+        }
+    }
+}
+
+impl std::fmt::Display for DisplayDiagnostics<'_> {
+    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+        match self.config.format {
+            DiagnosticFormat::Concise | DiagnosticFormat::Azure | DiagnosticFormat::Full => {
+                for diag in self.diagnostics {
+                    write!(f, "{}", diag.display(self.resolver, self.config))?;
+                }
+            }
+            DiagnosticFormat::Json => write!(
+                f,
+                "{:#}",
+                diagnostics_to_json_value(self.diagnostics, self.resolver)
+            )?,
+        }
+
+        Ok(())
+    }
+}
+
 impl std::fmt::Display for DisplayDiagnostic<'_> {
     fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
         let stylesheet = if self.config.color {

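A short usage sketch for the new type, mirroring what the reworked `JsonEmitter` later in this diff does; `resolver` stands for any `FileResolver` implementation and `diagnostics` for a previously collected batch, both assumed here.

    use ruff_db::diagnostic::{
        Diagnostic, DiagnosticFormat, DisplayDiagnosticConfig, DisplayDiagnostics, FileResolver,
    };

    /// Render a whole batch of diagnostics as one pretty-printed JSON array.
    fn render_json(resolver: &dyn FileResolver, diagnostics: &[Diagnostic]) -> String {
        let config = DisplayDiagnosticConfig::default().format(DiagnosticFormat::Json);
        // `DisplayDiagnostics` implements `Display`, so it can be written to any
        // formatter or converted to a `String`.
        DisplayDiagnostics::new(resolver, &config, diagnostics).to_string()
    }
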
@@ -102,7 +308,7 @@ impl std::fmt::Display for DisplayDiagnostic<'_> {
                     }
                     write!(f, ":")?;
                 }
-                return writeln!(f, " {message}", message = self.diag.concise_message());
+                writeln!(f, " {message}", message = self.diag.concise_message())?;
             }
             DiagnosticFormat::Azure => {
                 let severity = match self.diag.severity() {

@@ -127,7 +333,7 @@ impl std::fmt::Display for DisplayDiagnostic<'_> {
                         )?;
                     }
                 }
-                return writeln!(
+                writeln!(
                     f,
                     "{code}]{body}",
                     code = self

@@ -135,28 +341,35 @@ impl std::fmt::Display for DisplayDiagnostic<'_> {
                         .secondary_code()
                         .map_or_else(String::new, |code| format!("code={code};")),
                     body = self.diag.body(),
-                );
+                )?;
             }
+            DiagnosticFormat::Json => {
+                if let Some(value) = message_to_json_value(self.diag, self.resolver) {
+                    writeln!(f, "{value}")?;
+                }
+            }
+            DiagnosticFormat::Full => {
+                let mut renderer = self.annotate_renderer.clone();
+                renderer = renderer
+                    .error(stylesheet.error)
+                    .warning(stylesheet.warning)
+                    .info(stylesheet.info)
+                    .note(stylesheet.note)
+                    .help(stylesheet.help)
+                    .line_no(stylesheet.line_no)
+                    .emphasis(stylesheet.emphasis)
+                    .none(stylesheet.none);
+
+                let resolved = Resolved::new(self.resolver, self.diag);
+                let renderable = resolved.to_renderable(self.config.context);
+                for diag in renderable.diagnostics.iter() {
+                    writeln!(f, "{}", renderer.render(diag.to_annotate()))?;
+                }
+                writeln!(f)?;
+            }
             _ => {}
         }
-
-        let mut renderer = self.annotate_renderer.clone();
-        renderer = renderer
-            .error(stylesheet.error)
-            .warning(stylesheet.warning)
-            .info(stylesheet.info)
-            .note(stylesheet.note)
-            .help(stylesheet.help)
-            .line_no(stylesheet.line_no)
-            .emphasis(stylesheet.emphasis)
-            .none(stylesheet.none);
-
-        let resolved = Resolved::new(self.resolver, self.diag);
-        let renderable = resolved.to_renderable(self.config.context);
-        for diag in renderable.diagnostics.iter() {
-            writeln!(f, "{}", renderer.render(diag.to_annotate()))?;
-        }
-        writeln!(f)
+        Ok(())
     }
 }

@@ -1,16 +1,10 @@
 use std::io::Write;

-use serde::ser::SerializeSeq;
-use serde::{Serialize, Serializer};
-use serde_json::{Value, json};
+use ruff_db::diagnostic::{
+    Diagnostic, DiagnosticFormat, DisplayDiagnosticConfig, DisplayDiagnostics,
+};

-use ruff_db::diagnostic::Diagnostic;
-use ruff_notebook::NotebookIndex;
-use ruff_source_file::{LineColumn, OneIndexed, SourceCode};
-use ruff_text_size::Ranged;
-
-use crate::Edit;
-use crate::message::{Emitter, EmitterContext};
+use crate::message::{DummyFileResolver, Emitter, EmitterContext};

 #[derive(Default)]
 pub struct JsonEmitter;

@@ -20,165 +14,15 @@ impl Emitter for JsonEmitter {
         &mut self,
         writer: &mut dyn Write,
         diagnostics: &[Diagnostic],
-        context: &EmitterContext,
+        _context: &EmitterContext,
     ) -> anyhow::Result<()> {
-        serde_json::to_writer_pretty(
+        let resolver = DummyFileResolver;
+        let config = DisplayDiagnosticConfig::default().format(DiagnosticFormat::Json);
+        Ok(write!(
             writer,
-            &ExpandedMessages {
-                diagnostics,
-                context,
-            },
-        )?;
-
-        Ok(())
-    }
-}
-
-struct ExpandedMessages<'a> {
-    diagnostics: &'a [Diagnostic],
-    context: &'a EmitterContext<'a>,
-}
-
-impl Serialize for ExpandedMessages<'_> {
-    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
-    where
-        S: Serializer,
-    {
-        let mut s = serializer.serialize_seq(Some(self.diagnostics.len()))?;
-
-        for message in self.diagnostics {
-            let value = message_to_json_value(message, self.context);
-            s.serialize_element(&value)?;
-        }
-
-        s.end()
-    }
-}
-
-pub(crate) fn message_to_json_value(message: &Diagnostic, context: &EmitterContext) -> Value {
-    let source_file = message.expect_ruff_source_file();
-    let source_code = source_file.to_source_code();
-    let filename = message.expect_ruff_filename();
-    let notebook_index = context.notebook_index(&filename);
-
-    let fix = message.fix().map(|fix| {
-        json!({
-            "applicability": fix.applicability(),
-            "message": message.suggestion(),
-            "edits": &ExpandedEdits { edits: fix.edits(), source_code: &source_code, notebook_index },
-        })
-    });
-
-    let mut start_location = source_code.line_column(message.expect_range().start());
-    let mut end_location = source_code.line_column(message.expect_range().end());
-    let mut noqa_location = message
-        .noqa_offset()
-        .map(|offset| source_code.line_column(offset));
-    let mut notebook_cell_index = None;
-
-    if let Some(notebook_index) = notebook_index {
-        notebook_cell_index = Some(
-            notebook_index
-                .cell(start_location.line)
-                .unwrap_or(OneIndexed::MIN),
-        );
-        start_location = notebook_index.translate_line_column(&start_location);
-        end_location = notebook_index.translate_line_column(&end_location);
-        noqa_location =
-            noqa_location.map(|location| notebook_index.translate_line_column(&location));
-    }
-
-    json!({
-        "code": message.secondary_code(),
-        "url": message.to_url(),
-        "message": message.body(),
-        "fix": fix,
-        "cell": notebook_cell_index,
-        "location": location_to_json(start_location),
-        "end_location": location_to_json(end_location),
-        "filename": filename,
-        "noqa_row": noqa_location.map(|location| location.line)
-    })
-}
-
-fn location_to_json(location: LineColumn) -> serde_json::Value {
-    json!({
-        "row": location.line,
-        "column": location.column
-    })
-}
-
-struct ExpandedEdits<'a> {
-    edits: &'a [Edit],
-    source_code: &'a SourceCode<'a, 'a>,
-    notebook_index: Option<&'a NotebookIndex>,
-}
-
-impl Serialize for ExpandedEdits<'_> {
-    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
-    where
-        S: Serializer,
-    {
-        let mut s = serializer.serialize_seq(Some(self.edits.len()))?;
-
-        for edit in self.edits {
-            let mut location = self.source_code.line_column(edit.start());
-            let mut end_location = self.source_code.line_column(edit.end());
-
-            if let Some(notebook_index) = self.notebook_index {
-                // There exists a newline between each cell's source code in the
-                // concatenated source code in Ruff. This newline doesn't actually
-                // exists in the JSON source field.
-                //
-                // Now, certain edits may try to remove this newline, which means
-                // the edit will spill over to the first character of the next cell.
-                // If it does, we need to translate the end location to the last
-                // character of the previous cell.
-                match (
-                    notebook_index.cell(location.line),
-                    notebook_index.cell(end_location.line),
-                ) {
-                    (Some(start_cell), Some(end_cell)) if start_cell != end_cell => {
-                        debug_assert_eq!(end_location.column.get(), 1);
-
-                        let prev_row = end_location.line.saturating_sub(1);
-                        end_location = LineColumn {
-                            line: notebook_index.cell_row(prev_row).unwrap_or(OneIndexed::MIN),
-                            column: self
-                                .source_code
-                                .line_column(self.source_code.line_end_exclusive(prev_row))
-                                .column,
-                        };
-                    }
-                    (Some(_), None) => {
-                        debug_assert_eq!(end_location.column.get(), 1);
-
-                        let prev_row = end_location.line.saturating_sub(1);
-                        end_location = LineColumn {
-                            line: notebook_index.cell_row(prev_row).unwrap_or(OneIndexed::MIN),
-                            column: self
-                                .source_code
-                                .line_column(self.source_code.line_end_exclusive(prev_row))
-                                .column,
-                        };
-                    }
-                    _ => {
-                        end_location = notebook_index.translate_line_column(&end_location);
-                    }
-                }
-                location = notebook_index.translate_line_column(&location);
-            }
-
-            let value = json!({
-                "content": edit.content().unwrap_or_default(),
-                "location": location_to_json(location),
-                "end_location": location_to_json(end_location)
-            });
-
-            s.serialize_element(&value)?;
-        }
-
-        s.end()
+            "{}",
+            DisplayDiagnostics::new(&resolver, &config, diagnostics)
+        )?)
     }
 }

@@ -1,9 +1,8 @@
 use std::io::Write;

-use ruff_db::diagnostic::Diagnostic;
+use ruff_db::diagnostic::{Diagnostic, DiagnosticFormat, DisplayDiagnosticConfig};

-use crate::message::json::message_to_json_value;
-use crate::message::{Emitter, EmitterContext};
+use crate::message::{DummyFileResolver, Emitter, EmitterContext};

 #[derive(Default)]
 pub struct JsonLinesEmitter;

@@ -13,12 +12,14 @@ impl Emitter for JsonLinesEmitter {
         &mut self,
         writer: &mut dyn Write,
         diagnostics: &[Diagnostic],
-        context: &EmitterContext,
+        _context: &EmitterContext,
     ) -> anyhow::Result<()> {
+        let resolver = DummyFileResolver;
+        let config = DisplayDiagnosticConfig::default().format(DiagnosticFormat::Json);
         for diagnostic in diagnostics {
-            serde_json::to_writer(&mut *writer, &message_to_json_value(diagnostic, context))?;
-            writer.write_all(b"\n")?;
+            write!(writer, "{}", diagnostic.display(&resolver, &config))?;
         }
+
         Ok(())
     }
 }

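Taken together, the two emitters now differ only in framing: `JsonEmitter` hands the whole slice to `DisplayDiagnostics`, which pretty-prints a single JSON array, while `JsonLinesEmitter` sends each diagnostic through `DisplayDiagnostic`'s `Json` arm, which writes one compact object per line. A sketch of that per-line loop, with `resolver` assumed to be any `FileResolver` implementation rather than the `DummyFileResolver` used above:

    use std::io::Write;

    use ruff_db::diagnostic::{Diagnostic, DiagnosticFormat, DisplayDiagnosticConfig, FileResolver};

    /// One compact JSON object per line (JSON Lines). No separator is written here:
    /// the `Json` arm of `DisplayDiagnostic` already terminates each value with a newline.
    fn emit_json_lines(
        writer: &mut dyn Write,
        resolver: &dyn FileResolver,
        diagnostics: &[Diagnostic],
    ) -> std::io::Result<()> {
        let config = DisplayDiagnosticConfig::default().format(DiagnosticFormat::Json);
        for diagnostic in diagnostics {
            write!(writer, "{}", diagnostic.display(resolver, &config))?;
        }
        Ok(())
    }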