Mirror of https://github.com/astral-sh/ruff.git, synced 2025-09-26 20:10:09 +00:00
Fix clippy::trivially-copy-pass-by-ref (pedantic) (#862)
This commit is contained in: parent 6b9e57fb78, commit 58949b564e
18 changed files with 53 additions and 53 deletions
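The change is mechanical throughout: clippy's trivially_copy_pass_by_ref lint flags parameters that are small Copy types (here `Location`, `u64`, `usize`, and the `Flags` bitset) passed behind a `&`, where copying the value is at least as cheap as passing a pointer and avoids a dereference at every use. A minimal sketch of the before/after shape, using an illustrative `Point` type rather than anything from the ruff codebase:

#[derive(Clone, Copy)]
struct Point {
    row: usize,
    column: usize,
}

// Before: &Point is pointer-sized anyway, and every field access pays an indirection.
fn row_by_ref(point: &Point) -> usize {
    point.row
}

// After: Point is Copy and only two words wide, so pass it by value.
fn row_by_value(point: Point) -> usize {
    point.row
}

fn main() {
    let p = Point { row: 1, column: 0 };
    assert_eq!(row_by_ref(&p), row_by_value(p));
}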
@@ -253,7 +253,7 @@ pub fn to_module_and_member(target: &str) -> (&str, &str) {

 /// Convert a location within a file (relative to `base`) to an absolute
 /// position.
-pub fn to_absolute(relative: &Location, base: &Location) -> Location {
+pub fn to_absolute(relative: Location, base: Location) -> Location {
     if relative.row() == 1 {
         Location::new(
             relative.row() + base.row() - 1,
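Since `Location` is a small Copy struct, callers of the new signature hand the values over directly and can keep using them afterwards. A hypothetical call site, assuming RustPython's `Location::new(row, column)` constructor and the `to_absolute` shown above:

use rustpython_parser::ast::Location;

fn example() {
    let base = Location::new(10, 4);
    let relative = Location::new(2, 7);
    // No borrows needed, and `base`/`relative` remain usable after the call.
    let absolute = to_absolute(relative, base);
    assert_eq!(absolute.row(), 2 + 10 - 1);
}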
@@ -94,7 +94,7 @@ fn apply_fixes<'a>(
     }

     // Add the remaining content.
-    let slice = locator.slice_source_code_at(&last_pos);
+    let slice = locator.slice_source_code_at(last_pos);
     output.append(&slice);

     Cow::from(output.finish())
@@ -109,7 +109,7 @@ pub fn init() -> Result<()> {
     Ok(())
 }

-fn write_sync(key: &u64, value: &[u8]) -> Result<(), std::io::Error> {
+fn write_sync(key: u64, value: &[u8]) -> Result<(), std::io::Error> {
     fs::write(
         Path::new(cache_dir())
             .join(content_dir())
@@ -118,7 +118,7 @@ fn write_sync(key: &u64, value: &[u8]) -> Result<(), std::io::Error> {
     )
 }

-fn read_sync(key: &u64) -> Result<Vec<u8>, std::io::Error> {
+fn read_sync(key: u64) -> Result<Vec<u8>, std::io::Error> {
     fs::read(
         Path::new(cache_dir())
             .join(content_dir())
@@ -138,7 +138,7 @@ pub fn get(
         return None;
     };

-    if let Ok(encoded) = read_sync(&cache_key(path, settings, autofix)) {
+    if let Ok(encoded) = read_sync(cache_key(path, settings, autofix)) {
         match bincode::deserialize::<CheckResult>(&encoded[..]) {
             Ok(CheckResult {
                 metadata: CacheMetadata { mtime },
@@ -174,7 +174,7 @@ pub fn set(
         messages,
     };
     if let Err(e) = write_sync(
-        &cache_key(path, settings, autofix),
+        cache_key(path, settings, autofix),
         &bincode::serialize(&check_result).unwrap(),
     ) {
         error!("Failed to write to cache: {e:?}")
@@ -26,7 +26,7 @@ pub fn check_tokens(
     let enforce_invalid_escape_sequence = settings.enabled.contains(&CheckCode::W605);

     let mut state_machine: StateMachine = Default::default();
-    for (start, tok, end) in tokens.iter().flatten() {
+    for &(start, ref tok, end) in tokens.iter().flatten() {
         let is_docstring = if enforce_ambiguous_unicode_character || enforce_quotes {
             state_machine.consume(tok)
         } else {
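The loop is the least mechanical edit in this commit: `tokens.iter().flatten()` yields `&(Location, Tok, Location)`, and the new pattern `&(start, ref tok, end)` destructures through that reference, copying the two Copy locations out by value while only borrowing the non-Copy `Tok`. A standalone sketch of the same binding shape, with stand-in types:

#[derive(Clone, Copy, Debug)]
struct Loc(usize, usize);

#[derive(Debug)]
enum Tok {
    Comment,
}

fn main() {
    // Stand-in for `&[LexResult]`: Result-wrapped (start, token, end) triples.
    let tokens: Vec<Result<(Loc, Tok, Loc), ()>> =
        vec![Ok((Loc(1, 0), Tok::Comment, Loc(1, 10)))];

    // `iter().flatten()` yields `&(Loc, Tok, Loc)`; the `&(..)` pattern copies the
    // Copy fields (`start`, `end`) while the `ref` binding borrows the token.
    for &(start, ref tok, end) in tokens.iter().flatten() {
        println!("{start:?} {tok:?} {end:?}");
    }
}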
@@ -38,7 +38,7 @@ pub struct Directives {
 pub fn extract_directives(
     lxr: &[LexResult],
     locator: &SourceCodeLocator,
-    flags: &Flags,
+    flags: Flags,
 ) -> Directives {
     Directives {
         noqa_line_for: if flags.contains(Flags::NOQA) {
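`Flags` here is a bitflags-style option set: it wraps a small integer and is Copy, so it moves to pass-by-value just like the locations. An illustrative sketch assuming the bitflags 1.x crate; the real type in ruff may declare different flags, but the shape is the same:

use bitflags::bitflags;

bitflags! {
    pub struct Flags: u32 {
        const NOQA = 0b0000_0001;
        const ISORT = 0b0000_0010;
    }
}

// Taking `Flags` by value copies a u32-sized bitset; no borrow is needed.
fn noqa_enabled(flags: Flags) -> bool {
    flags.contains(Flags::NOQA)
}

fn main() {
    assert!(noqa_enabled(Flags::NOQA | Flags::ISORT));
    assert!(!noqa_enabled(Flags::ISORT));
}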
@@ -75,13 +75,13 @@ pub fn extract_noqa_line_for(lxr: &[LexResult]) -> IntMap<usize, usize> {
 /// Extract a set of lines over which to disable isort.
 pub fn extract_isort_exclusions(lxr: &[LexResult], locator: &SourceCodeLocator) -> IntSet<usize> {
     let mut exclusions: IntSet<usize> = IntSet::default();
-    let mut off: Option<&Location> = None;
-    for (start, tok, end) in lxr.iter().flatten() {
+    let mut off: Option<Location> = None;
+    for &(start, ref tok, end) in lxr.iter().flatten() {
         // TODO(charlie): Modify RustPython to include the comment text in the token.
         if matches!(tok, Tok::Comment) {
             let comment_text = locator.slice_source_code_range(&Range {
-                location: *start,
-                end_location: *end,
+                location: start,
+                end_location: end,
             });
             if off.is_some() {
                 if comment_text == "# isort: on" {
@@ -26,7 +26,7 @@ fn match_tuple_range<T>(located: &Located<T>, locator: &SourceCodeLocator) -> Re
     for (start, tok, end) in lexer::make_tokenizer(&contents).flatten() {
         if matches!(tok, Tok::Lpar) {
             if count == 0 {
-                location = Some(helpers::to_absolute(&start, &range.location));
+                location = Some(helpers::to_absolute(start, range.location));
             }
             count += 1;
         }
@@ -34,7 +34,7 @@ fn match_tuple_range<T>(located: &Located<T>, locator: &SourceCodeLocator) -> Re
         if matches!(tok, Tok::Rpar) {
             count -= 1;
             if count == 0 {
-                end_location = Some(helpers::to_absolute(&end, &range.location));
+                end_location = Some(helpers::to_absolute(end, range.location));
                 break;
             }
         }
@@ -42,14 +42,14 @@ fn good_docstring(quote: &Quote) -> &str {

 pub fn quotes(
     locator: &SourceCodeLocator,
-    start: &Location,
-    end: &Location,
+    start: Location,
+    end: Location,
     is_docstring: bool,
     settings: &Settings,
 ) -> Option<Check> {
     let text = locator.slice_source_code_range(&Range {
-        location: *start,
-        end_location: *end,
+        location: start,
+        end_location: end,
     });

     // Remove any prefixes (e.g., remove `u` from `u"foo"`).
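The edits inside the function bodies follow directly: once `start` and `end` arrive as `Location` values rather than references, the `Range` literal no longer needs `*` dereferences, and because `Location` is Copy the same values stay usable for every later `Range` built in the same function. A compact sketch with illustrative types:

#[derive(Clone, Copy)]
struct Location {
    row: usize,
    column: usize,
}

struct Range {
    location: Location,
    end_location: Location,
}

fn spans(start: Location, end: Location) -> (Range, Range) {
    // Previously `location: *start` / `end_location: *end`; now the values are
    // used directly and copied again for the second Range without any cloning.
    let first = Range { location: start, end_location: end };
    let second = Range { location: start, end_location: end };
    (first, second)
}

fn main() {
    let (a, _b) = spans(
        Location { row: 1, column: 0 },
        Location { row: 1, column: 5 },
    );
    assert_eq!(a.end_location.column, 5);
}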
@@ -77,8 +77,8 @@ pub fn quotes(
             return Some(Check::new(
                 CheckKind::BadQuotesDocstring(settings.docstring_quotes.clone()),
                 Range {
-                    location: *start,
-                    end_location: *end,
+                    location: start,
+                    end_location: end,
                 },
             ));
         } else if is_multiline {
@@ -95,8 +95,8 @@ pub fn quotes(
             return Some(Check::new(
                 CheckKind::BadQuotesMultilineString(settings.multiline_quotes.clone()),
                 Range {
-                    location: *start,
-                    end_location: *end,
+                    location: start,
+                    end_location: end,
                 },
             ));
         } else {
@@ -113,8 +113,8 @@ pub fn quotes(
             return Some(Check::new(
                 CheckKind::AvoidQuoteEscape,
                 Range {
-                    location: *start,
-                    end_location: *end,
+                    location: start,
+                    end_location: end,
                 },
             ));
         }
@@ -126,8 +126,8 @@ pub fn quotes(
         return Some(Check::new(
             CheckKind::BadQuotesInlineString(settings.inline_quotes.clone()),
             Range {
-                location: *start,
-                end_location: *end,
+                location: start,
+                end_location: end,
             },
         ));
     }
@@ -22,8 +22,8 @@ pub fn collect_comments<'a>(range: &Range, locator: &'a SourceCodeLocator) -> Ve
         .flatten()
         .filter_map(|(start, tok, end)| {
             if matches!(tok, Tok::Comment) {
-                let start = helpers::to_absolute(&start, &range.location);
-                let end = helpers::to_absolute(&end, &range.location);
+                let start = helpers::to_absolute(start, range.location);
+                let end = helpers::to_absolute(end, range.location);
                 Some(Comment {
                     value: locator.slice_source_code_range(&Range {
                         location: start,
@@ -38,7 +38,7 @@ pub fn format_import_from(
     import_from: &ImportFromData,
     comments: &CommentSet,
     aliases: &[(AliasData, CommentSet)],
-    line_length: &usize,
+    line_length: usize,
     is_first: bool,
 ) -> String {
     // We can only inline if: (1) none of the aliases have atop comments, and (3)
@@ -54,7 +54,7 @@ pub fn format_import_from(
     {
         let (single_line, import_length) =
             format_single_line(import_from, comments, aliases, is_first);
-        if import_length <= *line_length {
+        if import_length <= line_length {
             return single_line;
         }
     }
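`usize` is the textbook case for the lint: `&usize` is itself a machine word, so passing the reference saves nothing and forces a `*line_length` dereference at every comparison. A tiny illustration of the before/after:

// Before: the caller borrows a usize and the body dereferences it.
fn fits_before(import_length: usize, line_length: &usize) -> bool {
    import_length <= *line_length
}

// After: the word is copied into the parameter and compared directly.
fn fits_after(import_length: usize, line_length: usize) -> bool {
    import_length <= line_length
}

fn main() {
    assert!(fits_before(79, &88));
    assert!(fits_after(79, 88));
}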
@@ -400,7 +400,7 @@ fn sort_imports(block: ImportBlock) -> OrderedImportBlock {
 pub fn format_imports(
     block: &[&Stmt],
     comments: Vec<Comment>,
-    line_length: &usize,
+    line_length: usize,
     src: &[PathBuf],
     known_first_party: &BTreeSet<String>,
     known_third_party: &BTreeSet<String>,
@@ -55,7 +55,7 @@ pub fn check_imports(
     let expected = format_imports(
         &body,
         comments,
-        &(settings.line_length - indentation.len()),
+        settings.line_length - indentation.len(),
         &settings.src,
         &settings.isort.known_first_party,
         &settings.isort.known_third_party,
@@ -89,7 +89,7 @@ pub fn check(path: &Path, contents: &str, autofix: bool) -> Result<Vec<Check>> {
     let directives = directives::extract_directives(
         &tokens,
         &locator,
-        &directives::Flags::from_settings(&settings),
+        directives::Flags::from_settings(&settings),
     );

     // Generate checks.
@@ -151,7 +151,7 @@ pub fn lint_stdin(
     let directives = directives::extract_directives(
         &tokens,
         &locator,
-        &directives::Flags::from_settings(settings),
+        directives::Flags::from_settings(settings),
     );

     // Generate checks.
@@ -215,7 +215,7 @@ pub fn lint_path(
     let directives = directives::extract_directives(
         &tokens,
         &locator,
-        &directives::Flags::from_settings(settings),
+        directives::Flags::from_settings(settings),
     );

     // Generate checks.
@@ -269,7 +269,7 @@ pub fn add_noqa_to_path(path: &Path, settings: &Settings) -> Result<usize> {
     let directives = directives::extract_directives(
         &tokens,
         &locator,
-        &directives::Flags::from_settings(settings),
+        directives::Flags::from_settings(settings),
     );

     // Generate checks.
@@ -310,7 +310,7 @@ pub fn test_path(path: &Path, settings: &Settings, autofix: &fixer::Mode) -> Res
     let directives = directives::extract_directives(
         &tokens,
         &locator,
-        &directives::Flags::from_settings(settings),
+        directives::Flags::from_settings(settings),
     );
     check_path(
         path,
@@ -103,14 +103,14 @@ fn extract_quote(text: &str) -> &str {
 /// W605
 pub fn invalid_escape_sequence(
     locator: &SourceCodeLocator,
-    start: &Location,
-    end: &Location,
+    start: Location,
+    end: Location,
 ) -> Vec<Check> {
     let mut checks = vec![];

     let text = locator.slice_source_code_range(&Range {
-        location: *start,
-        end_location: *end,
+        location: start,
+        end_location: end,
     });

     // Determine whether the string is single- or triple-quoted.
@@ -13,7 +13,7 @@ use crate::source_code_locator::SourceCodeLocator;
 /// Generate a fix to remove a base from a ClassDef statement.
 pub fn remove_class_def_base(
     locator: &SourceCodeLocator,
-    stmt_at: &Location,
+    stmt_at: Location,
     expr_at: Location,
     bases: &[Expr],
     keywords: &[Keyword],
@@ -28,7 +28,7 @@ pub fn remove_class_def_base(
     for (start, tok, end) in lexer::make_tokenizer(&contents).flatten() {
         if matches!(tok, Tok::Lpar) {
             if count == 0 {
-                fix_start = Some(helpers::to_absolute(&start, stmt_at));
+                fix_start = Some(helpers::to_absolute(start, stmt_at));
             }
             count += 1;
         }
@@ -36,7 +36,7 @@ pub fn remove_class_def_base(
         if matches!(tok, Tok::Rpar) {
             count -= 1;
             if count == 0 {
-                fix_end = Some(helpers::to_absolute(&end, stmt_at));
+                fix_end = Some(helpers::to_absolute(end, stmt_at));
                 break;
             }
         }
@@ -59,7 +59,7 @@ pub fn remove_class_def_base(
     let mut fix_end: Option<Location> = None;
     let mut seen_comma = false;
     for (start, tok, end) in lexer::make_tokenizer(&contents).flatten() {
-        let start = helpers::to_absolute(&start, stmt_at);
+        let start = helpers::to_absolute(start, stmt_at);
         if seen_comma {
             if matches!(tok, Tok::Newline) {
                 fix_end = Some(end);
@@ -86,8 +86,8 @@ pub fn remove_class_def_base(
     let mut fix_start: Option<Location> = None;
     let mut fix_end: Option<Location> = None;
     for (start, tok, end) in lexer::make_tokenizer(&contents).flatten() {
-        let start = helpers::to_absolute(&start, stmt_at);
-        let end = helpers::to_absolute(&end, stmt_at);
+        let start = helpers::to_absolute(start, stmt_at);
+        let end = helpers::to_absolute(end, stmt_at);
         if start == expr_at {
             fix_end = Some(end);
             break;
@@ -17,7 +17,7 @@ pub fn useless_object_inheritance(
     if checker.patch(check.kind.code()) {
         if let Some(fix) = pyupgrade::fixes::remove_class_def_base(
             checker.locator,
-            &stmt.location,
+            stmt.location,
             check.location,
             bases,
             keywords,
@@ -1603,8 +1603,8 @@ pub enum Context {

 pub fn ambiguous_unicode_character(
     locator: &SourceCodeLocator,
-    start: &Location,
-    end: &Location,
+    start: Location,
+    end: Location,
     context: Context,
     settings: &Settings,
     autofix: &fixer::Mode,
@@ -1612,8 +1612,8 @@ pub fn ambiguous_unicode_character(
     let mut checks = vec![];

     let text = locator.slice_source_code_range(&Range {
-        location: *start,
-        end_location: *end,
+        location: start,
+        end_location: end,
     });

     let mut col_offset = 0;
@@ -25,7 +25,7 @@ impl<'a> SourceCodeLocator<'a> {
         self.rope.get_or_init(|| Rope::from_str(self.contents))
     }

-    pub fn slice_source_code_at(&self, location: &Location) -> Cow<'_, str> {
+    pub fn slice_source_code_at(&self, location: Location) -> Cow<'_, str> {
         let rope = self.get_or_init_rope();
         let offset = rope.line_to_char(location.row() - 1) + location.column();
         Cow::from(rope.slice(offset..))
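The same reasoning applies to the `SourceCodeLocator` method: `Location` is a small Copy struct, so the method now takes it by value and the call in `apply_fixes` above drops its `&`. A hypothetical usage sketch, assuming the locator is built with `SourceCodeLocator::new` over the full source text:

use rustpython_parser::ast::Location;

// Hypothetical helper: return everything from the start of line 2 onwards.
fn tail_from_line_two(contents: &str) -> String {
    let locator = SourceCodeLocator::new(contents);
    locator.slice_source_code_at(Location::new(2, 0)).to_string()
}

// tail_from_line_two("x = 1\ny = 2\n") would yield "y = 2\n".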