Mirror of https://github.com/astral-sh/ruff.git (synced 2025-08-03 18:28:56 +00:00)

Upgrade to Rust 1.82 toolchain (#13808)

parent 4ecfe95295
commit ff72055558

11 changed files with 54 additions and 50 deletions

.github/workflows/ci.yaml (vendored, 2 changes)

@@ -193,7 +193,7 @@ jobs:
         run: rustup target add wasm32-unknown-unknown
       - uses: actions/setup-node@v4
         with:
-          node-version: 18
+          node-version: 20
           cache: "npm"
           cache-dependency-path: playground/package-lock.json
       - uses: jetli/wasm-pack-action@v0.4.0

.github/workflows/publish-playground.yml (vendored, 2 changes)

@@ -29,7 +29,7 @@ jobs:
         run: rustup target add wasm32-unknown-unknown
       - uses: actions/setup-node@v4
         with:
-          node-version: 18
+          node-version: 20
           cache: "npm"
           cache-dependency-path: playground/package-lock.json
      - uses: jetli/wasm-pack-action@v0.4.0

.github/workflows/publish-wasm.yml (vendored, 2 changes)

@@ -43,7 +43,7 @@ jobs:
       - run: cp LICENSE crates/ruff_wasm/pkg # wasm-pack does not put the LICENSE file in the pkg
       - uses: actions/setup-node@v4
         with:
-          node-version: 18
+          node-version: 20
           registry-url: "https://registry.npmjs.org"
       - name: "Publish (dry-run)"
         if: ${{ inputs.plan == '' || fromJson(inputs.plan).announcement_tag_is_implicit }}

@@ -1,6 +1,7 @@
 //! Scheduling, I/O, and API endpoints.
 
 use std::num::NonZeroUsize;
+#[allow(deprecated)]
 use std::panic::PanicInfo;
 
 use lsp_server::Message;
@@ -119,6 +120,7 @@ impl Server {
     }
 
     pub(crate) fn run(self) -> crate::Result<()> {
+        #[allow(deprecated)]
         type PanicHook = Box<dyn Fn(&PanicInfo<'_>) + 'static + Sync + Send>;
         struct RestorePanicHook {
             hook: Option<PanicHook>,
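
The `#[allow(deprecated)]` lines added above (and in the matching server module later in this diff) track a toolchain change rather than a behavioural one: Rust 1.81 renamed the panic-hook argument type `std::panic::PanicInfo` to `std::panic::PanicHookInfo`, and the old alias starts emitting deprecation warnings on the 1.82 toolchain, which a warnings-as-errors build would reject. As a minimal standalone sketch (illustrative only, not taken from the ruff sources), a hook written directly against the new name needs no allow attribute:

use std::panic;

fn install_hook() {
    // Rust 1.81 renamed the hook argument type to `std::panic::PanicHookInfo`;
    // the old `std::panic::PanicInfo` alias still compiles but is flagged as
    // deprecated from the 1.82 toolchain onward, which is what the
    // `#[allow(deprecated)]` attributes in the hunks above suppress.
    panic::set_hook(Box::new(|info: &panic::PanicHookInfo<'_>| {
        eprintln!("panic hook invoked: {info}");
    }));
}

fn main() {
    install_hook();
    panic!("demo panic");
}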

@@ -235,38 +235,35 @@ fn clean_params_dictionary(right: &Expr, locator: &Locator, stylist: &Stylist) -
     let mut seen: Vec<&str> = vec![];
     let mut indent = None;
     for ast::DictItem { key, value } in items {
-        match key {
-            Some(key) => {
-                if let Expr::StringLiteral(ast::ExprStringLiteral {
-                    value: key_string, ..
-                }) = key
-                {
-                    // If the dictionary key is not a valid variable name, abort.
-                    if !is_identifier(key_string.to_str()) {
-                        return None;
-                    }
-                    // If there are multiple entries of the same key, abort.
-                    if seen.contains(&key_string.to_str()) {
-                        return None;
-                    }
-                    seen.push(key_string.to_str());
-                    if is_multi_line {
-                        if indent.is_none() {
-                            indent = indentation(locator, key);
-                        }
-                    }
-
-                    let value_string = locator.slice(value);
-                    arguments.push(format!("{key_string}={value_string}"));
-                } else {
-                    // If there are any non-string keys, abort.
-                    return None;
-                }
-            }
-            None => {
-                let value_string = locator.slice(value);
-                arguments.push(format!("**{value_string}"));
-            }
+        if let Some(key) = key {
+            if let Expr::StringLiteral(ast::ExprStringLiteral {
+                value: key_string, ..
+            }) = key
+            {
+                // If the dictionary key is not a valid variable name, abort.
+                if !is_identifier(key_string.to_str()) {
+                    return None;
+                }
+                // If there are multiple entries of the same key, abort.
+                if seen.contains(&key_string.to_str()) {
+                    return None;
+                }
+                seen.push(key_string.to_str());
+                if is_multi_line {
+                    if indent.is_none() {
+                        indent = indentation(locator, key);
+                    }
+                }
+
+                let value_string = locator.slice(value);
+                arguments.push(format!("{key_string}={value_string}"));
+            } else {
+                // If there are any non-string keys, abort.
+                return None;
+            }
+        } else {
+            let value_string = locator.slice(value);
+            arguments.push(format!("**{value_string}"));
         }
     }
     // If we couldn't parse out key values, abort.
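
The hunk above, like several that follow, replaces a two-arm `match` on an `Option` with `if let ... else`; the behaviour is unchanged and one level of nesting (plus a few closing braces) disappears. It reads as cleanup done alongside the toolchain bump, possibly prompted by the newer clippy. A self-contained sketch of the same rewrite, with illustrative names rather than anything from the ruff sources:

fn describe(label: Option<&str>) -> String {
    // Before the cleanup this would read:
    //     match label {
    //         Some(name) => format!("labelled {name}"),
    //         None => "unlabelled".to_string(),
    //     }
    if let Some(name) = label {
        format!("labelled {name}")
    } else {
        "unlabelled".to_string()
    }
}

fn main() {
    assert_eq!(describe(Some("x")), "labelled x");
    assert_eq!(describe(None), "unlabelled");
}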

@@ -75,12 +75,11 @@ pub(crate) fn derive_cache_key(item: &DeriveInput) -> syn::Result<TokenStream> {
                 }
             }
 
-            let field_attr = match &field.ident {
-                Some(ident) => quote!(self.#ident),
-                None => {
-                    let index = syn::Index::from(i);
-                    quote!(self.#index)
-                }
-            };
+            let field_attr = if let Some(ident) = &field.ident {
+                quote!(self.#ident)
+            } else {
+                let index = syn::Index::from(i);
+                quote!(self.#index)
+            };
 
             fields.push(quote!(#field_attr.cache_key(key);));

@@ -143,9 +143,10 @@ pub(crate) fn map_codes(func: &ItemFn) -> syn::Result<TokenStream> {
     for (prefix, rules) in &rules_by_prefix {
         let prefix_ident = get_prefix_ident(prefix);
         let attrs = intersection_all(rules.iter().map(|(.., attrs)| attrs.as_slice()));
-        let attrs = match attrs.as_slice() {
-            [] => quote!(),
-            [..] => quote!(#(#attrs)*),
+        let attrs = if attrs.is_empty() {
+            quote!()
+        } else {
+            quote!(#(#attrs)*)
         };
         all_codes.push(quote! {
             #attrs Self::#linter(#linter::#prefix_ident)
@@ -161,9 +162,10 @@ pub(crate) fn map_codes(func: &ItemFn) -> syn::Result<TokenStream> {
         });
         let prefix_ident = get_prefix_ident(&prefix);
         let attrs = intersection_all(rules.iter().map(|(.., attrs)| attrs.as_slice()));
-        let attrs = match attrs.as_slice() {
-            [] => quote!(),
-            [..] => quote!(#(#attrs)*),
+        let attrs = if attrs.is_empty() {
+            quote!()
+        } else {
+            quote!(#(#attrs)*)
         };
         prefix_into_iter_match_arms.extend(quote! {
             #attrs #linter::#prefix_ident => vec![#(#rule_paths,)*].into_iter(),

@@ -90,9 +90,10 @@ fn attributes_for_prefix(
     attributes: &BTreeMap<String, &[Attribute]>,
 ) -> proc_macro2::TokenStream {
     let attrs = intersection_all(codes.iter().map(|code| attributes[code]));
-    match attrs.as_slice() {
-        [] => quote!(),
-        [..] => quote!(#(#attrs)*),
+    if attrs.is_empty() {
+        quote!()
+    } else {
+        quote!(#(#attrs)*)
     }
 }
 
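
The three macro-generation hunks above share a second small pattern: a `match` on `attrs.as_slice()` with an empty-slice arm `[]` and a catch-all `[..]` arm becomes a plain `if attrs.is_empty()` check. A minimal sketch of the same change, using a slice of `String` as a stand-in for the real attribute lists:

fn render(attrs: &[String]) -> String {
    // Before: match attrs { [] => String::new(), [..] => attrs.concat() }
    if attrs.is_empty() {
        String::new()
    } else {
        attrs.concat()
    }
}

fn main() {
    assert_eq!(render(&[]), "");
    assert_eq!(render(&["a".to_string(), "b".to_string()]), "ab");
}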

@@ -18,9 +18,10 @@ impl FormatNodeRule<PatternMatchStar> for FormatPatternMatchStar {
 
         write!(f, [token("*"), dangling_comments(dangling)])?;
 
-        match name {
-            Some(name) => write!(f, [name.format()]),
-            None => write!(f, [token("_")]),
+        if let Some(name) = name {
+            write!(f, [name.format()])
+        } else {
+            write!(f, [token("_")])
         }
     }
 }

@@ -6,6 +6,7 @@ use lsp_types::InitializeParams;
 use lsp_types::WorkspaceFolder;
 use std::num::NonZeroUsize;
 use std::ops::Deref;
+#[allow(deprecated)]
 use std::panic::PanicInfo;
 use std::str::FromStr;
 use thiserror::Error;
@@ -125,6 +126,7 @@ impl Server {
     }
 
     pub fn run(self) -> crate::Result<()> {
+        #[allow(deprecated)]
         type PanicHook = Box<dyn Fn(&PanicInfo<'_>) + 'static + Sync + Send>;
         struct RestorePanicHook {
             hook: Option<PanicHook>,

rust-toolchain.toml (2 changes)

@@ -1,2 +1,2 @@
 [toolchain]
-channel = "1.81"
+channel = "1.82"