Upgrade dependencies (#4064)

This commit is contained in:
Micha Reiser 2023-04-22 19:04:01 +02:00 committed by GitHub
parent b7a57ce120
commit ba4f4f4672
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
15 changed files with 527 additions and 309 deletions

742
Cargo.lock generated

File diff suppressed because it is too large Load diff

View file

@@ -11,7 +11,7 @@ authors = ["Charlie Marsh <charlie.r.marsh@gmail.com>"]
[workspace.dependencies] [workspace.dependencies]
anyhow = { version = "1.0.69" } anyhow = { version = "1.0.69" }
bitflags = { version = "1.3.2" } bitflags = { version = "2.1.0" }
chrono = { version = "0.4.23", default-features = false, features = ["clock"] } chrono = { version = "0.4.23", default-features = false, features = ["clock"] }
clap = { version = "4.1.8", features = ["derive"] } clap = { version = "4.1.8", features = ["derive"] }
colored = { version = "2.0.0" } colored = { version = "2.0.0" }
@@ -44,7 +44,7 @@ similar = { version = "2.2.1" }
smallvec = { version = "1.10.0" } smallvec = { version = "1.10.0" }
strum = { version = "0.24.1", features = ["strum_macros"] } strum = { version = "0.24.1", features = ["strum_macros"] }
strum_macros = { version = "0.24.3" } strum_macros = { version = "0.24.3" }
syn = { version = "1.0.109" } syn = { version = "2.0.15" }
test-case = { version = "3.0.0" } test-case = { version = "3.0.0" }
textwrap = { version = "0.16.0" } textwrap = { version = "0.16.0" }
toml = { version = "0.7.2" } toml = { version = "0.7.2" }

View file

@@ -29,7 +29,7 @@ bitflags = { workspace = true }
chrono = { workspace = true } chrono = { workspace = true }
clap = { workspace = true, features = ["derive", "string"], optional = true } clap = { workspace = true, features = ["derive", "string"], optional = true }
colored = { workspace = true } colored = { workspace = true }
dirs = { version = "4.0.0" } dirs = { version = "5.0.0" }
fern = { version = "0.6.1" } fern = { version = "0.6.1" }
glob = { workspace = true } glob = { workspace = true }
globset = { workspace = true } globset = { workspace = true }

View file

@@ -9,7 +9,8 @@ use rustpython_parser::Tok;
use crate::settings::Settings; use crate::settings::Settings;
bitflags! { bitflags! {
pub struct Flags: u32 { #[derive(Debug, Copy, Clone)]
pub struct Flags: u8 {
const NOQA = 0b0000_0001; const NOQA = 0b0000_0001;
const ISORT = 0b0000_0010; const ISORT = 0b0000_0010;
} }

View file

@@ -58,7 +58,7 @@ mod whitespace_before_comment;
mod whitespace_before_parameters; mod whitespace_before_parameters;
bitflags! { bitflags! {
#[derive(Default)] #[derive(Default, Eq, PartialEq, Clone, Copy, Debug)]
pub(crate) struct TokenFlags: u8 { pub(crate) struct TokenFlags: u8 {
/// Whether the logical line contains an operator. /// Whether the logical line contains an operator.
const OPERATOR = 0b0000_0001; const OPERATOR = 0b0000_0001;

View file

@@ -26,7 +26,7 @@ use ruff::settings::types::SerializationFormat;
use crate::diagnostics::Diagnostics; use crate::diagnostics::Diagnostics;
bitflags! { bitflags! {
#[derive(Default)] #[derive(Default, Debug, Copy, Clone)]
pub(crate) struct Flags: u8 { pub(crate) struct Flags: u8 {
const SHOW_VIOLATIONS = 0b0000_0001; const SHOW_VIOLATIONS = 0b0000_0001;
const SHOW_FIXES = 0b0000_0010; const SHOW_FIXES = 0b0000_0010;

View file

@@ -3,8 +3,8 @@ use syn::parse::{Parse, ParseStream};
use syn::spanned::Spanned; use syn::spanned::Spanned;
use syn::token::Comma; use syn::token::Comma;
use syn::{ use syn::{
AngleBracketedGenericArguments, Attribute, Data, DataStruct, DeriveInput, Field, Fields, Lit, AngleBracketedGenericArguments, Attribute, Data, DataStruct, DeriveInput, ExprLit, Field,
LitStr, Path, PathArguments, PathSegment, Token, Type, TypePath, Fields, Lit, LitStr, Path, PathArguments, PathSegment, Token, Type, TypePath,
}; };
pub fn derive_impl(input: DeriveInput) -> syn::Result<proc_macro2::TokenStream> { pub fn derive_impl(input: DeriveInput) -> syn::Result<proc_macro2::TokenStream> {
@@ -21,7 +21,7 @@ pub fn derive_impl(input: DeriveInput) -> syn::Result<proc_macro2::TokenStream>
let docs: Vec<&Attribute> = field let docs: Vec<&Attribute> = field
.attrs .attrs
.iter() .iter()
.filter(|attr| attr.path.is_ident("doc")) .filter(|attr| attr.path().is_ident("doc"))
.collect(); .collect();
if docs.is_empty() { if docs.is_empty() {
@@ -31,14 +31,18 @@ pub fn derive_impl(input: DeriveInput) -> syn::Result<proc_macro2::TokenStream>
)); ));
} }
if let Some(attr) = field.attrs.iter().find(|attr| attr.path.is_ident("option")) { if let Some(attr) = field
.attrs
.iter()
.find(|attr| attr.path().is_ident("option"))
{
output.push(handle_option(field, attr, docs)?); output.push(handle_option(field, attr, docs)?);
}; };
if field if field
.attrs .attrs
.iter() .iter()
.any(|attr| attr.path.is_ident("option_group")) .any(|attr| attr.path().is_ident("option_group"))
{ {
output.push(handle_option_group(field)?); output.push(handle_option_group(field)?);
}; };
@@ -101,13 +105,13 @@ fn handle_option_group(field: &Field) -> syn::Result<proc_macro2::TokenStream> {
/// Parse a `doc` attribute into it a string literal. /// Parse a `doc` attribute into it a string literal.
fn parse_doc(doc: &Attribute) -> syn::Result<String> { fn parse_doc(doc: &Attribute) -> syn::Result<String> {
let doc = doc match &doc.meta {
.parse_meta()
.map_err(|e| syn::Error::new(doc.span(), e))?;
match doc {
syn::Meta::NameValue(syn::MetaNameValue { syn::Meta::NameValue(syn::MetaNameValue {
lit: Lit::Str(lit_str), value:
syn::Expr::Lit(ExprLit {
lit: Lit::Str(lit_str),
..
}),
.. ..
}) => Ok(lit_str.value()), }) => Ok(lit_str.value()),
_ => Err(syn::Error::new(doc.span(), "Expected doc attribute.")), _ => Err(syn::Error::new(doc.span(), "Expected doc attribute.")),

View file

@@ -19,7 +19,7 @@ pub fn derive_message_formats(func: &ItemFn) -> proc_macro2::TokenStream {
} }
fn parse_block(block: &Block, strings: &mut TokenStream) -> Result<(), TokenStream> { fn parse_block(block: &Block, strings: &mut TokenStream) -> Result<(), TokenStream> {
let Some(Stmt::Expr(last)) = block.stmts.last() else {panic!("expected last statement in block to be an expression")}; let Some(Stmt::Expr(last, _)) = block.stmts.last() else {panic!("expected last statement in block to be an expression")};
parse_expr(last, strings)?; parse_expr(last, strings)?;
Ok(()) Ok(())
} }

View file

@@ -14,7 +14,7 @@ pub fn map_codes(func: &ItemFn) -> syn::Result<TokenStream> {
let Some(last_stmt) = func.block.stmts.last() else { let Some(last_stmt) = func.block.stmts.last() else {
return Err(Error::new(func.block.span(), "expected body to end in an expression")); return Err(Error::new(func.block.span(), "expected body to end in an expression"));
}; };
let Stmt::Expr(Expr::Call(ExprCall{args: some_args, ..})) = last_stmt else { let Stmt::Expr(Expr::Call(ExprCall{args: some_args, ..}), _) = last_stmt else {
return Err(Error::new(last_stmt.span(), "expected last expression to be Some(match (..) { .. })")) return Err(Error::new(last_stmt.span(), "expected last expression to be Some(match (..) { .. })"))
}; };
let mut some_args = some_args.into_iter(); let mut some_args = some_args.into_iter();

View file

@@ -3,7 +3,7 @@ use std::collections::HashSet;
use quote::quote; use quote::quote;
use syn::spanned::Spanned; use syn::spanned::Spanned;
use syn::{Attribute, Data, DataEnum, DeriveInput, Error, Lit, Meta, MetaNameValue}; use syn::{Attribute, Data, DataEnum, DeriveInput, Error, ExprLit, Lit, Meta, MetaNameValue};
pub fn derive_impl(input: DeriveInput) -> syn::Result<proc_macro2::TokenStream> { pub fn derive_impl(input: DeriveInput) -> syn::Result<proc_macro2::TokenStream> {
let DeriveInput { ident, data: Data::Enum(DataEnum { let DeriveInput { ident, data: Data::Enum(DataEnum {
@@ -26,9 +26,9 @@ pub fn derive_impl(input: DeriveInput) -> syn::Result<proc_macro2::TokenStream>
let prefixes: Result<Vec<_>, _> = variant let prefixes: Result<Vec<_>, _> = variant
.attrs .attrs
.iter() .iter()
.filter(|attr| attr.path.is_ident("prefix")) .filter(|attr| attr.path().is_ident("prefix"))
.map(|attr| { .map(|attr| {
let Ok(Meta::NameValue(MetaNameValue{lit: Lit::Str(lit), ..})) = attr.parse_meta() else { let Meta::NameValue(MetaNameValue{value: syn::Expr::Lit (ExprLit { lit: Lit::Str(lit), ..}), ..}) = &attr.meta else {
return Err(Error::new(attr.span(), r#"expected attribute to be in the form of [#prefix = "..."]"#)); return Err(Error::new(attr.span(), r#"expected attribute to be in the form of [#prefix = "..."]"#));
}; };
let str = lit.value(); let str = lit.value();
@@ -53,7 +53,7 @@ pub fn derive_impl(input: DeriveInput) -> syn::Result<proc_macro2::TokenStream>
)); ));
} }
let Some(doc_attr) = variant.attrs.iter().find(|attr| attr.path.is_ident("doc")) else { let Some(doc_attr) = variant.attrs.iter().find(|attr| attr.path().is_ident("doc")) else {
return Err(Error::new(variant.span(), r#"expected a doc comment"#)) return Err(Error::new(variant.span(), r#"expected a doc comment"#))
}; };
@@ -125,7 +125,7 @@ pub fn derive_impl(input: DeriveInput) -> syn::Result<proc_macro2::TokenStream>
/// Parses an attribute in the form of `#[doc = " [name](https://example.com/)"]` /// Parses an attribute in the form of `#[doc = " [name](https://example.com/)"]`
/// into a tuple of link label and URL. /// into a tuple of link label and URL.
fn parse_doc_attr(doc_attr: &Attribute) -> syn::Result<(String, String)> { fn parse_doc_attr(doc_attr: &Attribute) -> syn::Result<(String, String)> {
let Ok(Meta::NameValue(MetaNameValue{lit: Lit::Str(doc_lit), ..})) = doc_attr.parse_meta() else { let Meta::NameValue(MetaNameValue{value: syn::Expr::Lit(ExprLit { lit: Lit::Str(doc_lit), ..}), ..}) = &doc_attr.meta else {
return Err(Error::new(doc_attr.span(), r#"expected doc attribute to be in the form of #[doc = "..."]"#)) return Err(Error::new(doc_attr.span(), r#"expected doc attribute to be in the form of #[doc = "..."]"#))
}; };
parse_markdown_link(doc_lit.value().trim()) parse_markdown_link(doc_lit.value().trim())

View file

@@ -2,8 +2,11 @@ use proc_macro2::TokenStream;
use quote::quote; use quote::quote;
use syn::{Attribute, Error, ItemStruct, Lit, LitStr, Meta, Result}; use syn::{Attribute, Error, ItemStruct, Lit, LitStr, Meta, Result};
fn parse_attr<const LEN: usize>(path: [&'static str; LEN], attr: &Attribute) -> Option<LitStr> { fn parse_attr<'a, const LEN: usize>(
if let Meta::NameValue(name_value) = attr.parse_meta().ok()? { path: [&'static str; LEN],
attr: &'a Attribute,
) -> Option<&'a LitStr> {
if let Meta::NameValue(name_value) = &attr.meta {
let path_idents = name_value let path_idents = name_value
.path .path
.segments .segments
@@ -11,7 +14,10 @@ fn parse_attr<const LEN: usize>(path: [&'static str; LEN], attr: &Attribute) ->
.map(|segment| &segment.ident); .map(|segment| &segment.ident);
if itertools::equal(path_idents, path) { if itertools::equal(path_idents, path) {
if let Lit::Str(lit) = name_value.lit { if let syn::Expr::Lit(syn::ExprLit {
lit: Lit::Str(lit), ..
}) = &name_value.value
{
return Some(lit); return Some(lit);
} }
} }
@@ -24,7 +30,7 @@ fn parse_attr<const LEN: usize>(path: [&'static str; LEN], attr: &Attribute) ->
fn get_docs(attrs: &[Attribute]) -> Result<String> { fn get_docs(attrs: &[Attribute]) -> Result<String> {
let mut explanation = String::new(); let mut explanation = String::new();
for attr in attrs { for attr in attrs {
if attr.path.is_ident("doc") { if attr.path().is_ident("doc") {
if let Some(lit) = parse_attr(["doc"], attr) { if let Some(lit) = parse_attr(["doc"], attr) {
let value = lit.value(); let value = lit.value();
// `/// ` adds // `/// ` adds

View file

@@ -2,8 +2,8 @@ use bitflags::bitflags;
use rustpython_parser::ast::{Constant, Expr, ExprKind, Stmt, StmtKind}; use rustpython_parser::ast::{Constant, Expr, ExprKind, Stmt, StmtKind};
bitflags! { bitflags! {
#[derive(Default)] #[derive(Default, Debug, Copy, Clone, PartialEq, Eq)]
pub struct AllNamesFlags: u32 { pub struct AllNamesFlags: u8 {
const INVALID_FORMAT = 0b0000_0001; const INVALID_FORMAT = 0b0000_0001;
const INVALID_OBJECT = 0b0000_0010; const INVALID_OBJECT = 0b0000_0010;
} }

View file

@@ -267,7 +267,8 @@ pub enum BindingKind<'a> {
} }
bitflags! { bitflags! {
pub struct Exceptions: u32 { #[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub struct Exceptions: u8 {
const NAME_ERROR = 0b0000_0001; const NAME_ERROR = 0b0000_0001;
const MODULE_NOT_FOUND_ERROR = 0b0000_0010; const MODULE_NOT_FOUND_ERROR = 0b0000_0010;
const IMPORT_ERROR = 0b0000_0100; const IMPORT_ERROR = 0b0000_0100;

View file

@@ -44,10 +44,13 @@ impl Parse for ArgValue {
let inner; let inner;
let _ = bracketed!(inner in input); let _ = bracketed!(inner in input);
let values = inner.parse_terminated(|parser| { let values = inner.parse_terminated(
let value: LitStr = parser.parse()?; |parser| {
Ok(value) let value: LitStr = parser.parse()?;
})?; Ok(value)
},
Token![,],
)?;
ArgValue::List(values) ArgValue::List(values)
} else { } else {
ArgValue::LitStr(input.parse()?) ArgValue::LitStr(input.parse()?)
@@ -59,7 +62,7 @@ impl Parse for ArgValue {
impl Parse for FixtureConfiguration { impl Parse for FixtureConfiguration {
fn parse(input: ParseStream) -> syn::Result<Self> { fn parse(input: ParseStream) -> syn::Result<Self> {
let args: Punctuated<_, Token![,]> = input.parse_terminated(Arg::parse)?; let args: Punctuated<_, Token![,]> = input.parse_terminated(Arg::parse, Token![,])?;
let mut pattern = None; let mut pattern = None;
let mut exclude = None; let mut exclude = None;
@@ -186,7 +189,9 @@ fn generate_fixtures(
configuration: &FixtureConfiguration, configuration: &FixtureConfiguration,
) -> syn::Result<proc_macro2::TokenStream> { ) -> syn::Result<proc_macro2::TokenStream> {
// Remove the fixtures attribute // Remove the fixtures attribute
test_fn.attrs.retain(|attr| !attr.path.is_ident("fixtures")); test_fn
.attrs
.retain(|attr| !attr.path().is_ident("fixtures"));
// Extract the name of the only argument of the test function. // Extract the name of the only argument of the test function.
let last_arg = test_fn.sig.inputs.last(); let last_arg = test_fn.sig.inputs.last();

View file

@@ -18,7 +18,8 @@ ruff_python_ast = { path = "../ruff_python_ast" }
ruff_rustpython = { path = "../ruff_rustpython" } ruff_rustpython = { path = "../ruff_rustpython" }
console_error_panic_hook = { version = "0.1.7", optional = true } console_error_panic_hook = { version = "0.1.7", optional = true }
console_log = { version = "0.2.1" } console_log = { version = "1.0.0" }
getrandom = { version = "0.2.8", features = ["js"] } getrandom = { version = "0.2.8", features = ["js"] }
log = { workspace = true } log = { workspace = true }
ruff = { path = "../ruff" } ruff = { path = "../ruff" }