Mirror of https://github.com/astral-sh/ruff.git, synced 2025-09-26 20:10:09 +00:00
Remove some unused pub functions (#11576)

## Summary

I left anything in `red-knot`, any `with_` methods, etc. untouched.
This commit is contained in:
parent 3989cb8b56
commit 16acd4913f
14 changed files with 7 additions and 495 deletions
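A note for context rather than part of the diff: rustc's `dead_code` lint generally stays quiet about unused `pub` items in a library crate, since downstream crates might call them, so removals like the ones below are found by searching for callers rather than by compiler warnings. A minimal illustration with made-up names:

```rust
// Build as a library, e.g. `rustc --crate-type lib lint_demo.rs` (hypothetical file name).

// Private and never called: the `dead_code` lint reports this function.
fn helper_never_called() {}

// Public and never called: no warning, rustc assumes an external crate may use it.
pub fn exported_never_called() {}

pub fn entry_point() {
    // Nothing in this crate calls either function above.
}
```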
Cargo.lock (generated), 38 lines changed:

```diff
@@ -886,12 +886,6 @@ version = "0.3.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024"
 
-[[package]]
-name = "hexf-parse"
-version = "0.2.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dfa686283ad6dd069f105e5ab091b04c62850d3e4cf5d67debad1933f55023df"
-
 [[package]]
 name = "home"
 version = "0.5.9"
@@ -1176,36 +1170,6 @@ version = "1.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
 
-[[package]]
-name = "lexical-parse-float"
-version = "0.8.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "683b3a5ebd0130b8fb52ba0bdc718cc56815b6a097e28ae5a6997d0ad17dc05f"
-dependencies = [
- "lexical-parse-integer",
- "lexical-util",
- "static_assertions",
-]
-
-[[package]]
-name = "lexical-parse-integer"
-version = "0.8.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6d0994485ed0c312f6d965766754ea177d07f9c00c9b82a5ee62ed5b47945ee9"
-dependencies = [
- "lexical-util",
- "static_assertions",
-]
-
-[[package]]
-name = "lexical-util"
-version = "0.8.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5255b9ff16ff898710eb9eb63cb39248ea8a5bb036bea8085b1a767ff6c4e3fc"
-dependencies = [
- "static_assertions",
-]
-
 [[package]]
 name = "libc"
 version = "0.2.155"
@@ -2277,9 +2241,7 @@ name = "ruff_python_literal"
 version = "0.0.0"
 dependencies = [
  "bitflags 2.5.0",
- "hexf-parse",
  "itertools 0.12.1",
- "lexical-parse-float",
  "ruff_python_ast",
  "unic-ucd-category",
 ]
```
```diff
@@ -62,7 +62,6 @@ filetime = { version = "0.2.23" }
 glob = { version = "0.3.1" }
 globset = { version = "0.4.14" }
 hashbrown = "0.14.3"
-hexf-parse = { version = "0.2.1" }
 ignore = { version = "0.4.22" }
 imara-diff = { version = "0.1.5" }
 imperative = { version = "1.0.4" }
@@ -76,12 +75,11 @@ is-wsl = { version = "0.4.0" }
 itertools = { version = "0.12.1" }
 js-sys = { version = "0.3.69" }
 jod-thread = { version = "0.1.2" }
-lexical-parse-float = { version = "0.8.0", features = ["format"] }
 libc = { version = "0.2.153" }
 libcst = { version = "1.1.0", default-features = false }
 log = { version = "0.4.17" }
 lsp-server = { version = "0.7.6" }
-lsp-types = { git="https://github.com/astral-sh/lsp-types.git", rev = "3512a9f", features = ["proposed"] }
+lsp-types = { git = "https://github.com/astral-sh/lsp-types.git", rev = "3512a9f", features = ["proposed"] }
 matchit = { version = "0.8.1" }
 memchr = { version = "2.7.1" }
 mimalloc = { version = "0.1.39" }
```
```diff
@@ -553,11 +553,6 @@ impl PrintedRange {
     pub fn source_range(&self) -> TextRange {
         self.source_range
     }
-
-    #[must_use]
-    pub fn with_code(self, code: String) -> Self {
-        Self { code, ..self }
-    }
 }
 
 /// Public return type of the formatter
@@ -780,10 +775,6 @@ where
         self.item = item;
         self
     }
-
-    pub fn into_item(self) -> T {
-        self.item
-    }
 }
 
 impl<T, R, C> Format<C> for FormatOwnedWithRule<T, R, C>
```
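The two methods deleted above are small consuming helpers. A standalone sketch of the pattern they used, a builder that swaps one field with struct-update syntax plus an accessor that takes ownership; the `Printed` type here is a simplified stand-in, not the crate's real `PrintedRange`:

```rust
// Simplified stand-in for the formatter's printed-output type.
struct Printed {
    code: String,
    source_range: (u32, u32),
}

impl Printed {
    /// Consume `self` and return a copy with only `code` replaced.
    #[must_use]
    fn with_code(self, code: String) -> Self {
        Self { code, ..self }
    }

    /// Consume `self` and hand back the owned field.
    fn into_code(self) -> String {
        self.code
    }
}

fn main() {
    let printed = Printed { code: "x=1".to_string(), source_range: (0, 3) };
    let updated = printed.with_code("x = 1".to_string());
    assert_eq!(updated.source_range, (0, 3)); // untouched fields carry over
    assert_eq!(updated.into_code(), "x = 1");
}
```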
```diff
@@ -46,22 +46,6 @@ pub struct IsortDirectives {
     pub skip_file: bool,
 }
-
-impl IsortDirectives {
-    pub fn is_excluded(&self, offset: TextSize) -> bool {
-        for range in &self.exclusions {
-            if range.contains(offset) {
-                return true;
-            }
-
-            if range.start() > offset {
-                break;
-            }
-        }
-
-        false
-    }
-}
 
 pub struct Directives {
     pub noqa_line_for: NoqaMapping,
     pub isort: IsortDirectives,
```
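A standalone sketch of the scan `is_excluded` performed, with plain `Range<u32>` values standing in for `TextRange` and assuming, as the early `break` implies, that the exclusion ranges are sorted by start offset:

```rust
use std::ops::Range;

/// Returns `true` if `offset` falls inside any exclusion range.
/// The ranges are assumed to be sorted by start, so the scan can stop early.
fn is_excluded(exclusions: &[Range<u32>], offset: u32) -> bool {
    for range in exclusions {
        if range.contains(&offset) {
            return true;
        }
        if range.start > offset {
            break; // every later range starts even further past `offset`
        }
    }
    false
}

fn main() {
    let exclusions = [5..10, 20..30];
    assert!(is_excluded(&exclusions, 7));
    assert!(!is_excluded(&exclusions, 15));
    assert!(!is_excluded(&exclusions, 35));
}
```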
```diff
@@ -383,26 +383,6 @@ impl KnownModules {
         Some((section, reason))
     }
-
-    /// Return the list of modules that are known to be of a given type.
-    pub fn modules_for_known_type(
-        &self,
-        import_type: ImportType,
-    ) -> impl Iterator<Item = &glob::Pattern> {
-        self.known
-            .iter()
-            .filter_map(move |(module, known_section)| {
-                if let ImportSection::Known(section) = known_section {
-                    if *section == import_type {
-                        Some(module)
-                    } else {
-                        None
-                    }
-                } else {
-                    None
-                }
-            })
-    }
 
     /// Return the list of user-defined modules, indexed by section.
     pub fn user_defined(&self) -> FxHashMap<&str, Vec<&glob::Pattern>> {
         let mut user_defined: FxHashMap<&str, Vec<&glob::Pattern>> = FxHashMap::default();
```
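A standalone sketch of the filtering `modules_for_known_type` did, with plain strings in place of `glob::Pattern` and a two-variant enum in place of the real `ImportType`:

```rust
use std::collections::BTreeMap;

#[derive(Clone, Copy, PartialEq, Eq)]
enum ImportType {
    FirstParty,
    ThirdParty,
}

/// Yield only the module patterns registered under `import_type`.
fn modules_for<'a>(
    known: &'a BTreeMap<String, ImportType>,
    import_type: ImportType,
) -> impl Iterator<Item = &'a str> {
    known
        .iter()
        .filter_map(move |(module, section)| (*section == import_type).then_some(module.as_str()))
}

fn main() {
    let known = BTreeMap::from([
        ("my_app".to_string(), ImportType::FirstParty),
        ("requests".to_string(), ImportType::ThirdParty),
    ]);
    let first_party: Vec<_> = modules_for(&known, ImportType::FirstParty).collect();
    assert_eq!(first_party, ["my_app"]);
}
```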
```diff
@@ -59,10 +59,6 @@ impl<'a> QualifiedName<'a> {
         matches!(self.segments(), ["", ..])
     }
-
-    pub fn is_user_defined(&self) -> bool {
-        !self.is_builtin()
-    }
 
     /// If the call path is dot-prefixed, it's an unresolved relative import.
     /// Ex) `[".foo", "bar"]` -> `".foo.bar"`
     pub fn is_unresolved_import(&self) -> bool {
```
```diff
@@ -3729,20 +3729,6 @@ impl fmt::Display for IpyEscapeKind {
 }
 
 impl IpyEscapeKind {
-    /// Returns the length of the escape kind token.
-    pub fn prefix_len(self) -> TextSize {
-        let len = match self {
-            IpyEscapeKind::Shell
-            | IpyEscapeKind::Magic
-            | IpyEscapeKind::Help
-            | IpyEscapeKind::Quote
-            | IpyEscapeKind::Quote2
-            | IpyEscapeKind::Paren => 1,
-            IpyEscapeKind::ShCap | IpyEscapeKind::Magic2 | IpyEscapeKind::Help2 => 2,
-        };
-        len.into()
-    }
-
     /// Returns `true` if the escape kind is help i.e., `?` or `??`.
     pub const fn is_help(self) -> bool {
         matches!(self, IpyEscapeKind::Help | IpyEscapeKind::Help2)
```
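A standalone sketch mirroring the removed `prefix_len`: IPython escape prefixes are either one character (for example `!`, `%`, `?`) or two (`!!`, `%%`, `??`), and the method reported that width so a caller could skip past the prefix:

```rust
// Variant set copied from the diff above; not every variant is exercised below.
#[allow(dead_code)]
#[derive(Clone, Copy)]
enum IpyEscapeKind {
    Shell,
    ShCap,
    Magic,
    Magic2,
    Help,
    Help2,
    Quote,
    Quote2,
    Paren,
}

impl IpyEscapeKind {
    /// Length in characters of the escape token itself.
    fn prefix_len(self) -> usize {
        match self {
            Self::Shell | Self::Magic | Self::Help | Self::Quote | Self::Quote2 | Self::Paren => 1,
            Self::ShCap | Self::Magic2 | Self::Help2 => 2,
        }
    }
}

fn main() {
    assert_eq!(IpyEscapeKind::Help.prefix_len(), 1);  // `?`
    assert_eq!(IpyEscapeKind::Help2.prefix_len(), 2); // `??`
}
```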
```diff
@@ -79,15 +79,3 @@ pub fn next_sibling<'a>(stmt: &'a Stmt, suite: &'a Suite) -> Option<&'a Stmt> {
     }
     None
 }
-
-/// Given a [`Stmt`] and its containing [`Suite`], return the previous [`Stmt`] in the [`Suite`].
-pub fn prev_sibling<'a>(stmt: &'a Stmt, suite: &'a Suite) -> Option<&'a Stmt> {
-    let mut prev = None;
-    for sibling in suite {
-        if sibling == stmt {
-            return prev;
-        }
-        prev = Some(sibling);
-    }
-    None
-}
```
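A standalone sketch of the removed `prev_sibling`, written over a plain slice rather than a `Suite` of `Stmt`s: remember the last element seen until the target is reached.

```rust
/// Return the element that immediately precedes `target` in `suite`, if any.
fn prev_sibling<'a, T: PartialEq>(target: &T, suite: &'a [T]) -> Option<&'a T> {
    let mut prev = None;
    for sibling in suite {
        if sibling == target {
            return prev;
        }
        prev = Some(sibling);
    }
    None
}

fn main() {
    let suite = ["import os", "x = 1", "print(x)"];
    assert_eq!(prev_sibling(&"x = 1", &suite), Some(&"import os"));
    assert_eq!(prev_sibling(&"import os", &suite), None);
}
```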
```diff
@@ -18,9 +18,7 @@ doctest = false
 ruff_python_ast = { workspace = true }
 
 bitflags = { workspace = true }
-hexf-parse = { workspace = true }
 itertools = { workspace = true }
-lexical-parse-float = { workspace = true, features = ["format"] }
 unic-ucd-category = { workspace = true }
 
 [dev-dependencies]
```
```diff
@@ -1,12 +1,13 @@
 //! Implementation of Printf-Style string formatting
 //! as per the [Python Docs](https://docs.python.org/3/library/stdtypes.html#printf-style-string-formatting).
-use bitflags::bitflags;
 use std::{
     fmt,
     iter::{Enumerate, Peekable},
     str::FromStr,
 };
 
+use bitflags::bitflags;
+
 use crate::Case;
 
 #[derive(Debug, PartialEq)]
@@ -96,19 +97,6 @@ bitflags! {
     }
 }
-
-impl CConversionFlags {
-    #[inline]
-    pub fn sign_string(&self) -> &'static str {
-        if self.contains(CConversionFlags::SIGN_CHAR) {
-            "+"
-        } else if self.contains(CConversionFlags::BLANK_SIGN) {
-            " "
-        } else {
-            ""
-        }
-    }
-}
 
 #[derive(Debug, PartialEq)]
 pub enum CFormatQuantity {
     Amount(usize),
@@ -337,44 +325,12 @@ pub enum CFormatPart<T> {
     Spec(CFormatSpec),
 }
-
-impl<T> CFormatPart<T> {
-    #[inline]
-    pub fn is_specifier(&self) -> bool {
-        matches!(self, CFormatPart::Spec(_))
-    }
-
-    #[inline]
-    pub fn has_key(&self) -> bool {
-        match self {
-            CFormatPart::Spec(s) => s.mapping_key.is_some(),
-            CFormatPart::Literal(_) => false,
-        }
-    }
-}
 
 #[derive(Debug, PartialEq)]
 pub struct CFormatStrOrBytes<S> {
     parts: Vec<(usize, CFormatPart<S>)>,
 }
 
 impl<S> CFormatStrOrBytes<S> {
-    pub fn check_specifiers(&self) -> Option<(usize, bool)> {
-        let mut count = 0;
-        let mut mapping_required = false;
-        for (_, part) in &self.parts {
-            if part.is_specifier() {
-                let has_key = part.has_key();
-                if count == 0 {
-                    mapping_required = has_key;
-                } else if mapping_required != has_key {
-                    return None;
-                }
-                count += 1;
-            }
-        }
-        Some((count, mapping_required))
-    }
-
     #[inline]
     pub fn iter(&self) -> impl Iterator<Item = &(usize, CFormatPart<S>)> {
         self.parts.iter()
@@ -430,11 +386,6 @@ impl CFormatBytes {
         }
         Ok(Self { parts })
     }
-
-    pub fn parse_from_bytes(bytes: &[u8]) -> Result<Self, CFormatError> {
-        let mut iter = bytes.iter().copied().enumerate().peekable();
-        Self::parse(&mut iter)
-    }
 }
 
 pub type CFormatString = CFormatStrOrBytes<String>;
```
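A standalone sketch of the invariant `check_specifiers` enforced: count the conversion specifiers in a printf-style template and require that either every specifier carries a mapping key (as in `%(name)s`) or none does; a mix yields `None`. The `Part` type below is a simplified stand-in for `CFormatPart`:

```rust
enum Part {
    Literal(String),
    Spec { mapping_key: Option<String> },
}

/// Returns `(specifier_count, mapping_required)`, or `None` when keyed and
/// positional specifiers are mixed in the same template.
fn check_specifiers(parts: &[Part]) -> Option<(usize, bool)> {
    let mut count = 0;
    let mut mapping_required = false;
    for part in parts {
        if let Part::Spec { mapping_key } = part {
            let has_key = mapping_key.is_some();
            if count == 0 {
                mapping_required = has_key;
            } else if mapping_required != has_key {
                return None;
            }
            count += 1;
        }
    }
    Some((count, mapping_required))
}

fn main() {
    // "%(name)s: %(age)d" -> two specifiers, mapping required.
    let keyed = [
        Part::Spec { mapping_key: Some("name".into()) },
        Part::Literal(": ".into()),
        Part::Spec { mapping_key: Some("age".into()) },
    ];
    assert_eq!(check_specifiers(&keyed), Some((2, true)));

    // "%(name)s: %d" mixes keyed and positional specifiers -> rejected.
    let mixed = [
        Part::Spec { mapping_key: Some("name".into()) },
        Part::Spec { mapping_key: None },
    ];
    assert_eq!(check_specifiers(&mixed), None);
}
```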
```diff
@@ -50,11 +50,6 @@ pub struct UnicodeEscape<'a> {
 }
 
 impl<'a> UnicodeEscape<'a> {
-    #[inline]
-    pub fn with_forced_quote(source: &'a str, quote: Quote) -> Self {
-        let layout = EscapeLayout { quote, len: None };
-        Self { source, layout }
-    }
     #[inline]
     pub fn with_preferred_quote(source: &'a str, quote: Quote) -> Self {
         let layout = Self::repr_layout(source, quote);
@@ -240,11 +235,6 @@ impl<'a> AsciiEscape<'a> {
         Self { source, layout }
     }
     #[inline]
-    pub fn with_forced_quote(source: &'a [u8], quote: Quote) -> Self {
-        let layout = EscapeLayout { quote, len: None };
-        Self { source, layout }
-    }
-    #[inline]
     pub fn with_preferred_quote(source: &'a [u8], quote: Quote) -> Self {
         let layout = Self::repr_layout(source, quote);
         Self { source, layout }
@@ -271,17 +261,6 @@ impl AsciiEscape<'_> {
         })
     }
-
-    #[allow(
-        clippy::cast_possible_wrap,
-        clippy::cast_possible_truncation,
-        clippy::cast_sign_loss
-    )]
-    pub fn named_repr_layout(source: &[u8], name: &str) -> EscapeLayout {
-        Self::output_layout_with_checker(source, Quote::Single, name.len() + 2 + 3, |a, b| {
-            Some((a as isize).checked_add(b as isize)? as usize)
-        })
-    }
 
     fn output_layout_with_checker(
         source: &[u8],
         preferred_quote: Quote,
```
```diff
@@ -1,178 +1,9 @@
 use std::f64;
 
-use crate::Case;
-
-pub fn parse_str(literal: &str) -> Option<f64> {
-    parse_inner(literal.trim().as_bytes())
-}
-
-pub fn parse_bytes(literal: &[u8]) -> Option<f64> {
-    parse_inner(trim_slice(literal, u8::is_ascii_whitespace))
-}
-
-fn trim_slice<T>(v: &[T], mut trim: impl FnMut(&T) -> bool) -> &[T] {
-    let mut it = v.iter();
-    // it.take_while_ref(&mut trim).for_each(drop);
-    // hmm.. `&mut slice::Iter<_>` is not `Clone`
-    // it.by_ref().rev().take_while_ref(&mut trim).for_each(drop);
-    while it.clone().next().is_some_and(&mut trim) {
-        it.next();
-    }
-    while it.clone().next_back().is_some_and(&mut trim) {
-        it.next_back();
-    }
-    it.as_slice()
-}
-
-fn parse_inner(literal: &[u8]) -> Option<f64> {
-    use lexical_parse_float::{
-        format::PYTHON3_LITERAL, FromLexicalWithOptions, NumberFormatBuilder, Options,
-    };
-    // lexical-core's format::PYTHON_STRING is inaccurate
-    const PYTHON_STRING: u128 = NumberFormatBuilder::rebuild(PYTHON3_LITERAL)
-        .no_special(false)
-        .build();
-    f64::from_lexical_with_options::<PYTHON_STRING>(literal, &Options::new()).ok()
-}
-
-pub fn is_integer(v: f64) -> bool {
+fn is_integer(v: f64) -> bool {
     (v - v.round()).abs() < f64::EPSILON
 }
 
-fn format_nan(case: Case) -> String {
-    let nan = match case {
-        Case::Lower => "nan",
-        Case::Upper => "NAN",
-    };
-
-    nan.to_string()
-}
-
-fn format_inf(case: Case) -> String {
-    let inf = match case {
-        Case::Lower => "inf",
-        Case::Upper => "INF",
-    };
-
-    inf.to_string()
-}
-
-pub fn decimal_point_or_empty(precision: usize, alternate_form: bool) -> &'static str {
-    match (precision, alternate_form) {
-        (0, true) => ".",
-        _ => "",
-    }
-}
-
-pub fn format_fixed(precision: usize, magnitude: f64, case: Case, alternate_form: bool) -> String {
-    match magnitude {
-        magnitude if magnitude.is_finite() => {
-            let point = decimal_point_or_empty(precision, alternate_form);
-            format!("{magnitude:.precision$}{point}")
-        }
-        magnitude if magnitude.is_nan() => format_nan(case),
-        magnitude if magnitude.is_infinite() => format_inf(case),
-        _ => String::new(),
-    }
-}
-
-// Formats floats into Python style exponent notation, by first formatting in Rust style
-// exponent notation (`1.0000e0`), then convert to Python style (`1.0000e+00`).
-pub fn format_exponent(
-    precision: usize,
-    magnitude: f64,
-    case: Case,
-    alternate_form: bool,
-) -> String {
-    match magnitude {
-        magnitude if magnitude.is_finite() => {
-            let r_exp = format!("{magnitude:.precision$e}");
-            let mut parts = r_exp.splitn(2, 'e');
-            let base = parts.next().unwrap();
-            let exponent = parts.next().unwrap().parse::<i64>().unwrap();
-            let e = match case {
-                Case::Lower => 'e',
-                Case::Upper => 'E',
-            };
-            let point = decimal_point_or_empty(precision, alternate_form);
-            format!("{base}{point}{e}{exponent:+#03}")
-        }
-        magnitude if magnitude.is_nan() => format_nan(case),
-        magnitude if magnitude.is_infinite() => format_inf(case),
-        _ => String::new(),
-    }
-}
-
-/// If s represents a floating point value, trailing zeros and a possibly trailing
-/// decimal point will be removed.
-/// This function does NOT work with decimal commas.
-fn maybe_remove_trailing_redundant_chars(s: String, alternate_form: bool) -> String {
-    if !alternate_form && s.contains('.') {
-        // only truncate floating point values when not in alternate form
-        let s = remove_trailing_zeros(s);
-        remove_trailing_decimal_point(s)
-    } else {
-        s
-    }
-}
-
-fn remove_trailing_zeros(s: String) -> String {
-    let mut s = s;
-    while s.ends_with('0') {
-        s.pop();
-    }
-    s
-}
-
-fn remove_trailing_decimal_point(s: String) -> String {
-    let mut s = s;
-    if s.ends_with('.') {
-        s.pop();
-    }
-    s
-}
-
-#[allow(
-    clippy::cast_sign_loss,
-    clippy::cast_possible_truncation,
-    clippy::cast_possible_wrap
-)]
-pub fn format_general(
-    precision: usize,
-    magnitude: f64,
-    case: Case,
-    alternate_form: bool,
-    always_shows_fract: bool,
-) -> String {
-    match magnitude {
-        magnitude if magnitude.is_finite() => {
-            let r_exp = format!("{:.*e}", precision.saturating_sub(1), magnitude);
-            let mut parts = r_exp.splitn(2, 'e');
-            let base = parts.next().unwrap();
-            let exponent = parts.next().unwrap().parse::<i64>().unwrap();
-            if exponent < -4 || exponent + i64::from(always_shows_fract) >= (precision as i64) {
-                let e = match case {
-                    Case::Lower => 'e',
-                    Case::Upper => 'E',
-                };
-                let magnitude = format!("{:.*}", precision + 1, base);
-                let base = maybe_remove_trailing_redundant_chars(magnitude, alternate_form);
-                let point = decimal_point_or_empty(precision.saturating_sub(1), alternate_form);
-                format!("{base}{point}{e}{exponent:+#03}")
-            } else {
-                let precision = ((precision as i64) - 1 - exponent) as usize;
-                let magnitude = format!("{magnitude:.precision$}");
-                let base = maybe_remove_trailing_redundant_chars(magnitude, alternate_form);
-                let point = decimal_point_or_empty(precision, alternate_form);
-                format!("{base}{point}")
-            }
-        }
-        magnitude if magnitude.is_nan() => format_nan(case),
-        magnitude if magnitude.is_infinite() => format_inf(case),
-        _ => String::new(),
-    }
-}
-
 // TODO: rewrite using format_general
 pub fn to_string(value: f64) -> String {
     let lit = format!("{value:e}");
@@ -195,83 +26,3 @@ pub fn to_string(value: f64) -> String {
         s
     }
 }
-
-pub fn from_hex(s: &str) -> Option<f64> {
-    if let Ok(f) = hexf_parse::parse_hexf64(s, false) {
-        return Some(f);
-    }
-    match s.to_ascii_lowercase().as_str() {
-        "nan" | "+nan" | "-nan" => Some(f64::NAN),
-        "inf" | "infinity" | "+inf" | "+infinity" => Some(f64::INFINITY),
-        "-inf" | "-infinity" => Some(f64::NEG_INFINITY),
-        value => {
-            let mut hex = String::with_capacity(value.len());
-            let has_0x = value.contains("0x");
-            let has_p = value.contains('p');
-            let has_dot = value.contains('.');
-            let mut start = 0;
-
-            if !has_0x && value.starts_with('-') {
-                hex.push_str("-0x");
-                start += 1;
-            } else if !has_0x {
-                hex.push_str("0x");
-                if value.starts_with('+') {
-                    start += 1;
-                }
-            }
-
-            for (index, ch) in value.chars().enumerate() {
-                if ch == 'p' {
-                    if has_dot {
-                        hex.push('p');
-                    } else {
-                        hex.push_str(".p");
-                    }
-                } else if index >= start {
-                    hex.push(ch);
-                }
-            }
-
-            if !has_p && has_dot {
-                hex.push_str("p0");
-            } else if !has_p && !has_dot {
-                hex.push_str(".p0");
-            }
-
-            hexf_parse::parse_hexf64(hex.as_str(), false).ok()
-        }
-    }
-}
-
-#[test]
-fn test_remove_trailing_zeros() {
-    assert!(remove_trailing_zeros(String::from("100")) == *"1");
-    assert!(remove_trailing_zeros(String::from("100.00")) == *"100.");
-
-    // leave leading zeros untouched
-    assert!(remove_trailing_zeros(String::from("001")) == *"001");
-
-    // leave strings untouched if they don't end with 0
-    assert!(remove_trailing_zeros(String::from("101")) == *"101");
-}
-
-#[test]
-fn test_remove_trailing_decimal_point() {
-    assert!(remove_trailing_decimal_point(String::from("100.")) == *"100");
-    assert!(remove_trailing_decimal_point(String::from("1.")) == *"1");
-
-    // leave leading decimal points untouched
-    assert!(remove_trailing_decimal_point(String::from(".5")) == *".5");
-}
-
-#[test]
-fn test_maybe_remove_trailing_redundant_chars() {
-    assert!(maybe_remove_trailing_redundant_chars(String::from("100."), true) == *"100.");
-    assert!(maybe_remove_trailing_redundant_chars(String::from("100."), false) == *"100");
-    assert!(maybe_remove_trailing_redundant_chars(String::from("1."), false) == *"1");
-    assert!(maybe_remove_trailing_redundant_chars(String::from("10.0"), false) == *"10");
-
-    // don't truncate integers
-    assert!(maybe_remove_trailing_redundant_chars(String::from("1000"), false) == *"1000");
-}
```
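A standalone sketch of the core step in the removed `format_exponent`: format with Rust's `{:e}` (which prints exponents like `e3`), then rewrite the exponent into Python's signed, zero-padded form (`e+03`):

```rust
/// Convert `value` to exponent notation with a Python-style exponent field.
fn python_exponent(value: f64, precision: usize) -> String {
    let rust = format!("{value:.precision$e}"); // e.g. "1.500000e3"
    let (base, exp) = rust.split_once('e').expect("exponent notation always contains 'e'");
    let exponent: i64 = exp.parse().expect("exponent is a decimal integer");
    // Python prints an explicit sign and at least two exponent digits.
    format!("{base}e{exponent:+03}")
}

fn main() {
    assert_eq!(python_exponent(1500.0, 6), "1.500000e+03");
    assert_eq!(python_exponent(0.00025, 2), "2.50e-04");
}
```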
```diff
@@ -113,15 +113,14 @@
 use std::iter::FusedIterator;
 use std::ops::Deref;
 
-use crate::lexer::{lex, lex_starts_at, LexResult};
+use ruff_python_ast::{Expr, Mod, ModModule, PySourceType, Suite};
+use ruff_text_size::{TextRange, TextSize};
 
 pub use crate::error::{FStringErrorType, ParseError, ParseErrorType};
+use crate::lexer::{lex, lex_starts_at, LexResult};
 pub use crate::parser::Program;
 pub use crate::token::{Tok, TokenKind};
 
-use ruff_python_ast::{Expr, Mod, ModModule, PySourceType, Suite};
-use ruff_text_size::{Ranged, TextRange, TextSize};
-
 mod error;
 pub mod lexer;
 mod parser;
@@ -355,44 +354,6 @@ impl Tokens {
         TokenKindIter::new(&self.0)
     }
-
-    /// Returns an iterator over the [`TokenKind`] and its range for all the tokens that are
-    /// within the given `range`.
-    ///
-    /// The start and end position of the given range should correspond to the start position of
-    /// the first token and the end position of the last token in the returned iterator.
-    ///
-    /// For example, if the struct contains the following tokens:
-    /// ```txt
-    /// (Def, 0..3)
-    /// (Name, 4..7)
-    /// (Lpar, 7..8)
-    /// (Rpar, 8..9)
-    /// (Colon, 9..10)
-    /// (Ellipsis, 11..14)
-    /// (Newline, 14..14)
-    /// ```
-    ///
-    /// Then, the range `4..10` returns an iterator which yields `Name`, `Lpar`, `Rpar`, and
-    /// `Colon` token. But, if the given position doesn't match any of the tokens, an empty
-    /// iterator is returned.
-    pub fn kinds_within_range<T: Ranged>(&self, ranged: T) -> TokenKindIter {
-        let Ok(start_index) = self.binary_search_by_key(&ranged.start(), |result| match result {
-            Ok((_, range)) => range.start(),
-            Err(error) => error.location().start(),
-        }) else {
-            return TokenKindIter::default();
-        };
-
-        let Ok(end_index) = self.binary_search_by_key(&ranged.end(), |result| match result {
-            Ok((_, range)) => range.end(),
-            Err(error) => error.location().end(),
-        }) else {
-            return TokenKindIter::default();
-        };
-
-        TokenKindIter::new(self.get(start_index..=end_index).unwrap_or(&[]))
-    }
 
     /// Consumes the [`Tokens`], returning the underlying vector of [`LexResult`].
     pub fn into_inner(self) -> Vec<LexResult> {
         self.0
```
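A standalone sketch of the lookup `kinds_within_range` performed, using plain `(kind, range)` pairs instead of `LexResult`s: two binary searches over the token start and end offsets select the covered sub-slice, and a range that does not line up exactly with token boundaries yields nothing.

```rust
use std::ops::Range;

#[derive(Debug, Clone, Copy, PartialEq)]
enum TokenKind {
    Def,
    Name,
    Lpar,
    Rpar,
    Colon,
}

/// Return the tokens whose spans exactly cover `range`, or an empty slice.
fn kinds_within_range(
    tokens: &[(TokenKind, Range<u32>)],
    range: Range<u32>,
) -> &[(TokenKind, Range<u32>)] {
    let Ok(start_index) = tokens.binary_search_by_key(&range.start, |(_, r)| r.start) else {
        return &[];
    };
    let Ok(end_index) = tokens.binary_search_by_key(&range.end, |(_, r)| r.end) else {
        return &[];
    };
    tokens.get(start_index..=end_index).unwrap_or(&[])
}

fn main() {
    // Token layout borrowed from the doc comment in the diff above.
    let tokens = [
        (TokenKind::Def, 0..3),
        (TokenKind::Name, 4..7),
        (TokenKind::Lpar, 7..8),
        (TokenKind::Rpar, 8..9),
        (TokenKind::Colon, 9..10),
    ];
    let kinds: Vec<_> = kinds_within_range(&tokens, 4..10).iter().map(|(k, _)| *k).collect();
    assert_eq!(kinds, [TokenKind::Name, TokenKind::Lpar, TokenKind::Rpar, TokenKind::Colon]);
    assert!(kinds_within_range(&tokens, 5..10).is_empty());
}
```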
```diff
@@ -107,14 +107,6 @@ impl<'a> Scope<'a> {
         })
     }
-
-    /// Like [`Scope::binding_ids`], but returns all bindings that were added to the scope,
-    /// including those that were shadowed by later bindings.
-    pub fn all_binding_ids(&self) -> impl Iterator<Item = BindingId> + '_ {
-        self.bindings.values().copied().flat_map(|id| {
-            std::iter::successors(Some(id), |id| self.shadowed_bindings.get(id).copied())
-        })
-    }
 
     /// Like [`Scope::bindings`], but returns all bindings added to the scope, including those that
     /// were shadowed by later bindings.
     pub fn all_bindings(&self) -> impl Iterator<Item = (&str, BindingId)> + '_ {
@@ -144,11 +136,6 @@ impl<'a> Scope<'a> {
         !self.star_imports.is_empty()
     }
-
-    /// Returns an iterator over all star imports (e.g., `from sys import *`) in this scope.
-    pub fn star_imports(&self) -> impl Iterator<Item = &StarImport<'a>> {
-        self.star_imports.iter()
-    }
 
     /// Set the globals pointer for this scope.
     pub(crate) fn set_globals_id(&mut self, globals: GlobalsId) {
         self.globals_id = Some(globals);
```
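A standalone sketch of the removed `all_binding_ids`: start from each live binding and follow the shadow chain with `std::iter::successors`, so bindings that were later shadowed are yielded as well. Plain `u32` ids and std `HashMap`s stand in for the crate's `BindingId` and index maps:

```rust
use std::collections::HashMap;

type BindingId = u32;

/// Yield every binding ever added: each live binding plus everything it shadows.
fn all_binding_ids<'a>(
    bindings: &'a HashMap<&'a str, BindingId>,
    shadowed: &'a HashMap<BindingId, BindingId>,
) -> impl Iterator<Item = BindingId> + 'a {
    bindings.values().copied().flat_map(move |id| {
        std::iter::successors(Some(id), move |id| shadowed.get(id).copied())
    })
}

fn main() {
    // `x` was bound three times; binding 2 is live, 1 and 0 were shadowed.
    let bindings = HashMap::from([("x", 2)]);
    let shadowed = HashMap::from([(2, 1), (1, 0)]);
    let mut ids: Vec<_> = all_binding_ids(&bindings, &shadowed).collect();
    ids.sort_unstable();
    assert_eq!(ids, vec![0, 1, 2]);
}
```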