diff --git a/src/lib.rs b/src/lib.rs
index fb910bc953..6faf7ad1ba 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1,3 +1,14 @@
+use std::path::Path;
+
+use anyhow::Result;
+use log::debug;
+use rustpython_parser::lexer::LexResult;
+
+use crate::autofix::fixer::Mode;
+use crate::linter::{check_path, tokenize};
+use crate::message::Message;
+use crate::settings::Settings;
+
 mod ast;
 mod autofix;
 pub mod cache;
@@ -13,3 +24,49 @@ pub mod printer;
 pub mod pyproject;
 mod python;
 pub mod settings;
+
+/// Run ruff over Python source code directly.
+pub fn check(path: &Path, contents: &str) -> Result<Vec<Message>> {
+    // Find the project root and pyproject.toml.
+    let project_root = pyproject::find_project_root(&[path.to_path_buf()]);
+    match &project_root {
+        Some(path) => debug!("Found project root at: {:?}", path),
+        None => debug!("Unable to identify project root; assuming current directory..."),
+    };
+    let pyproject = pyproject::find_pyproject_toml(&project_root);
+    match &pyproject {
+        Some(path) => debug!("Found pyproject.toml at: {:?}", path),
+        None => debug!("Unable to find pyproject.toml; using default settings..."),
+    };
+
+    let settings = Settings::from_pyproject(pyproject, project_root)?;
+
+    // Tokenize once.
+    let tokens: Vec<LexResult> = tokenize(contents);
+
+    // Determine the noqa line for every line in the source.
+    let noqa_line_for = noqa::extract_noqa_line_for(&tokens);
+
+    // Generate checks.
+    let checks = check_path(
+        path,
+        contents,
+        tokens,
+        &noqa_line_for,
+        &settings,
+        &Mode::None,
+    )?;
+
+    // Convert to messages.
+    let messages: Vec<Message> = checks
+        .into_iter()
+        .map(|check| Message {
+            kind: check.kind,
+            fixed: check.fix.map(|fix| fix.applied).unwrap_or_default(),
+            location: check.location,
+            filename: path.to_string_lossy().to_string(),
+        })
+        .collect();
+
+    Ok(messages)
+}
diff --git a/src/linter.rs b/src/linter.rs
index 179b176893..098d1755ed 100644
--- a/src/linter.rs
+++ b/src/linter.rs
@@ -16,7 +16,7 @@ use crate::settings::Settings;
 use crate::{cache, fs, noqa};
 
 /// Collect tokens up to and including the first error.
-fn tokenize(contents: &str) -> Vec<LexResult> {
+pub(crate) fn tokenize(contents: &str) -> Vec<LexResult> {
     let mut tokens: Vec<LexResult> = vec![];
     for tok in lexer::make_tokenizer(contents) {
         let is_err = tok.is_err();
@@ -28,7 +28,7 @@ fn tokenize(contents: &str) -> Vec<LexResult> {
     tokens
 }
 
-fn check_path(
+pub(crate) fn check_path(
     path: &Path,
     contents: &str,
     tokens: Vec<LexResult>,
diff --git a/src/main.rs b/src/main.rs
index 80c3120aab..e3617c3a46 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -23,9 +23,9 @@ use ::ruff::logging::set_up_logging;
 use ::ruff::message::Message;
 use ::ruff::printer::{Printer, SerializationFormat};
 use ::ruff::pyproject::{self, StrCheckCodePair};
+use ::ruff::settings::CurrentSettings;
 use ::ruff::settings::{FilePattern, PerFileIgnore, Settings};
 use ::ruff::tell_user;
-use ruff::settings::CurrentSettings;
 
 const CARGO_PKG_NAME: &str = env!("CARGO_PKG_NAME");
 const CARGO_PKG_VERSION: &str = env!("CARGO_PKG_VERSION");