Expose a public 'check' method (#289)

Author: Charlie Marsh (2022-09-30 11:30:37 -04:00), committed by GitHub
parent 1e36c109c6
commit 417764d309
3 changed files with 60 additions and 3 deletions


@@ -1,3 +1,14 @@
+use std::path::Path;
+
+use anyhow::Result;
+use log::debug;
+use rustpython_parser::lexer::LexResult;
+
+use crate::autofix::fixer::Mode;
+use crate::linter::{check_path, tokenize};
+use crate::message::Message;
+use crate::settings::Settings;
+
 mod ast;
 mod autofix;
 pub mod cache;
@@ -13,3 +24,49 @@ pub mod printer;
 pub mod pyproject;
 mod python;
 pub mod settings;
+
+/// Run ruff over Python source code directly.
+pub fn check(path: &Path, contents: &str) -> Result<Vec<Message>> {
+    // Find the project root and pyproject.toml.
+    let project_root = pyproject::find_project_root(&[path.to_path_buf()]);
+    match &project_root {
+        Some(path) => debug!("Found project root at: {:?}", path),
+        None => debug!("Unable to identify project root; assuming current directory..."),
+    };
+    let pyproject = pyproject::find_pyproject_toml(&project_root);
+    match &pyproject {
+        Some(path) => debug!("Found pyproject.toml at: {:?}", path),
+        None => debug!("Unable to find pyproject.toml; using default settings..."),
+    };
+    let settings = Settings::from_pyproject(pyproject, project_root)?;
+
+    // Tokenize once.
+    let tokens: Vec<LexResult> = tokenize(contents);
+
+    // Determine the noqa line for every line in the source.
+    let noqa_line_for = noqa::extract_noqa_line_for(&tokens);
+
+    // Generate checks.
+    let checks = check_path(
+        path,
+        contents,
+        tokens,
+        &noqa_line_for,
+        &settings,
+        &Mode::None,
+    )?;
+
+    // Convert to messages.
+    let messages: Vec<Message> = checks
+        .into_iter()
+        .map(|check| Message {
+            kind: check.kind,
+            fixed: check.fix.map(|fix| fix.applied).unwrap_or_default(),
+            location: check.location,
+            filename: path.to_string_lossy().to_string(),
+        })
+        .collect();
+    Ok(messages)
+}
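With this change, downstream crates can lint Python source in memory without shelling out to the CLI. A minimal sketch of a caller, assuming a Cargo dependency on the `ruff` crate (and on `anyhow` for the `Result` type); the file name and source snippet are illustrative:

use std::path::Path;

use anyhow::Result;

fn main() -> Result<()> {
    // Hypothetical caller: lint an in-memory source string as if it lived at
    // `example.py`, resolving settings from the nearest pyproject.toml.
    let contents = "import os\nprint('hello')\n";
    let messages = ruff::check(Path::new("example.py"), contents)?;
    // Each Message carries the check kind, source location, filename, and
    // whether a fix was applied.
    println!("found {} violation(s)", messages.len());
    Ok(())
}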


@@ -16,7 +16,7 @@ use crate::settings::Settings;
 use crate::{cache, fs, noqa};
 
 /// Collect tokens up to and including the first error.
-fn tokenize(contents: &str) -> Vec<LexResult> {
+pub(crate) fn tokenize(contents: &str) -> Vec<LexResult> {
     let mut tokens: Vec<LexResult> = vec![];
     for tok in lexer::make_tokenizer(contents) {
         let is_err = tok.is_err();
@@ -28,7 +28,7 @@ fn tokenize(contents: &str) -> Vec<LexResult> {
     tokens
 }
 
-fn check_path(
+pub(crate) fn check_path(
     path: &Path,
     contents: &str,
     tokens: Vec<LexResult>,
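Note that `tokenize` and `check_path` are widened to `pub(crate)` rather than `pub`: the new `check` in `lib.rs` can call them, but they stay out of the crate's public API. A standalone sketch of the distinction, with illustrative module and function names:

mod linter {
    // Callable from anywhere inside this crate...
    pub(crate) fn tokenize(contents: &str) -> Vec<&str> {
        contents.split_whitespace().collect() // stand-in body
    }
}

// ...so the crate root can build a public API on top of it, while an
// external crate that depends on this one still cannot name `linter::tokenize`.
pub fn check(contents: &str) -> usize {
    linter::tokenize(contents).len()
}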


@@ -23,9 +23,9 @@ use ::ruff::logging::set_up_logging;
 use ::ruff::message::Message;
 use ::ruff::printer::{Printer, SerializationFormat};
 use ::ruff::pyproject::{self, StrCheckCodePair};
-use ::ruff::settings::CurrentSettings;
 use ::ruff::settings::{FilePattern, PerFileIgnore, Settings};
 use ::ruff::tell_user;
+use ruff::settings::CurrentSettings;
 
 const CARGO_PKG_NAME: &str = env!("CARGO_PKG_NAME");
 const CARGO_PKG_VERSION: &str = env!("CARGO_PKG_VERSION");