Mirror of https://github.com/WhatsApp/erlang-language-platform.git
Synced 2025-12-23 12:26:48 +00:00

Compare commits: 46 commits (2025-12-11), ... → main
| Author | SHA1 | Date |
|---|---|---|
| | 7a4eccf14d | |
| | 4bba415bc1 | |
| | 6456f325c3 | |
| | 5c7cfae09d | |
| | 220acfd4f7 | |
| | fc09ff94fd | |
| | 8e82f1cee4 | |
| | 8cb6ac7620 | |
| | 8b4c533527 | |
| | b12cf72c7e | |
| | ce0125dec9 | |
| | d7b1c561ad | |
| | 3804bd1706 | |
| | 439ebade1b | |
| | 54dd1089ed | |
| | fa05043043 | |
| | 79c33480c5 | |
| | 7057624c73 | |
| | 4a051d1af8 | |
| | 91a0d968db | |
| | cc0a9c26d5 | |
| | 83d5b4dfd0 | |
| | 9015d80659 | |
| | 8997372e1a | |
| | bef3dd15f2 | |
| | 56e66e2cd5 | |
| | a0e076cb17 | |
| | 8ff602f1c3 | |
| | 4a83d0edde | |
| | 271065da03 | |
| | 37b7c5e28e | |
| | d4e14636b4 | |
| | b2ea905e7f | |
| | 858b8e64ab | |
| | 271a52c37c | |
| | ba4505e981 | |
| | d2da476882 | |
| | 166770cc92 | |
| | b525eac0ea | |
| | 2bd3d57d02 | |
| | eb5e0abbf8 | |
| | 887d287c94 | |
| | 553c90d631 | |
| | a16af7e9e2 | |
| | 0085fba772 | |
| | babf61214c | |
574 changed files with 3637 additions and 5773 deletions

@@ -1,8 +1,8 @@
[alias]
xtask = "run --package xtask --"

# @fb-only
# @fb-only
# @fb-only: [build]
# @fb-only: target-dir = "../../../buck-out/elp"

[profile.release]
codegen-units = 1

4	.github/workflows/ci.yml	vendored
@@ -97,6 +97,8 @@ jobs:
run: |
sudo apt-get update
sudo apt-get install -y crossbuild-essential-arm64
- name: Install Buck2
uses: dtolnay/install-buck2@latest
- id: setup-erlang
uses: ./.github/actions/setup-erlang
with:

@@ -135,7 +137,7 @@ jobs:
- name: Test elp
# Do not run the tests in case of cross-compilation or on Windows
if: matrix.platform-arch != 'macos-latest-arm' && matrix.os != 'windows'
run: 'cargo test --no-default-features --workspace --target ${{ matrix.target }}'
run: 'cargo test --workspace --target ${{ matrix.target }}'
- name: Build elp (No Windows)
if: matrix.os != 'windows'
run: 'cargo build --release --target ${{ matrix.target }} --config target.aarch64-unknown-linux-gnu.linker=\"aarch64-linux-gnu-gcc\"'

12	.vscode/tasks.json	vendored
@@ -4,7 +4,7 @@
{
"label": "ELP: build (debug)",
"type": "shell",
// @fb-only
// @fb-only: "command": "./meta/cargo.sh build",
"command": "cargo build", // @oss-only
"group": {
"kind": "build",

@@ -19,7 +19,7 @@
{
"label": "ELP: build (release)",
"type": "shell",
// @fb-only
// @fb-only: "command": "./meta/cargo.sh build --release",
"command": "cargo build --release", // @oss-only
"group": {
"kind": "build",

@@ -34,7 +34,7 @@
{
"label": "ELP: build (release-thin)",
"type": "shell",
// @fb-only
// @fb-only: "command": "./meta/cargo.sh build --profile release-thin --bins",
"command": "cargo build --profile release-thin --bins", // @oss-only
"group": {
"kind": "build",

@@ -49,7 +49,7 @@
{
"label": "ELP: run clippy on workspace",
"type": "shell",
// @fb-only
// @fb-only: "command": "./meta/clippy.sh --workspace --tests",
"command": "cargo clippy --workspace --tests", // @oss-only
"group": {
"kind": "build",

@@ -64,7 +64,7 @@
{
"label": "ELP: run clippy on workspace, apply fixes",
"type": "shell",
// @fb-only
// @fb-only: "command": "./meta/clippy.sh --workspace --tests --fix",
"command": "cargo clippy --workspace --tests --fix", // @oss-only
"group": {
"kind": "build",

@@ -79,7 +79,7 @@
{
"label": "ELP: run tests on workspace",
"type": "shell",
// @fb-only
// @fb-only: "command": "./meta/cargo.sh test --workspace",
"command": "cargo test --workspace", // @oss-only
"group": {
"kind": "build",

@@ -32,7 +32,7 @@ mod module_index;
// Public API

pub mod fixture;
// @fb-only
// @fb-only: mod meta_only;
pub mod test_utils;
pub use change::Change;
pub use elp_project_model::AppType;

@@ -476,7 +476,7 @@ static ref IGNORED_SOURCES: Vec<Regex> = {
let regexes: Vec<Vec<Regex>> = vec![
vec![Regex::new(r"^.*_SUITE_data/.+$").unwrap()],
//ignore sources goes here
// @fb-only
// @fb-only: meta_only::ignored_sources_regexes()
];
regexes.into_iter().flatten().collect::<Vec<Regex>>()
};

@@ -8,8 +8,8 @@
* above-listed licenses.
*/

// @fb-only
// @fb-only
// @fb-only: /// Types as defined in https://www.internalfb.com/intern/wiki/Linting/adding-linters/#flow-type
// @fb-only: /// and https://www.internalfb.com/code/fbsource/[1238f73dac0efd4009443fee6a345a680dc9401b]/whatsapp/server/erl/tools/lint/arcanist.py?lines=17
use std::path::Path;

use serde::Serialize;

@@ -72,6 +72,17 @@ pub struct ParseAllElp {
/// Report system memory usage and other statistics
#[bpaf(long("report-system-stats"))]
pub report_system_stats: bool,
/// Minimum severity level to report. Valid values: error, warning, weak_warning, information
#[bpaf(
argument("SEVERITY"),
complete(severity_completer),
fallback(None),
guard(
severity_guard,
"Please use error, warning, weak_warning, or information"
)
)]
pub severity: Option<String>,
}

#[derive(Clone, Debug, Bpaf)]

@@ -144,8 +155,6 @@ pub struct EqwalizeAll {
/// Also eqwalize opted-in generated modules from project (deprecated)
#[bpaf(hide)]
pub include_generated: bool,
/// Also eqwalize test modules from project
pub include_tests: bool,
/// Exit with a non-zero status code if any errors are found
pub bail_on_error: bool,
/// Print statistics when done

@@ -162,8 +171,6 @@ pub struct EqwalizeTarget {
/// Also eqwalize opted-in generated modules from application (deprecated)
#[bpaf(hide)]
pub include_generated: bool,
/// Also eqwalize test modules from project
pub include_tests: bool,
/// Exit with a non-zero status code if any errors are found
pub bail_on_error: bool,
/// target, like //erl/chatd/...

@@ -182,8 +189,6 @@ pub struct EqwalizeApp {
/// Also eqwalize opted-in generated modules from project (deprecated)
#[bpaf(hide)]
pub include_generated: bool,
/// Also eqwalize test modules from project
pub include_tests: bool,
/// Run with rebar
pub rebar: bool,
/// Exit with a non-zero status code if any errors are found

@@ -206,8 +211,6 @@ pub struct EqwalizeStats {
/// Also eqwalize opted-in generated modules from project (deprecated)
#[bpaf(hide)]
pub include_generated: bool,
/// Also eqwalize test modules from project
pub include_tests: bool,
/// If specified, use the provided CLI severity mapping instead of the default one
pub use_cli_severity: bool,
}

@@ -783,6 +786,25 @@ fn format_guard(format: &Option<String>) -> bool {
}
}

fn severity_completer(_: &Option<String>) -> Vec<(String, Option<String>)> {
vec![
("error".to_string(), None),
("warning".to_string(), None),
("weak_warning".to_string(), None),
("information".to_string(), None),
]
}

fn severity_guard(severity: &Option<String>) -> bool {
match severity {
None => true,
Some(s) if s == "error" || s == "warning" || s == "weak_warning" || s == "information" => {
true
}
_ => false,
}
}

fn macros_completer(_: &Option<String>) -> Vec<(String, Option<String>)> {
vec![
("expand".to_string(), None),

@@ -889,7 +911,7 @@ impl Lint {

/// To prevent flaky test results we allow disabling streaming when applying fixes
pub fn skip_stream_print(&self) -> bool {
self.apply_fix && self.no_stream
self.apply_fix || self.no_stream
}
}
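Illustrative usage, not part of the diff (module name hypothetical): with the `--severity` flag declared above, an invocation such as `elp parse-elp --module my_module --severity warning` is accepted, while any value other than error, warning, weak_warning, or information is rejected by the bpaf guard at argument-parsing time.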

@@ -57,6 +57,35 @@ use crate::args::ParseAllElp;
use crate::reporting;
use crate::reporting::print_memory_usage;

fn parse_severity(severity: &str) -> Option<diagnostics::Severity> {
match severity {
"error" => Some(diagnostics::Severity::Error),
"warning" => Some(diagnostics::Severity::Warning),
"weak_warning" => Some(diagnostics::Severity::WeakWarning),
"information" => Some(diagnostics::Severity::Information),
_ => None,
}
}

fn severity_rank(severity: diagnostics::Severity) -> u8 {
match severity {
diagnostics::Severity::Error => 1,
diagnostics::Severity::Warning => 2,
diagnostics::Severity::WeakWarning => 3,
diagnostics::Severity::Information => 4,
}
}

fn meets_severity_threshold(
diag_severity: diagnostics::Severity,
min_severity: Option<diagnostics::Severity>,
) -> bool {
match min_severity {
None => true,
Some(min) => severity_rank(diag_severity) <= severity_rank(min),
}
}

#[derive(Debug)]
struct ParseResult {
name: String,

@@ -147,6 +176,19 @@ pub fn parse_all(
let memory_end = MemoryUsage::now();
let memory_used = memory_end - memory_start;

let min_severity = args
.severity
.as_ref()
.and_then(|s| parse_severity(s.as_str()));

res.retain(|parse_result| {
parse_result
.diagnostics
.diagnostics_for(parse_result.file_id)
.iter()
.any(|diag| meets_severity_threshold(diag.severity, min_severity))
});

if res.is_empty() {
if args.is_format_normal() {
writeln!(cli, "No errors reported")?;

@@ -165,6 +207,7 @@
for diags in res {
let mut combined: Vec<diagnostics::Diagnostic> =
diags.diagnostics.diagnostics_for(diags.file_id);
combined.retain(|diag| meets_severity_threshold(diag.severity, min_severity));
if args.is_format_normal() {
writeln!(cli, " {}: {}", diags.name, combined.len())?;
}
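A quick worked example of the threshold logic above: severity_rank maps Error→1, Warning→2, WeakWarning→3 and Information→4, so with `--severity warning` (rank 2) an Error diagnostic passes because 1 <= 2, while a WeakWarning diagnostic is filtered out because 3 <= 2 is false; with no `--severity` argument, min_severity is None and every diagnostic is kept.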

@@ -186,10 +186,7 @@ pub fn do_eqwalize_all(
.par_bridge()
.progress_with(pb.clone())
.map_with(analysis.clone(), |analysis, (name, _source, file_id)| {
if analysis
.should_eqwalize(file_id, args.include_tests)
.unwrap()
&& !otp_file_to_ignore(analysis, file_id)
if analysis.should_eqwalize(file_id).unwrap() && !otp_file_to_ignore(analysis, file_id)
{
if args.stats {
add_stat(name.to_string());

@@ -269,9 +266,7 @@ pub fn do_eqwalize_app(
.iter_own()
.filter_map(|(_name, _source, file_id)| {
if analysis.file_app_name(file_id).ok()? == Some(AppName(args.app.clone()))
&& analysis
.should_eqwalize(file_id, args.include_tests)
.unwrap()
&& analysis.should_eqwalize(file_id).unwrap()
&& !otp_file_to_ignore(analysis, file_id)
{
Some(file_id)

@@ -339,9 +334,7 @@ pub fn eqwalize_target(
let vfs_path = VfsPath::from(src.clone());
if let Some((file_id, _)) = loaded.vfs.file_id(&vfs_path) {
at_least_one_found = true;
if analysis
.should_eqwalize(file_id, args.include_tests)
.unwrap()
if analysis.should_eqwalize(file_id).unwrap()
&& !otp_file_to_ignore(analysis, file_id)
{
file_ids.push(file_id);

@@ -408,9 +401,7 @@ pub fn eqwalize_stats(
.par_bridge()
.progress_with(pb.clone())
.map_with(analysis.clone(), |analysis, (name, _source, file_id)| {
if analysis
.should_eqwalize(file_id, args.include_tests)
.expect("cancelled")
if analysis.should_eqwalize(file_id).expect("cancelled")
&& !otp_file_to_ignore(analysis, file_id)
{
analysis

@@ -84,7 +84,7 @@ const REC_ARITY: u32 = 99;
const HEADER_ARITY: u32 = 100;
const FACTS_FILE: &str = "facts.json";

// @fb-only
// @fb-only: mod meta_only;

#[derive(Serialize, Debug, Eq, Hash, PartialEq, Clone)]
struct GleanFileId(u32);

@@ -994,7 +994,7 @@ impl GleanIndexer {
.filter(|text| !text.is_empty())
});

// @fb-only
// @fb-only: let exdoc_link = elp_ide::meta_only::exdoc_links::module_exdoc_link(&module, &sema);
let exdoc_link: Option<String> = None; // @oss-only

ModuleFact::new(

@@ -1532,7 +1532,7 @@ impl GleanIndexer {
}) => {
let def = macro_def.as_ref()?;
let mut resolved = Self::resolve_macro_v2(sema, def, source_file, ctx)?;
// @fb-only
// @fb-only: meta_only::resolve_macro_expansion(sema, *expansion, ctx, &mut resolved);
Some(resolved)
}
hir::AnyExpr::Pat(Pat::MacroCall { macro_def, .. })

@@ -1560,7 +1560,7 @@ impl GleanIndexer {
vars: FxHashMap<&Location, &String>,
) -> Vec<VarDecl> {
let mut result = vec![];
if !db.is_eqwalizer_enabled(file_id, false) {
if !db.is_eqwalizer_enabled(file_id) {
return result;
}
let module_diagnostics = db.eqwalizer_diagnostics_by_project(project_id, vec![file_id]);

@@ -1875,9 +1875,9 @@ impl GleanIndexer {
let source_file = sema.parse(file_id);
let range = Self::find_range(sema, ctx, &source_file, &expr_source)?;

// @fb-only
// @fb-only
// @fb-only
// @fb-only: use elp_ide::meta_only::wam_links;
// @fb-only: let wam_ctx = wam_links::WamEventCtx::new(sema.db.upcast());
// @fb-only: let wam_url = wam_ctx.build_wam_link(name).map(|link| link.url());
let wam_url = None; // @oss-only

Some(XRef {

@@ -295,7 +295,7 @@ pub fn do_codemod(
let res;
let streamed_err_in_diag;
let mut any_diagnostics_printed = false;
let initial_diags = {
let mut initial_diags = {
// We put this in its own block so that analysis is
// freed before we apply lints. To apply lints
// recursively, we need to update the underlying

@@ -394,30 +394,54 @@ pub fn do_codemod(
let mut err_in_diag = streamed_err_in_diag;
// At this point, the analysis variable from above is dropped

// Print "No diagnostics reported" if no diagnostics were found after filtering
if !any_diagnostics_printed {
if args.is_format_normal() {
writeln!(cli, "No diagnostics reported")?;
// When streaming is disabled (--no-stream) and we're not applying fixes,
// we need to print diagnostics now since they weren't printed during streaming
if args.no_stream && !args.apply_fix && !initial_diags.is_empty() {
let analysis = loaded.analysis();
let mut module_count = 0;
initial_diags.sort_by(|(a, _, _), (b, _, _)| a.cmp(b));
for result in &initial_diags {
let printed = print_diagnostic_result(
cli,
&analysis,
diagnostics_config,
args,
loaded,
&args.module,
&mut err_in_diag,
&mut module_count,
result,
)?;
any_diagnostics_printed = any_diagnostics_printed || printed;
}
} else {
if args.apply_fix && diagnostics_config.enabled.all_enabled() {
}

// Handle apply_fix case separately since it needs to filter diagnostics anyway
if args.apply_fix {
if diagnostics_config.enabled.all_enabled() {
bail!(
"We cannot apply fixes if all diagnostics enabled. Perhaps provide --diagnostic-filter"
);
}
if args.apply_fix && !diagnostics_config.enabled.all_enabled() {
let mut initial_diags = {
let analysis = loaded.analysis();
filter_diagnostics(
&analysis,
&args.module,
Some(&diagnostics_config.enabled),
&initial_diags,
&FxHashSet::default(),
)?
};

let mut filtered_diags = {
let analysis = loaded.analysis();
filter_diagnostics(
&analysis,
&args.module,
Some(&diagnostics_config.enabled),
&initial_diags,
&FxHashSet::default(),
)?
};

if filtered_diags.is_empty() {
if args.is_format_normal() {
writeln!(cli, "No diagnostics reported")?;
}
} else {
if args.skip_stream_print() {
initial_diags.sort_by(|(a, _, _), (b, _, _)| a.cmp(b));
filtered_diags.sort_by(|(a, _, _), (b, _, _)| a.cmp(b));
let module_count: &mut i32 = &mut 0;
let has_diagnostics: &mut bool = &mut false;
if args.is_format_json() {

@@ -428,7 +452,7 @@ pub fn do_codemod(
&mut err_in_diag,
module_count,
has_diagnostics,
&initial_diags,
&filtered_diags,
)?;
} else {
{

@@ -442,7 +466,7 @@ pub fn do_codemod(
&mut err_in_diag,
module_count,
has_diagnostics,
&initial_diags,
&filtered_diags,
)?;
// Analysis is dropped here
}

@@ -456,7 +480,7 @@ pub fn do_codemod(
&mut loaded.vfs,
args,
&mut changed_files,
initial_diags,
filtered_diags,
);
// We handle the fix application result here, so
// the overall status of whether error-severity

@@ -468,8 +492,19 @@ pub fn do_codemod(
writeln!(cli, "Apply fix failed: {err:#}").ok();
}
};

if err_in_diag {
bail!("Errors found")
}
}
if err_in_diag {
} else {
// Non-apply-fix case: rely on any_diagnostics_printed which is set
// correctly based on filtered diagnostics during streaming/batch printing
if !any_diagnostics_printed {
if args.is_format_normal() {
writeln!(cli, "No diagnostics reported")?;
}
} else if err_in_diag {
bail!("Errors found")
}
}
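In CLI terms (illustrative, not part of the diff): after the branch added above, `elp lint --no-stream` sorts and prints the collected diagnostics in a single batch once analysis finishes instead of producing no output, while `elp lint --apply-fix` still bails out unless the enabled diagnostics are narrowed, for example with `--diagnostic-filter`.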

@@ -40,7 +40,7 @@ mod erlang_service_cli;
mod explain_cli;
mod glean;
mod lint_cli;
// @fb-only
// @fb-only: mod meta_only;
mod reporting;
mod shell;
mod ssr_cli;

@@ -110,7 +110,7 @@ fn setup_cli_telemetry(args: &Args) {
}
_ => {
// Initialize CLI telemetry, if used
// @fb-only
// @fb-only: meta_only::initialize_telemetry();
}
}
}
@ -288,7 +288,7 @@ mod tests {
|
|||
let (_stdout, stderr, code) = elp(args_vec![
|
||||
"parse-all",
|
||||
"--project",
|
||||
"../../test_projects/standard",
|
||||
"../../test/test_projects/standard",
|
||||
"--to",
|
||||
tmp.path(),
|
||||
]);
|
||||
|
|
@ -306,7 +306,7 @@ mod tests {
|
|||
|
||||
fn parse_all_complete(project: &str) -> Result<i32> {
|
||||
// Just check the command returns.
|
||||
let project_path = format!("../../test_projects/{project}");
|
||||
let project_path = format!("../../test/test_projects/{project}");
|
||||
let tmp = Builder::new().prefix("elp_parse_all_").tempdir().unwrap();
|
||||
let (_stdout, _stderr, code) = elp(args_vec![
|
||||
"parse-all",
|
||||
|
|
@ -443,33 +443,34 @@ mod tests {
|
|||
})
|
||||
.unwrap();
|
||||
|
||||
let exp_path = expect_file!(format!(
|
||||
"../resources/test/{}/{}/{}.pretty",
|
||||
project,
|
||||
app,
|
||||
module.as_str(),
|
||||
));
|
||||
let (stdout, _) = cli.to_strings();
|
||||
|
||||
let otp_version = OTP_VERSION.as_ref().expect("MISSING OTP VERSION");
|
||||
let otp_version_regex =
|
||||
regex::bytes::Regex::new(&format!("{}OTPVersionDependent", "@"))
|
||||
.unwrap();
|
||||
regex::bytes::Regex::new(&format!("{}OTP([0-9]+)Only", "@")).unwrap();
|
||||
let contents = analysis.file_text(file_id).unwrap();
|
||||
let otp_version_dependent = otp_version_regex
|
||||
.is_match(&contents.as_bytes()[0..(2001.min(contents.len()))]);
|
||||
let exp_path = {
|
||||
if otp_version_dependent {
|
||||
expect_file!(format!(
|
||||
"../resources/test/{}/{}/{}-OTP-{}.pretty",
|
||||
project,
|
||||
app,
|
||||
module.as_str(),
|
||||
otp_version,
|
||||
))
|
||||
} else {
|
||||
expect_file!(format!(
|
||||
"../resources/test/{}/{}/{}.pretty",
|
||||
project,
|
||||
app,
|
||||
module.as_str(),
|
||||
))
|
||||
let otp_version_capture = otp_version_regex
|
||||
.captures(&contents.as_bytes()[0..(2001.min(contents.len()))]);
|
||||
if let Some((_, [otp_version_only])) =
|
||||
otp_version_capture.map(|cap| cap.extract())
|
||||
{
|
||||
if otp_version_only == otp_version.as_bytes() {
|
||||
assert_normalised_file(
|
||||
exp_path,
|
||||
&stdout,
|
||||
project_path.into(),
|
||||
false,
|
||||
);
|
||||
}
|
||||
};
|
||||
let (stdout, _) = cli.to_strings();
|
||||
assert_normalised_file(exp_path, &stdout, project_path.into(), false);
|
||||
} else {
|
||||
assert_normalised_file(exp_path, &stdout, project_path.into(), false);
|
||||
}
|
||||
}
|
||||
}
|
||||
EqwalizerDiagnostics::NoAst { module } => {
|
||||
|
|
@ -604,10 +605,7 @@ mod tests {
|
|||
fn eqwalize_target_diagnostics_match_snapshot_pretty() {
|
||||
if cfg!(feature = "buck") {
|
||||
simple_snapshot(
|
||||
args_vec![
|
||||
"eqwalize-target",
|
||||
"//whatsapp/elp/test_projects/standard:app_a",
|
||||
],
|
||||
args_vec!["eqwalize-target", "//standard:app_a",],
|
||||
"standard",
|
||||
expect_file!("../resources/test/standard/eqwalize_target_diagnostics.pretty"),
|
||||
true,
|
||||
|
|
@ -671,6 +669,24 @@ mod tests {
|
|||
);
|
||||
}
|
||||
|
||||
#[test_case(false ; "rebar")]
|
||||
#[test_case(true ; "buck")]
|
||||
fn parse_all_diagnostics_severity(buck: bool) {
|
||||
simple_snapshot_expect_error(
|
||||
args_vec![
|
||||
"parse-elp",
|
||||
"--module",
|
||||
"diagnostics",
|
||||
"--severity",
|
||||
"error"
|
||||
],
|
||||
"diagnostics",
|
||||
expect_file!("../resources/test/diagnostics/parse_all_diagnostics_error.stdout"),
|
||||
buck,
|
||||
None,
|
||||
);
|
||||
}
|
||||
|
||||
#[test_case(false ; "rebar")]
|
||||
#[test_case(true ; "buck")]
|
||||
fn parse_elp_file_attribute(buck: bool) {
|
||||
|
|
@ -954,7 +970,9 @@ mod tests {
|
|||
assert!(tmp_file.clone().exists());
|
||||
let content = fs::read_to_string(tmp_file).unwrap();
|
||||
let mut buck_config = BuckConfig::default();
|
||||
buck_config.buck_root = Some(AbsPathBuf::assert_utf8(current_dir().unwrap()));
|
||||
buck_config.buck_root = Some(AbsPathBuf::assert_utf8(
|
||||
current_dir().unwrap().join(path_str.clone()),
|
||||
));
|
||||
let prelude_cell = get_prelude_cell(&buck_config).expect("could not get prelude");
|
||||
let prelude_cell = prelude_cell.strip_prefix("/").unwrap();
|
||||
let content = content.replace(prelude_cell, "/[prelude]/");
|
||||
|
|
@ -966,38 +984,13 @@ mod tests {
|
|||
Some(AbsPathBuf::assert(Utf8PathBuf::from_path_buf(abs).unwrap()));
|
||||
let content = normalise_prelude_path(content, buck_config);
|
||||
|
||||
let content = sort_json(&content);
|
||||
|
||||
expect![[r#"
|
||||
{
|
||||
"apps": [
|
||||
{
|
||||
"name": "test_exec",
|
||||
"dir": "/[prelude]//erlang/common_test/test_exec/src",
|
||||
"src_dirs": [
|
||||
""
|
||||
],
|
||||
"extra_src_dirs": [],
|
||||
"include_dirs": [],
|
||||
"macros": {}
|
||||
},
|
||||
{
|
||||
"name": "diagnostics_app_a",
|
||||
"dir": "app_a",
|
||||
"src_dirs": [
|
||||
"src"
|
||||
],
|
||||
"extra_src_dirs": [],
|
||||
"include_dirs": [
|
||||
"include"
|
||||
],
|
||||
"macros": {
|
||||
"COMMON_TEST": "true",
|
||||
"TEST": "true"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "app_a_SUITE",
|
||||
"dir": "app_a/test",
|
||||
"src_dirs": [],
|
||||
"extra_src_dirs": [
|
||||
""
|
||||
],
|
||||
|
|
@ -1005,61 +998,88 @@ mod tests {
|
|||
"macros": {
|
||||
"COMMON_TEST": "true",
|
||||
"TEST": "true"
|
||||
}
|
||||
},
|
||||
"name": "app_a_SUITE",
|
||||
"src_dirs": []
|
||||
},
|
||||
{
|
||||
"name": "common",
|
||||
"dir": "/[prelude]//erlang/common_test/common",
|
||||
"dir": "/[prelude]//erlang/common_test/test_exec/src",
|
||||
"extra_src_dirs": [],
|
||||
"include_dirs": [],
|
||||
"macros": {},
|
||||
"name": "test_exec",
|
||||
"src_dirs": [
|
||||
"src"
|
||||
],
|
||||
""
|
||||
]
|
||||
},
|
||||
{
|
||||
"dir": "/[prelude]//erlang/common_test/common",
|
||||
"extra_src_dirs": [],
|
||||
"include_dirs": [
|
||||
"include"
|
||||
],
|
||||
"macros": {}
|
||||
"macros": {},
|
||||
"name": "common",
|
||||
"src_dirs": [
|
||||
"src"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "cth_hooks",
|
||||
"dir": "/[prelude]//erlang/common_test/cth_hooks/src",
|
||||
"src_dirs": [
|
||||
""
|
||||
],
|
||||
"extra_src_dirs": [],
|
||||
"include_dirs": [
|
||||
""
|
||||
],
|
||||
"macros": {}
|
||||
"macros": {},
|
||||
"name": "cth_hooks",
|
||||
"src_dirs": [
|
||||
""
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "buck2_shell_utils",
|
||||
"dir": "/[prelude]//erlang/shell/src",
|
||||
"src_dirs": [
|
||||
""
|
||||
],
|
||||
"extra_src_dirs": [],
|
||||
"include_dirs": [],
|
||||
"macros": {}
|
||||
"macros": {},
|
||||
"name": "buck2_shell_utils",
|
||||
"src_dirs": [
|
||||
""
|
||||
]
|
||||
},
|
||||
{
|
||||
"dir": "app_a",
|
||||
"extra_src_dirs": [],
|
||||
"include_dirs": [
|
||||
"include"
|
||||
],
|
||||
"macros": {
|
||||
"COMMON_TEST": "true",
|
||||
"TEST": "true"
|
||||
},
|
||||
"name": "diagnostics_app_a",
|
||||
"src_dirs": [
|
||||
"src"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "test_binary",
|
||||
"dir": "/[prelude]//erlang/common_test/test_binary/src",
|
||||
"src_dirs": [
|
||||
""
|
||||
],
|
||||
"extra_src_dirs": [],
|
||||
"include_dirs": [],
|
||||
"macros": {}
|
||||
"macros": {},
|
||||
"name": "test_binary",
|
||||
"src_dirs": [
|
||||
""
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "test_cli_lib",
|
||||
"dir": "/[prelude]//erlang/common_test/test_cli_lib/src",
|
||||
"src_dirs": [
|
||||
""
|
||||
],
|
||||
"extra_src_dirs": [],
|
||||
"include_dirs": [],
|
||||
"macros": {}
|
||||
"macros": {},
|
||||
"name": "test_cli_lib",
|
||||
"src_dirs": [
|
||||
""
|
||||
]
|
||||
}
|
||||
],
|
||||
"deps": []
|
||||
|
|
@ -1074,6 +1094,12 @@ mod tests {
|
|||
content.replace(prelude_cell, "/[prelude]/")
|
||||
}
|
||||
|
||||
fn sort_json(content: &str) -> String {
|
||||
let mut json: serde_json::Value = serde_json::from_str(content).unwrap();
|
||||
json.sort_all_objects();
|
||||
serde_json::to_string_pretty(&json).unwrap()
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[ignore]
|
||||
fn build_info_json_buck_bxl_generated() {
|
||||
|
|
@ -1087,7 +1113,7 @@ mod tests {
|
|||
"--to",
|
||||
tmp_file.clone(),
|
||||
"--project",
|
||||
path_str
|
||||
path_str.clone()
|
||||
];
|
||||
let (stdout, stderr, code) = elp(args);
|
||||
assert_eq!(
|
||||
|
|
@ -1102,7 +1128,9 @@ mod tests {
|
|||
assert!(tmp_file.clone().exists());
|
||||
let content = fs::read_to_string(tmp_file).unwrap();
|
||||
let mut buck_config = BuckConfig::default();
|
||||
buck_config.buck_root = Some(AbsPathBuf::assert_utf8(current_dir().unwrap()));
|
||||
buck_config.buck_root = Some(AbsPathBuf::assert_utf8(
|
||||
current_dir().unwrap().join(path_str.clone()),
|
||||
));
|
||||
let prelude_cell = get_prelude_cell(&buck_config).expect("could not get prelude");
|
||||
let prelude_cell = prelude_cell.strip_prefix("/").unwrap();
|
||||
let content = content.replace(prelude_cell, "/[prelude]/");
|
||||
|
|
@ -1426,7 +1454,7 @@ mod tests {
|
|||
"lint",
|
||||
"--experimental",
|
||||
"--config-file",
|
||||
"../../test_projects/linter/does_not_exist.toml"
|
||||
"../../test/test_projects/linter/does_not_exist.toml"
|
||||
],
|
||||
"linter",
|
||||
expect_file!("../resources/test/linter/parse_elp_lint_custom_config_invalid_output.stdout"),
|
||||
|
|
@ -1438,7 +1466,7 @@ mod tests {
|
|||
&[],
|
||||
false,
|
||||
Some(expect![[r#"
|
||||
unable to read "../../test_projects/linter/does_not_exist.toml": No such file or directory (os error 2)
|
||||
unable to read "../../test/test_projects/linter/does_not_exist.toml": No such file or directory (os error 2)
|
||||
"#]]),
|
||||
)
|
||||
.expect("bad test");
|
||||
|
|
@ -1454,7 +1482,7 @@ mod tests {
|
|||
"lint",
|
||||
"--experimental",
|
||||
"--config-file",
|
||||
"../../test_projects/linter/elp_lint_test1.toml"
|
||||
"../../test/test_projects/linter/elp_lint_test1.toml"
|
||||
],
|
||||
"linter",
|
||||
expect_file!("../resources/test/linter/parse_elp_lint_custom_config_output.stdout"),
|
||||
|
|
@ -1480,7 +1508,7 @@ mod tests {
|
|||
"lint",
|
||||
"--experimental",
|
||||
"--config-file",
|
||||
"../../test_projects/linter/elp_lint_adhoc.toml",
|
||||
"../../test/test_projects/linter/elp_lint_adhoc.toml",
|
||||
"--module",
|
||||
"app_b",
|
||||
"--apply-fix",
|
||||
|
|
@ -1511,7 +1539,7 @@ mod tests {
|
|||
"--diagnostic-ignore",
|
||||
"W0011",
|
||||
"--config-file",
|
||||
"../../test_projects/linter/elp_lint_test_ignore.toml"
|
||||
"../../test/test_projects/linter/elp_lint_test_ignore.toml"
|
||||
],
|
||||
"linter",
|
||||
expect_file!("../resources/test/linter/parse_elp_lint_ignore.stdout"),
|
||||
|
|
@ -1555,7 +1583,7 @@ mod tests {
|
|||
&[],
|
||||
false,
|
||||
Some(expect![[r#"
|
||||
failed to read "../../test_projects/linter_bad_config/.elp_lint.toml":expected a right bracket, found an identifier at line 6 column 4
|
||||
failed to read "../../test/test_projects/linter_bad_config/.elp_lint.toml":expected a right bracket, found an identifier at line 6 column 4
|
||||
"#]]),
|
||||
)
|
||||
.expect("bad test");
|
||||
|
|
@ -1573,6 +1601,20 @@ mod tests {
|
|||
);
|
||||
}
|
||||
|
||||
#[test_case(false ; "rebar")]
|
||||
#[test_case(true ; "buck")]
|
||||
fn lint_no_stream_produces_output(buck: bool) {
|
||||
if otp::supports_eep66_sigils() {
|
||||
simple_snapshot_expect_error(
|
||||
args_vec!["lint", "--no-stream"],
|
||||
"diagnostics",
|
||||
expect_file!("../resources/test/diagnostics/lint_no_stream.stdout"),
|
||||
buck,
|
||||
None,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test_case(false ; "rebar")]
|
||||
#[test_case(true ; "buck")]
|
||||
fn lint_no_diagnostics_filter_all_enabled_json(buck: bool) {
|
||||
|
|
@ -1607,7 +1649,7 @@ mod tests {
|
|||
args_vec![
|
||||
"lint",
|
||||
"--config-file",
|
||||
"../../test_projects/linter/elp_lint_test2.toml"
|
||||
"../../test/test_projects/linter/elp_lint_test2.toml"
|
||||
],
|
||||
"linter",
|
||||
expect_file!("../resources/test/linter/parse_elp_lint_explicit_enable_output.stdout"),
|
||||
|
|
@ -1910,7 +1952,8 @@ mod tests {
|
|||
simple_snapshot_expect_stderror(
|
||||
args_vec!["lint",],
|
||||
"buck_bad_config",
|
||||
expect_file!("../resources/test/buck_bad_config/bxl_error_message.stdout"),
|
||||
// @fb-only: expect_file!("../resources/test/buck_bad_config/bxl_error_message.stdout"),
|
||||
expect_file!("../resources/test/buck_bad_config/bxl_error_message_oss.stdout"), // @oss-only
|
||||
true,
|
||||
None,
|
||||
true,
|
||||
|
|
@ -1925,7 +1968,7 @@ mod tests {
|
|||
"lint",
|
||||
"--no-stream"
|
||||
"--config-file",
|
||||
"../../test_projects/linter/elp_lint_warnings_as_errors.toml"
|
||||
"../../test/test_projects/linter/elp_lint_warnings_as_errors.toml"
|
||||
],
|
||||
"linter",
|
||||
expect_file!("../resources/test/linter/warnings_as_errors.stdout"),
|
||||
|
|
@ -1940,7 +1983,7 @@ mod tests {
|
|||
args_vec![
|
||||
"lint",
|
||||
"--config-file",
|
||||
"../../test_projects/linter/elp_lint_custom_function_matches.toml",
|
||||
"../../test/test_projects/linter/elp_lint_custom_function_matches.toml",
|
||||
"--module",
|
||||
"custom_function_matches"
|
||||
],
|
||||
|
|
@ -1957,7 +2000,7 @@ mod tests {
|
|||
args_vec![
|
||||
"lint",
|
||||
"--config-file",
|
||||
"../../test_projects/xref/elp_lint_unavailable_type.toml",
|
||||
"../../test/test_projects/xref/elp_lint_unavailable_type.toml",
|
||||
"--module",
|
||||
"unavailable_type"
|
||||
],
|
||||
|
|
@ -1974,7 +2017,7 @@ mod tests {
|
|||
args_vec![
|
||||
"lint",
|
||||
"--config-file",
|
||||
"../../test_projects/linter/elp_lint_ssr_adhoc.toml",
|
||||
"../../test/test_projects/linter/elp_lint_ssr_adhoc.toml",
|
||||
],
|
||||
"linter",
|
||||
expect_file!("../resources/test/linter/ssr_ad_hoc.stdout"),
|
||||
|
|
@ -1989,7 +2032,7 @@ mod tests {
|
|||
args_vec![
|
||||
"lint",
|
||||
"--config-file",
|
||||
"../../test_projects/linter/elp_lint_ssr_adhoc_parse_fail.toml",
|
||||
"../../test/test_projects/linter/elp_lint_ssr_adhoc_parse_fail.toml",
|
||||
],
|
||||
"linter",
|
||||
expect_file!("../resources/test/linter/ssr_ad_hoc_parse_fail.stdout"),
|
||||
|
|
@ -2173,6 +2216,36 @@ mod tests {
|
|||
)
|
||||
}
|
||||
|
||||
#[test_case(false ; "rebar")]
|
||||
#[test_case(true ; "buck")]
|
||||
fn ssr_exclude_generated_by_default(buck: bool) {
|
||||
simple_snapshot(
|
||||
args_vec!["ssr", "--module", "erlang_diagnostics_errors_gen", "ok"],
|
||||
"diagnostics",
|
||||
expect_file!("../resources/test/diagnostics/ssr_exclude_generated.stdout"),
|
||||
buck,
|
||||
None,
|
||||
);
|
||||
}
|
||||
|
||||
#[test_case(false ; "rebar")]
|
||||
#[test_case(true ; "buck")]
|
||||
fn ssr_include_generated_when_requested(buck: bool) {
|
||||
simple_snapshot(
|
||||
args_vec![
|
||||
"ssr",
|
||||
"--module",
|
||||
"erlang_diagnostics_errors_gen",
|
||||
"--include-generated",
|
||||
"ok"
|
||||
],
|
||||
"diagnostics",
|
||||
expect_file!("../resources/test/diagnostics/ssr_include_generated.stdout"),
|
||||
buck,
|
||||
None,
|
||||
);
|
||||
}
|
||||
|
||||
#[test_case(false ; "rebar")]
|
||||
#[test_case(true ; "buck")]
|
||||
// We cannot use `should_panic` for this test, since the OSS CI runs with the `buck` feature disabled.
|
||||
|
|
@ -2190,6 +2263,18 @@ mod tests {
|
|||
);
|
||||
}
|
||||
|
||||
#[test_case(false ; "rebar")]
|
||||
#[test_case(true ; "buck")]
|
||||
fn lint_linter_config_basic(buck: bool) {
|
||||
simple_snapshot_sorted(
|
||||
args_vec!["lint", "--read-config", "--no-stream"],
|
||||
"linter_config",
|
||||
expect_file!("../resources/test/linter_config/basic.stdout"),
|
||||
buck,
|
||||
None,
|
||||
);
|
||||
}
|
||||
|
||||
#[test_case(false ; "rebar")]
|
||||
#[test_case(true ; "buck")]
|
||||
fn eqwalizer_tests_check(buck: bool) {
|
||||
|
|
@ -3051,7 +3136,7 @@ mod tests {
|
|||
}
|
||||
|
||||
fn project_path(project: &str) -> String {
|
||||
format!("../../test_projects/{project}")
|
||||
format!("../../test/test_projects/{project}")
|
||||
}
|
||||
|
||||
fn strip_ansi_codes(s: &str) -> String {
|
||||
|
|
|

@@ -227,9 +227,6 @@ impl Reporter for JsonReporter<'_> {
diagnostics: &[EqwalizerDiagnostic],
) -> Result<()> {
let line_index = self.analysis.line_index(file_id)?;
// Pass include_Tests = false so that errors for tests files that are not opted-in are tagged as
// arc_types::Severity::Disabled and don't break CI.
let eqwalizer_enabled = self.analysis.is_eqwalizer_enabled(file_id, false).unwrap();
let file_path = &self.loaded.vfs.file_path(file_id);
let root_path = &self
.analysis

@@ -238,12 +235,8 @@
.root_dir;
let relative_path = get_relative_path(root_path, file_path);
for diagnostic in diagnostics {
let diagnostic = convert::eqwalizer_to_arc_diagnostic(
diagnostic,
&line_index,
relative_path,
eqwalizer_enabled,
);
let diagnostic =
convert::eqwalizer_to_arc_diagnostic(diagnostic, &line_index, relative_path);
let diagnostic = serde_json::to_string(&diagnostic)?;
writeln!(self.cli, "{diagnostic}")?;
}

@@ -157,10 +157,9 @@ impl ShellCommand {
}
"eqwalize-app" => {
let include_generated = options.contains(&"--include-generated");
let include_tests = options.contains(&"--include-tests");
if let Some(other) = options
.into_iter()
.find(|&opt| opt != "--include-generated" && opt != "--include-tests")
.find(|&opt| opt != "--include-generated")
{
return Err(ShellError::UnexpectedOption(
"eqwalize-app".into(),

@@ -177,7 +176,6 @@
rebar,
app: app.into(),
include_generated,
include_tests,
bail_on_error: false,
})));
}

@@ -185,10 +183,9 @@
}
"eqwalize-all" => {
let include_generated = options.contains(&"--include-generated");
let include_tests = options.contains(&"--include-tests");
if let Some(other) = options
.into_iter()
.find(|&opt| opt != "--include-generated" && opt != "--include-tests")
.find(|&opt| opt != "--include-generated")
{
return Err(ShellError::UnexpectedOption(
"eqwalize-all".into(),

@@ -204,7 +201,6 @@
rebar,
format: None,
include_generated,
include_tests,
bail_on_error: false,
stats: false,
list_modules: false,

@@ -226,10 +222,8 @@ COMMANDS:
eqwalize <modules> Eqwalize specified modules
--clause-coverage Use experimental clause coverage checker
eqwalize-all Eqwalize all modules in the current project
--include-tests Also eqwalize test modules from project
--clause-coverage Use experimental clause coverage checker
eqwalize-app <app> Eqwalize all modules in specified application
--include-tests Also eqwalize test modules from project
--clause-coverage Use experimental clause coverage checker
";


@@ -401,6 +401,9 @@ fn do_parse_one(
name: &str,
args: &Ssr,
) -> Result<Option<(String, FileId, Vec<diagnostics::Diagnostic>)>> {
if !args.include_generated && db.is_generated(file_id)? {
return Ok(None);
}
if !args.include_tests && db.is_test_suite_or_test_helper(file_id)?.unwrap_or(false) {
return Ok(None);
}

@@ -30,7 +30,7 @@ use serde::de::DeserializeOwned;
use serde_json::json;

use crate::from_json;
// @fb-only
// @fb-only: use crate::meta_only;

// Defines the server-side configuration of ELP. We generate *parts*
// of VS Code's `package.json` config from this.

@@ -180,7 +180,7 @@ impl Config {
return;
}
self.data = ConfigData::from_json(json);
// @fb-only
// @fb-only: meta_only::harmonise_gks(self);
}

pub fn update_gks(&mut self, json: serde_json::Value) {

@@ -126,18 +126,11 @@ pub fn eqwalizer_to_arc_diagnostic(
d: &EqwalizerDiagnostic,
line_index: &LineIndex,
relative_path: &Path,
eqwalizer_enabled: bool,
) -> arc_types::Diagnostic {
let pos = position(line_index, d.range.start());
let line_num = pos.line + 1;
let character = Some(pos.character + 1);
let severity = if eqwalizer_enabled {
arc_types::Severity::Error
} else {
// We use Severity::Disabled so that diagnostics are reported in cont lint
// but not in CI.
arc_types::Severity::Disabled
};
let severity = arc_types::Severity::Error;
// formatting: https://fburl.com/max_wiki_link_to_phabricator_rich_text
let explanation = match &d.explanation {
Some(s) => format!("```\n{s}\n```"),

@@ -37,7 +37,7 @@ pub mod line_endings;
pub mod lsp_ext;
mod mem_docs;
pub mod memory_usage;
// @fb-only
// @fb-only: mod meta_only;
mod op_queue;
mod project_loader;
pub mod reload;

@@ -108,7 +108,7 @@ pub fn otp_file_to_ignore(db: &Analysis, file_id: FileId) -> bool {
"redbug_dtop",
]
.iter()
// @fb-only
// @fb-only: .chain(meta_only::FILES_TO_IGNORE.iter())
.map(SmolStr::new)
.collect();
}

@@ -0,0 +1 @@
Project Initialisation Failed: invalid or missing buck 2 configuration

138	crates/elp/src/resources/test/diagnostics/lint_no_stream.stdout	Normal file
@ -0,0 +1,138 @@
|
|||
Reporting all diagnostics codes
|
||||
Diagnostics reported:
|
||||
app_a/src/app_a.erl:52:3-52:23::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/app_a.erl:3:10-3:21::[WeakWarning] [W0037] Unspecific include.
|
||||
app_a/src/app_a.erl:27:3-27:9::[Warning] [W0017] Function 'foo:ok/0' is undefined.
|
||||
app_a/src/app_a.erl:28:4-28:11::[Warning] [W0017] Function 'mod:foo/0' is undefined.
|
||||
app_a/src/app_a.erl:72:4-72:11::[Warning] [W0017] Function 'foo:bar/2' is undefined.
|
||||
app_a/src/app_a.erl:37:11-37:28::[Warning] [W0017] Function 'mod_name:fun_name/2' is undefined.
|
||||
app_a/src/app_a.erl:58:11-58:24::[WeakWarning] [W0051] Binary string can be written using sigil syntax.
|
||||
app_a/src/app_a.erl:4:1-4:41::[Warning] [W0020] Unused file: inets/include/httpd.hrl
|
||||
app_a/src/app_a.erl:39:7-39:28::[Error] [L1267] variable 'A' shadowed in 'named fun'
|
||||
app_a/src/app_a.erl:55:32-55:35::[Error] [L1295] type uri/0 undefined
|
||||
app_a/src/app_a.erl:56:20-56:26::[Error] [L1295] type binary/1 undefined
|
||||
app_a/src/app_a.erl:72:3-72:34::[Error] [L1252] record record undefined
|
||||
app_a/src/app_a.erl:75:5-75:16::[Error] [L1252] record record undefined
|
||||
app_a/src/app_a.erl:35:1-35:2::[Warning] [L1230] function g/1 is unused
|
||||
app_a/src/app_a.erl:35:3-35:4::[Warning] [L1268] variable 'A' is unused
|
||||
app_a/src/app_a.erl:36:3-36:4::[Warning] [L1268] variable 'F' is unused
|
||||
app_a/src/app_a.erl:37:3-37:4::[Warning] [L1268] variable 'G' is unused
|
||||
app_a/src/app_a.erl:38:3-38:4::[Warning] [L1268] variable 'H' is unused
|
||||
app_a/src/app_a.erl:39:3-39:4::[Warning] [L1268] variable 'I' is unused
|
||||
app_a/src/app_a.erl:39:7-39:28::[Warning] [L1268] variable 'A' is unused
|
||||
app_a/src/app_a.erl:41:1-41:2::[Warning] [L1230] function h/0 is unused
|
||||
app_a/src/app_a.erl:45:1-45:2::[Warning] [L1230] function i/0 is unused
|
||||
app_a/src/app_a.erl:50:1-50:2::[Warning] [L1230] function j/2 is unused
|
||||
app_a/src/app_a.erl:50:15-50:16::[Warning] [L1268] variable 'A' is unused
|
||||
app_a/src/app_a.erl:50:23-50:24::[Warning] [L1268] variable 'B' is unused
|
||||
app_a/src/app_a.erl:55:1-55:46::[Warning] [L1296] type session(_) is unused
|
||||
app_a/src/app_a.erl:55:1-55:46::[Warning] [L1313] opaque type session(_) is not exported
|
||||
app_a/src/app_a.erl:56:7-56:13::[Warning] [L1296] type source(_) is unused
|
||||
app_a/src/app_a.erl:58:1-58:4::[Warning] [L1230] function map/2 is unused
|
||||
app_a/src/app_a.erl:60:1-60:9::[Warning] [L1230] function with_dot/0 is unused
|
||||
app_a/src/app_a.erl:62:1-62:9::[Warning] [L1230] function lang_dir/1 is unused
|
||||
app_a/src/app_a.erl:66:1-66:7::[Warning] [L1230] function escape/1 is unused
|
||||
app_a/src/app_a.erl:66:13-66:17::[Warning] [L1268] variable 'T' is unused
|
||||
app_a/src/app_a.erl:67:9-67:25::[Warning] [L1260] record all_configs_file is unused
|
||||
app_a/src/app_a.erl:71:1-71:2::[Warning] [L1230] function k/0 is unused
|
||||
app_a/src/app_a.erl:74:1-74:2::[Warning] [L1230] function l/1 is unused
|
||||
app_a/src/app_a.erl:77:1-77:2::[Warning] [L1230] function m/0 is unused
|
||||
app_a/src/broken_parse_trans.erl:10:21-10:22::[Error] [L1256] field b undefined in record a
|
||||
app_a/src/broken_parse_trans.erl:10:32-10:33::[Error] [L1262] variable 'B' is unbound
|
||||
app_a/src/cascading.erl:9:5-9:6::[Error] [W0004] Missing ')'
|
||||
3:10-3:15: function foo/0 undefined
|
||||
6:10-6:15: function foo/0 undefined
|
||||
8:7-8:10: spec for undefined function foo/0
|
||||
app_a/src/diagnostics.erl:3:10-3:27::[WeakWarning] [W0037] Unspecific include.
|
||||
app_a/src/diagnostics.erl:4:10-4:34::[WeakWarning] [W0037] Unspecific include.
|
||||
app_a/src/diagnostics.erl:12:8-12:12::[Warning] [W0060] Match on a bound variable
|
||||
app_a/src/diagnostics.erl:4:1-4:36::[Error] [L0000] Issue in included file
|
||||
[app_a/include/broken_diagnostics.hrl] 1:8-1:15: P1702: bad attribute
|
||||
[app_a/include/broken_diagnostics.hrl] 3:6-3:15: P1702: bad attribute
|
||||
app_a/src/diagnostics.erl:6:31-6:45::[Error] [L1295] type undefined_type/0 undefined
|
||||
app_a/src/diagnostics.erl:7:1-7:5::[Warning] [L1230] function main/1 is unused
|
||||
app_a/src/diagnostics.erl:10:1-10:4::[Warning] [L1230] function foo/0 is unused
|
||||
app_a/src/lint_recursive.erl:23:5-23:14::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/lint_recursive.erl:6:5-6:7::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/lint_recursive.erl:14:5-14:12::[Warning] [L1268] variable 'Config1' is unused
|
||||
app_a/src/lint_recursive.erl:19:5-19:12::[Warning] [L1268] variable 'Config1' is unused
|
||||
app_a/src/lints.erl:5:1-5:14::[Error] [P1700] head mismatch 'head_mismatcX' vs 'head_mismatch'
|
||||
4:1-4:14: Mismatched clause name
|
||||
app_a/src/lints.erl:4:22-4:23::[Warning] [W0018] Unexpected ';'
|
||||
app_a/src/lints.erl:2:10-2:25::[Error] [L1227] function head_mismatch/1 undefined
|
||||
app_a/src/otp27_docstrings.erl:34:9-34:24::[Warning] [W0002] Unused macro (THIS_IS_THE_END)
|
||||
app_a/src/otp27_docstrings.erl:24:5-24:6::[Warning] [W0060] Match on a bound variable
|
||||
app_a/src/otp27_docstrings.erl:30:5-30:6::[Warning] [W0060] Match on a bound variable
|
||||
app_a/src/otp27_sigils.erl:11:6-11:24::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/otp27_sigils.erl:12:5-12:24::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/otp27_sigils.erl:13:5-13:24::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/otp27_sigils.erl:14:5-14:24::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/otp27_sigils.erl:15:5-15:24::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/otp27_sigils.erl:17:6-17:24::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/otp27_sigils.erl:18:5-18:24::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/otp27_sigils.erl:19:5-19:24::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/otp27_sigils.erl:20:5-20:24::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/otp27_sigils.erl:21:5-21:24::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/otp27_sigils.erl:23:6-23:24::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/otp27_sigils.erl:24:5-24:24::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/otp27_sigils.erl:25:5-25:24::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/otp27_sigils.erl:26:5-26:24::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/otp27_sigils.erl:27:5-27:24::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/otp27_sigils.erl:29:6-29:24::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/otp27_sigils.erl:30:5-30:24::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/otp27_sigils.erl:31:5-31:24::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/otp27_sigils.erl:32:5-32:24::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/otp27_sigils.erl:33:5-33:24::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/otp27_sigils.erl:35:6-35:24::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/otp27_sigils.erl:36:5-36:24::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/otp27_sigils.erl:37:5-37:24::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/otp27_sigils.erl:38:5-38:24::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/otp27_sigils.erl:39:5-39:24::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/otp27_sigils.erl:41:6-41:24::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/otp27_sigils.erl:42:5-42:24::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/otp27_sigils.erl:43:5-43:24::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/otp27_sigils.erl:44:5-44:24::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/otp27_sigils.erl:45:5-45:24::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/otp27_sigils.erl:47:6-47:24::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/otp27_sigils.erl:48:5-48:24::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/otp27_sigils.erl:49:5-49:24::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/otp27_sigils.erl:50:5-50:24::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/otp27_sigils.erl:51:5-51:24::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/otp27_sigils.erl:53:6-53:24::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/otp27_sigils.erl:53:6-53:24::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/otp27_sigils.erl:54:5-54:24::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/otp27_sigils.erl:54:5-54:24::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/otp27_sigils.erl:55:5-55:24::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/otp27_sigils.erl:55:5-55:24::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/otp27_sigils.erl:56:5-56:24::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/otp27_sigils.erl:57:5-57:24::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/otp27_sigils.erl:59:6-59:24::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/otp27_sigils.erl:60:5-60:24::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/otp27_sigils.erl:61:5-61:24::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/otp27_sigils.erl:62:5-62:24::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/otp27_sigils.erl:63:5-63:24::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/otp27_sigils.erl:65:6-65:24::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/otp27_sigils.erl:66:5-66:24::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/otp27_sigils.erl:67:5-67:24::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/otp27_sigils.erl:68:5-68:24::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/otp27_sigils.erl:69:5-69:24::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/otp27_sigils.erl:76:5-79:8::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/otp27_sigils.erl:76:5-79:8::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/otp27_sigils.erl:80:5-84:9::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/otp27_sigils.erl:80:5-84:9::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/otp27_sigils.erl:85:5-89:10::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/otp27_sigils.erl:85:5-89:10::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/otp27_sigils.erl:90:5-94:11::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/otp27_sigils.erl:95:5-99:12::[Warning] [W0006] this statement has no effect
|
||||
app_a/src/otp27_sigils.erl:102:5-102:24::[WeakWarning] [W0051] Binary string can be written using sigil syntax.
|
||||
app_a/src/otp27_sigils.erl:128:9-128:24::[Warning] [W0002] Unused macro (THIS_IS_THE_END)
|
||||
app_a/src/otp27_sigils.erl:112:4-112:5::[Error] [P1711] syntax error before: X
|
||||
4:15-4:18: function g/0 undefined
|
||||
74:7-74:8: spec for undefined function g/0
|
||||
app_a/src/otp27_sigils.erl:71:5-71:6::[Warning] [L1268] variable 'X' is unused
|
||||
app_a/src/otp_7655.erl:5:1-5:28::[Error] [L1201] no module definition
|
||||
app_a/src/parse_error_a_cascade.erl:10:20-11:1::[Error] [W0004] Missing 'atom'
|
||||
6:6-6:11: function bar/0 undefined
|
||||
app_a/src/suppressed.erl:8:5-8:9::[Warning] [L1268] variable 'Life' is unused
|
||||
app_a/src/syntax.erl:5:46-5:47::[Error] [P1711] syntax error before: ')'
|
||||
app_a/src/syntax.erl:11:9-11:10::[Error] [W0004] Missing ')'
|
||||
|
|
@ -1,9 +1,10 @@
|
|||
module specified: diagnostics
|
||||
Diagnostics reported in 1 modules:
|
||||
diagnostics: 6
|
||||
diagnostics: 7
|
||||
2:9-2:26::[Hint] [W0037] Unspecific include.
|
||||
3:0-3:35::[Error] [L0000] Issue in included file
|
||||
3:9-3:33::[Hint] [W0037] Unspecific include.
|
||||
5:30-5:44::[Error] [L1295] type undefined_type/0 undefined
|
||||
6:0-6:4::[Warning] [L1230] function main/1 is unused
|
||||
9:0-9:3::[Warning] [L1230] function foo/0 is unused
|
||||
11:7-11:11::[Warning] [W0060] Match on a bound variable
|
||||
|
|
|
|||
|
|
@ -0,0 +1,5 @@
|
|||
module specified: diagnostics
|
||||
Diagnostics reported in 1 modules:
|
||||
diagnostics: 2
|
||||
3:0-3:35::[Error] [L0000] Issue in included file
|
||||
5:30-5:44::[Error] [L1295] type undefined_type/0 undefined
|
||||
|
|
@ -4,3 +4,4 @@
|
|||
{"path":"app_a/src/diagnostics.erl","line":6,"char":31,"code":"ELP","severity":"error","name":"L1295 (L1295)","original":null,"replacement":null,"description":"type undefined_type/0 undefined\n\nFor more information see: /erlang-error-index/l/L1295","docPath":null}
|
||||
{"path":"app_a/src/diagnostics.erl","line":7,"char":1,"code":"ELP","severity":"warning","name":"L1230 (L1230)","original":null,"replacement":null,"description":"function main/1 is unused\n\nFor more information see: /erlang-error-index/l/L1230","docPath":null}
|
||||
{"path":"app_a/src/diagnostics.erl","line":10,"char":1,"code":"ELP","severity":"warning","name":"L1230 (L1230)","original":null,"replacement":null,"description":"function foo/0 is unused\n\nFor more information see: /erlang-error-index/l/L1230","docPath":null}
|
||||
{"path":"app_a/src/diagnostics.erl","line":12,"char":8,"code":"ELP","severity":"warning","name":"W0060 (bound_var_in_lhs)","original":null,"replacement":null,"description":"Match on a bound variable\n\nFor more information see: /erlang-error-index/w/W0060","docPath":"website/docs/erlang-error-index/w/W0060.md"}
|
||||
|
|
|
|||
|
|
@ -1,4 +1,6 @@
|
|||
module specified: otp27_docstrings
|
||||
Diagnostics reported in 1 modules:
|
||||
otp27_docstrings: 1
|
||||
otp27_docstrings: 3
|
||||
23:4-23:5::[Warning] [W0060] Match on a bound variable
|
||||
29:4-29:5::[Warning] [W0060] Match on a bound variable
|
||||
33:8-33:23::[Warning] [W0002] Unused macro (THIS_IS_THE_END)
|
||||
|
|
|
|||
|
|
@ -0,0 +1,2 @@
|
|||
module specified: erlang_diagnostics_errors_gen
|
||||
No matches found
|
||||
|
|
@ -0,0 +1,5 @@
|
|||
module specified: erlang_diagnostics_errors_gen
|
||||
erlang_diagnostics_errors_gen: 1
|
||||
6:5-6:7::[WeakWarning] [ad-hoc: ssr-match] SSR pattern matched: ssr: ok.
|
||||
|
||||
Matches found in 1 modules
|
||||
|
|
@@ -1,11 +1,10 @@
Usage: [--project PROJECT] [--as PROFILE] [[--format FORMAT]] [--rebar] [--include-tests] [--bail-on-error] [--stats] [--list-modules]
Usage: [--project PROJECT] [--as PROFILE] [[--format FORMAT]] [--rebar] [--bail-on-error] [--stats] [--list-modules]

Available options:
--project <PROJECT> Path to directory with project, or to a JSON file (defaults to `.`)
--as <PROFILE> Rebar3 profile to pickup (default is test)
--format <FORMAT> Show diagnostics in JSON format
--rebar Run with rebar
--include-tests Also eqwalize test modules from project
--bail-on-error Exit with a non-zero status code if any errors are found
--stats Print statistics when done
--list-modules When printing statistics, include the list of modules parsed

@@ -1,4 +1,4 @@
Usage: [--project PROJECT] [--as PROFILE] [--include-tests] [--rebar] [--bail-on-error] <APP>
Usage: [--project PROJECT] [--as PROFILE] [--rebar] [--bail-on-error] <APP>

Available positional items:
<APP> app name

@@ -6,7 +6,6 @@ Available positional items:
Available options:
--project <PROJECT> Path to directory with project, or to a JSON file (defaults to `.`)
--as <PROFILE> Rebar3 profile to pickup (default is test)
--include-tests Also eqwalize test modules from project
--rebar Run with rebar
--bail-on-error Exit with a non-zero status code if any errors are found
-h, --help Prints help information
|
|||
|
|
@ -1,10 +1,9 @@
|
|||
Usage: [--project PROJECT] [--include-tests] [--bail-on-error] <TARGET>
|
||||
Usage: [--project PROJECT] [--bail-on-error] <TARGET>
|
||||
|
||||
Available positional items:
|
||||
<TARGET> target, like //erl/chatd/...
|
||||
|
||||
Available options:
|
||||
--project <PROJECT> Path to directory with project, or to a JSON file (defaults to `.`)
|
||||
--include-tests Also eqwalize test modules from project
|
||||
--bail-on-error Exit with a non-zero status code if any errors are found
|
||||
-h, --help Prints help information
|
||||
|
|
|
|||
|
|
@ -1,14 +0,0 @@
|
|||
error: incorrect_return_type_in_cb_implementation (See https://fb.me/eqwalizer_errors#incorrect_return_type_in_cb_implementation)
|
||||
┌─ check/src/callbacks3_neg.erl:12:1
|
||||
│
|
||||
12 │ -behavior(gen_server).
|
||||
│ ^^^^^^^^^^^^^^^^^^^^^
|
||||
│ │
|
||||
│ Incorrect return type for implementation of gen_server:handle_cast/2. Expected: {'noreply', term()} | {'noreply', term(), timeout() | 'hibernate' | {'continue', term()}} | {'stop', term(), term()}, Got: 'wrong_ret'.
|
||||
│
|
||||
|
||||
'wrong_ret' is not compatible with {'noreply', term()} | {'noreply', term(), timeout() | 'hibernate' | {'continue', term()}} | {'stop', term(), term()}
|
||||
because
|
||||
'wrong_ret' is not compatible with {'noreply', term()}
|
||||
|
||||
1 ERROR
|
||||
|
|
@ -1,14 +0,0 @@
|
|||
error: incorrect_return_type_in_cb_implementation (See https://fb.me/eqwalizer_errors#incorrect_return_type_in_cb_implementation)
|
||||
┌─ check/src/callbacks3_neg.erl:12:1
|
||||
│
|
||||
12 │ -behavior(gen_server).
|
||||
│ ^^^^^^^^^^^^^^^^^^^^^
|
||||
│ │
|
||||
│ Incorrect return type for implementation of gen_server:handle_cast/2. Expected: {'noreply', term()} | {'noreply', term(), timeout() | 'hibernate' | {'continue', term()}} | {'stop', term(), term()}, Got: 'wrong_ret'.
|
||||
│
|
||||
|
||||
'wrong_ret' is not compatible with {'noreply', term()} | {'noreply', term(), timeout() | 'hibernate' | {'continue', term()}} | {'stop', term(), term()}
|
||||
because
|
||||
'wrong_ret' is not compatible with {'noreply', term()}
|
||||
|
||||
1 ERROR
|
||||
|
|
@ -1,14 +0,0 @@
|
|||
error: incorrect_return_type_in_cb_implementation (See https://fb.me/eqwalizer_errors#incorrect_return_type_in_cb_implementation)
|
||||
┌─ check/src/callbacks3_neg.erl:12:1
|
||||
│
|
||||
12 │ -behavior(gen_server).
|
||||
│ ^^^^^^^^^^^^^^^^^^^^^
|
||||
│ │
|
||||
│ Incorrect return type for implementation of gen_server:handle_cast/2. Expected: {'noreply', term()} | {'noreply', term(), gen_server:action()} | {'stop', term(), term()}, Got: 'wrong_ret'.
|
||||
│
|
||||
|
||||
'wrong_ret' is not compatible with {'noreply', term()} | {'noreply', term(), gen_server:action()} | {'stop', term(), term()}
|
||||
because
|
||||
'wrong_ret' is not compatible with {'noreply', term()}
|
||||
|
||||
1 ERROR
|
||||
|
|
@ -0,0 +1,27 @@
|
|||
error: incorrect_return_type_in_cb_implementation (See https://fb.me/eqwalizer_errors#incorrect_return_type_in_cb_implementation)
|
||||
┌─ check/src/callbacks3_neg.erl:13:1
|
||||
│
|
||||
13 │ -behavior(gen_server).
|
||||
│ ^^^^^^^^^^^^^^^^^^^^^ Incorrect return type for implementation of gen_server:handle_cast/2.
|
||||
Expected: {'noreply', term()} | {'noreply', term(), timeout() | 'hibernate' | {'continue', term()}} | {'stop', term(), term()}
|
||||
Got: 'wrong_ret'
|
||||
|
||||
error: incorrect_return_type_in_cb_implementation (See https://fb.me/eqwalizer_errors#incorrect_return_type_in_cb_implementation)
|
||||
┌─ check/src/callbacks3_neg.erl:13:1
|
||||
│
|
||||
13 │ -behavior(gen_server).
|
||||
│ ^^^^^^^^^^^^^^^^^^^^^
|
||||
│ │
|
||||
│ Incorrect return type for implementation of gen_server:handle_info/2.
|
||||
Expected: {'noreply', term()} | {'noreply', term(), timeout() | 'hibernate' | {'continue', term()}} | {'stop', term(), term()}
|
||||
Got: {'noreply', 'ok', 'wrong_atom'}
|
||||
│
|
||||
|
||||
Because in the expression's type:
|
||||
{ 'noreply', 'ok',
|
||||
Here the type is: 'wrong_atom'
|
||||
Context expects type: 'infinity' | number() | 'hibernate' | {'continue', term()}
|
||||
No candidate matches in the expected union.
|
||||
}
|
||||
|
||||
2 ERRORS
|
||||
File diff suppressed because it is too large
File diff suppressed because it is too large
|
|
@ -70,4 +70,20 @@ error: incompatible_types (See https://fb.me/eqwalizer_errors#incompatible_types
|
|||
Expression has type: #{a := dynamic(), dynamic() => dynamic()}
|
||||
Context expected type: 'err'
|
||||
|
||||
5 ERRORS
|
||||
error: incompatible_types (See https://fb.me/eqwalizer_errors#incompatible_types)
|
||||
┌─ eqwater/src/eqwater_maps.erl:70:29
|
||||
│
|
||||
70 │ (_, #{a := V}) -> is_ok(V)
|
||||
│ ^
|
||||
│ │
|
||||
│ V.
|
||||
Expression has type: 'ok' | 'err'
|
||||
Context expected type: 'ok'
|
||||
│
|
||||
|
||||
Because in the expression's type:
|
||||
Here the type is a union type with some valid candidates: 'ok'
|
||||
However the following candidate: 'err'
|
||||
Differs from the expected type: 'ok'
|
||||
|
||||
6 ERRORS
|
||||
|
|
|
|||
|
|
@ -1,3 +1,3 @@
|
|||
Diagnostics reported:
|
||||
app_a/src/app_a_unused_param.erl:5:5-5:6::[Warning] [W0010] this variable is unused
|
||||
app_a/src/app_a.erl:9:6-9:7::[Warning] [W0010] this variable is unused
|
||||
app_a/src/app_a_unused_param.erl:5:5-5:6::[Warning] [W0010] this variable is unused
|
||||
|
|
|
|||
|
|
@ -5,6 +5,8 @@
|
|||
{"path":"app_a/src/app_a.erl","line":5,"char":5,"code":"ELP","severity":"warning","name":"W0011 (application_get_env)","original":null,"replacement":null,"description":"module `app_a` belongs to app `app_a`, but reads env for `misc`\n\nFor more information see: /erlang-error-index/w/W0011","docPath":"website/docs/erlang-error-index/w/W0011.md"}
|
||||
{"path":"app_a/src/app_a.erl","line":8,"char":7,"code":"ELP","severity":"warning","name":"W0018 (unexpected_semi_or_dot)","original":null,"replacement":null,"description":"Unexpected ';'\n\nFor more information see: /erlang-error-index/w/W0018","docPath":"website/docs/erlang-error-index/w/W0018.md"}
|
||||
{"path":"app_a/src/app_a.erl","line":9,"char":1,"code":"ELP","severity":"error","name":"P1700 (head_mismatch)","original":null,"replacement":null,"description":"head mismatch 'fooX' vs 'food'\n\nFor more information see: /erlang-error-index/p/P1700","docPath":null}
|
||||
{"path":"app_a/src/app_a_ssr.erl","line":7,"char":6,"code":"ELP","severity":"warning","name":"W0060 (bound_var_in_lhs)","original":null,"replacement":null,"description":"Match on a bound variable\n\nFor more information see: /erlang-error-index/w/W0060","docPath":"website/docs/erlang-error-index/w/W0060.md"}
|
||||
{"path":"app_a/src/app_a_ssr.erl","line":8,"char":6,"code":"ELP","severity":"warning","name":"W0060 (bound_var_in_lhs)","original":null,"replacement":null,"description":"Match on a bound variable\n\nFor more information see: /erlang-error-index/w/W0060","docPath":"website/docs/erlang-error-index/w/W0060.md"}
|
||||
{"path":"app_a/src/app_a_unused_param.erl","line":5,"char":5,"code":"ELP","severity":"warning","name":"L1268 (L1268)","original":null,"replacement":null,"description":"variable 'X' is unused\n\nFor more information see: /erlang-error-index/l/L1268","docPath":null}
|
||||
{"path":"app_a/src/custom_function_matches.erl","line":13,"char":5,"code":"ELP","severity":"warning","name":"W0017 (undefined_function)","original":null,"replacement":null,"description":"Function 'excluded:function/0' is undefined.\n\nFor more information see: /erlang-error-index/w/W0017","docPath":"website/docs/erlang-error-index/w/W0017.md"}
|
||||
{"path":"app_a/src/custom_function_matches.erl","line":14,"char":5,"code":"ELP","severity":"warning","name":"W0017 (undefined_function)","original":null,"replacement":null,"description":"Function 'not_excluded:function/0' is undefined.\n\nFor more information see: /erlang-error-index/w/W0017","docPath":"website/docs/erlang-error-index/w/W0017.md"}
|
||||
|
|
|
|||
|
|
@ -8,6 +8,8 @@ app_a/src/app_a.erl:20:1-20:4::[Warning] [L1230] function bat/2 is unused
|
|||
app_a/src/app_a.erl:5:5-5:35::[Warning] [W0011] module `app_a` belongs to app `app_a`, but reads env for `misc`
|
||||
app_a/src/app_a.erl:8:7-8:8::[Warning] [W0018] Unexpected ';'
|
||||
app_a/src/app_a.erl:9:1-9:5::[Error] [P1700] head mismatch 'fooX' vs 'food'
|
||||
app_a/src/app_a_ssr.erl:7:6-7:7::[Warning] [W0060] Match on a bound variable
|
||||
app_a/src/app_a_ssr.erl:8:6-8:7::[Warning] [W0060] Match on a bound variable
|
||||
app_a/src/app_a_unused_param.erl:5:5-5:6::[Warning] [L1268] variable 'X' is unused
|
||||
app_a/src/custom_function_matches.erl:13:5-13:22::[Warning] [W0017] Function 'excluded:function/0' is undefined.
|
||||
app_a/src/custom_function_matches.erl:14:5-14:26::[Warning] [W0017] Function 'not_excluded:function/0' is undefined.
|
||||
|
|
|
|||
|
|
@ -8,6 +8,8 @@ app_a/src/app_a.erl:20:1-20:4::[Error] [L1230] function bat/2 is unused
|
|||
app_a/src/app_a.erl:5:5-5:35::[Warning] [W0011] module `app_a` belongs to app `app_a`, but reads env for `misc`
|
||||
app_a/src/app_a.erl:8:7-8:8::[Warning] [W0018] Unexpected ';'
|
||||
app_a/src/app_a.erl:9:1-9:5::[Error] [P1700] head mismatch 'fooX' vs 'food'
|
||||
app_a/src/app_a_ssr.erl:7:6-7:7::[Warning] [W0060] Match on a bound variable
|
||||
app_a/src/app_a_ssr.erl:8:6-8:7::[Warning] [W0060] Match on a bound variable
|
||||
app_a/src/app_a_unused_param.erl:5:5-5:6::[Error] [L1268] variable 'X' is unused
|
||||
app_a/src/custom_function_matches.erl:13:5-13:22::[Warning] [W0017] Function 'excluded:function/0' is undefined.
|
||||
app_a/src/custom_function_matches.erl:14:5-14:26::[Warning] [W0017] Function 'not_excluded:function/0' is undefined.
|
||||
|
|
|
|||
5
crates/elp/src/resources/test/linter_config/basic.stdout
Normal file
|
|
@ -0,0 +1,5 @@
|
|||
Diagnostics reported:
|
||||
Reporting all diagnostics codes
|
||||
app_a/src/app_a.erl:3:9-3:16::[Warning] [W0002] Unused macro (MACRO_A)
|
||||
app_a/src/app_a.erl:4:9-4:14::[Warning] [L1260] record rec_a is unused
|
||||
app_b/src/app_b.erl:3:9-3:16::[Warning] [W0002] Unused macro (MACRO_B)
|
||||
|
|
@ -1,4 +1,4 @@
|
|||
Usage: [--project PROJECT] [--module MODULE] [--file ARG] [--to TO] [--no-diags] [--experimental] [--as PROFILE] [--dump-includes] [--rebar] [--include-generated] [--serial] [--use-cli-severity] [[--format FORMAT]] [--report-system-stats]
|
||||
Usage: [--project PROJECT] [--module MODULE] [--file ARG] [--to TO] [--no-diags] [--experimental] [--as PROFILE] [--dump-includes] [--rebar] [--include-generated] [--serial] [--use-cli-severity] [[--format FORMAT]] [--report-system-stats] [[--severity SEVERITY]]
|
||||
|
||||
Available options:
|
||||
--project <PROJECT> Path to directory with project, or to a JSON file (defaults to `.`)
|
||||
|
|
@ -15,4 +15,5 @@ Available options:
|
|||
--use-cli-severity If specified, use the provided CLI severity mapping instead of the default one
|
||||
--format <FORMAT> Show diagnostics in JSON format
|
||||
--report-system-stats Report system memory usage and other statistics
|
||||
--severity <SEVERITY> Minimum severity level to report. Valid values: error, warning, weak_warning, information
|
||||
-h, --help Prints help information
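The new `--severity` flag sets a minimum level below which diagnostics are not reported. A minimal sketch of that filtering, with illustrative types rather than the real elp CLI ones:

```rust
// Minimal sketch of a minimum-severity filter such as the new --severity
// option (illustrative types, not the real elp CLI ones): diagnostics below
// the requested level are dropped before reporting.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
enum Severity {
    Information,
    WeakWarning,
    Warning,
    Error,
}

fn filter_by_min_severity(diags: Vec<Severity>, min: Severity) -> Vec<Severity> {
    diags.into_iter().filter(|d| *d >= min).collect()
}

fn main() {
    // e.g. `--severity warning` keeps warnings and errors only
    let all = vec![
        Severity::Information,
        Severity::WeakWarning,
        Severity::Warning,
        Severity::Error,
    ];
    assert_eq!(
        filter_by_min_severity(all, Severity::Warning),
        vec![Severity::Warning, Severity::Error]
    );
}
```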
|
||||
|
|
|
|||
|
|
@ -192,6 +192,14 @@ error: incompatible_types (See https://fb.me/eqwalizer_errors#incompatible_types
|
|||
Expression has type: 'wrong_ret'
|
||||
Context expected type: 'error'
|
||||
|
||||
error: incompatible_types (See https://fb.me/eqwalizer_errors#incompatible_types)
|
||||
┌─ app_a/test/app_a_test_helpers_not_opted_in.erl:5:11
|
||||
│
|
||||
5 │ fail() -> error.
|
||||
│ ^^^^^ 'error'.
|
||||
Expression has type: 'error'
|
||||
Context expected type: 'ok'
|
||||
|
||||
error: incompatible_types (See https://fb.me/eqwalizer_errors#incompatible_types)
|
||||
┌─ app_b/src/app_b.erl:16:5
|
||||
│
|
||||
|
|
@ -200,4 +208,4 @@ error: incompatible_types (See https://fb.me/eqwalizer_errors#incompatible_types
|
|||
Expression has type: [T]
|
||||
Context expected type: T
|
||||
|
||||
20 ERRORS
|
||||
21 ERRORS
|
||||
|
|
|
|||
|
|
@ -17,4 +17,5 @@
|
|||
{"path":"app_a/src/app_a_mod2.erl","line":22,"char":1,"code":"ELP","severity":"error","name":"eqWAlizer: type_alias_is_non_productive","original":null,"replacement":null,"description":"```lang=error,counterexample\n\nrecursive type invalid/0 is not productive\n```\n\n> [docs on `type_alias_is_non_productive`](https://fb.me/eqwalizer_errors#type_alias_is_non_productive)","docPath":null}
|
||||
{"path":"app_a/src/app_a_mod2.erl","line":31,"char":9,"code":"ELP","severity":"error","name":"eqWAlizer: incompatible_types","original":"'an_atom'","replacement":null,"description":"```lang=error,counterexample\n`'an_atom'`.\n\nExpression has type: 'an_atom'\nContext expected type: number()\n```\n\n> [docs on `incompatible_types`](https://fb.me/eqwalizer_errors#incompatible_types)","docPath":null}
|
||||
{"path":"app_a/test/app_a_test_helpers.erl","line":6,"char":11,"code":"ELP","severity":"error","name":"eqWAlizer: incompatible_types","original":"'wrong_ret'","replacement":null,"description":"```lang=error,counterexample\n`'wrong_ret'`.\n\nExpression has type: 'wrong_ret'\nContext expected type: 'error'\n```\n\n> [docs on `incompatible_types`](https://fb.me/eqwalizer_errors#incompatible_types)","docPath":null}
|
||||
{"path":"app_a/test/app_a_test_helpers_not_opted_in.erl","line":5,"char":11,"code":"ELP","severity":"error","name":"eqWAlizer: incompatible_types","original":"'error'","replacement":null,"description":"```lang=error,counterexample\n`'error'`.\n\nExpression has type: 'error'\nContext expected type: 'ok'\n```\n\n> [docs on `incompatible_types`](https://fb.me/eqwalizer_errors#incompatible_types)","docPath":null}
|
||||
{"path":"app_b/src/app_b.erl","line":16,"char":5,"code":"ELP","severity":"error","name":"eqWAlizer: incompatible_types","original":"L","replacement":null,"description":"```lang=error,counterexample\n`L`.\n\nExpression has type: [T]\nContext expected type: T\n```\n\n> [docs on `incompatible_types`](https://fb.me/eqwalizer_errors#incompatible_types)","docPath":null}
|
||||
|
|
|
|||
|
|
@ -192,6 +192,14 @@ error: incompatible_types (See https://fb.me/eqwalizer_errors#incompatible_types
|
|||
Expression has type: 'wrong_ret'
|
||||
Context expected type: 'error'
|
||||
|
||||
error: incompatible_types (See https://fb.me/eqwalizer_errors#incompatible_types)
|
||||
┌─ app_a/test/app_a_test_helpers_not_opted_in.erl:5:11
|
||||
│
|
||||
5 │ fail() -> error.
|
||||
│ ^^^^^ 'error'.
|
||||
Expression has type: 'error'
|
||||
Context expected type: 'ok'
|
||||
|
||||
error: incompatible_types (See https://fb.me/eqwalizer_errors#incompatible_types)
|
||||
┌─ app_b/src/app_b.erl:16:5
|
||||
│
|
||||
|
|
@ -200,4 +208,4 @@ error: incompatible_types (See https://fb.me/eqwalizer_errors#incompatible_types
|
|||
Expression has type: [T]
|
||||
Context expected type: T
|
||||
|
||||
20 ERRORS
|
||||
21 ERRORS
|
||||
|
|
|
|||
|
|
@ -17,4 +17,5 @@
|
|||
{"path":"app_a/src/app_a_mod2.erl","line":22,"char":1,"code":"ELP","severity":"error","name":"eqWAlizer: type_alias_is_non_productive","original":null,"replacement":null,"description":"```lang=error,counterexample\n\nrecursive type invalid/0 is not productive\n```\n\n> [docs on `type_alias_is_non_productive`](https://fb.me/eqwalizer_errors#type_alias_is_non_productive)","docPath":null}
|
||||
{"path":"app_a/src/app_a_mod2.erl","line":31,"char":9,"code":"ELP","severity":"error","name":"eqWAlizer: incompatible_types","original":"'an_atom'","replacement":null,"description":"```lang=error,counterexample\n`'an_atom'`.\n\nExpression has type: 'an_atom'\nContext expected type: number()\n```\n\n> [docs on `incompatible_types`](https://fb.me/eqwalizer_errors#incompatible_types)","docPath":null}
|
||||
{"path":"app_a/test/app_a_test_helpers.erl","line":6,"char":11,"code":"ELP","severity":"error","name":"eqWAlizer: incompatible_types","original":"'wrong_ret'","replacement":null,"description":"```lang=error,counterexample\n`'wrong_ret'`.\n\nExpression has type: 'wrong_ret'\nContext expected type: 'error'\n```\n\n> [docs on `incompatible_types`](https://fb.me/eqwalizer_errors#incompatible_types)","docPath":null}
|
||||
{"path":"app_a/test/app_a_test_helpers_not_opted_in.erl","line":5,"char":11,"code":"ELP","severity":"error","name":"eqWAlizer: incompatible_types","original":"'error'","replacement":null,"description":"```lang=error,counterexample\n`'error'`.\n\nExpression has type: 'error'\nContext expected type: 'ok'\n```\n\n> [docs on `incompatible_types`](https://fb.me/eqwalizer_errors#incompatible_types)","docPath":null}
|
||||
{"path":"app_b/src/app_b.erl","line":16,"char":5,"code":"ELP","severity":"error","name":"eqWAlizer: incompatible_types","original":"L","replacement":null,"description":"```lang=error,counterexample\n`L`.\n\nExpression has type: [T]\nContext expected type: T\n```\n\n> [docs on `incompatible_types`](https://fb.me/eqwalizer_errors#incompatible_types)","docPath":null}
|
||||
|
|
|
|||
|
|
@ -192,4 +192,12 @@ error: incompatible_types (See https://fb.me/eqwalizer_errors#incompatible_types
|
|||
Expression has type: 'wrong_ret'
|
||||
Context expected type: 'error'
|
||||
|
||||
19 ERRORS
|
||||
error: incompatible_types (See https://fb.me/eqwalizer_errors#incompatible_types)
|
||||
┌─ app_a/test/app_a_test_helpers_not_opted_in.erl:5:11
|
||||
│
|
||||
5 │ fail() -> error.
|
||||
│ ^^^^^ 'error'.
|
||||
Expression has type: 'error'
|
||||
Context expected type: 'ok'
|
||||
|
||||
20 ERRORS
|
||||
|
|
|
|||
|
|
@ -192,4 +192,12 @@ error: incompatible_types (See https://fb.me/eqwalizer_errors#incompatible_types
|
|||
Expression has type: 'wrong_ret'
|
||||
Context expected type: 'error'
|
||||
|
||||
19 ERRORS
|
||||
error: incompatible_types (See https://fb.me/eqwalizer_errors#incompatible_types)
|
||||
┌─ app_a/test/app_a_test_helpers_not_opted_in.erl:5:11
|
||||
│
|
||||
5 │ fail() -> error.
|
||||
│ ^^^^^ 'error'.
|
||||
Expression has type: 'error'
|
||||
Context expected type: 'ok'
|
||||
|
||||
20 ERRORS
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
Reporting all diagnostics codes
|
||||
module specified: unavailable_type
|
||||
Diagnostics reported:
|
||||
app_a/src/unavailable_type.erl:10:43-10:58::[Warning] [W0059] The type 'app_c:my_type_c/0' is defined in application 'app_c', but the application is not a dependency of 'app_a' (defined in 'fbcode//whatsapp/elp/test_projects/xref:app_a').
|
||||
app_a/src/unavailable_type.erl:6:16-6:31::[Warning] [W0059] The type 'app_c:my_type_c/0' is defined in application 'app_c', but the application is not a dependency of 'app_a' (defined in 'fbcode//whatsapp/elp/test_projects/xref:app_a').
|
||||
app_a/src/unavailable_type.erl:10:43-10:58::[Warning] [W0059] The type 'app_c:my_type_c/0' is defined in application 'app_c', but the application is not a dependency of 'app_a' (defined in 'root//xref:app_a').
|
||||
app_a/src/unavailable_type.erl:6:16-6:31::[Warning] [W0059] The type 'app_c:my_type_c/0' is defined in application 'app_c', but the application is not a dependency of 'app_a' (defined in 'root//xref:app_a').
|
||||
|
|
|
|||
|
|
@ -2073,7 +2073,7 @@ impl Server {
|
|||
};
|
||||
|
||||
for (_, _, file_id) in module_index.iter_own() {
|
||||
match snapshot.analysis.should_eqwalize(file_id, false) {
|
||||
match snapshot.analysis.should_eqwalize(file_id) {
|
||||
Ok(true) => {
|
||||
files.push(file_id);
|
||||
}
|
||||
|
|
|
|||
|
|
@ -33,7 +33,7 @@ use super::FILE_WATCH_LOGGER_NAME;
|
|||
use super::logger::LspLogger;
|
||||
use crate::config::Config;
|
||||
use crate::from_json;
|
||||
// @fb-only
|
||||
// @fb-only: use crate::meta_only::get_log_dir;
|
||||
use crate::server::Handle;
|
||||
use crate::server::LOGGER_NAME;
|
||||
use crate::server::Server;
|
||||
|
|
@ -126,7 +126,7 @@ impl ServerSetup {
|
|||
|
||||
// Set up a logger for tracking down why we are seeing stale
|
||||
// results when branches are switched, as per T218973130
|
||||
// @fb-only
|
||||
// @fb-only: let log_dir = get_log_dir();
|
||||
let log_dir = format!("{}/elp", std::env::temp_dir().display()); // @oss-only
|
||||
let _ = fs::create_dir_all(&log_dir);
|
||||
let log_file = format!(
|
||||
|
|
|
|||
|
|
@ -36,9 +36,11 @@ use parking_lot::Mutex;
|
|||
use parking_lot::RwLock;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use vfs::AnchoredPathBuf;
|
||||
|
||||
use crate::config::Config;
|
||||
use crate::convert;
|
||||
use crate::convert::url_from_abs_path;
|
||||
use crate::line_endings::LineEndings;
|
||||
use crate::mem_docs::MemDocs;
|
||||
use crate::server::EqwalizerTypes;
|
||||
|
|
@ -186,6 +188,14 @@ impl Snapshot {
|
|||
self.line_ending_map.read()[&id]
|
||||
}
|
||||
|
||||
pub(crate) fn anchored_path(&self, path: &AnchoredPathBuf) -> Option<Url> {
|
||||
let mut base = self.vfs.read().file_path(path.anchor).clone();
|
||||
base.pop();
|
||||
let path = base.join(&path.path)?;
|
||||
let path = path.as_path()?;
|
||||
Some(url_from_abs_path(path))
|
||||
}
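The new `anchored_path` helper resolves an edit destination relative to the file it is anchored to. A rough model of that resolution, assuming plain paths instead of the VFS:

```rust
// Rough model of the resolution done by `anchored_path` above: the destination
// is interpreted relative to the directory of the anchor file. Plain std::path
// here; the real code resolves through the VFS and converts the result to an
// LSP Url.
use std::path::{Path, PathBuf};

fn resolve_anchored(anchor_file: &Path, relative: &str) -> PathBuf {
    let mut base = anchor_file.to_path_buf();
    base.pop(); // drop the file name, keeping the anchor's directory
    base.join(relative)
}

fn main() {
    let dst = resolve_anchored(Path::new("/repo/app_a/src/foo.erl"), "foo_new.erl");
    assert_eq!(dst, PathBuf::from("/repo/app_a/src/foo_new.erl"));
}
```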
|
||||
|
||||
pub fn update_cache_for_file(
|
||||
&self,
|
||||
file_id: FileId,
|
||||
|
|
@ -193,7 +203,7 @@ impl Snapshot {
|
|||
) -> Result<()> {
|
||||
let _ = self.analysis.def_map(file_id)?;
|
||||
if optimize_for_eqwalizer {
|
||||
let should_eqwalize = self.analysis.should_eqwalize(file_id, false)?;
|
||||
let should_eqwalize = self.analysis.should_eqwalize(file_id)?;
|
||||
if should_eqwalize {
|
||||
let _ = self.analysis.module_ast(file_id)?;
|
||||
}
|
||||
|
|
@ -242,7 +252,7 @@ impl Snapshot {
|
|||
let file_ids: Vec<FileId> = module_index
|
||||
.iter_own()
|
||||
.filter_map(|(_, _, file_id)| {
|
||||
if let Ok(true) = self.analysis.should_eqwalize(file_id, false) {
|
||||
if let Ok(true) = self.analysis.should_eqwalize(file_id) {
|
||||
Some(file_id)
|
||||
} else {
|
||||
None
|
||||
|
|
|
|||
|
|
@ -10,6 +10,7 @@
|
|||
|
||||
//! Conversion of rust-analyzer specific types to lsp_types equivalents.
|
||||
|
||||
use std::mem;
|
||||
use std::sync::atomic::AtomicU32;
|
||||
use std::sync::atomic::Ordering;
|
||||
|
||||
|
|
@ -47,6 +48,7 @@ use elp_ide::elp_ide_db::elp_base_db::FileId;
|
|||
use elp_ide::elp_ide_db::elp_base_db::FilePosition;
|
||||
use elp_ide::elp_ide_db::elp_base_db::FileRange;
|
||||
use elp_ide::elp_ide_db::rename::RenameError;
|
||||
use elp_ide::elp_ide_db::source_change::FileSystemEdit;
|
||||
use elp_ide::elp_ide_db::source_change::SourceChange;
|
||||
use elp_ide_db::text_edit::Indel;
|
||||
use elp_ide_db::text_edit::TextEdit;
|
||||
|
|
@ -121,9 +123,9 @@ pub(crate) fn optional_versioned_text_document_identifier(
|
|||
pub(crate) fn text_document_edit(
|
||||
snap: &Snapshot,
|
||||
file_id: FileId,
|
||||
text_document: lsp_types::OptionalVersionedTextDocumentIdentifier,
|
||||
edit: TextEdit,
|
||||
) -> Result<lsp_types::TextDocumentEdit> {
|
||||
let text_document = optional_versioned_text_document_identifier(snap, file_id);
|
||||
let line_index = snap.analysis.line_index(file_id)?;
|
||||
let line_endings = snap.line_endings(file_id);
|
||||
let edits: Vec<lsp_types::OneOf<lsp_types::TextEdit, lsp_types::AnnotatedTextEdit>> = edit
|
||||
|
|
@ -131,34 +133,131 @@ pub(crate) fn text_document_edit(
|
|||
.map(|it| lsp_types::OneOf::Left(text_edit(&line_index, line_endings, it)))
|
||||
.collect();
|
||||
|
||||
// if snap.analysis.is_library_file(file_id)? && snap.config.change_annotation_support() {
|
||||
// for edit in &mut edits {
|
||||
// edit.annotation_id = Some(outside_workspace_annotation_id())
|
||||
// }
|
||||
// }
|
||||
Ok(lsp_types::TextDocumentEdit {
|
||||
text_document,
|
||||
edits,
|
||||
})
|
||||
}
|
||||
|
||||
pub(crate) fn text_document_ops(
|
||||
snap: &Snapshot,
|
||||
file_system_edit: FileSystemEdit,
|
||||
) -> Cancellable<Vec<lsp_types::DocumentChangeOperation>> {
|
||||
let mut ops = Vec::new();
|
||||
match file_system_edit {
|
||||
FileSystemEdit::CreateFile {
|
||||
dst,
|
||||
initial_contents,
|
||||
} => {
|
||||
if let Some(uri) = snap.anchored_path(&dst) {
|
||||
let create_file = lsp_types::ResourceOp::Create(lsp_types::CreateFile {
|
||||
uri: uri.clone(),
|
||||
options: None,
|
||||
annotation_id: None,
|
||||
});
|
||||
ops.push(lsp_types::DocumentChangeOperation::Op(create_file));
|
||||
if !initial_contents.is_empty() {
|
||||
let text_document =
|
||||
lsp_types::OptionalVersionedTextDocumentIdentifier { uri, version: None };
|
||||
let text_edit = lsp_types::TextEdit {
|
||||
range: lsp_types::Range::default(),
|
||||
new_text: initial_contents,
|
||||
};
|
||||
let edit_file = lsp_types::TextDocumentEdit {
|
||||
text_document,
|
||||
edits: vec![lsp_types::OneOf::Left(text_edit)],
|
||||
};
|
||||
ops.push(lsp_types::DocumentChangeOperation::Edit(edit_file));
|
||||
}
|
||||
} else {
|
||||
log::warn!("create file failed: {:?}", dst);
|
||||
}
|
||||
}
|
||||
FileSystemEdit::MoveFile { src, dst } => {
|
||||
if let Some(new_uri) = snap.anchored_path(&dst) {
|
||||
let old_uri = snap.file_id_to_url(src);
|
||||
let rename_file = lsp_types::RenameFile {
|
||||
old_uri,
|
||||
new_uri,
|
||||
options: None,
|
||||
annotation_id: None,
|
||||
};
|
||||
ops.push(lsp_types::DocumentChangeOperation::Op(
|
||||
lsp_types::ResourceOp::Rename(rename_file),
|
||||
))
|
||||
} else {
|
||||
log::warn!("rename file failed: {:?} -> {:?}", src, dst);
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(ops)
|
||||
}
|
||||
|
||||
pub(crate) fn workspace_edit(
|
||||
snap: &Snapshot,
|
||||
source_change: SourceChange,
|
||||
mut source_change: SourceChange,
|
||||
) -> Result<lsp_types::WorkspaceEdit> {
|
||||
let mut edits: Vec<_> = vec![];
|
||||
for (file_id, edit) in source_change.source_file_edits {
|
||||
// let edit = snippet_text_document_edit(snap, source_change.is_snippet, file_id, edit)?;
|
||||
let edit = text_document_edit(snap, file_id, edit)?;
|
||||
edits.push(lsp_types::TextDocumentEdit {
|
||||
text_document: edit.text_document,
|
||||
edits: edit.edits.into_iter().collect(),
|
||||
});
|
||||
let mut document_changes: Vec<lsp_types::DocumentChangeOperation> = Vec::new();
|
||||
|
||||
// This is copying RA's order of operations, first file creates,
|
||||
// then edits, then file moves.
|
||||
|
||||
// This allows us to apply edits to the file once it has
|
||||
// moved. Except we have no FileId at that point
|
||||
for op in &mut source_change.file_system_edits {
|
||||
if let FileSystemEdit::CreateFile {
|
||||
dst,
|
||||
initial_contents,
|
||||
} = op
|
||||
{
|
||||
// replace with a placeholder to avoid cloning the edit
|
||||
let op = FileSystemEdit::CreateFile {
|
||||
dst: dst.clone(),
|
||||
initial_contents: mem::take(initial_contents),
|
||||
};
|
||||
let ops = text_document_ops(snap, op)?;
|
||||
document_changes.extend_from_slice(&ops);
|
||||
}
|
||||
}
|
||||
let document_changes = lsp_types::DocumentChanges::Edits(edits);
|
||||
|
||||
for op in source_change.file_system_edits {
|
||||
if !matches!(op, FileSystemEdit::CreateFile { .. }) {
|
||||
let ops = text_document_ops(snap, op)?;
|
||||
document_changes.extend_from_slice(&ops);
|
||||
}
|
||||
}
|
||||
|
||||
for (file_id, edit) in source_change.source_file_edits {
|
||||
let text_document = optional_versioned_text_document_identifier(snap, file_id);
|
||||
let edit = text_document_edit(snap, file_id, text_document, edit)?;
|
||||
document_changes.push(lsp_types::DocumentChangeOperation::Edit(
|
||||
lsp_types::TextDocumentEdit {
|
||||
text_document: edit.text_document,
|
||||
edits: edit.edits.into_iter().collect(),
|
||||
},
|
||||
));
|
||||
}
|
||||
|
||||
// Edits on renamed files. The LineIndex from the original can be used.
|
||||
for (file_ref, edit) in source_change.new_file_edits {
|
||||
if let Some(uri) = snap.anchored_path(&file_ref.clone().into()) {
|
||||
let version = snap.url_file_version(&uri);
|
||||
let text_document = lsp_types::OptionalVersionedTextDocumentIdentifier { uri, version };
|
||||
let edit = text_document_edit(snap, file_ref.anchor, text_document, edit)?;
|
||||
document_changes.push(lsp_types::DocumentChangeOperation::Edit(
|
||||
lsp_types::TextDocumentEdit {
|
||||
text_document: edit.text_document,
|
||||
edits: edit.edits.into_iter().collect(),
|
||||
},
|
||||
));
|
||||
} else {
|
||||
log::warn!("new file edit failed: {:?}", file_ref);
|
||||
}
|
||||
}
|
||||
|
||||
let workspace_edit = lsp_types::WorkspaceEdit {
|
||||
changes: None,
|
||||
document_changes: Some(document_changes),
|
||||
document_changes: Some(lsp_types::DocumentChanges::Operations(document_changes)),
|
||||
change_annotations: None,
|
||||
};
|
||||
Ok(workspace_edit)
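The rewritten `workspace_edit` now groups operations in phases, as the comments describe: file creations first, then text edits, then the remaining file-system operations such as renames. A standalone sketch of that ordering with a simplified op type (the real code emits `lsp_types::DocumentChangeOperation` values):

```rust
// Standalone model of the ordering applied when building the workspace edit
// above: creations first, then edits, then remaining file-system operations.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum OpKind {
    Create,
    Edit,
    Move,
}

fn order_ops(ops: &[OpKind]) -> Vec<OpKind> {
    let mut ordered = Vec::with_capacity(ops.len());
    ordered.extend(ops.iter().copied().filter(|op| *op == OpKind::Create));
    ordered.extend(ops.iter().copied().filter(|op| *op == OpKind::Edit));
    ordered.extend(ops.iter().copied().filter(|op| *op == OpKind::Move));
    ordered
}

fn main() {
    let ordered = order_ops(&[OpKind::Move, OpKind::Edit, OpKind::Create, OpKind::Edit]);
    assert_eq!(
        ordered,
        vec![OpKind::Create, OpKind::Edit, OpKind::Edit, OpKind::Move]
    );
}
```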
|
||||
|
|
@ -182,10 +281,6 @@ pub(crate) fn code_action(
|
|||
) -> Result<lsp_types::CodeActionOrCommand> {
|
||||
let mut res = lsp_types::CodeAction {
|
||||
title: assist.label.to_string(),
|
||||
// group: assist
|
||||
// .group
|
||||
// .filter(|_| snap.config.code_action_group())
|
||||
// .map(|gr| gr.0),
|
||||
kind: Some(code_action_kind(assist.id.1)),
|
||||
edit: None,
|
||||
is_preferred: None,
|
||||
|
|
|
|||
|
|
@ -31,7 +31,7 @@ mod tests {
|
|||
#[test]
|
||||
#[ignore]
|
||||
fn test_success_case() {
|
||||
let path_str = "../../test_projects/buck_tests";
|
||||
let path_str = "../../test/test_projects/buck_tests";
|
||||
let path: PathBuf = path_str.into();
|
||||
let cli = Fake::default();
|
||||
|
||||
|
|
@ -65,7 +65,7 @@ mod tests {
|
|||
let ast = analysis.module_ast(file_id).unwrap();
|
||||
assert_eq!(ast.errors, vec![]);
|
||||
let eq_enabled = analysis
|
||||
.is_eqwalizer_enabled(file_id, false)
|
||||
.is_eqwalizer_enabled(file_id)
|
||||
.unwrap_or_else(|_| panic!("Failed to check if eqwalizer enabled for {module}"));
|
||||
assert_eq!(eq_enabled, eqwalizer_enabled);
|
||||
let project_data = analysis.project_data(file_id).unwrap();
|
||||
|
|
@ -76,7 +76,7 @@ mod tests {
|
|||
#[test]
|
||||
#[ignore]
|
||||
fn test_load_buck_targets() {
|
||||
let path_str = "../../test_projects/buck_tests";
|
||||
let path_str = "../../test/test_projects/buck_tests";
|
||||
let path: PathBuf = path_str.into();
|
||||
|
||||
let (elp_config, buck_config) =
|
||||
|
|
|
|||
|
|
@ -36,7 +36,7 @@ use crate::support::diagnostic_project;
|
|||
fn test_run_mock_lsp() {
|
||||
if cfg!(feature = "buck") {
|
||||
let workspace_root = AbsPathBuf::assert(
|
||||
Utf8Path::new(env!("CARGO_WORKSPACE_DIR")).join("test_projects/end_to_end"),
|
||||
Utf8Path::new(env!("CARGO_WORKSPACE_DIR")).join("test/test_projects/end_to_end"),
|
||||
);
|
||||
|
||||
// Sanity check
|
||||
|
|
@ -70,7 +70,7 @@ fn test_run_mock_lsp() {
|
|||
}
|
||||
],
|
||||
"textDocument": {
|
||||
"uri": "file:///[..]/test_projects/end_to_end/assist_examples/src/head_mismatch.erl",
|
||||
"uri": "file:///[..]/test/test_projects/end_to_end/assist_examples/src/head_mismatch.erl",
|
||||
"version": 0
|
||||
}
|
||||
}
|
||||
|
|
@ -99,7 +99,7 @@ fn test_run_mock_lsp() {
|
|||
}
|
||||
],
|
||||
"textDocument": {
|
||||
"uri": "file:///[..]/test_projects/end_to_end/assist_examples/src/head_mismatch.erl",
|
||||
"uri": "file:///[..]/test/test_projects/end_to_end/assist_examples/src/head_mismatch.erl",
|
||||
"version": 0
|
||||
}
|
||||
}
|
||||
|
|
@ -128,7 +128,7 @@ fn test_run_mock_lsp() {
|
|||
}
|
||||
],
|
||||
"textDocument": {
|
||||
"uri": "file:///[..]/test_projects/end_to_end/assist_examples/src/head_mismatch.erl",
|
||||
"uri": "file:///[..]/test/test_projects/end_to_end/assist_examples/src/head_mismatch.erl",
|
||||
"version": 0
|
||||
}
|
||||
}
|
||||
|
|
@ -157,7 +157,7 @@ fn test_run_mock_lsp() {
|
|||
}
|
||||
],
|
||||
"textDocument": {
|
||||
"uri": "file:///[..]/test_projects/end_to_end/assist_examples/src/head_mismatch.erl",
|
||||
"uri": "file:///[..]/test/test_projects/end_to_end/assist_examples/src/head_mismatch.erl",
|
||||
"version": 0
|
||||
}
|
||||
}
|
||||
|
|
@ -175,7 +175,7 @@ fn test_run_mock_lsp() {
|
|||
fn test_e2e_eqwalizer_module() {
|
||||
if cfg!(feature = "buck") {
|
||||
let workspace_root = AbsPathBuf::assert(
|
||||
Utf8Path::new(env!("CARGO_WORKSPACE_DIR")).join("test_projects/standard"),
|
||||
Utf8Path::new(env!("CARGO_WORKSPACE_DIR")).join("test/test_projects/standard"),
|
||||
);
|
||||
|
||||
// Sanity check
|
||||
|
|
@ -321,7 +321,7 @@ fn test_e2e_eqwalizer_module() {
|
|||
"source": "eqWAlizer"
|
||||
}
|
||||
],
|
||||
"uri": "file:///[..]/test_projects/standard/app_a/src/app_a.erl",
|
||||
"uri": "file:///[..]/test/test_projects/standard/app_a/src/app_a.erl",
|
||||
"version": 0
|
||||
}"#]],
|
||||
);
|
||||
|
|
@ -334,7 +334,7 @@ fn test_e2e_eqwalizer_module() {
|
|||
// #[test]
|
||||
// fn test_e2e_eqwalizer_header() {
|
||||
// let workspace_root =
|
||||
// AbsPathBuf::assert(Path::new(env!("CARGO_WORKSPACE_DIR")).join("test_projects/standard"));
|
||||
// AbsPathBuf::assert(Path::new(env!("CARGO_WORKSPACE_DIR")).join("test/test_projects/standard"));
|
||||
|
||||
// // Sanity check
|
||||
// assert!(std::fs::metadata(&workspace_root).is_ok());
|
||||
|
|
|
|||
|
|
@ -339,7 +339,7 @@ pub enum ParentId {
|
|||
|
||||
#[derive(Debug)]
|
||||
pub struct AnyCallBackCtx<'a> {
|
||||
pub in_macro: Option<HirIdx>,
|
||||
pub in_macro: Option<(HirIdx, Option<InFile<DefineId>>)>,
|
||||
pub parents: &'a Vec<ParentId>,
|
||||
pub item_id: AnyExprId,
|
||||
pub item: AnyExpr,
|
||||
|
|
@ -426,7 +426,7 @@ pub struct FoldCtx<'a, T> {
|
|||
body_origin: BodyOrigin,
|
||||
body: &'a FoldBody<'a>,
|
||||
strategy: Strategy,
|
||||
macro_stack: Vec<HirIdx>,
|
||||
macro_stack: Vec<(HirIdx, Option<InFile<DefineId>>)>,
|
||||
parents: Vec<ParentId>,
|
||||
callback: AnyCallBack<'a, T>,
|
||||
}
|
||||
|
|
@ -594,7 +594,7 @@ impl<'a, T> FoldCtx<'a, T> {
|
|||
.do_fold_pat(pat_id, initial)
|
||||
}
|
||||
|
||||
fn in_macro(&self) -> Option<HirIdx> {
|
||||
fn in_macro(&self) -> Option<(HirIdx, Option<InFile<DefineId>>)> {
|
||||
self.macro_stack.first().copied()
|
||||
}
|
||||
|
||||
|
|
@ -752,16 +752,19 @@ impl<'a, T> FoldCtx<'a, T> {
|
|||
crate::Expr::MacroCall {
|
||||
expansion,
|
||||
args,
|
||||
macro_def: _,
|
||||
macro_def,
|
||||
macro_name: _,
|
||||
} => {
|
||||
if self.strategy.macros == MacroStrategy::DoNotExpand {
|
||||
self.do_fold_exprs(args, acc)
|
||||
} else {
|
||||
self.macro_stack.push(HirIdx {
|
||||
body_origin: self.body_origin,
|
||||
idx: AnyExprId::Expr(expr_id),
|
||||
});
|
||||
self.macro_stack.push((
|
||||
HirIdx {
|
||||
body_origin: self.body_origin,
|
||||
idx: AnyExprId::Expr(expr_id),
|
||||
},
|
||||
*macro_def,
|
||||
));
|
||||
let e = self.do_fold_expr(*expansion, acc);
|
||||
self.macro_stack.pop();
|
||||
e
|
||||
|
|
@ -950,16 +953,19 @@ impl<'a, T> FoldCtx<'a, T> {
|
|||
crate::Pat::MacroCall {
|
||||
expansion,
|
||||
args,
|
||||
macro_def: _,
|
||||
macro_def,
|
||||
macro_name: _,
|
||||
} => {
|
||||
if self.strategy.macros == MacroStrategy::DoNotExpand {
|
||||
self.do_fold_exprs(args, acc)
|
||||
} else {
|
||||
self.macro_stack.push(HirIdx {
|
||||
body_origin: self.body_origin,
|
||||
idx: AnyExprId::Pat(pat_id),
|
||||
});
|
||||
self.macro_stack.push((
|
||||
HirIdx {
|
||||
body_origin: self.body_origin,
|
||||
idx: AnyExprId::Pat(pat_id),
|
||||
},
|
||||
*macro_def,
|
||||
));
|
||||
let e = self.do_fold_pat(*expansion, acc);
|
||||
self.macro_stack.pop();
|
||||
e
|
||||
|
|
@ -1165,16 +1171,19 @@ impl<'a, T> FoldCtx<'a, T> {
|
|||
TypeExpr::MacroCall {
|
||||
expansion,
|
||||
args,
|
||||
macro_def: _,
|
||||
macro_def,
|
||||
macro_name: _,
|
||||
} => {
|
||||
if self.strategy.macros == MacroStrategy::DoNotExpand {
|
||||
self.do_fold_exprs(args, acc)
|
||||
} else {
|
||||
self.macro_stack.push(HirIdx {
|
||||
body_origin: self.body_origin,
|
||||
idx: AnyExprId::TypeExpr(type_expr_id),
|
||||
});
|
||||
self.macro_stack.push((
|
||||
HirIdx {
|
||||
body_origin: self.body_origin,
|
||||
idx: AnyExprId::TypeExpr(type_expr_id),
|
||||
},
|
||||
*macro_def,
|
||||
));
|
||||
let e = self.do_fold_type_expr(*expansion, acc);
|
||||
self.macro_stack.pop();
|
||||
e
|
||||
|
|
|
|||
|
|
@ -155,7 +155,7 @@ pub use name::MacroName;
|
|||
pub use name::Name;
|
||||
pub use name::NameArity;
|
||||
pub use name::known;
|
||||
// @fb-only
|
||||
// @fb-only: pub use name::meta_only;
|
||||
pub use sema::AtomDef;
|
||||
pub use sema::CallDef;
|
||||
pub use sema::DefinitionOrReference;
|
||||
|
|
@ -232,6 +232,10 @@ impl HirIdx {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn file_id(&self) -> FileId {
|
||||
self.body_origin.file_id()
|
||||
}
|
||||
|
||||
/// This function is used to print a representation of the HIR AST
|
||||
/// corresponding to the given `HirIdx`. It is used for debugging
|
||||
/// and testing.
|
||||
|
|
|
|||
|
|
@ -10,7 +10,7 @@
|
|||
|
||||
//! See [`Name`].
|
||||
|
||||
// @fb-only
|
||||
// @fb-only: pub mod meta_only;
|
||||
|
||||
use std::borrow::Cow;
|
||||
use std::collections::HashSet;
|
||||
|
|
|
|||
|
|
@ -102,7 +102,7 @@ use crate::resolver::Resolution;
|
|||
use crate::resolver::Resolver;
|
||||
|
||||
mod find;
|
||||
// @fb-only
|
||||
// @fb-only: pub mod meta_only;
|
||||
pub mod to_def;
|
||||
|
||||
pub struct ModuleIter(Arc<ModuleIndex>);
|
||||
|
|
@ -1006,6 +1006,28 @@ impl Semantic<'_> {
|
|||
// Folds end
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
pub fn bound_vars_by_function(
|
||||
&self,
|
||||
file_id: FileId,
|
||||
) -> FxHashMap<FunctionClauseId, FxHashSet<PatId>> {
|
||||
let bound_vars = self.bound_vars_in_pattern_diagnostic(file_id);
|
||||
let mut bound_vars_by_function: FxHashMap<FunctionClauseId, FxHashSet<PatId>> =
|
||||
FxHashMap::default();
|
||||
bound_vars.iter().for_each(|(function_id, pat_id, _var)| {
|
||||
bound_vars_by_function
|
||||
.entry(function_id.value)
|
||||
.and_modify(|vars| {
|
||||
vars.insert(*pat_id);
|
||||
})
|
||||
.or_insert_with(|| {
|
||||
let mut vars = FxHashSet::default();
|
||||
vars.insert(*pat_id);
|
||||
vars
|
||||
});
|
||||
});
|
||||
bound_vars_by_function
|
||||
}
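`bound_vars_by_function` groups the bound-variable matches by the function clause they occur in. A simplified sketch of that grouping, using std collections and integer ids in place of the HIR types:

```rust
// Standalone model of the grouping done by `bound_vars_by_function` above:
// (clause, pattern) pairs are collected into a clause -> set-of-patterns map.
// Plain std collections stand in for FxHashMap/FxHashSet and the HIR ids.
use std::collections::{HashMap, HashSet};

fn group_by_function(bound_vars: &[(u32, u32)]) -> HashMap<u32, HashSet<u32>> {
    let mut grouped: HashMap<u32, HashSet<u32>> = HashMap::new();
    for (function_clause_id, pat_id) in bound_vars {
        grouped
            .entry(*function_clause_id)
            .and_modify(|vars| {
                vars.insert(*pat_id);
            })
            .or_insert_with(|| HashSet::from([*pat_id]));
    }
    grouped
}

fn main() {
    let grouped = group_by_function(&[(1, 10), (1, 11), (2, 20)]);
    assert_eq!(grouped[&1].len(), 2);
    assert_eq!(grouped[&2].len(), 1);
}
```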
|
||||
|
||||
pub fn bound_vars_in_pattern_diagnostic(
|
||||
&self,
|
||||
file_id: FileId,
|
||||
|
|
|
|||
|
|
@ -42,7 +42,7 @@ use crate::macro_exp;
|
|||
use crate::macro_exp::BuiltInMacro;
|
||||
use crate::macro_exp::MacroExpCtx;
|
||||
use crate::resolver::Resolver;
|
||||
// @fb-only
|
||||
// @fb-only: use crate::sema::meta_only;
|
||||
|
||||
pub trait ToDef: Clone {
|
||||
type Def;
|
||||
|
|
@ -567,7 +567,7 @@ pub fn resolve_call_target(
|
|||
let fn_name: Name = sema.db.lookup_atom(body[*name].as_atom()?);
|
||||
let mo =
|
||||
None; // @oss-only
|
||||
// @fb-only
|
||||
// @fb-only: meta_only::resolve_handle_call_target(sema, arity, file_id, &module_name, &fn_name);
|
||||
if let Some(r) = mo {
|
||||
r
|
||||
} else {
|
||||
|
|
@ -885,12 +885,183 @@ fn add_dynamic_call_patterns(patterns: &mut FxHashMap<PatternKey, DynamicCallPat
|
|||
);
|
||||
}
|
||||
|
||||
// Lazy static initialization for the patterns map
|
||||
/// Specifies what forms a module argument can take.
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
pub enum ModuleArgType {
|
||||
/// The argument must be a single module atom (e.g., `apply(Mod, Fun, Args)`)
|
||||
Atom,
|
||||
/// The argument must be a list of module atoms (e.g., some batch operations)
|
||||
List,
|
||||
/// The argument can be either a single module atom or a list of modules
|
||||
/// (e.g., `meck:new(Mod | [Mod], Opts)`)
|
||||
AtomOrList,
|
||||
}
|
||||
|
||||
/// Pattern for matching module argument positions in function calls.
|
||||
/// Used by rename operations to identify which argument contains a module name.
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub struct ModuleArgPattern {
|
||||
/// Index of the argument containing the module name (0-based)
|
||||
pub index: usize,
|
||||
/// The type of the module argument (atom, list, or either)
|
||||
pub arg_type: ModuleArgType,
|
||||
}
|
||||
|
||||
impl ModuleArgPattern {
|
||||
/// Creates a pattern where the argument is a single module atom.
|
||||
pub const fn atom(index: usize) -> Self {
|
||||
Self {
|
||||
index,
|
||||
arg_type: ModuleArgType::Atom,
|
||||
}
|
||||
}
|
||||
|
||||
/// Creates a pattern where the argument is a list of module atoms.
|
||||
pub const fn list(index: usize) -> Self {
|
||||
Self {
|
||||
index,
|
||||
arg_type: ModuleArgType::List,
|
||||
}
|
||||
}
|
||||
|
||||
/// Creates a pattern where the argument can be either a single atom or a list.
|
||||
pub const fn atom_or_list(index: usize) -> Self {
|
||||
Self {
|
||||
index,
|
||||
arg_type: ModuleArgType::AtomOrList,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns true if this pattern accepts a single atom.
|
||||
pub const fn accepts_atom(&self) -> bool {
|
||||
matches!(
|
||||
self.arg_type,
|
||||
ModuleArgType::Atom | ModuleArgType::AtomOrList
|
||||
)
|
||||
}
|
||||
|
||||
/// Returns true if this pattern accepts a list of atoms.
|
||||
pub const fn accepts_list(&self) -> bool {
|
||||
matches!(
|
||||
self.arg_type,
|
||||
ModuleArgType::List | ModuleArgType::AtomOrList
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
fn add_module_argument_patterns(patterns: &mut FxHashMap<PatternKey, ModuleArgPattern>) {
|
||||
// Each entry follows the format:
|
||||
// (module, function, arity) -> ModuleArgPattern
|
||||
//
|
||||
// Where:
|
||||
// module: Module name (Some("meck"), Some("application"), etc.)
|
||||
// function: Function name as string literal (e.g., "new", "get_env")
|
||||
// arity: Number of arguments this function pattern expects
|
||||
// ModuleArgPattern: Contains the argument index and the expected type
|
||||
//
|
||||
// All indexes are 0-based.
|
||||
|
||||
// meck - mocking library
|
||||
// meck:new/2 accepts either a single module atom or a list of modules
|
||||
patterns.insert((Some("meck"), "called", 3), ModuleArgPattern::atom(0));
|
||||
patterns.insert((Some("meck"), "called", 4), ModuleArgPattern::atom(0));
|
||||
patterns.insert((Some("meck"), "capture", 5), ModuleArgPattern::atom(1));
|
||||
patterns.insert((Some("meck"), "capture", 6), ModuleArgPattern::atom(1));
|
||||
patterns.insert(
|
||||
(Some("meck"), "delete", 3),
|
||||
ModuleArgPattern::atom_or_list(0),
|
||||
);
|
||||
patterns.insert(
|
||||
(Some("meck"), "delete", 4),
|
||||
ModuleArgPattern::atom_or_list(0),
|
||||
);
|
||||
patterns.insert(
|
||||
(Some("meck"), "expect", 3),
|
||||
ModuleArgPattern::atom_or_list(0),
|
||||
);
|
||||
patterns.insert(
|
||||
(Some("meck"), "expect", 4),
|
||||
ModuleArgPattern::atom_or_list(0),
|
||||
);
|
||||
patterns.insert(
|
||||
(Some("meck"), "expects", 2),
|
||||
ModuleArgPattern::atom_or_list(0),
|
||||
);
|
||||
patterns.insert((Some("meck"), "history", 1), ModuleArgPattern::atom(0));
|
||||
patterns.insert((Some("meck"), "history", 2), ModuleArgPattern::atom(0));
|
||||
patterns.insert((Some("meck"), "loop", 4), ModuleArgPattern::atom_or_list(0));
|
||||
patterns.insert((Some("meck"), "new", 1), ModuleArgPattern::atom_or_list(0));
|
||||
patterns.insert((Some("meck"), "new", 2), ModuleArgPattern::atom_or_list(0));
|
||||
patterns.insert((Some("meck"), "num_calls", 3), ModuleArgPattern::atom(0));
|
||||
patterns.insert((Some("meck"), "num_calls", 4), ModuleArgPattern::atom(0));
|
||||
patterns.insert(
|
||||
(Some("meck"), "reset", 1),
|
||||
ModuleArgPattern::atom_or_list(0),
|
||||
);
|
||||
patterns.insert(
|
||||
(Some("meck"), "sequence", 4),
|
||||
ModuleArgPattern::atom_or_list(0),
|
||||
);
|
||||
patterns.insert(
|
||||
(Some("meck"), "unload", 1),
|
||||
ModuleArgPattern::atom_or_list(0),
|
||||
);
|
||||
patterns.insert(
|
||||
(Some("meck"), "validate", 1),
|
||||
ModuleArgPattern::atom_or_list(0),
|
||||
);
|
||||
patterns.insert((Some("meck"), "wait", 4), ModuleArgPattern::atom(0));
|
||||
patterns.insert((Some("meck"), "wait", 5), ModuleArgPattern::atom(1));
|
||||
patterns.insert((Some("meck"), "wait", 6), ModuleArgPattern::atom(1));
|
||||
|
||||
// code module - module loading and management
|
||||
// These functions from the Erlang stdlib take module() as their argument
|
||||
patterns.insert((Some("code"), "load_file", 1), ModuleArgPattern::atom(0));
|
||||
patterns.insert(
|
||||
(Some("code"), "ensure_loaded", 1),
|
||||
ModuleArgPattern::atom(0),
|
||||
);
|
||||
patterns.insert((Some("code"), "delete", 1), ModuleArgPattern::atom(0));
|
||||
patterns.insert((Some("code"), "purge", 1), ModuleArgPattern::atom(0));
|
||||
patterns.insert((Some("code"), "soft_purge", 1), ModuleArgPattern::atom(0));
|
||||
patterns.insert((Some("code"), "is_loaded", 1), ModuleArgPattern::atom(0));
|
||||
patterns.insert(
|
||||
(Some("code"), "get_object_code", 1),
|
||||
ModuleArgPattern::atom(0),
|
||||
);
|
||||
patterns.insert((Some("code"), "module_md5", 1), ModuleArgPattern::atom(0));
|
||||
patterns.insert((Some("code"), "is_sticky", 1), ModuleArgPattern::atom(0));
|
||||
}
|
||||
|
||||
// Lazy static initialization for the patterns maps
|
||||
lazy_static! {
|
||||
static ref DYNAMIC_CALL_PATTERNS: FxHashMap<PatternKey, DynamicCallPattern> = {
|
||||
let mut patterns = FxHashMap::default();
|
||||
add_dynamic_call_patterns(&mut patterns);
|
||||
// @fb-only
|
||||
// @fb-only: meta_only::add_dynamic_call_patterns(&mut patterns);
|
||||
patterns
|
||||
};
|
||||
static ref MODULE_ARGUMENT_PATTERNS: FxHashMap<PatternKey, ModuleArgPattern> = {
|
||||
let mut patterns = FxHashMap::default();
|
||||
add_module_argument_patterns(&mut patterns);
|
||||
// @fb-only: meta_only::add_module_argument_patterns(&mut patterns);
|
||||
patterns
|
||||
};
|
||||
/// Combined patterns for module argument positions.
|
||||
/// Merges dynamic call patterns (that have module_arg_index) with simple module argument patterns.
|
||||
/// Used by rename operations where we only care about the module argument position.
|
||||
static ref COMBINED_MODULE_ARG_PATTERNS: FxHashMap<PatternKey, ModuleArgPattern> = {
|
||||
let mut patterns: FxHashMap<PatternKey, ModuleArgPattern> = FxHashMap::default();
|
||||
// Add module_arg_index from dynamic call patterns (where present)
|
||||
for (key, pattern) in DYNAMIC_CALL_PATTERNS.iter() {
|
||||
if let Some(module_idx) = pattern.module_arg_index {
|
||||
patterns.insert(*key, ModuleArgPattern::atom(module_idx));
|
||||
}
|
||||
}
|
||||
// Add from simple module argument patterns
|
||||
for (key, module_arg_pattern) in MODULE_ARGUMENT_PATTERNS.iter() {
|
||||
patterns.insert(*key, *module_arg_pattern);
|
||||
}
|
||||
patterns
|
||||
};
|
||||
}
|
||||
|
|
@ -899,6 +1070,10 @@ fn get_dynamic_call_patterns() -> &'static FxHashMap<PatternKey, DynamicCallPatt
|
|||
&DYNAMIC_CALL_PATTERNS
|
||||
}
|
||||
|
||||
pub fn get_module_arg_patterns() -> &'static FxHashMap<PatternKey, ModuleArgPattern> {
|
||||
&COMBINED_MODULE_ARG_PATTERNS
|
||||
}
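Together with `ModuleArgPattern`, this lookup lets a module rename ask which argument of a known call carries a module name. A rough model with simplified types; the two entries are illustrative, mirroring ones registered above:

```rust
// Simplified model of the module-argument lookup exposed by
// `get_module_arg_patterns` above (plain std types, illustrative entries):
// given (module, function, arity), report which argument carries a module
// name and whether it may be an atom, a list, or either.
use std::collections::HashMap;

#[allow(dead_code)]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum ArgType {
    Atom,
    List,
    AtomOrList,
}

#[derive(Clone, Copy, Debug)]
struct ArgPattern {
    index: usize,
    arg_type: ArgType,
}

type Key = (Option<&'static str>, &'static str, usize);

fn module_arg_patterns() -> HashMap<Key, ArgPattern> {
    let mut patterns = HashMap::new();
    patterns.insert(
        (Some("meck"), "new", 2),
        ArgPattern { index: 0, arg_type: ArgType::AtomOrList },
    );
    patterns.insert(
        (Some("code"), "purge", 1),
        ArgPattern { index: 0, arg_type: ArgType::Atom },
    );
    patterns
}

fn main() {
    let patterns = module_arg_patterns();
    // Renaming module `foo` could then rewrite the atom in `meck:new(foo, Opts)`.
    let p = patterns.get(&(Some("meck"), "new", 2)).unwrap();
    assert_eq!(p.index, 0);
    assert_eq!(p.arg_type, ArgType::AtomOrList);
}
```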
|
||||
|
||||
fn look_for_dynamic_call(
|
||||
sema: &Semantic,
|
||||
file_id: FileId,
|
||||
|
|
|
|||
|
|
@ -17,7 +17,7 @@ use elp_syntax::TextRange;
|
|||
use fxhash::FxHashMap;
|
||||
use fxhash::FxHashSet;
|
||||
|
||||
// @fb-only
|
||||
// @fb-only: use crate::meta_only;
|
||||
use crate::runnables::Runnable;
|
||||
use crate::runnables::runnables;
|
||||
|
||||
|
|
@ -57,11 +57,11 @@ pub struct Link {
|
|||
}
|
||||
|
||||
#[rustfmt::skip]
|
||||
// @fb-only
|
||||
// @fb-only: pub(crate) fn annotations(db: &RootDatabase, file_id: FileId) -> Vec<Annotation> {
|
||||
pub(crate) fn annotations(_db: &RootDatabase, _file_id: FileId) -> Vec<Annotation> { // @oss-only
|
||||
// @fb-only
|
||||
// @fb-only: let mut annotations = Vec::default();
|
||||
let annotations = Vec::default(); // @oss-only
|
||||
// @fb-only
|
||||
// @fb-only: meta_only::annotations(db, file_id, &mut annotations);
|
||||
annotations
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -573,8 +573,8 @@ pub(crate) fn find_call_in_function<CallCtx, MakeCtx, Res>(
|
|||
};
|
||||
if let Some(extra) = check_call(context) {
|
||||
// Got one.
|
||||
let call_expr_id = if let Some(expr_id) = ctx.in_macro {
|
||||
expr_id.idx
|
||||
let call_expr_id = if let Some((hir_idx, _macro_def)) = ctx.in_macro {
|
||||
hir_idx.idx
|
||||
} else {
|
||||
ctx.item_id
|
||||
};
|
||||
|
|
|
|||
|
|
@ -50,6 +50,7 @@ use elp_ide_db::text_edit::TextEdit;
|
|||
use elp_ide_ssr::Match;
|
||||
use elp_ide_ssr::SsrSearchScope;
|
||||
use elp_ide_ssr::match_pattern;
|
||||
use elp_project_model::AppName;
|
||||
use elp_syntax::NodeOrToken;
|
||||
use elp_syntax::Parse;
|
||||
use elp_syntax::SourceFile;
|
||||
|
|
@ -96,13 +97,13 @@ mod application_env;
|
|||
mod atoms_exhaustion;
|
||||
mod binary_string_to_sigil;
|
||||
mod boolean_precedence;
|
||||
mod bound_variable;
|
||||
mod could_be_a_string_literal;
|
||||
mod cross_node_eval;
|
||||
mod debugging_function;
|
||||
mod dependent_header;
|
||||
mod deprecated_function;
|
||||
mod duplicate_module;
|
||||
mod edoc;
|
||||
mod effect_free_statement;
|
||||
mod equality_check_with_unnecessary_operator;
|
||||
mod eqwalizer_assists;
|
||||
|
|
@ -117,7 +118,7 @@ mod macro_precedence_suprise;
|
|||
mod map_find_to_syntax;
|
||||
mod map_insertion_to_syntax;
|
||||
mod meck;
|
||||
// @fb-only
|
||||
// @fb-only: mod meta_only;
|
||||
mod missing_compile_warn_missing_spec;
|
||||
mod missing_module;
|
||||
mod missing_separator;
|
||||
|
|
@ -131,6 +132,7 @@ mod no_garbage_collect;
|
|||
mod no_nowarn_suppressions;
|
||||
mod no_size;
|
||||
mod nonstandard_integer_formatting;
|
||||
mod old_edoc_syntax;
|
||||
mod record_tuple_match;
|
||||
mod redundant_assignment;
|
||||
mod replace_call;
|
||||
|
|
@ -549,12 +551,37 @@ pub(crate) trait Linter {
|
|||
}
|
||||
}
|
||||
|
||||
fn should_process_app(
|
||||
app_name: &Option<AppName>,
|
||||
config: &DiagnosticsConfig,
|
||||
diagnostic_code: &DiagnosticCode,
|
||||
) -> bool {
|
||||
let app = match app_name {
|
||||
Some(app) => app.to_string(),
|
||||
None => return true,
|
||||
};
|
||||
|
||||
if let Some(lint_config) = config.lint_config.as_ref()
|
||||
&& let Some(linter_config) = lint_config.linters.get(diagnostic_code)
|
||||
&& let Some(ref excluded) = linter_config.exclude_apps
|
||||
&& excluded.contains(&app)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
true
|
||||
}
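`should_process_app` implements the new `exclude_apps` linter option: diagnostics for a code are dropped when the file's application is on that linter's exclusion list, and kept when no app is known. A minimal standalone model of that decision (plain std types, not the crate's config structs):

```rust
// Standalone model of the exclude_apps check performed by
// `should_process_app` above.
use std::collections::{HashMap, HashSet};

fn should_process_app(
    app_name: Option<&str>,
    exclude_apps: &HashMap<&str, HashSet<&str>>, // diagnostic code -> excluded apps
    diagnostic_code: &str,
) -> bool {
    match app_name {
        None => true,
        Some(app) => exclude_apps
            .get(diagnostic_code)
            .map_or(true, |excluded| !excluded.contains(app)),
    }
}

fn main() {
    let mut exclude_apps = HashMap::new();
    exclude_apps.insert("W0011", HashSet::from(["my_app"]));
    assert!(!should_process_app(Some("my_app"), &exclude_apps, "W0011"));
    assert!(should_process_app(Some("other_app"), &exclude_apps, "W0011"));
    assert!(should_process_app(None, &exclude_apps, "W0011"));
}
```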
|
||||
|
||||
fn should_run(
|
||||
linter: &dyn Linter,
|
||||
config: &DiagnosticsConfig,
|
||||
app_name: &Option<AppName>,
|
||||
is_generated: bool,
|
||||
is_test: bool,
|
||||
) -> bool {
|
||||
if !should_process_app(app_name, config, &linter.id()) {
|
||||
return false;
|
||||
}
|
||||
let is_enabled = if let Some(lint_config) = config.lint_config.as_ref() {
|
||||
lint_config
|
||||
.get_is_enabled_override(&linter.id())
|
||||
|
|
@ -865,6 +892,7 @@ pub(crate) trait GenericLinter: Linter {
|
|||
fn fixes(
|
||||
&self,
|
||||
_context: &Self::Context,
|
||||
_range: TextRange,
|
||||
_sema: &Semantic,
|
||||
_file_id: FileId,
|
||||
) -> Option<Vec<Assist>> {
|
||||
|
|
@ -898,7 +926,7 @@ impl<T: GenericLinter> GenericDiagnostics for T {
|
|||
if let Some(matches) = self.matches(sema, file_id) {
|
||||
for matched in matches {
|
||||
let message = self.match_description(&matched.context);
|
||||
let fixes = self.fixes(&matched.context, sema, file_id);
|
||||
let fixes = self.fixes(&matched.context, matched.range, sema, file_id);
|
||||
let tag = self.tag(&matched.context);
|
||||
let mut d = Diagnostic::new(self.id(), message, matched.range)
|
||||
.with_fixes(fixes)
|
||||
|
|
@ -1216,6 +1244,16 @@ impl LintConfig {
|
|||
self.linters.get(diagnostic_code)?.experimental
|
||||
}
|
||||
|
||||
/// Get the exclude_apps override for a linter based on its diagnostic code.
|
||||
pub fn get_exclude_apps_override(
|
||||
&self,
|
||||
diagnostic_code: &DiagnosticCode,
|
||||
) -> Option<Vec<String>> {
|
||||
self.linters
|
||||
.get(diagnostic_code)
|
||||
.and_then(|c| c.exclude_apps.clone())
|
||||
}
|
||||
|
||||
pub fn get_function_call_linter_config(
|
||||
&self,
|
||||
diagnostic_code: &DiagnosticCode,
|
||||
|
|
@ -1339,6 +1377,7 @@ pub struct LinterConfig {
|
|||
pub include_tests: Option<bool>,
|
||||
pub include_generated: Option<bool>,
|
||||
pub experimental: Option<bool>,
|
||||
pub exclude_apps: Option<Vec<String>>,
|
||||
#[serde(flatten)]
|
||||
pub config: Option<LinterTraitConfig>,
|
||||
}
|
||||
|
|
@ -1359,6 +1398,7 @@ impl LinterConfig {
|
|||
include_tests: other.include_tests.or(self.include_tests),
|
||||
include_generated: other.include_generated.or(self.include_generated),
|
||||
experimental: other.experimental.or(self.experimental),
|
||||
exclude_apps: other.exclude_apps.or(self.exclude_apps),
|
||||
config: merged_config,
|
||||
}
|
||||
}
|
||||
|
|
@ -1524,7 +1564,7 @@ pub fn native_diagnostics(
|
|||
config
|
||||
.lints_from_config
|
||||
.get_diagnostics(&mut res, &sema, file_id);
|
||||
// @fb-only
|
||||
// @fb-only: meta_only::diagnostics(&mut res, &sema, file_id, file_kind, config);
|
||||
syntax_diagnostics(&sema, &parse, &mut res, file_id);
|
||||
diagnostics_from_descriptors(
|
||||
&mut res,
|
||||
|
|
@ -1553,6 +1593,7 @@ pub fn native_diagnostics(
|
|||
} else {
|
||||
FxHashMap::default()
|
||||
};
|
||||
let app_name = db.file_app_name(file_id);
|
||||
let metadata = db.elp_metadata(file_id);
|
||||
// TODO: can we ever disable DiagnosticCode::SyntaxError?
|
||||
// In which case we must check labeled_syntax_errors
|
||||
|
|
@ -1561,6 +1602,7 @@ pub fn native_diagnostics(
|
|||
&& (config.experimental && d.has_category(Category::Experimental)
|
||||
|| !d.has_category(Category::Experimental))
|
||||
&& !d.should_be_suppressed(&metadata, config)
|
||||
&& should_process_app(&app_name, config, &d.code)
|
||||
});
|
||||
|
||||
LabeledDiagnostics {
|
||||
|
|
@ -1611,20 +1653,20 @@ pub fn diagnostics_from_descriptors(
|
|||
.db
|
||||
.is_test_suite_or_test_helper(file_id)
|
||||
.unwrap_or(false);
|
||||
let app_name = sema.db.file_app_name(file_id);
|
||||
descriptors.iter().for_each(|descriptor| {
|
||||
if descriptor.conditions.enabled(config, is_generated, is_test) {
|
||||
if descriptor.conditions.default_disabled {
|
||||
// Filter the returned diagnostics to ensure they are
|
||||
// enabled
|
||||
let mut diags: Vec<Diagnostic> = Vec::default();
|
||||
(descriptor.checker)(&mut diags, sema, file_id, file_kind);
|
||||
for diag in diags {
|
||||
if config.enabled.contains(&diag.code) {
|
||||
res.push(diag);
|
||||
}
|
||||
let mut diags: Vec<Diagnostic> = Vec::default();
|
||||
(descriptor.checker)(&mut diags, sema, file_id, file_kind);
|
||||
for diag in diags {
|
||||
// Check if this diagnostic is enabled (for default_disabled descriptors)
|
||||
// and if the app is not excluded for this diagnostic code
|
||||
let is_enabled =
|
||||
!descriptor.conditions.default_disabled || config.enabled.contains(&diag.code);
|
||||
let app_allowed = should_process_app(&app_name, config, &diag.code);
|
||||
if is_enabled && app_allowed {
|
||||
res.push(diag);
|
||||
}
|
||||
} else {
|
||||
(descriptor.checker)(res, sema, file_id, file_kind);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
|
@ -1681,11 +1723,12 @@ const GENERIC_LINTERS: &[&dyn GenericDiagnostics] = &[
|
|||
&duplicate_module::LINTER,
|
||||
&no_nowarn_suppressions::LINTER,
|
||||
&macro_precedence_suprise::LINTER,
|
||||
&edoc::LINTER,
|
||||
&old_edoc_syntax::LINTER,
|
||||
&missing_module::LINTER,
|
||||
&unused_include::LINTER,
|
||||
&misspelled_attribute::LINTER,
|
||||
&boolean_precedence::LINTER,
|
||||
&bound_variable::LINTER,
|
||||
];
|
||||
|
||||
/// Unified registry for all types of linters
|
||||
|
|
@ -1714,7 +1757,7 @@ pub(crate) fn linters() -> Vec<DiagnosticLinter> {
|
|||
);
|
||||
|
||||
// Add meta-only linters
|
||||
// @fb-only
|
||||
// @fb-only: all_linters.extend(meta_only::linters());
|
||||
|
||||
all_linters
|
||||
}
|
||||
|
|
@ -1731,11 +1774,12 @@ fn diagnostics_from_linters(
|
|||
.db
|
||||
.is_test_suite_or_test_helper(file_id)
|
||||
.unwrap_or(false);
|
||||
let app_name = sema.db.file_app_name(file_id);
|
||||
|
||||
for l in linters {
|
||||
let linter = l.as_linter();
|
||||
if linter.should_process_file_id(sema, file_id)
|
||||
&& should_run(linter, config, is_generated, is_test)
|
||||
&& should_run(linter, config, &app_name, is_generated, is_test)
|
||||
{
|
||||
let severity = if let Some(lint_config) = config.lint_config.as_ref() {
|
||||
lint_config
|
||||
|
|
@ -2297,11 +2341,14 @@ pub fn erlang_service_diagnostics(
|
|||
diags
|
||||
};
|
||||
|
||||
let app_name = db.file_app_name(file_id);
|
||||
let metadata = db.elp_metadata(file_id);
|
||||
let diags = diags
|
||||
.into_iter()
|
||||
.filter(|(_file_id, d)| {
|
||||
!d.should_be_suppressed(&metadata, config) && !config.disabled.contains(&d.code)
|
||||
!d.should_be_suppressed(&metadata, config)
|
||||
&& !config.disabled.contains(&d.code)
|
||||
&& should_process_app(&app_name, config, &d.code)
|
||||
})
|
||||
.map(|(file_id, d)| {
|
||||
(
|
||||
|
|
@ -2592,7 +2639,7 @@ pub fn ct_diagnostics(
|
|||
CommonTestInfo::Result { all, groups } => {
|
||||
let testcases = common_test::runnable_names(&sema, file_id, all, groups).ok();
|
||||
common_test::unreachable_test(&mut res, &sema, file_id, &testcases);
|
||||
// @fb-only
|
||||
// @fb-only: meta_only::ct_diagnostics(&mut res, &sema, file_id, testcases);
|
||||
}
|
||||
CommonTestInfo::EvalError(_error) => {
|
||||
// The error currently does not contain anything useful, so we ignore it
|
||||
|
|
@ -3563,7 +3610,7 @@ main(X) ->
|
|||
#[test]
|
||||
fn group_related_diagnostics_elp_only() {
|
||||
// Demonstrate that ELP does not pick up a syntax error in the
|
||||
// spec, same code as in test_projects/diagnostics/app_a/src/syntax.erl
|
||||
// spec, same code as in test/test_projects/diagnostics/app_a/src/syntax.erl
|
||||
check_diagnostics(
|
||||
r#"
|
||||
-module(main).
|
||||
|
|
@ -3992,6 +4039,7 @@ main(X) ->
|
|||
include_tests: None,
|
||||
include_generated: None,
|
||||
experimental: None,
|
||||
exclude_apps: None,
|
||||
config: None,
|
||||
},
|
||||
);
|
||||
|
|
@ -4034,6 +4082,7 @@ main(X) ->
|
|||
include_tests: Some(true),
|
||||
include_generated: None,
|
||||
experimental: None,
|
||||
exclude_apps: None,
|
||||
config: None,
|
||||
},
|
||||
);
|
||||
|
|
@ -4075,6 +4124,7 @@ main(X) ->
|
|||
include_tests: None,
|
||||
include_generated: Some(true),
|
||||
experimental: None,
|
||||
exclude_apps: None,
|
||||
config: None,
|
||||
},
|
||||
);
|
||||
|
|
@ -4117,6 +4167,7 @@ main(X) ->
|
|||
include_tests: None,
|
||||
include_generated: None,
|
||||
experimental: Some(true),
|
||||
exclude_apps: None,
|
||||
config: None,
|
||||
},
|
||||
);
|
||||
|
|
@ -4161,6 +4212,7 @@ main(X) ->
|
|||
include_tests: None,
|
||||
include_generated: None,
|
||||
experimental: None,
|
||||
exclude_apps: None,
|
||||
config: None,
|
||||
},
|
||||
);
|
||||
|
|
@ -4191,6 +4243,47 @@ main(X) ->
|
|||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_linter_exclude_apps_override() {
|
||||
let mut lint_config = LintConfig::default();
|
||||
lint_config.linters.insert(
|
||||
DiagnosticCode::NoGarbageCollect,
|
||||
LinterConfig {
|
||||
is_enabled: Some(false),
|
||||
severity: None,
|
||||
include_tests: None,
|
||||
include_generated: None,
|
||||
experimental: None,
|
||||
exclude_apps: Some(vec!["my_app".to_string()]),
|
||||
config: None,
|
||||
},
|
||||
);
|
||||
|
||||
let config = DiagnosticsConfig::default()
|
||||
.configure_diagnostics(
|
||||
&lint_config,
|
||||
&Some("no_garbage_collect".to_string()),
|
||||
&None,
|
||||
FallBackToAll::No,
|
||||
)
|
||||
.unwrap();
|
||||
check_diagnostics_with_config(
|
||||
config,
|
||||
r#"
|
||||
//- /src/main.erl app:my_app
|
||||
-module(main).
|
||||
-export([warning/0]).
|
||||
|
||||
warning() ->
|
||||
erlang:garbage_collect().
|
||||
//- /opt/lib/stdlib-3.17/src/erlang.erl otp_app:/opt/lib/stdlib-3.17
|
||||
-module(erlang).
|
||||
-export([garbage_collect/0]).
|
||||
garbage_collect() -> ok.
|
||||
"#,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn no_unused_macro_in_macro_rhs_for_function_name() {
|
||||
let config = DiagnosticsConfig::default()
|
||||
|
|
@ -4238,6 +4331,7 @@ main(X) ->
|
|||
include_tests: None,
|
||||
include_generated: None,
|
||||
experimental: None,
|
||||
exclude_apps: None,
|
||||
config: Some(LinterTraitConfig::FunctionCallLinterConfig(
|
||||
FunctionCallLinterConfig {
|
||||
include: Some(vec![FunctionMatch::mf("mod_a", "func_a")]),
|
||||
|
|
@ -4270,6 +4364,7 @@ main(X) ->
|
|||
include_tests: Some(true),
|
||||
include_generated: None,
|
||||
experimental: None,
|
||||
exclude_apps: None,
|
||||
config: Some(LinterTraitConfig::FunctionCallLinterConfig(
|
||||
FunctionCallLinterConfig {
|
||||
include: Some(vec![FunctionMatch::mf("mod_b", "func_b")]),
|
||||
|
|
@ -4287,6 +4382,7 @@ main(X) ->
|
|||
include_tests: None,
|
||||
include_generated: Some(true),
|
||||
experimental: None,
|
||||
exclude_apps: None,
|
||||
config: None,
|
||||
},
|
||||
);
|
||||
|
|
|
|||
|
|
@ -28,7 +28,7 @@ use crate::codemod_helpers::CheckCallCtx;
|
|||
use crate::codemod_helpers::FunctionMatch;
|
||||
use crate::codemod_helpers::MatchCtx;
|
||||
use crate::codemod_helpers::find_call_in_function;
|
||||
// @fb-only
|
||||
// @fb-only: use crate::diagnostics;
|
||||
use crate::diagnostics::DiagnosticCode;
|
||||
use crate::diagnostics::Severity;
|
||||
|
||||
|
|
@ -36,7 +36,7 @@ pub(crate) static DESCRIPTOR: DiagnosticDescriptor = DiagnosticDescriptor {
|
|||
conditions: DiagnosticConditions {
|
||||
experimental: false,
|
||||
include_generated: true,
|
||||
include_tests: true,
|
||||
include_tests: false,
|
||||
default_disabled: false,
|
||||
},
|
||||
checker: &|diags, sema, file_id, _ext| {
|
||||
|
|
@ -108,7 +108,7 @@ fn check_function(diags: &mut Vec<Diagnostic>, sema: &Semantic, def: &FunctionDe
|
|||
vec![2, 3],
|
||||
BadEnvCallAction::AppArg(0),
|
||||
),
|
||||
// @fb-only
|
||||
// @fb-only: diagnostics::meta_only::application_env_bad_matches(),
|
||||
]
|
||||
.into_iter()
|
||||
.flatten()
|
||||
|
|
|
|||
|
|
@ -13,7 +13,7 @@ use hir::Semantic;
|
|||
|
||||
use crate::FunctionMatch;
|
||||
use crate::codemod_helpers::CheckCallCtx;
|
||||
// @fb-only
|
||||
// @fb-only: use crate::diagnostics;
|
||||
use crate::diagnostics::DiagnosticCode;
|
||||
use crate::diagnostics::FunctionCallLinter;
|
||||
use crate::diagnostics::Linter;
|
||||
|
|
@ -35,9 +35,9 @@ impl Linter for AtomsExhaustionLinter {
|
|||
false
|
||||
}
|
||||
#[rustfmt::skip]
|
||||
// @fb-only
|
||||
// @fb-only: fn should_process_file_id(&self, sema: &Semantic, file_id: FileId) -> bool {
|
||||
fn should_process_file_id(&self, _sema: &Semantic, _file_id: FileId) -> bool { // @oss-only
|
||||
// @fb-only
|
||||
// @fb-only: diagnostics::meta_only::is_relevant_file(sema.db.upcast(), file_id)
|
||||
true // @oss-only
|
||||
}
|
||||
}
|
||||
|
|
@ -56,16 +56,16 @@ impl FunctionCallLinter for AtomsExhaustionLinter {
|
|||
// FunctionMatch::mfa("erlang", "binary_to_term", 2),
|
||||
]
|
||||
.into_iter()
|
||||
// @fb-only
|
||||
// @fb-only: .chain(diagnostics::meta_only::atoms_exhaustion_matches().into_iter())
|
||||
.collect::<Vec<_>>()
|
||||
]
|
||||
}
|
||||
|
||||
fn check_match(&self, context: &CheckCallCtx<'_, ()>) -> Option<Self::Context> {
|
||||
#[rustfmt::skip]
|
||||
// @fb-only
|
||||
// @fb-only
|
||||
// @fb-only
|
||||
// @fb-only: let sema = context.in_clause.sema;
|
||||
// @fb-only: let is_safe =
|
||||
// @fb-only: diagnostics::meta_only::atoms_exhaustion_is_safe(sema, context.in_clause, context.parents);
|
||||
let is_safe = false; // @oss-only
|
||||
if !is_safe {
|
||||
match context.args.as_slice() {
|
||||
|
|
|
|||
|
|
@ -66,7 +66,6 @@ impl Linter for BooleanPrecedenceLinter {
|
|||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Default)]
|
||||
pub struct Context {
|
||||
range: TextRange,
|
||||
preceding_ws_range: TextRange,
|
||||
op: Op,
|
||||
lhs_complex: bool,
|
||||
|
|
@ -101,6 +100,7 @@ impl GenericLinter for BooleanPrecedenceLinter {
|
|||
fn fixes(
|
||||
&self,
|
||||
context: &Self::Context,
|
||||
range: TextRange,
|
||||
_sema: &Semantic,
|
||||
file_id: FileId,
|
||||
) -> Option<Vec<Assist>> {
|
||||
|
|
@ -109,36 +109,36 @@ impl GenericLinter for BooleanPrecedenceLinter {
|
|||
// Add "replace with preferred operator" fix
|
||||
let assist_message = format!("Replace '{}' with '{}'", context.op, context.op.preferred());
|
||||
let edit = TextEdit::replace(
|
||||
context.op.range(context.range, context.preceding_ws_range),
|
||||
context.op.range(range, context.preceding_ws_range),
|
||||
context.op.preferred().to_string(),
|
||||
);
|
||||
fixes.push(fix(
|
||||
"replace_boolean_operator",
|
||||
&assist_message,
|
||||
SourceChange::from_text_edit(file_id, edit),
|
||||
context.range,
|
||||
range,
|
||||
));
|
||||
|
||||
// Add "add parens" fixes if applicable
|
||||
if context.lhs_complex {
|
||||
fixes.push(parens_fix("LHS", file_id, context));
|
||||
fixes.push(parens_fix("LHS", file_id, context, range));
|
||||
}
|
||||
if context.rhs_complex {
|
||||
fixes.push(parens_fix("RHS", file_id, context));
|
||||
fixes.push(parens_fix("RHS", file_id, context, range));
|
||||
}
|
||||
|
||||
Some(fixes)
|
||||
}
|
||||
}
|
||||
|
||||
fn parens_fix(side: &str, file_id: FileId, context: &Context) -> Assist {
|
||||
fn parens_fix(side: &str, file_id: FileId, context: &Context, range: TextRange) -> Assist {
|
||||
let assist_message = format!("Add parens to {side}");
|
||||
let edit = add_parens_edit(&context.add_parens_range);
|
||||
fix(
|
||||
"replace_boolean_operator_add_parens",
|
||||
&assist_message,
|
||||
SourceChange::from_text_edit(file_id, edit),
|
||||
context.range,
|
||||
range,
|
||||
)
|
||||
}
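The same reshaping runs through the misspelled_attribute, missing_compile_warn_missing_spec, old_edoc_syntax, undocumented_module, unused_include and unused_macro hunks below: `fixes` now receives the match range from the caller, so the per-linter Context types can drop their own range fields. Reconstructed from these call sites (the trait definition itself is not part of this diff), the hook presumably now has roughly this shape:

// Assumed shape of the updated GenericLinter hook; only the implementations
// are visible in this diff, so parameter names are taken from them.
fn fixes(
    &self,
    context: &Self::Context,
    range: TextRange,   // range of the reported match, passed in by the framework
    sema: &Semantic,
    file_id: FileId,
) -> Option<Vec<Assist>>;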
|
||||
|
||||
|
|
@ -231,7 +231,6 @@ fn collect_match(
|
|||
matches.push(GenericLinterMatchContext {
|
||||
range,
|
||||
context: Context {
|
||||
range,
|
||||
preceding_ws_range,
|
||||
op: binop,
|
||||
lhs_complex,
|
||||
|
|
|
|||
178
crates/ide/src/diagnostics/bound_variable.rs
Normal file
|
|
@ -0,0 +1,178 @@
|
|||
/*
|
||||
* Copyright (c) Meta Platforms, Inc. and affiliates.
|
||||
*
|
||||
* This source code is dual-licensed under either the MIT license found in the
|
||||
* LICENSE-MIT file in the root directory of this source tree or the Apache
|
||||
* License, Version 2.0 found in the LICENSE-APACHE file in the root directory
|
||||
* of this source tree. You may select, at your option, one of the
|
||||
* above-listed licenses.
|
||||
*/
|
||||
|
||||
// Diagnostic: bound_variable
|
||||
//
|
||||
// Return a warning if the LHS of a match already contains a bound variable.
|
||||
//
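// For example (mirroring the test fixture below):
//
//   foo() ->
//       AA = bar(),
//       AA = bar().   % AA is already bound here, so this match is reported (W0060)
//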
|
||||
|
||||
use elp_ide_db::elp_base_db::FileId;
|
||||
use hir::AnyExpr;
|
||||
use hir::Expr;
|
||||
use hir::Semantic;
|
||||
use hir::Strategy;
|
||||
use hir::fold::MacroStrategy;
|
||||
use hir::fold::ParenStrategy;
|
||||
|
||||
use crate::diagnostics::DiagnosticCode;
|
||||
use crate::diagnostics::GenericLinter;
|
||||
use crate::diagnostics::GenericLinterMatchContext;
|
||||
use crate::diagnostics::Linter;
|
||||
|
||||
pub(crate) struct BoundVariableLinter;
|
||||
|
||||
impl Linter for BoundVariableLinter {
|
||||
fn id(&self) -> DiagnosticCode {
|
||||
DiagnosticCode::BoundVarInLhs
|
||||
}
|
||||
|
||||
fn description(&self) -> &'static str {
|
||||
"Match on a bound variable"
|
||||
}
|
||||
}
|
||||
|
||||
impl GenericLinter for BoundVariableLinter {
|
||||
type Context = ();
|
||||
|
||||
fn matches(
|
||||
&self,
|
||||
sema: &Semantic,
|
||||
file_id: FileId,
|
||||
) -> Option<Vec<GenericLinterMatchContext<Self::Context>>> {
|
||||
let bound_vars_by_function = sema.bound_vars_by_function(file_id);
|
||||
let mut res = Vec::new();
|
||||
sema.def_map(file_id)
|
||||
.get_function_clauses()
|
||||
.for_each(|(_, def)| {
|
||||
if def.file.file_id == file_id
|
||||
&& let Some(bound_vars) = bound_vars_by_function.get(&def.function_clause_id)
|
||||
{
|
||||
let in_clause = def.in_clause(sema, def);
|
||||
in_clause.fold_clause(
|
||||
Strategy {
|
||||
macros: MacroStrategy::ExpandButIncludeMacroCall,
|
||||
parens: ParenStrategy::InvisibleParens,
|
||||
},
|
||||
(),
|
||||
&mut |acc, ctx| {
|
||||
if let AnyExpr::Expr(Expr::Match { lhs, rhs: _ }) = ctx.item
|
||||
&& bound_vars.contains(&lhs)
|
||||
&& let Some(range) = in_clause.range_for_pat(lhs)
|
||||
&& range.file_id == def.file.file_id
|
||||
&& ctx.in_macro.is_none()
|
||||
{
|
||||
res.push(GenericLinterMatchContext {
|
||||
range: range.range,
|
||||
context: (),
|
||||
});
|
||||
};
|
||||
acc
|
||||
},
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
Some(res)
|
||||
}
|
||||
}
|
||||
|
||||
pub static LINTER: BoundVariableLinter = BoundVariableLinter;
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use elp_ide_db::DiagnosticCode;
|
||||
use expect_test::Expect;
|
||||
|
||||
use crate::diagnostics::DiagnosticsConfig;
|
||||
use crate::tests::check_diagnostics_with_config;
|
||||
use crate::tests::check_fix_with_config;
|
||||
|
||||
#[track_caller]
|
||||
pub(crate) fn check_diagnostics(fixture: &str) {
|
||||
let config = DiagnosticsConfig::default().disable(DiagnosticCode::UndefinedFunction);
|
||||
check_diagnostics_with_config(config, fixture)
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
pub(crate) fn check_fix(fixture_before: &str, fixture_after: Expect) {
|
||||
let config = DiagnosticsConfig::default().disable(DiagnosticCode::UndefinedFunction);
|
||||
check_fix_with_config(config, fixture_before, fixture_after)
|
||||
}
|
||||
#[test]
|
||||
fn bound_variable() {
|
||||
check_diagnostics(
|
||||
r#"
|
||||
//- /src/bound.erl
|
||||
-module(bound).
|
||||
|
||||
foo() ->
|
||||
AA = bar(),
|
||||
AA = bar().
|
||||
%% ^^ 💡 warning: W0060: Match on a bound variable
|
||||
|
||||
"#,
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn bound_variable_not_reported_in_case() {
|
||||
check_diagnostics(
|
||||
r#"
|
||||
//- /src/bound.erl
|
||||
-module(bound).
|
||||
|
||||
foo(Val) ->
|
||||
case Val of
|
||||
undefined -> ok;
|
||||
Val when is_list(Val) -> ok
|
||||
end.
|
||||
|
||||
"#,
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn bound_variable_not_reported_in_macro() {
|
||||
check_diagnostics(
|
||||
r#"
|
||||
//- /src/bound.erl
|
||||
-module(bound).
|
||||
-include("inc.hrl").
|
||||
|
||||
foo(Val) ->
|
||||
?A_MACRO(Val).
|
||||
//- /src/inc.hrl
|
||||
-define(A_MACRO(X), X=X).
|
||||
"#,
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn bound_variable_ignore_fix() {
|
||||
check_fix(
|
||||
r#"
|
||||
//- /src/bound.erl
|
||||
-module(bound).
|
||||
|
||||
foo() ->
|
||||
AA = bar(),
|
||||
A~A = bar().
|
||||
"#,
|
||||
expect_test::expect![[r#"
|
||||
-module(bound).
|
||||
|
||||
foo() ->
|
||||
AA = bar(),
|
||||
% elp:ignore W0060 (bound_var_in_lhs)
|
||||
AA = bar().
|
||||
"#]],
|
||||
)
|
||||
}
|
||||
}
|
||||
|
|
@ -22,7 +22,7 @@ use crate::diagnostics::DiagnosticCode;
|
|||
use crate::diagnostics::FunctionCallLinter;
|
||||
use crate::diagnostics::Linter;
|
||||
use crate::diagnostics::Severity;
|
||||
// @fb-only
|
||||
// @fb-only: use crate::diagnostics::meta_only;
|
||||
use crate::lazy_function_matches;
|
||||
|
||||
pub(crate) struct NoDebuggingFunctionLinter;
|
||||
|
|
@ -52,7 +52,7 @@ impl FunctionCallLinter for NoDebuggingFunctionLinter {
|
|||
lazy_function_matches![
|
||||
vec![FunctionMatch::m("redbug")]
|
||||
.into_iter()
|
||||
// @fb-only
|
||||
// @fb-only: .chain(meta_only::debugging_function_matches().into_iter())
|
||||
.collect::<Vec<_>>()
|
||||
]
|
||||
}
|
||||
|
|
|
|||
|
|
@ -41,7 +41,7 @@ use super::DiagnosticDescriptor;
|
|||
use super::Severity;
|
||||
use crate::codemod_helpers::FunctionMatch;
|
||||
use crate::codemod_helpers::FunctionMatcher;
|
||||
// @fb-only
|
||||
// @fb-only: use crate::diagnostics;
|
||||
use crate::fix;
|
||||
|
||||
pub(crate) static DESCRIPTOR: DiagnosticDescriptor = DiagnosticDescriptor {
|
||||
|
|
@ -88,7 +88,7 @@ fn deprecated_function(diagnostics: &mut Vec<Diagnostic>, sema: &Semantic, file_
|
|||
lazy_static! {
|
||||
static ref DEPRECATED_FUNCTIONS: Vec<(FunctionMatch, DeprecationDetails)> = {
|
||||
let matches: Vec<Vec<(FunctionMatch, DeprecationDetails)>> = vec![
|
||||
// @fb-only
|
||||
// @fb-only: diagnostics::meta_only::deprecated_function_matches(),
|
||||
];
|
||||
matches.into_iter()
|
||||
.flatten()
|
||||
|
|
@ -134,8 +134,8 @@ fn check_function(
|
|||
);
|
||||
let details = match_result.map(|(_match, details)| details.clone());
|
||||
if target_def.deprecated || match_result.is_some() {
|
||||
let expr_id = if let Some(expr_id) = ctx.in_macro {
|
||||
expr_id.idx
|
||||
let expr_id = if let Some((hir_idx, _macro_def)) = ctx.in_macro {
|
||||
hir_idx.idx
|
||||
} else {
|
||||
ctx.item_id
|
||||
};
|
||||
|
|
|
|||
|
|
@ -35,9 +35,7 @@ use crate::diagnostics::Linter;
|
|||
use crate::fix;
|
||||
|
||||
#[derive(Debug, Default, Clone, PartialEq)]
|
||||
pub(crate) struct MacroPrecedenceContext {
|
||||
range: TextRange,
|
||||
}
|
||||
pub(crate) struct MacroPrecedenceContext;
|
||||
|
||||
pub(crate) struct MacroPrecedenceSupriseLinter;
|
||||
|
||||
|
|
@ -96,10 +94,9 @@ impl GenericLinter for MacroPrecedenceSupriseLinter {
|
|||
{
|
||||
let range = ast.range();
|
||||
if range.file_id == file_id {
|
||||
let context = MacroPrecedenceContext { range: range.range };
|
||||
res.push(GenericLinterMatchContext {
|
||||
range: range.range,
|
||||
context,
|
||||
context: MacroPrecedenceContext,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
|
@ -113,16 +110,17 @@ impl GenericLinter for MacroPrecedenceSupriseLinter {
|
|||
|
||||
fn fixes(
|
||||
&self,
|
||||
context: &Self::Context,
|
||||
_context: &Self::Context,
|
||||
range: TextRange,
|
||||
_sema: &Semantic,
|
||||
file_id: FileId,
|
||||
) -> Option<Vec<Assist>> {
|
||||
let edit = add_parens_edit(&context.range);
|
||||
let edit = add_parens_edit(&range);
|
||||
let fix = fix(
|
||||
"macro_precedence_add_parens",
|
||||
"Add parens to macro call",
|
||||
SourceChange::from_text_edit(file_id, edit),
|
||||
context.range,
|
||||
range,
|
||||
);
|
||||
Some(vec![fix])
|
||||
}
|
||||
|
|
|
|||
|
|
@ -75,7 +75,6 @@ impl Linter for MissingCompileWarnMissingSpec {
|
|||
pub struct Context {
|
||||
found: Found,
|
||||
compile_option_id: Option<CompileOptionId>,
|
||||
target_range: TextRange,
|
||||
}
|
||||
|
||||
impl GenericLinter for MissingCompileWarnMissingSpec {
|
||||
|
|
@ -94,7 +93,6 @@ impl GenericLinter for MissingCompileWarnMissingSpec {
|
|||
context: Context {
|
||||
found: Found::No,
|
||||
compile_option_id: None,
|
||||
target_range: DIAGNOSTIC_WHOLE_FILE_RANGE,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
|
@ -149,7 +147,6 @@ impl GenericLinter for MissingCompileWarnMissingSpec {
|
|||
context: Context {
|
||||
found: what.0,
|
||||
compile_option_id: what.1,
|
||||
target_range: range,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
|
@ -160,6 +157,7 @@ impl GenericLinter for MissingCompileWarnMissingSpec {
|
|||
fn fixes(
|
||||
&self,
|
||||
context: &Self::Context,
|
||||
range: TextRange,
|
||||
sema: &Semantic,
|
||||
file_id: FileId,
|
||||
) -> Option<Vec<elp_ide_assists::Assist>> {
|
||||
|
|
@ -184,7 +182,7 @@ impl GenericLinter for MissingCompileWarnMissingSpec {
|
|||
"add_warn_missing_spec_all",
|
||||
"Add compile option 'warn_missing_spec_all'",
|
||||
edit,
|
||||
context.target_range,
|
||||
range,
|
||||
)])
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -55,7 +55,6 @@ impl Linter for MisspelledAttributeLinter {
|
|||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Default)]
|
||||
pub struct Context {
|
||||
range: TextRange,
|
||||
attr_name: String,
|
||||
suggested_rename: String,
|
||||
}
|
||||
|
|
@ -88,7 +87,6 @@ impl GenericLinter for MisspelledAttributeLinter {
|
|||
res.push(GenericLinterMatchContext {
|
||||
range: attr_name_range,
|
||||
context: Context {
|
||||
range: attr_name_range,
|
||||
attr_name: attr.name.to_string(),
|
||||
suggested_rename: suggested_rename.to_string(),
|
||||
},
|
||||
|
|
@ -110,16 +108,17 @@ impl GenericLinter for MisspelledAttributeLinter {
|
|||
fn fixes(
|
||||
&self,
|
||||
context: &Self::Context,
|
||||
range: TextRange,
|
||||
_sema: &Semantic,
|
||||
file_id: FileId,
|
||||
) -> Option<Vec<Assist>> {
|
||||
let edit = TextEdit::replace(context.range, context.suggested_rename.clone());
|
||||
let edit = TextEdit::replace(range, context.suggested_rename.clone());
|
||||
let msg = format!("Change to '{}'", context.suggested_rename);
|
||||
Some(vec![fix(
|
||||
"fix_misspelled_attribute",
|
||||
&msg,
|
||||
SourceChange::from_text_edit(file_id, edit),
|
||||
context.range,
|
||||
range,
|
||||
)])
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -27,12 +27,8 @@
|
|||
//
|
||||
|
||||
use elp_ide_db::elp_base_db::FileId;
|
||||
use fxhash::FxHashMap;
|
||||
use fxhash::FxHashSet;
|
||||
use hir::AnyExpr;
|
||||
use hir::Expr;
|
||||
use hir::FunctionClauseId;
|
||||
use hir::PatId;
|
||||
use hir::Semantic;
|
||||
use hir::Strategy;
|
||||
use hir::fold::MacroStrategy;
|
||||
|
|
@ -60,21 +56,7 @@ fn mutable_variable_bug(
|
|||
sema: &Semantic,
|
||||
file_id: FileId,
|
||||
) -> Option<()> {
|
||||
let mut bound_vars_by_function: FxHashMap<FunctionClauseId, FxHashSet<&PatId>> =
|
||||
FxHashMap::default();
|
||||
let bound_vars = sema.bound_vars_in_pattern_diagnostic(file_id);
|
||||
bound_vars.iter().for_each(|(function_id, pat_id, _var)| {
|
||||
bound_vars_by_function
|
||||
.entry(function_id.value)
|
||||
.and_modify(|vars| {
|
||||
vars.insert(pat_id);
|
||||
})
|
||||
.or_insert_with(|| {
|
||||
let mut vars = FxHashSet::default();
|
||||
vars.insert(pat_id);
|
||||
vars
|
||||
});
|
||||
});
|
||||
let bound_vars_by_function = sema.bound_vars_by_function(file_id);
|
||||
sema.def_map(file_id)
|
||||
.get_function_clauses()
|
||||
.for_each(|(_, def)| {
|
||||
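The hand-built clause-to-variables map that this hunk deletes is now obtained from a single Semantic helper, the same one the new bound_variable linter above relies on. Its definition is not shown in this diff; judging from the code it replaces, it presumably has roughly this shape:

// Assumed signature, inferred from the removed FxHashMap-building code:
// fn bound_vars_by_function(&self, file_id: FileId)
//     -> FxHashMap<FunctionClauseId, FxHashSet<PatId>>
// i.e. for each function clause in the file, the set of pattern ids that
// rebind an already-bound variable.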
|
|
|
|||
|
|
@ -29,6 +29,9 @@ impl Linter for NoErrorLoggerLinter {
|
|||
fn severity(&self, _sema: &Semantic, _file_id: FileId) -> Severity {
|
||||
Severity::Error
|
||||
}
|
||||
fn should_process_test_files(&self) -> bool {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
impl FunctionCallLinter for NoErrorLoggerLinter {
|
||||
|
|
|
|||
|
|
@ -8,7 +8,7 @@
|
|||
* above-listed licenses.
|
||||
*/
|
||||
|
||||
// Diagnostic: edoc
|
||||
// Diagnostic: old_edoc_syntax
|
||||
|
||||
use elp_ide_assists::Assist;
|
||||
use elp_ide_assists::helpers;
|
||||
|
|
@ -31,11 +31,10 @@ use super::DiagnosticCode;
|
|||
use super::GenericLinter;
|
||||
use super::GenericLinterMatchContext;
|
||||
use super::Linter;
|
||||
use super::Severity;
|
||||
|
||||
pub(crate) struct EdocLinter;
|
||||
pub(crate) struct OldEdocSyntaxLinter;
|
||||
|
||||
impl Linter for EdocLinter {
|
||||
impl Linter for OldEdocSyntaxLinter {
|
||||
fn id(&self) -> DiagnosticCode {
|
||||
DiagnosticCode::OldEdocSyntax
|
||||
}
|
||||
|
|
@ -44,11 +43,8 @@ impl Linter for EdocLinter {
|
|||
"EDoc style comments are deprecated. Please use Markdown instead."
|
||||
}
|
||||
|
||||
fn severity(&self, sema: &Semantic, file_id: FileId) -> Severity {
|
||||
match sema.db.is_test_suite_or_test_helper(file_id) {
|
||||
Some(true) => Severity::WeakWarning,
|
||||
_ => Severity::Warning,
|
||||
}
|
||||
fn should_process_test_files(&self) -> bool {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -56,10 +52,9 @@ impl Linter for EdocLinter {
|
|||
pub struct Context {
|
||||
header_ptr: Option<InFileAstPtr<ast::Form>>,
|
||||
doc_start: TextSize,
|
||||
range: TextRange,
|
||||
}
|
||||
|
||||
impl GenericLinter for EdocLinter {
|
||||
impl GenericLinter for OldEdocSyntaxLinter {
|
||||
type Context = Context;
|
||||
|
||||
fn matches(
|
||||
|
|
@ -77,7 +72,6 @@ impl GenericLinter for EdocLinter {
|
|||
context: Context {
|
||||
header_ptr: Some(*header_ptr),
|
||||
doc_start,
|
||||
range: doc.range,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
|
@ -88,7 +82,6 @@ impl GenericLinter for EdocLinter {
|
|||
context: Context {
|
||||
header_ptr: Some(*header_ptr),
|
||||
doc_start,
|
||||
range: equiv.range,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
|
@ -99,7 +92,6 @@ impl GenericLinter for EdocLinter {
|
|||
context: Context {
|
||||
header_ptr: Some(*header_ptr),
|
||||
doc_start,
|
||||
range: deprecated.range,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
|
@ -111,7 +103,6 @@ impl GenericLinter for EdocLinter {
|
|||
context: Context {
|
||||
header_ptr: Some(*header_ptr),
|
||||
doc_start,
|
||||
range: hidden.range,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
|
@ -123,6 +114,7 @@ impl GenericLinter for EdocLinter {
|
|||
fn fixes(
|
||||
&self,
|
||||
context: &Self::Context,
|
||||
range: TextRange,
|
||||
sema: &Semantic,
|
||||
file_id: FileId,
|
||||
) -> Option<Vec<Assist>> {
|
||||
|
|
@ -134,12 +126,12 @@ impl GenericLinter for EdocLinter {
|
|||
file_id,
|
||||
header,
|
||||
context.doc_start,
|
||||
context.range,
|
||||
range,
|
||||
)])
|
||||
}
|
||||
}
|
||||
|
||||
pub static LINTER: EdocLinter = EdocLinter;
|
||||
pub static LINTER: OldEdocSyntaxLinter = OldEdocSyntaxLinter;
|
||||
|
||||
fn old_edoc_syntax_fix(
|
||||
sema: &Semantic,
|
||||
|
|
@ -302,22 +294,6 @@ mod tests {
|
|||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_function_doc_in_test_file() {
|
||||
check_diagnostics(
|
||||
r#"
|
||||
//- /test/main_SUITE.erl extra:test
|
||||
-module(main_SUITE).
|
||||
%% @doc This is the main function documentation.
|
||||
%% ^^^^ 💡 weak: W0038: EDoc style comments are deprecated. Please use Markdown instead.
|
||||
main() ->
|
||||
dep().
|
||||
|
||||
dep() -> ok.
|
||||
"#,
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_function_doc_different_arities() {
|
||||
check_diagnostics(
|
||||
|
|
@ -43,6 +43,13 @@ impl Linter for UndefinedFunctionLinter {
|
|||
fn should_process_generated_files(&self) -> bool {
|
||||
true
|
||||
}
|
||||
// Ideally, we would like to report undefined functions in all files, but
|
||||
// there are too many false positives in test files to do so.
|
||||
// This is often due to mocked modules and test suite cleverness.
|
||||
// We can revisit this decision in the future. See T249044930.
|
||||
fn should_process_test_files(&self) -> bool {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
impl FunctionCallLinter for UndefinedFunctionLinter {
|
||||
|
|
|
|||
|
|
@ -48,9 +48,7 @@ impl Linter for UndocumentedModuleLinter {
|
|||
}
|
||||
|
||||
#[derive(Debug, Default, Clone, PartialEq, Eq)]
|
||||
pub struct Context {
|
||||
module_name_range: TextRange,
|
||||
}
|
||||
pub struct Context;
|
||||
|
||||
impl GenericLinter for UndocumentedModuleLinter {
|
||||
type Context = Context;
|
||||
|
|
@ -71,16 +69,21 @@ impl GenericLinter for UndocumentedModuleLinter {
|
|||
if module_has_no_docs {
|
||||
let module_name = module_attribute.name()?;
|
||||
let module_name_range = module_name.syntax().text_range();
|
||||
let context = Context { module_name_range };
|
||||
res.push(GenericLinterMatchContext {
|
||||
range: module_name_range,
|
||||
context,
|
||||
context: Context,
|
||||
});
|
||||
}
|
||||
Some(res)
|
||||
}
|
||||
|
||||
fn fixes(&self, context: &Context, sema: &Semantic, file_id: FileId) -> Option<Vec<Assist>> {
|
||||
fn fixes(
|
||||
&self,
|
||||
_context: &Context,
|
||||
range: TextRange,
|
||||
sema: &Semantic,
|
||||
file_id: FileId,
|
||||
) -> Option<Vec<Assist>> {
|
||||
let insert_offset = helpers::moduledoc_insert_offset(sema, file_id)?;
|
||||
let mut builder = SourceChangeBuilder::new(file_id);
|
||||
builder.insert(insert_offset, "-moduledoc false.\n");
|
||||
|
|
@ -89,7 +92,7 @@ impl GenericLinter for UndocumentedModuleLinter {
|
|||
"add_moduledoc_false",
|
||||
"Add `-moduledoc false.` attribute",
|
||||
source_change,
|
||||
context.module_name_range,
|
||||
range,
|
||||
);
|
||||
Some(vec![fix])
|
||||
}
|
||||
|
|
|
|||
|
|
@ -27,7 +27,7 @@ use crate::codemod_helpers::CheckCallCtx;
|
|||
use crate::codemod_helpers::MatchCtx;
|
||||
use crate::diagnostics::FunctionCallLinter;
|
||||
use crate::diagnostics::Linter;
|
||||
// @fb-only
|
||||
// @fb-only: use crate::diagnostics::meta_only;
|
||||
use crate::fix;
|
||||
use crate::lazy_function_matches;
|
||||
|
||||
|
|
@ -45,9 +45,9 @@ impl Linter for UnexportedFunctionLinter {
|
|||
}
|
||||
#[rustfmt::skip]
|
||||
fn should_process_file_id(&self, _sema: &Semantic, _file_id: FileId) -> bool { // @oss-only
|
||||
// @fb-only
|
||||
// @fb-only: fn should_process_file_id(&self, sema: &Semantic, file_id: FileId) -> bool {
|
||||
true // @oss-only
|
||||
// @fb-only
|
||||
// @fb-only: meta_only::should_check_for_unexported(sema, file_id)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -152,7 +152,7 @@ fn replace_include_path(
|
|||
#[cfg(test)]
|
||||
mod tests {
|
||||
use elp_ide_db::DiagnosticCode;
|
||||
// @fb-only
|
||||
// @fb-only: use elp_ide_db::meta_only::MetaOnlyDiagnosticCode;
|
||||
use expect_test::Expect;
|
||||
use expect_test::expect;
|
||||
|
||||
|
|
@ -173,7 +173,7 @@ mod tests {
|
|||
#[track_caller]
|
||||
fn check_fix(fixture_before: &str, fixture_after: Expect) {
|
||||
let config = DiagnosticsConfig::default()
|
||||
// @fb-only
|
||||
// @fb-only: .disable(DiagnosticCode::MetaOnly(MetaOnlyDiagnosticCode::MalformedInclude))
|
||||
.disable(DiagnosticCode::UnusedInclude);
|
||||
tests::check_fix_with_config(config, fixture_before, fixture_after)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -137,6 +137,7 @@ impl GenericLinter for UnusedIncludeLinter {
|
|||
fn fixes(
|
||||
&self,
|
||||
context: &Self::Context,
|
||||
_range: TextRange,
|
||||
_sema: &Semantic,
|
||||
file_id: FileId,
|
||||
) -> Option<Vec<Assist>> {
|
||||
|
|
|
|||
|
|
@ -88,7 +88,13 @@ impl GenericLinter for UnusedMacroLinter {
|
|||
Some(DiagnosticTag::Unused)
|
||||
}
|
||||
|
||||
fn fixes(&self, context: &Context, _sema: &Semantic, file_id: FileId) -> Option<Vec<Assist>> {
|
||||
fn fixes(
|
||||
&self,
|
||||
context: &Context,
|
||||
_range: TextRange,
|
||||
_sema: &Semantic,
|
||||
file_id: FileId,
|
||||
) -> Option<Vec<Assist>> {
|
||||
Some(vec![delete_unused_macro(
|
||||
file_id,
|
||||
context.delete_range,
|
||||
|
|
|
|||
|
|
@ -15,9 +15,9 @@ use elp_syntax::AstNode;
|
|||
use hir::InFile;
|
||||
use hir::Semantic;
|
||||
|
||||
// @fb-only
|
||||
// @fb-only: use crate::meta_only::exdoc_links;
|
||||
|
||||
// @fb-only
|
||||
// @fb-only: mod meta_only;
|
||||
mod otp_links;
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
|
|
@ -40,10 +40,10 @@ pub(crate) fn external_docs(db: &RootDatabase, position: &FilePosition) -> Optio
|
|||
if let Some(class) = SymbolClass::classify(&sema, in_file_token.clone()) {
|
||||
class.iter().for_each(|def| {
|
||||
otp_links::links(&mut doc_links, &sema, &def);
|
||||
// @fb-only
|
||||
// @fb-only: exdoc_links::links(&mut doc_links, &sema, &def);
|
||||
});
|
||||
}
|
||||
// @fb-only
|
||||
// @fb-only: meta_only::links(&mut doc_links, node, position);
|
||||
Some(doc_links)
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -110,7 +110,7 @@ pub mod diagnostics;
|
|||
pub mod diagnostics_collection;
|
||||
pub mod diff;
|
||||
mod highlight_related;
|
||||
// @fb-only
|
||||
// @fb-only: pub mod meta_only;
|
||||
|
||||
pub use annotations::Annotation;
|
||||
pub use annotations::AnnotationKind;
|
||||
|
|
@ -251,9 +251,9 @@ impl Analysis {
|
|||
})
|
||||
}
|
||||
|
||||
pub fn should_eqwalize(&self, file_id: FileId, include_tests: bool) -> Cancellable<bool> {
|
||||
pub fn should_eqwalize(&self, file_id: FileId) -> Cancellable<bool> {
|
||||
let is_in_app = self.file_app_type(file_id).ok() == Some(Some(AppType::App));
|
||||
Ok(is_in_app && self.is_eqwalizer_enabled(file_id, include_tests)?)
|
||||
Ok(is_in_app && self.is_eqwalizer_enabled(file_id)?)
|
||||
}
|
||||
|
||||
/// Computes the set of eqwalizer diagnostics for the given files,
|
||||
|
|
@ -383,8 +383,8 @@ impl Analysis {
|
|||
/// - the app (the module belongs to) has `.eqwalizer` marker in the root
/// - or the module has `-typing([eqwalizer]).` pragma
/// - or the whole project has `enable_all=true` in its `.elp.toml` file
|
||||
pub fn is_eqwalizer_enabled(&self, file_id: FileId, include_tests: bool) -> Cancellable<bool> {
|
||||
self.with_db(|db| db.is_eqwalizer_enabled(file_id, include_tests))
|
||||
pub fn is_eqwalizer_enabled(&self, file_id: FileId) -> Cancellable<bool> {
|
||||
self.with_db(|db| db.is_eqwalizer_enabled(file_id))
|
||||
}
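Putting the pieces of this change together, the per-file decision no longer depends on a caller-supplied include_tests flag. A reduced sketch of the resulting logic, with the inputs flattened to booleans (the real code derives them from the database, as in is_eqwalizer_enabled further down):

// Sketch only; flag names mirror the doc comment above and the db code below.
fn eqwalizer_enabled_sketch(
    otp_supported: bool,     // otp_supported_by_eqwalizer()
    is_app_file: bool,       // AppType::App, checked by should_eqwalize
    enable_all: bool,        // project-wide opt-in from .elp.toml
    module_marker: bool,     // .eqwalizer app marker or -typing([eqwalizer]). pragma
    ignored_in_config: bool, // matches eqwalizer_config.ignore_modules
) -> bool {
    otp_supported && is_app_file && (enable_all || module_marker) && !ignored_in_config
}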
|
||||
|
||||
/// ETF for the module's abstract forms
|
||||
|
|
|
|||
|
|
@ -194,35 +194,53 @@ pub fn rename_var(
|
|||
|
||||
#[cfg(test)]
|
||||
pub(crate) mod tests {
|
||||
|
||||
use elp_ide_db::RootDatabase;
|
||||
use elp_ide_db::elp_base_db::AnchoredPathBuf;
|
||||
use elp_ide_db::elp_base_db::FileId;
|
||||
use elp_ide_db::elp_base_db::VfsPath;
|
||||
use elp_ide_db::elp_base_db::assert_eq_text;
|
||||
use elp_ide_db::elp_base_db::fixture::ChangeFixture;
|
||||
use elp_ide_db::elp_base_db::fixture::WithFixture as _;
|
||||
use elp_ide_db::source_change::FileSystemEdit;
|
||||
use elp_ide_db::text_edit::TextEdit;
|
||||
use elp_project_model::test_fixture::trim_indent;
|
||||
use elp_syntax::AstNode;
|
||||
use elp_syntax::algo;
|
||||
use elp_syntax::ast;
|
||||
use fxhash::FxHashSet;
|
||||
use hir::AnyExprId;
|
||||
use hir::InFile;
|
||||
use hir::Semantic;
|
||||
|
||||
use super::rename_var;
|
||||
use crate::AnalysisHost;
|
||||
use crate::fixture;
|
||||
|
||||
#[track_caller]
|
||||
pub(crate) fn check_rename(new_name: &str, fixture_before: &str, fixture_after_str: &str) {
|
||||
let fixture_after_str = &trim_indent(fixture_after_str);
|
||||
let analysis_after = fixture::multi_file(fixture_after_str);
|
||||
|
||||
let (analysis, position, _) = fixture::position(fixture_before);
|
||||
let (db_before, fixture) = RootDatabase::with_fixture(fixture_before);
|
||||
let host_before = AnalysisHost { db: db_before };
|
||||
let analysis = host_before.analysis();
|
||||
let position = fixture.position();
|
||||
|
||||
let (db_after, fixture_after) = RootDatabase::with_fixture(fixture_after_str);
|
||||
let host_after = AnalysisHost { db: db_after };
|
||||
let analysis_after = host_after.analysis();
|
||||
|
||||
let rename_result = analysis
|
||||
.rename(position, new_name)
|
||||
.unwrap_or_else(|err| panic!("Rename to '{new_name}' was cancelled: {err}"));
|
||||
match rename_result {
|
||||
Ok(source_change) => {
|
||||
let mut file_ids: FxHashSet<FileId> = FxHashSet::default();
|
||||
for edit in source_change.source_file_edits {
|
||||
let mut text_edit_builder = TextEdit::builder();
|
||||
let file_id = edit.0;
|
||||
// New and old file_id are the same
|
||||
file_ids.insert(file_id);
|
||||
for indel in edit.1.into_iter() {
|
||||
text_edit_builder.replace(indel.delete, indel.insert);
|
||||
}
|
||||
|
|
@ -232,6 +250,82 @@ pub(crate) mod tests {
|
|||
let expected = analysis_after.file_text(file_id).unwrap().to_string();
|
||||
assert_eq_text!(&*expected, &*result);
|
||||
}
|
||||
for op in source_change.file_system_edits {
|
||||
let expected;
|
||||
let new_file_id;
|
||||
match op {
|
||||
FileSystemEdit::CreateFile {
|
||||
dst,
|
||||
initial_contents,
|
||||
} => {
|
||||
let new_file =
|
||||
find_new_file_id(&fixture_after, &dst).unwrap_or_else(|| {
|
||||
panic!(
|
||||
"Fixture after:could not find file created as '{}'",
|
||||
&dst.path
|
||||
)
|
||||
});
|
||||
new_file_id = *new_file.1;
|
||||
expected = initial_contents;
|
||||
let actual = analysis_after.file_text(new_file_id).unwrap().to_string();
|
||||
assert_eq_text!(&*expected, &*actual);
|
||||
}
|
||||
FileSystemEdit::MoveFile { src: _, dst } => {
|
||||
let new_file =
|
||||
find_new_file_id(&fixture_after, &dst).unwrap_or_else(|| {
|
||||
panic!(
|
||||
"Fixture after:could not find file renamed to '{}'",
|
||||
&dst.path
|
||||
)
|
||||
});
|
||||
new_file_id = *new_file.1;
|
||||
// We simply record the new file id for checking in `fixture_after``.
|
||||
// The expected value will be updated by the new_file_edits below,
|
||||
// and the result asserted there
|
||||
}
|
||||
}
|
||||
file_ids.insert(new_file_id);
|
||||
}
|
||||
for (dst, op) in source_change.new_file_edits {
|
||||
// When renaming a module, we move the original file, then apply fixup edits
|
||||
// to the new file
|
||||
let anchored_dst = AnchoredPathBuf {
|
||||
anchor: dst.anchor,
|
||||
path: dst.path,
|
||||
};
|
||||
let new_file =
|
||||
find_new_file_id(&fixture_after, &anchored_dst).unwrap_or_else(|| {
|
||||
panic!(
|
||||
"Fixture after:could not find file created as '{}'",
|
||||
&anchored_dst.path
|
||||
)
|
||||
});
|
||||
|
||||
let mut text_edit_builder = TextEdit::builder();
|
||||
let file_id = *new_file.1;
|
||||
// New and old file_id are the same
|
||||
file_ids.insert(file_id);
|
||||
for indel in op.iter() {
|
||||
text_edit_builder.replace(indel.delete, indel.insert.to_string());
|
||||
}
|
||||
let mut result = analysis.file_text(file_id).unwrap().to_string();
|
||||
let edit = text_edit_builder.finish();
|
||||
edit.apply(&mut result);
|
||||
let expected = analysis_after.file_text(file_id).unwrap().to_string();
|
||||
assert_eq_text!(&*expected, &*result);
|
||||
}
|
||||
// Check the expectations for the remaining files in the new fixture.
|
||||
for file_id in &fixture_after.files {
|
||||
if !file_ids.contains(file_id) {
|
||||
let actual = analysis_after.file_text(*file_id).unwrap().to_string();
|
||||
let expected = if fixture.files.contains(file_id) {
|
||||
analysis.file_text(*file_id).unwrap().to_string()
|
||||
} else {
|
||||
format!("File {:?} not present in original fixture", file_id)
|
||||
};
|
||||
assert_eq_text!(&*expected, &*actual);
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(err) => {
|
||||
if fixture_after_str.starts_with("error:") {
|
||||
|
|
@ -247,6 +341,16 @@ pub(crate) mod tests {
|
|||
};
|
||||
}
|
||||
|
||||
fn find_new_file_id<'a>(
|
||||
fixture: &'a ChangeFixture,
|
||||
dst: &'a AnchoredPathBuf,
|
||||
) -> Option<(&'a VfsPath, &'a FileId)> {
|
||||
fixture
|
||||
.files_by_path
|
||||
.iter()
|
||||
.find(|(name, _)| name.as_path().unwrap().to_string().ends_with(&dst.path))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_rename_var_1() {
|
||||
check_rename("Y", r#"main() -> I~ = 1."#, r#"main() -> Y = 1."#);
|
||||
|
|
@ -1135,6 +1239,326 @@ pub(crate) mod tests {
|
|||
);
|
||||
}
|
||||
|
||||
// ---------------------------------
|
||||
// Renaming modules
|
||||
|
||||
#[test]
|
||||
fn rename_module_fails_name_exists() {
|
||||
check_rename(
|
||||
"main_2",
|
||||
r#"
|
||||
//- /app_a/src/main.erl
|
||||
-module(ma~in).
|
||||
//- /app_a_/src/main_2.erl
|
||||
-module(main_2).
|
||||
"#,
|
||||
r#"error: module 'main_2' already exists"#,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn rename_module_fails_bad_name_1() {
|
||||
check_rename(
|
||||
"Main",
|
||||
r#"
|
||||
//- /app_a/src/main.erl
|
||||
-module(ma~in).
|
||||
//- /app_a_/src/main_2.erl
|
||||
-module(main_2).
|
||||
"#,
|
||||
r#"error: Invalid new module name: 'Main'"#,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn rename_module_simple() {
|
||||
check_rename(
|
||||
"main_2",
|
||||
r#"
|
||||
//- /app_a/src/main.erl
|
||||
-module(ma~in).
|
||||
"#,
|
||||
r#"
|
||||
//- /app_a/src/main_2.erl
|
||||
-module(main_2).
|
||||
"#,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn rename_module_fails_dup_name() {
|
||||
check_rename(
|
||||
"main_2",
|
||||
r#"
|
||||
//- /app_a/src/main_2.erl
|
||||
-module(main_2).
|
||||
-export([foo/0]).
|
||||
foo() -> ok.
|
||||
//- /app_a/src/main.erl
|
||||
-module(ma~in).
|
||||
-export([foo/0]).
|
||||
foo() -> ok.
|
||||
bar() -> main:foo().
|
||||
baz() -> main:bar().
|
||||
|
||||
//- /app_a/src/other.erl
|
||||
-module(other).
|
||||
-export([bar/0]).
|
||||
bar() -> main:foo().
|
||||
"#,
|
||||
r#"error: module 'main_2' already exists"#,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn rename_module_with_usage_internal() {
|
||||
check_rename(
|
||||
"main_2",
|
||||
r#"
|
||||
//- /app_a/src/main.erl
|
||||
-module(ma~in).
|
||||
-export([foo/0]).
|
||||
foo() -> ok.
|
||||
bar() -> main:foo().
|
||||
baz() -> main:bar().
|
||||
|
||||
//- /app_a/src/other.erl
|
||||
-module(other).
|
||||
-export([bar/0]).
|
||||
bar() -> main:foo().
|
||||
"#,
|
||||
//------------------
|
||||
r#"
|
||||
//- /app_a/src/main_2.erl
|
||||
-module(main_2).
|
||||
-export([foo/0]).
|
||||
foo() -> ok.
|
||||
bar() -> main_2:foo().
|
||||
baz() -> main_2:bar().
|
||||
|
||||
//- /app_a/src/other.erl
|
||||
-module(other).
|
||||
-export([bar/0]).
|
||||
bar() -> main_2:foo().
|
||||
"#,
|
||||
);
|
||||
}
|
||||
#[test]
|
||||
fn rename_module_with_usage_type() {
|
||||
// TODO: check for compile errors in the fixture
|
||||
check_rename(
|
||||
"main_3",
|
||||
r#"
|
||||
//- /app_a/src/main.erl
|
||||
-module(ma~in).
|
||||
-export_type([foo/0]).
|
||||
-type foo() :: ok.
|
||||
//- /app_a/src/other.erl
|
||||
-module(other).
|
||||
-export([bar/0]).
|
||||
-spec bar() -> main:foo().
|
||||
bar() -> ok.
|
||||
"#,
|
||||
r#"
|
||||
//- /app_a/src/main_3.erl
|
||||
-module(main_3).
|
||||
-export_type([foo/0]).
|
||||
-type foo() :: ok.
|
||||
//- /app_a/src/other.erl
|
||||
-module(other).
|
||||
-export([bar/0]).
|
||||
-spec bar() -> main_3:foo().
|
||||
bar() -> ok.
|
||||
"#,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn rename_module_with_usage_record() {
|
||||
check_rename(
|
||||
"main_3",
|
||||
r#"
|
||||
//- /app_a/src/main.erl
|
||||
-module(ma~in).
|
||||
-export_type([foo/0]).
|
||||
-type foo() :: ok.
|
||||
//- /app_a/src/other.erl
|
||||
-module(other).
|
||||
-export([bar/0]).
|
||||
-spec bar() -> main:foo().
|
||||
bar() -> ok.
|
||||
-record(main, {field :: main:foo()}).
|
||||
"#,
|
||||
//------------------
|
||||
r#"
|
||||
//- /app_a/src/main_3.erl
|
||||
-module(main_3).
|
||||
-export_type([foo/0]).
|
||||
-type foo() :: ok.
|
||||
//- /app_a/src/other.erl
|
||||
-module(other).
|
||||
-export([bar/0]).
|
||||
-spec bar() -> main_3:foo().
|
||||
bar() -> ok.
|
||||
-record(main, {field :: main_3:foo()}).
|
||||
"#,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn rename_module_with_usage_fun_arg() {
|
||||
check_rename(
|
||||
"main_3",
|
||||
r#"
|
||||
//- /app_a/src/main.erl
|
||||
-module(ma~in).
|
||||
-export_type([foo/0]).
|
||||
-type foo() :: ok.
|
||||
//- /app_a/src/other.erl
|
||||
-module(other).
|
||||
-export([bar/0]).
|
||||
-spec bar() -> main:foo().
|
||||
bar() ->
|
||||
meck:new(main, [passthrough]),
|
||||
meck:new([other, main] , [passthrough]),
|
||||
meck:unload(main),
|
||||
apply(main, foo, []),
|
||||
ok.
|
||||
-record(main, {field :: main:foo()}).
|
||||
"#,
|
||||
//------------------
|
||||
r#"
|
||||
//- /app_a/src/main_3.erl
|
||||
-module(main_3).
|
||||
-export_type([foo/0]).
|
||||
-type foo() :: ok.
|
||||
//- /app_a/src/other.erl
|
||||
-module(other).
|
||||
-export([bar/0]).
|
||||
-spec bar() -> main_3:foo().
|
||||
bar() ->
|
||||
meck:new(main_3, [passthrough]),
|
||||
meck:new([other, main_3] , [passthrough]),
|
||||
meck:unload(main_3),
|
||||
apply(main_3, foo, []),
|
||||
ok.
|
||||
-record(main, {field :: main_3:foo()}).
|
||||
"#,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn rename_module_with_usage_fun() {
|
||||
check_rename(
|
||||
"main_3",
|
||||
r#"
|
||||
//- /app_a/src/main.erl
|
||||
-module(ma~in).
|
||||
-export([foo/1]).
|
||||
foo(X) -> {X}.
|
||||
//- /app_a/src/other.erl
|
||||
-module(other).
|
||||
-export([bar/1]).
|
||||
-spec bar(term()) -> ok.
|
||||
bar(UStrings) ->
|
||||
Jobs = [{fun main:foo/1, [U], []} || U <- UStrings],
|
||||
ok.
|
||||
"#,
|
||||
r#"
|
||||
//- /app_a/src/main_3.erl
|
||||
-module(main_3).
|
||||
-export([foo/1]).
|
||||
foo(X) -> {X}.
|
||||
//- /app_a/src/other.erl
|
||||
-module(other).
|
||||
-export([bar/1]).
|
||||
-spec bar(term()) -> ok.
|
||||
bar(UStrings) ->
|
||||
Jobs = [{fun main_3:foo/1, [U], []} || U <- UStrings],
|
||||
ok.
|
||||
"#,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn rename_module_with_usage_fun_as_module() {
|
||||
check_rename(
|
||||
"main_3",
|
||||
r#"
|
||||
//- /app_a/src/main.erl
|
||||
-module(ma~in).
|
||||
-export([main/1]).
|
||||
main(X) -> {X}.
|
||||
//- /app_a/src/other.erl
|
||||
-module(other).
|
||||
-export([bar/1]).
|
||||
-spec bar(term()) -> ok.
|
||||
bar(UStrings) ->
|
||||
Jobs = [{fun main:main/1, [U], []} || U <- UStrings],
|
||||
ok.
|
||||
"#,
|
||||
r#"
|
||||
//- /app_a/src/main_3.erl
|
||||
-module(main_3).
|
||||
-export([main/1]).
|
||||
main(X) -> {X}.
|
||||
//- /app_a/src/other.erl
|
||||
-module(other).
|
||||
-export([bar/1]).
|
||||
-spec bar(term()) -> ok.
|
||||
bar(UStrings) ->
|
||||
Jobs = [{fun main_3:main/1, [U], []} || U <- UStrings],
|
||||
ok.
|
||||
"#,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn rename_module_with_usage_define() {
|
||||
check_rename(
|
||||
"main_3",
|
||||
r#"
|
||||
//- /app_a/src/main.erl
|
||||
-module(ma~in).
|
||||
-export([foo/1]).
|
||||
foo(X) -> {X}.
|
||||
|
||||
//- /app_a/src/definer.hrl
|
||||
-define(FOO(X), main:foo(X)).
|
||||
|
||||
//- /app_a/src/other.erl
|
||||
-module(other).
|
||||
-include("definer.hrl").
|
||||
-export([bar/0]).
|
||||
-spec bar(term()) -> ok.
|
||||
bar(U) ->
|
||||
main:foo(U),
|
||||
?FOO(U),
|
||||
ok.
|
||||
"#,
|
||||
//------------------
|
||||
r#"
|
||||
//- /app_a/src/main_3.erl
|
||||
-module(main_3).
|
||||
-export([foo/1]).
|
||||
foo(X) -> {X}.
|
||||
|
||||
//- /app_a/src/definer.hrl
|
||||
-define(FOO(X), main_3:foo(X)).
|
||||
|
||||
//- /app_a/src/other.erl
|
||||
-module(other).
|
||||
-include("definer.hrl").
|
||||
-export([bar/0]).
|
||||
-spec bar(term()) -> ok.
|
||||
bar(U) ->
|
||||
main_3:foo(U),
|
||||
?FOO(U),
|
||||
ok.
|
||||
"#,
|
||||
);
|
||||
}
|
||||
|
||||
// ---------------------------------
|
||||
|
||||
#[track_caller]
|
||||
|
|
|
|||
|
|
@ -378,6 +378,7 @@ pub(crate) fn check_diagnostics(fixture: &str) {
|
|||
.disable(DiagnosticCode::UnspecificInclude)
|
||||
.disable(DiagnosticCode::BinaryStringToSigil)
|
||||
.disable(DiagnosticCode::HirUnresolvedMacro)
|
||||
.disable(DiagnosticCode::BoundVarInLhs)
|
||||
.disable(DiagnosticCode::HirUnresolvedInclude);
|
||||
check_diagnostics_with_config(config, fixture)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -40,7 +40,7 @@ mod helpers;
|
|||
mod keywords;
|
||||
mod macros;
|
||||
mod maps;
|
||||
// @fb-only
|
||||
// @fb-only: mod meta_only;
|
||||
mod modules;
|
||||
mod records;
|
||||
mod spec;
|
||||
|
|
@ -176,7 +176,7 @@ pub fn completions(
|
|||
}
|
||||
CtxKind::Other => {
|
||||
let _ = attributes::add_completions(&mut acc, ctx)
|
||||
// @fb-only
|
||||
// @fb-only: || meta_only::add_completions(&mut acc, ctx)
|
||||
|| vars::add_completions(&mut acc, ctx)
|
||||
|| maps::add_completions(&mut acc, ctx)
|
||||
|| records::add_completions(&mut acc, ctx);
|
||||
|
|
|
|||
|
|
@ -20,9 +20,9 @@ use serde::de;
|
|||
use strum::IntoEnumIterator;
|
||||
use strum_macros::EnumIter;
|
||||
|
||||
// @fb-only
|
||||
// @fb-only: use crate::meta_only::MetaOnlyDiagnosticCode;
|
||||
|
||||
// @fb-only
|
||||
// @fb-only: pub const BASE_URL: &str = crate::meta_only::BASE_URL;
|
||||
pub const BASE_URL: &str = "https://whatsapp.github.io/erlang-language-platform/docs"; // @oss-only
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Hash, EnumIter)]
|
||||
|
|
@ -93,6 +93,7 @@ pub enum DiagnosticCode {
|
|||
ListsReverseAppend,
|
||||
HirUnresolvedMacro,
|
||||
HirUnresolvedInclude,
|
||||
BoundVarInLhs,
|
||||
|
||||
// Wrapper for erlang service diagnostic codes
|
||||
ErlangService(String),
|
||||
|
|
@ -100,7 +101,7 @@ pub enum DiagnosticCode {
|
|||
Eqwalizer(String),
|
||||
// Used for ad-hoc diagnostics via lints/codemods
|
||||
AdHoc(String),
|
||||
// @fb-only
|
||||
// @fb-only: MetaOnly(MetaOnlyDiagnosticCode),
|
||||
}
|
||||
|
||||
// These namespaces map the error codes returned by the Erlang Service.
|
||||
|
|
@ -116,7 +117,7 @@ pub enum Namespace {
|
|||
Parser,
|
||||
EDoc,
|
||||
WhatsApp,
|
||||
// @fb-only
|
||||
// @fb-only: MetaOnly,
|
||||
}
|
||||
|
||||
impl fmt::Display for Namespace {
|
||||
|
|
@ -131,7 +132,7 @@ impl fmt::Display for Namespace {
|
|||
Namespace::Parser => "p",
|
||||
Namespace::EDoc => "o",
|
||||
Namespace::WhatsApp => "w",
|
||||
// @fb-only
|
||||
// @fb-only: Namespace::MetaOnly => "meta_only",
|
||||
};
|
||||
write!(f, "{namespace}")
|
||||
}
|
||||
|
|
@ -164,7 +165,7 @@ impl Namespace {
|
|||
pub fn supports_doc_path(&self) -> bool {
|
||||
match self {
|
||||
Namespace::WhatsApp => true,
|
||||
// @fb-only
|
||||
// @fb-only: Namespace::MetaOnly => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
|
@ -256,10 +257,11 @@ impl DiagnosticCode {
|
|||
DiagnosticCode::HirUnresolvedMacro => "W0057".to_string(),
|
||||
DiagnosticCode::HirUnresolvedInclude => "W0058".to_string(),
|
||||
DiagnosticCode::UnavailableType => "W0059".to_string(),
|
||||
DiagnosticCode::BoundVarInLhs => "W0060".to_string(),
|
||||
DiagnosticCode::ErlangService(c) => c.to_string(),
|
||||
DiagnosticCode::Eqwalizer(c) => format!("eqwalizer: {c}"),
|
||||
DiagnosticCode::AdHoc(c) => format!("ad-hoc: {c}"),
|
||||
// @fb-only
|
||||
// @fb-only: DiagnosticCode::MetaOnly(c) => c.as_code(),
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -271,6 +273,7 @@ impl DiagnosticCode {
|
|||
DiagnosticCode::HeadMismatch => "head_mismatch".to_string(),
|
||||
DiagnosticCode::SyntaxError => "syntax_error".to_string(),
|
||||
DiagnosticCode::BoundVarInPattern => "bound_var_in_pattern".to_string(),
|
||||
DiagnosticCode::BoundVarInLhs => "bound_var_in_lhs".to_string(),
|
||||
DiagnosticCode::ModuleMismatch => "module_mismatch".to_string(),
|
||||
DiagnosticCode::UnusedMacro => "unused_macro".to_string(),
|
||||
DiagnosticCode::UnusedRecordField => "unused_record_field".to_string(),
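Both string forms of the new variant are exercised elsewhere in this diff: the code appears in the warning annotation of the bound_variable tests and the label in the generated `elp:ignore` fix. For instance:

// As used in the bound_variable tests above:
//   annotation:      "W0060: Match on a bound variable"
//   ignore comment:  "% elp:ignore W0060 (bound_var_in_lhs)"
assert_eq!(DiagnosticCode::BoundVarInLhs.as_code(), "W0060");
assert_eq!(DiagnosticCode::BoundVarInLhs.as_label(), "bound_var_in_lhs");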
|
||||
|
|
@ -360,7 +363,7 @@ impl DiagnosticCode {
|
|||
DiagnosticCode::ErlangService(c) => c.to_string(),
|
||||
DiagnosticCode::Eqwalizer(c) => c.to_string(),
|
||||
DiagnosticCode::AdHoc(c) => format!("ad-hoc: {c}"),
|
||||
// @fb-only
|
||||
// @fb-only: DiagnosticCode::MetaOnly(c) => c.as_label(),
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -371,7 +374,7 @@ impl DiagnosticCode {
|
|||
pub fn maybe_from_string(s: &str) -> Option<DiagnosticCode> {
|
||||
DIAGNOSTIC_CODE_LOOKUPS
|
||||
.get(s).cloned()
|
||||
// @fb-only
|
||||
// @fb-only: .or_else(|| MetaOnlyDiagnosticCode::from_str(s).ok().map(DiagnosticCode::MetaOnly))
|
||||
.or_else( ||
|
||||
// Look for ErlangService and AdHoc
|
||||
if let Some(code) = Self::is_adhoc(s) {
|
||||
|
|
@ -388,7 +391,7 @@ impl DiagnosticCode {
|
|||
match self {
|
||||
DiagnosticCode::DefaultCodeForEnumIter => None,
|
||||
DiagnosticCode::AdHoc(_) => None,
|
||||
// @fb-only
|
||||
// @fb-only: DiagnosticCode::MetaOnly(_) => Some(Namespace::MetaOnly),
|
||||
DiagnosticCode::ErlangService(code) => Namespace::from_str(code).ok(),
|
||||
_ => Namespace::from_str(&self.as_code()).ok(),
|
||||
}
|
||||
|
|
@ -397,7 +400,7 @@ impl DiagnosticCode {
|
|||
pub fn supports_doc_path(&self) -> bool {
|
||||
match self {
|
||||
DiagnosticCode::DefaultCodeForEnumIter => false,
|
||||
// @fb-only
|
||||
// @fb-only: DiagnosticCode::MetaOnly(MetaOnlyDiagnosticCode::DefaultCodeForEnumIter) => false,
|
||||
_ => true,
|
||||
}
|
||||
}
|
||||
|
|
@ -486,6 +489,7 @@ impl DiagnosticCode {
|
|||
DiagnosticCode::ModuleMismatch => false,
|
||||
DiagnosticCode::UnusedInclude => false,
|
||||
DiagnosticCode::BoundVarInPattern => false,
|
||||
DiagnosticCode::BoundVarInLhs => false,
|
||||
DiagnosticCode::UnusedMacro => false,
|
||||
DiagnosticCode::UnusedRecordField => false,
|
||||
DiagnosticCode::MutableVarBug => false,
|
||||
|
|
@ -541,7 +545,7 @@ impl DiagnosticCode {
|
|||
DiagnosticCode::ErlangService(_) => false,
|
||||
DiagnosticCode::Eqwalizer(_) => false,
|
||||
DiagnosticCode::AdHoc(_) => false,
|
||||
// @fb-only
|
||||
// @fb-only: DiagnosticCode::MetaOnly(code) => code.allows_fixme_comment(),
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -12,7 +12,6 @@ use std::sync::Arc;
|
|||
|
||||
use elp_base_db::FileId;
|
||||
use elp_base_db::FileRange;
|
||||
use elp_base_db::FileSource;
|
||||
use elp_base_db::ModuleName;
|
||||
use elp_base_db::ProjectId;
|
||||
use elp_base_db::SourceDatabase;
|
||||
|
|
@ -89,7 +88,7 @@ pub trait EqwalizerDatabase:
|
|||
fn types_for_file(&self, file_id: FileId) -> Option<Arc<Vec<(Pos, Type)>>>;
|
||||
fn has_eqwalizer_module_marker(&self, file_id: FileId) -> bool;
|
||||
fn has_eqwalizer_ignore_marker(&self, file_id: FileId) -> bool;
|
||||
fn is_eqwalizer_enabled(&self, file_id: FileId, include_tests: bool) -> bool;
|
||||
fn is_eqwalizer_enabled(&self, file_id: FileId) -> bool;
|
||||
}
|
||||
|
||||
pub fn eqwalizer_diagnostics_by_project(
|
||||
|
|
@ -114,7 +113,7 @@ fn type_at_position(
|
|||
db: &dyn EqwalizerDatabase,
|
||||
range: FileRange,
|
||||
) -> Option<Arc<(eqwalizer::types::Type, FileRange)>> {
|
||||
if !db.is_eqwalizer_enabled(range.file_id, false) {
|
||||
if !db.is_eqwalizer_enabled(range.file_id) {
|
||||
return None;
|
||||
}
|
||||
let project_id = db.file_app_data(range.file_id)?.project_id;
|
||||
|
|
@ -149,7 +148,7 @@ fn type_at_position(
|
|||
}
|
||||
|
||||
fn types_for_file(db: &dyn EqwalizerDatabase, file_id: FileId) -> Option<Arc<Vec<(Pos, Type)>>> {
|
||||
if !db.is_eqwalizer_enabled(file_id, false) {
|
||||
if !db.is_eqwalizer_enabled(file_id) {
|
||||
return None;
|
||||
}
|
||||
let project_id = db.file_app_data(file_id)?.project_id;
|
||||
|
|
@ -162,7 +161,7 @@ fn types_for_file(db: &dyn EqwalizerDatabase, file_id: FileId) -> Option<Arc<Vec
|
|||
None
|
||||
}
|
||||
|
||||
fn is_eqwalizer_enabled(db: &dyn EqwalizerDatabase, file_id: FileId, include_tests: bool) -> bool {
|
||||
fn is_eqwalizer_enabled(db: &dyn EqwalizerDatabase, file_id: FileId) -> bool {
|
||||
if !otp_supported_by_eqwalizer() {
|
||||
return false;
|
||||
}
|
||||
|
|
@ -178,11 +177,8 @@ fn is_eqwalizer_enabled(db: &dyn EqwalizerDatabase, file_id: FileId, include_tes
|
|||
let project = db.project_data(project_id);
|
||||
let eqwalizer_config = &project.eqwalizer_config;
|
||||
let module_index = db.module_index(project_id);
|
||||
let is_src = module_index.file_source_for_file(file_id) == Some(FileSource::Src);
|
||||
let is_test_opted_in = db.is_test_suite_or_test_helper(file_id) == Some(true) && include_tests;
|
||||
let global_opt_in = eqwalizer_config.enable_all;
|
||||
let opt_in =
|
||||
(global_opt_in && (is_src || is_test_opted_in)) || db.has_eqwalizer_module_marker(file_id);
|
||||
let opt_in = global_opt_in || db.has_eqwalizer_module_marker(file_id);
|
||||
let ignored_in_config = if let Some(module_name) = module_index.module_for_file(file_id) {
|
||||
eqwalizer_config
|
||||
.ignore_modules_compiled_patterns
|
||||
|
|
|
|||
|
|
@ -43,11 +43,22 @@ pub fn pick_best_token(
|
|||
tokens.max_by_key(move |t| f(t.kind()))
|
||||
}
|
||||
|
||||
/// Given a syntax node, check if it is immediately enclosed in a call,
/// which can represent a function call or a type.
/// For a remote call, the node can be the module or the function name.
/// In the former case, there is an extra level of nesting, so we need
/// to check up to 3 steps up.
|
||||
pub fn get_call(syntax: &SyntaxNode) -> Option<ast::Call> {
|
||||
if let Some(call) = ast::Call::cast(syntax.parent()?) {
|
||||
Some(call)
|
||||
ast::Call::cast(syntax.parent()?)
|
||||
.or_else(|| ast::Call::cast(syntax.parent()?.parent()?))
|
||||
.or_else(|| ast::Call::cast(syntax.parent()?.parent()?.parent()?))
|
||||
}
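// Rough picture, for orientation (node names informal): in `lists:map(F, Xs)`
// the enclosing Call is reached from the call name in one or two parent()
// hops, and from the module name `lists` in one more, which is why up to
// three steps are tried here.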
|
||||
|
||||
pub fn get_external_fun(syntax: &SyntaxNode) -> Option<ast::ExternalFun> {
|
||||
if let Some(external_fun) = ast::ExternalFun::cast(syntax.parent()?) {
|
||||
Some(external_fun)
|
||||
} else {
|
||||
ast::Call::cast(syntax.parent()?.parent()?)
|
||||
ast::ExternalFun::cast(syntax.parent()?.parent()?)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -57,7 +57,7 @@ pub mod docs;
|
|||
pub mod eqwalizer;
|
||||
mod erl_ast;
|
||||
mod line_index;
|
||||
// @fb-only
|
||||
// @fb-only: pub mod meta_only;
|
||||
pub mod metadata;
|
||||
mod search;
|
||||
pub mod text_edit;
|
||||
|
|
@ -385,7 +385,7 @@ impl TypedSemantic for RootDatabase {
|
|||
|
||||
let project_id = app_data.project_id;
|
||||
|
||||
let eqwalizer_enabled = self.is_eqwalizer_enabled(file_id, false);
|
||||
let eqwalizer_enabled = self.is_eqwalizer_enabled(file_id);
|
||||
if !eqwalizer_enabled {
|
||||
return Some(vec![]);
|
||||
}
|
||||
|
|
|
|||
|
|
@ -15,8 +15,10 @@
|
|||
use std::fmt;
|
||||
use std::iter::once;
|
||||
|
||||
use elp_base_db::AnchoredPathBuf;
|
||||
use elp_base_db::FileId;
|
||||
use elp_base_db::FileRange;
|
||||
use elp_base_db::ModuleName;
|
||||
use elp_syntax::AstNode;
|
||||
use elp_syntax::ast;
|
||||
use elp_syntax::ast::in_erlang_module;
|
||||
|
|
@ -25,9 +27,12 @@ use hir::Semantic;
|
|||
|
||||
use crate::SymbolDefinition;
|
||||
use crate::helpers::get_call;
|
||||
use crate::helpers::get_external_fun;
|
||||
use crate::search::NameLike;
|
||||
use crate::source_change::FileSystemEdit;
|
||||
use crate::source_change::SourceChange;
|
||||
use crate::text_edit::TextEdit;
|
||||
use crate::text_edit::TextEditBuilder;
|
||||
|
||||
pub type RenameResult<T> = Result<T, RenameError>;
|
||||
|
||||
|
|
@ -106,6 +111,18 @@ pub fn is_valid_type_name(new_name: &String) -> bool {
false
}

// Delegate checking module name validity to the parser
pub fn is_valid_module_name(new_name: &String) -> bool {
let parse = ast::SourceFile::parse_text(format!("-module({}).", new_name).as_str());
match parse.tree().forms().next() {
Some(ast::Form::ModuleAttribute(ma)) => match ma.name() {
Some(ast::Name::Atom(atom)) => atom.syntax().text().to_string() == *new_name,
_ => false,
},
_ => false,
}
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum SafetyChecks {
Yes,
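As a sanity check on the parser delegation above, a hedged test-style sketch; the exact accepted set is whatever `SourceFile::parse_text` turns into a matching module attribute, and the sample names below are illustrative:

// Sketch only: plain atoms should round-trip through the parsed -module(...) form,
// while strings that do not parse back to a single matching atom should be rejected.
#[test]
fn is_valid_module_name_sketch() {
    assert!(is_valid_module_name(&"my_module".to_string()));
    assert!(!is_valid_module_name(&"not a module".to_string()));
    assert!(!is_valid_module_name(&"".to_string()));
}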
@ -122,7 +139,10 @@ impl SymbolDefinition {
) -> RenameResult<SourceChange> {
match self.clone() {
SymbolDefinition::Module(_) => {
rename_error!("Cannot rename module")
if safety_check == SafetyChecks::Yes && !is_valid_module_name(new_name) {
rename_error!("Invalid new module name: '{}'", new_name);
}
self.rename_reference(sema, new_name, parens_needed_in_context, safety_check)
}
SymbolDefinition::Function(fun) => {
if safety_check == SafetyChecks::Yes && !is_valid_function_name(new_name) {
@ -375,6 +395,7 @@ impl SymbolDefinition {
);
Ok(source_change)
}
SymbolDefinition::Module(_module) => self.rename_module(sema, new_name, safety_check),
// Note: This is basically an internal error, this function is called from
// SymbolDefinition::rename which already weeds them out
_ => {
@ -382,6 +403,184 @@ impl SymbolDefinition {
}
}
}

fn rename_module(
&self,
sema: &Semantic,
new_name: &str,
safety_check: SafetyChecks,
) -> RenameResult<SourceChange> {
let file_id = self.file().file_id;
if let Some(project_id) = sema.db.file_project_id(file_id) {
let module_index = sema.db.module_index(project_id);
if safety_check == SafetyChecks::Yes {
let new_name_module = ModuleName::new(new_name);
if module_index
.all_modules()
.iter()
.any(|name| name == &new_name_module)
{
rename_error!("module '{}' already exists", new_name);
}
}

let mut source_change = SourceChange::default();
// Step 1, rename all references
let usages = self.clone().usages(sema).all();
let mut renamed_module_edit: TextEdit = TextEdit::default();
rename_remote_module_call_refs(
usages,
file_id,
new_name,
&mut source_change,
&mut renamed_module_edit,
);

// Step 2: Rename the module attribute in the module being renamed
let form_list = sema.form_list(file_id);
if let Some(module_attribute) = form_list.module_attribute() {
let ast = module_attribute.form_id.get_ast(sema.db, file_id);
if let Some(name) = ast.name() {
let range = name.syntax().text_range();
let mut builder = TextEdit::builder();
builder.replace(range, new_name.to_string());
renamed_module_edit
.union(builder.finish())
.expect("Could not combine TextEdits");
}
}

let anchor = file_id;
let path = format!("{new_name}.erl");
let dst = AnchoredPathBuf { anchor, path };
source_change.insert_new_source_edit(dst.clone().into(), renamed_module_edit);
source_change.push_file_system_edit(FileSystemEdit::MoveFile { src: anchor, dst });
Ok(source_change)
} else {
rename_error!(
"Could not find project for '{:?}'",
self.file().name(sema.db.upcast())
)
}
}
}

fn rename_remote_module_call_refs(
usages: crate::UsageSearchResult,
file_id: FileId,
new_name: &str,
source_change: &mut SourceChange,
renamed_module_edit: &mut TextEdit,
) {
usages.iter().for_each(|(usage_file_id, refs)| {
if let Some(edit) = rename_module_in_refs(refs, new_name) {
if usage_file_id == file_id {
renamed_module_edit
.union(edit)
.expect("Could not combine TextEdits");
} else {
source_change.insert_source_edit(usage_file_id, edit);
}
};
});
}

fn rename_module_in_refs(refs: &[NameLike], new_name: &str) -> Option<TextEdit> {
let mut builder = TextEdit::builder();
for usage in refs {
// Note: we cannot blindly replace all occurrences of an
// atom that happens to be a module name
// We will flesh out other usages as we need them
let _ = rename_call_module_in_ref(usage, &mut builder, new_name);
let _ = rename_external_fun_module_in_ref(usage, &mut builder, new_name);
}
Some(builder.finish())
}

fn rename_call_module_in_ref(
usage: &NameLike,
builder: &mut TextEditBuilder,
new_name: &str,
) -> Option<()> {
let call = get_call(usage.syntax())?;
// We can only rename an atom usage
let usage_atom = match usage {
NameLike::Name(ast::Name::Atom(atom)) => atom,
_ => return Some(()),
};

// First check if this is the module part of a remote call (e.g., module:function())
if let Some(ast::Expr::Remote(remote)) = call.expr()
&& let Some(module) = remote.module()
&& let Some(ast::ExprMax::Atom(mod_atom)) = module.module()
&& mod_atom.syntax() == usage_atom.syntax()
{
builder.replace(usage_atom.syntax().text_range(), new_name.to_string());
return Some(());
}

// Check if this is a known function call that takes a module as an argument
// Extract function name and optional module name based on call type
let (module_name, function_name) = match call.expr()? {
ast::Expr::Remote(remote) => {
let module = remote.module()?;
let mod_atom = match module.module()? {
ast::ExprMax::Atom(atom) => atom,
_ => return Some(()),
};
let fun_atom = match remote.fun()? {
ast::ExprMax::Atom(atom) => atom,
_ => return Some(()),
};
(Some(mod_atom.text()?), fun_atom.text()?)
}
ast::Expr::ExprMax(ast::ExprMax::Atom(fun_atom)) => (None, fun_atom.text()?),
_ => return Some(()),
};

let args = call.args()?;
let args_vec: Vec<_> = args.args().collect();
let arity = args_vec.len();
let pattern_key = (module_name.as_deref(), function_name.as_str(), arity);

// Use combined patterns that merge dynamic call patterns and module argument patterns
let combined_patterns = hir::sema::to_def::get_module_arg_patterns();
if let Some(pattern) = combined_patterns.get(&pattern_key)
&& let Some(arg) = args_vec.get(pattern.index)
{
match arg {
ast::Expr::ExprMax(ast::ExprMax::Atom(arg_atom))
if pattern.accepts_atom() && arg_atom.syntax() == usage_atom.syntax() =>
{
builder.replace(usage_atom.syntax().text_range(), new_name.to_string());
}
ast::Expr::ExprMax(ast::ExprMax::List(list)) if pattern.accepts_list() => {
// Handle list of modules (e.g., meck:new([mod1, mod2], Options))
for expr in list.exprs() {
if let ast::Expr::ExprMax(ast::ExprMax::Atom(list_atom)) = expr
&& list_atom.syntax() == usage_atom.syntax()
{
builder.replace(usage_atom.syntax().text_range(), new_name.to_string());
break;
}
}
}
_ => {}
}
}

Some(())
}

fn rename_external_fun_module_in_ref(
usage: &NameLike,
builder: &mut TextEditBuilder,
new_name: &str,
) -> Option<()> {
let external_fun = get_external_fun(usage.syntax())?;
let module = external_fun.module()?;
builder.replace(module.name()?.syntax().text_range(), new_name.to_string());
Some(())
}

fn source_edit_from_usages(
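To make the argument-position lookup in `rename_call_module_in_ref` concrete: for a call such as `meck:new(old_mod, Opts)` the key would be `(Some("meck"), "new", 2)`, and the returned pattern's `index` picks the argument that names a module. A small hedged sketch of building that key, mirroring the code above; whether any given key is actually present in the table is an assumption.

// Sketch only: the key is (module of the call, function name, arity),
// exactly as rename_call_module_in_ref builds it above.
fn lookup_module_arg_position() -> Option<usize> {
    let module_name: Option<String> = Some("meck".to_string()); // hypothetical call meck:new(old_mod, Opts)
    let function_name = "new".to_string();
    let arity = 2usize;
    let pattern_key = (module_name.as_deref(), function_name.as_str(), arity);
    let combined_patterns = hir::sema::to_def::get_module_arg_patterns();
    // index is the argument position holding the module; accepts_atom()/accepts_list()
    // control whether a bare atom, a list of atoms, or both get rewritten there.
    combined_patterns.get(&pattern_key).map(|pattern| pattern.index)
}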
@ -325,7 +325,7 @@ impl<'a> FindUsages<'a> {

/// Represents possible ast reference points -
/// a string for header, or ast::Name for everything else
#[derive(Debug, Clone)]
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum NameLike {
Name(ast::Name),
String(ast::String),
@ -30,9 +30,36 @@ use crate::text_edit::TextEdit;
use crate::text_edit::TextEditBuilder;
use crate::tree_diff::diff;

#[derive(PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Debug)]
pub struct HashableAnchoredPathBuf {
/// File that this path is relative to.
pub anchor: FileId,
/// Path relative to `anchor`'s containing directory.
pub path: String,
}

impl From<AnchoredPathBuf> for HashableAnchoredPathBuf {
fn from(value: AnchoredPathBuf) -> Self {
HashableAnchoredPathBuf {
anchor: value.anchor,
path: value.path,
}
}
}

impl From<HashableAnchoredPathBuf> for AnchoredPathBuf {
fn from(value: HashableAnchoredPathBuf) -> Self {
AnchoredPathBuf {
anchor: value.anchor,
path: value.path,
}
}
}

#[derive(Default, Debug, Clone)]
pub struct SourceChange {
pub source_file_edits: FxHashMap<FileId, TextEdit>,
pub new_file_edits: FxHashMap<HashableAnchoredPathBuf, TextEdit>,
pub file_system_edits: Vec<FileSystemEdit>,
pub is_snippet: bool,
}
@ -46,6 +73,7 @@ impl SourceChange {
) -> Self {
SourceChange {
source_file_edits,
new_file_edits: FxHashMap::default(),
file_system_edits,
is_snippet: false,
}
@ -74,6 +102,22 @@ impl SourceChange {
}
}

/// Inserts a [`TextEdit`] for the given [`AnchoredPathBuf`]. This properly handles merging existing
/// edits for a file if some already exist.
pub fn insert_new_source_edit(&mut self, file_id: HashableAnchoredPathBuf, edit: TextEdit) {
match self.new_file_edits.entry(file_id) {
Entry::Occupied(mut entry) => {
never!(
entry.get_mut().union(edit).is_err(),
"overlapping edits for same file"
);
}
Entry::Vacant(entry) => {
entry.insert(edit);
}
}
}

pub fn push_file_system_edit(&mut self, edit: FileSystemEdit) {
self.file_system_edits.push(edit);
}
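A hedged sketch of how these two entry points are meant to be combined for a module rename, mirroring `rename_module` above; the helper name and the target file name below are made up, and the `TextEdit` is assumed to already hold the content changes:

// Sketch only: stage the rewritten content under the file's future path,
// then record the actual on-disk move.
fn module_rename_change(file_id: FileId, edit: TextEdit) -> SourceChange {
    let mut change = SourceChange::default();
    let dst = AnchoredPathBuf {
        anchor: file_id,                  // file being renamed (assumed)
        path: "new_name.erl".to_string(), // hypothetical new file name
    };
    change.insert_new_source_edit(dst.clone().into(), edit);
    change.push_file_system_edit(FileSystemEdit::MoveFile { src: file_id, dst });
    change
}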
@ -85,12 +129,15 @@
pub fn merge(mut self, other: SourceChange) -> SourceChange {
self.extend(other.source_file_edits);
self.extend(other.file_system_edits);
self.extend(other.new_file_edits);
self.is_snippet |= other.is_snippet;
self
}

pub fn is_empty(&self) -> bool {
self.source_file_edits.is_empty() && self.file_system_edits.is_empty()
self.source_file_edits.is_empty()
&& self.file_system_edits.is_empty()
&& self.new_file_edits.is_empty()
}

pub fn text_range(&self, file_id: FileId) -> Option<TextRange> {
@ -116,10 +163,18 @@ impl Extend<FileSystemEdit> for SourceChange {
}
}

impl Extend<(HashableAnchoredPathBuf, TextEdit)> for SourceChange {
fn extend<T: IntoIterator<Item = (HashableAnchoredPathBuf, TextEdit)>>(&mut self, iter: T) {
iter.into_iter()
.for_each(|(file_id, edit)| self.insert_new_source_edit(file_id, edit));
}
}

impl From<FxHashMap<FileId, TextEdit>> for SourceChange {
fn from(source_file_edits: FxHashMap<FileId, TextEdit>) -> SourceChange {
SourceChange {
source_file_edits,
new_file_edits: FxHashMap::default(),
file_system_edits: Vec::new(),
is_snippet: false,
}
@ -265,6 +320,7 @@ impl From<FileSystemEdit> for SourceChange {
fn from(edit: FileSystemEdit) -> SourceChange {
SourceChange {
source_file_edits: Default::default(),
new_file_edits: Default::default(),
file_system_edits: vec![edit],
is_snippet: false,
}
@ -58,6 +58,7 @@ lazy_static! {
}

const ERL_EXT: &str = "erl";
const BUCK_ISOLATION_DIR: &str = "lsp";

#[derive(
Debug,
@ -108,7 +109,7 @@ impl BuckConfig {
cmd.env_remove("RUST_BACKTRACE")
.env_remove("RUST_LIB_BACKTRACE");
cmd.arg("--isolation-dir");
cmd.arg("lsp");
cmd.arg(BUCK_ISOLATION_DIR);
cmd.current_dir(self.buck_root());
CommandProxy::new(guard, cmd)
}
@ -1362,36 +1363,56 @@ fn include_path_from_file(path: &AbsPath) -> AbsPathBuf {
}
}

fn check_buck_output_success(mut command: CommandProxy<'_>) -> Result<String> {
let output = command.output()?;
if output.status.success() {
return String::from_utf8(output.stdout)
.map_err(|e| anyhow::anyhow!("Invalid UTF-8 in stdout for `{command}`: {e}"));
}
let reason = match output.status.code() {
Some(code) => format!("Exited with status code: {code}"),
None => "Process terminated by signal".to_string(),
};
let details = String::from_utf8(output.stderr).unwrap_or_default();
bail!("Command `{command}` failed. Reason: {reason}. Details: {details}");
}

/// This is used in tests
pub fn get_prelude_cell(buck_config: &BuckConfig) -> Result<String> {
let output = buck_config
.buck_command()
let mut command = buck_config.buck_command();
command
.arg("audit")
.arg("cell")
.arg("prelude")
.output()?;
if !output.status.success() {
let reason = match output.status.code() {
Some(code) => format!("Exited with status code: {code}"),
None => "Process terminated by signal".to_string(),
};
let details = match String::from_utf8(output.stderr) {
Ok(err) => err,
Err(_) => "".to_string(),
};
bail!("Error evaluating Buck2 query Reason: {reason}. Details: {details}",);
}
let raw_output = String::from_utf8(output.stdout)?;
.arg("--json");
let raw_output = check_buck_output_success(command)?;

lazy_static! {
static ref RE: Regex = Regex::new(r"^prelude: ([^\s]+)").unwrap();
let json: serde_json::Value = serde_json::from_str(&raw_output)?;
let prelude_path = json
.get("prelude")
.and_then(|v| v.as_str())
.ok_or_else(|| anyhow::anyhow!("Could not find prelude path in Buck2 output"))?
.to_string();

if Path::new(&prelude_path).exists() {
Ok(prelude_path)
} else {
get_prelude_cell_bundled(buck_config)
}
let string = RE
.captures_iter(&raw_output)
.next()
.map(|c| c[1].to_string())
.unwrap();
Ok(string)
}

fn get_prelude_cell_bundled(buck_config: &BuckConfig) -> Result<String> {
let mut command = buck_config.buck_command();
command.arg("root");
let root = check_buck_output_success(command)?;
let root = root.trim();
let bundled_prelude_path = Path::new(&root)
.join("buck-out")
.join(BUCK_ISOLATION_DIR)
.join("external_cells")
.join("bundled")
.join("prelude");
Ok(bundled_prelude_path.to_string_lossy().to_string())
}

#[cfg(test)]
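For reference, a small self-contained sketch of the JSON handling the new `get_prelude_cell` relies on; the exact output shape of `buck2 audit cell prelude --json` is an assumption inferred from the `.get("prelude")` lookup above, and the sample path is made up:

// Sketch only: pull the prelude cell path out of the JSON printed by
// `buck2 audit cell prelude --json`.
fn parse_prelude_path(raw_output: &str) -> anyhow::Result<String> {
    let json: serde_json::Value = serde_json::from_str(raw_output)?;
    json.get("prelude")
        .and_then(|v| v.as_str())
        .map(str::to_string)
        .ok_or_else(|| anyhow::anyhow!("Could not find prelude path in Buck2 output"))
}

#[test]
fn parse_prelude_path_sketch() {
    let raw = r#"{"prelude": "/repo/buck-out/lsp/external_cells/bundled/prelude"}"#;
    assert_eq!(
        parse_prelude_path(raw).unwrap(),
        "/repo/buck-out/lsp/external_cells/bundled/prelude"
    );
}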
@ -1622,72 +1643,66 @@ mod tests {
assert_eq!(expected, actual)
}

// TODO: enable when buck is properly set up on github project
// @fb-only
const BUCK_TESTS_ENABLED: bool = false; // @oss-only

#[track_caller]
fn check_buck_bxl_query(build_generated: bool, expect: Expect) {
if BUCK_TESTS_ENABLED {
let buck_root = to_abs_path_buf(&std::env::current_dir().unwrap()).unwrap();
// We only need buck_config to get the buck command, everything but the buck root is ignored.
let buck_config = BuckConfig {
config_path: None,
buck_root: Some(buck_root),
enabled: true,
deps_target: None,
deps_targets: vec![],
build_deps: false,
included_targets: vec![],
excluded_targets: vec![],
source_root: None,
test_application_labels: vec!["test_application".to_string()],
};
let generated_args = if build_generated {
vec!["--build_generated_code", "true"]
} else {
vec![]
};
let output = buck_config
.buck_command()
.arg("bxl")
.arg("prelude//erlang/elp.bxl:elp_config")
.arg("--")
.args(generated_args)
.arg("--included_targets")
.arg("fbcode//whatsapp/elp/test_projects/buck_tests_2/auto_gen/...")
.output()
.unwrap();
if !output.status.success() {
panic!("{output:#?}");
}
let string = String::from_utf8(output.stdout).unwrap();
let prelude_cell = get_prelude_cell(&buck_config).expect("could not get prelude");
let string = string.replace(&prelude_cell, "/[prelude]/");

let to_replace = env!("CARGO_WORKSPACE_DIR");
let string = string.replace(to_replace, "/[..]/");
expect.assert_eq(&string);
let buck_root = to_abs_path_buf(&std::env::current_dir().unwrap()).unwrap();
// We only need buck_config to get the buck command, everything but the buck root is ignored.
let buck_config = BuckConfig {
config_path: None,
buck_root: Some(buck_root),
enabled: true,
deps_target: None,
deps_targets: vec![],
build_deps: false,
included_targets: vec![],
excluded_targets: vec![],
source_root: None,
test_application_labels: vec!["test_application".to_string()],
};
let generated_args = if build_generated {
vec!["--build_generated_code", "true"]
} else {
vec![]
};
let output = buck_config
.buck_command()
.arg("bxl")
.arg("prelude//erlang/elp.bxl:elp_config")
.arg("--")
.args(generated_args)
.arg("--included_targets")
.arg("root//buck_tests_2/auto_gen/...")
.output()
.unwrap();
if !output.status.success() {
panic!("{output:#?}");
}
let string = String::from_utf8(output.stdout).unwrap();
let prelude_cell = get_prelude_cell(&buck_config).expect("could not get prelude");
let string = string.replace(&prelude_cell, "/[prelude]/");

let to_replace = env!("CARGO_WORKSPACE_DIR");
let string = string.replace(to_replace, "/[..]/");
expect.assert_eq(&string);
}

#[test]
#[ignore]
fn build_info_buck_bxl_query() {
if BUCK_TESTS_ENABLED {
if cfg!(feature = "buck") {
check_buck_bxl_query(
false,
expect![[r#"
{
"fbcode//whatsapp/elp/test_projects/buck_tests_2/auto_gen/auto_gen_a:auto_gen_a": {
"root//buck_tests_2/auto_gen/auto_gen_a:auto_gen_a": {
"name": "auto_gen_a",
"app_name": null,
"suite": null,
"srcs": [
"/[..]/test_projects/buck_tests_2/auto_gen/auto_gen_a/src/auto_gen_a.erl"
"/[..]/test/test_projects/buck_tests_2/auto_gen/auto_gen_a/src/auto_gen_a.erl"
],
"includes": [
"/[..]/test_projects/buck_tests_2/auto_gen/auto_gen_a/include"
"/[..]/test/test_projects/buck_tests_2/auto_gen/auto_gen_a/include"
],
"labels": [
"user_application"
@ -1697,7 +1712,7 @@ mod tests {
"included_apps": [],
"origin": "app"
},
"fbcode//whatsapp/elp/test_projects/buck_tests_2/auto_gen/auto_gen_a:generated_srcs": {
"root//buck_tests_2/auto_gen/auto_gen_a:generated_srcs": {
"name": "generated_srcs",
"app_name": null,
"suite": null,
@ -1840,23 +1855,23 @@ mod tests {
#[test]
#[ignore]
fn build_info_buck_bxl_generated_query() {
if BUCK_TESTS_ENABLED {
if cfg!(feature = "buck") {
// Note that there is now a value for `srcs` in the
// "fbcode//whatsapp/elp/test_projects/buck_tests_2/auto_gen/auto_gen_a:generated_srcs"
// "root//buck_tests_2/auto_gen/auto_gen_a:generated_srcs"
// target
check_buck_bxl_query(
true,
expect![[r#"
{
"fbcode//whatsapp/elp/test_projects/buck_tests_2/auto_gen/auto_gen_a:auto_gen_a": {
"root//buck_tests_2/auto_gen/auto_gen_a:auto_gen_a": {
"name": "auto_gen_a",
"app_name": null,
"suite": null,
"srcs": [
"/[..]/test_projects/buck_tests_2/auto_gen/auto_gen_a/src/auto_gen_a.erl"
"/[..]/test/test_projects/buck_tests_2/auto_gen/auto_gen_a/src/auto_gen_a.erl"
],
"includes": [
"/[..]/test_projects/buck_tests_2/auto_gen/auto_gen_a/include"
"/[..]/test/test_projects/buck_tests_2/auto_gen/auto_gen_a/include"
],
"labels": [
"user_application"
@ -1866,12 +1881,12 @@
"included_apps": [],
"origin": "app"
},
"fbcode//whatsapp/elp/test_projects/buck_tests_2/auto_gen/auto_gen_a:generated_srcs": {
"root//buck_tests_2/auto_gen/auto_gen_a:generated_srcs": {
"name": "generated_srcs",
"app_name": null,
"suite": null,
"srcs": [
"/[..]/test_projects/buck_tests_2/auto_gen/auto_gen_a/out/pretend_generated.erl"
"/[..]/test/test_projects/buck_tests_2/auto_gen/auto_gen_a/out/pretend_generated.erl"
],
"includes": [],
"labels": [
4 editors/code/package-lock.json generated
@ -1,12 +1,12 @@
{
"name": "erlang-language-platform",
"version": "0.46.0",
"version": "0.47.0",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "erlang-language-platform",
"version": "0.46.0",
"version": "0.47.0",
"hasInstallScript": true,
"license": "Apache2",
"devDependencies": {
@ -4,7 +4,7 @@
"description": "Erlang Language Support for VS Code, by WhatsApp.",
"author": "Meta Platforms, Inc",
"license": "Apache2",
"version": "0.46.0",
"version": "0.47.0",
"icon": "images/elp-logo-color.png",
"homepage": "https://whatsapp.github.io/erlang-language-platform/",
"repository": {
@ -1 +1 @@
Subproject commit a835ce03b0308a9869af964e35a24466b49cda51
Subproject commit 0f514eb3893fa7070835c83ecb49fbea31b0426d
@ -4372,14 +4372,14 @@ is_format_function(io, fwrite) -> true;
is_format_function(io, format) -> true;
is_format_function(io_lib, fwrite) -> true;
is_format_function(io_lib, format) -> true;
% @fb-only
% @fb-only
% @fb-only: is_format_function(wa_log, send_if) -> true;
% @fb-only: is_format_function(wa_string, format) -> true;
is_format_function(M, F) when is_atom(M), is_atom(F) -> false.

%% check_format_1([Arg]) -> ok | {warn,Level,Format,[Arg]}.

% @fb-only
% @fb-only
% @fb-only[end= ]: format_args(wa_log, send_if, [_Level, _Meta, _Opts, Format, Args]) -> [Format, Args];
% @fb-only[end= ]: format_args(wa_string, format, [Format, Args, _Options]) -> [Format, Args];
format_args(_M, _F, As) ->
As.
24 test/test_projects/.buckconfig Normal file
@ -0,0 +1,24 @@
[cells]
root = .
prelude = prelude
toolchains = toolchains
none = none

[cell_aliases]
config = prelude
ovr_config = prelude
fbcode = none
fbsource = none
fbcode_macros = none
buck = none

[external_cells]
prelude = bundled

[parser]
target_platform_detector_spec = target:root//...->prelude//platforms:default \
target:prelude//...->prelude//platforms:default \
target:toolchains//...->prelude//platforms:default

[build]
execution_platforms = prelude//platforms:default