Compare commits


No commits in common. "main" and "2025-12-10" have entirely different histories.

601 changed files with 7253 additions and 4000 deletions


@ -1,8 +1,8 @@
[alias]
xtask = "run --package xtask --"
# @fb-only: [build]
# @fb-only: target-dir = "../../../buck-out/elp"
# @fb-only
# @fb-only
[profile.release]
codegen-units = 1


@ -30,7 +30,7 @@ jobs:
strategy:
fail-fast: false
matrix:
platform-arch: [ubuntu-22.04-x64, ubuntu-22.04-arm, macos-15-x64, macos-latest-arm, windows-2022-x64]
platform-arch: [ubuntu-22.04-x64, ubuntu-22.04-arm, macos-13-x64, macos-latest-arm, windows-2022-x64]
otp-version: [26.2, 27.3, 28.0]
include:
- otp-version: 26.2
@ -55,8 +55,8 @@ jobs:
os: linux
target: aarch64-unknown-linux-gnu
vscode-target: linux-arm64
- platform-arch: macos-15-x64
platform: macos-15-intel
- platform-arch: macos-13-x64
platform: macos-13
os: macos
target: x86_64-apple-darwin
vscode-target: darwin-x64
@ -97,8 +97,6 @@ jobs:
run: |
sudo apt-get update
sudo apt-get install -y crossbuild-essential-arm64
- name: Install Buck2
uses: dtolnay/install-buck2@latest
- id: setup-erlang
uses: ./.github/actions/setup-erlang
with:
@ -137,7 +135,7 @@ jobs:
- name: Test elp
# Do not run the tests in case of cross-compilation or on Windows
if: matrix.platform-arch != 'macos-latest-arm' && matrix.os != 'windows'
run: 'cargo test --workspace --target ${{ matrix.target }}'
run: 'cargo test --no-default-features --workspace --target ${{ matrix.target }}'
- name: Build elp (No Windows)
if: matrix.os != 'windows'
run: 'cargo build --release --target ${{ matrix.target }} --config target.aarch64-unknown-linux-gnu.linker=\"aarch64-linux-gnu-gcc\"'

.vscode/tasks.json vendored (12 lines changed)

@ -4,7 +4,7 @@
{
"label": "ELP: build (debug)",
"type": "shell",
// @fb-only: "command": "./meta/cargo.sh build",
// @fb-only
"command": "cargo build", // @oss-only
"group": {
"kind": "build",
@ -19,7 +19,7 @@
{
"label": "ELP: build (release)",
"type": "shell",
// @fb-only: "command": "./meta/cargo.sh build --release",
// @fb-only
"command": "cargo build --release", // @oss-only
"group": {
"kind": "build",
@ -34,7 +34,7 @@
{
"label": "ELP: build (release-thin)",
"type": "shell",
// @fb-only: "command": "./meta/cargo.sh build --profile release-thin --bins",
// @fb-only
"command": "cargo build --profile release-thin --bins", // @oss-only
"group": {
"kind": "build",
@ -49,7 +49,7 @@
{
"label": "ELP: run clippy on workspace",
"type": "shell",
// @fb-only: "command": "./meta/clippy.sh --workspace --tests",
// @fb-only
"command": "cargo clippy --workspace --tests", // @oss-only
"group": {
"kind": "build",
@ -64,7 +64,7 @@
{
"label": "ELP: run clippy on workspace, apply fixes",
"type": "shell",
// @fb-only: "command": "./meta/clippy.sh --workspace --tests --fix",
// @fb-only
"command": "cargo clippy --workspace --tests --fix", // @oss-only
"group": {
"kind": "build",
@ -79,7 +79,7 @@
{
"label": "ELP: run tests on workspace",
"type": "shell",
// @fb-only: "command": "./meta/cargo.sh test --workspace",
// @fb-only
"command": "cargo test --workspace", // @oss-only
"group": {
"kind": "build",


@ -87,7 +87,6 @@ pub trait WithFixture: Default + SourceDatabaseExt + 'static {
let (fixture, change) = ChangeFixture::parse(fixture_str);
let mut db = Self::default();
change.apply(&mut db, &|path| fixture.resolve_file_id(path));
fixture.validate(&db);
(db, fixture)
}
}
@ -102,7 +101,6 @@ pub struct ChangeFixture {
pub diagnostics_enabled: DiagnosticsEnabled,
pub tags: FxHashMap<FileId, Vec<(TextRange, Option<String>)>>,
pub annotations: FxHashMap<FileId, Vec<(TextRange, String)>>,
pub expect_parse_errors: bool,
}
struct Builder {
@ -174,7 +172,6 @@ impl ChangeFixture {
let FixtureWithProjectMeta {
fixture,
mut diagnostics_enabled,
expect_parse_errors,
} = fixture_with_meta.clone();
let builder = Builder::new(diagnostics_enabled.clone());
@ -347,7 +344,6 @@ impl ChangeFixture {
diagnostics_enabled,
tags,
annotations,
expect_parse_errors,
},
change,
project,
@ -409,64 +405,6 @@ impl ChangeFixture {
.get(&VfsPath::from(path.clone()))
.cloned()
}
/// Validate all files in the fixture for syntax errors.
/// Panics with context if any syntax errors are found.
/// Skips validation if `expect_parse_errors` is set to true.
#[track_caller]
pub fn validate<DB: SourceDatabaseExt>(&self, db: &DB) {
if self.expect_parse_errors {
return;
}
let mut errors_found = Vec::new();
for file_id in &self.files {
let parse = db.parse(*file_id);
let errors = parse.errors();
if !errors.is_empty() {
let path = self
.files_by_path
.iter()
.find_map(|(vfs_path, id)| {
if id == file_id {
Some(
vfs_path
.as_path()
.map(|p| p.to_string())
.unwrap_or_else(|| format!("{:?}", vfs_path)),
)
} else {
None
}
})
.unwrap_or_else(|| format!("FileId({:?})", file_id));
let file_text = SourceDatabaseExt::file_text(db, *file_id);
let tree = parse.tree();
errors_found.push((path, file_text.to_string(), errors.to_vec(), tree));
}
}
if !errors_found.is_empty() {
let mut message =
String::from("Fixture validation failed: syntax errors found in test fixture\n\n");
for (path, text, errors, tree) in errors_found {
message.push_str(&format!("File: {}\n", path));
message.push_str(&format!("Errors: {:?}\n", errors));
message.push_str(&format!("Content:\n{}\n", text));
message.push_str(&format!("Parse Tree:\n{:#?}\n", tree));
message.push_str("---\n");
}
message.push_str(
"If this is expected, add `//- expect_parse_errors` to the start of the fixture\n",
);
panic!("{}", message);
}
}
}
fn inc_file_id(file_id: &mut FileId) {


@ -32,7 +32,7 @@ mod module_index;
// Public API
pub mod fixture;
// @fb-only: mod meta_only;
// @fb-only
pub mod test_utils;
pub use change::Change;
pub use elp_project_model::AppType;
@ -476,7 +476,7 @@ static ref IGNORED_SOURCES: Vec<Regex> = {
let regexes: Vec<Vec<Regex>> = vec![
vec![Regex::new(r"^.*_SUITE_data/.+$").unwrap()],
//ignore sources goes here
// @fb-only: meta_only::ignored_sources_regexes()
// @fb-only
];
regexes.into_iter().flatten().collect::<Vec<Regex>>()
};


@ -8,8 +8,8 @@
* above-listed licenses.
*/
// @fb-only: /// Types as defined in https://www.internalfb.com/intern/wiki/Linting/adding-linters/#flow-type
// @fb-only: /// and https://www.internalfb.com/code/fbsource/[1238f73dac0efd4009443fee6a345a680dc9401b]/whatsapp/server/erl/tools/lint/arcanist.py?lines=17
// @fb-only
// @fb-only
use std::path::Path;
use serde::Serialize;


@ -72,17 +72,6 @@ pub struct ParseAllElp {
/// Report system memory usage and other statistics
#[bpaf(long("report-system-stats"))]
pub report_system_stats: bool,
/// Minimum severity level to report. Valid values: error, warning, weak_warning, information
#[bpaf(
argument("SEVERITY"),
complete(severity_completer),
fallback(None),
guard(
severity_guard,
"Please use error, warning, weak_warning, or information"
)
)]
pub severity: Option<String>,
}
#[derive(Clone, Debug, Bpaf)]
@ -155,6 +144,8 @@ pub struct EqwalizeAll {
/// Also eqwalize opted-in generated modules from project (deprecated)
#[bpaf(hide)]
pub include_generated: bool,
/// Also eqwalize test modules from project
pub include_tests: bool,
/// Exit with a non-zero status code if any errors are found
pub bail_on_error: bool,
/// Print statistics when done
@ -171,6 +162,8 @@ pub struct EqwalizeTarget {
/// Also eqwalize opted-in generated modules from application (deprecated)
#[bpaf(hide)]
pub include_generated: bool,
/// Also eqwalize test modules from project
pub include_tests: bool,
/// Exit with a non-zero status code if any errors are found
pub bail_on_error: bool,
/// target, like //erl/chatd/...
@ -189,6 +182,8 @@ pub struct EqwalizeApp {
/// Also eqwalize opted-in generated modules from project (deprecated)
#[bpaf(hide)]
pub include_generated: bool,
/// Also eqwalize test modules from project
pub include_tests: bool,
/// Run with rebar
pub rebar: bool,
/// Exit with a non-zero status code if any errors are found
@ -211,6 +206,8 @@ pub struct EqwalizeStats {
/// Also eqwalize opted-in generated modules from project (deprecated)
#[bpaf(hide)]
pub include_generated: bool,
/// Also eqwalize test modules from project
pub include_tests: bool,
/// If specified, use the provided CLI severity mapping instead of the default one
pub use_cli_severity: bool,
}
@ -786,25 +783,6 @@ fn format_guard(format: &Option<String>) -> bool {
}
}
fn severity_completer(_: &Option<String>) -> Vec<(String, Option<String>)> {
vec![
("error".to_string(), None),
("warning".to_string(), None),
("weak_warning".to_string(), None),
("information".to_string(), None),
]
}
fn severity_guard(severity: &Option<String>) -> bool {
match severity {
None => true,
Some(s) if s == "error" || s == "warning" || s == "weak_warning" || s == "information" => {
true
}
_ => false,
}
}
fn macros_completer(_: &Option<String>) -> Vec<(String, Option<String>)> {
vec![
("expand".to_string(), None),
@ -911,7 +889,7 @@ impl Lint {
/// To prevent flaky test results we allow disabling streaming when applying fixes
pub fn skip_stream_print(&self) -> bool {
self.apply_fix || self.no_stream
self.apply_fix && self.no_stream
}
}


@ -57,35 +57,6 @@ use crate::args::ParseAllElp;
use crate::reporting;
use crate::reporting::print_memory_usage;
fn parse_severity(severity: &str) -> Option<diagnostics::Severity> {
match severity {
"error" => Some(diagnostics::Severity::Error),
"warning" => Some(diagnostics::Severity::Warning),
"weak_warning" => Some(diagnostics::Severity::WeakWarning),
"information" => Some(diagnostics::Severity::Information),
_ => None,
}
}
fn severity_rank(severity: diagnostics::Severity) -> u8 {
match severity {
diagnostics::Severity::Error => 1,
diagnostics::Severity::Warning => 2,
diagnostics::Severity::WeakWarning => 3,
diagnostics::Severity::Information => 4,
}
}
fn meets_severity_threshold(
diag_severity: diagnostics::Severity,
min_severity: Option<diagnostics::Severity>,
) -> bool {
match min_severity {
None => true,
Some(min) => severity_rank(diag_severity) <= severity_rank(min),
}
}
#[derive(Debug)]
struct ParseResult {
name: String,
@ -176,19 +147,6 @@ pub fn parse_all(
let memory_end = MemoryUsage::now();
let memory_used = memory_end - memory_start;
let min_severity = args
.severity
.as_ref()
.and_then(|s| parse_severity(s.as_str()));
res.retain(|parse_result| {
parse_result
.diagnostics
.diagnostics_for(parse_result.file_id)
.iter()
.any(|diag| meets_severity_threshold(diag.severity, min_severity))
});
if res.is_empty() {
if args.is_format_normal() {
writeln!(cli, "No errors reported")?;
@ -207,7 +165,6 @@ pub fn parse_all(
for diags in res {
let mut combined: Vec<diagnostics::Diagnostic> =
diags.diagnostics.diagnostics_for(diags.file_id);
combined.retain(|diag| meets_severity_threshold(diag.severity, min_severity));
if args.is_format_normal() {
writeln!(cli, " {}: {}", diags.name, combined.len())?;
}


@ -186,7 +186,10 @@ pub fn do_eqwalize_all(
.par_bridge()
.progress_with(pb.clone())
.map_with(analysis.clone(), |analysis, (name, _source, file_id)| {
if analysis.should_eqwalize(file_id).unwrap() && !otp_file_to_ignore(analysis, file_id)
if analysis
.should_eqwalize(file_id, args.include_tests)
.unwrap()
&& !otp_file_to_ignore(analysis, file_id)
{
if args.stats {
add_stat(name.to_string());
@ -266,7 +269,9 @@ pub fn do_eqwalize_app(
.iter_own()
.filter_map(|(_name, _source, file_id)| {
if analysis.file_app_name(file_id).ok()? == Some(AppName(args.app.clone()))
&& analysis.should_eqwalize(file_id).unwrap()
&& analysis
.should_eqwalize(file_id, args.include_tests)
.unwrap()
&& !otp_file_to_ignore(analysis, file_id)
{
Some(file_id)
@ -334,7 +339,9 @@ pub fn eqwalize_target(
let vfs_path = VfsPath::from(src.clone());
if let Some((file_id, _)) = loaded.vfs.file_id(&vfs_path) {
at_least_one_found = true;
if analysis.should_eqwalize(file_id).unwrap()
if analysis
.should_eqwalize(file_id, args.include_tests)
.unwrap()
&& !otp_file_to_ignore(analysis, file_id)
{
file_ids.push(file_id);
@ -401,7 +408,9 @@ pub fn eqwalize_stats(
.par_bridge()
.progress_with(pb.clone())
.map_with(analysis.clone(), |analysis, (name, _source, file_id)| {
if analysis.should_eqwalize(file_id).expect("cancelled")
if analysis
.should_eqwalize(file_id, args.include_tests)
.expect("cancelled")
&& !otp_file_to_ignore(analysis, file_id)
{
analysis


@ -84,7 +84,7 @@ const REC_ARITY: u32 = 99;
const HEADER_ARITY: u32 = 100;
const FACTS_FILE: &str = "facts.json";
// @fb-only: mod meta_only;
// @fb-only
#[derive(Serialize, Debug, Eq, Hash, PartialEq, Clone)]
struct GleanFileId(u32);
@ -994,7 +994,7 @@ impl GleanIndexer {
.filter(|text| !text.is_empty())
});
// @fb-only: let exdoc_link = elp_ide::meta_only::exdoc_links::module_exdoc_link(&module, &sema);
// @fb-only
let exdoc_link: Option<String> = None; // @oss-only
ModuleFact::new(
@ -1532,7 +1532,7 @@ impl GleanIndexer {
}) => {
let def = macro_def.as_ref()?;
let mut resolved = Self::resolve_macro_v2(sema, def, source_file, ctx)?;
// @fb-only: meta_only::resolve_macro_expansion(sema, *expansion, ctx, &mut resolved);
// @fb-only
Some(resolved)
}
hir::AnyExpr::Pat(Pat::MacroCall { macro_def, .. })
@ -1560,7 +1560,7 @@ impl GleanIndexer {
vars: FxHashMap<&Location, &String>,
) -> Vec<VarDecl> {
let mut result = vec![];
if !db.is_eqwalizer_enabled(file_id) {
if !db.is_eqwalizer_enabled(file_id, false) {
return result;
}
let module_diagnostics = db.eqwalizer_diagnostics_by_project(project_id, vec![file_id]);
@ -1875,9 +1875,9 @@ impl GleanIndexer {
let source_file = sema.parse(file_id);
let range = Self::find_range(sema, ctx, &source_file, &expr_source)?;
// @fb-only: use elp_ide::meta_only::wam_links;
// @fb-only: let wam_ctx = wam_links::WamEventCtx::new(sema.db.upcast());
// @fb-only: let wam_url = wam_ctx.build_wam_link(name).map(|link| link.url());
// @fb-only
// @fb-only
// @fb-only
let wam_url = None; // @oss-only
Some(XRef {
@ -2335,10 +2335,10 @@ mod tests {
fn xref_types_test() {
let spec = r#"
//- /glean/app_glean/src/glean_module81.erl
-type small() :: {non_neg_integer() | infinity}.
-type small() :: #{non_neg_integer() | infinity}.
//- /glean/app_glean/src/glean_module8.erl
-type huuuge() :: {non_neg_integer() | infinity}.
-type huuuge() :: #{non_neg_integer() | infinity}.
-spec baz(
A :: huuuge(),
%% ^^^^^^ glean_module8/huuuge/0
@ -2393,10 +2393,10 @@ mod tests {
fn xref_types_v2_test() {
let spec = r#"
//- /glean/app_glean/src/glean_module81.erl
-type small() :: {non_neg_integer() | infinity}.
-type small() :: #{non_neg_integer() | infinity}.
//- /glean/app_glean/src/glean_module8.erl
-type huuuge() :: {non_neg_integer() | infinity}.
-type huuuge() :: #{non_neg_integer() | infinity}.
-spec baz(
A :: huuuge(),
%% ^^^^^^ glean_module8.erl/type/huuuge/0


@ -295,7 +295,7 @@ pub fn do_codemod(
let res;
let streamed_err_in_diag;
let mut any_diagnostics_printed = false;
let mut initial_diags = {
let initial_diags = {
// We put this in its own block so that analysis is
// freed before we apply lints. To apply lints
// recursively, we need to update the underlying
@ -394,54 +394,30 @@ pub fn do_codemod(
let mut err_in_diag = streamed_err_in_diag;
// At this point, the analysis variable from above is dropped
// When streaming is disabled (--no-stream) and we're not applying fixes,
// we need to print diagnostics now since they weren't printed during streaming
if args.no_stream && !args.apply_fix && !initial_diags.is_empty() {
let analysis = loaded.analysis();
let mut module_count = 0;
initial_diags.sort_by(|(a, _, _), (b, _, _)| a.cmp(b));
for result in &initial_diags {
let printed = print_diagnostic_result(
cli,
&analysis,
diagnostics_config,
args,
loaded,
&args.module,
&mut err_in_diag,
&mut module_count,
result,
)?;
any_diagnostics_printed = any_diagnostics_printed || printed;
// Print "No diagnostics reported" if no diagnostics were found after filtering
if !any_diagnostics_printed {
if args.is_format_normal() {
writeln!(cli, "No diagnostics reported")?;
}
}
// Handle apply_fix case separately since it needs to filter diagnostics anyway
if args.apply_fix {
if diagnostics_config.enabled.all_enabled() {
} else {
if args.apply_fix && diagnostics_config.enabled.all_enabled() {
bail!(
"We cannot apply fixes if all diagnostics enabled. Perhaps provide --diagnostic-filter"
);
}
let mut filtered_diags = {
let analysis = loaded.analysis();
filter_diagnostics(
&analysis,
&args.module,
Some(&diagnostics_config.enabled),
&initial_diags,
&FxHashSet::default(),
)?
};
if filtered_diags.is_empty() {
if args.is_format_normal() {
writeln!(cli, "No diagnostics reported")?;
}
} else {
if args.apply_fix && !diagnostics_config.enabled.all_enabled() {
let mut initial_diags = {
let analysis = loaded.analysis();
filter_diagnostics(
&analysis,
&args.module,
Some(&diagnostics_config.enabled),
&initial_diags,
&FxHashSet::default(),
)?
};
if args.skip_stream_print() {
filtered_diags.sort_by(|(a, _, _), (b, _, _)| a.cmp(b));
initial_diags.sort_by(|(a, _, _), (b, _, _)| a.cmp(b));
let module_count: &mut i32 = &mut 0;
let has_diagnostics: &mut bool = &mut false;
if args.is_format_json() {
@ -452,7 +428,7 @@ pub fn do_codemod(
&mut err_in_diag,
module_count,
has_diagnostics,
&filtered_diags,
&initial_diags,
)?;
} else {
{
@ -466,7 +442,7 @@ pub fn do_codemod(
&mut err_in_diag,
module_count,
has_diagnostics,
&filtered_diags,
&initial_diags,
)?;
// Analysis is dropped here
}
@ -480,7 +456,7 @@ pub fn do_codemod(
&mut loaded.vfs,
args,
&mut changed_files,
filtered_diags,
initial_diags,
);
// We handle the fix application result here, so
// the overall status of whether error-severity
@ -492,19 +468,8 @@ pub fn do_codemod(
writeln!(cli, "Apply fix failed: {err:#}").ok();
}
};
if err_in_diag {
bail!("Errors found")
}
}
} else {
// Non-apply-fix case: rely on any_diagnostics_printed which is set
// correctly based on filtered diagnostics during streaming/batch printing
if !any_diagnostics_printed {
if args.is_format_normal() {
writeln!(cli, "No diagnostics reported")?;
}
} else if err_in_diag {
if err_in_diag {
bail!("Errors found")
}
}


@ -40,7 +40,7 @@ mod erlang_service_cli;
mod explain_cli;
mod glean;
mod lint_cli;
// @fb-only: mod meta_only;
// @fb-only
mod reporting;
mod shell;
mod ssr_cli;
@ -110,7 +110,7 @@ fn setup_cli_telemetry(args: &Args) {
}
_ => {
// Initialize CLI telemetry, if used
// @fb-only: meta_only::initialize_telemetry();
// @fb-only
}
}
}
@ -288,7 +288,7 @@ mod tests {
let (_stdout, stderr, code) = elp(args_vec![
"parse-all",
"--project",
"../../test/test_projects/standard",
"../../test_projects/standard",
"--to",
tmp.path(),
]);
@ -306,7 +306,7 @@ mod tests {
fn parse_all_complete(project: &str) -> Result<i32> {
// Just check the command returns.
let project_path = format!("../../test/test_projects/{project}");
let project_path = format!("../../test_projects/{project}");
let tmp = Builder::new().prefix("elp_parse_all_").tempdir().unwrap();
let (_stdout, _stderr, code) = elp(args_vec![
"parse-all",
@ -443,34 +443,33 @@ mod tests {
})
.unwrap();
let exp_path = expect_file!(format!(
"../resources/test/{}/{}/{}.pretty",
project,
app,
module.as_str(),
));
let (stdout, _) = cli.to_strings();
let otp_version = OTP_VERSION.as_ref().expect("MISSING OTP VERSION");
let otp_version_regex =
regex::bytes::Regex::new(&format!("{}OTP([0-9]+)Only", "@")).unwrap();
regex::bytes::Regex::new(&format!("{}OTPVersionDependent", "@"))
.unwrap();
let contents = analysis.file_text(file_id).unwrap();
let otp_version_capture = otp_version_regex
.captures(&contents.as_bytes()[0..(2001.min(contents.len()))]);
if let Some((_, [otp_version_only])) =
otp_version_capture.map(|cap| cap.extract())
{
if otp_version_only == otp_version.as_bytes() {
assert_normalised_file(
exp_path,
&stdout,
project_path.into(),
false,
);
let otp_version_dependent = otp_version_regex
.is_match(&contents.as_bytes()[0..(2001.min(contents.len()))]);
let exp_path = {
if otp_version_dependent {
expect_file!(format!(
"../resources/test/{}/{}/{}-OTP-{}.pretty",
project,
app,
module.as_str(),
otp_version,
))
} else {
expect_file!(format!(
"../resources/test/{}/{}/{}.pretty",
project,
app,
module.as_str(),
))
}
} else {
assert_normalised_file(exp_path, &stdout, project_path.into(), false);
}
};
let (stdout, _) = cli.to_strings();
assert_normalised_file(exp_path, &stdout, project_path.into(), false);
}
}
EqwalizerDiagnostics::NoAst { module } => {
@ -605,7 +604,10 @@ mod tests {
fn eqwalize_target_diagnostics_match_snapshot_pretty() {
if cfg!(feature = "buck") {
simple_snapshot(
args_vec!["eqwalize-target", "//standard:app_a",],
args_vec![
"eqwalize-target",
"//whatsapp/elp/test_projects/standard:app_a",
],
"standard",
expect_file!("../resources/test/standard/eqwalize_target_diagnostics.pretty"),
true,
@ -669,24 +671,6 @@ mod tests {
);
}
#[test_case(false ; "rebar")]
#[test_case(true ; "buck")]
fn parse_all_diagnostics_severity(buck: bool) {
simple_snapshot_expect_error(
args_vec![
"parse-elp",
"--module",
"diagnostics",
"--severity",
"error"
],
"diagnostics",
expect_file!("../resources/test/diagnostics/parse_all_diagnostics_error.stdout"),
buck,
None,
);
}
#[test_case(false ; "rebar")]
#[test_case(true ; "buck")]
fn parse_elp_file_attribute(buck: bool) {
@ -970,9 +954,7 @@ mod tests {
assert!(tmp_file.clone().exists());
let content = fs::read_to_string(tmp_file).unwrap();
let mut buck_config = BuckConfig::default();
buck_config.buck_root = Some(AbsPathBuf::assert_utf8(
current_dir().unwrap().join(path_str.clone()),
));
buck_config.buck_root = Some(AbsPathBuf::assert_utf8(current_dir().unwrap()));
let prelude_cell = get_prelude_cell(&buck_config).expect("could not get prelude");
let prelude_cell = prelude_cell.strip_prefix("/").unwrap();
let content = content.replace(prelude_cell, "/[prelude]/");
@ -984,13 +966,38 @@ mod tests {
Some(AbsPathBuf::assert(Utf8PathBuf::from_path_buf(abs).unwrap()));
let content = normalise_prelude_path(content, buck_config);
let content = sort_json(&content);
expect![[r#"
{
"apps": [
{
"name": "test_exec",
"dir": "/[prelude]//erlang/common_test/test_exec/src",
"src_dirs": [
""
],
"extra_src_dirs": [],
"include_dirs": [],
"macros": {}
},
{
"name": "diagnostics_app_a",
"dir": "app_a",
"src_dirs": [
"src"
],
"extra_src_dirs": [],
"include_dirs": [
"include"
],
"macros": {
"COMMON_TEST": "true",
"TEST": "true"
}
},
{
"name": "app_a_SUITE",
"dir": "app_a/test",
"src_dirs": [],
"extra_src_dirs": [
""
],
@ -998,88 +1005,61 @@ mod tests {
"macros": {
"COMMON_TEST": "true",
"TEST": "true"
},
"name": "app_a_SUITE",
"src_dirs": []
}
},
{
"dir": "/[prelude]//erlang/common_test/test_exec/src",
"extra_src_dirs": [],
"include_dirs": [],
"macros": {},
"name": "test_exec",
"src_dirs": [
""
]
},
{
"dir": "/[prelude]//erlang/common_test/common",
"extra_src_dirs": [],
"include_dirs": [
"include"
],
"macros": {},
"name": "common",
"dir": "/[prelude]//erlang/common_test/common",
"src_dirs": [
"src"
]
},
{
"dir": "/[prelude]//erlang/common_test/cth_hooks/src",
"extra_src_dirs": [],
"include_dirs": [
""
],
"macros": {},
"name": "cth_hooks",
"src_dirs": [
""
]
},
{
"dir": "/[prelude]//erlang/shell/src",
"extra_src_dirs": [],
"include_dirs": [],
"macros": {},
"name": "buck2_shell_utils",
"src_dirs": [
""
]
},
{
"dir": "app_a",
"extra_src_dirs": [],
"include_dirs": [
"include"
],
"macros": {
"COMMON_TEST": "true",
"TEST": "true"
},
"name": "diagnostics_app_a",
"src_dirs": [
"src"
]
"macros": {}
},
{
"dir": "/[prelude]//erlang/common_test/test_binary/src",
"name": "cth_hooks",
"dir": "/[prelude]//erlang/common_test/cth_hooks/src",
"src_dirs": [
""
],
"extra_src_dirs": [],
"include_dirs": [
""
],
"macros": {}
},
{
"name": "buck2_shell_utils",
"dir": "/[prelude]//erlang/shell/src",
"src_dirs": [
""
],
"extra_src_dirs": [],
"include_dirs": [],
"macros": {},
"macros": {}
},
{
"name": "test_binary",
"dir": "/[prelude]//erlang/common_test/test_binary/src",
"src_dirs": [
""
]
},
{
"dir": "/[prelude]//erlang/common_test/test_cli_lib/src",
],
"extra_src_dirs": [],
"include_dirs": [],
"macros": {},
"macros": {}
},
{
"name": "test_cli_lib",
"dir": "/[prelude]//erlang/common_test/test_cli_lib/src",
"src_dirs": [
""
]
],
"extra_src_dirs": [],
"include_dirs": [],
"macros": {}
}
],
"deps": []
@ -1094,12 +1074,6 @@ mod tests {
content.replace(prelude_cell, "/[prelude]/")
}
fn sort_json(content: &str) -> String {
let mut json: serde_json::Value = serde_json::from_str(content).unwrap();
json.sort_all_objects();
serde_json::to_string_pretty(&json).unwrap()
}
#[test]
#[ignore]
fn build_info_json_buck_bxl_generated() {
@ -1113,7 +1087,7 @@ mod tests {
"--to",
tmp_file.clone(),
"--project",
path_str.clone()
path_str
];
let (stdout, stderr, code) = elp(args);
assert_eq!(
@ -1128,9 +1102,7 @@ mod tests {
assert!(tmp_file.clone().exists());
let content = fs::read_to_string(tmp_file).unwrap();
let mut buck_config = BuckConfig::default();
buck_config.buck_root = Some(AbsPathBuf::assert_utf8(
current_dir().unwrap().join(path_str.clone()),
));
buck_config.buck_root = Some(AbsPathBuf::assert_utf8(current_dir().unwrap()));
let prelude_cell = get_prelude_cell(&buck_config).expect("could not get prelude");
let prelude_cell = prelude_cell.strip_prefix("/").unwrap();
let content = content.replace(prelude_cell, "/[prelude]/");
@ -1454,7 +1426,7 @@ mod tests {
"lint",
"--experimental",
"--config-file",
"../../test/test_projects/linter/does_not_exist.toml"
"../../test_projects/linter/does_not_exist.toml"
],
"linter",
expect_file!("../resources/test/linter/parse_elp_lint_custom_config_invalid_output.stdout"),
@ -1466,7 +1438,7 @@ mod tests {
&[],
false,
Some(expect![[r#"
unable to read "../../test/test_projects/linter/does_not_exist.toml": No such file or directory (os error 2)
unable to read "../../test_projects/linter/does_not_exist.toml": No such file or directory (os error 2)
"#]]),
)
.expect("bad test");
@ -1482,7 +1454,7 @@ mod tests {
"lint",
"--experimental",
"--config-file",
"../../test/test_projects/linter/elp_lint_test1.toml"
"../../test_projects/linter/elp_lint_test1.toml"
],
"linter",
expect_file!("../resources/test/linter/parse_elp_lint_custom_config_output.stdout"),
@ -1508,7 +1480,7 @@ mod tests {
"lint",
"--experimental",
"--config-file",
"../../test/test_projects/linter/elp_lint_adhoc.toml",
"../../test_projects/linter/elp_lint_adhoc.toml",
"--module",
"app_b",
"--apply-fix",
@ -1539,7 +1511,7 @@ mod tests {
"--diagnostic-ignore",
"W0011",
"--config-file",
"../../test/test_projects/linter/elp_lint_test_ignore.toml"
"../../test_projects/linter/elp_lint_test_ignore.toml"
],
"linter",
expect_file!("../resources/test/linter/parse_elp_lint_ignore.stdout"),
@ -1583,7 +1555,7 @@ mod tests {
&[],
false,
Some(expect![[r#"
failed to read "../../test/test_projects/linter_bad_config/.elp_lint.toml":expected a right bracket, found an identifier at line 6 column 4
failed to read "../../test_projects/linter_bad_config/.elp_lint.toml":expected a right bracket, found an identifier at line 6 column 4
"#]]),
)
.expect("bad test");
@ -1601,20 +1573,6 @@ mod tests {
);
}
#[test_case(false ; "rebar")]
#[test_case(true ; "buck")]
fn lint_no_stream_produces_output(buck: bool) {
if otp::supports_eep66_sigils() {
simple_snapshot_expect_error(
args_vec!["lint", "--no-stream"],
"diagnostics",
expect_file!("../resources/test/diagnostics/lint_no_stream.stdout"),
buck,
None,
);
}
}
#[test_case(false ; "rebar")]
#[test_case(true ; "buck")]
fn lint_no_diagnostics_filter_all_enabled_json(buck: bool) {
@ -1649,7 +1607,7 @@ mod tests {
args_vec![
"lint",
"--config-file",
"../../test/test_projects/linter/elp_lint_test2.toml"
"../../test_projects/linter/elp_lint_test2.toml"
],
"linter",
expect_file!("../resources/test/linter/parse_elp_lint_explicit_enable_output.stdout"),
@ -1952,8 +1910,7 @@ mod tests {
simple_snapshot_expect_stderror(
args_vec!["lint",],
"buck_bad_config",
// @fb-only: expect_file!("../resources/test/buck_bad_config/bxl_error_message.stdout"),
expect_file!("../resources/test/buck_bad_config/bxl_error_message_oss.stdout"), // @oss-only
expect_file!("../resources/test/buck_bad_config/bxl_error_message.stdout"),
true,
None,
true,
@ -1968,7 +1925,7 @@ mod tests {
"lint",
"--no-stream"
"--config-file",
"../../test/test_projects/linter/elp_lint_warnings_as_errors.toml"
"../../test_projects/linter/elp_lint_warnings_as_errors.toml"
],
"linter",
expect_file!("../resources/test/linter/warnings_as_errors.stdout"),
@ -1983,7 +1940,7 @@ mod tests {
args_vec![
"lint",
"--config-file",
"../../test/test_projects/linter/elp_lint_custom_function_matches.toml",
"../../test_projects/linter/elp_lint_custom_function_matches.toml",
"--module",
"custom_function_matches"
],
@ -2000,7 +1957,7 @@ mod tests {
args_vec![
"lint",
"--config-file",
"../../test/test_projects/xref/elp_lint_unavailable_type.toml",
"../../test_projects/xref/elp_lint_unavailable_type.toml",
"--module",
"unavailable_type"
],
@ -2017,7 +1974,7 @@ mod tests {
args_vec![
"lint",
"--config-file",
"../../test/test_projects/linter/elp_lint_ssr_adhoc.toml",
"../../test_projects/linter/elp_lint_ssr_adhoc.toml",
],
"linter",
expect_file!("../resources/test/linter/ssr_ad_hoc.stdout"),
@ -2032,7 +1989,7 @@ mod tests {
args_vec![
"lint",
"--config-file",
"../../test/test_projects/linter/elp_lint_ssr_adhoc_parse_fail.toml",
"../../test_projects/linter/elp_lint_ssr_adhoc_parse_fail.toml",
],
"linter",
expect_file!("../resources/test/linter/ssr_ad_hoc_parse_fail.stdout"),
@ -2218,41 +2175,6 @@ mod tests {
#[test_case(false ; "rebar")]
#[test_case(true ; "buck")]
fn ssr_exclude_generated_by_default(buck: bool) {
simple_snapshot(
args_vec!["ssr", "--module", "erlang_diagnostics_errors_gen", "ok"],
"diagnostics",
expect_file!("../resources/test/diagnostics/ssr_exclude_generated.stdout"),
buck,
None,
);
}
#[test_case(false ; "rebar")]
#[test_case(true ; "buck")]
fn ssr_include_generated_when_requested(buck: bool) {
simple_snapshot(
args_vec![
"ssr",
"--module",
"erlang_diagnostics_errors_gen",
"--include-generated",
"ok"
],
"diagnostics",
expect_file!("../resources/test/diagnostics/ssr_include_generated.stdout"),
buck,
None,
);
}
#[test_case(false ; "rebar")]
#[test_case(true ; "buck")]
// We cannot use `should_panic` for this test, since the OSS CI runs with the `buck` feature disabled.
// When this happens the test is translated into a no-op, which does not panic.
// TODO(T248259687): Switch to should_panic once Buck2 is available on GitHub.
// Or remove the ignore once hierarchical support is implemented.
#[ignore] // Support for hierarchical config is not implemented yet
fn lint_hierarchical_config_basic(buck: bool) {
simple_snapshot_sorted(
args_vec!["lint", "--read-config"],
@ -2263,18 +2185,6 @@ mod tests {
);
}
#[test_case(false ; "rebar")]
#[test_case(true ; "buck")]
fn lint_linter_config_basic(buck: bool) {
simple_snapshot_sorted(
args_vec!["lint", "--read-config", "--no-stream"],
"linter_config",
expect_file!("../resources/test/linter_config/basic.stdout"),
buck,
None,
);
}
#[test_case(false ; "rebar")]
#[test_case(true ; "buck")]
fn eqwalizer_tests_check(buck: bool) {
@ -3136,7 +3046,7 @@ mod tests {
}
fn project_path(project: &str) -> String {
format!("../../test/test_projects/{project}")
format!("../../test_projects/{project}")
}
fn strip_ansi_codes(s: &str) -> String {


@ -227,6 +227,9 @@ impl Reporter for JsonReporter<'_> {
diagnostics: &[EqwalizerDiagnostic],
) -> Result<()> {
let line_index = self.analysis.line_index(file_id)?;
// Pass include_tests = false so that errors for test files that are not opted-in are tagged as
// arc_types::Severity::Disabled and don't break CI.
let eqwalizer_enabled = self.analysis.is_eqwalizer_enabled(file_id, false).unwrap();
let file_path = &self.loaded.vfs.file_path(file_id);
let root_path = &self
.analysis
@ -235,8 +238,12 @@ impl Reporter for JsonReporter<'_> {
.root_dir;
let relative_path = get_relative_path(root_path, file_path);
for diagnostic in diagnostics {
let diagnostic =
convert::eqwalizer_to_arc_diagnostic(diagnostic, &line_index, relative_path);
let diagnostic = convert::eqwalizer_to_arc_diagnostic(
diagnostic,
&line_index,
relative_path,
eqwalizer_enabled,
);
let diagnostic = serde_json::to_string(&diagnostic)?;
writeln!(self.cli, "{diagnostic}")?;
}


@ -157,9 +157,10 @@ impl ShellCommand {
}
"eqwalize-app" => {
let include_generated = options.contains(&"--include-generated");
let include_tests = options.contains(&"--include-tests");
if let Some(other) = options
.into_iter()
.find(|&opt| opt != "--include-generated")
.find(|&opt| opt != "--include-generated" && opt != "--include-tests")
{
return Err(ShellError::UnexpectedOption(
"eqwalize-app".into(),
@ -176,6 +177,7 @@ impl ShellCommand {
rebar,
app: app.into(),
include_generated,
include_tests,
bail_on_error: false,
})));
}
@ -183,9 +185,10 @@ impl ShellCommand {
}
"eqwalize-all" => {
let include_generated = options.contains(&"--include-generated");
let include_tests = options.contains(&"--include-tests");
if let Some(other) = options
.into_iter()
.find(|&opt| opt != "--include-generated")
.find(|&opt| opt != "--include-generated" && opt != "--include-tests")
{
return Err(ShellError::UnexpectedOption(
"eqwalize-all".into(),
@ -201,6 +204,7 @@ impl ShellCommand {
rebar,
format: None,
include_generated,
include_tests,
bail_on_error: false,
stats: false,
list_modules: false,
@ -222,8 +226,10 @@ COMMANDS:
eqwalize <modules> Eqwalize specified modules
--clause-coverage Use experimental clause coverage checker
eqwalize-all Eqwalize all modules in the current project
--include-tests Also eqwalize test modules from project
--clause-coverage Use experimental clause coverage checker
eqwalize-app <app> Eqwalize all modules in specified application
--include-tests Also eqwalize test modules from project
--clause-coverage Use experimental clause coverage checker
";


@ -401,9 +401,6 @@ fn do_parse_one(
name: &str,
args: &Ssr,
) -> Result<Option<(String, FileId, Vec<diagnostics::Diagnostic>)>> {
if !args.include_generated && db.is_generated(file_id)? {
return Ok(None);
}
if !args.include_tests && db.is_test_suite_or_test_helper(file_id)?.unwrap_or(false) {
return Ok(None);
}


@ -30,7 +30,7 @@ use serde::de::DeserializeOwned;
use serde_json::json;
use crate::from_json;
// @fb-only: use crate::meta_only;
// @fb-only
// Defines the server-side configuration of ELP. We generate *parts*
// of VS Code's `package.json` config from this.
@ -180,7 +180,7 @@ impl Config {
return;
}
self.data = ConfigData::from_json(json);
// @fb-only: meta_only::harmonise_gks(self);
// @fb-only
}
pub fn update_gks(&mut self, json: serde_json::Value) {


@ -126,11 +126,18 @@ pub fn eqwalizer_to_arc_diagnostic(
d: &EqwalizerDiagnostic,
line_index: &LineIndex,
relative_path: &Path,
eqwalizer_enabled: bool,
) -> arc_types::Diagnostic {
let pos = position(line_index, d.range.start());
let line_num = pos.line + 1;
let character = Some(pos.character + 1);
let severity = arc_types::Severity::Error;
let severity = if eqwalizer_enabled {
arc_types::Severity::Error
} else {
// We use Severity::Disabled so that diagnostics are reported in cont lint
// but not in CI.
arc_types::Severity::Disabled
};
// formatting: https://fburl.com/max_wiki_link_to_phabricator_rich_text
let explanation = match &d.explanation {
Some(s) => format!("```\n{s}\n```"),


@ -37,7 +37,7 @@ pub mod line_endings;
pub mod lsp_ext;
mod mem_docs;
pub mod memory_usage;
// @fb-only: mod meta_only;
// @fb-only
mod op_queue;
mod project_loader;
pub mod reload;
@ -108,7 +108,7 @@ pub fn otp_file_to_ignore(db: &Analysis, file_id: FileId) -> bool {
"redbug_dtop",
]
.iter()
// @fb-only: .chain(meta_only::FILES_TO_IGNORE.iter())
// @fb-only
.map(SmolStr::new)
.collect();
}


@ -1 +0,0 @@
Project Initialisation Failed: invalid or missing buck 2 configuration


@ -1,138 +0,0 @@
Reporting all diagnostics codes
Diagnostics reported:
app_a/src/app_a.erl:52:3-52:23::[Warning] [W0006] this statement has no effect
app_a/src/app_a.erl:3:10-3:21::[WeakWarning] [W0037] Unspecific include.
app_a/src/app_a.erl:27:3-27:9::[Warning] [W0017] Function 'foo:ok/0' is undefined.
app_a/src/app_a.erl:28:4-28:11::[Warning] [W0017] Function 'mod:foo/0' is undefined.
app_a/src/app_a.erl:72:4-72:11::[Warning] [W0017] Function 'foo:bar/2' is undefined.
app_a/src/app_a.erl:37:11-37:28::[Warning] [W0017] Function 'mod_name:fun_name/2' is undefined.
app_a/src/app_a.erl:58:11-58:24::[WeakWarning] [W0051] Binary string can be written using sigil syntax.
app_a/src/app_a.erl:4:1-4:41::[Warning] [W0020] Unused file: inets/include/httpd.hrl
app_a/src/app_a.erl:39:7-39:28::[Error] [L1267] variable 'A' shadowed in 'named fun'
app_a/src/app_a.erl:55:32-55:35::[Error] [L1295] type uri/0 undefined
app_a/src/app_a.erl:56:20-56:26::[Error] [L1295] type binary/1 undefined
app_a/src/app_a.erl:72:3-72:34::[Error] [L1252] record record undefined
app_a/src/app_a.erl:75:5-75:16::[Error] [L1252] record record undefined
app_a/src/app_a.erl:35:1-35:2::[Warning] [L1230] function g/1 is unused
app_a/src/app_a.erl:35:3-35:4::[Warning] [L1268] variable 'A' is unused
app_a/src/app_a.erl:36:3-36:4::[Warning] [L1268] variable 'F' is unused
app_a/src/app_a.erl:37:3-37:4::[Warning] [L1268] variable 'G' is unused
app_a/src/app_a.erl:38:3-38:4::[Warning] [L1268] variable 'H' is unused
app_a/src/app_a.erl:39:3-39:4::[Warning] [L1268] variable 'I' is unused
app_a/src/app_a.erl:39:7-39:28::[Warning] [L1268] variable 'A' is unused
app_a/src/app_a.erl:41:1-41:2::[Warning] [L1230] function h/0 is unused
app_a/src/app_a.erl:45:1-45:2::[Warning] [L1230] function i/0 is unused
app_a/src/app_a.erl:50:1-50:2::[Warning] [L1230] function j/2 is unused
app_a/src/app_a.erl:50:15-50:16::[Warning] [L1268] variable 'A' is unused
app_a/src/app_a.erl:50:23-50:24::[Warning] [L1268] variable 'B' is unused
app_a/src/app_a.erl:55:1-55:46::[Warning] [L1296] type session(_) is unused
app_a/src/app_a.erl:55:1-55:46::[Warning] [L1313] opaque type session(_) is not exported
app_a/src/app_a.erl:56:7-56:13::[Warning] [L1296] type source(_) is unused
app_a/src/app_a.erl:58:1-58:4::[Warning] [L1230] function map/2 is unused
app_a/src/app_a.erl:60:1-60:9::[Warning] [L1230] function with_dot/0 is unused
app_a/src/app_a.erl:62:1-62:9::[Warning] [L1230] function lang_dir/1 is unused
app_a/src/app_a.erl:66:1-66:7::[Warning] [L1230] function escape/1 is unused
app_a/src/app_a.erl:66:13-66:17::[Warning] [L1268] variable 'T' is unused
app_a/src/app_a.erl:67:9-67:25::[Warning] [L1260] record all_configs_file is unused
app_a/src/app_a.erl:71:1-71:2::[Warning] [L1230] function k/0 is unused
app_a/src/app_a.erl:74:1-74:2::[Warning] [L1230] function l/1 is unused
app_a/src/app_a.erl:77:1-77:2::[Warning] [L1230] function m/0 is unused
app_a/src/broken_parse_trans.erl:10:21-10:22::[Error] [L1256] field b undefined in record a
app_a/src/broken_parse_trans.erl:10:32-10:33::[Error] [L1262] variable 'B' is unbound
app_a/src/cascading.erl:9:5-9:6::[Error] [W0004] Missing ')'
3:10-3:15: function foo/0 undefined
6:10-6:15: function foo/0 undefined
8:7-8:10: spec for undefined function foo/0
app_a/src/diagnostics.erl:3:10-3:27::[WeakWarning] [W0037] Unspecific include.
app_a/src/diagnostics.erl:4:10-4:34::[WeakWarning] [W0037] Unspecific include.
app_a/src/diagnostics.erl:12:8-12:12::[Warning] [W0060] Match on a bound variable
app_a/src/diagnostics.erl:4:1-4:36::[Error] [L0000] Issue in included file
[app_a/include/broken_diagnostics.hrl] 1:8-1:15: P1702: bad attribute
[app_a/include/broken_diagnostics.hrl] 3:6-3:15: P1702: bad attribute
app_a/src/diagnostics.erl:6:31-6:45::[Error] [L1295] type undefined_type/0 undefined
app_a/src/diagnostics.erl:7:1-7:5::[Warning] [L1230] function main/1 is unused
app_a/src/diagnostics.erl:10:1-10:4::[Warning] [L1230] function foo/0 is unused
app_a/src/lint_recursive.erl:23:5-23:14::[Warning] [W0006] this statement has no effect
app_a/src/lint_recursive.erl:6:5-6:7::[Warning] [W0006] this statement has no effect
app_a/src/lint_recursive.erl:14:5-14:12::[Warning] [L1268] variable 'Config1' is unused
app_a/src/lint_recursive.erl:19:5-19:12::[Warning] [L1268] variable 'Config1' is unused
app_a/src/lints.erl:5:1-5:14::[Error] [P1700] head mismatch 'head_mismatcX' vs 'head_mismatch'
4:1-4:14: Mismatched clause name
app_a/src/lints.erl:4:22-4:23::[Warning] [W0018] Unexpected ';'
app_a/src/lints.erl:2:10-2:25::[Error] [L1227] function head_mismatch/1 undefined
app_a/src/otp27_docstrings.erl:34:9-34:24::[Warning] [W0002] Unused macro (THIS_IS_THE_END)
app_a/src/otp27_docstrings.erl:24:5-24:6::[Warning] [W0060] Match on a bound variable
app_a/src/otp27_docstrings.erl:30:5-30:6::[Warning] [W0060] Match on a bound variable
app_a/src/otp27_sigils.erl:11:6-11:24::[Warning] [W0006] this statement has no effect
app_a/src/otp27_sigils.erl:12:5-12:24::[Warning] [W0006] this statement has no effect
app_a/src/otp27_sigils.erl:13:5-13:24::[Warning] [W0006] this statement has no effect
app_a/src/otp27_sigils.erl:14:5-14:24::[Warning] [W0006] this statement has no effect
app_a/src/otp27_sigils.erl:15:5-15:24::[Warning] [W0006] this statement has no effect
app_a/src/otp27_sigils.erl:17:6-17:24::[Warning] [W0006] this statement has no effect
app_a/src/otp27_sigils.erl:18:5-18:24::[Warning] [W0006] this statement has no effect
app_a/src/otp27_sigils.erl:19:5-19:24::[Warning] [W0006] this statement has no effect
app_a/src/otp27_sigils.erl:20:5-20:24::[Warning] [W0006] this statement has no effect
app_a/src/otp27_sigils.erl:21:5-21:24::[Warning] [W0006] this statement has no effect
app_a/src/otp27_sigils.erl:23:6-23:24::[Warning] [W0006] this statement has no effect
app_a/src/otp27_sigils.erl:24:5-24:24::[Warning] [W0006] this statement has no effect
app_a/src/otp27_sigils.erl:25:5-25:24::[Warning] [W0006] this statement has no effect
app_a/src/otp27_sigils.erl:26:5-26:24::[Warning] [W0006] this statement has no effect
app_a/src/otp27_sigils.erl:27:5-27:24::[Warning] [W0006] this statement has no effect
app_a/src/otp27_sigils.erl:29:6-29:24::[Warning] [W0006] this statement has no effect
app_a/src/otp27_sigils.erl:30:5-30:24::[Warning] [W0006] this statement has no effect
app_a/src/otp27_sigils.erl:31:5-31:24::[Warning] [W0006] this statement has no effect
app_a/src/otp27_sigils.erl:32:5-32:24::[Warning] [W0006] this statement has no effect
app_a/src/otp27_sigils.erl:33:5-33:24::[Warning] [W0006] this statement has no effect
app_a/src/otp27_sigils.erl:35:6-35:24::[Warning] [W0006] this statement has no effect
app_a/src/otp27_sigils.erl:36:5-36:24::[Warning] [W0006] this statement has no effect
app_a/src/otp27_sigils.erl:37:5-37:24::[Warning] [W0006] this statement has no effect
app_a/src/otp27_sigils.erl:38:5-38:24::[Warning] [W0006] this statement has no effect
app_a/src/otp27_sigils.erl:39:5-39:24::[Warning] [W0006] this statement has no effect
app_a/src/otp27_sigils.erl:41:6-41:24::[Warning] [W0006] this statement has no effect
app_a/src/otp27_sigils.erl:42:5-42:24::[Warning] [W0006] this statement has no effect
app_a/src/otp27_sigils.erl:43:5-43:24::[Warning] [W0006] this statement has no effect
app_a/src/otp27_sigils.erl:44:5-44:24::[Warning] [W0006] this statement has no effect
app_a/src/otp27_sigils.erl:45:5-45:24::[Warning] [W0006] this statement has no effect
app_a/src/otp27_sigils.erl:47:6-47:24::[Warning] [W0006] this statement has no effect
app_a/src/otp27_sigils.erl:48:5-48:24::[Warning] [W0006] this statement has no effect
app_a/src/otp27_sigils.erl:49:5-49:24::[Warning] [W0006] this statement has no effect
app_a/src/otp27_sigils.erl:50:5-50:24::[Warning] [W0006] this statement has no effect
app_a/src/otp27_sigils.erl:51:5-51:24::[Warning] [W0006] this statement has no effect
app_a/src/otp27_sigils.erl:53:6-53:24::[Warning] [W0006] this statement has no effect
app_a/src/otp27_sigils.erl:53:6-53:24::[Warning] [W0006] this statement has no effect
app_a/src/otp27_sigils.erl:54:5-54:24::[Warning] [W0006] this statement has no effect
app_a/src/otp27_sigils.erl:54:5-54:24::[Warning] [W0006] this statement has no effect
app_a/src/otp27_sigils.erl:55:5-55:24::[Warning] [W0006] this statement has no effect
app_a/src/otp27_sigils.erl:55:5-55:24::[Warning] [W0006] this statement has no effect
app_a/src/otp27_sigils.erl:56:5-56:24::[Warning] [W0006] this statement has no effect
app_a/src/otp27_sigils.erl:57:5-57:24::[Warning] [W0006] this statement has no effect
app_a/src/otp27_sigils.erl:59:6-59:24::[Warning] [W0006] this statement has no effect
app_a/src/otp27_sigils.erl:60:5-60:24::[Warning] [W0006] this statement has no effect
app_a/src/otp27_sigils.erl:61:5-61:24::[Warning] [W0006] this statement has no effect
app_a/src/otp27_sigils.erl:62:5-62:24::[Warning] [W0006] this statement has no effect
app_a/src/otp27_sigils.erl:63:5-63:24::[Warning] [W0006] this statement has no effect
app_a/src/otp27_sigils.erl:65:6-65:24::[Warning] [W0006] this statement has no effect
app_a/src/otp27_sigils.erl:66:5-66:24::[Warning] [W0006] this statement has no effect
app_a/src/otp27_sigils.erl:67:5-67:24::[Warning] [W0006] this statement has no effect
app_a/src/otp27_sigils.erl:68:5-68:24::[Warning] [W0006] this statement has no effect
app_a/src/otp27_sigils.erl:69:5-69:24::[Warning] [W0006] this statement has no effect
app_a/src/otp27_sigils.erl:76:5-79:8::[Warning] [W0006] this statement has no effect
app_a/src/otp27_sigils.erl:76:5-79:8::[Warning] [W0006] this statement has no effect
app_a/src/otp27_sigils.erl:80:5-84:9::[Warning] [W0006] this statement has no effect
app_a/src/otp27_sigils.erl:80:5-84:9::[Warning] [W0006] this statement has no effect
app_a/src/otp27_sigils.erl:85:5-89:10::[Warning] [W0006] this statement has no effect
app_a/src/otp27_sigils.erl:85:5-89:10::[Warning] [W0006] this statement has no effect
app_a/src/otp27_sigils.erl:90:5-94:11::[Warning] [W0006] this statement has no effect
app_a/src/otp27_sigils.erl:95:5-99:12::[Warning] [W0006] this statement has no effect
app_a/src/otp27_sigils.erl:102:5-102:24::[WeakWarning] [W0051] Binary string can be written using sigil syntax.
app_a/src/otp27_sigils.erl:128:9-128:24::[Warning] [W0002] Unused macro (THIS_IS_THE_END)
app_a/src/otp27_sigils.erl:112:4-112:5::[Error] [P1711] syntax error before: X
4:15-4:18: function g/0 undefined
74:7-74:8: spec for undefined function g/0
app_a/src/otp27_sigils.erl:71:5-71:6::[Warning] [L1268] variable 'X' is unused
app_a/src/otp_7655.erl:5:1-5:28::[Error] [L1201] no module definition
app_a/src/parse_error_a_cascade.erl:10:20-11:1::[Error] [W0004] Missing 'atom'
6:6-6:11: function bar/0 undefined
app_a/src/suppressed.erl:8:5-8:9::[Warning] [L1268] variable 'Life' is unused
app_a/src/syntax.erl:5:46-5:47::[Error] [P1711] syntax error before: ')'
app_a/src/syntax.erl:11:9-11:10::[Error] [W0004] Missing ')'


@ -1,10 +1,9 @@
module specified: diagnostics
Diagnostics reported in 1 modules:
diagnostics: 7
diagnostics: 6
2:9-2:26::[Hint] [W0037] Unspecific include.
3:0-3:35::[Error] [L0000] Issue in included file
3:9-3:33::[Hint] [W0037] Unspecific include.
5:30-5:44::[Error] [L1295] type undefined_type/0 undefined
6:0-6:4::[Warning] [L1230] function main/1 is unused
9:0-9:3::[Warning] [L1230] function foo/0 is unused
11:7-11:11::[Warning] [W0060] Match on a bound variable


@ -1,5 +0,0 @@
module specified: diagnostics
Diagnostics reported in 1 modules:
diagnostics: 2
3:0-3:35::[Error] [L0000] Issue in included file
5:30-5:44::[Error] [L1295] type undefined_type/0 undefined


@ -4,4 +4,3 @@
{"path":"app_a/src/diagnostics.erl","line":6,"char":31,"code":"ELP","severity":"error","name":"L1295 (L1295)","original":null,"replacement":null,"description":"type undefined_type/0 undefined\n\nFor more information see: /erlang-error-index/l/L1295","docPath":null}
{"path":"app_a/src/diagnostics.erl","line":7,"char":1,"code":"ELP","severity":"warning","name":"L1230 (L1230)","original":null,"replacement":null,"description":"function main/1 is unused\n\nFor more information see: /erlang-error-index/l/L1230","docPath":null}
{"path":"app_a/src/diagnostics.erl","line":10,"char":1,"code":"ELP","severity":"warning","name":"L1230 (L1230)","original":null,"replacement":null,"description":"function foo/0 is unused\n\nFor more information see: /erlang-error-index/l/L1230","docPath":null}
{"path":"app_a/src/diagnostics.erl","line":12,"char":8,"code":"ELP","severity":"warning","name":"W0060 (bound_var_in_lhs)","original":null,"replacement":null,"description":"Match on a bound variable\n\nFor more information see: /erlang-error-index/w/W0060","docPath":"website/docs/erlang-error-index/w/W0060.md"}


@ -1,6 +1,4 @@
module specified: otp27_docstrings
Diagnostics reported in 1 modules:
otp27_docstrings: 3
23:4-23:5::[Warning] [W0060] Match on a bound variable
29:4-29:5::[Warning] [W0060] Match on a bound variable
otp27_docstrings: 1
33:8-33:23::[Warning] [W0002] Unused macro (THIS_IS_THE_END)


@ -1,2 +0,0 @@
module specified: erlang_diagnostics_errors_gen
No matches found


@ -1,5 +0,0 @@
module specified: erlang_diagnostics_errors_gen
erlang_diagnostics_errors_gen: 1
6:5-6:7::[WeakWarning] [ad-hoc: ssr-match] SSR pattern matched: ssr: ok.
Matches found in 1 modules


@ -1,10 +1,11 @@
Usage: [--project PROJECT] [--as PROFILE] [[--format FORMAT]] [--rebar] [--bail-on-error] [--stats] [--list-modules]
Usage: [--project PROJECT] [--as PROFILE] [[--format FORMAT]] [--rebar] [--include-tests] [--bail-on-error] [--stats] [--list-modules]
Available options:
--project <PROJECT> Path to directory with project, or to a JSON file (defaults to `.`)
--as <PROFILE> Rebar3 profile to pickup (default is test)
--format <FORMAT> Show diagnostics in JSON format
--rebar Run with rebar
--include-tests Also eqwalize test modules from project
--bail-on-error Exit with a non-zero status code if any errors are found
--stats Print statistics when done
--list-modules When printing statistics, include the list of modules parsed


@ -1,4 +1,4 @@
Usage: [--project PROJECT] [--as PROFILE] [--rebar] [--bail-on-error] <APP>
Usage: [--project PROJECT] [--as PROFILE] [--include-tests] [--rebar] [--bail-on-error] <APP>
Available positional items:
<APP> app name
@ -6,6 +6,7 @@ Available positional items:
Available options:
--project <PROJECT> Path to directory with project, or to a JSON file (defaults to `.`)
--as <PROFILE> Rebar3 profile to pickup (default is test)
--include-tests Also eqwalize test modules from project
--rebar Run with rebar
--bail-on-error Exit with a non-zero status code if any errors are found
-h, --help Prints help information


@ -1,9 +1,10 @@
Usage: [--project PROJECT] [--bail-on-error] <TARGET>
Usage: [--project PROJECT] [--include-tests] [--bail-on-error] <TARGET>
Available positional items:
<TARGET> target, like //erl/chatd/...
Available options:
--project <PROJECT> Path to directory with project, or to a JSON file (defaults to `.`)
--include-tests Also eqwalize test modules from project
--bail-on-error Exit with a non-zero status code if any errors are found
-h, --help Prints help information


@ -0,0 +1,14 @@
error: incorrect_return_type_in_cb_implementation (See https://fb.me/eqwalizer_errors#incorrect_return_type_in_cb_implementation)
┌─ check/src/callbacks3_neg.erl:12:1
12 │ -behavior(gen_server).
│ ^^^^^^^^^^^^^^^^^^^^^
│ │
│ Incorrect return type for implementation of gen_server:handle_cast/2. Expected: {'noreply', term()} | {'noreply', term(), timeout() | 'hibernate' | {'continue', term()}} | {'stop', term(), term()}, Got: 'wrong_ret'.
'wrong_ret' is not compatible with {'noreply', term()} | {'noreply', term(), timeout() | 'hibernate' | {'continue', term()}} | {'stop', term(), term()}
because
'wrong_ret' is not compatible with {'noreply', term()}
1 ERROR


@ -0,0 +1,14 @@
error: incorrect_return_type_in_cb_implementation (See https://fb.me/eqwalizer_errors#incorrect_return_type_in_cb_implementation)
┌─ check/src/callbacks3_neg.erl:12:1
12 │ -behavior(gen_server).
│ ^^^^^^^^^^^^^^^^^^^^^
│ │
│ Incorrect return type for implementation of gen_server:handle_cast/2. Expected: {'noreply', term()} | {'noreply', term(), timeout() | 'hibernate' | {'continue', term()}} | {'stop', term(), term()}, Got: 'wrong_ret'.
'wrong_ret' is not compatible with {'noreply', term()} | {'noreply', term(), timeout() | 'hibernate' | {'continue', term()}} | {'stop', term(), term()}
because
'wrong_ret' is not compatible with {'noreply', term()}
1 ERROR


@ -0,0 +1,14 @@
error: incorrect_return_type_in_cb_implementation (See https://fb.me/eqwalizer_errors#incorrect_return_type_in_cb_implementation)
┌─ check/src/callbacks3_neg.erl:12:1
12 │ -behavior(gen_server).
│ ^^^^^^^^^^^^^^^^^^^^^
│ │
│ Incorrect return type for implementation of gen_server:handle_cast/2. Expected: {'noreply', term()} | {'noreply', term(), gen_server:action()} | {'stop', term(), term()}, Got: 'wrong_ret'.
'wrong_ret' is not compatible with {'noreply', term()} | {'noreply', term(), gen_server:action()} | {'stop', term(), term()}
because
'wrong_ret' is not compatible with {'noreply', term()}
1 ERROR


@ -1,27 +0,0 @@
error: incorrect_return_type_in_cb_implementation (See https://fb.me/eqwalizer_errors#incorrect_return_type_in_cb_implementation)
┌─ check/src/callbacks3_neg.erl:13:1
13 │ -behavior(gen_server).
│ ^^^^^^^^^^^^^^^^^^^^^ Incorrect return type for implementation of gen_server:handle_cast/2.
Expected: {'noreply', term()} | {'noreply', term(), timeout() | 'hibernate' | {'continue', term()}} | {'stop', term(), term()}
Got: 'wrong_ret'
error: incorrect_return_type_in_cb_implementation (See https://fb.me/eqwalizer_errors#incorrect_return_type_in_cb_implementation)
┌─ check/src/callbacks3_neg.erl:13:1
13 │ -behavior(gen_server).
│ ^^^^^^^^^^^^^^^^^^^^^
│ │
│ Incorrect return type for implementation of gen_server:handle_info/2.
Expected: {'noreply', term()} | {'noreply', term(), timeout() | 'hibernate' | {'continue', term()}} | {'stop', term(), term()}
Got: {'noreply', 'ok', 'wrong_atom'}
Because in the expression's type:
{ 'noreply', 'ok',
Here the type is: 'wrong_atom'
Context expects type: 'infinity' | number() | 'hibernate' | {'continue', term()}
No candidate matches in the expected union.
}
2 ERRORS

File diff suppressed because it is too large

File diff suppressed because it is too large


@ -70,20 +70,4 @@ error: incompatible_types (See https://fb.me/eqwalizer_errors#incompatible_types
Expression has type: #{a := dynamic(), dynamic() => dynamic()}
Context expected type: 'err'
error: incompatible_types (See https://fb.me/eqwalizer_errors#incompatible_types)
┌─ eqwater/src/eqwater_maps.erl:70:29
70 │ (_, #{a := V}) -> is_ok(V)
│ ^
│ │
│ V.
Expression has type: 'ok' | 'err'
Context expected type: 'ok'
Because in the expression's type:
Here the type is a union type with some valid candidates: 'ok'
However the following candidate: 'err'
Differs from the expected type: 'ok'
6 ERRORS
5 ERRORS


@ -1,5 +1,6 @@
Diagnostics reported:
Reporting all diagnostics codes
app_a/src/app_a.erl:4:9-4:16::[Warning] [W0002] Unused macro (MACRO_B)
app_a/src/app_a.erl:6:1-6:5::[Warning] [L1230] function main/0 is unused
app_b/src/app_b.erl:4:9-4:16::[Warning] [W0002] Unused macro (MACRO_B)
app_b/src/app_b.erl:6:1-6:5::[Warning] [L1230] function main/0 is unused
app_b/src/app_b.erl:6:1-6:5::[Warning] [L1230] function main/0 is unused


@ -1,3 +1,3 @@
Diagnostics reported:
app_a/src/app_a.erl:9:6-9:7::[Warning] [W0010] this variable is unused
app_a/src/app_a_unused_param.erl:5:5-5:6::[Warning] [W0010] this variable is unused
app_a/src/app_a.erl:9:6-9:7::[Warning] [W0010] this variable is unused


@ -5,8 +5,6 @@
{"path":"app_a/src/app_a.erl","line":5,"char":5,"code":"ELP","severity":"warning","name":"W0011 (application_get_env)","original":null,"replacement":null,"description":"module `app_a` belongs to app `app_a`, but reads env for `misc`\n\nFor more information see: /erlang-error-index/w/W0011","docPath":"website/docs/erlang-error-index/w/W0011.md"}
{"path":"app_a/src/app_a.erl","line":8,"char":7,"code":"ELP","severity":"warning","name":"W0018 (unexpected_semi_or_dot)","original":null,"replacement":null,"description":"Unexpected ';'\n\nFor more information see: /erlang-error-index/w/W0018","docPath":"website/docs/erlang-error-index/w/W0018.md"}
{"path":"app_a/src/app_a.erl","line":9,"char":1,"code":"ELP","severity":"error","name":"P1700 (head_mismatch)","original":null,"replacement":null,"description":"head mismatch 'fooX' vs 'food'\n\nFor more information see: /erlang-error-index/p/P1700","docPath":null}
{"path":"app_a/src/app_a_ssr.erl","line":7,"char":6,"code":"ELP","severity":"warning","name":"W0060 (bound_var_in_lhs)","original":null,"replacement":null,"description":"Match on a bound variable\n\nFor more information see: /erlang-error-index/w/W0060","docPath":"website/docs/erlang-error-index/w/W0060.md"}
{"path":"app_a/src/app_a_ssr.erl","line":8,"char":6,"code":"ELP","severity":"warning","name":"W0060 (bound_var_in_lhs)","original":null,"replacement":null,"description":"Match on a bound variable\n\nFor more information see: /erlang-error-index/w/W0060","docPath":"website/docs/erlang-error-index/w/W0060.md"}
{"path":"app_a/src/app_a_unused_param.erl","line":5,"char":5,"code":"ELP","severity":"warning","name":"L1268 (L1268)","original":null,"replacement":null,"description":"variable 'X' is unused\n\nFor more information see: /erlang-error-index/l/L1268","docPath":null}
{"path":"app_a/src/custom_function_matches.erl","line":13,"char":5,"code":"ELP","severity":"warning","name":"W0017 (undefined_function)","original":null,"replacement":null,"description":"Function 'excluded:function/0' is undefined.\n\nFor more information see: /erlang-error-index/w/W0017","docPath":"website/docs/erlang-error-index/w/W0017.md"}
{"path":"app_a/src/custom_function_matches.erl","line":14,"char":5,"code":"ELP","severity":"warning","name":"W0017 (undefined_function)","original":null,"replacement":null,"description":"Function 'not_excluded:function/0' is undefined.\n\nFor more information see: /erlang-error-index/w/W0017","docPath":"website/docs/erlang-error-index/w/W0017.md"}

View file

@ -8,8 +8,6 @@ app_a/src/app_a.erl:20:1-20:4::[Warning] [L1230] function bat/2 is unused
app_a/src/app_a.erl:5:5-5:35::[Warning] [W0011] module `app_a` belongs to app `app_a`, but reads env for `misc`
app_a/src/app_a.erl:8:7-8:8::[Warning] [W0018] Unexpected ';'
app_a/src/app_a.erl:9:1-9:5::[Error] [P1700] head mismatch 'fooX' vs 'food'
app_a/src/app_a_ssr.erl:7:6-7:7::[Warning] [W0060] Match on a bound variable
app_a/src/app_a_ssr.erl:8:6-8:7::[Warning] [W0060] Match on a bound variable
app_a/src/app_a_unused_param.erl:5:5-5:6::[Warning] [L1268] variable 'X' is unused
app_a/src/custom_function_matches.erl:13:5-13:22::[Warning] [W0017] Function 'excluded:function/0' is undefined.
app_a/src/custom_function_matches.erl:14:5-14:26::[Warning] [W0017] Function 'not_excluded:function/0' is undefined.

View file

@ -8,8 +8,6 @@ app_a/src/app_a.erl:20:1-20:4::[Error] [L1230] function bat/2 is unused
app_a/src/app_a.erl:5:5-5:35::[Warning] [W0011] module `app_a` belongs to app `app_a`, but reads env for `misc`
app_a/src/app_a.erl:8:7-8:8::[Warning] [W0018] Unexpected ';'
app_a/src/app_a.erl:9:1-9:5::[Error] [P1700] head mismatch 'fooX' vs 'food'
app_a/src/app_a_ssr.erl:7:6-7:7::[Warning] [W0060] Match on a bound variable
app_a/src/app_a_ssr.erl:8:6-8:7::[Warning] [W0060] Match on a bound variable
app_a/src/app_a_unused_param.erl:5:5-5:6::[Error] [L1268] variable 'X' is unused
app_a/src/custom_function_matches.erl:13:5-13:22::[Warning] [W0017] Function 'excluded:function/0' is undefined.
app_a/src/custom_function_matches.erl:14:5-14:26::[Warning] [W0017] Function 'not_excluded:function/0' is undefined.

View file

@ -1,5 +0,0 @@
Diagnostics reported:
Reporting all diagnostics codes
app_a/src/app_a.erl:3:9-3:16::[Warning] [W0002] Unused macro (MACRO_A)
app_a/src/app_a.erl:4:9-4:14::[Warning] [L1260] record rec_a is unused
app_b/src/app_b.erl:3:9-3:16::[Warning] [W0002] Unused macro (MACRO_B)

View file

@ -1,4 +1,4 @@
Usage: [--project PROJECT] [--module MODULE] [--file ARG] [--to TO] [--no-diags] [--experimental] [--as PROFILE] [--dump-includes] [--rebar] [--include-generated] [--serial] [--use-cli-severity] [[--format FORMAT]] [--report-system-stats] [[--severity SEVERITY]]
Usage: [--project PROJECT] [--module MODULE] [--file ARG] [--to TO] [--no-diags] [--experimental] [--as PROFILE] [--dump-includes] [--rebar] [--include-generated] [--serial] [--use-cli-severity] [[--format FORMAT]] [--report-system-stats]
Available options:
--project <PROJECT> Path to directory with project, or to a JSON file (defaults to `.`)
@ -15,5 +15,4 @@ Available options:
--use-cli-severity If specified, use the provided CLI severity mapping instead of the default one
--format <FORMAT> Show diagnostics in JSON format
--report-system-stats Report system memory usage and other statistics
--severity <SEVERITY> Minimum severity level to report. Valid values: error, warning, weak_warning, information
-h, --help Prints help information

View file

@ -192,14 +192,6 @@ error: incompatible_types (See https://fb.me/eqwalizer_errors#incompatible_types
Expression has type: 'wrong_ret'
Context expected type: 'error'
error: incompatible_types (See https://fb.me/eqwalizer_errors#incompatible_types)
┌─ app_a/test/app_a_test_helpers_not_opted_in.erl:5:11
5 │ fail() -> error.
│ ^^^^^ 'error'.
Expression has type: 'error'
Context expected type: 'ok'
error: incompatible_types (See https://fb.me/eqwalizer_errors#incompatible_types)
┌─ app_b/src/app_b.erl:16:5
@ -208,4 +200,4 @@ error: incompatible_types (See https://fb.me/eqwalizer_errors#incompatible_types
Expression has type: [T]
Context expected type: T
21 ERRORS
20 ERRORS

View file

@ -17,5 +17,4 @@
{"path":"app_a/src/app_a_mod2.erl","line":22,"char":1,"code":"ELP","severity":"error","name":"eqWAlizer: type_alias_is_non_productive","original":null,"replacement":null,"description":"```lang=error,counterexample\n\nrecursive type invalid/0 is not productive\n```\n\n> [docs on `type_alias_is_non_productive`](https://fb.me/eqwalizer_errors#type_alias_is_non_productive)","docPath":null}
{"path":"app_a/src/app_a_mod2.erl","line":31,"char":9,"code":"ELP","severity":"error","name":"eqWAlizer: incompatible_types","original":"'an_atom'","replacement":null,"description":"```lang=error,counterexample\n`'an_atom'`.\n\nExpression has type: 'an_atom'\nContext expected type: number()\n```\n\n> [docs on `incompatible_types`](https://fb.me/eqwalizer_errors#incompatible_types)","docPath":null}
{"path":"app_a/test/app_a_test_helpers.erl","line":6,"char":11,"code":"ELP","severity":"error","name":"eqWAlizer: incompatible_types","original":"'wrong_ret'","replacement":null,"description":"```lang=error,counterexample\n`'wrong_ret'`.\n\nExpression has type: 'wrong_ret'\nContext expected type: 'error'\n```\n\n> [docs on `incompatible_types`](https://fb.me/eqwalizer_errors#incompatible_types)","docPath":null}
{"path":"app_a/test/app_a_test_helpers_not_opted_in.erl","line":5,"char":11,"code":"ELP","severity":"error","name":"eqWAlizer: incompatible_types","original":"'error'","replacement":null,"description":"```lang=error,counterexample\n`'error'`.\n\nExpression has type: 'error'\nContext expected type: 'ok'\n```\n\n> [docs on `incompatible_types`](https://fb.me/eqwalizer_errors#incompatible_types)","docPath":null}
{"path":"app_b/src/app_b.erl","line":16,"char":5,"code":"ELP","severity":"error","name":"eqWAlizer: incompatible_types","original":"L","replacement":null,"description":"```lang=error,counterexample\n`L`.\n\nExpression has type: [T]\nContext expected type: T\n```\n\n> [docs on `incompatible_types`](https://fb.me/eqwalizer_errors#incompatible_types)","docPath":null}

View file

@ -192,14 +192,6 @@ error: incompatible_types (See https://fb.me/eqwalizer_errors#incompatible_types
Expression has type: 'wrong_ret'
Context expected type: 'error'
error: incompatible_types (See https://fb.me/eqwalizer_errors#incompatible_types)
┌─ app_a/test/app_a_test_helpers_not_opted_in.erl:5:11
5 │ fail() -> error.
│ ^^^^^ 'error'.
Expression has type: 'error'
Context expected type: 'ok'
error: incompatible_types (See https://fb.me/eqwalizer_errors#incompatible_types)
┌─ app_b/src/app_b.erl:16:5
@ -208,4 +200,4 @@ error: incompatible_types (See https://fb.me/eqwalizer_errors#incompatible_types
Expression has type: [T]
Context expected type: T
21 ERRORS
20 ERRORS

View file

@ -17,5 +17,4 @@
{"path":"app_a/src/app_a_mod2.erl","line":22,"char":1,"code":"ELP","severity":"error","name":"eqWAlizer: type_alias_is_non_productive","original":null,"replacement":null,"description":"```lang=error,counterexample\n\nrecursive type invalid/0 is not productive\n```\n\n> [docs on `type_alias_is_non_productive`](https://fb.me/eqwalizer_errors#type_alias_is_non_productive)","docPath":null}
{"path":"app_a/src/app_a_mod2.erl","line":31,"char":9,"code":"ELP","severity":"error","name":"eqWAlizer: incompatible_types","original":"'an_atom'","replacement":null,"description":"```lang=error,counterexample\n`'an_atom'`.\n\nExpression has type: 'an_atom'\nContext expected type: number()\n```\n\n> [docs on `incompatible_types`](https://fb.me/eqwalizer_errors#incompatible_types)","docPath":null}
{"path":"app_a/test/app_a_test_helpers.erl","line":6,"char":11,"code":"ELP","severity":"error","name":"eqWAlizer: incompatible_types","original":"'wrong_ret'","replacement":null,"description":"```lang=error,counterexample\n`'wrong_ret'`.\n\nExpression has type: 'wrong_ret'\nContext expected type: 'error'\n```\n\n> [docs on `incompatible_types`](https://fb.me/eqwalizer_errors#incompatible_types)","docPath":null}
{"path":"app_a/test/app_a_test_helpers_not_opted_in.erl","line":5,"char":11,"code":"ELP","severity":"error","name":"eqWAlizer: incompatible_types","original":"'error'","replacement":null,"description":"```lang=error,counterexample\n`'error'`.\n\nExpression has type: 'error'\nContext expected type: 'ok'\n```\n\n> [docs on `incompatible_types`](https://fb.me/eqwalizer_errors#incompatible_types)","docPath":null}
{"path":"app_b/src/app_b.erl","line":16,"char":5,"code":"ELP","severity":"error","name":"eqWAlizer: incompatible_types","original":"L","replacement":null,"description":"```lang=error,counterexample\n`L`.\n\nExpression has type: [T]\nContext expected type: T\n```\n\n> [docs on `incompatible_types`](https://fb.me/eqwalizer_errors#incompatible_types)","docPath":null}

View file

@ -192,12 +192,4 @@ error: incompatible_types (See https://fb.me/eqwalizer_errors#incompatible_types
Expression has type: 'wrong_ret'
Context expected type: 'error'
error: incompatible_types (See https://fb.me/eqwalizer_errors#incompatible_types)
┌─ app_a/test/app_a_test_helpers_not_opted_in.erl:5:11
5 │ fail() -> error.
│ ^^^^^ 'error'.
Expression has type: 'error'
Context expected type: 'ok'
20 ERRORS
19 ERRORS

View file

@ -192,12 +192,4 @@ error: incompatible_types (See https://fb.me/eqwalizer_errors#incompatible_types
Expression has type: 'wrong_ret'
Context expected type: 'error'
error: incompatible_types (See https://fb.me/eqwalizer_errors#incompatible_types)
┌─ app_a/test/app_a_test_helpers_not_opted_in.erl:5:11
5 │ fail() -> error.
│ ^^^^^ 'error'.
Expression has type: 'error'
Context expected type: 'ok'
20 ERRORS
19 ERRORS

View file

@ -1,5 +1,5 @@
Reporting all diagnostics codes
module specified: unavailable_type
Diagnostics reported:
app_a/src/unavailable_type.erl:10:43-10:58::[Warning] [W0059] The type 'app_c:my_type_c/0' is defined in application 'app_c', but the application is not a dependency of 'app_a' (defined in 'root//xref:app_a').
app_a/src/unavailable_type.erl:6:16-6:31::[Warning] [W0059] The type 'app_c:my_type_c/0' is defined in application 'app_c', but the application is not a dependency of 'app_a' (defined in 'root//xref:app_a').
app_a/src/unavailable_type.erl:10:43-10:58::[Warning] [W0059] The type 'app_c:my_type_c/0' is defined in application 'app_c', but the application is not a dependency of 'app_a' (defined in 'fbcode//whatsapp/elp/test_projects/xref:app_a').
app_a/src/unavailable_type.erl:6:16-6:31::[Warning] [W0059] The type 'app_c:my_type_c/0' is defined in application 'app_c', but the application is not a dependency of 'app_a' (defined in 'fbcode//whatsapp/elp/test_projects/xref:app_a').

View file

@ -2073,7 +2073,7 @@ impl Server {
};
for (_, _, file_id) in module_index.iter_own() {
match snapshot.analysis.should_eqwalize(file_id) {
match snapshot.analysis.should_eqwalize(file_id, false) {
Ok(true) => {
files.push(file_id);
}

View file

@ -33,7 +33,7 @@ use super::FILE_WATCH_LOGGER_NAME;
use super::logger::LspLogger;
use crate::config::Config;
use crate::from_json;
// @fb-only: use crate::meta_only::get_log_dir;
// @fb-only
use crate::server::Handle;
use crate::server::LOGGER_NAME;
use crate::server::Server;
@ -126,7 +126,7 @@ impl ServerSetup {
// Set up a logger for tracking down why we are seeing stale
// results when branches are switched, as per T218973130
// @fb-only: let log_dir = get_log_dir();
// @fb-only
let log_dir = format!("{}/elp", std::env::temp_dir().display()); // @oss-only
let _ = fs::create_dir_all(&log_dir);
let log_file = format!(

View file

@ -36,11 +36,9 @@ use parking_lot::Mutex;
use parking_lot::RwLock;
use serde::Deserialize;
use serde::Serialize;
use vfs::AnchoredPathBuf;
use crate::config::Config;
use crate::convert;
use crate::convert::url_from_abs_path;
use crate::line_endings::LineEndings;
use crate::mem_docs::MemDocs;
use crate::server::EqwalizerTypes;
@ -188,14 +186,6 @@ impl Snapshot {
self.line_ending_map.read()[&id]
}
pub(crate) fn anchored_path(&self, path: &AnchoredPathBuf) -> Option<Url> {
let mut base = self.vfs.read().file_path(path.anchor).clone();
base.pop();
let path = base.join(&path.path)?;
let path = path.as_path()?;
Some(url_from_abs_path(path))
}
pub fn update_cache_for_file(
&self,
file_id: FileId,
@ -203,7 +193,7 @@ impl Snapshot {
) -> Result<()> {
let _ = self.analysis.def_map(file_id)?;
if optimize_for_eqwalizer {
let should_eqwalize = self.analysis.should_eqwalize(file_id)?;
let should_eqwalize = self.analysis.should_eqwalize(file_id, false)?;
if should_eqwalize {
let _ = self.analysis.module_ast(file_id)?;
}
@ -252,7 +242,7 @@ impl Snapshot {
let file_ids: Vec<FileId> = module_index
.iter_own()
.filter_map(|(_, _, file_id)| {
if let Ok(true) = self.analysis.should_eqwalize(file_id) {
if let Ok(true) = self.analysis.should_eqwalize(file_id, false) {
Some(file_id)
} else {
None

View file

@ -10,7 +10,6 @@
//! Conversion of rust-analyzer specific types to lsp_types equivalents.
use std::mem;
use std::sync::atomic::AtomicU32;
use std::sync::atomic::Ordering;
@ -48,7 +47,6 @@ use elp_ide::elp_ide_db::elp_base_db::FileId;
use elp_ide::elp_ide_db::elp_base_db::FilePosition;
use elp_ide::elp_ide_db::elp_base_db::FileRange;
use elp_ide::elp_ide_db::rename::RenameError;
use elp_ide::elp_ide_db::source_change::FileSystemEdit;
use elp_ide::elp_ide_db::source_change::SourceChange;
use elp_ide_db::text_edit::Indel;
use elp_ide_db::text_edit::TextEdit;
@ -123,9 +121,9 @@ pub(crate) fn optional_versioned_text_document_identifier(
pub(crate) fn text_document_edit(
snap: &Snapshot,
file_id: FileId,
text_document: lsp_types::OptionalVersionedTextDocumentIdentifier,
edit: TextEdit,
) -> Result<lsp_types::TextDocumentEdit> {
let text_document = optional_versioned_text_document_identifier(snap, file_id);
let line_index = snap.analysis.line_index(file_id)?;
let line_endings = snap.line_endings(file_id);
let edits: Vec<lsp_types::OneOf<lsp_types::TextEdit, lsp_types::AnnotatedTextEdit>> = edit
@ -133,131 +131,34 @@ pub(crate) fn text_document_edit(
.map(|it| lsp_types::OneOf::Left(text_edit(&line_index, line_endings, it)))
.collect();
// if snap.analysis.is_library_file(file_id)? && snap.config.change_annotation_support() {
// for edit in &mut edits {
// edit.annotation_id = Some(outside_workspace_annotation_id())
// }
// }
Ok(lsp_types::TextDocumentEdit {
text_document,
edits,
})
}
pub(crate) fn text_document_ops(
snap: &Snapshot,
file_system_edit: FileSystemEdit,
) -> Cancellable<Vec<lsp_types::DocumentChangeOperation>> {
let mut ops = Vec::new();
match file_system_edit {
FileSystemEdit::CreateFile {
dst,
initial_contents,
} => {
if let Some(uri) = snap.anchored_path(&dst) {
let create_file = lsp_types::ResourceOp::Create(lsp_types::CreateFile {
uri: uri.clone(),
options: None,
annotation_id: None,
});
ops.push(lsp_types::DocumentChangeOperation::Op(create_file));
if !initial_contents.is_empty() {
let text_document =
lsp_types::OptionalVersionedTextDocumentIdentifier { uri, version: None };
let text_edit = lsp_types::TextEdit {
range: lsp_types::Range::default(),
new_text: initial_contents,
};
let edit_file = lsp_types::TextDocumentEdit {
text_document,
edits: vec![lsp_types::OneOf::Left(text_edit)],
};
ops.push(lsp_types::DocumentChangeOperation::Edit(edit_file));
}
} else {
log::warn!("create file failed: {:?}", dst);
}
}
FileSystemEdit::MoveFile { src, dst } => {
if let Some(new_uri) = snap.anchored_path(&dst) {
let old_uri = snap.file_id_to_url(src);
let rename_file = lsp_types::RenameFile {
old_uri,
new_uri,
options: None,
annotation_id: None,
};
ops.push(lsp_types::DocumentChangeOperation::Op(
lsp_types::ResourceOp::Rename(rename_file),
))
} else {
log::warn!("rename file failed: {:?} -> {:?}", src, dst);
}
}
}
Ok(ops)
}
pub(crate) fn workspace_edit(
snap: &Snapshot,
mut source_change: SourceChange,
source_change: SourceChange,
) -> Result<lsp_types::WorkspaceEdit> {
let mut document_changes: Vec<lsp_types::DocumentChangeOperation> = Vec::new();
// This is copying RA's order of operations, first file creates,
// then edits, then file moves.
// This allows us to apply edits to the file once it has
// moved. Except we have no FileId at that point
for op in &mut source_change.file_system_edits {
if let FileSystemEdit::CreateFile {
dst,
initial_contents,
} = op
{
// replace with a placeholder to avoid cloning the edit
let op = FileSystemEdit::CreateFile {
dst: dst.clone(),
initial_contents: mem::take(initial_contents),
};
let ops = text_document_ops(snap, op)?;
document_changes.extend_from_slice(&ops);
}
}
for op in source_change.file_system_edits {
if !matches!(op, FileSystemEdit::CreateFile { .. }) {
let ops = text_document_ops(snap, op)?;
document_changes.extend_from_slice(&ops);
}
}
let mut edits: Vec<_> = vec![];
for (file_id, edit) in source_change.source_file_edits {
let text_document = optional_versioned_text_document_identifier(snap, file_id);
let edit = text_document_edit(snap, file_id, text_document, edit)?;
document_changes.push(lsp_types::DocumentChangeOperation::Edit(
lsp_types::TextDocumentEdit {
text_document: edit.text_document,
edits: edit.edits.into_iter().collect(),
},
));
// let edit = snippet_text_document_edit(snap, source_change.is_snippet, file_id, edit)?;
let edit = text_document_edit(snap, file_id, edit)?;
edits.push(lsp_types::TextDocumentEdit {
text_document: edit.text_document,
edits: edit.edits.into_iter().collect(),
});
}
// Edits on renamed files. The LineIndex from the original can be used.
for (file_ref, edit) in source_change.new_file_edits {
if let Some(uri) = snap.anchored_path(&file_ref.clone().into()) {
let version = snap.url_file_version(&uri);
let text_document = lsp_types::OptionalVersionedTextDocumentIdentifier { uri, version };
let edit = text_document_edit(snap, file_ref.anchor, text_document, edit)?;
document_changes.push(lsp_types::DocumentChangeOperation::Edit(
lsp_types::TextDocumentEdit {
text_document: edit.text_document,
edits: edit.edits.into_iter().collect(),
},
));
} else {
log::warn!("new file edit failed: {:?}", file_ref);
}
}
let document_changes = lsp_types::DocumentChanges::Edits(edits);
let workspace_edit = lsp_types::WorkspaceEdit {
changes: None,
document_changes: Some(lsp_types::DocumentChanges::Operations(document_changes)),
document_changes: Some(document_changes),
change_annotations: None,
};
Ok(workspace_edit)
@ -281,6 +182,10 @@ pub(crate) fn code_action(
) -> Result<lsp_types::CodeActionOrCommand> {
let mut res = lsp_types::CodeAction {
title: assist.label.to_string(),
// group: assist
// .group
// .filter(|_| snap.config.code_action_group())
// .map(|gr| gr.0),
kind: Some(code_action_kind(assist.id.1)),
edit: None,
is_preferred: None,

View file

@ -31,7 +31,7 @@ mod tests {
#[test]
#[ignore]
fn test_success_case() {
let path_str = "../../test/test_projects/buck_tests";
let path_str = "../../test_projects/buck_tests";
let path: PathBuf = path_str.into();
let cli = Fake::default();
@ -65,7 +65,7 @@ mod tests {
let ast = analysis.module_ast(file_id).unwrap();
assert_eq!(ast.errors, vec![]);
let eq_enabled = analysis
.is_eqwalizer_enabled(file_id)
.is_eqwalizer_enabled(file_id, false)
.unwrap_or_else(|_| panic!("Failed to check if eqwalizer enabled for {module}"));
assert_eq!(eq_enabled, eqwalizer_enabled);
let project_data = analysis.project_data(file_id).unwrap();
@ -76,7 +76,7 @@ mod tests {
#[test]
#[ignore]
fn test_load_buck_targets() {
let path_str = "../../test/test_projects/buck_tests";
let path_str = "../../test_projects/buck_tests";
let path: PathBuf = path_str.into();
let (elp_config, buck_config) =

View file

@ -36,7 +36,7 @@ use crate::support::diagnostic_project;
fn test_run_mock_lsp() {
if cfg!(feature = "buck") {
let workspace_root = AbsPathBuf::assert(
Utf8Path::new(env!("CARGO_WORKSPACE_DIR")).join("test/test_projects/end_to_end"),
Utf8Path::new(env!("CARGO_WORKSPACE_DIR")).join("test_projects/end_to_end"),
);
// Sanity check
@ -70,7 +70,7 @@ fn test_run_mock_lsp() {
}
],
"textDocument": {
"uri": "file:///[..]/test/test_projects/end_to_end/assist_examples/src/head_mismatch.erl",
"uri": "file:///[..]/test_projects/end_to_end/assist_examples/src/head_mismatch.erl",
"version": 0
}
}
@ -99,7 +99,7 @@ fn test_run_mock_lsp() {
}
],
"textDocument": {
"uri": "file:///[..]/test/test_projects/end_to_end/assist_examples/src/head_mismatch.erl",
"uri": "file:///[..]/test_projects/end_to_end/assist_examples/src/head_mismatch.erl",
"version": 0
}
}
@ -128,7 +128,7 @@ fn test_run_mock_lsp() {
}
],
"textDocument": {
"uri": "file:///[..]/test/test_projects/end_to_end/assist_examples/src/head_mismatch.erl",
"uri": "file:///[..]/test_projects/end_to_end/assist_examples/src/head_mismatch.erl",
"version": 0
}
}
@ -157,7 +157,7 @@ fn test_run_mock_lsp() {
}
],
"textDocument": {
"uri": "file:///[..]/test/test_projects/end_to_end/assist_examples/src/head_mismatch.erl",
"uri": "file:///[..]/test_projects/end_to_end/assist_examples/src/head_mismatch.erl",
"version": 0
}
}
@ -175,7 +175,7 @@ fn test_run_mock_lsp() {
fn test_e2e_eqwalizer_module() {
if cfg!(feature = "buck") {
let workspace_root = AbsPathBuf::assert(
Utf8Path::new(env!("CARGO_WORKSPACE_DIR")).join("test/test_projects/standard"),
Utf8Path::new(env!("CARGO_WORKSPACE_DIR")).join("test_projects/standard"),
);
// Sanity check
@ -321,7 +321,7 @@ fn test_e2e_eqwalizer_module() {
"source": "eqWAlizer"
}
],
"uri": "file:///[..]/test/test_projects/standard/app_a/src/app_a.erl",
"uri": "file:///[..]/test_projects/standard/app_a/src/app_a.erl",
"version": 0
}"#]],
);
@ -334,7 +334,7 @@ fn test_e2e_eqwalizer_module() {
// #[test]
// fn test_e2e_eqwalizer_header() {
// let workspace_root =
// AbsPathBuf::assert(Path::new(env!("CARGO_WORKSPACE_DIR")).join("test/test_projects/standard"));
// AbsPathBuf::assert(Path::new(env!("CARGO_WORKSPACE_DIR")).join("test_projects/standard"));
// // Sanity check
// assert!(std::fs::metadata(&workspace_root).is_ok());

View file

@ -865,7 +865,7 @@ mod tests {
f() ->
As = [1,2,3],
Bs = [4,5,6],
[{X,Y, ~} || X <- As && Y <- Bs]
[{X,Y}~ || X <- As && Y <- Bs]
.
",
&["Y", "X", "As", "Bs"],

View file

@ -448,7 +448,6 @@ foo(#record.field) -> #record.field.
fn record() {
check(
r#"
//- expect_parse_errors
foo1(#record{field = 1}) -> #record{field = A + B}.
foo2(#record{field}) -> #record{field = }.
"#,
@ -474,7 +473,6 @@ foo2(#record{field}) -> #record{field = }.
fn record_update() {
check(
r#"
//- expect_parse_errors
foo1() -> Expr#record{field = undefined}.
foo2() -> Expr#record{field = ok, missing = }.
"#,
@ -559,7 +557,7 @@ fn case() {
r#"
foo() ->
case 1 + 2 of
X when X andalso true; X =< 100, X >= 5 -> ok;
X when X andalso true; X <= 100, X >= 5 -> ok;
_ -> error
end.
"#,
@ -568,7 +566,7 @@ foo() ->
case (1 + 2) of
X when
(X andalso true);
(X =< 100),
(X < 100),
(X >= 5)
->
ok;
@ -838,7 +836,6 @@ foo() ->
fn parens() {
check(
r#"
//- expect_parse_errors
foo((ok), ()) ->
(ok),
().
@ -998,7 +995,6 @@ foo(fun() -> ok end) -> ok.
fn invalid_comprehension() {
check(
r#"
//- expect_parse_errors
foo(<<Byte || Byte <- List>>, [Byte || Byte <- List]]) -> ok.
"#,
expect![[r#"
@ -1396,7 +1392,6 @@ fn call_type_erlang_bif() {
fn record_type() {
check(
r#"
//- expect_parse_errors
-type foo1() :: #record{}.
-type foo2(B) :: #record{a :: integer(), b :: B}.
-type foo3() :: #record{a ::}.
@ -1534,7 +1529,6 @@ fn record_definition() {
fn simple_term() {
check(
r#"
//- expect_parse_errors
-foo(ok).
-missing_value().
"#,
@ -2699,7 +2693,6 @@ fn verbatim_binary_sigil_in_type() {
// Note: \~ gets replaced by ~ in the fixture parsing
check(
r#"
//- expect_parse_errors
-type foo() :: \~B"ab\"c\"\d").
-type bar() :: "hello").
"#,
@ -2730,7 +2723,6 @@ fn verbatim_binary_sigil_in_term() {
fn lowering_with_error_nodes() {
check(
r#"
//- expect_parse_errors
f(1a) -> ok begin 1 end.
"#,
expect![[r#"
@ -2997,10 +2989,8 @@ fn tree_print_record() {
#[test]
fn tree_print_attribute() {
// TODO: fix wild attribute parsing, T246546041, to remove expect_parse_errors
check_ast(
r#"
//- expect_parse_errors
-wild(foo, []).
-compile({inline, [foo/1]}).
-compile({a/a, 1/1}).

View file

@ -2340,7 +2340,7 @@ mod tests {
r#"
foo() ->
case 1 + 2 of
X when X andalso true; X =< 100, X >= 5 -> ok;
X when X andalso true; X <= 100, X >= 5 -> ok;
_ -> error
end.
"#,
@ -2381,7 +2381,7 @@ mod tests {
rhs
Expr<8>:Literal(Integer(100))
op
CompOp(Ord { ordering: Less, strict: false }),
CompOp(Ord { ordering: Less, strict: true }),
},
Expr<12>:Expr::BinaryOp {
lhs
@ -3114,7 +3114,6 @@ mod tests {
fn type_record() {
check(
r#"
//- expect_parse_errors
-type foo1() :: #record{}.
-type foo2(B) :: #record{a :: integer(), b :: B}.
-type foo3() :: #record{a ::}.
@ -3499,7 +3498,6 @@ mod tests {
fn top_level_forms() {
check(
r#"
//- expect_parse_errors
-module(main).
bug
-compile([export_all]).

View file

@ -339,7 +339,7 @@ pub enum ParentId {
#[derive(Debug)]
pub struct AnyCallBackCtx<'a> {
pub in_macro: Option<(HirIdx, Option<InFile<DefineId>>)>,
pub in_macro: Option<HirIdx>,
pub parents: &'a Vec<ParentId>,
pub item_id: AnyExprId,
pub item: AnyExpr,
@ -426,7 +426,7 @@ pub struct FoldCtx<'a, T> {
body_origin: BodyOrigin,
body: &'a FoldBody<'a>,
strategy: Strategy,
macro_stack: Vec<(HirIdx, Option<InFile<DefineId>>)>,
macro_stack: Vec<HirIdx>,
parents: Vec<ParentId>,
callback: AnyCallBack<'a, T>,
}
@ -594,7 +594,7 @@ impl<'a, T> FoldCtx<'a, T> {
.do_fold_pat(pat_id, initial)
}
fn in_macro(&self) -> Option<(HirIdx, Option<InFile<DefineId>>)> {
fn in_macro(&self) -> Option<HirIdx> {
self.macro_stack.first().copied()
}
@ -752,19 +752,16 @@ impl<'a, T> FoldCtx<'a, T> {
crate::Expr::MacroCall {
expansion,
args,
macro_def,
macro_def: _,
macro_name: _,
} => {
if self.strategy.macros == MacroStrategy::DoNotExpand {
self.do_fold_exprs(args, acc)
} else {
self.macro_stack.push((
HirIdx {
body_origin: self.body_origin,
idx: AnyExprId::Expr(expr_id),
},
*macro_def,
));
self.macro_stack.push(HirIdx {
body_origin: self.body_origin,
idx: AnyExprId::Expr(expr_id),
});
let e = self.do_fold_expr(*expansion, acc);
self.macro_stack.pop();
e
@ -953,19 +950,16 @@ impl<'a, T> FoldCtx<'a, T> {
crate::Pat::MacroCall {
expansion,
args,
macro_def,
macro_def: _,
macro_name: _,
} => {
if self.strategy.macros == MacroStrategy::DoNotExpand {
self.do_fold_exprs(args, acc)
} else {
self.macro_stack.push((
HirIdx {
body_origin: self.body_origin,
idx: AnyExprId::Pat(pat_id),
},
*macro_def,
));
self.macro_stack.push(HirIdx {
body_origin: self.body_origin,
idx: AnyExprId::Pat(pat_id),
});
let e = self.do_fold_pat(*expansion, acc);
self.macro_stack.pop();
e
@ -1171,19 +1165,16 @@ impl<'a, T> FoldCtx<'a, T> {
TypeExpr::MacroCall {
expansion,
args,
macro_def,
macro_def: _,
macro_name: _,
} => {
if self.strategy.macros == MacroStrategy::DoNotExpand {
self.do_fold_exprs(args, acc)
} else {
self.macro_stack.push((
HirIdx {
body_origin: self.body_origin,
idx: AnyExprId::TypeExpr(type_expr_id),
},
*macro_def,
));
self.macro_stack.push(HirIdx {
body_origin: self.body_origin,
idx: AnyExprId::TypeExpr(type_expr_id),
});
let e = self.do_fold_type_expr(*expansion, acc);
self.macro_stack.pop();
e
@ -2221,9 +2212,7 @@ bar() ->
#[test]
fn traverse_attribute() {
// TODO: fix wild attribute parsing, T246546041, to remove expect_parse_errors
let fixture_str = r#"
//- expect_parse_errors
-module(foo).
-wild(r1, {f1, f~oo}).
"#;

View file

@ -318,7 +318,6 @@ fn export() {
fn import() {
check(
r#"
//- expect_parse_errors
-import(, []).
-import(foo, []).
-import(foo, [foo/1]).

View file

@ -155,7 +155,7 @@ pub use name::MacroName;
pub use name::Name;
pub use name::NameArity;
pub use name::known;
// @fb-only: pub use name::meta_only;
// @fb-only
pub use sema::AtomDef;
pub use sema::CallDef;
pub use sema::DefinitionOrReference;
@ -232,10 +232,6 @@ impl HirIdx {
}
}
pub fn file_id(&self) -> FileId {
self.body_origin.file_id()
}
/// This function is used to print a representation of the HIR AST
/// corresponding to the given `HirIdx`. It is used for debugging
/// and testing.

View file

@ -10,7 +10,7 @@
//! See [`Name`].
// @fb-only: pub mod meta_only;
// @fb-only
use std::borrow::Cow;
use std::collections::HashSet;

View file

@ -102,7 +102,7 @@ use crate::resolver::Resolution;
use crate::resolver::Resolver;
mod find;
// @fb-only: pub mod meta_only;
// @fb-only
pub mod to_def;
pub struct ModuleIter(Arc<ModuleIndex>);
@ -1006,28 +1006,6 @@ impl Semantic<'_> {
// Folds end
// -----------------------------------------------------------------
pub fn bound_vars_by_function(
&self,
file_id: FileId,
) -> FxHashMap<FunctionClauseId, FxHashSet<PatId>> {
let bound_vars = self.bound_vars_in_pattern_diagnostic(file_id);
let mut bound_vars_by_function: FxHashMap<FunctionClauseId, FxHashSet<PatId>> =
FxHashMap::default();
bound_vars.iter().for_each(|(function_id, pat_id, _var)| {
bound_vars_by_function
.entry(function_id.value)
.and_modify(|vars| {
vars.insert(*pat_id);
})
.or_insert_with(|| {
let mut vars = FxHashSet::default();
vars.insert(*pat_id);
vars
});
});
bound_vars_by_function
}
pub fn bound_vars_in_pattern_diagnostic(
&self,
file_id: FileId,

View file

@ -42,7 +42,7 @@ use crate::macro_exp;
use crate::macro_exp::BuiltInMacro;
use crate::macro_exp::MacroExpCtx;
use crate::resolver::Resolver;
// @fb-only: use crate::sema::meta_only;
// @fb-only
pub trait ToDef: Clone {
type Def;
@ -567,7 +567,7 @@ pub fn resolve_call_target(
let fn_name: Name = sema.db.lookup_atom(body[*name].as_atom()?);
let mo =
None; // @oss-only
// @fb-only: meta_only::resolve_handle_call_target(sema, arity, file_id, &module_name, &fn_name);
// @fb-only
if let Some(r) = mo {
r
} else {
@ -885,183 +885,12 @@ fn add_dynamic_call_patterns(patterns: &mut FxHashMap<PatternKey, DynamicCallPat
);
}
/// Specifies what forms a module argument can take.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ModuleArgType {
/// The argument must be a single module atom (e.g., `apply(Mod, Fun, Args)`)
Atom,
/// The argument must be a list of module atoms (e.g., some batch operations)
List,
/// The argument can be either a single module atom or a list of modules
/// (e.g., `meck:new(Mod | [Mod], Opts)`)
AtomOrList,
}
/// Pattern for matching module argument positions in function calls.
/// Used by rename operations to identify which argument contains a module name.
#[derive(Debug, Clone, Copy)]
pub struct ModuleArgPattern {
/// Index of the argument containing the module name (0-based)
pub index: usize,
/// The type of the module argument (atom, list, or either)
pub arg_type: ModuleArgType,
}
impl ModuleArgPattern {
/// Creates a pattern where the argument is a single module atom.
pub const fn atom(index: usize) -> Self {
Self {
index,
arg_type: ModuleArgType::Atom,
}
}
/// Creates a pattern where the argument is a list of module atoms.
pub const fn list(index: usize) -> Self {
Self {
index,
arg_type: ModuleArgType::List,
}
}
/// Creates a pattern where the argument can be either a single atom or a list.
pub const fn atom_or_list(index: usize) -> Self {
Self {
index,
arg_type: ModuleArgType::AtomOrList,
}
}
/// Returns true if this pattern accepts a single atom.
pub const fn accepts_atom(&self) -> bool {
matches!(
self.arg_type,
ModuleArgType::Atom | ModuleArgType::AtomOrList
)
}
/// Returns true if this pattern accepts a list of atoms.
pub const fn accepts_list(&self) -> bool {
matches!(
self.arg_type,
ModuleArgType::List | ModuleArgType::AtomOrList
)
}
}
fn add_module_argument_patterns(patterns: &mut FxHashMap<PatternKey, ModuleArgPattern>) {
// Each entry follows the format:
// (module, function, arity) -> ModuleArgPattern
//
// Where:
// module: Module name (Some("meck"), Some("application"), etc.)
// function: Function name as string literal (e.g., "new", "get_env")
// arity: Number of arguments this function pattern expects
// ModuleArgPattern: Contains the argument index and the expected type
//
// All indexes are 0-based.
// meck - mocking library
// meck:new/2 accepts either a single module atom or a list of modules
patterns.insert((Some("meck"), "called", 3), ModuleArgPattern::atom(0));
patterns.insert((Some("meck"), "called", 4), ModuleArgPattern::atom(0));
patterns.insert((Some("meck"), "capture", 5), ModuleArgPattern::atom(1));
patterns.insert((Some("meck"), "capture", 6), ModuleArgPattern::atom(1));
patterns.insert(
(Some("meck"), "delete", 3),
ModuleArgPattern::atom_or_list(0),
);
patterns.insert(
(Some("meck"), "delete", 4),
ModuleArgPattern::atom_or_list(0),
);
patterns.insert(
(Some("meck"), "expect", 3),
ModuleArgPattern::atom_or_list(0),
);
patterns.insert(
(Some("meck"), "expect", 4),
ModuleArgPattern::atom_or_list(0),
);
patterns.insert(
(Some("meck"), "expects", 2),
ModuleArgPattern::atom_or_list(0),
);
patterns.insert((Some("meck"), "history", 1), ModuleArgPattern::atom(0));
patterns.insert((Some("meck"), "history", 2), ModuleArgPattern::atom(0));
patterns.insert((Some("meck"), "loop", 4), ModuleArgPattern::atom_or_list(0));
patterns.insert((Some("meck"), "new", 1), ModuleArgPattern::atom_or_list(0));
patterns.insert((Some("meck"), "new", 2), ModuleArgPattern::atom_or_list(0));
patterns.insert((Some("meck"), "num_calls", 3), ModuleArgPattern::atom(0));
patterns.insert((Some("meck"), "num_calls", 4), ModuleArgPattern::atom(0));
patterns.insert(
(Some("meck"), "reset", 1),
ModuleArgPattern::atom_or_list(0),
);
patterns.insert(
(Some("meck"), "sequence", 4),
ModuleArgPattern::atom_or_list(0),
);
patterns.insert(
(Some("meck"), "unload", 1),
ModuleArgPattern::atom_or_list(0),
);
patterns.insert(
(Some("meck"), "validate", 1),
ModuleArgPattern::atom_or_list(0),
);
patterns.insert((Some("meck"), "wait", 4), ModuleArgPattern::atom(0));
patterns.insert((Some("meck"), "wait", 5), ModuleArgPattern::atom(1));
patterns.insert((Some("meck"), "wait", 6), ModuleArgPattern::atom(1));
// code module - module loading and management
// These functions from the Erlang stdlib take module() as their argument
patterns.insert((Some("code"), "load_file", 1), ModuleArgPattern::atom(0));
patterns.insert(
(Some("code"), "ensure_loaded", 1),
ModuleArgPattern::atom(0),
);
patterns.insert((Some("code"), "delete", 1), ModuleArgPattern::atom(0));
patterns.insert((Some("code"), "purge", 1), ModuleArgPattern::atom(0));
patterns.insert((Some("code"), "soft_purge", 1), ModuleArgPattern::atom(0));
patterns.insert((Some("code"), "is_loaded", 1), ModuleArgPattern::atom(0));
patterns.insert(
(Some("code"), "get_object_code", 1),
ModuleArgPattern::atom(0),
);
patterns.insert((Some("code"), "module_md5", 1), ModuleArgPattern::atom(0));
patterns.insert((Some("code"), "is_sticky", 1), ModuleArgPattern::atom(0));
}
// Lazy static initialization for the patterns maps
// Lazy static initialization for the patterns map
lazy_static! {
static ref DYNAMIC_CALL_PATTERNS: FxHashMap<PatternKey, DynamicCallPattern> = {
let mut patterns = FxHashMap::default();
add_dynamic_call_patterns(&mut patterns);
// @fb-only: meta_only::add_dynamic_call_patterns(&mut patterns);
patterns
};
static ref MODULE_ARGUMENT_PATTERNS: FxHashMap<PatternKey, ModuleArgPattern> = {
let mut patterns = FxHashMap::default();
add_module_argument_patterns(&mut patterns);
// @fb-only: meta_only::add_module_argument_patterns(&mut patterns);
patterns
};
/// Combined patterns for module argument positions.
/// Merges dynamic call patterns (that have module_arg_index) with simple module argument patterns.
/// Used by rename operations where we only care about the module argument position.
static ref COMBINED_MODULE_ARG_PATTERNS: FxHashMap<PatternKey, ModuleArgPattern> = {
let mut patterns: FxHashMap<PatternKey, ModuleArgPattern> = FxHashMap::default();
// Add module_arg_index from dynamic call patterns (where present)
for (key, pattern) in DYNAMIC_CALL_PATTERNS.iter() {
if let Some(module_idx) = pattern.module_arg_index {
patterns.insert(*key, ModuleArgPattern::atom(module_idx));
}
}
// Add from simple module argument patterns
for (key, module_arg_pattern) in MODULE_ARGUMENT_PATTERNS.iter() {
patterns.insert(*key, *module_arg_pattern);
}
// @fb-only
patterns
};
}
@ -1070,10 +899,6 @@ fn get_dynamic_call_patterns() -> &'static FxHashMap<PatternKey, DynamicCallPatt
&DYNAMIC_CALL_PATTERNS
}
pub fn get_module_arg_patterns() -> &'static FxHashMap<PatternKey, ModuleArgPattern> {
&COMBINED_MODULE_ARG_PATTERNS
}
fn look_for_dynamic_call(
sema: &Semantic,
file_id: FileId,

View file

@ -17,7 +17,7 @@ use elp_syntax::TextRange;
use fxhash::FxHashMap;
use fxhash::FxHashSet;
// @fb-only: use crate::meta_only;
// @fb-only
use crate::runnables::Runnable;
use crate::runnables::runnables;
@ -57,11 +57,11 @@ pub struct Link {
}
#[rustfmt::skip]
// @fb-only: pub(crate) fn annotations(db: &RootDatabase, file_id: FileId) -> Vec<Annotation> {
// @fb-only
pub(crate) fn annotations(_db: &RootDatabase, _file_id: FileId) -> Vec<Annotation> { // @oss-only
// @fb-only: let mut annotations = Vec::default();
// @fb-only
let annotations = Vec::default(); // @oss-only
// @fb-only: meta_only::annotations(db, file_id, &mut annotations);
// @fb-only
annotations
}

View file

@ -573,8 +573,8 @@ pub(crate) fn find_call_in_function<CallCtx, MakeCtx, Res>(
};
if let Some(extra) = check_call(context) {
// Got one.
let call_expr_id = if let Some((hir_idx, _macro_def)) = ctx.in_macro {
hir_idx.idx
let call_expr_id = if let Some(expr_id) = ctx.in_macro {
expr_id.idx
} else {
ctx.item_id
};

View file

@ -50,7 +50,6 @@ use elp_ide_db::text_edit::TextEdit;
use elp_ide_ssr::Match;
use elp_ide_ssr::SsrSearchScope;
use elp_ide_ssr::match_pattern;
use elp_project_model::AppName;
use elp_syntax::NodeOrToken;
use elp_syntax::Parse;
use elp_syntax::SourceFile;
@ -97,13 +96,13 @@ mod application_env;
mod atoms_exhaustion;
mod binary_string_to_sigil;
mod boolean_precedence;
mod bound_variable;
mod could_be_a_string_literal;
mod cross_node_eval;
mod debugging_function;
mod dependent_header;
mod deprecated_function;
mod duplicate_module;
mod edoc;
mod effect_free_statement;
mod equality_check_with_unnecessary_operator;
mod eqwalizer_assists;
@ -118,7 +117,7 @@ mod macro_precedence_suprise;
mod map_find_to_syntax;
mod map_insertion_to_syntax;
mod meck;
// @fb-only: mod meta_only;
// @fb-only
mod missing_compile_warn_missing_spec;
mod missing_module;
mod missing_separator;
@ -132,7 +131,6 @@ mod no_garbage_collect;
mod no_nowarn_suppressions;
mod no_size;
mod nonstandard_integer_formatting;
mod old_edoc_syntax;
mod record_tuple_match;
mod redundant_assignment;
mod replace_call;
@ -551,37 +549,12 @@ pub(crate) trait Linter {
}
}
fn should_process_app(
app_name: &Option<AppName>,
config: &DiagnosticsConfig,
diagnostic_code: &DiagnosticCode,
) -> bool {
let app = match app_name {
Some(app) => app.to_string(),
None => return true,
};
if let Some(lint_config) = config.lint_config.as_ref()
&& let Some(linter_config) = lint_config.linters.get(diagnostic_code)
&& let Some(ref excluded) = linter_config.exclude_apps
&& excluded.contains(&app)
{
return false;
}
true
}
fn should_run(
linter: &dyn Linter,
config: &DiagnosticsConfig,
app_name: &Option<AppName>,
is_generated: bool,
is_test: bool,
) -> bool {
if !should_process_app(app_name, config, &linter.id()) {
return false;
}
let is_enabled = if let Some(lint_config) = config.lint_config.as_ref() {
lint_config
.get_is_enabled_override(&linter.id())
@ -892,7 +865,6 @@ pub(crate) trait GenericLinter: Linter {
fn fixes(
&self,
_context: &Self::Context,
_range: TextRange,
_sema: &Semantic,
_file_id: FileId,
) -> Option<Vec<Assist>> {
@ -926,7 +898,7 @@ impl<T: GenericLinter> GenericDiagnostics for T {
if let Some(matches) = self.matches(sema, file_id) {
for matched in matches {
let message = self.match_description(&matched.context);
let fixes = self.fixes(&matched.context, matched.range, sema, file_id);
let fixes = self.fixes(&matched.context, sema, file_id);
let tag = self.tag(&matched.context);
let mut d = Diagnostic::new(self.id(), message, matched.range)
.with_fixes(fixes)
@ -1244,16 +1216,6 @@ impl LintConfig {
self.linters.get(diagnostic_code)?.experimental
}
/// Get the exclude_apps override for a linter based on its diagnostic code.
pub fn get_exclude_apps_override(
&self,
diagnostic_code: &DiagnosticCode,
) -> Option<Vec<String>> {
self.linters
.get(diagnostic_code)
.and_then(|c| c.exclude_apps.clone())
}
pub fn get_function_call_linter_config(
&self,
diagnostic_code: &DiagnosticCode,
@ -1377,7 +1339,6 @@ pub struct LinterConfig {
pub include_tests: Option<bool>,
pub include_generated: Option<bool>,
pub experimental: Option<bool>,
pub exclude_apps: Option<Vec<String>>,
#[serde(flatten)]
pub config: Option<LinterTraitConfig>,
}
@ -1398,7 +1359,6 @@ impl LinterConfig {
include_tests: other.include_tests.or(self.include_tests),
include_generated: other.include_generated.or(self.include_generated),
experimental: other.experimental.or(self.experimental),
exclude_apps: other.exclude_apps.or(self.exclude_apps),
config: merged_config,
}
}
@ -1564,7 +1524,7 @@ pub fn native_diagnostics(
config
.lints_from_config
.get_diagnostics(&mut res, &sema, file_id);
// @fb-only: meta_only::diagnostics(&mut res, &sema, file_id, file_kind, config);
// @fb-only
syntax_diagnostics(&sema, &parse, &mut res, file_id);
diagnostics_from_descriptors(
&mut res,
@ -1593,7 +1553,6 @@ pub fn native_diagnostics(
} else {
FxHashMap::default()
};
let app_name = db.file_app_name(file_id);
let metadata = db.elp_metadata(file_id);
// TODO: can we ever disable DiagnosticCode::SyntaxError?
// In which case we must check labeled_syntax_errors
@ -1602,7 +1561,6 @@ pub fn native_diagnostics(
&& (config.experimental && d.has_category(Category::Experimental)
|| !d.has_category(Category::Experimental))
&& !d.should_be_suppressed(&metadata, config)
&& should_process_app(&app_name, config, &d.code)
});
LabeledDiagnostics {
@ -1653,20 +1611,20 @@ pub fn diagnostics_from_descriptors(
.db
.is_test_suite_or_test_helper(file_id)
.unwrap_or(false);
let app_name = sema.db.file_app_name(file_id);
descriptors.iter().for_each(|descriptor| {
if descriptor.conditions.enabled(config, is_generated, is_test) {
let mut diags: Vec<Diagnostic> = Vec::default();
(descriptor.checker)(&mut diags, sema, file_id, file_kind);
for diag in diags {
// Check if this diagnostic is enabled (for default_disabled descriptors)
// and if the app is not excluded for this diagnostic code
let is_enabled =
!descriptor.conditions.default_disabled || config.enabled.contains(&diag.code);
let app_allowed = should_process_app(&app_name, config, &diag.code);
if is_enabled && app_allowed {
res.push(diag);
if descriptor.conditions.default_disabled {
// Filter the returned diagnostics to ensure they are
// enabled
let mut diags: Vec<Diagnostic> = Vec::default();
(descriptor.checker)(&mut diags, sema, file_id, file_kind);
for diag in diags {
if config.enabled.contains(&diag.code) {
res.push(diag);
}
}
} else {
(descriptor.checker)(res, sema, file_id, file_kind);
}
}
});
@ -1723,12 +1681,11 @@ const GENERIC_LINTERS: &[&dyn GenericDiagnostics] = &[
&duplicate_module::LINTER,
&no_nowarn_suppressions::LINTER,
&macro_precedence_suprise::LINTER,
&old_edoc_syntax::LINTER,
&edoc::LINTER,
&missing_module::LINTER,
&unused_include::LINTER,
&misspelled_attribute::LINTER,
&boolean_precedence::LINTER,
&bound_variable::LINTER,
];
/// Unified registry for all types of linters
@ -1757,7 +1714,7 @@ pub(crate) fn linters() -> Vec<DiagnosticLinter> {
);
// Add meta-only linters
// @fb-only: all_linters.extend(meta_only::linters());
// @fb-only
all_linters
}
@ -1774,12 +1731,11 @@ fn diagnostics_from_linters(
.db
.is_test_suite_or_test_helper(file_id)
.unwrap_or(false);
let app_name = sema.db.file_app_name(file_id);
for l in linters {
let linter = l.as_linter();
if linter.should_process_file_id(sema, file_id)
&& should_run(linter, config, &app_name, is_generated, is_test)
&& should_run(linter, config, is_generated, is_test)
{
let severity = if let Some(lint_config) = config.lint_config.as_ref() {
lint_config
@ -2341,14 +2297,11 @@ pub fn erlang_service_diagnostics(
diags
};
let app_name = db.file_app_name(file_id);
let metadata = db.elp_metadata(file_id);
let diags = diags
.into_iter()
.filter(|(_file_id, d)| {
!d.should_be_suppressed(&metadata, config)
&& !config.disabled.contains(&d.code)
&& should_process_app(&app_name, config, &d.code)
!d.should_be_suppressed(&metadata, config) && !config.disabled.contains(&d.code)
})
.map(|(file_id, d)| {
(
@ -2639,7 +2592,7 @@ pub fn ct_diagnostics(
CommonTestInfo::Result { all, groups } => {
let testcases = common_test::runnable_names(&sema, file_id, all, groups).ok();
common_test::unreachable_test(&mut res, &sema, file_id, &testcases);
// @fb-only: meta_only::ct_diagnostics(&mut res, &sema, file_id, testcases);
// @fb-only
}
CommonTestInfo::EvalError(_error) => {
// The error currently does not contain anything useful, so we ignore it
@ -3236,7 +3189,6 @@ mod tests {
fn syntax_error() {
check_diagnostics(
r#"
//- expect_parse_errors
-module(main).
foo() -> XX 3.0.
%% ^^ error: P1711: Syntax Error
@ -3414,7 +3366,6 @@ main(X) ->
#[test]
fn label_syntax_error_not_function() {
let fixture_str = r#"
//- expect_parse_errors
-module(main).
-record(person, {(name + XXX)}).
%% ^^^^^^^ error: P1711: Syntax Error
@ -3430,7 +3381,7 @@ main(X) ->
expect![[r#"
Some(
Range(
5..45,
24..56,
),
)
"#]]
@ -3589,7 +3540,6 @@ main(X) ->
config,
&extra_diags,
r#"
//- expect_parse_errors
-module(main).
-export([foo/0,bar/0]).
@ -3610,7 +3560,7 @@ main(X) ->
#[test]
fn group_related_diagnostics_elp_only() {
// Demonstrate that ELP does not pick up a syntax error in the
// spec, same code as in test/test_projects/diagnostics/app_a/src/syntax.erl
// spec, same code as in test_projects/diagnostics/app_a/src/syntax.erl
check_diagnostics(
r#"
-module(main).
@ -3627,7 +3577,6 @@ main(X) ->
check_diagnostics(
r#"
//- erlang_service
//- expect_parse_errors
//- /src/a_mod.erl app:app_a
-module(a_mod).
-export([foo/0]).
@ -3646,7 +3595,6 @@ main(X) ->
check_diagnostics(
r#"
//- erlang_service
//- expect_parse_errors
//- native
//- /src/a_mod.erl app:app_a
-module(a_mod).
@ -3725,7 +3673,6 @@ main(X) ->
fn test_nested_syntax_errors() {
check_diagnostics(
r#"
//- expect_parse_errors
-module(main).
run() ->
ExitCode =
@ -4039,7 +3986,6 @@ main(X) ->
include_tests: None,
include_generated: None,
experimental: None,
exclude_apps: None,
config: None,
},
);
@ -4082,7 +4028,6 @@ main(X) ->
include_tests: Some(true),
include_generated: None,
experimental: None,
exclude_apps: None,
config: None,
},
);
@ -4124,7 +4069,6 @@ main(X) ->
include_tests: None,
include_generated: Some(true),
experimental: None,
exclude_apps: None,
config: None,
},
);
@ -4167,7 +4111,6 @@ main(X) ->
include_tests: None,
include_generated: None,
experimental: Some(true),
exclude_apps: None,
config: None,
},
);
@ -4212,7 +4155,6 @@ main(X) ->
include_tests: None,
include_generated: None,
experimental: None,
exclude_apps: None,
config: None,
},
);
@ -4243,47 +4185,6 @@ main(X) ->
);
}
#[test]
fn test_linter_exclude_apps_override() {
let mut lint_config = LintConfig::default();
lint_config.linters.insert(
DiagnosticCode::NoGarbageCollect,
LinterConfig {
is_enabled: Some(false),
severity: None,
include_tests: None,
include_generated: None,
experimental: None,
exclude_apps: Some(vec!["my_app".to_string()]),
config: None,
},
);
let config = DiagnosticsConfig::default()
.configure_diagnostics(
&lint_config,
&Some("no_garbage_collect".to_string()),
&None,
FallBackToAll::No,
)
.unwrap();
check_diagnostics_with_config(
config,
r#"
//- /src/main.erl app:my_app
-module(main).
-export([warning/0]).
warning() ->
erlang:garbage_collect().
//- /opt/lib/stdlib-3.17/src/erlang.erl otp_app:/opt/lib/stdlib-3.17
-module(erlang).
-export([garbage_collect/0]).
garbage_collect() -> ok.
"#,
);
}
#[test]
fn no_unused_macro_in_macro_rhs_for_function_name() {
let config = DiagnosticsConfig::default()
@ -4331,7 +4232,6 @@ main(X) ->
include_tests: None,
include_generated: None,
experimental: None,
exclude_apps: None,
config: Some(LinterTraitConfig::FunctionCallLinterConfig(
FunctionCallLinterConfig {
include: Some(vec![FunctionMatch::mf("mod_a", "func_a")]),
@ -4364,7 +4264,6 @@ main(X) ->
include_tests: Some(true),
include_generated: None,
experimental: None,
exclude_apps: None,
config: Some(LinterTraitConfig::FunctionCallLinterConfig(
FunctionCallLinterConfig {
include: Some(vec![FunctionMatch::mf("mod_b", "func_b")]),
@ -4382,7 +4281,6 @@ main(X) ->
include_tests: None,
include_generated: Some(true),
experimental: None,
exclude_apps: None,
config: None,
},
);

View file

@ -28,7 +28,7 @@ use crate::codemod_helpers::CheckCallCtx;
use crate::codemod_helpers::FunctionMatch;
use crate::codemod_helpers::MatchCtx;
use crate::codemod_helpers::find_call_in_function;
// @fb-only: use crate::diagnostics;
// @fb-only
use crate::diagnostics::DiagnosticCode;
use crate::diagnostics::Severity;
@ -36,7 +36,7 @@ pub(crate) static DESCRIPTOR: DiagnosticDescriptor = DiagnosticDescriptor {
conditions: DiagnosticConditions {
experimental: false,
include_generated: true,
include_tests: false,
include_tests: true,
default_disabled: false,
},
checker: &|diags, sema, file_id, _ext| {
@ -108,7 +108,7 @@ fn check_function(diags: &mut Vec<Diagnostic>, sema: &Semantic, def: &FunctionDe
vec![2, 3],
BadEnvCallAction::AppArg(0),
),
// @fb-only: diagnostics::meta_only::application_env_bad_matches(),
// @fb-only
]
.into_iter()
.flatten()

View file

@ -13,7 +13,7 @@ use hir::Semantic;
use crate::FunctionMatch;
use crate::codemod_helpers::CheckCallCtx;
// @fb-only: use crate::diagnostics;
// @fb-only
use crate::diagnostics::DiagnosticCode;
use crate::diagnostics::FunctionCallLinter;
use crate::diagnostics::Linter;
@ -35,9 +35,9 @@ impl Linter for AtomsExhaustionLinter {
false
}
#[rustfmt::skip]
// @fb-only: fn should_process_file_id(&self, sema: &Semantic, file_id: FileId) -> bool {
// @fb-only
fn should_process_file_id(&self, _sema: &Semantic, _file_id: FileId) -> bool { // @oss-only
// @fb-only: diagnostics::meta_only::is_relevant_file(sema.db.upcast(), file_id)
// @fb-only
true // @oss-only
}
}
@ -56,16 +56,16 @@ impl FunctionCallLinter for AtomsExhaustionLinter {
// FunctionMatch::mfa("erlang", "binary_to_term", 2),
]
.into_iter()
// @fb-only: .chain(diagnostics::meta_only::atoms_exhaustion_matches().into_iter())
// @fb-only
.collect::<Vec<_>>()
]
}
fn check_match(&self, context: &CheckCallCtx<'_, ()>) -> Option<Self::Context> {
#[rustfmt::skip]
// @fb-only: let sema = context.in_clause.sema;
// @fb-only: let is_safe =
// @fb-only: diagnostics::meta_only::atoms_exhaustion_is_safe(sema, context.in_clause, context.parents);
// @fb-only
// @fb-only
// @fb-only
let is_safe = false; // @oss-only
if !is_safe {
match context.args.as_slice() {

View file

@ -66,6 +66,7 @@ impl Linter for BooleanPrecedenceLinter {
#[derive(Debug, Clone, PartialEq, Eq, Default)]
pub struct Context {
range: TextRange,
preceding_ws_range: TextRange,
op: Op,
lhs_complex: bool,
@ -100,7 +101,6 @@ impl GenericLinter for BooleanPrecedenceLinter {
fn fixes(
&self,
context: &Self::Context,
range: TextRange,
_sema: &Semantic,
file_id: FileId,
) -> Option<Vec<Assist>> {
@ -109,36 +109,36 @@ impl GenericLinter for BooleanPrecedenceLinter {
// Add "replace with preferred operator" fix
let assist_message = format!("Replace '{}' with '{}'", context.op, context.op.preferred());
let edit = TextEdit::replace(
context.op.range(range, context.preceding_ws_range),
context.op.range(context.range, context.preceding_ws_range),
context.op.preferred().to_string(),
);
fixes.push(fix(
"replace_boolean_operator",
&assist_message,
SourceChange::from_text_edit(file_id, edit),
range,
context.range,
));
// Add "add parens" fixes if applicable
if context.lhs_complex {
fixes.push(parens_fix("LHS", file_id, context, range));
fixes.push(parens_fix("LHS", file_id, context));
}
if context.rhs_complex {
fixes.push(parens_fix("RHS", file_id, context, range));
fixes.push(parens_fix("RHS", file_id, context));
}
Some(fixes)
}
}
fn parens_fix(side: &str, file_id: FileId, context: &Context, range: TextRange) -> Assist {
fn parens_fix(side: &str, file_id: FileId, context: &Context) -> Assist {
let assist_message = format!("Add parens to {side}");
let edit = add_parens_edit(&context.add_parens_range);
fix(
"replace_boolean_operator_add_parens",
&assist_message,
SourceChange::from_text_edit(file_id, edit),
range,
context.range,
)
}
@ -231,6 +231,7 @@ fn collect_match(
matches.push(GenericLinterMatchContext {
range,
context: Context {
range,
preceding_ws_range,
op: binop,
lhs_complex,

View file

@ -1,178 +0,0 @@
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is dual-licensed under either the MIT license found in the
* LICENSE-MIT file in the root directory of this source tree or the Apache
* License, Version 2.0 found in the LICENSE-APACHE file in the root directory
* of this source tree. You may select, at your option, one of the
* above-listed licenses.
*/
// Diagnostic: bound_variable
//
// Return a warning if the LHS of a match already contains a bound variable.
//
use elp_ide_db::elp_base_db::FileId;
use hir::AnyExpr;
use hir::Expr;
use hir::Semantic;
use hir::Strategy;
use hir::fold::MacroStrategy;
use hir::fold::ParenStrategy;
use crate::diagnostics::DiagnosticCode;
use crate::diagnostics::GenericLinter;
use crate::diagnostics::GenericLinterMatchContext;
use crate::diagnostics::Linter;
pub(crate) struct BoundVariableLinter;
impl Linter for BoundVariableLinter {
fn id(&self) -> DiagnosticCode {
DiagnosticCode::BoundVarInLhs
}
fn description(&self) -> &'static str {
"Match on a bound variable"
}
}
impl GenericLinter for BoundVariableLinter {
type Context = ();
fn matches(
&self,
sema: &Semantic,
file_id: FileId,
) -> Option<Vec<GenericLinterMatchContext<Self::Context>>> {
let bound_vars_by_function = sema.bound_vars_by_function(file_id);
let mut res = Vec::new();
sema.def_map(file_id)
.get_function_clauses()
.for_each(|(_, def)| {
if def.file.file_id == file_id
&& let Some(bound_vars) = bound_vars_by_function.get(&def.function_clause_id)
{
let in_clause = def.in_clause(sema, def);
in_clause.fold_clause(
Strategy {
macros: MacroStrategy::ExpandButIncludeMacroCall,
parens: ParenStrategy::InvisibleParens,
},
(),
&mut |acc, ctx| {
if let AnyExpr::Expr(Expr::Match { lhs, rhs: _ }) = ctx.item
&& bound_vars.contains(&lhs)
&& let Some(range) = in_clause.range_for_pat(lhs)
&& range.file_id == def.file.file_id
&& ctx.in_macro.is_none()
{
res.push(GenericLinterMatchContext {
range: range.range,
context: (),
});
};
acc
},
);
}
});
Some(res)
}
}
pub static LINTER: BoundVariableLinter = BoundVariableLinter;
#[cfg(test)]
mod test {
use elp_ide_db::DiagnosticCode;
use expect_test::Expect;
use crate::diagnostics::DiagnosticsConfig;
use crate::tests::check_diagnostics_with_config;
use crate::tests::check_fix_with_config;
#[track_caller]
pub(crate) fn check_diagnostics(fixture: &str) {
let config = DiagnosticsConfig::default().disable(DiagnosticCode::UndefinedFunction);
check_diagnostics_with_config(config, fixture)
}
#[track_caller]
pub(crate) fn check_fix(fixture_before: &str, fixture_after: Expect) {
let config = DiagnosticsConfig::default().disable(DiagnosticCode::UndefinedFunction);
check_fix_with_config(config, fixture_before, fixture_after)
}
#[test]
fn bound_variable() {
check_diagnostics(
r#"
//- /src/bound.erl
-module(bound).
foo() ->
AA = bar(),
AA = bar().
%% ^^ 💡 warning: W0060: Match on a bound variable
"#,
)
}
#[test]
fn bound_variable_not_reported_in_case() {
check_diagnostics(
r#"
//- /src/bound.erl
-module(bound).
foo(Val) ->
case Val of
undefined -> ok;
Val when is_list(Val) -> ok
end.
"#,
)
}
#[test]
fn bound_variable_not_reported_in_macro() {
check_diagnostics(
r#"
//- /src/bound.erl
-module(bound).
-include("inc.hrl").
foo(Val) ->
?A_MACRO(Val).
//- /src/inc.hrl
-define(A_MACRO(X), X=X).
"#,
)
}
#[test]
fn bound_variable_ignore_fix() {
check_fix(
r#"
//- /src/bound.erl
-module(bound).
foo() ->
AA = bar(),
A~A = bar().
"#,
expect_test::expect![[r#"
-module(bound).
foo() ->
AA = bar(),
% elp:ignore W0060 (bound_var_in_lhs)
AA = bar().
"#]],
)
}
}

View file

@ -22,7 +22,7 @@ use crate::diagnostics::DiagnosticCode;
use crate::diagnostics::FunctionCallLinter;
use crate::diagnostics::Linter;
use crate::diagnostics::Severity;
// @fb-only: use crate::diagnostics::meta_only;
// @fb-only
use crate::lazy_function_matches;
pub(crate) struct NoDebuggingFunctionLinter;
@ -52,7 +52,7 @@ impl FunctionCallLinter for NoDebuggingFunctionLinter {
lazy_function_matches![
vec![FunctionMatch::m("redbug")]
.into_iter()
// @fb-only: .chain(meta_only::debugging_function_matches().into_iter())
// @fb-only
.collect::<Vec<_>>()
]
}

View file

@ -41,7 +41,7 @@ use super::DiagnosticDescriptor;
use super::Severity;
use crate::codemod_helpers::FunctionMatch;
use crate::codemod_helpers::FunctionMatcher;
// @fb-only: use crate::diagnostics;
// @fb-only
use crate::fix;
pub(crate) static DESCRIPTOR: DiagnosticDescriptor = DiagnosticDescriptor {
@ -88,7 +88,7 @@ fn deprecated_function(diagnostics: &mut Vec<Diagnostic>, sema: &Semantic, file_
lazy_static! {
static ref DEPRECATED_FUNCTIONS: Vec<(FunctionMatch, DeprecationDetails)> = {
let matches: Vec<Vec<(FunctionMatch, DeprecationDetails)>> = vec![
// @fb-only: diagnostics::meta_only::deprecated_function_matches(),
// @fb-only
];
matches.into_iter()
.flatten()
@ -134,8 +134,8 @@ fn check_function(
);
let details = match_result.map(|(_match, details)| details.clone());
if target_def.deprecated || match_result.is_some() {
let expr_id = if let Some((hir_idx, _macro_def)) = ctx.in_macro {
hir_idx.idx
let expr_id = if let Some(expr_id) = ctx.in_macro {
expr_id.idx
} else {
ctx.item_id
};

View file

@ -8,7 +8,7 @@
* above-listed licenses.
*/
// Diagnostic: old_edoc_syntax
// Diagnostic: edoc
use elp_ide_assists::Assist;
use elp_ide_assists::helpers;
@ -31,10 +31,11 @@ use super::DiagnosticCode;
use super::GenericLinter;
use super::GenericLinterMatchContext;
use super::Linter;
use super::Severity;
pub(crate) struct OldEdocSyntaxLinter;
pub(crate) struct EdocLinter;
impl Linter for OldEdocSyntaxLinter {
impl Linter for EdocLinter {
fn id(&self) -> DiagnosticCode {
DiagnosticCode::OldEdocSyntax
}
@ -43,8 +44,11 @@ impl Linter for OldEdocSyntaxLinter {
"EDoc style comments are deprecated. Please use Markdown instead."
}
fn should_process_test_files(&self) -> bool {
false
fn severity(&self, sema: &Semantic, file_id: FileId) -> Severity {
match sema.db.is_test_suite_or_test_helper(file_id) {
Some(true) => Severity::WeakWarning,
_ => Severity::Warning,
}
}
}
@ -52,9 +56,10 @@ impl Linter for OldEdocSyntaxLinter {
pub struct Context {
header_ptr: Option<InFileAstPtr<ast::Form>>,
doc_start: TextSize,
range: TextRange,
}
impl GenericLinter for OldEdocSyntaxLinter {
impl GenericLinter for EdocLinter {
type Context = Context;
fn matches(
@ -72,6 +77,7 @@ impl GenericLinter for OldEdocSyntaxLinter {
context: Context {
header_ptr: Some(*header_ptr),
doc_start,
range: doc.range,
},
});
}
@ -82,6 +88,7 @@ impl GenericLinter for OldEdocSyntaxLinter {
context: Context {
header_ptr: Some(*header_ptr),
doc_start,
range: equiv.range,
},
});
}
@ -92,6 +99,7 @@ impl GenericLinter for OldEdocSyntaxLinter {
context: Context {
header_ptr: Some(*header_ptr),
doc_start,
range: deprecated.range,
},
});
}
@ -103,6 +111,7 @@ impl GenericLinter for OldEdocSyntaxLinter {
context: Context {
header_ptr: Some(*header_ptr),
doc_start,
range: hidden.range,
},
});
}
@ -114,7 +123,6 @@ impl GenericLinter for OldEdocSyntaxLinter {
fn fixes(
&self,
context: &Self::Context,
range: TextRange,
sema: &Semantic,
file_id: FileId,
) -> Option<Vec<Assist>> {
@ -126,12 +134,12 @@ impl GenericLinter for OldEdocSyntaxLinter {
file_id,
header,
context.doc_start,
range,
context.range,
)])
}
}
pub static LINTER: OldEdocSyntaxLinter = OldEdocSyntaxLinter;
pub static LINTER: EdocLinter = EdocLinter;
fn old_edoc_syntax_fix(
sema: &Semantic,
@ -294,6 +302,22 @@ mod tests {
)
}
#[test]
fn test_function_doc_in_test_file() {
check_diagnostics(
r#"
//- /test/main_SUITE.erl extra:test
-module(main_SUITE).
%% @doc This is the main function documentation.
%% ^^^^ 💡 weak: W0038: EDoc style comments are deprecated. Please use Markdown instead.
main() ->
dep().
dep() -> ok.
"#,
)
}
#[test]
fn test_function_doc_different_arities() {
check_diagnostics(

View file

@ -35,7 +35,9 @@ use crate::diagnostics::Linter;
use crate::fix;
#[derive(Debug, Default, Clone, PartialEq)]
pub(crate) struct MacroPrecedenceContext;
pub(crate) struct MacroPrecedenceContext {
range: TextRange,
}
pub(crate) struct MacroPrecedenceSupriseLinter;
@ -94,9 +96,10 @@ impl GenericLinter for MacroPrecedenceSupriseLinter {
{
let range = ast.range();
if range.file_id == file_id {
let context = MacroPrecedenceContext { range: range.range };
res.push(GenericLinterMatchContext {
range: range.range,
context: MacroPrecedenceContext,
context,
});
}
}
@ -110,17 +113,16 @@ impl GenericLinter for MacroPrecedenceSupriseLinter {
fn fixes(
&self,
_context: &Self::Context,
range: TextRange,
context: &Self::Context,
_sema: &Semantic,
file_id: FileId,
) -> Option<Vec<Assist>> {
let edit = add_parens_edit(&range);
let edit = add_parens_edit(&context.range);
let fix = fix(
"macro_precedence_add_parens",
"Add parens to macro call",
SourceChange::from_text_edit(file_id, edit),
range,
context.range,
);
Some(vec![fix])
}

View file

@ -75,6 +75,7 @@ impl Linter for MissingCompileWarnMissingSpec {
pub struct Context {
found: Found,
compile_option_id: Option<CompileOptionId>,
target_range: TextRange,
}
impl GenericLinter for MissingCompileWarnMissingSpec {
@ -93,6 +94,7 @@ impl GenericLinter for MissingCompileWarnMissingSpec {
context: Context {
found: Found::No,
compile_option_id: None,
target_range: DIAGNOSTIC_WHOLE_FILE_RANGE,
},
});
}
@ -147,6 +149,7 @@ impl GenericLinter for MissingCompileWarnMissingSpec {
context: Context {
found: what.0,
compile_option_id: what.1,
target_range: range,
},
});
}
@ -157,7 +160,6 @@ impl GenericLinter for MissingCompileWarnMissingSpec {
fn fixes(
&self,
context: &Self::Context,
range: TextRange,
sema: &Semantic,
file_id: FileId,
) -> Option<Vec<elp_ide_assists::Assist>> {
@ -182,7 +184,7 @@ impl GenericLinter for MissingCompileWarnMissingSpec {
"add_warn_missing_spec_all",
"Add compile option 'warn_missing_spec_all'",
edit,
range,
context.target_range,
)])
}
}

View file

@ -55,6 +55,7 @@ impl Linter for MisspelledAttributeLinter {
#[derive(Debug, Clone, PartialEq, Eq, Default)]
pub struct Context {
range: TextRange,
attr_name: String,
suggested_rename: String,
}
@ -87,6 +88,7 @@ impl GenericLinter for MisspelledAttributeLinter {
res.push(GenericLinterMatchContext {
range: attr_name_range,
context: Context {
range: attr_name_range,
attr_name: attr.name.to_string(),
suggested_rename: suggested_rename.to_string(),
},
@ -108,17 +110,16 @@ impl GenericLinter for MisspelledAttributeLinter {
fn fixes(
&self,
context: &Self::Context,
range: TextRange,
_sema: &Semantic,
file_id: FileId,
) -> Option<Vec<Assist>> {
let edit = TextEdit::replace(range, context.suggested_rename.clone());
let edit = TextEdit::replace(context.range, context.suggested_rename.clone());
let msg = format!("Change to '{}'", context.suggested_rename);
Some(vec![fix(
"fix_misspelled_attribute",
&msg,
SourceChange::from_text_edit(file_id, edit),
range,
context.range,
)])
}
}

View file

@ -27,8 +27,12 @@
//
use elp_ide_db::elp_base_db::FileId;
use fxhash::FxHashMap;
use fxhash::FxHashSet;
use hir::AnyExpr;
use hir::Expr;
use hir::FunctionClauseId;
use hir::PatId;
use hir::Semantic;
use hir::Strategy;
use hir::fold::MacroStrategy;
@ -56,7 +60,21 @@ fn mutable_variable_bug(
sema: &Semantic,
file_id: FileId,
) -> Option<()> {
let bound_vars_by_function = sema.bound_vars_by_function(file_id);
let mut bound_vars_by_function: FxHashMap<FunctionClauseId, FxHashSet<&PatId>> =
FxHashMap::default();
let bound_vars = sema.bound_vars_in_pattern_diagnostic(file_id);
bound_vars.iter().for_each(|(function_id, pat_id, _var)| {
bound_vars_by_function
.entry(function_id.value)
.and_modify(|vars| {
vars.insert(pat_id);
})
.or_insert_with(|| {
let mut vars = FxHashSet::default();
vars.insert(pat_id);
vars
});
});
sema.def_map(file_id)
.get_function_clauses()
.for_each(|(_, def)| {

View file

@ -29,9 +29,6 @@ impl Linter for NoErrorLoggerLinter {
fn severity(&self, _sema: &Semantic, _file_id: FileId) -> Severity {
Severity::Error
}
fn should_process_test_files(&self) -> bool {
false
}
}
impl FunctionCallLinter for NoErrorLoggerLinter {

View file

@ -43,13 +43,6 @@ impl Linter for UndefinedFunctionLinter {
fn should_process_generated_files(&self) -> bool {
true
}
// Ideally, we would like to report undefined functions in all files, but
// there are too many false positives in test files to do so.
// This is often due to mocked modules and test suite cleverness.
// We can revisit this decision in the future. See T249044930.
fn should_process_test_files(&self) -> bool {
false
}
}
impl FunctionCallLinter for UndefinedFunctionLinter {

View file

@ -48,7 +48,9 @@ impl Linter for UndocumentedModuleLinter {
}
#[derive(Debug, Default, Clone, PartialEq, Eq)]
pub struct Context;
pub struct Context {
module_name_range: TextRange,
}
impl GenericLinter for UndocumentedModuleLinter {
type Context = Context;
@ -69,21 +71,16 @@ impl GenericLinter for UndocumentedModuleLinter {
if module_has_no_docs {
let module_name = module_attribute.name()?;
let module_name_range = module_name.syntax().text_range();
let context = Context { module_name_range };
res.push(GenericLinterMatchContext {
range: module_name_range,
context: Context,
context,
});
}
Some(res)
}
fn fixes(
&self,
_context: &Context,
range: TextRange,
sema: &Semantic,
file_id: FileId,
) -> Option<Vec<Assist>> {
fn fixes(&self, context: &Context, sema: &Semantic, file_id: FileId) -> Option<Vec<Assist>> {
let insert_offset = helpers::moduledoc_insert_offset(sema, file_id)?;
let mut builder = SourceChangeBuilder::new(file_id);
builder.insert(insert_offset, "-moduledoc false.\n");
@ -92,7 +89,7 @@ impl GenericLinter for UndocumentedModuleLinter {
"add_moduledoc_false",
"Add `-moduledoc false.` attribute",
source_change,
range,
context.module_name_range,
);
Some(vec![fix])
}

View file

@ -27,7 +27,7 @@ use crate::codemod_helpers::CheckCallCtx;
use crate::codemod_helpers::MatchCtx;
use crate::diagnostics::FunctionCallLinter;
use crate::diagnostics::Linter;
// @fb-only: use crate::diagnostics::meta_only;
// @fb-only
use crate::fix;
use crate::lazy_function_matches;
@ -45,9 +45,9 @@ impl Linter for UnexportedFunctionLinter {
}
#[rustfmt::skip]
fn should_process_file_id(&self, _sema: &Semantic, _file_id: FileId) -> bool { // @oss-only
// @fb-only: fn should_process_file_id(&self, sema: &Semantic, file_id: FileId) -> bool {
// @fb-only
true // @oss-only
// @fb-only: meta_only::should_check_for_unexported(sema, file_id)
// @fb-only
}
}

View file

@ -152,7 +152,7 @@ fn replace_include_path(
#[cfg(test)]
mod tests {
use elp_ide_db::DiagnosticCode;
// @fb-only: use elp_ide_db::meta_only::MetaOnlyDiagnosticCode;
// @fb-only
use expect_test::Expect;
use expect_test::expect;
@ -173,7 +173,7 @@ mod tests {
#[track_caller]
fn check_fix(fixture_before: &str, fixture_after: Expect) {
let config = DiagnosticsConfig::default()
// @fb-only: .disable(DiagnosticCode::MetaOnly(MetaOnlyDiagnosticCode::MalformedInclude))
// @fb-only
.disable(DiagnosticCode::UnusedInclude);
tests::check_fix_with_config(config, fixture_before, fixture_after)
}

View file

@ -137,7 +137,6 @@ impl GenericLinter for UnusedIncludeLinter {
fn fixes(
&self,
context: &Self::Context,
_range: TextRange,
_sema: &Semantic,
file_id: FileId,
) -> Option<Vec<Assist>> {

View file

@ -88,13 +88,7 @@ impl GenericLinter for UnusedMacroLinter {
Some(DiagnosticTag::Unused)
}
fn fixes(
&self,
context: &Context,
_range: TextRange,
_sema: &Semantic,
file_id: FileId,
) -> Option<Vec<Assist>> {
fn fixes(&self, context: &Context, _sema: &Semantic, file_id: FileId) -> Option<Vec<Assist>> {
Some(vec![delete_unused_macro(
file_id,
context.delete_range,

View file

@ -415,7 +415,6 @@ mod tests {
config,
&extra_diags,
r#"
//- expect_parse_errors
-module(main).
-export([foo/0,bar/0]).

View file

@ -15,9 +15,9 @@ use elp_syntax::AstNode;
use hir::InFile;
use hir::Semantic;
// @fb-only: use crate::meta_only::exdoc_links;
// @fb-only
// @fb-only: mod meta_only;
// @fb-only
mod otp_links;
#[derive(Debug, Clone, PartialEq, Eq)]
@ -40,10 +40,10 @@ pub(crate) fn external_docs(db: &RootDatabase, position: &FilePosition) -> Optio
if let Some(class) = SymbolClass::classify(&sema, in_file_token.clone()) {
class.iter().for_each(|def| {
otp_links::links(&mut doc_links, &sema, &def);
// @fb-only: exdoc_links::links(&mut doc_links, &sema, &def);
// @fb-only
});
}
// @fb-only: meta_only::links(&mut doc_links, node, position);
// @fb-only
Some(doc_links)
}

View file

@ -83,7 +83,7 @@ mod tests {
check(
r#"
-module(foo).
bar() -> ?L~INE.
-bar() -> ?L~INE.
"#,
expect![[r#"
LINE
@ -97,7 +97,7 @@ bar() -> ?L~INE.
check(
r#"
-module(foo).
bar() -> ?F~ILE.
-bar() -> ?F~ILE.
"#,
expect![[r#"
FILE
@ -420,7 +420,7 @@ baz() ->
maps:get(type, ExpectedQr, missing_expected_type);
_ ->
Type
end
end,
).
baz() ->
?asser~tQrs(AliceWID, ?WA_QR_TYPE_MESSAGE, []),

View file

@ -259,7 +259,6 @@ main() ->
fn param_hints_variables_missing_param() {
check_params(
r#"
//- expect_parse_errors
-module(main).~
-compile(export_all).
sum(A, B) -> A + B.

View file

@ -110,7 +110,7 @@ pub mod diagnostics;
pub mod diagnostics_collection;
pub mod diff;
mod highlight_related;
// @fb-only: pub mod meta_only;
// @fb-only
pub use annotations::Annotation;
pub use annotations::AnnotationKind;
@ -251,9 +251,9 @@ impl Analysis {
})
}
pub fn should_eqwalize(&self, file_id: FileId) -> Cancellable<bool> {
pub fn should_eqwalize(&self, file_id: FileId, include_tests: bool) -> Cancellable<bool> {
let is_in_app = self.file_app_type(file_id).ok() == Some(Some(AppType::App));
Ok(is_in_app && self.is_eqwalizer_enabled(file_id)?)
Ok(is_in_app && self.is_eqwalizer_enabled(file_id, include_tests)?)
}
/// Computes the set of eqwalizer diagnostics for the given files,
@ -383,8 +383,8 @@ impl Analysis {
/// - the app (the module belongs to) has a `.eqwalizer` marker in its root
/// - or the module has `-typing([eqwalizer]).` pragma
/// - or the whole project has `enable_all=true` in its `.elp.toml` file
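/// For example (illustrative, assuming the standard `[eqwalizer]` table; only
/// the `enable_all` key comes from the list above), project-wide enablement in
/// `.elp.toml` would look like:
///
///   [eqwalizer]
///   enable_all = true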
pub fn is_eqwalizer_enabled(&self, file_id: FileId) -> Cancellable<bool> {
self.with_db(|db| db.is_eqwalizer_enabled(file_id))
pub fn is_eqwalizer_enabled(&self, file_id: FileId, include_tests: bool) -> Cancellable<bool> {
self.with_db(|db| db.is_eqwalizer_enabled(file_id, include_tests))
}
/// ETF for the module's abstract forms

View file

@ -194,53 +194,35 @@ pub fn rename_var(
#[cfg(test)]
pub(crate) mod tests {
use elp_ide_db::RootDatabase;
use elp_ide_db::elp_base_db::AnchoredPathBuf;
use elp_ide_db::elp_base_db::FileId;
use elp_ide_db::elp_base_db::VfsPath;
use elp_ide_db::elp_base_db::assert_eq_text;
use elp_ide_db::elp_base_db::fixture::ChangeFixture;
use elp_ide_db::elp_base_db::fixture::WithFixture as _;
use elp_ide_db::source_change::FileSystemEdit;
use elp_ide_db::text_edit::TextEdit;
use elp_project_model::test_fixture::trim_indent;
use elp_syntax::AstNode;
use elp_syntax::algo;
use elp_syntax::ast;
use fxhash::FxHashSet;
use hir::AnyExprId;
use hir::InFile;
use hir::Semantic;
use super::rename_var;
use crate::AnalysisHost;
use crate::fixture;
#[track_caller]
pub(crate) fn check_rename(new_name: &str, fixture_before: &str, fixture_after_str: &str) {
let fixture_after_str = &trim_indent(fixture_after_str);
let analysis_after = fixture::multi_file(fixture_after_str);
let (db_before, fixture) = RootDatabase::with_fixture(fixture_before);
let host_before = AnalysisHost { db: db_before };
let analysis = host_before.analysis();
let position = fixture.position();
let (db_after, fixture_after) = RootDatabase::with_fixture(fixture_after_str);
let host_after = AnalysisHost { db: db_after };
let analysis_after = host_after.analysis();
let (analysis, position, _) = fixture::position(fixture_before);
let rename_result = analysis
.rename(position, new_name)
.unwrap_or_else(|err| panic!("Rename to '{new_name}' was cancelled: {err}"));
match rename_result {
Ok(source_change) => {
let mut file_ids: FxHashSet<FileId> = FxHashSet::default();
for edit in source_change.source_file_edits {
let mut text_edit_builder = TextEdit::builder();
let file_id = edit.0;
// New and old file_id are the same
file_ids.insert(file_id);
for indel in edit.1.into_iter() {
text_edit_builder.replace(indel.delete, indel.insert);
}
@ -250,82 +232,6 @@ pub(crate) mod tests {
let expected = analysis_after.file_text(file_id).unwrap().to_string();
assert_eq_text!(&*expected, &*result);
}
for op in source_change.file_system_edits {
let expected;
let new_file_id;
match op {
FileSystemEdit::CreateFile {
dst,
initial_contents,
} => {
let new_file =
find_new_file_id(&fixture_after, &dst).unwrap_or_else(|| {
panic!(
"Fixture after:could not find file created as '{}'",
&dst.path
)
});
new_file_id = *new_file.1;
expected = initial_contents;
let actual = analysis_after.file_text(new_file_id).unwrap().to_string();
assert_eq_text!(&*expected, &*actual);
}
FileSystemEdit::MoveFile { src: _, dst } => {
let new_file =
find_new_file_id(&fixture_after, &dst).unwrap_or_else(|| {
panic!(
"Fixture after:could not find file renamed to '{}'",
&dst.path
)
});
new_file_id = *new_file.1;
// We simply record the new file id for checking in `fixture_after`.
// The expected value will be updated by the new_file_edits below,
// and the result asserted there
}
}
file_ids.insert(new_file_id);
}
for (dst, op) in source_change.new_file_edits {
// When renaming a module, we move the original file, then apply fixup edits
// to the new file
let anchored_dst = AnchoredPathBuf {
anchor: dst.anchor,
path: dst.path,
};
let new_file =
find_new_file_id(&fixture_after, &anchored_dst).unwrap_or_else(|| {
panic!(
"Fixture after:could not find file created as '{}'",
&anchored_dst.path
)
});
let mut text_edit_builder = TextEdit::builder();
let file_id = *new_file.1;
// New and old file_id are the same
file_ids.insert(file_id);
for indel in op.iter() {
text_edit_builder.replace(indel.delete, indel.insert.to_string());
}
let mut result = analysis.file_text(file_id).unwrap().to_string();
let edit = text_edit_builder.finish();
edit.apply(&mut result);
let expected = analysis_after.file_text(file_id).unwrap().to_string();
assert_eq_text!(&*expected, &*result);
}
// Check the balance of the expectations in the new fixture.
for file_id in &fixture_after.files {
if !file_ids.contains(file_id) {
let actual = analysis_after.file_text(*file_id).unwrap().to_string();
let expected = if fixture.files.contains(file_id) {
analysis.file_text(*file_id).unwrap().to_string()
} else {
format!("File {:?} not present in original fixture", file_id)
};
assert_eq_text!(&*expected, &*actual);
}
}
}
Err(err) => {
if fixture_after_str.starts_with("error:") {
@ -341,16 +247,6 @@ pub(crate) mod tests {
};
}
fn find_new_file_id<'a>(
fixture: &'a ChangeFixture,
dst: &'a AnchoredPathBuf,
) -> Option<(&'a VfsPath, &'a FileId)> {
fixture
.files_by_path
.iter()
.find(|(name, _)| name.as_path().unwrap().to_string().ends_with(&dst.path))
}
#[test]
fn test_rename_var_1() {
check_rename("Y", r#"main() -> I~ = 1."#, r#"main() -> Y = 1."#);
@ -1239,326 +1135,6 @@ pub(crate) mod tests {
);
}
// ---------------------------------
// Renaming modules
#[test]
fn rename_module_fails_name_exists() {
check_rename(
"main_2",
r#"
//- /app_a/src/main.erl
-module(ma~in).
//- /app_a_/src/main_2.erl
-module(main_2).
"#,
r#"error: module 'main_2' already exists"#,
);
}
#[test]
fn rename_module_fails_bad_name_1() {
check_rename(
"Main",
r#"
//- /app_a/src/main.erl
-module(ma~in).
//- /app_a_/src/main_2.erl
-module(main_2).
"#,
r#"error: Invalid new module name: 'Main'"#,
);
}
#[test]
fn rename_module_simple() {
check_rename(
"main_2",
r#"
//- /app_a/src/main.erl
-module(ma~in).
"#,
r#"
//- /app_a/src/main_2.erl
-module(main_2).
"#,
);
}
#[test]
fn rename_module_fails_dup_name() {
check_rename(
"main_2",
r#"
//- /app_a/src/main_2.erl
-module(main_2).
-export([foo/0]).
foo() -> ok.
//- /app_a/src/main.erl
-module(ma~in).
-export([foo/0]).
foo() -> ok.
bar() -> main:foo().
baz() -> main:bar().
//- /app_a/src/other.erl
-module(other).
-export([bar/0]).
bar() -> main:foo().
"#,
r#"error: module 'main_2' already exists"#,
);
}
#[test]
fn rename_module_with_usage_internal() {
check_rename(
"main_2",
r#"
//- /app_a/src/main.erl
-module(ma~in).
-export([foo/0]).
foo() -> ok.
bar() -> main:foo().
baz() -> main:bar().
//- /app_a/src/other.erl
-module(other).
-export([bar/0]).
bar() -> main:foo().
"#,
//------------------
r#"
//- /app_a/src/main_2.erl
-module(main_2).
-export([foo/0]).
foo() -> ok.
bar() -> main_2:foo().
baz() -> main_2:bar().
//- /app_a/src/other.erl
-module(other).
-export([bar/0]).
bar() -> main_2:foo().
"#,
);
}
#[test]
fn rename_module_with_usage_type() {
// TODO: check for compile errors in the fixture
check_rename(
"main_3",
r#"
//- /app_a/src/main.erl
-module(ma~in).
-export_type([foo/0]).
-type foo() :: ok.
//- /app_a/src/other.erl
-module(other).
-export([bar/0]).
-spec bar() -> main:foo().
bar() -> ok.
"#,
r#"
//- /app_a/src/main_3.erl
-module(main_3).
-export_type([foo/0]).
-type foo() :: ok.
//- /app_a/src/other.erl
-module(other).
-export([bar/0]).
-spec bar() -> main_3:foo().
bar() -> ok.
"#,
);
}
#[test]
fn rename_module_with_usage_record() {
check_rename(
"main_3",
r#"
//- /app_a/src/main.erl
-module(ma~in).
-export_type([foo/0]).
-type foo() :: ok.
//- /app_a/src/other.erl
-module(other).
-export([bar/0]).
-spec bar() -> main:foo().
bar() -> ok.
-record(main, {field :: main:foo()}).
"#,
//------------------
r#"
//- /app_a/src/main_3.erl
-module(main_3).
-export_type([foo/0]).
-type foo() :: ok.
//- /app_a/src/other.erl
-module(other).
-export([bar/0]).
-spec bar() -> main_3:foo().
bar() -> ok.
-record(main, {field :: main_3:foo()}).
"#,
);
}
#[test]
fn rename_module_with_usage_fun_arg() {
check_rename(
"main_3",
r#"
//- /app_a/src/main.erl
-module(ma~in).
-export_type([foo/0]).
-type foo() :: ok.
//- /app_a/src/other.erl
-module(other).
-export([bar/0]).
-spec bar() -> main:foo().
bar() ->
meck:new(main, [passthrough]),
meck:new([other, main] , [passthrough]),
meck:unload(main),
apply(main, foo, []),
ok.
-record(main, {field :: main:foo()}).
"#,
//------------------
r#"
//- /app_a/src/main_3.erl
-module(main_3).
-export_type([foo/0]).
-type foo() :: ok.
//- /app_a/src/other.erl
-module(other).
-export([bar/0]).
-spec bar() -> main_3:foo().
bar() ->
meck:new(main_3, [passthrough]),
meck:new([other, main_3] , [passthrough]),
meck:unload(main_3),
apply(main_3, foo, []),
ok.
-record(main, {field :: main_3:foo()}).
"#,
);
}
#[test]
fn rename_module_with_usage_fun() {
check_rename(
"main_3",
r#"
//- /app_a/src/main.erl
-module(ma~in).
-export([foo/1]).
foo(X) -> {X}.
//- /app_a/src/other.erl
-module(other).
-export([bar/1]).
-spec bar(term()) -> ok.
bar(UStrings) ->
Jobs = [{fun main:foo/1, [U], []} || U <- UStrings],
ok.
"#,
r#"
//- /app_a/src/main_3.erl
-module(main_3).
-export([foo/1]).
foo(X) -> {X}.
//- /app_a/src/other.erl
-module(other).
-export([bar/1]).
-spec bar(term()) -> ok.
bar(UStrings) ->
Jobs = [{fun main_3:foo/1, [U], []} || U <- UStrings],
ok.
"#,
);
}
#[test]
fn rename_module_with_usage_fun_as_module() {
check_rename(
"main_3",
r#"
//- /app_a/src/main.erl
-module(ma~in).
-export([main/1]).
main(X) -> {X}.
//- /app_a/src/other.erl
-module(other).
-export([bar/1]).
-spec bar(term()) -> ok.
bar(UStrings) ->
Jobs = [{fun main:main/1, [U], []} || U <- UStrings],
ok.
"#,
r#"
//- /app_a/src/main_3.erl
-module(main_3).
-export([main/1]).
main(X) -> {X}.
//- /app_a/src/other.erl
-module(other).
-export([bar/1]).
-spec bar(term()) -> ok.
bar(UStrings) ->
Jobs = [{fun main_3:main/1, [U], []} || U <- UStrings],
ok.
"#,
);
}
#[test]
fn rename_module_with_usage_define() {
check_rename(
"main_3",
r#"
//- /app_a/src/main.erl
-module(ma~in).
-export([foo/1]).
foo(X) -> {X}.
//- /app_a/src/definer.hrl
-define(FOO(X), main:foo(X)).
//- /app_a/src/other.erl
-module(other).
-include("definer.hrl").
-export([bar/0]).
-spec bar(term()) -> ok.
bar(U) ->
main:foo(U),
?FOO(U),
ok.
"#,
//------------------
r#"
//- /app_a/src/main_3.erl
-module(main_3).
-export([foo/1]).
foo(X) -> {X}.
//- /app_a/src/definer.hrl
-define(FOO(X), main_3:foo(X)).
//- /app_a/src/other.erl
-module(other).
-include("definer.hrl").
-export([bar/0]).
-spec bar(term()) -> ok.
bar(U) ->
main_3:foo(U),
?FOO(U),
ok.
"#,
);
}
// ---------------------------------
#[track_caller]

View file

@ -243,7 +243,7 @@ mod tests {
//- /my_app/src/runnables.erl
~
-module(runnables).
-export([all/0]).
-export([all/]).
main() ->
ok.
"#,

View file

@ -311,7 +311,6 @@ mod tests {
fn test_fn_signature_local_two_args() {
check(
r#"
//- expect_parse_errors
-module(main).
-spec add(integer(), integer()) -> integer().
@ -344,7 +343,6 @@ main() ->
);
check(
r#"
//- expect_parse_errors
-module(main).
-spec add(integer(), integer()) -> integer().
@ -377,7 +375,6 @@ main() ->
);
check(
r#"
//- expect_parse_errors
-module(main).
-spec add(integer(), integer()) -> integer().
@ -414,7 +411,6 @@ main() ->
fn test_fn_signature_remote_two_args() {
check(
r#"
//- expect_parse_errors
//- /one.erl
-module(one).
@ -453,7 +449,6 @@ main() ->
);
check(
r#"
//- expect_parse_errors
//- /one.erl
-module(one).
@ -492,7 +487,6 @@ main() ->
);
check(
r#"
//- expect_parse_errors
//- /one.erl
-module(one).
@ -535,7 +529,6 @@ main() ->
fn test_fn_signature_quoted_remote_two_args() {
check(
r#"
//- expect_parse_errors
//- /Elixir.One.erl
-module('Elixir.One').
@ -583,7 +576,6 @@ main() ->
fn test_fn_signature_unclosed_call() {
check(
r#"
//- expect_parse_errors
-module(main).
-compile(export_all).
@ -634,7 +626,6 @@ main() ->
fn test_fn_signature_doc() {
check(
r#"
//- expect_parse_errors
-module(main).
-compile(export_all).
@ -694,7 +685,6 @@ main() ->
if supports_eep59_doc_attributes() {
check(
r#"
//- expect_parse_errors
-module(main).
-compile(export_all).
@ -763,7 +753,6 @@ main() ->
fn test_fn_signature_local_imported() {
check(
r#"
//- expect_parse_errors
//- /one.erl
-module(one).
-compile(export_all).
@ -805,7 +794,6 @@ main() ->
fn test_fn_signature_spec_arg_names() {
check(
r#"
//- expect_parse_errors
//- /one.erl
-module(one).
-compile(export_all).

View file

@ -378,7 +378,6 @@ pub(crate) fn check_diagnostics(fixture: &str) {
.disable(DiagnosticCode::UnspecificInclude)
.disable(DiagnosticCode::BinaryStringToSigil)
.disable(DiagnosticCode::HirUnresolvedMacro)
.disable(DiagnosticCode::BoundVarInLhs)
.disable(DiagnosticCode::HirUnresolvedInclude);
check_diagnostics_with_config(config, fixture)
}
@ -696,7 +695,6 @@ mod test {
fn filtered_diagnostics_passes_syntax_errors() {
check_filtered_diagnostics(
r#"
//- expect_parse_errors
%%<^^^^^^^^^^^^ 💡 error: L1201: no module definition
foo() ->
bug bug.

View file

@ -166,7 +166,6 @@ mod test {
fn test_error_recovery() {
check(
r#"
//- expect_parse_errors
//- /src/sample.erl
-module(sample1).
% U.S. English
@ -181,7 +180,6 @@ mod test {
check(
r#"
//- expect_parse_errors
//- /src/sample.erl
-module(sample1).
% U.K. English
@ -199,7 +197,6 @@ mod test {
fn test_typing_attribute() {
check(
r#"
//- expect_parse_errors
-module(sample).
-typ~
"#,
@ -214,7 +211,6 @@ mod test {
fn test_module_attribute() {
check(
r#"
//- expect_parse_errors
-mod~
"#,
None,
@ -228,7 +224,6 @@ mod test {
fn test_module_attribute_hyphen() {
check(
r#"
//- expect_parse_errors
//- /src/my-module.erl
-mod~
"#,
@ -243,7 +238,6 @@ mod test {
fn test_module_attribute_at() {
check(
r#"
//- expect_parse_errors
//- /src/my@module.erl
-mod~
"#,
@ -258,7 +252,6 @@ mod test {
fn test_module_attribute_underscore() {
check(
r#"
//- expect_parse_errors
//- /src/my_module.erl
-mod~
"#,
@ -273,7 +266,6 @@ mod test {
fn test_module_attribute_uppercase() {
check(
r#"
//- expect_parse_errors
//- /src/Module.erl
-mod~
"#,
@ -288,7 +280,6 @@ mod test {
fn test_module_attribute_uppercase_middle() {
check(
r#"
//- expect_parse_errors
//- /src/moDule.erl
-mod~
"#,

View file

@ -281,7 +281,6 @@ mod ctx_tests {
fn expr_ctx() {
assert_eq!(
ctx(r#"
//- expect_parse_errors
-module(sample).
test() ->
~X.
@ -291,7 +290,6 @@ mod ctx_tests {
assert_eq!(
ctx(r#"
//- expect_parse_errors
-module(sample).
test() ->
case 1 of.
@ -303,7 +301,6 @@ mod ctx_tests {
assert_eq!(
ctx(r#"
//- expect_parse_errors
-module(sample).
test() ->
fun(_) -> ~X end.
@ -313,7 +310,6 @@ mod ctx_tests {
assert_eq!(
ctx(r#"
//- expect_parse_errors
-module(sample).
test() ->
try 1
@ -357,7 +353,6 @@ mod ctx_tests {
fn ctx_pattern() {
assert_eq!(
ctx(r#"
//- expect_parse_errors
-module(sample).
test(Y, X) ->
~Y = X.
@ -367,7 +362,6 @@ mod ctx_tests {
assert_eq!(
ctx(r#"
//- expect_parse_errors
-module(sample).
test(X) ->
case rand:uniform(1) of
@ -379,7 +373,6 @@ mod ctx_tests {
assert_eq!(
ctx(r#"
//- expect_parse_errors
-module(sample).
test(X) ->
fun(X~) -> 1 end.
@ -389,7 +382,6 @@ mod ctx_tests {
assert_eq!(
ctx(r#"
//- expect_parse_errors
-module(sample).
test() ->
receive
@ -401,7 +393,6 @@ mod ctx_tests {
assert_eq!(
ctx(r#"
//- expect_parse_errors
-module(sample).
test() ->
try [1]
@ -416,7 +407,6 @@ mod ctx_tests {
assert_eq!(
ctx(r#"
//- expect_parse_errors
-module(sample).
test(X) ->
if
@ -439,7 +429,6 @@ mod ctx_tests {
assert_eq!(
ctx(r#"
//- expect_parse_errors
-module(sample).
test(Y, X) ->
try ok of
@ -451,7 +440,6 @@ mod ctx_tests {
assert_eq!(
ctx(r#"
//- expect_parse_errors
-module(sample).
test(Y, X) ->
try ok of
@ -469,7 +457,6 @@ mod ctx_tests {
fn ctx_pattern_error_recovery_wip() {
assert_eq!(
ctx(r#"
//- expect_parse_errors
-module(sample).
test(Y, X) ->
try ok of
@ -482,7 +469,6 @@ mod ctx_tests {
assert_eq!(
ctx(r#"
//- expect_parse_errors
-module(sample).
test(Y, X) ->
try ok of
@ -499,7 +485,6 @@ mod ctx_tests {
fn test_type_param_ctx() {
assert_eq!(
ctx(r#"
//- expect_parse_errors
-module(sample).
-type ty(s~) :: ok.
"#),
@ -511,7 +496,6 @@ mod ctx_tests {
fn test_export_ctx() {
assert_eq!(
ctx(r#"
//- expect_parse_errors
-module(sample).
-export([
f~
@ -525,7 +509,6 @@ mod ctx_tests {
fn test_export_type_ctx() {
assert_eq!(
ctx(r#"
//- expect_parse_errors
-module(sample).
-export_type([
t~
@ -539,7 +522,6 @@ mod ctx_tests {
fn test_spec_ctx() {
assert_eq!(
ctx(r#"
//- expect_parse_errors
-module(sample).
-spec t~
table() -> ok.
@ -553,7 +535,6 @@ mod ctx_tests {
fn test_type_ctx() {
assert_eq!(
ctx(r#"
//- expect_parse_errors
-module(sample).
-spec test() -> ~
test() -> ok.
@ -563,7 +544,6 @@ mod ctx_tests {
assert_eq!(
ctx(r#"
//- expect_parse_errors
-module(sample).
-spec test() -> o~k
test() -> ok.
@ -573,7 +553,6 @@ mod ctx_tests {
assert_eq!(
ctx(r#"
//- expect_parse_errors
-module(sample).
-spec test(o~) -> ok.
test() -> ok.
@ -583,7 +562,6 @@ mod ctx_tests {
assert_eq!(
ctx(r#"
//- expect_parse_errors
-module(sample).
-record(foo, {field1, field2 :: X~}).
"#),
@ -592,7 +570,6 @@ mod ctx_tests {
assert_eq!(
ctx(r#"
//- expect_parse_errors
-module(sample).
-opaque test() :: ~.
"#),
@ -601,7 +578,6 @@ mod ctx_tests {
assert_eq!(
ctx(r#"
//- expect_parse_errors
-module(sample).
-nominal test() :: ~.
"#),
@ -610,7 +586,6 @@ mod ctx_tests {
assert_eq!(
ctx(r#"
//- expect_parse_errors
-module(sample).
-type test() :: m~
"#),
@ -619,7 +594,6 @@ mod ctx_tests {
assert_eq!(
ctx(r#"
//- expect_parse_errors
-module(sample).
-spec test() -> ~ok.
"#),
@ -631,7 +605,6 @@ mod ctx_tests {
fn test_ctx_error_recovery() {
assert_eq!(
ctx(r#"
//- expect_parse_errors
-module(sample).
test() ->
~
@ -641,7 +614,6 @@ mod ctx_tests {
assert_eq!(
ctx(r#"
//- expect_parse_errors
-module(sample).
test() ->
X + ~
@ -651,7 +623,6 @@ mod ctx_tests {
assert_eq!(
ctx(r#"
//- expect_parse_errors
-module(sample).
test() ->
X + ~.
@ -661,7 +632,6 @@ mod ctx_tests {
assert_eq!(
ctx(r#"
//- expect_parse_errors
-module(sample).
test() ->
case rand:uniform(1) of
@ -673,7 +643,6 @@ mod ctx_tests {
assert_eq!(
ctx(r#"
//- expect_parse_errors
-module(sample).
test() ->
(erlang:term_to_binary(~
@ -684,7 +653,6 @@ mod ctx_tests {
assert_eq!(
ctx(r#"
//- expect_parse_errors
-module(sample).
test() ->
(erlang:term_to_binary(~.
@ -695,7 +663,6 @@ mod ctx_tests {
assert_eq!(
ctx(r#"
//- expect_parse_errors
-module(sample).
-type ty() :: ~
"#),
@ -704,7 +671,6 @@ mod ctx_tests {
assert_eq!(
ctx(r#"
//- expect_parse_errors
-module(sample).
-type ty() :: l~.
"#),
@ -713,7 +679,6 @@ mod ctx_tests {
assert_eq!(
ctx(r#"
//- expect_parse_errors
-module(sample).
-record(rec, {field = lists:map(fun(X) -> X + 1 end, [1, ~])}).
"#),

View file

@ -83,7 +83,6 @@ mod test {
check(
r#"
//- expect_parse_errors
-module(sample).
-export([
foo~
@ -107,7 +106,6 @@ mod test {
check(
r#"
//- expect_parse_errors
-module(sample).
-export([
function_a/0,

View file

@ -79,7 +79,6 @@ mod test {
check(
r#"
//- expect_parse_errors
-module(sample).
-export_type([
foo~

Some files were not shown because too many files have changed in this diff.