diff --git a/Cargo.lock b/Cargo.lock index 44a7c099710fc..3c7e136c75827 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -128,12 +128,6 @@ version = "1.0.96" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6b964d184e89d9b6b67dd2715bc8e74cf3107fb2b529990c90cf517326150bf4" -[[package]] -name = "arc-swap" -version = "1.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69f7f8c3906b62b754cd5326047894316021dcfe5a194c8ea52bdd94934a3457" - [[package]] name = "argfile" version = "0.2.1" @@ -3192,6 +3186,7 @@ dependencies = [ "serde_json", "shellexpand", "thiserror 2.0.11", + "toml", "tracing", "tracing-subscriber", ] @@ -3329,9 +3324,8 @@ checksum = "6ea1a2d0a644769cc99faa24c3ad26b379b786fe7c36fd3c546254801650e6dd" [[package]] name = "salsa" version = "0.18.0" -source = "git+https://github.com/salsa-rs/salsa.git?rev=687251fb50b4893dc373a7e2609ceaefb8accbe7#687251fb50b4893dc373a7e2609ceaefb8accbe7" +source = "git+https://github.com/salsa-rs/salsa.git?rev=99be5d9917c3dd88e19735a82ef6bf39ba84bd7e#99be5d9917c3dd88e19735a82ef6bf39ba84bd7e" dependencies = [ - "arc-swap", "boxcar", "compact_str", "crossbeam-queue", @@ -3351,12 +3345,12 @@ dependencies = [ [[package]] name = "salsa-macro-rules" version = "0.1.0" -source = "git+https://github.com/salsa-rs/salsa.git?rev=687251fb50b4893dc373a7e2609ceaefb8accbe7#687251fb50b4893dc373a7e2609ceaefb8accbe7" +source = "git+https://github.com/salsa-rs/salsa.git?rev=99be5d9917c3dd88e19735a82ef6bf39ba84bd7e#99be5d9917c3dd88e19735a82ef6bf39ba84bd7e" [[package]] name = "salsa-macros" version = "0.18.0" -source = "git+https://github.com/salsa-rs/salsa.git?rev=687251fb50b4893dc373a7e2609ceaefb8accbe7#687251fb50b4893dc373a7e2609ceaefb8accbe7" +source = "git+https://github.com/salsa-rs/salsa.git?rev=99be5d9917c3dd88e19735a82ef6bf39ba84bd7e#99be5d9917c3dd88e19735a82ef6bf39ba84bd7e" dependencies = [ "heck", "proc-macro2", @@ -3991,6 +3985,7 @@ version = "0.3.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e8189decb5ac0fa7bc8b96b7cb9b2701d60d48805aca84a238004d665fcc4008" dependencies = [ + "chrono", "matchers", "nu-ansi-term 0.46.0", "once_cell", diff --git a/Cargo.toml b/Cargo.toml index 37e24dc97d918..d680dfd99c87a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -4,7 +4,7 @@ resolver = "2" [workspace.package] edition = "2021" -rust-version = "1.80" +rust-version = "1.83" homepage = "https://docs.astral.sh/ruff" documentation = "https://docs.astral.sh/ruff" repository = "https://github.com/astral-sh/ruff" @@ -125,7 +125,7 @@ rayon = { version = "1.10.0" } regex = { version = "1.10.2" } rustc-hash = { version = "2.0.0" } # When updating salsa, make sure to also update the revision in `fuzz/Cargo.toml` -salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "687251fb50b4893dc373a7e2609ceaefb8accbe7" } +salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "99be5d9917c3dd88e19735a82ef6bf39ba84bd7e" } schemars = { version = "0.8.16" } seahash = { version = "4.1.0" } serde = { version = "1.0.197", features = ["derive"] } diff --git a/crates/red_knot/tests/file_watching.rs b/crates/red_knot/tests/file_watching.rs index 24eed08e09f40..a0a77f6719ebf 100644 --- a/crates/red_knot/tests/file_watching.rs +++ b/crates/red_knot/tests/file_watching.rs @@ -462,6 +462,41 @@ fn new_ignored_file() -> anyhow::Result<()> { Ok(()) } +#[test] +fn new_non_project_file() -> anyhow::Result<()> { + let mut case = setup_with_options([("bar.py", "")], |context| { + Some(Options { + environment: 
Some(EnvironmentOptions { + extra_paths: Some(vec![RelativePathBuf::cli( + context.join_root_path("site_packages"), + )]), + ..EnvironmentOptions::default() + }), + ..Options::default() + }) + })?; + + let bar_path = case.project_path("bar.py"); + let bar_file = case.system_file(&bar_path).unwrap(); + + assert_eq!(&case.collect_project_files(), &[bar_file]); + + // Add a file to site packages + let black_path = case.root_path().join("site_packages/black.py"); + std::fs::write(black_path.as_std_path(), "print('Hello')")?; + + let changes = case.stop_watch(event_for_file("black.py")); + + case.apply_changes(changes); + + assert!(case.system_file(&black_path).is_ok()); + + // The file should not have been added to the project files + assert_eq!(&case.collect_project_files(), &[bar_file]); + + Ok(()) +} + #[test] fn changed_file() -> anyhow::Result<()> { let foo_source = "print('Hello, world!')"; @@ -1075,6 +1110,7 @@ fn hard_links_in_project() -> anyhow::Result<()> { assert_eq!(source_text(case.db(), foo).as_str(), "print('Version 1')"); assert_eq!(source_text(case.db(), bar).as_str(), "print('Version 1')"); + assert_eq!(case.collect_project_files(), &[bar, foo]); // Write to the hard link target. update_file(foo_path, "print('Version 2')").context("Failed to update foo.py")?; @@ -1354,6 +1390,8 @@ mod unix { ); assert_eq!(baz.file().path(case.db()).as_system_path(), Some(&*bar_baz)); + assert_eq!(case.collect_project_files(), &[patched_bar_baz_file]); + // Write to the symlink target. update_file(&patched_bar_baz, "def baz(): print('Version 2')") .context("Failed to update bar/baz.py")?; @@ -1389,6 +1427,7 @@ mod unix { bar_baz_text = bar_baz_text.as_str() ); + assert_eq!(case.collect_project_files(), &[patched_bar_baz_file]); Ok(()) } @@ -1469,6 +1508,8 @@ mod unix { Some(&*baz_original) ); + assert_eq!(case.collect_project_files(), &[]); + // Write to the symlink target. update_file(&baz_original, "def baz(): print('Version 2')") .context("Failed to update bar/baz.py")?; @@ -1494,6 +1535,8 @@ mod unix { "def baz(): print('Version 2')" ); + assert_eq!(case.collect_project_files(), &[]); + Ok(()) } } diff --git a/crates/red_knot_project/src/db/changes.rs b/crates/red_knot_project/src/db/changes.rs index 8c7111ed43aba..a910afc5166c9 100644 --- a/crates/red_knot_project/src/db/changes.rs +++ b/crates/red_knot_project/src/db/changes.rs @@ -208,11 +208,12 @@ impl ProjectDatabase { return WalkState::Continue; } - if entry - .path() - .extension() - .and_then(PySourceType::try_from_extension) - .is_some() + if entry.path().starts_with(&project_path) + && entry + .path() + .extension() + .and_then(PySourceType::try_from_extension) + .is_some() { let mut paths = added_paths.lock().unwrap(); diff --git a/crates/red_knot_project/src/metadata.rs b/crates/red_knot_project/src/metadata.rs index 363c5fba471aa..d6e174059e0d8 100644 --- a/crates/red_knot_project/src/metadata.rs +++ b/crates/red_knot_project/src/metadata.rs @@ -77,10 +77,10 @@ impl ProjectMetadata { // If the `options` don't specify a python version but the `project.requires-python` field is set, // use that as a lower bound instead. if let Some(project) = project { - if !options + if options .environment .as_ref() - .is_some_and(|env| env.python_version.is_some()) + .is_none_or(|env| env.python_version.is_none()) { if let Some(requires_python) = project.resolve_requires_python_lower_bound()? 
{ let mut environment = options.environment.unwrap_or_default(); diff --git a/crates/red_knot_python_semantic/resources/mdtest/call/union.md b/crates/red_knot_python_semantic/resources/mdtest/call/union.md index 086bfa8447da3..d2ae8875a88cf 100644 --- a/crates/red_knot_python_semantic/resources/mdtest/call/union.md +++ b/crates/red_knot_python_semantic/resources/mdtest/call/union.md @@ -56,6 +56,7 @@ def _(flag: bool, flag2: bool): else: def f() -> int: return 1 + # TODO we should mention all non-callable elements of the union # error: [call-non-callable] "Object of type `Literal[1]` is not callable" # revealed: int | Unknown reveal_type(f()) @@ -108,3 +109,38 @@ def _(flag: bool): x = f(3) reveal_type(x) # revealed: Unknown ``` + +## Union of binding errors + +```py +def f1(): ... +def f2(): ... +def _(flag: bool): + if flag: + f = f1 + else: + f = f2 + + # TODO: we should show all errors from the union, not arbitrarily pick one union element + # error: [too-many-positional-arguments] "Too many positional arguments to function `f1`: expected 0, got 1" + x = f(3) + reveal_type(x) # revealed: Unknown +``` + +## One not-callable, one wrong argument + +```py +class C: ... + +def f1(): ... +def _(flag: bool): + if flag: + f = f1 + else: + f = C() + + # TODO: we should either show all union errors here, or prioritize the not-callable error + # error: [too-many-positional-arguments] "Too many positional arguments to function `f1`: expected 0, got 1" + x = f(3) + reveal_type(x) # revealed: Unknown +``` diff --git a/crates/red_knot_python_semantic/src/semantic_index/builder.rs b/crates/red_knot_python_semantic/src/semantic_index/builder.rs index 4cb637e37148d..b0b7ac3a9770e 100644 --- a/crates/red_knot_python_semantic/src/semantic_index/builder.rs +++ b/crates/red_knot_python_semantic/src/semantic_index/builder.rs @@ -41,22 +41,22 @@ use crate::Db; mod except_handlers; -/// Are we in a state where a `break` statement is allowed? -#[derive(Clone, Copy, Debug)] -enum LoopState { - InLoop, - NotInLoop, +#[derive(Debug, Default)] +struct Loop { + /// Flow states at each `break` in the current loop. + break_states: Vec, } -impl LoopState { - fn is_inside(self) -> bool { - matches!(self, LoopState::InLoop) +impl Loop { + fn push_break(&mut self, state: FlowSnapshot) { + self.break_states.push(state); } } struct ScopeInfo { file_scope_id: FileScopeId, - loop_state: LoopState, + /// Current loop state; None if we are not currently visiting a loop + current_loop: Option, } pub(super) struct SemanticIndexBuilder<'db> { @@ -73,8 +73,6 @@ pub(super) struct SemanticIndexBuilder<'db> { /// The name of the first function parameter of the innermost function that we're currently visiting. current_first_parameter_name: Option<&'db str>, - /// Flow states at each `break` in the current loop. - loop_break_states: Vec, /// Per-scope contexts regarding nested `try`/`except` statements try_node_context_stack_manager: TryNodeContextStackManager, @@ -106,7 +104,6 @@ impl<'db> SemanticIndexBuilder<'db> { current_assignments: vec![], current_match_case: None, current_first_parameter_name: None, - loop_break_states: vec![], try_node_context_stack_manager: TryNodeContextStackManager::default(), has_future_annotations: false, @@ -134,19 +131,20 @@ impl<'db> SemanticIndexBuilder<'db> { builder } - fn current_scope(&self) -> FileScopeId { - *self - .scope_stack + fn current_scope_info(&self) -> &ScopeInfo { + self.scope_stack .last() - .map(|ScopeInfo { file_scope_id, .. 
}| file_scope_id) .expect("SemanticIndexBuilder should have created a root scope") } - fn loop_state(&self) -> LoopState { + fn current_scope_info_mut(&mut self) -> &mut ScopeInfo { self.scope_stack - .last() + .last_mut() .expect("SemanticIndexBuilder should have created a root scope") - .loop_state + } + + fn current_scope(&self) -> FileScopeId { + self.current_scope_info().file_scope_id } /// Returns the scope ID of the surrounding class body scope if the current scope @@ -167,11 +165,21 @@ impl<'db> SemanticIndexBuilder<'db> { } } - fn set_inside_loop(&mut self, state: LoopState) { - self.scope_stack - .last_mut() - .expect("Always to have a root scope") - .loop_state = state; + /// Push a new loop, returning the outer loop, if any. + fn push_loop(&mut self) -> Option { + self.current_scope_info_mut() + .current_loop + .replace(Loop::default()) + } + + /// Pop a loop, replacing with the previous saved outer loop, if any. + fn pop_loop(&mut self, outer_loop: Option) -> Loop { + std::mem::replace(&mut self.current_scope_info_mut().current_loop, outer_loop) + .expect("pop_loop() should not be called without a prior push_loop()") + } + + fn current_loop_mut(&mut self) -> Option<&mut Loop> { + self.current_scope_info_mut().current_loop.as_mut() } fn push_scope(&mut self, node: NodeWithScopeRef) { @@ -204,7 +212,7 @@ impl<'db> SemanticIndexBuilder<'db> { self.scope_stack.push(ScopeInfo { file_scope_id, - loop_state: LoopState::NotInLoop, + current_loop: None, }); } @@ -1217,15 +1225,9 @@ where .current_visibility_constraints_mut() .add_atom(later_predicate_id); - // Save aside any break states from an outer loop - let saved_break_states = std::mem::take(&mut self.loop_break_states); - - // TODO: definitions created inside the body should be fully visible - // to other statements/expressions inside the body --Alex/Carl - let outer_loop_state = self.loop_state(); - self.set_inside_loop(LoopState::InLoop); + let outer_loop = self.push_loop(); self.visit_body(body); - self.set_inside_loop(outer_loop_state); + let this_loop = self.pop_loop(outer_loop); // If the body is executed, we know that we've evaluated the condition at least // once, and that the first evaluation was True. We might not have evaluated the @@ -1234,11 +1236,6 @@ where let body_vis_constraint_id = first_vis_constraint_id; self.record_visibility_constraint_id(body_vis_constraint_id); - // Get the break states from the body of this loop, and restore the saved outer - // ones. - let break_states = - std::mem::replace(&mut self.loop_break_states, saved_break_states); - // We execute the `else` once the condition evaluates to false. This could happen // without ever executing the body, if the condition is false the first time it's // tested. So the starting flow state of the `else` clause is the union of: @@ -1259,7 +1256,7 @@ where // Breaking out of a while loop bypasses the `else` clause, so merge in the break // states after visiting `else`. 
- for break_state in break_states { + for break_state in this_loop.break_states { let snapshot = self.flow_snapshot(); self.flow_restore(break_state); self.record_visibility_constraint_id(body_vis_constraint_id); @@ -1307,7 +1304,6 @@ where self.record_ambiguous_visibility(); let pre_loop = self.flow_snapshot(); - let saved_break_states = std::mem::take(&mut self.loop_break_states); let current_assignment = match &**target { ast::Expr::List(_) | ast::Expr::Tuple(_) => Some(CurrentAssignment::For { @@ -1355,16 +1351,9 @@ where self.pop_assignment(); } - // TODO: Definitions created by loop variables - // (and definitions created inside the body) - // are fully visible to other statements/expressions inside the body --Alex/Carl - let outer_loop_state = self.loop_state(); - self.set_inside_loop(LoopState::InLoop); + let outer_loop = self.push_loop(); self.visit_body(body); - self.set_inside_loop(outer_loop_state); - - let break_states = - std::mem::replace(&mut self.loop_break_states, saved_break_states); + let this_loop = self.pop_loop(outer_loop); // We may execute the `else` clause without ever executing the body, so merge in // the pre-loop state before visiting `else`. @@ -1373,7 +1362,7 @@ where // Breaking out of a `for` loop bypasses the `else` clause, so merge in the break // states after visiting `else`. - for break_state in break_states { + for break_state in this_loop.break_states { self.flow_merge(break_state); } } @@ -1556,9 +1545,9 @@ where } ast::Stmt::Break(_) => { - if self.loop_state().is_inside() { - let snapshot = self.flow_snapshot(); - self.loop_break_states.push(snapshot); + let snapshot = self.flow_snapshot(); + if let Some(current_loop) = self.current_loop_mut() { + current_loop.push_break(snapshot); } // Everything in the current block after a terminal statement is unreachable. 
self.mark_unreachable(); diff --git a/crates/red_knot_python_semantic/src/types/call.rs b/crates/red_knot_python_semantic/src/types/call.rs index e108a6656ae32..0fd81de9f4975 100644 --- a/crates/red_knot_python_semantic/src/types/call.rs +++ b/crates/red_knot_python_semantic/src/types/call.rs @@ -35,7 +35,7 @@ impl<'db> CallOutcome<'db> { let elements = union.elements(db); let mut bindings = Vec::with_capacity(elements.len()); let mut errors = Vec::new(); - let mut not_callable = true; + let mut all_errors_not_callable = true; for element in elements { match call(*element) { @@ -44,7 +44,7 @@ impl<'db> CallOutcome<'db> { bindings.extend(inner_bindings); } Err(error) => { - not_callable |= error.is_not_callable(); + all_errors_not_callable &= error.is_not_callable(); errors.push(error); } } @@ -52,7 +52,7 @@ impl<'db> CallOutcome<'db> { if errors.is_empty() { Ok(CallOutcome::Union(bindings.into())) - } else if bindings.is_empty() && not_callable { + } else if bindings.is_empty() && all_errors_not_callable { Err(CallError::NotCallable { not_callable_type: Type::Union(union), }) diff --git a/crates/red_knot_test/src/matcher.rs b/crates/red_knot_test/src/matcher.rs index d350ec7c61c3e..21598d608bf79 100644 --- a/crates/red_knot_test/src/matcher.rs +++ b/crates/red_knot_test/src/matcher.rs @@ -283,12 +283,12 @@ impl Matcher { let position = unmatched.iter().position(|diagnostic| { !error.rule.is_some_and(|rule| { !(diagnostic.id().is_lint_named(rule) || diagnostic.id().matches(rule)) - }) && !error + }) && error .column - .is_some_and(|col| col != self.column(*diagnostic)) - && !error + .is_none_or(|col| col == self.column(*diagnostic)) + && error .message_contains - .is_some_and(|needle| !diagnostic.message().contains(needle)) + .is_none_or(|needle| diagnostic.message().contains(needle)) }); if let Some(position) = position { unmatched.swap_remove(position); diff --git a/crates/ruff/src/args.rs b/crates/ruff/src/args.rs index 6c37948f32532..57a437fd2ffd5 100644 --- a/crates/ruff/src/args.rs +++ b/crates/ruff/src/args.rs @@ -830,7 +830,7 @@ enum InvalidConfigFlagReason { ValidTomlButInvalidRuffSchema(toml::de::Error), /// It was a valid ruff config file, but the user tried to pass a /// value for `extend` as part of the config override. - // `extend` is special, because it affects which config files we look at + /// `extend` is special, because it affects which config files we look at /// in the first place. 
We currently only parse --config overrides *after* /// we've combined them with all the arguments from the various config files /// that we found, so trying to override `extend` as part of a --config diff --git a/crates/ruff/src/cache.rs b/crates/ruff/src/cache.rs index 40b495189afbc..2ae5e82850f26 100644 --- a/crates/ruff/src/cache.rs +++ b/crates/ruff/src/cache.rs @@ -586,6 +586,7 @@ mod tests { use anyhow::Result; use filetime::{set_file_mtime, FileTime}; use itertools::Itertools; + use ruff_linter::settings::LinterSettings; use test_case::test_case; use ruff_cache::CACHE_DIR_NAME; @@ -593,7 +594,7 @@ mod tests { use ruff_linter::package::PackageRoot; use ruff_linter::settings::flags; use ruff_linter::settings::types::UnsafeFixes; - use ruff_python_ast::PySourceType; + use ruff_python_ast::{PySourceType, PythonVersion}; use ruff_workspace::Settings; use crate::cache::{self, FileCache, FileCacheData, FileCacheKey}; @@ -611,6 +612,10 @@ mod tests { let settings = Settings { cache_dir, + linter: LinterSettings { + unresolved_target_version: PythonVersion::PY310, + ..Default::default() + }, ..Settings::default() }; diff --git a/crates/ruff/tests/lint.rs b/crates/ruff/tests/lint.rs index e3348cb22f269..15591314ce2ba 100644 --- a/crates/ruff/tests/lint.rs +++ b/crates/ruff/tests/lint.rs @@ -2627,3 +2627,77 @@ class A(Generic[T]): " ); } + +#[test] +fn match_before_py310() { + // ok on 3.10 + assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) + .args(STDIN_BASE_OPTIONS) + .args(["--stdin-filename", "test.py"]) + .arg("--target-version=py310") + .arg("-") + .pass_stdin( + r#" +match 2: + case 1: + print("it's one") +"# + ), + @r" + success: true + exit_code: 0 + ----- stdout ----- + All checks passed! + + ----- stderr ----- + " + ); + + // ok on 3.9 without preview + assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) + .args(STDIN_BASE_OPTIONS) + .args(["--stdin-filename", "test.py"]) + .arg("--target-version=py39") + .arg("-") + .pass_stdin( + r#" +match 2: + case 1: + print("it's one") +"# + ), + @r" + success: true + exit_code: 0 + ----- stdout ----- + All checks passed! + + ----- stderr ----- + " + ); + + // syntax error on 3.9 with preview + assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) + .args(STDIN_BASE_OPTIONS) + .args(["--stdin-filename", "test.py"]) + .arg("--target-version=py39") + .arg("--preview") + .arg("-") + .pass_stdin( + r#" +match 2: + case 1: + print("it's one") +"# + ), + @r" + success: false + exit_code: 1 + ----- stdout ----- + test.py:2:1: SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10) + Found 1 error. + + ----- stderr ----- + " + ); +} diff --git a/crates/ruff_formatter/src/lib.rs b/crates/ruff_formatter/src/lib.rs index 0a81cb121902e..337200cc0aaa8 100644 --- a/crates/ruff_formatter/src/lib.rs +++ b/crates/ruff_formatter/src/lib.rs @@ -470,13 +470,13 @@ impl Printed { for marker in self.sourcemap { // Take the closest start marker, but skip over start_markers that have the same start. 
if marker.source <= source_range.start() - && !start_marker.is_some_and(|existing| existing.source >= marker.source) + && start_marker.is_none_or(|existing| existing.source < marker.source) { start_marker = Some(marker); } if marker.source >= source_range.end() - && !end_marker.is_some_and(|existing| existing.source <= marker.source) + && end_marker.is_none_or(|existing| existing.source > marker.source) { end_marker = Some(marker); } diff --git a/crates/ruff_linter/resources/test/fixtures/pylint/nan_comparison.py b/crates/ruff_linter/resources/test/fixtures/pylint/nan_comparison.py index e9ef56c1304f5..3481b37d6e1ac 100644 --- a/crates/ruff_linter/resources/test/fixtures/pylint/nan_comparison.py +++ b/crates/ruff_linter/resources/test/fixtures/pylint/nan_comparison.py @@ -53,6 +53,18 @@ if x == builtins.float("nan"): pass +# https://github.com/astral-sh/ruff/issues/16374 +match number: + # Errors + case np.nan: ... + case math.nan: ... + + # No errors + case np.nan(): ... + case math.nan(): ... + case float('nan'): ... + case npy_nan: ... + # OK if math.isnan(x): pass diff --git a/crates/ruff_linter/resources/test/fixtures/refurb/FURB156.py b/crates/ruff_linter/resources/test/fixtures/refurb/FURB156.py index 8b086f108df3d..032e72aa98aec 100644 --- a/crates/ruff_linter/resources/test/fixtures/refurb/FURB156.py +++ b/crates/ruff_linter/resources/test/fixtures/refurb/FURB156.py @@ -27,8 +27,26 @@ # with comment ).capitalize() +# example with augmented assignment +_ += "0123456789" + # OK _ = "1234567890" _ = "1234" _ = "12" in "12345670" + + +# No errors as the string is considered as a docstring +class C: + "01234567" + + +class C: + def method(self): + "01234567" + + +def function(): + """01234567""" + diff --git a/crates/ruff_linter/src/checkers/ast/analyze/deferred_scopes.rs b/crates/ruff_linter/src/checkers/ast/analyze/deferred_scopes.rs index 2f2bb7bf13308..d5801e9dba53c 100644 --- a/crates/ruff_linter/src/checkers/ast/analyze/deferred_scopes.rs +++ b/crates/ruff_linter/src/checkers/ast/analyze/deferred_scopes.rs @@ -177,10 +177,10 @@ pub(crate) fn deferred_scopes(checker: &Checker) { } // If the bindings are in different forks, abort. - if shadowed.source.map_or(true, |left| { + if shadowed.source.is_none_or(|left| { binding .source - .map_or(true, |right| !checker.semantic.same_branch(left, right)) + .is_none_or(|right| !checker.semantic.same_branch(left, right)) }) { continue; } @@ -269,10 +269,10 @@ pub(crate) fn deferred_scopes(checker: &Checker) { } // If the bindings are in different forks, abort. 
- if shadowed.source.map_or(true, |left| { + if shadowed.source.is_none_or(|left| { binding .source - .map_or(true, |right| !checker.semantic.same_branch(left, right)) + .is_none_or(|right| !checker.semantic.same_branch(left, right)) }) { continue; } diff --git a/crates/ruff_linter/src/checkers/ast/analyze/statement.rs b/crates/ruff_linter/src/checkers/ast/analyze/statement.rs index 77863086d6989..7b4aaa0a7e88e 100644 --- a/crates/ruff_linter/src/checkers/ast/analyze/statement.rs +++ b/crates/ruff_linter/src/checkers/ast/analyze/statement.rs @@ -1742,6 +1742,15 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) { pylint::rules::useless_exception_statement(checker, expr); } } + Stmt::Match(ast::StmtMatch { + subject: _, + cases, + range: _, + }) => { + if checker.enabled(Rule::NanComparison) { + pylint::rules::nan_comparison_match(checker, cases); + } + } _ => {} } } diff --git a/crates/ruff_linter/src/docstrings/sections.rs b/crates/ruff_linter/src/docstrings/sections.rs index edfc8bb7dbe5e..aa42af30ef50f 100644 --- a/crates/ruff_linter/src/docstrings/sections.rs +++ b/crates/ruff_linter/src/docstrings/sections.rs @@ -453,7 +453,7 @@ fn is_docstring_section( } // Determine whether the previous line looks like the end of a paragraph. - let previous_line_looks_like_end_of_paragraph = previous_line.map_or(true, |previous_line| { + let previous_line_looks_like_end_of_paragraph = previous_line.is_none_or(|previous_line| { let previous_line = previous_line.trim(); let previous_line_ends_with_punctuation = [',', ';', '.', '-', '\\', '/', ']', '}', ')'] .into_iter() diff --git a/crates/ruff_linter/src/linter.rs b/crates/ruff_linter/src/linter.rs index 4179c3b3815fc..be0362f9b1244 100644 --- a/crates/ruff_linter/src/linter.rs +++ b/crates/ruff_linter/src/linter.rs @@ -10,10 +10,10 @@ use rustc_hash::FxHashMap; use ruff_diagnostics::Diagnostic; use ruff_notebook::Notebook; -use ruff_python_ast::{ModModule, PySourceType}; +use ruff_python_ast::{ModModule, PySourceType, PythonVersion}; use ruff_python_codegen::Stylist; use ruff_python_index::Indexer; -use ruff_python_parser::{ParseError, Parsed}; +use ruff_python_parser::{ParseError, ParseOptions, Parsed, UnsupportedSyntaxError}; use ruff_source_file::SourceFileBuilder; use ruff_text_size::Ranged; @@ -71,6 +71,7 @@ pub fn check_path( source_kind: &SourceKind, source_type: PySourceType, parsed: &Parsed, + target_version: PythonVersion, ) -> Vec { // Aggregate all diagnostics. let mut diagnostics = vec![]; @@ -104,8 +105,6 @@ pub fn check_path( )); } - let target_version = settings.resolve_target_version(path); - // Run the filesystem-based rules. if settings .rules @@ -335,7 +334,8 @@ pub fn add_noqa_to_path( settings: &LinterSettings, ) -> Result { // Parse once. - let parsed = ruff_python_parser::parse_unchecked_source(source_kind.source_code(), source_type); + let target_version = settings.resolve_target_version(path); + let parsed = parse_unchecked_source(source_kind, source_type, target_version); // Map row and column locations to byte slices (lazily). let locator = Locator::new(source_kind.source_code()); @@ -367,6 +367,7 @@ pub fn add_noqa_to_path( source_kind, source_type, &parsed, + target_version, ); // Add any missing `# noqa` pragmas. 
@@ -393,7 +394,8 @@ pub fn lint_only( source_type: PySourceType, source: ParseSource, ) -> LinterResult { - let parsed = source.into_parsed(source_kind, source_type); + let target_version = settings.resolve_target_version(path); + let parsed = source.into_parsed(source_kind, source_type, target_version); // Map row and column locations to byte slices (lazily). let locator = Locator::new(source_kind.source_code()); @@ -425,12 +427,20 @@ pub fn lint_only( source_kind, source_type, &parsed, + target_version, ); + let syntax_errors = if settings.preview.is_enabled() { + parsed.unsupported_syntax_errors() + } else { + &[] + }; + LinterResult { messages: diagnostics_to_messages( diagnostics, parsed.errors(), + syntax_errors, path, &locator, &directives, @@ -443,6 +453,7 @@ pub fn lint_only( fn diagnostics_to_messages( diagnostics: Vec, parse_errors: &[ParseError], + unsupported_syntax_errors: &[UnsupportedSyntaxError], path: &Path, locator: &Locator, directives: &Directives, @@ -461,6 +472,9 @@ fn diagnostics_to_messages( parse_errors .iter() .map(|parse_error| Message::from_parse_error(parse_error, locator, file.deref().clone())) + .chain(unsupported_syntax_errors.iter().map(|syntax_error| { + Message::from_unsupported_syntax_error(syntax_error, file.deref().clone()) + })) .chain(diagnostics.into_iter().map(|diagnostic| { let noqa_offset = directives.noqa_line_for.resolve(diagnostic.start()); Message::from_diagnostic(diagnostic, file.deref().clone(), noqa_offset) @@ -491,11 +505,12 @@ pub fn lint_fix<'a>( // Track whether the _initial_ source code is valid syntax. let mut is_valid_syntax = false; + let target_version = settings.resolve_target_version(path); + // Continuously fix until the source code stabilizes. loop { // Parse once. - let parsed = - ruff_python_parser::parse_unchecked_source(transformed.source_code(), source_type); + let parsed = parse_unchecked_source(&transformed, source_type, target_version); // Map row and column locations to byte slices (lazily). let locator = Locator::new(transformed.source_code()); @@ -527,6 +542,7 @@ pub fn lint_fix<'a>( &transformed, source_type, &parsed, + target_version, ); if iterations == 0 { @@ -573,11 +589,18 @@ pub fn lint_fix<'a>( report_failed_to_converge_error(path, transformed.source_code(), &diagnostics); } + let syntax_errors = if settings.preview.is_enabled() { + parsed.unsupported_syntax_errors() + } else { + &[] + }; + return Ok(FixerResult { result: LinterResult { messages: diagnostics_to_messages( diagnostics, parsed.errors(), + syntax_errors, path, &locator, &directives, @@ -680,16 +703,35 @@ pub enum ParseSource { impl ParseSource { /// Consumes the [`ParseSource`] and returns the parsed [`Parsed`], parsing the source code if /// necessary. - fn into_parsed(self, source_kind: &SourceKind, source_type: PySourceType) -> Parsed { + fn into_parsed( + self, + source_kind: &SourceKind, + source_type: PySourceType, + target_version: PythonVersion, + ) -> Parsed { match self { - ParseSource::None => { - ruff_python_parser::parse_unchecked_source(source_kind.source_code(), source_type) - } + ParseSource::None => parse_unchecked_source(source_kind, source_type, target_version), ParseSource::Precomputed(parsed) => parsed, } } } +/// Like [`ruff_python_parser::parse_unchecked_source`] but with an additional [`PythonVersion`] +/// argument. 
+fn parse_unchecked_source( + source_kind: &SourceKind, + source_type: PySourceType, + target_version: PythonVersion, +) -> Parsed { + let options = ParseOptions::from(source_type).with_target_version(target_version); + // SAFETY: Safe because `PySourceType` always parses to a `ModModule`. See + // `ruff_python_parser::parse_unchecked_source`. We use `parse_unchecked` (and thus + // have to unwrap) in order to pass the `PythonVersion` via `ParseOptions`. + ruff_python_parser::parse_unchecked(source_kind.source_code(), options) + .try_into_module() + .expect("PySourceType always parses into a module") +} + #[cfg(test)] mod tests { use std::path::Path; diff --git a/crates/ruff_linter/src/message/mod.rs b/crates/ruff_linter/src/message/mod.rs index 52de250c4c32d..09d96bb5c6374 100644 --- a/crates/ruff_linter/src/message/mod.rs +++ b/crates/ruff_linter/src/message/mod.rs @@ -16,7 +16,7 @@ pub use pylint::PylintEmitter; pub use rdjson::RdjsonEmitter; use ruff_diagnostics::{Diagnostic, DiagnosticKind, Fix}; use ruff_notebook::NotebookIndex; -use ruff_python_parser::ParseError; +use ruff_python_parser::{ParseError, UnsupportedSyntaxError}; use ruff_source_file::{SourceFile, SourceLocation}; use ruff_text_size::{Ranged, TextLen, TextRange, TextSize}; pub use sarif::SarifEmitter; @@ -121,6 +121,18 @@ impl Message { }) } + /// Create a [`Message`] from the given [`UnsupportedSyntaxError`]. + pub fn from_unsupported_syntax_error( + unsupported_syntax_error: &UnsupportedSyntaxError, + file: SourceFile, + ) -> Message { + Message::SyntaxError(SyntaxErrorMessage { + message: format!("SyntaxError: {unsupported_syntax_error}"), + range: unsupported_syntax_error.range, + file, + }) + } + pub const fn as_diagnostic_message(&self) -> Option<&DiagnosticMessage> { match self { Message::Diagnostic(m) => Some(m), diff --git a/crates/ruff_linter/src/rules/flake8_bandit/helpers.rs b/crates/ruff_linter/src/rules/flake8_bandit/helpers.rs index a79d707f8aa13..917c80f2f4925 100644 --- a/crates/ruff_linter/src/rules/flake8_bandit/helpers.rs +++ b/crates/ruff_linter/src/rules/flake8_bandit/helpers.rs @@ -20,7 +20,7 @@ pub(super) fn matches_password_name(string: &str) -> bool { } pub(super) fn is_untyped_exception(type_: Option<&Expr>, semantic: &SemanticModel) -> bool { - type_.map_or(true, |type_| { + type_.is_none_or(|type_| { if let Expr::Tuple(ast::ExprTuple { elts, .. }) = &type_ { elts.iter().any(|type_| { semantic diff --git a/crates/ruff_linter/src/rules/flake8_bandit/rules/hardcoded_sql_expression.rs b/crates/ruff_linter/src/rules/flake8_bandit/rules/hardcoded_sql_expression.rs index 0515d62d31afb..f29cd41d30c02 100644 --- a/crates/ruff_linter/src/rules/flake8_bandit/rules/hardcoded_sql_expression.rs +++ b/crates/ruff_linter/src/rules/flake8_bandit/rules/hardcoded_sql_expression.rs @@ -62,10 +62,10 @@ pub(crate) fn hardcoded_sql_expression(checker: &Checker, expr: &Expr) { op: Operator::Add, .. }) => { // Only evaluate the full BinOp, not the nested components. 
- if !checker + if checker .semantic() .current_expression_parent() - .map_or(true, |parent| !parent.is_bin_op_expr()) + .is_some_and(ruff_python_ast::Expr::is_bin_op_expr) { return; } diff --git a/crates/ruff_linter/src/rules/flake8_bandit/rules/hashlib_insecure_hash_functions.rs b/crates/ruff_linter/src/rules/flake8_bandit/rules/hashlib_insecure_hash_functions.rs index d2ad1dba8574d..3f97de5050e00 100644 --- a/crates/ruff_linter/src/rules/flake8_bandit/rules/hashlib_insecure_hash_functions.rs +++ b/crates/ruff_linter/src/rules/flake8_bandit/rules/hashlib_insecure_hash_functions.rs @@ -199,7 +199,7 @@ fn detect_insecure_crypt_calls(checker: &Checker, call: &ast::ExprCall) { fn is_used_for_security(arguments: &Arguments) -> bool { arguments .find_keyword("usedforsecurity") - .map_or(true, |keyword| !is_const_false(&keyword.value)) + .is_none_or(|keyword| !is_const_false(&keyword.value)) } #[derive(Debug, Copy, Clone)] diff --git a/crates/ruff_linter/src/rules/flake8_bandit/rules/tarfile_unsafe_members.rs b/crates/ruff_linter/src/rules/flake8_bandit/rules/tarfile_unsafe_members.rs index 5d91dabe97024..e27409678ec39 100644 --- a/crates/ruff_linter/src/rules/flake8_bandit/rules/tarfile_unsafe_members.rs +++ b/crates/ruff_linter/src/rules/flake8_bandit/rules/tarfile_unsafe_members.rs @@ -53,10 +53,10 @@ pub(crate) fn tarfile_unsafe_members(checker: &Checker, call: &ast::ExprCall) { return; } - if !call + if call .func .as_attribute_expr() - .is_some_and(|attr| attr.attr.as_str() == "extractall") + .is_none_or(|attr| attr.attr.as_str() != "extractall") { return; } diff --git a/crates/ruff_linter/src/rules/flake8_builtins/rules/stdlib_module_shadowing.rs b/crates/ruff_linter/src/rules/flake8_builtins/rules/stdlib_module_shadowing.rs index 95f5c00611afc..4b6942a8509cf 100644 --- a/crates/ruff_linter/src/rules/flake8_builtins/rules/stdlib_module_shadowing.rs +++ b/crates/ruff_linter/src/rules/flake8_builtins/rules/stdlib_module_shadowing.rs @@ -120,10 +120,7 @@ pub(crate) fn stdlib_module_shadowing( fn get_prefix<'a>(settings: &'a LinterSettings, path: &Path) -> Option<&'a PathBuf> { let mut prefix = None; for dir in settings.src.iter().chain([&settings.project_root]) { - if path.starts_with(dir) - // TODO `is_none_or` when MSRV >= 1.82 - && (prefix.is_none() || prefix.is_some_and(|existing| existing < dir)) - { + if path.starts_with(dir) && prefix.is_none_or(|existing| existing < dir) { prefix = Some(dir); } } diff --git a/crates/ruff_linter/src/rules/flake8_no_pep420/rules/implicit_namespace_package.rs b/crates/ruff_linter/src/rules/flake8_no_pep420/rules/implicit_namespace_package.rs index c0b916bbdc8fc..4e91d2058263a 100644 --- a/crates/ruff_linter/src/rules/flake8_no_pep420/rules/implicit_namespace_package.rs +++ b/crates/ruff_linter/src/rules/flake8_no_pep420/rules/implicit_namespace_package.rs @@ -66,17 +66,17 @@ pub(crate) fn implicit_namespace_package( // Ignore non-`.py` files, which don't require an `__init__.py`. && PySourceType::try_from_path(path).is_some_and(PySourceType::is_py_file) // Ignore any files that are direct children of the project root. - && !path + && path .parent() - .is_some_and( |parent| parent == project_root) + .is_none_or( |parent| parent != project_root) // Ignore any files that are direct children of a source directory (e.g., `src/manage.py`). && !path .parent() .is_some_and( |parent| src.iter().any(|src| src == parent)) // Ignore files that contain a shebang. 
- && !comment_ranges + && comment_ranges .first().filter(|range| range.start() == TextSize::from(0)) - .is_some_and(|range| ShebangDirective::try_extract(locator.slice(*range)).is_some()) + .is_none_or(|range| ShebangDirective::try_extract(locator.slice(*range)).is_none()) // Ignore PEP 723 scripts. && ScriptTag::parse(locator.contents().as_bytes()).is_none() { diff --git a/crates/ruff_linter/src/rules/flake8_print/rules/print_call.rs b/crates/ruff_linter/src/rules/flake8_print/rules/print_call.rs index 59ce9542cd728..ec895762912ed 100644 --- a/crates/ruff_linter/src/rules/flake8_print/rules/print_call.rs +++ b/crates/ruff_linter/src/rules/flake8_print/rules/print_call.rs @@ -109,8 +109,7 @@ pub(crate) fn print_call(checker: &Checker, call: &ast::ExprCall) { // or `"sys.stderr"`), don't trigger T201. if let Some(keyword) = call.arguments.find_keyword("file") { if !keyword.value.is_none_literal_expr() { - if semantic.resolve_qualified_name(&keyword.value).map_or( - true, + if semantic.resolve_qualified_name(&keyword.value).is_none_or( |qualified_name| { !matches!(qualified_name.segments(), ["sys", "stdout" | "stderr"]) }, diff --git a/crates/ruff_linter/src/rules/flake8_pyi/rules/exit_annotations.rs b/crates/ruff_linter/src/rules/flake8_pyi/rules/exit_annotations.rs index 1202d5cbe2ac0..881da80687e22 100644 --- a/crates/ruff_linter/src/rules/flake8_pyi/rules/exit_annotations.rs +++ b/crates/ruff_linter/src/rules/flake8_pyi/rules/exit_annotations.rs @@ -294,7 +294,7 @@ fn check_positional_args_for_overloaded_method( predicate: impl FnOnce(&Expr) -> bool, semantic: &SemanticModel, ) -> bool { - parameter.annotation().map_or(true, |annotation| { + parameter.annotation().is_none_or(|annotation| { predicate(annotation) || is_object_or_unused(annotation, semantic) }) } diff --git a/crates/ruff_linter/src/rules/flake8_pytest_style/rules/fail.rs b/crates/ruff_linter/src/rules/flake8_pytest_style/rules/fail.rs index 1702f25b1ea55..998ad78014ed0 100644 --- a/crates/ruff_linter/src/rules/flake8_pytest_style/rules/fail.rs +++ b/crates/ruff_linter/src/rules/flake8_pytest_style/rules/fail.rs @@ -63,7 +63,7 @@ pub(crate) fn fail_call(checker: &Checker, call: &ast::ExprCall) { .arguments .find_argument_value("reason", 0) .or_else(|| call.arguments.find_argument_value("msg", 0)) - .map_or(true, is_empty_or_null_string) + .is_none_or(is_empty_or_null_string) { checker.report_diagnostic(Diagnostic::new(PytestFailWithoutMessage, call.func.range())); } diff --git a/crates/ruff_linter/src/rules/flake8_pytest_style/rules/raises.rs b/crates/ruff_linter/src/rules/flake8_pytest_style/rules/raises.rs index ebaf2e722afbb..4701b9e2fb705 100644 --- a/crates/ruff_linter/src/rules/flake8_pytest_style/rules/raises.rs +++ b/crates/ruff_linter/src/rules/flake8_pytest_style/rules/raises.rs @@ -188,7 +188,7 @@ pub(crate) fn raises_call(checker: &Checker, call: &ast::ExprCall) { if call .arguments .find_keyword("match") - .map_or(true, |k| is_empty_or_null_string(&k.value)) + .is_none_or(|k| is_empty_or_null_string(&k.value)) { exception_needs_match(checker, exception); } diff --git a/crates/ruff_linter/src/rules/flake8_pytest_style/rules/warns.rs b/crates/ruff_linter/src/rules/flake8_pytest_style/rules/warns.rs index df92a3e653af8..3b741f3af6f21 100644 --- a/crates/ruff_linter/src/rules/flake8_pytest_style/rules/warns.rs +++ b/crates/ruff_linter/src/rules/flake8_pytest_style/rules/warns.rs @@ -187,7 +187,7 @@ pub(crate) fn warns_call(checker: &Checker, call: &ast::ExprCall) { if call .arguments .find_keyword("match") - 
.map_or(true, |k| is_empty_or_null_string(&k.value)) + .is_none_or(|k| is_empty_or_null_string(&k.value)) { warning_needs_match(checker, warning); } diff --git a/crates/ruff_linter/src/rules/flake8_return/rules/function.rs b/crates/ruff_linter/src/rules/flake8_return/rules/function.rs index 2cd869848c560..a28597949e85e 100644 --- a/crates/ruff_linter/src/rules/flake8_return/rules/function.rs +++ b/crates/ruff_linter/src/rules/flake8_return/rules/function.rs @@ -817,7 +817,7 @@ pub(crate) fn function(checker: &Checker, function_def: &ast::StmtFunctionDef) { } else { if checker.enabled(Rule::UnnecessaryReturnNone) { // Skip functions that have a return annotation that is not `None`. - if returns.as_deref().map_or(true, Expr::is_none_literal_expr) { + if returns.as_deref().is_none_or(Expr::is_none_literal_expr) { unnecessary_return_none(checker, decorator_list, &stack); } } diff --git a/crates/ruff_linter/src/rules/flake8_simplify/rules/ast_with.rs b/crates/ruff_linter/src/rules/flake8_simplify/rules/ast_with.rs index fff6dace5be09..b86626984eab7 100644 --- a/crates/ruff_linter/src/rules/flake8_simplify/rules/ast_with.rs +++ b/crates/ruff_linter/src/rules/flake8_simplify/rules/ast_with.rs @@ -177,7 +177,7 @@ pub(crate) fn multiple_with_statements( with_stmt, ) { Ok(edit) => { - if edit.content().map_or(true, |content| { + if edit.content().is_none_or(|content| { fits( content, with_stmt.into(), diff --git a/crates/ruff_linter/src/rules/flake8_simplify/rules/collapsible_if.rs b/crates/ruff_linter/src/rules/flake8_simplify/rules/collapsible_if.rs index 76aa2fe5e65c2..43f8facc3c549 100644 --- a/crates/ruff_linter/src/rules/flake8_simplify/rules/collapsible_if.rs +++ b/crates/ruff_linter/src/rules/flake8_simplify/rules/collapsible_if.rs @@ -120,7 +120,7 @@ pub(crate) fn nested_if_statements( diagnostic.try_set_optional_fix(|| { match collapse_nested_if(checker.locator(), checker.stylist(), nested_if) { Ok(edit) => { - if edit.content().map_or(true, |content| { + if edit.content().is_none_or(|content| { fits( content, (&nested_if).into(), diff --git a/crates/ruff_linter/src/rules/isort/normalize.rs b/crates/ruff_linter/src/rules/isort/normalize.rs index f7f7bcabce9fb..9feb16456634e 100644 --- a/crates/ruff_linter/src/rules/isort/normalize.rs +++ b/crates/ruff_linter/src/rules/isort/normalize.rs @@ -53,10 +53,9 @@ pub(crate) fn normalize_imports<'a>( } => { // Whether to track each member of the import as a separate entry. let isolate_aliases = settings.force_single_line - && module.map_or(true, |module| { - !settings.single_line_exclusions.contains(module) - }) - && !names.first().is_some_and(|alias| alias.name == "*"); + && module + .is_none_or(|module| !settings.single_line_exclusions.contains(module)) + && names.first().is_none_or(|alias| alias.name != "*"); // Insert comments on the statement itself. if isolate_aliases { diff --git a/crates/ruff_linter/src/rules/pep8_naming/settings.rs b/crates/ruff_linter/src/rules/pep8_naming/settings.rs index 9705b7cde05d5..accac6ef6cd5c 100644 --- a/crates/ruff_linter/src/rules/pep8_naming/settings.rs +++ b/crates/ruff_linter/src/rules/pep8_naming/settings.rs @@ -102,7 +102,7 @@ impl IgnoreNames { ) -> Result { // If the user is not customizing the set of ignored names, use the default matcher, // which is hard-coded to avoid expensive regex matching. 
- if ignore_names.is_none() && extend_ignore_names.as_ref().map_or(true, Vec::is_empty) { + if ignore_names.is_none() && extend_ignore_names.as_ref().is_none_or(Vec::is_empty) { return Ok(IgnoreNames::Default); } diff --git a/crates/ruff_linter/src/rules/perflint/rules/manual_list_copy.rs b/crates/ruff_linter/src/rules/perflint/rules/manual_list_copy.rs index 798647080881c..24ee2ae9c188b 100644 --- a/crates/ruff_linter/src/rules/perflint/rules/manual_list_copy.rs +++ b/crates/ruff_linter/src/rules/perflint/rules/manual_list_copy.rs @@ -93,7 +93,7 @@ pub(crate) fn manual_list_copy(checker: &Checker, for_stmt: &ast::StmtFor) { } // Only flag direct list copies (e.g., `for x in y: filtered.append(x)`). - if !arg.as_name_expr().is_some_and(|arg| arg.id == *id) { + if arg.as_name_expr().is_none_or(|arg| arg.id != *id) { return; } diff --git a/crates/ruff_linter/src/rules/pycodestyle/rules/logical_lines/missing_whitespace_around_operator.rs b/crates/ruff_linter/src/rules/pycodestyle/rules/logical_lines/missing_whitespace_around_operator.rs index 064d7fabd581e..a3e16837e2d7d 100644 --- a/crates/ruff_linter/src/rules/pycodestyle/rules/logical_lines/missing_whitespace_around_operator.rs +++ b/crates/ruff_linter/src/rules/pycodestyle/rules/logical_lines/missing_whitespace_around_operator.rs @@ -245,9 +245,9 @@ pub(crate) fn missing_whitespace_around_operator( let has_leading_trivia = prev_token.end() < token.start() || is_non_logical_token(prev_token.kind()); - let has_trailing_trivia = tokens.peek().map_or(true, |next| { - token.end() < next.start() || is_non_logical_token(next.kind()) - }); + let has_trailing_trivia = tokens + .peek() + .is_none_or(|next| token.end() < next.start() || is_non_logical_token(next.kind())); match (has_leading_trivia, has_trailing_trivia) { // Operator with trailing but no leading space, enforce consistent spacing. 
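
Most of the mechanical churn in this patch is the same rewrite seen in matcher.rs, deferred_scopes.rs, and the pep8_naming settings above: `opt.map_or(true, f)` (and the negated `!opt.is_some_and(g)`) becomes `opt.is_none_or(f)`, which the removed TODO in stdlib_module_shadowing.rs ties to an MSRV of at least 1.82 and hence the `rust-version` bump in Cargo.toml. Below is a minimal standalone sketch of the equivalence, with hypothetical column values loosely modeled on the matcher.rs change; it is illustrative only.

    fn main() {
        let expected_col: Option<usize> = None; // the "no expectation" case
        let actual_col = 7;

        // Old spelling: default to `true` when the Option is `None`.
        let old = expected_col.map_or(true, |col| col == actual_col);
        // New spelling (Option::is_none_or, Rust 1.82+): same truth table, reads as the condition itself.
        let new = expected_col.is_none_or(|col| col == actual_col);
        assert_eq!(old, new);

        // The negated form rewritten throughout the patch:
        // `!opt.is_some_and(|v| pred(v))` is equivalent to `opt.is_none_or(|v| !pred(v))`.
        let negated_old = !expected_col.is_some_and(|col| col != actual_col);
        let negated_new = expected_col.is_none_or(|col| col == actual_col);
        assert_eq!(negated_old, negated_new);
    }
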
diff --git a/crates/ruff_linter/src/rules/pydoclint/rules/check_docstring.rs b/crates/ruff_linter/src/rules/pydoclint/rules/check_docstring.rs index 95fb57feedf2b..826d71761501d 100644 --- a/crates/ruff_linter/src/rules/pydoclint/rules/check_docstring.rs +++ b/crates/ruff_linter/src/rules/pydoclint/rules/check_docstring.rs @@ -794,7 +794,7 @@ impl<'a> GeneratorOrIteratorArguments<'a> { match self { Self::Unparameterized => true, Self::Single(_) => true, - Self::Several(elements) => elements.get(2).map_or(true, Expr::is_none_literal_expr), + Self::Several(elements) => elements.get(2).is_none_or(Expr::is_none_literal_expr), } } } @@ -947,7 +947,7 @@ pub(crate) fn check_docstring( match function_def.returns.as_deref() { Some(returns) if !generator_annotation_arguments(returns, semantic).is_some_and( - |arguments| arguments.first().map_or(true, Expr::is_none_literal_expr), + |arguments| arguments.first().is_none_or(Expr::is_none_literal_expr), ) => { diagnostics diff --git a/crates/ruff_linter/src/rules/pydocstyle/rules/no_signature.rs b/crates/ruff_linter/src/rules/pydocstyle/rules/no_signature.rs index d63f698f2ffef..2addede6a4388 100644 --- a/crates/ruff_linter/src/rules/pydocstyle/rules/no_signature.rs +++ b/crates/ruff_linter/src/rules/pydocstyle/rules/no_signature.rs @@ -71,7 +71,7 @@ pub(crate) fn no_signature(checker: &Checker, docstring: &Docstring) { let preceded_by_word_boundary = first_line[..index] .chars() .next_back() - .map_or(true, |c| matches!(c, ' ' | '\t' | ';' | ',')); + .is_none_or(|c| matches!(c, ' ' | '\t' | ';' | ',')); if !preceded_by_word_boundary { return false; } diff --git a/crates/ruff_linter/src/rules/pydocstyle/rules/sections.rs b/crates/ruff_linter/src/rules/pydocstyle/rules/sections.rs index 913269f743e03..2535ac71acb69 100644 --- a/crates/ruff_linter/src/rules/pydocstyle/rules/sections.rs +++ b/crates/ruff_linter/src/rules/pydocstyle/rules/sections.rs @@ -1871,7 +1871,7 @@ fn args_section(context: &SectionContext) -> FxHashSet { // Reformat each section. let mut args_sections: Vec = vec![]; for line in args_content.trim().lines() { - if line.chars().next().map_or(true, char::is_whitespace) { + if line.chars().next().is_none_or(char::is_whitespace) { // This is a continuation of the documentation for the previous parameter, // because it starts with whitespace. 
if let Some(last) = args_sections.last_mut() { diff --git a/crates/ruff_linter/src/rules/pyflakes/mod.rs b/crates/ruff_linter/src/rules/pyflakes/mod.rs index 83f0387b9666c..4f00f15b8d3c6 100644 --- a/crates/ruff_linter/src/rules/pyflakes/mod.rs +++ b/crates/ruff_linter/src/rules/pyflakes/mod.rs @@ -11,6 +11,7 @@ mod tests { use anyhow::Result; use regex::Regex; + use ruff_python_parser::ParseOptions; use rustc_hash::FxHashMap; use test_case::test_case; @@ -744,8 +745,11 @@ mod tests { let source_type = PySourceType::default(); let source_kind = SourceKind::Python(contents.to_string()); let settings = LinterSettings::for_rules(Linter::Pyflakes.rules()); - let parsed = - ruff_python_parser::parse_unchecked_source(source_kind.source_code(), source_type); + let options = + ParseOptions::from(source_type).with_target_version(settings.unresolved_target_version); + let parsed = ruff_python_parser::parse_unchecked(source_kind.source_code(), options) + .try_into_module() + .expect("PySourceType always parses into a module"); let locator = Locator::new(&contents); let stylist = Stylist::from_tokens(parsed.tokens(), locator.contents()); let indexer = Indexer::from_tokens(parsed.tokens(), locator.contents()); @@ -767,6 +771,7 @@ mod tests { &source_kind, source_type, &parsed, + settings.unresolved_target_version, ); diagnostics.sort_by_key(Ranged::start); let actual = diagnostics diff --git a/crates/ruff_linter/src/rules/pylint/rules/nan_comparison.rs b/crates/ruff_linter/src/rules/pylint/rules/nan_comparison.rs index 5eee0a1ee5222..aca8fe15fcfd9 100644 --- a/crates/ruff_linter/src/rules/pylint/rules/nan_comparison.rs +++ b/crates/ruff_linter/src/rules/pylint/rules/nan_comparison.rs @@ -48,7 +48,21 @@ impl Violation for NanComparison { /// PLW0177 pub(crate) fn nan_comparison(checker: &Checker, left: &Expr, comparators: &[Expr]) { - for expr in std::iter::once(left).chain(comparators) { + nan_comparison_impl(checker, std::iter::once(left).chain(comparators)); +} + +/// PLW0177 +pub(crate) fn nan_comparison_match(checker: &Checker, cases: &[ast::MatchCase]) { + nan_comparison_impl( + checker, + cases + .iter() + .filter_map(|case| case.pattern.as_match_value().map(|pattern| &*pattern.value)), + ); +} + +fn nan_comparison_impl<'a>(checker: &Checker, comparators: impl Iterator) { + for expr in comparators { if let Some(qualified_name) = checker.semantic().resolve_qualified_name(expr) { match qualified_name.segments() { ["numpy", "nan" | "NAN" | "NaN"] => { diff --git a/crates/ruff_linter/src/rules/pylint/rules/potential_index_error.rs b/crates/ruff_linter/src/rules/pylint/rules/potential_index_error.rs index 95cb2705bc0f3..5d1081836345b 100644 --- a/crates/ruff_linter/src/rules/pylint/rules/potential_index_error.rs +++ b/crates/ruff_linter/src/rules/pylint/rules/potential_index_error.rs @@ -65,7 +65,7 @@ pub(crate) fn potential_index_error(checker: &Checker, value: &Expr, slice: &Exp // Emit a diagnostic if the index is out of bounds. If the index can't be represented as an // `i64`, but the length _can_, then the index is definitely out of bounds. 
- if index.map_or(true, |index| index >= length || index < -length) { + if index.is_none_or(|index| index >= length || index < -length) { checker.report_diagnostic(Diagnostic::new(PotentialIndexError, slice.range())); } } diff --git a/crates/ruff_linter/src/rules/pylint/rules/type_name_incorrect_variance.rs b/crates/ruff_linter/src/rules/pylint/rules/type_name_incorrect_variance.rs index b6981f259f7ee..d4b0b7ab16335 100644 --- a/crates/ruff_linter/src/rules/pylint/rules/type_name_incorrect_variance.rs +++ b/crates/ruff_linter/src/rules/pylint/rules/type_name_incorrect_variance.rs @@ -140,9 +140,9 @@ pub(crate) fn type_name_incorrect_variance(checker: &Checker, value: &Expr) { /// Returns `true` if the parameter name does not match its type variance. fn mismatch(param_name: &str, covariant: Option<&Expr>, contravariant: Option<&Expr>) -> bool { if param_name.ends_with("_co") { - covariant.map_or(true, |covariant| !is_const_true(covariant)) + covariant.is_none_or(|covariant| !is_const_true(covariant)) } else if param_name.ends_with("_contra") { - contravariant.map_or(true, |contravariant| !is_const_true(contravariant)) + contravariant.is_none_or(|contravariant| !is_const_true(contravariant)) } else { covariant.is_some_and(is_const_true) || contravariant.is_some_and(is_const_true) } diff --git a/crates/ruff_linter/src/rules/pylint/rules/unnecessary_list_index_lookup.rs b/crates/ruff_linter/src/rules/pylint/rules/unnecessary_list_index_lookup.rs index e4c2b594d948a..c0874d0d3deb7 100644 --- a/crates/ruff_linter/src/rules/pylint/rules/unnecessary_list_index_lookup.rs +++ b/crates/ruff_linter/src/rules/pylint/rules/unnecessary_list_index_lookup.rs @@ -154,7 +154,7 @@ fn enumerate_items<'a>( // If the `enumerate` call has a non-zero `start`, don't omit. if !arguments .find_argument_value("start", 1) - .map_or(true, |expr| { + .is_none_or(|expr| { matches!( expr, Expr::NumberLiteral(ast::ExprNumberLiteral { diff --git a/crates/ruff_linter/src/rules/pylint/rules/useless_return.rs b/crates/ruff_linter/src/rules/pylint/rules/useless_return.rs index b06064d842600..513f391f53382 100644 --- a/crates/ruff_linter/src/rules/pylint/rules/useless_return.rs +++ b/crates/ruff_linter/src/rules/pylint/rules/useless_return.rs @@ -50,7 +50,7 @@ pub(crate) fn useless_return( returns: Option<&Expr>, ) { // Skip functions that have a return annotation that is not `None`. - if !returns.map_or(true, Expr::is_none_literal_expr) { + if !returns.is_none_or(Expr::is_none_literal_expr) { return; } @@ -82,7 +82,7 @@ pub(crate) fn useless_return( // Verify that the return statement is either bare or returns `None`. 
if !value .as_ref() - .map_or(true, |expr| expr.is_none_literal_expr()) + .is_none_or(|expr| expr.is_none_literal_expr()) { return; }; diff --git a/crates/ruff_linter/src/rules/pylint/snapshots/ruff_linter__rules__pylint__tests__PLW0177_nan_comparison.py.snap b/crates/ruff_linter/src/rules/pylint/snapshots/ruff_linter__rules__pylint__tests__PLW0177_nan_comparison.py.snap index 3b87327d734f6..a9c4292f14771 100644 --- a/crates/ruff_linter/src/rules/pylint/snapshots/ruff_linter__rules__pylint__tests__PLW0177_nan_comparison.py.snap +++ b/crates/ruff_linter/src/rules/pylint/snapshots/ruff_linter__rules__pylint__tests__PLW0177_nan_comparison.py.snap @@ -1,6 +1,5 @@ --- source: crates/ruff_linter/src/rules/pylint/mod.rs -snapshot_kind: text --- nan_comparison.py:11:9: PLW0177 Comparing against a NaN value; use `math.isnan` instead | @@ -89,3 +88,22 @@ nan_comparison.py:53:9: PLW0177 Comparing against a NaN value; use `math.isnan` | ^^^^^^^^^^^^^^^^^^^^^ PLW0177 54 | pass | + +nan_comparison.py:59:10: PLW0177 Comparing against a NaN value; use `np.isnan` instead + | +57 | match number: +58 | # Errors +59 | case np.nan: ... + | ^^^^^^ PLW0177 +60 | case math.nan: ... + | + +nan_comparison.py:60:10: PLW0177 Comparing against a NaN value; use `math.isnan` instead + | +58 | # Errors +59 | case np.nan: ... +60 | case math.nan: ... + | ^^^^^^^^ PLW0177 +61 | +62 | # No errors + | diff --git a/crates/ruff_linter/src/rules/refurb/rules/hardcoded_string_charset.rs b/crates/ruff_linter/src/rules/refurb/rules/hardcoded_string_charset.rs index c9583cdbddabe..0c0d0a4a422cf 100644 --- a/crates/ruff_linter/src/rules/refurb/rules/hardcoded_string_charset.rs +++ b/crates/ruff_linter/src/rules/refurb/rules/hardcoded_string_charset.rs @@ -46,6 +46,11 @@ impl AlwaysFixableViolation for HardcodedStringCharset { /// FURB156 pub(crate) fn hardcoded_string_charset_literal(checker: &Checker, expr: &ExprStringLiteral) { + // if the string literal is a docstring, the rule is not applied + if checker.semantic().in_pep_257_docstring() { + return; + } + if let Some(charset) = check_charset_exact(expr.value.to_str().as_bytes()) { push_diagnostic(checker, expr.range, charset); } diff --git a/crates/ruff_linter/src/rules/refurb/rules/int_on_sliced_str.rs b/crates/ruff_linter/src/rules/refurb/rules/int_on_sliced_str.rs index 252cf0ec46e49..b2da47d6be6cd 100644 --- a/crates/ruff_linter/src/rules/refurb/rules/int_on_sliced_str.rs +++ b/crates/ruff_linter/src/rules/refurb/rules/int_on_sliced_str.rs @@ -106,12 +106,12 @@ pub(crate) fn int_on_sliced_str(checker: &Checker, call: &ExprCall) { if expr_slice.upper.is_some() || expr_slice.step.is_some() { return; } - if !expr_slice + if expr_slice .lower .as_ref() .and_then(|expr| expr.as_number_literal_expr()) .and_then(|expr| expr.value.as_int()) - .is_some_and(|expr| expr.as_u8() == Some(2)) + .is_none_or(|expr| expr.as_u8() != Some(2)) { return; } diff --git a/crates/ruff_linter/src/rules/refurb/rules/list_reverse_copy.rs b/crates/ruff_linter/src/rules/refurb/rules/list_reverse_copy.rs index 24ba55f860b34..c3995008c728c 100644 --- a/crates/ruff_linter/src/rules/refurb/rules/list_reverse_copy.rs +++ b/crates/ruff_linter/src/rules/refurb/rules/list_reverse_copy.rs @@ -119,7 +119,7 @@ fn peel_lists(expr: &Expr) -> &Expr { return expr; } - if !func.as_name_expr().is_some_and(|name| name.id == "list") { + if func.as_name_expr().is_none_or(|name| name.id != "list") { return expr; } @@ -175,11 +175,11 @@ fn extract_name_from_sliced_reversed(expr: &Expr) -> Option<&ExprName> { else { return 
None; }; - if !operand + if operand .as_number_literal_expr() .and_then(|num| num.value.as_int()) .and_then(Int::as_u8) - .is_some_and(|value| value == 1) + .is_none_or(|value| value != 1) { return None; }; diff --git a/crates/ruff_linter/src/rules/refurb/rules/unnecessary_enumerate.rs b/crates/ruff_linter/src/rules/refurb/rules/unnecessary_enumerate.rs index ae2e7c5241f59..4dceca05bcf47 100644 --- a/crates/ruff_linter/src/rules/refurb/rules/unnecessary_enumerate.rs +++ b/crates/ruff_linter/src/rules/refurb/rules/unnecessary_enumerate.rs @@ -179,7 +179,7 @@ pub(crate) fn unnecessary_enumerate(checker: &Checker, stmt_for: &ast::StmtFor) // If the `start` argument is set to something other than the `range` default, // there's no clear fix. let start = arguments.find_argument_value("start", 1); - if start.map_or(true, |start| { + if start.is_none_or(|start| { matches!( start, Expr::NumberLiteral(ast::ExprNumberLiteral { diff --git a/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB156_FURB156.py.snap b/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB156_FURB156.py.snap index c11a67749126e..2041d7694f934 100644 --- a/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB156_FURB156.py.snap +++ b/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB156_FURB156.py.snap @@ -331,4 +331,30 @@ FURB156.py:26:5: FURB156 [*] Use of hardcoded string charset 27 |+ string.digits 27 28 | # with comment 28 29 | ).capitalize() -29 30 | +29 30 | + +FURB156.py:31:6: FURB156 [*] Use of hardcoded string charset + | +30 | # example with augmented assignment +31 | _ += "0123456789" + | ^^^^^^^^^^^^ FURB156 +32 | +33 | # OK + | + = help: Replace hardcoded charset with `string.digits` + +ℹ Safe fix +1 1 | # Errors + 2 |+import string +2 3 | +3 4 | _ = "0123456789" +4 5 | _ = "01234567" +-------------------------------------------------------------------------------- +28 29 | ).capitalize() +29 30 | +30 31 | # example with augmented assignment +31 |-_ += "0123456789" + 32 |+_ += string.digits +32 33 | +33 34 | # OK +34 35 | diff --git a/crates/ruff_linter/src/rules/ruff/rules/missing_fstring_syntax.rs b/crates/ruff_linter/src/rules/ruff/rules/missing_fstring_syntax.rs index 994e6a3ff2e54..c44bd2cb8a214 100644 --- a/crates/ruff_linter/src/rules/ruff/rules/missing_fstring_syntax.rs +++ b/crates/ruff_linter/src/rules/ruff/rules/missing_fstring_syntax.rs @@ -224,7 +224,7 @@ fn should_be_fstring( semantic.scope_id, TypingOnlyBindingsStatus::Disallowed, ) - .map_or(true, |id| semantic.binding(id).kind.is_builtin()) + .is_none_or(|id| semantic.binding(id).kind.is_builtin()) { return false; } diff --git a/crates/ruff_linter/src/rules/ruff/rules/sequence_sorting.rs b/crates/ruff_linter/src/rules/ruff/rules/sequence_sorting.rs index 6926170ee9bbf..35715e75aa495 100644 --- a/crates/ruff_linter/src/rules/ruff/rules/sequence_sorting.rs +++ b/crates/ruff_linter/src/rules/ruff/rules/sequence_sorting.rs @@ -513,7 +513,7 @@ impl<'a> MultilineStringSequenceValue<'a> { // we'll end up with two commas after the final item, which would be invalid syntax) let needs_trailing_comma = self.ends_with_trailing_comma && first_non_trivia_token(TextSize::new(0), &postlude) - .map_or(true, |tok| tok.kind() != SimpleTokenKind::Comma); + .is_none_or(|tok| tok.kind() != SimpleTokenKind::Comma); self.items .sort_by(|a, b| sorting_style.compare(a.value, b.value)); @@ -979,7 +979,7 @@ fn 
multiline_string_sequence_postlude<'a>( if postlude.len() <= 2 { let mut reversed_postlude_chars = postlude.chars().rev(); if let Some(closing_paren @ (')' | '}' | ']')) = reversed_postlude_chars.next() { - if reversed_postlude_chars.next().map_or(true, |c| c == ',') { + if reversed_postlude_chars.next().is_none_or(|c| c == ',') { return Cow::Owned(format!(",{newline}{leading_indent}{closing_paren}")); } } diff --git a/crates/ruff_linter/src/rules/ruff/rules/suppression_comment_visitor.rs b/crates/ruff_linter/src/rules/ruff/rules/suppression_comment_visitor.rs index b97ed311799c3..24a1423260a9d 100644 --- a/crates/ruff_linter/src/rules/ruff/rules/suppression_comment_visitor.rs +++ b/crates/ruff_linter/src/rules/ruff/rules/suppression_comment_visitor.rs @@ -59,7 +59,7 @@ where fn can_skip(&mut self, node_end: TextSize) -> bool { self.comments .peek() - .map_or(true, |next| next.range.start() >= node_end) + .is_none_or(|next| next.range.start() >= node_end) } } diff --git a/crates/ruff_linter/src/rules/ruff/typing.rs b/crates/ruff_linter/src/rules/ruff/typing.rs index a12ab9b6df482..3c650bfe0bb69 100644 --- a/crates/ruff_linter/src/rules/ruff/typing.rs +++ b/crates/ruff_linter/src/rules/ruff/typing.rs @@ -178,25 +178,20 @@ impl<'a> TypingTarget<'a> { TypingTarget::Union(slice) => slice.is_some_and(|slice| { resolve_slice_value(slice).any(|element| { TypingTarget::try_from_expr(element, checker, version) - .map_or(true, |new_target| { - new_target.contains_none(checker, version) - }) + .is_none_or(|new_target| new_target.contains_none(checker, version)) }) }), TypingTarget::PEP604Union(left, right) => [left, right].iter().any(|element| { - TypingTarget::try_from_expr(element, checker, version).map_or(true, |new_target| { - new_target.contains_none(checker, version) - }) + TypingTarget::try_from_expr(element, checker, version) + .is_none_or(|new_target| new_target.contains_none(checker, version)) }), TypingTarget::Annotated(expr) => expr.is_some_and(|expr| { - TypingTarget::try_from_expr(expr, checker, version).map_or(true, |new_target| { - new_target.contains_none(checker, version) - }) + TypingTarget::try_from_expr(expr, checker, version) + .is_none_or(|new_target| new_target.contains_none(checker, version)) }), TypingTarget::ForwardReference(expr) => { - TypingTarget::try_from_expr(expr, checker, version).map_or(true, |new_target| { - new_target.contains_none(checker, version) - }) + TypingTarget::try_from_expr(expr, checker, version) + .is_none_or(|new_target| new_target.contains_none(checker, version)) } } } @@ -215,22 +210,22 @@ impl<'a> TypingTarget<'a> { TypingTarget::Union(slice) => slice.is_some_and(|slice| { resolve_slice_value(slice).any(|element| { TypingTarget::try_from_expr(element, checker, version) - .map_or(true, |new_target| new_target.contains_any(checker, version)) + .is_none_or(|new_target| new_target.contains_any(checker, version)) }) }), TypingTarget::PEP604Union(left, right) => [left, right].iter().any(|element| { TypingTarget::try_from_expr(element, checker, version) - .map_or(true, |new_target| new_target.contains_any(checker, version)) + .is_none_or(|new_target| new_target.contains_any(checker, version)) }), TypingTarget::Annotated(expr) | TypingTarget::Optional(expr) => { expr.is_some_and(|expr| { TypingTarget::try_from_expr(expr, checker, version) - .map_or(true, |new_target| new_target.contains_any(checker, version)) + .is_none_or(|new_target| new_target.contains_any(checker, version)) }) } TypingTarget::ForwardReference(expr) => { 
TypingTarget::try_from_expr(expr, checker, version) - .map_or(true, |new_target| new_target.contains_any(checker, version)) + .is_none_or(|new_target| new_target.contains_any(checker, version)) } } } diff --git a/crates/ruff_linter/src/test.rs b/crates/ruff_linter/src/test.rs index 6ded54565eee5..d15f49ed20509 100644 --- a/crates/ruff_linter/src/test.rs +++ b/crates/ruff_linter/src/test.rs @@ -16,7 +16,7 @@ use ruff_notebook::NotebookError; use ruff_python_ast::PySourceType; use ruff_python_codegen::Stylist; use ruff_python_index::Indexer; -use ruff_python_parser::ParseError; +use ruff_python_parser::{ParseError, ParseOptions}; use ruff_python_trivia::textwrap::dedent; use ruff_source_file::SourceFileBuilder; use ruff_text_size::Ranged; @@ -110,7 +110,11 @@ pub(crate) fn test_contents<'a>( settings: &LinterSettings, ) -> (Vec, Cow<'a, SourceKind>) { let source_type = PySourceType::from(path); - let parsed = ruff_python_parser::parse_unchecked_source(source_kind.source_code(), source_type); + let target_version = settings.resolve_target_version(path); + let options = ParseOptions::from(source_type).with_target_version(target_version); + let parsed = ruff_python_parser::parse_unchecked(source_kind.source_code(), options.clone()) + .try_into_module() + .expect("PySourceType always parses into a module"); let locator = Locator::new(source_kind.source_code()); let stylist = Stylist::from_tokens(parsed.tokens(), locator.contents()); let indexer = Indexer::from_tokens(parsed.tokens(), locator.contents()); @@ -134,6 +138,7 @@ pub(crate) fn test_contents<'a>( source_kind, source_type, &parsed, + target_version, ); let source_has_errors = !parsed.is_valid(); @@ -174,7 +179,9 @@ pub(crate) fn test_contents<'a>( transformed = Cow::Owned(transformed.updated(fixed_contents, &source_map)); let parsed = - ruff_python_parser::parse_unchecked_source(transformed.source_code(), source_type); + ruff_python_parser::parse_unchecked(transformed.source_code(), options.clone()) + .try_into_module() + .expect("PySourceType always parses into a module"); let locator = Locator::new(transformed.source_code()); let stylist = Stylist::from_tokens(parsed.tokens(), locator.contents()); let indexer = Indexer::from_tokens(parsed.tokens(), locator.contents()); @@ -197,6 +204,7 @@ pub(crate) fn test_contents<'a>( &transformed, source_type, &parsed, + target_version, ); if !parsed.is_valid() && !source_has_errors { diff --git a/crates/ruff_notebook/src/cell.rs b/crates/ruff_notebook/src/cell.rs index c917dd16274b8..949f55726a1f6 100644 --- a/crates/ruff_notebook/src/cell.rs +++ b/crates/ruff_notebook/src/cell.rs @@ -66,7 +66,7 @@ impl Cell { .metadata .vscode .as_ref() - .map_or(true, |vscode| vscode.language_id == "python") => + .is_none_or(|vscode| vscode.language_id == "python") => { &cell.source } diff --git a/crates/ruff_python_ast/src/nodes.rs b/crates/ruff_python_ast/src/nodes.rs index 7463fe5068c62..e48349997c99b 100644 --- a/crates/ruff_python_ast/src/nodes.rs +++ b/crates/ruff_python_ast/src/nodes.rs @@ -2800,7 +2800,7 @@ impl Pattern { pub fn is_wildcard(&self) -> bool { match self { Pattern::MatchAs(PatternMatchAs { pattern, .. }) => { - pattern.as_deref().map_or(true, Pattern::is_wildcard) + pattern.as_deref().is_none_or(Pattern::is_wildcard) } Pattern::MatchOr(PatternMatchOr { patterns, .. 
}) => { patterns.iter().all(Pattern::is_wildcard) diff --git a/crates/ruff_python_ast/src/script.rs b/crates/ruff_python_ast/src/script.rs index f6b592a7b9077..287769d338f5a 100644 --- a/crates/ruff_python_ast/src/script.rs +++ b/crates/ruff_python_ast/src/script.rs @@ -67,7 +67,7 @@ impl ScriptTag { let mut lines = contents.lines(); // Ensure that the first line is exactly `# /// script`. - if !lines.next().is_some_and(|line| line == "# /// script") { + if lines.next().is_none_or(|line| line != "# /// script") { return None; } diff --git a/crates/ruff_python_formatter/src/comments/visitor.rs b/crates/ruff_python_formatter/src/comments/visitor.rs index 52bd5d2009784..0633ffd07015e 100644 --- a/crates/ruff_python_formatter/src/comments/visitor.rs +++ b/crates/ruff_python_formatter/src/comments/visitor.rs @@ -65,7 +65,7 @@ impl<'a, 'builder> CommentsVisitor<'a, 'builder> { fn can_skip(&mut self, node_end: TextSize) -> bool { self.comment_ranges .peek() - .map_or(true, |next_comment| next_comment.start() >= node_end) + .is_none_or(|next_comment| next_comment.start() >= node_end) } } diff --git a/crates/ruff_python_formatter/src/expression/expr_slice.rs b/crates/ruff_python_formatter/src/expression/expr_slice.rs index 29358ab7041d9..6f5f844342b7d 100644 --- a/crates/ruff_python_formatter/src/expression/expr_slice.rs +++ b/crates/ruff_python_formatter/src/expression/expr_slice.rs @@ -60,9 +60,9 @@ impl FormatNodeRule for FormatExprSlice { // Handle spacing around the colon(s) // https://black.readthedocs.io/en/stable/the_black_code_style/current_style.html#slices - let lower_simple = lower.as_ref().map_or(true, |expr| is_simple_expr(expr)); - let upper_simple = upper.as_ref().map_or(true, |expr| is_simple_expr(expr)); - let step_simple = step.as_ref().map_or(true, |expr| is_simple_expr(expr)); + let lower_simple = lower.as_ref().is_none_or(|expr| is_simple_expr(expr)); + let upper_simple = upper.as_ref().is_none_or(|expr| is_simple_expr(expr)); + let step_simple = step.as_ref().is_none_or(|expr| is_simple_expr(expr)); let all_simple = lower_simple && upper_simple && step_simple; // lower diff --git a/crates/ruff_python_formatter/src/other/parameter_with_default.rs b/crates/ruff_python_formatter/src/other/parameter_with_default.rs index 74164fb5d9266..a4d09671dfb31 100644 --- a/crates/ruff_python_formatter/src/other/parameter_with_default.rs +++ b/crates/ruff_python_formatter/src/other/parameter_with_default.rs @@ -49,8 +49,7 @@ impl FormatNodeRule for FormatParameterWithDefault { debug_assert!(equals.is_some_and(|token| token.kind == SimpleTokenKind::Equals)); let lparens = tokenizer.next(); debug_assert!(lparens - .as_ref() - .map_or(true, |token| token.kind == SimpleTokenKind::LParen)); + .as_ref().is_none_or(|token| token.kind == SimpleTokenKind::LParen)); lparens.is_none() }); let needs_line_break = needs_line_break_trailing || needs_line_break_leading; diff --git a/crates/ruff_python_parser/src/error.rs b/crates/ruff_python_parser/src/error.rs index 98efdf52e2e48..0c5435e91f24e 100644 --- a/crates/ruff_python_parser/src/error.rs +++ b/crates/ruff_python_parser/src/error.rs @@ -1,5 +1,6 @@ -use std::fmt; +use std::fmt::{self, Display}; +use ruff_python_ast::PythonVersion; use ruff_text_size::TextRange; use crate::TokenKind; @@ -426,6 +427,50 @@ impl std::fmt::Display for LexicalErrorType { } } +/// Represents a version-related syntax error detected during parsing. 
+/// +/// An example of a version-related error is the use of a `match` statement before Python 3.10, when +/// it was first introduced. See [`UnsupportedSyntaxErrorKind`] for other kinds of errors. +#[derive(Debug, PartialEq, Clone)] +pub struct UnsupportedSyntaxError { + pub kind: UnsupportedSyntaxErrorKind, + pub range: TextRange, + /// The target [`PythonVersion`] for which this error was detected. + /// + /// This is different from the version reported by the + /// [`minimum_version`](UnsupportedSyntaxError::minimum_version) method, which is the earliest + /// allowed version for this piece of syntax. The `target_version` is primarily used for + /// user-facing error messages. + pub target_version: PythonVersion, +} + +impl UnsupportedSyntaxError { + /// The earliest allowed version for the syntax associated with this error. + pub const fn minimum_version(&self) -> PythonVersion { + match self.kind { + UnsupportedSyntaxErrorKind::MatchBeforePy310 => PythonVersion::PY310, + } + } +} + +impl Display for UnsupportedSyntaxError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self.kind { + UnsupportedSyntaxErrorKind::MatchBeforePy310 => write!( + f, + "Cannot use `match` statement on Python {} (syntax was added in Python {})", + self.target_version, + self.minimum_version(), + ), + } + } +} + +#[derive(Debug, PartialEq, Clone, Copy)] +pub enum UnsupportedSyntaxErrorKind { + MatchBeforePy310, +} + #[cfg(target_pointer_width = "64")] mod sizes { use crate::error::{LexicalError, LexicalErrorType}; diff --git a/crates/ruff_python_parser/src/lexer.rs b/crates/ruff_python_parser/src/lexer.rs index 0bd8472daf8d2..f03630a92435b 100644 --- a/crates/ruff_python_parser/src/lexer.rs +++ b/crates/ruff_python_parser/src/lexer.rs @@ -1256,7 +1256,7 @@ impl<'src> Lexer<'src> { // `IpyEscapeKind::Magic` and `IpyEscapeKind::Help` because of the initial `%` and `??` // tokens. if question_count > 2 - || value.chars().last().map_or(true, is_python_whitespace) + || value.chars().last().is_none_or(is_python_whitespace) || !matches!(self.cursor.first(), '\n' | '\r' | EOF_CHAR) { // Not a help end escape command, so continue with the lexing. diff --git a/crates/ruff_python_parser/src/lib.rs b/crates/ruff_python_parser/src/lib.rs index 61db67ddad4f4..c2c21adec8b60 100644 --- a/crates/ruff_python_parser/src/lib.rs +++ b/crates/ruff_python_parser/src/lib.rs @@ -67,7 +67,10 @@ use std::iter::FusedIterator; use std::ops::Deref; -pub use crate::error::{FStringErrorType, LexicalErrorType, ParseError, ParseErrorType}; +pub use crate::error::{ + FStringErrorType, LexicalErrorType, ParseError, ParseErrorType, UnsupportedSyntaxError, + UnsupportedSyntaxErrorKind, +}; pub use crate::parser::ParseOptions; pub use crate::token::{Token, TokenKind}; @@ -305,6 +308,7 @@ pub struct Parsed<T> { syntax: T, tokens: Tokens, errors: Vec<ParseError>, + unsupported_syntax_errors: Vec<UnsupportedSyntaxError>, } impl<T> Parsed<T> { @@ -323,6 +327,11 @@ impl<T> Parsed<T> { &self.errors } + /// Returns a list of version-related syntax errors found during parsing. + pub fn unsupported_syntax_errors(&self) -> &[UnsupportedSyntaxError] { + &self.unsupported_syntax_errors + } + /// Consumes the [`Parsed`] output and returns the contained syntax node. pub fn into_syntax(self) -> T { self.syntax } @@ -334,12 +343,18 @@ impl<T> Parsed<T> { } /// Returns `true` if the parsed source code is valid i.e., it has no syntax errors. + /// + /// Note that this does not include version-related + /// [`unsupported_syntax_errors`](Parsed::unsupported_syntax_errors).
pub fn is_valid(&self) -> bool { self.errors.is_empty() } /// Returns the [`Parsed`] output as a [`Result`], returning [`Ok`] if it has no syntax errors, /// or [`Err`] containing the first [`ParseError`] encountered. + /// + /// Note that any [`unsupported_syntax_errors`](Parsed::unsupported_syntax_errors) will not + /// cause [`Err`] to be returned. pub fn as_result(&self) -> Result<&Parsed<T>, &[ParseError]> { if self.is_valid() { Ok(self) @@ -350,6 +365,9 @@ impl<T> Parsed<T> { /// Consumes the [`Parsed`] output and returns a [`Result`] which is [`Ok`] if it has no syntax /// errors, or [`Err`] containing the first [`ParseError`] encountered. + /// + /// Note that any [`unsupported_syntax_errors`](Parsed::unsupported_syntax_errors) will not + /// cause [`Err`] to be returned. pub(crate) fn into_result(self) -> Result<Parsed<T>, ParseError> { if self.is_valid() { Ok(self) @@ -373,6 +391,7 @@ impl Parsed<Mod> { syntax: module, tokens: self.tokens, errors: self.errors, + unsupported_syntax_errors: self.unsupported_syntax_errors, }), Mod::Expression(_) => None, } @@ -392,6 +411,7 @@ impl Parsed<Mod> { syntax: expression, tokens: self.tokens, errors: self.errors, + unsupported_syntax_errors: self.unsupported_syntax_errors, }), } } diff --git a/crates/ruff_python_parser/src/parser/mod.rs b/crates/ruff_python_parser/src/parser/mod.rs index 951727667f0ce..e7382f6e0b109 100644 --- a/crates/ruff_python_parser/src/parser/mod.rs +++ b/crates/ruff_python_parser/src/parser/mod.rs @@ -5,6 +5,7 @@ use bitflags::bitflags; use ruff_python_ast::{Mod, ModExpression, ModModule}; use ruff_text_size::{Ranged, TextRange, TextSize}; +use crate::error::UnsupportedSyntaxError; use crate::parser::expression::ExpressionContext; use crate::parser::progress::{ParserProgress, TokenId}; use crate::token::TokenValue; @@ -35,6 +36,9 @@ pub(crate) struct Parser<'src> { /// Stores all the syntax errors found during the parsing. errors: Vec<ParseError>, + /// Stores non-fatal syntax errors found during parsing, such as version-related errors. + unsupported_syntax_errors: Vec<UnsupportedSyntaxError>, + /// Options for how the code will be parsed.
options: ParseOptions, @@ -70,6 +74,7 @@ impl<'src> Parser<'src> { options, source, errors: Vec::new(), + unsupported_syntax_errors: Vec::new(), tokens, recovery_context: RecoveryContext::empty(), prev_token_end: TextSize::new(0), @@ -166,6 +171,7 @@ impl<'src> Parser<'src> { syntax, tokens: Tokens::new(tokens), errors: parse_errors, + unsupported_syntax_errors: self.unsupported_syntax_errors, }; } @@ -197,6 +203,7 @@ impl<'src> Parser<'src> { syntax, tokens: Tokens::new(tokens), errors: merged, + unsupported_syntax_errors: self.unsupported_syntax_errors, } } @@ -658,6 +665,7 @@ impl<'src> Parser<'src> { ParserCheckpoint { tokens: self.tokens.checkpoint(), errors_position: self.errors.len(), + unsupported_syntax_errors_position: self.unsupported_syntax_errors.len(), current_token_id: self.current_token_id, prev_token_end: self.prev_token_end, recovery_context: self.recovery_context, @@ -669,6 +677,7 @@ impl<'src> Parser<'src> { let ParserCheckpoint { tokens, errors_position, + unsupported_syntax_errors_position, current_token_id, prev_token_end, recovery_context, @@ -676,6 +685,8 @@ impl<'src> Parser<'src> { self.tokens.rewind(tokens); self.errors.truncate(errors_position); + self.unsupported_syntax_errors + .truncate(unsupported_syntax_errors_position); self.current_token_id = current_token_id; self.prev_token_end = prev_token_end; self.recovery_context = recovery_context; @@ -685,6 +696,7 @@ impl<'src> Parser<'src> { struct ParserCheckpoint { tokens: TokenSourceCheckpoint, errors_position: usize, + unsupported_syntax_errors_position: usize, current_token_id: TokenId, prev_token_end: TextSize, recovery_context: RecoveryContext, diff --git a/crates/ruff_python_parser/src/parser/options.rs b/crates/ruff_python_parser/src/parser/options.rs index 27a87a32ba4fb..6258216d8860a 100644 --- a/crates/ruff_python_parser/src/parser/options.rs +++ b/crates/ruff_python_parser/src/parser/options.rs @@ -1,4 +1,4 @@ -use ruff_python_ast::PySourceType; +use ruff_python_ast::{PySourceType, PythonVersion}; use crate::{AsMode, Mode}; @@ -20,15 +20,28 @@ use crate::{AsMode, Mode}; /// /// let options = ParseOptions::from(PySourceType::Python); /// ``` -#[derive(Debug)] +#[derive(Clone, Debug)] pub struct ParseOptions { /// Specify the mode in which the code will be parsed. pub(crate) mode: Mode, + /// Target version for detecting version-related syntax errors. 
+ pub(crate) target_version: PythonVersion, +} + +impl ParseOptions { + #[must_use] + pub fn with_target_version(mut self, target_version: PythonVersion) -> Self { + self.target_version = target_version; + self + } } impl From<Mode> for ParseOptions { fn from(mode: Mode) -> Self { - Self { mode } + Self { + mode, + target_version: PythonVersion::default(), + } } } @@ -36,6 +49,7 @@ impl From<PySourceType> for ParseOptions { fn from(source_type: PySourceType) -> Self { Self { mode: source_type.as_mode(), + target_version: PythonVersion::default(), } } } diff --git a/crates/ruff_python_parser/src/parser/statement.rs b/crates/ruff_python_parser/src/parser/statement.rs index e76fc08915e1f..5e5bee4ce252e 100644 --- a/crates/ruff_python_parser/src/parser/statement.rs +++ b/crates/ruff_python_parser/src/parser/statement.rs @@ -5,7 +5,8 @@ use rustc_hash::{FxBuildHasher, FxHashSet}; use ruff_python_ast::name::Name; use ruff_python_ast::{ - self as ast, ExceptHandler, Expr, ExprContext, IpyEscapeKind, Operator, Stmt, WithItem, + self as ast, ExceptHandler, Expr, ExprContext, IpyEscapeKind, Operator, PythonVersion, Stmt, + WithItem, }; use ruff_text_size::{Ranged, TextSize}; @@ -16,7 +17,7 @@ use crate::parser::{ }; use crate::token::{TokenKind, TokenValue}; use crate::token_set::TokenSet; -use crate::{Mode, ParseErrorType}; +use crate::{Mode, ParseErrorType, UnsupportedSyntaxError, UnsupportedSyntaxErrorKind}; use super::expression::ExpressionContext; use super::Parenthesized; @@ -2257,11 +2258,21 @@ impl<'src> Parser<'src> { let start = self.node_start(); self.bump(TokenKind::Match); + let match_range = self.node_range(start); + let subject = self.parse_match_subject_expression(); self.expect(TokenKind::Colon); let cases = self.parse_match_body(); + if self.options.target_version < PythonVersion::PY310 { + self.unsupported_syntax_errors.push(UnsupportedSyntaxError { + kind: UnsupportedSyntaxErrorKind::MatchBeforePy310, + range: match_range, + target_version: self.options.target_version, + }); + } + ast::StmtMatch { subject: Box::new(subject), cases, diff --git a/crates/ruff_python_resolver/src/implicit_imports.rs b/crates/ruff_python_resolver/src/implicit_imports.rs index 693b6572ca6a1..afa81d27ba8ef 100644 --- a/crates/ruff_python_resolver/src/implicit_imports.rs +++ b/crates/ruff_python_resolver/src/implicit_imports.rs @@ -55,7 +55,7 @@ impl ImplicitImports { // Always prefer stub files over non-stub files. if submodules .get(name) - .map_or(true, |implicit_import| !implicit_import.is_stub_file) + .is_none_or(|implicit_import| !implicit_import.is_stub_file) { submodules.insert( name.to_string(), diff --git a/crates/ruff_python_semantic/src/model.rs b/crates/ruff_python_semantic/src/model.rs index 7805db18c4175..62145f4234da5 100644 --- a/crates/ruff_python_semantic/src/model.rs +++ b/crates/ruff_python_semantic/src/model.rs @@ -344,14 +344,14 @@ impl<'a> SemanticModel<'a> { pub fn is_available_in_scope(&self, member: &str, scope_id: ScopeId) -> bool { self.lookup_symbol_in_scope(member, scope_id, false) .map(|binding_id| &self.bindings[binding_id]) - .map_or(true, |binding| binding.kind.is_builtin()) + .is_none_or(|binding| binding.kind.is_builtin()) } /// Resolve a `del` reference to `symbol` at `range`. pub fn resolve_del(&mut self, symbol: &str, range: TextRange) { let is_unbound = self.scopes[self.scope_id] .get(symbol) - .map_or(true, |binding_id| { + .is_none_or(|binding_id| { // Treat the deletion of a name as a reference to that name.
self.add_local_reference(binding_id, ExprContext::Del, range); self.bindings[binding_id].is_unbound() @@ -1508,7 +1508,7 @@ impl<'a> SemanticModel<'a> { if self .global_scope() .get(name) - .map_or(true, |binding_id| self.bindings[binding_id].is_unbound()) + .is_none_or(|binding_id| self.bindings[binding_id].is_unbound()) { let id = self.bindings.push(Binding { kind: BindingKind::Assignment, diff --git a/crates/ruff_server/Cargo.toml b/crates/ruff_server/Cargo.toml index c9678e1a12b32..a3d0b7cee5621 100644 --- a/crates/ruff_server/Cargo.toml +++ b/crates/ruff_server/Cargo.toml @@ -38,8 +38,9 @@ serde = { workspace = true } serde_json = { workspace = true } shellexpand = { workspace = true } thiserror = { workspace = true } +toml = { workspace = true } tracing = { workspace = true } -tracing-subscriber = { workspace = true } +tracing-subscriber = { workspace = true, features = ["chrono"] } [dev-dependencies] insta = { workspace = true } diff --git a/crates/ruff_server/resources/test/fixtures/settings/inline_configuration.json b/crates/ruff_server/resources/test/fixtures/settings/inline_configuration.json new file mode 100644 index 0000000000000..50460eb0b350b --- /dev/null +++ b/crates/ruff_server/resources/test/fixtures/settings/inline_configuration.json @@ -0,0 +1,16 @@ +{ + "settings": { + "configuration": { + "line-length": 100, + "lint": { + "extend-select": ["I001"] + }, + "format": { + "quote-style": "single" + } + }, + "lint": { + "extendSelect": ["RUF001"] + } + } +} diff --git a/crates/ruff_server/src/lint.rs b/crates/ruff_server/src/lint.rs index 9b9e8a06d2e0d..53cff99ad3286 100644 --- a/crates/ruff_server/src/lint.rs +++ b/crates/ruff_server/src/lint.rs @@ -24,7 +24,7 @@ use ruff_linter::{ use ruff_notebook::Notebook; use ruff_python_codegen::Stylist; use ruff_python_index::Indexer; -use ruff_python_parser::ParseError; +use ruff_python_parser::{ParseError, ParseOptions, UnsupportedSyntaxError}; use ruff_source_file::LineIndex; use ruff_text_size::{Ranged, TextRange}; @@ -94,8 +94,18 @@ pub(crate) fn check( let source_type = query.source_type(); + let target_version = if let Some(path) = &document_path { + settings.linter.resolve_target_version(path) + } else { + settings.linter.unresolved_target_version + }; + + let parse_options = ParseOptions::from(source_type).with_target_version(target_version); + // Parse once. - let parsed = ruff_python_parser::parse_unchecked_source(source_kind.source_code(), source_type); + let parsed = ruff_python_parser::parse_unchecked(source_kind.source_code(), parse_options) + .try_into_module() + .expect("PySourceType always parses to a ModModule"); // Map row and column locations to byte slices (lazily). 
let locator = Locator::new(source_kind.source_code()); @@ -122,6 +132,7 @@ pub(crate) fn check( &source_kind, source_type, &parsed, + target_version, ); let noqa_edits = generate_noqa_edits( @@ -164,14 +175,25 @@ pub(crate) fn check( let lsp_diagnostics = lsp_diagnostics.chain( show_syntax_errors .then(|| { - parsed.errors().iter().map(|parse_error| { - parse_error_to_lsp_diagnostic( - parse_error, - &source_kind, - locator.to_index(), - encoding, - ) - }) + parsed + .errors() + .iter() + .map(|parse_error| { + parse_error_to_lsp_diagnostic( + parse_error, + &source_kind, + locator.to_index(), + encoding, + ) + }) + .chain(parsed.unsupported_syntax_errors().iter().map(|error| { + unsupported_syntax_error_to_lsp_diagnostic( + error, + &source_kind, + locator.to_index(), + encoding, + ) + })) }) .into_iter() .flatten(), @@ -350,6 +372,45 @@ fn parse_error_to_lsp_diagnostic( ) } +fn unsupported_syntax_error_to_lsp_diagnostic( + unsupported_syntax_error: &UnsupportedSyntaxError, + source_kind: &SourceKind, + index: &LineIndex, + encoding: PositionEncoding, +) -> (usize, lsp_types::Diagnostic) { + let range: lsp_types::Range; + let cell: usize; + + if let Some(notebook_index) = source_kind.as_ipy_notebook().map(Notebook::index) { + NotebookRange { cell, range } = unsupported_syntax_error.range.to_notebook_range( + source_kind.source_code(), + index, + notebook_index, + encoding, + ); + } else { + cell = usize::default(); + range = unsupported_syntax_error + .range + .to_range(source_kind.source_code(), index, encoding); + } + + ( + cell, + lsp_types::Diagnostic { + range, + severity: Some(lsp_types::DiagnosticSeverity::ERROR), + tags: None, + code: None, + code_description: None, + source: Some(DIAGNOSTIC_NAME.into()), + message: format!("SyntaxError: {unsupported_syntax_error}"), + related_information: None, + data: None, + }, + ) +} + fn diagnostic_edit_range( range: TextRange, source_kind: &SourceKind, diff --git a/crates/ruff_server/src/logging.rs b/crates/ruff_server/src/logging.rs index 1b23bff3bddf7..d4747cadae930 100644 --- a/crates/ruff_server/src/logging.rs +++ b/crates/ruff_server/src/logging.rs @@ -9,7 +9,7 @@ use serde::Deserialize; use std::{path::PathBuf, str::FromStr, sync::Arc}; use tracing::level_filters::LevelFilter; use tracing_subscriber::{ - fmt::{format::FmtSpan, time::Uptime, writer::BoxMakeWriter}, + fmt::{format::FmtSpan, time::ChronoLocal, writer::BoxMakeWriter}, layer::SubscriberExt, Layer, }; @@ -49,10 +49,13 @@ pub(crate) fn init_logging(log_level: LogLevel, log_file: Option<&std::path::Pat Some(file) => BoxMakeWriter::new(Arc::new(file)), None => BoxMakeWriter::new(std::io::stderr), }; + + let is_trace_level = log_level == LogLevel::Trace; let subscriber = tracing_subscriber::Registry::default().with( tracing_subscriber::fmt::layer() - .with_timer(Uptime::default()) - .with_thread_names(true) + .with_timer(ChronoLocal::new("%Y-%m-%d %H:%M:%S.%f".to_string())) + .with_thread_names(is_trace_level) + .with_target(is_trace_level) .with_ansi(false) .with_writer(logger) .with_span_events(FmtSpan::ENTER) diff --git a/crates/ruff_server/src/session/index/ruff_settings.rs b/crates/ruff_server/src/session/index/ruff_settings.rs index a9962c5fa7926..92c5d74b3e0ce 100644 --- a/crates/ruff_server/src/session/index/ruff_settings.rs +++ b/crates/ruff_server/src/session/index/ruff_settings.rs @@ -18,7 +18,9 @@ use ruff_workspace::{ resolver::{ConfigurationTransformer, Relativity}, }; -use crate::session::settings::{ConfigurationPreference, ResolvedEditorSettings}; +use 
crate::session::settings::{ + ConfigurationPreference, ResolvedConfiguration, ResolvedEditorSettings, +}; #[derive(Debug)] pub struct RuffSettings { @@ -363,21 +365,39 @@ impl ConfigurationTransformer for EditorConfigurationTransformer<'_> { ..Configuration::default() }; - // Merge in the editor-specified configuration file, if it exists. - let editor_configuration = if let Some(config_file_path) = configuration { - tracing::debug!( - "Combining settings from editor-specified configuration file at: {}", - config_file_path.display() - ); - match open_configuration_file(&config_file_path) { - Ok(config_from_file) => editor_configuration.combine(config_from_file), - err => { - tracing::error!( - "{:?}", - err.context("Unable to load editor-specified configuration file") - .unwrap_err() + // Merge in the editor-specified configuration. + let editor_configuration = if let Some(configuration) = configuration { + match configuration { + ResolvedConfiguration::FilePath(path) => { + tracing::debug!( + "Combining settings from editor-specified configuration file at: {}", + path.display() + ); + match open_configuration_file(&path) { + Ok(config_from_file) => editor_configuration.combine(config_from_file), + err => { + tracing::error!( + "{:?}", + err.context("Unable to load editor-specified configuration file") + .unwrap_err() + ); + editor_configuration + } + } + } + ResolvedConfiguration::Inline(options) => { + tracing::debug!( + "Combining settings from editor-specified inline configuration" ); - editor_configuration + match Configuration::from_options(options, None, project_root) { + Ok(configuration) => editor_configuration.combine(configuration), + Err(err) => { + tracing::error!( + "Unable to load editor-specified inline configuration: {err:?}", + ); + editor_configuration + } + } } } } else { @@ -411,3 +431,47 @@ impl ConfigurationTransformer for IdentityTransformer { config } } + +#[cfg(test)] +mod tests { + use ruff_linter::line_width::LineLength; + use ruff_workspace::options::Options; + + use super::*; + + /// This test ensures that the inline configuration is correctly applied to the configuration. + #[test] + fn inline_settings() { + let editor_settings = ResolvedEditorSettings { + configuration: Some(ResolvedConfiguration::Inline(Options { + line_length: Some(LineLength::try_from(120).unwrap()), + ..Default::default() + })), + ..Default::default() + }; + + let config = EditorConfigurationTransformer(&editor_settings, Path::new("/src/project")) + .transform(Configuration::default()); + + assert_eq!(config.line_length.unwrap().value(), 120); + } + + /// This test ensures that between the inline configuration and specific settings, the specific + /// settings is prioritized. 
+ #[test] + fn inline_and_specific_settings_resolution_order() { + let editor_settings = ResolvedEditorSettings { + configuration: Some(ResolvedConfiguration::Inline(Options { + line_length: Some(LineLength::try_from(120).unwrap()), + ..Default::default() + })), + line_length: Some(LineLength::try_from(100).unwrap()), + ..Default::default() + }; + + let config = EditorConfigurationTransformer(&editor_settings, Path::new("/src/project")) + .transform(Configuration::default()); + + assert_eq!(config.line_length.unwrap().value(), 100); + } +} diff --git a/crates/ruff_server/src/session/settings.rs b/crates/ruff_server/src/session/settings.rs index 50bbd413077a8..f8fba08c4ef80 100644 --- a/crates/ruff_server/src/session/settings.rs +++ b/crates/ruff_server/src/session/settings.rs @@ -3,8 +3,11 @@ use std::{ops::Deref, path::PathBuf, str::FromStr}; use lsp_types::Url; use rustc_hash::FxHashMap; use serde::Deserialize; +use serde_json::{Map, Value}; +use thiserror::Error; use ruff_linter::{line_width::LineLength, RuleSelector}; +use ruff_workspace::options::Options; /// Maps a workspace URI to its associated client settings. Used during server initialization. pub(crate) type WorkspaceSettingsMap = FxHashMap<Url, ClientSettings>; @@ -29,9 +32,9 @@ pub(crate) struct ResolvedClientSettings { /// LSP client settings. These fields are optional because we don't want to override file-based linter/formatting settings /// if these were un-set. #[derive(Clone, Debug)] -#[cfg_attr(test, derive(PartialEq, Eq))] +#[cfg_attr(test, derive(Default, PartialEq, Eq))] pub(crate) struct ResolvedEditorSettings { - pub(super) configuration: Option<PathBuf>, + pub(super) configuration: Option<ResolvedConfiguration>, pub(super) lint_preview: Option<bool>, pub(super) format_preview: Option<bool>, pub(super) select: Option<Vec<RuleSelector>>, @@ -42,6 +45,48 @@ pub(crate) struct ResolvedEditorSettings { pub(super) configuration_preference: ConfigurationPreference, } +/// The resolved configuration from the client settings. +#[derive(Clone, Debug)] +#[cfg_attr(test, derive(PartialEq, Eq))] +pub(crate) enum ResolvedConfiguration { + FilePath(PathBuf), + Inline(Options), +} + +impl TryFrom<&ClientConfiguration> for ResolvedConfiguration { + type Error = ResolvedConfigurationError; + + fn try_from(value: &ClientConfiguration) -> Result<Self, Self::Error> { + match value { + ClientConfiguration::String(path) => Ok(ResolvedConfiguration::FilePath( + PathBuf::from(shellexpand::full(path)?.as_ref()), + )), + ClientConfiguration::Object(map) => { + let options = toml::Table::try_from(map)?.try_into::<Options>()?; + if options.extend.is_some() { + Err(ResolvedConfigurationError::ExtendNotSupported) + } else { + Ok(ResolvedConfiguration::Inline(options)) + } + } + } + } +} + +/// An error that can occur when trying to resolve the `configuration` value from the client +/// settings. +#[derive(Debug, Error)] +pub(crate) enum ResolvedConfigurationError { + #[error(transparent)] + EnvVarLookupError(#[from] shellexpand::LookupError<std::env::VarError>), + #[error("error serializing configuration to TOML: {0}")] + InvalidToml(#[from] toml::ser::Error), + #[error(transparent)] + InvalidRuffSchema(#[from] toml::de::Error), + #[error("using `extend` is unsupported for inline configuration")] + ExtendNotSupported, +} + /// Determines how multiple conflicting configurations should be resolved - in this /// case, the configuration from the client settings and configuration from local /// `.toml` files (aka 'workspace' configuration).
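The `TryFrom<&ClientConfiguration>` implementation above is the heart of inline configuration support: a string is treated as a path to a configuration file (with `~` and environment variables expanded), while a JSON object is re-serialized as TOML and deserialized into the same `Options` schema that `ruff.toml` uses, with `extend` rejected. The following is a minimal standalone sketch of that JSON-to-TOML-to-`Options` round trip outside the LSP plumbing; the `json!` input and the error handling are illustrative, while the two conversion calls mirror the change above.

```rust
use ruff_workspace::options::Options;
use serde_json::{json, Value};

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // A hypothetical editor-provided `configuration` object (same shape as the
    // `inline_configuration.json` fixture added in this change).
    let raw = json!({
        "line-length": 100,
        "lint": { "extend-select": ["I001"] }
    });
    let Value::Object(map) = raw else { unreachable!() };

    // Re-serialize the JSON object as a TOML table, then deserialize it into the
    // `Options` schema used for `ruff.toml`.
    let options = toml::Table::try_from(&map)?.try_into::<Options>()?;

    // Inline configuration may not use `extend` (mirrors `ExtendNotSupported` above).
    assert!(options.extend.is_none());
    assert!(options.line_length.is_some());
    Ok(())
}
```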
@@ -57,12 +102,23 @@ pub(crate) enum ConfigurationPreference { EditorOnly, } +/// A direct representation of the `configuration` schema within the client settings. +#[derive(Debug, Deserialize)] +#[cfg_attr(test, derive(PartialEq, Eq))] +#[serde(untagged)] +enum ClientConfiguration { + /// A path to a configuration file. + String(String), + /// An object containing the configuration options. + Object(Map<String, Value>), +} + /// This is a direct representation of the settings schema sent by the client. #[derive(Debug, Deserialize, Default)] #[cfg_attr(test, derive(PartialEq, Eq))] #[serde(rename_all = "camelCase")] pub struct ClientSettings { - configuration: Option<String>, + configuration: Option<ClientConfiguration>, fix_all: Option<bool>, organize_imports: Option<bool>, lint: Option, @@ -306,11 +362,17 @@ impl ResolvedClientSettings { ), editor_settings: ResolvedEditorSettings { configuration: Self::resolve_optional(all_settings, |settings| { - settings - .configuration - .as_ref() - .and_then(|config_path| shellexpand::full(config_path).ok()) - .map(|config_path| PathBuf::from(config_path.as_ref())) + settings.configuration.as_ref().and_then(|configuration| { + match ResolvedConfiguration::try_from(configuration) { + Ok(configuration) => Some(configuration), + Err(err) => { + tracing::error!( + "Failed to load settings from `configuration`: {err}" + ); + None + } + } + }) }), lint_preview: Self::resolve_optional(all_settings, |settings| { settings.lint.as_ref()?.preview @@ -425,6 +487,10 @@ impl Default for InitializationOptions { #[cfg(test)] mod tests { use insta::assert_debug_snapshot; + use ruff_python_formatter::QuoteStyle; + use ruff_workspace::options::{ + FormatOptions as RuffFormatOptions, LintCommonOptions, LintOptions, + }; use serde::de::DeserializeOwned; #[cfg(not(windows))] @@ -445,6 +511,9 @@ mod tests { const EMPTY_MULTIPLE_WORKSPACE_INIT_OPTIONS_FIXTURE: &str = include_str!("../../resources/test/fixtures/settings/empty_multiple_workspace.json"); + const INLINE_CONFIGURATION_FIXTURE: &str = + include_str!("../../resources/test/fixtures/settings/inline_configuration.json"); + fn deserialize_fixture<T: DeserializeOwned>(content: &str) -> T { serde_json::from_str(content).expect("test fixture JSON should deserialize") } @@ -855,4 +924,48 @@ mod tests { all_settings.set_preview(true); assert_preview_all_settings(&all_settings, true); } + + #[test] + fn inline_configuration() { + let options: InitializationOptions = deserialize_fixture(INLINE_CONFIGURATION_FIXTURE); + + let AllSettings { + global_settings, + workspace_settings: None, + } = AllSettings::from_init_options(options) + else { + panic!("Expected global settings only"); + }; + + assert_eq!( + ResolvedClientSettings::global(&global_settings), + ResolvedClientSettings { + fix_all: true, + organize_imports: true, + lint_enable: true, + disable_rule_comment_enable: true, + fix_violation_enable: true, + show_syntax_errors: true, + editor_settings: ResolvedEditorSettings { + configuration: Some(ResolvedConfiguration::Inline(Options { + line_length: Some(LineLength::try_from(100).unwrap()), + lint: Some(LintOptions { + common: LintCommonOptions { + extend_select: Some(vec![RuleSelector::from_str("I001").unwrap()]), + ..Default::default() + }, + ..Default::default() + }), + format: Some(RuffFormatOptions { + quote_style: Some(QuoteStyle::Single), + ..Default::default() + }), + ..Default::default() + })), + extend_select: Some(vec![RuleSelector::from_str("RUF001").unwrap()]), + ..Default::default() + } + } + ); + } } diff --git a/crates/ruff_wasm/src/lib.rs b/crates/ruff_wasm/src/lib.rs index 
52b25b379425e..0006abf7faa7b 100644 --- a/crates/ruff_wasm/src/lib.rs +++ b/crates/ruff_wasm/src/lib.rs @@ -196,6 +196,7 @@ impl Workspace { &source_kind, source_type, &parsed, + self.settings.linter.unresolved_target_version, ); let source_code = locator.to_source_code(); diff --git a/crates/ruff_workspace/src/resolver.rs b/crates/ruff_workspace/src/resolver.rs index f550f810f6ac1..7c51eb9326c7c 100644 --- a/crates/ruff_workspace/src/resolver.rs +++ b/crates/ruff_workspace/src/resolver.rs @@ -588,7 +588,7 @@ impl ParallelVisitor for PythonFilesVisitor<'_, '_> { match result { Ok(entry) => { // Ignore directories - let resolved = if entry.file_type().map_or(true, |ft| ft.is_dir()) { + let resolved = if entry.file_type().is_none_or(|ft| ft.is_dir()) { None } else if entry.depth() == 0 { // Accept all files that are passed-in directly. diff --git a/docs/editors/settings.md b/docs/editors/settings.md index e6ac9ed177fe7..1eb36af3a6d28 100644 --- a/docs/editors/settings.md +++ b/docs/editors/settings.md @@ -11,10 +11,39 @@ as per the editor. ### `configuration` -Path to a `ruff.toml` or `pyproject.toml` file to use for configuration. +The `configuration` setting allows you to configure editor-specific Ruff behavior. This can be done +in one of the following ways: -By default, Ruff will discover configuration for each project from the filesystem, mirroring the -behavior of the Ruff CLI. +1. **Configuration file path:** Specify the path to a `ruff.toml` or `pyproject.toml` file that + contains the configuration. User home directory and environment variables will be expanded. +1. **Inline JSON configuration:** Directly provide the configuration as a JSON object. + +!!! note "Added in Ruff `0.9.8`" + + The **Inline JSON configuration** option was introduced in Ruff `0.9.8`. + +The default behavior, if `configuration` is unset, is to load the settings from the project's +configuration (a `ruff.toml` or `pyproject.toml` in the project's directory), consistent with when +running Ruff on the command-line. + +The [`configurationPreference`](#configurationpreference) setting controls the precedence if both an +editor-provided configuration (`configuration`) and a project level configuration file are present. + +#### Resolution order {: #configuration_resolution_order } + +In an editor, Ruff supports three sources of configuration, prioritized as follows (from highest to +lowest): + +1. **Specific settings:** Individual settings like [`lineLength`](#linelength) or + [`lint.select`](#select) defined in the editor +1. [**`ruff.configuration`**](#configuration): Settings provided via the + [`configuration`](#configuration) field (either a path to a configuration file or an inline + configuration object) +1. **Configuration file:** Settings defined in a `ruff.toml` or `pyproject.toml` file in the + project's directory (if present) + +For example, if the line length is specified in all three sources, Ruff will use the value from the +[`lineLength`](#linelength) setting. **Default value**: `null` @@ -22,6 +51,8 @@ behavior of the Ruff CLI. **Example usage**: +_Using configuration file path:_ + === "VS Code" ```json @@ -35,9 +66,7 @@ behavior of the Ruff CLI. ```lua require('lspconfig').ruff.setup { init_options = { - settings = { - configuration = "~/path/to/ruff.toml" - } + configuration = "~/path/to/ruff.toml" } } ``` @@ -58,6 +87,87 @@ behavior of the Ruff CLI. 
} ``` +_Using inline configuration:_ + +=== "VS Code" + + ```json + { + "ruff.configuration": { + "lint": { + "unfixable": ["F401"], + "extend-select": ["TID251"], + "flake8-tidy-imports": { + "banned-api": { + "typing.TypedDict": { + "msg": "Use `typing_extensions.TypedDict` instead", + } + } + } + }, + "format": { + "quote-style": "single" + } + } + } + ``` + +=== "Neovim" + + ```lua + require('lspconfig').ruff.setup { + init_options = { + configuration = { + lint = { + unfixable = {"F401"}, + ["extend-select"] = {"TID251"}, + ["flake8-tidy-imports"] = { + ["banned-api"] = { + ["typing.TypedDict"] = { + msg = "Use `typing_extensions.TypedDict` instead" + } + } + } + }, + format = { + ["quote-style"] = "single" + } + } + } + } + ``` + +=== "Zed" + + ```json + { + "lsp": { + "ruff": { + "initialization_options": { + "settings": { + "configuration": { + "lint": { + "unfixable": ["F401"], + "extend-select": ["TID251"], + "flake8-tidy-imports": { + "banned-api": { + "typing.TypedDict": { + "msg": "Use `typing_extensions.TypedDict` instead" + } + } + } + }, + "format": { + "quote-style": "single" + } + } + } + } + } + } + } + ``` + ### `configurationPreference` The strategy to use when resolving settings across VS Code and the filesystem. By default, editor @@ -594,9 +704,7 @@ Whether to enable linting. Set to `false` to use Ruff exclusively as a formatter "initialization_options": { "settings": { "lint": { - "enable" = { - "enable": false - } + "enable": false } } } diff --git a/docs/versioning.md b/docs/versioning.md index 3546d0a696cc4..2740cf2b6efc7 100644 --- a/docs/versioning.md +++ b/docs/versioning.md @@ -48,6 +48,15 @@ Ruff uses a custom versioning scheme that uses the **minor** version number for - A new server setting is added - A server setting is deprecated +## Minimum supported Rust version + +The minimum supported Rust version required to compile Ruff is listed in the `rust-version` key of +the `[workspace.package]` section in `Cargo.toml`. It may change in any release (minor or patch). It +will never be newer than N-2 Rust versions, where N is the latest stable version. For example, if +the latest stable Rust version is 1.85, Ruff's minimum supported Rust version will be at most 1.83. + +This is only relevant to users who build Ruff from source. Installing Ruff from the Python package +index usually installs a pre-built binary and does not require Rust compilation. 
## Preview mode diff --git a/fuzz/Cargo.toml b/fuzz/Cargo.toml index ade652085f95c..3e27db0c5f67c 100644 --- a/fuzz/Cargo.toml +++ b/fuzz/Cargo.toml @@ -29,7 +29,7 @@ ruff_python_formatter = { path = "../crates/ruff_python_formatter" } ruff_text_size = { path = "../crates/ruff_text_size" } libfuzzer-sys = { git = "https://github.com/rust-fuzz/libfuzzer", default-features = false } -salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "687251fb50b4893dc373a7e2609ceaefb8accbe7" } +salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "99be5d9917c3dd88e19735a82ef6bf39ba84bd7e" } similar = { version = "2.5.0" } tracing = { version = "0.1.40" } diff --git a/python/py-fuzzer/fuzz.py b/python/py-fuzzer/fuzz.py index c4713f4028c40..181b69770ba72 100644 --- a/python/py-fuzzer/fuzz.py +++ b/python/py-fuzzer/fuzz.py @@ -58,7 +58,16 @@ def redknot_contains_bug(code: str, *, red_knot_executable: Path) -> bool: def ruff_contains_bug(code: str, *, ruff_executable: Path) -> bool: """Return `True` if the code triggers a parser error.""" completed_process = subprocess.run( - [ruff_executable, "check", "--config", "lint.select=[]", "--no-cache", "-"], + [ + ruff_executable, + "check", + "--config", + "lint.select=[]", + "--no-cache", + "--target-version", + "py313", + "-", + ], capture_output=True, text=True, input=code,
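Taken together, the parser changes above add a `target_version` to `ParseOptions` and report version-related problems separately from hard syntax errors. Below is a rough sketch of how a caller might exercise the new API; the function and method names come from this diff, while the `PythonVersion::PY39` constant and the exact output are assumptions.

```rust
use ruff_python_ast::{PySourceType, PythonVersion};
use ruff_python_parser::{parse_unchecked, ParseOptions};

fn main() {
    let source = "match command:\n    case 'quit': ...\n";

    // Parse with a pre-3.10 target version so the `match` statement is reported as
    // unsupported syntax instead of being silently accepted.
    let options =
        ParseOptions::from(PySourceType::Python).with_target_version(PythonVersion::PY39);
    let parsed = parse_unchecked(source, options)
        .try_into_module()
        .expect("PySourceType always parses into a module");

    // `is_valid()` only reflects hard syntax errors; version-related errors are
    // surfaced separately through `unsupported_syntax_errors()`.
    assert!(parsed.is_valid());
    for error in parsed.unsupported_syntax_errors() {
        // e.g. "Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)"
        println!("{error} (requires Python {})", error.minimum_version());
    }
}
```

This is the same split the LSP change relies on: `parsed.errors()` keeps producing the regular syntax diagnostics, while `unsupported_syntax_errors()` is mapped to additional diagnostics by `unsupported_syntax_error_to_lsp_diagnostic`.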