From ada2084aec02d2485481383eb331cb1e26b89442 Mon Sep 17 00:00:00 2001 From: psteinroe Date: Tue, 17 Dec 2024 09:36:23 +0100 Subject: [PATCH 01/27] initial commit --- .env | 2 +- Cargo.lock | 19 + Cargo.toml | 1 + crates/pg_analyse/Cargo.toml | 30 ++ crates/pg_analyse/src/categories.rs | 338 ++++++++++++++++++ crates/pg_analyse/src/context.rs | 89 +++++ crates/pg_analyse/src/diagnostics.rs | 149 ++++++++ crates/pg_analyse/src/filter.rs | 124 +++++++ crates/pg_analyse/src/lib.rs | 34 ++ crates/pg_analyse/src/matcher.rs | 135 +++++++ crates/pg_analyse/src/options.rs | 74 ++++ crates/pg_analyse/src/registry.rs | 208 +++++++++++ crates/pg_analyse/src/rule.rs | 302 ++++++++++++++++ crates/pg_analyse/src/signals.rs | 123 +++++++ crates/pg_lint/Cargo.toml | 3 + crates/pg_lint/src/rules/lint/safety.rs | 0 .../src/rules/lint/safety/ban_drop_table.rs | 0 17 files changed, 1630 insertions(+), 1 deletion(-) create mode 100644 crates/pg_analyse/Cargo.toml create mode 100644 crates/pg_analyse/src/categories.rs create mode 100644 crates/pg_analyse/src/context.rs create mode 100644 crates/pg_analyse/src/diagnostics.rs create mode 100644 crates/pg_analyse/src/filter.rs create mode 100644 crates/pg_analyse/src/lib.rs create mode 100644 crates/pg_analyse/src/matcher.rs create mode 100644 crates/pg_analyse/src/options.rs create mode 100644 crates/pg_analyse/src/registry.rs create mode 100644 crates/pg_analyse/src/rule.rs create mode 100644 crates/pg_analyse/src/signals.rs create mode 100644 crates/pg_lint/src/rules/lint/safety.rs create mode 100644 crates/pg_lint/src/rules/lint/safety/ban_drop_table.rs diff --git a/.env b/.env index dda717435..88b3b55c5 100644 --- a/.env +++ b/.env @@ -1 +1 @@ -DATABASE_URL=postgresql://postgres:postgres@127.0.0.1:5432/postgres +DATABASE_URL=postgresql://postgres:postgres@127.0.0.1:54322/postgres diff --git a/Cargo.lock b/Cargo.lock index 4b69accab..c934feece 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2216,6 +2216,22 @@ dependencies = [ "indexmap 
2.7.0", ] +[[package]] +name = "pg_analyse" +version = "0.0.0" +dependencies = [ + "biome_deserialize", + "biome_deserialize_macros", + "enumflags2", + "pg_console", + "pg_diagnostics", + "pg_query_ext", + "pg_schema_cache", + "rustc-hash 2.1.0", + "serde", + "text-size", +] + [[package]] name = "pg_base_db" version = "0.0.0" @@ -2427,8 +2443,11 @@ dependencies = [ name = "pg_lint" version = "0.0.0" dependencies = [ + "enumflags2", "lazy_static", "pg_base_db", + "pg_console", + "pg_diagnostics", "pg_query_ext", "pg_syntax", "serde", diff --git a/Cargo.toml b/Cargo.toml index 0f4a4195c..bc6bff597 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -47,6 +47,7 @@ unicode-width = "0.1.12" # postgres specific crates pg_base_db = { path = "./crates/pg_base_db", version = "0.0.0" } +pg_analyse = { path = "./crates/pg_analyse", version = "0.0.0" } pg_cli = { path = "./crates/pg_cli", version = "0.0.0" } pg_commands = { path = "./crates/pg_commands", version = "0.0.0" } pg_completions = { path = "./crates/pg_completions", version = "0.0.0" } diff --git a/crates/pg_analyse/Cargo.toml b/crates/pg_analyse/Cargo.toml new file mode 100644 index 000000000..b165ff749 --- /dev/null +++ b/crates/pg_analyse/Cargo.toml @@ -0,0 +1,30 @@ + +[package] +authors.workspace = true +categories.workspace = true +description = "" +edition.workspace = true +homepage.workspace = true +keywords.workspace = true +license.workspace = true +name = "pg_analyse" +repository.workspace = true +version = "0.0.0" + + +[dependencies] +pg_schema_cache.workspace = true +pg_diagnostics.workspace = true +pg_console.workspace = true +pg_query_ext.workspace = true +rustc-hash = { workspace = true } + +text-size.workspace = true +enumflags2.workspace = true +serde = { workspace = true, features = ["derive"], optional = true } +biome_deserialize = { workspace = true, optional = true } +biome_deserialize_macros = { workspace = true, optional = true } + +[features] +serde = ["dep:serde", "dep:biome_deserialize", 
"dep:biome_deserialize_macros"] + diff --git a/crates/pg_analyse/src/categories.rs b/crates/pg_analyse/src/categories.rs new file mode 100644 index 000000000..66c7f884d --- /dev/null +++ b/crates/pg_analyse/src/categories.rs @@ -0,0 +1,338 @@ +use enumflags2::{bitflags, BitFlags}; +use std::borrow::Cow; + +#[derive(Copy, Clone, Debug, Eq, PartialEq)] +#[cfg_attr( + feature = "serde", + derive(serde::Serialize, serde::Deserialize, schemars::JsonSchema) +)] +pub enum RuleCategory { + /// This rule performs static analysis of the source code to detect + /// invalid or error-prone patterns, and emits diagnostics along with + /// proposed fixes + Lint, + /// This rule detects refactoring opportunities and emits code action + /// signals + Action, + /// This rule detects transformations that should be applied to the code + Transformation, +} + +/// Actions that suppress rules should start with this string +pub const SUPPRESSION_ACTION_CATEGORY: &str = "quickfix.suppressRule"; + +/// The category of a code action, this type maps directly to the +/// [CodeActionKind] type in the Language Server Protocol specification +/// +/// [CodeActionKind]: https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#codeActionKind +#[derive(Clone, Debug, PartialEq, Eq)] +#[cfg_attr( + feature = "serde", + derive(serde::Serialize, serde::Deserialize, schemars::JsonSchema) +)] +pub enum ActionCategory { + /// Base kind for quickfix actions: 'quickfix'. + /// + /// This action provides a fix to the diagnostic emitted by the same signal + QuickFix(Cow<'static, str>), + /// Base kind for refactoring actions: 'refactor'. + /// + /// This action provides an optional refactor opportunity + Refactor(RefactorKind), + /// Base kind for source actions: `source`. + /// + /// Source code actions apply to the entire file. 
+ Source(SourceActionKind), + /// This action is using a base kind not covered by any of the previous + /// variants + Other(Cow<'static, str>), +} + +impl ActionCategory { + /// Returns true if this category matches the provided filter + /// + /// ## Examples + /// + /// ``` + /// use std::borrow::Cow; + /// use biome_analyze::{ActionCategory, RefactorKind}; + /// + /// assert!(ActionCategory::QuickFix(Cow::from("quickfix")).matches("quickfix")); + /// + /// assert!(ActionCategory::Refactor(RefactorKind::None).matches("refactor")); + /// assert!(!ActionCategory::Refactor(RefactorKind::None).matches("refactor.extract")); + /// + /// assert!(ActionCategory::Refactor(RefactorKind::Extract).matches("refactor")); + /// assert!(ActionCategory::Refactor(RefactorKind::Extract).matches("refactor.extract")); + /// ``` + pub fn matches(&self, filter: &str) -> bool { + self.to_str().starts_with(filter) + } + + /// Returns the representation of this [ActionCategory] as a `CodeActionKind` string + pub fn to_str(&self) -> Cow<'static, str> { + match self { + ActionCategory::QuickFix(tag) => { + if tag.is_empty() { + Cow::Borrowed("quickfix.pglsp") + } else { + Cow::Owned(format!("quickfix.pglsp.{tag}")) + } + } + + ActionCategory::Refactor(RefactorKind::None) => Cow::Borrowed("refactor.pglsp"), + ActionCategory::Refactor(RefactorKind::Extract) => { + Cow::Borrowed("refactor.extract.pglsp") + } + ActionCategory::Refactor(RefactorKind::Inline) => { + Cow::Borrowed("refactor.inline.pglsp") + } + ActionCategory::Refactor(RefactorKind::Rewrite) => { + Cow::Borrowed("refactor.rewrite.pglsp") + } + ActionCategory::Refactor(RefactorKind::Other(tag)) => { + Cow::Owned(format!("refactor.{tag}.pglsp")) + } + + ActionCategory::Source(SourceActionKind::None) => Cow::Borrowed("source.pglsp"), + ActionCategory::Source(SourceActionKind::FixAll) => { + Cow::Borrowed("source.fixAll.pglsp") + } + ActionCategory::Source(SourceActionKind::OrganizeImports) => { + 
Cow::Borrowed("source.organizeImports.pglsp") + } + ActionCategory::Source(SourceActionKind::Other(tag)) => { + Cow::Owned(format!("source.{tag}.pglsp")) + } + + ActionCategory::Other(tag) => Cow::Owned(format!("{tag}.pglsp")), + } + } +} + +/// The sub-category of a refactor code action. +/// +/// [Check the LSP spec](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#codeActionKind) for more information: +#[derive(Clone, Debug, PartialEq, Eq)] +#[cfg_attr( + feature = "serde", + derive(serde::Serialize, serde::Deserialize, schemars::JsonSchema) +)] +pub enum RefactorKind { + /// This action describes a refactor with no particular sub-category + None, + /// Base kind for refactoring extraction actions: 'refactor.extract'. + /// + /// Example extract actions: + /// - Extract method + /// - Extract function + /// - Extract variable + /// - Extract interface from class + Extract, + /// Base kind for refactoring inline actions: 'refactor.inline'. + /// + /// Example inline actions: + /// - Inline function + /// - Inline variable + /// - Inline constant + /// - ... + Inline, + /// Base kind for refactoring rewrite actions: 'refactor.rewrite'. + /// + /// Example rewrite actions: + /// - Convert JavaScript function to class + /// - Add or remove parameter + /// - Encapsulate field + /// - Make method static + /// - Move method to base class + /// - ... + Rewrite, + /// This action is using a refactor kind not covered by any of the previous + /// variants + Other(Cow<'static, str>), +} + +/// The sub-category of a source code action +#[derive(Clone, Debug, PartialEq, Eq)] +#[cfg_attr( + feature = "serde", + derive(serde::Serialize, serde::Deserialize, schemars::JsonSchema) +)] +pub enum SourceActionKind { + /// This action describes a source action with no particular sub-category + None, + // Base kind for a 'fix all' source action: `source.fixAll`. 
+ // + // 'Fix all' actions automatically fix errors that have a clear fix that + // do not require user input. They should not suppress errors or perform + // unsafe fixes such as generating new types or classes. + FixAll, + /// Base kind for an organize imports source action: `source.organizeImports`. + OrganizeImports, + /// This action is using a source action kind not covered by any of the + /// previous variants + Other(Cow<'static, str>), +} + +#[derive(Debug, Copy, Clone, Eq, PartialEq)] +#[bitflags] +#[repr(u8)] +pub(crate) enum Categories { + Lint = 1 << RuleCategory::Lint as u8, + Action = 1 << RuleCategory::Action as u8, + Transformation = 1 << RuleCategory::Transformation as u8, +} + +#[derive(Debug, Copy, Clone)] +/// The categories supported by the analyzer. +/// +/// The default implementation of this type returns an instance with all the categories. +/// +/// Use [RuleCategoriesBuilder] to generate the categories you want to query. +pub struct RuleCategories(BitFlags); + +impl RuleCategories { + pub fn empty() -> Self { + let empty: BitFlags = BitFlags::empty(); + Self(empty) + } + + pub fn all() -> Self { + let empty: BitFlags = BitFlags::all(); + Self(empty) + } + + /// Checks whether the current categories contain a specific [RuleCategories] + pub fn contains(&self, other: impl Into) -> bool { + self.0.contains(other.into().0) + } +} + +impl Default for RuleCategories { + fn default() -> Self { + Self::all() + } +} + +impl From for RuleCategories { + fn from(input: RuleCategory) -> Self { + match input { + RuleCategory::Lint => RuleCategories(BitFlags::from_flag(Categories::Lint)), + RuleCategory::Action => RuleCategories(BitFlags::from_flag(Categories::Action)), + RuleCategory::Transformation => { + RuleCategories(BitFlags::from_flag(Categories::Transformation)) + } + } + } +} + +#[cfg(feature = "serde")] +impl serde::Serialize for RuleCategories { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + let mut 
flags = Vec::new(); + + if self.0.contains(Categories::Lint) { + flags.push(RuleCategory::Lint); + } + + if self.0.contains(Categories::Action) { + flags.push(RuleCategory::Action); + } + + if self.0.contains(Categories::Transformation) { + flags.push(RuleCategory::Transformation); + } + + serializer.collect_seq(flags) + } +} + +#[cfg(feature = "serde")] +impl<'de> serde::Deserialize<'de> for RuleCategories { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + use serde::de::{self, SeqAccess}; + use std::fmt::{self, Formatter}; + + struct Visitor; + + impl<'de> de::Visitor<'de> for Visitor { + type Value = RuleCategories; + + fn expecting(&self, formatter: &mut Formatter) -> fmt::Result { + write!(formatter, "RuleCategories") + } + + fn visit_seq(self, mut seq: A) -> Result + where + A: SeqAccess<'de>, + { + let mut result = RuleCategories::empty(); + + while let Some(item) = seq.next_element::()? { + result.0 |= RuleCategories::from(item).0; + } + + Ok(result) + } + } + + deserializer.deserialize_seq(Visitor) + } +} + +#[cfg(feature = "serde")] +impl schemars::JsonSchema for RuleCategories { + fn schema_name() -> String { + String::from("RuleCategories") + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + >::json_schema(gen) + } +} + +#[derive(Debug, Default)] +/// A convenient type create a [RuleCategories] type +/// +/// ``` +/// use biome_analyze::{RuleCategoriesBuilder, RuleCategory}; +/// let mut categories = RuleCategoriesBuilder::default().with_syntax().with_lint().build(); +/// +/// assert!(categories.contains(RuleCategory::Lint)); +/// assert!(!categories.contains(RuleCategory::Action)); +/// assert!(!categories.contains(RuleCategory::Transformation)); +/// ``` +pub struct RuleCategoriesBuilder { + flags: BitFlags, +} + +impl RuleCategoriesBuilder { + pub fn with_lint(mut self) -> Self { + self.flags.insert(Categories::Lint); + self + } + + pub fn with_action(mut self) -> 
Self { + self.flags.insert(Categories::Action); + self + } + + pub fn with_transformation(mut self) -> Self { + self.flags.insert(Categories::Transformation); + self + } + + pub fn build(self) -> RuleCategories { + RuleCategories(self.flags) + } +} + + + + diff --git a/crates/pg_analyse/src/context.rs b/crates/pg_analyse/src/context.rs new file mode 100644 index 000000000..ab226b843 --- /dev/null +++ b/crates/pg_analyse/src/context.rs @@ -0,0 +1,89 @@ +use pg_diagnostics::{Error, Result}; +use std::path::Path; + +use crate::{categories::RuleCategory, rule::{GroupCategory, Rule, RuleGroup, RuleMetadata}}; + +pub struct RuleContext<'a, R: Rule> { + stmt: &'a pg_query_ext::NodeEnum, + file_path: &'a Path, + options: &'a R::Options, +} + +impl<'a, R> RuleContext<'a, R> +where + R: Rule + Sized + 'static, +{ + #[allow(clippy::too_many_arguments)] + pub fn new( + stmt: &'a pg_query_ext::NodeEnum, + file_path: &'a Path, + options: &'a R::Options, + ) -> Result { + Ok(Self { + stmt, + file_path, + options, + }) + } + + /// Returns the group that belongs to the current rule + pub fn group(&self) -> &'static str { + ::NAME + } + + /// Returns the category that belongs to the current rule + pub fn category(&self) -> RuleCategory { + <::Category as GroupCategory>::CATEGORY + } + + /// Returns a clone of the AST root + pub fn stmt(&self) -> pg_query_ext::NodeEnum { + self.stmt.clone() + } + + /// Returns the metadata of the rule + /// + /// The metadata contains information about the rule, such as the name, version, language, and whether it is recommended. + /// + /// ## Examples + /// ```rust,ignore + /// declare_lint_rule! 
{ + /// /// Some doc + /// pub(crate) Foo { + /// version: "0.0.0", + /// name: "foo", + /// language: "js", + /// recommended: true, + /// } + /// } + /// + /// impl Rule for Foo { + /// const CATEGORY: RuleCategory = RuleCategory::Lint; + /// type State = (); + /// type Signals = (); + /// type Options = (); + /// + /// fn run(ctx: &RuleContext) -> Self::Signals { + /// assert_eq!(ctx.metadata().name, "foo"); + /// } + /// } + /// ``` + pub fn metadata(&self) -> &RuleMetadata { + &R::METADATA + } + + /// It retrieves the options that belong to a rule, if they exist. + /// + /// In order to retrieve a typed data structure, you have to create a deserializable + /// data structure and define it inside the generic type `type Options` of the [Rule] + /// + pub fn options(&self) -> &R::Options { + self.options + } + + /// The file path of the current file + pub fn file_path(&self) -> &Path { + self.file_path + } +} + diff --git a/crates/pg_analyse/src/diagnostics.rs b/crates/pg_analyse/src/diagnostics.rs new file mode 100644 index 000000000..b02a07574 --- /dev/null +++ b/crates/pg_analyse/src/diagnostics.rs @@ -0,0 +1,149 @@ +use pg_console::MarkupBuf; +use pg_diagnostics::{ + advice::CodeSuggestionAdvice, category, Advices, Category, Diagnostic, DiagnosticExt, + DiagnosticTags, Error, Location, Severity, Visit, +}; +use text_size::TextRange; +use std::borrow::Cow; +use std::fmt::{Debug, Display, Formatter}; + +use crate::rule::RuleDiagnostic; + +/// Small wrapper for diagnostics during the analysis phase. +/// +/// During these phases, analyzers can create various type diagnostics and some of them +/// don't have all the info to actually create a real [Diagnostic]. +/// +/// This wrapper serves as glue, which eventually is able to spit out full fledged diagnostics. 
+/// +#[derive(Debug)] +pub struct AnalyzerDiagnostic { + kind: DiagnosticKind, +} + +impl From for AnalyzerDiagnostic { + fn from(rule_diagnostic: RuleDiagnostic) -> Self { + Self { + kind: DiagnosticKind::Rule(rule_diagnostic), + } + } +} + +#[derive(Debug)] +enum DiagnosticKind { + /// It holds various info related to diagnostics emitted by the rules + Rule(RuleDiagnostic), + /// We have raw information to create a basic [Diagnostic] + Raw(Error), +} + +impl Diagnostic for AnalyzerDiagnostic { + fn category(&self) -> Option<&'static Category> { + match &self.kind { + DiagnosticKind::Rule(rule_diagnostic) => Some(rule_diagnostic.category), + DiagnosticKind::Raw(error) => error.category(), + } + } + fn description(&self, fmt: &mut Formatter<'_>) -> std::fmt::Result { + match &self.kind { + DiagnosticKind::Rule(rule_diagnostic) => Debug::fmt(&rule_diagnostic.message, fmt), + DiagnosticKind::Raw(error) => error.description(fmt), + } + } + + fn message(&self, fmt: &mut pg_console::fmt::Formatter<'_>) -> std::io::Result<()> { + match &self.kind { + DiagnosticKind::Rule(rule_diagnostic) => { + pg_console::fmt::Display::fmt(&rule_diagnostic.message, fmt) + } + DiagnosticKind::Raw(error) => error.message(fmt), + } + } + + fn severity(&self) -> Severity { + match &self.kind { + DiagnosticKind::Rule { .. 
} => Severity::Error, + DiagnosticKind::Raw(error) => error.severity(), + } + } + + fn tags(&self) -> DiagnosticTags { + match &self.kind { + DiagnosticKind::Rule(rule_diagnostic) => rule_diagnostic.tags, + DiagnosticKind::Raw(error) => error.tags(), + } + } + + fn location(&self) -> Location<'_> { + match &self.kind { + DiagnosticKind::Rule(rule_diagnostic) => { + Location::builder().span(&rule_diagnostic.span).build() + } + DiagnosticKind::Raw(error) => error.location(), + } + } + + fn advices(&self, visitor: &mut dyn Visit) -> std::io::Result<()> { + match &self.kind { + DiagnosticKind::Rule(rule_diagnostic) => rule_diagnostic.advices().record(visitor)?, + DiagnosticKind::Raw(error) => error.advices(visitor)?, + } + + Ok(()) + } +} + +impl AnalyzerDiagnostic { + /// Creates a diagnostic from a generic [Error] + pub fn from_error(error: Error) -> Self { + Self { + kind: DiagnosticKind::Raw(error), + } + } + + pub fn get_span(&self) -> Option { + match &self.kind { + DiagnosticKind::Rule(rule_diagnostic) => rule_diagnostic.span, + DiagnosticKind::Raw(error) => error.location().span, + } + } + + pub const fn is_raw(&self) -> bool { + matches!(self.kind, DiagnosticKind::Raw(_)) + } +} + +#[derive(Debug, Diagnostic, Clone)] +#[diagnostic(severity = Warning)] +pub struct SuppressionDiagnostic { + #[category] + category: &'static Category, + #[location(span)] + range: TextRange, + #[message] + #[description] + message: String, + #[tags] + tags: DiagnosticTags, +} + +impl SuppressionDiagnostic { + pub(crate) fn new( + category: &'static Category, + range: TextRange, + message: impl Display, + ) -> Self { + Self { + category, + range, + message: message.to_string(), + tags: DiagnosticTags::empty(), + } + } + + pub(crate) fn with_tags(mut self, tags: DiagnosticTags) -> Self { + self.tags |= tags; + self + } +} + diff --git a/crates/pg_analyse/src/filter.rs b/crates/pg_analyse/src/filter.rs new file mode 100644 index 000000000..b4f004b69 --- /dev/null +++ 
b/crates/pg_analyse/src/filter.rs @@ -0,0 +1,124 @@ +use std::fmt::{Display, Formatter, Debug}; + +use text_size::TextRange; + +use crate::{categories::RuleCategories, rule::{GroupCategory, Rule, RuleGroup}, RuleFilter}; + +/// Allows filtering the list of rules that will be executed in a run of the analyzer, +/// and at what source code range signals (diagnostics or actions) may be raised +#[derive(Debug, Default, Clone, Copy)] +pub struct AnalysisFilter<'a> { + /// Only allow rules with these categories to emit signals + pub categories: RuleCategories, + /// Only allow rules matching these names to emit signals + /// If `enabled_rules` is set to `None`, then all rules are enabled. + pub enabled_rules: Option<&'a [RuleFilter<'a>]>, + /// Do not allow rules matching these names to emit signals + pub disabled_rules: &'a [RuleFilter<'a>], + /// Only emit signals matching this text range + pub range: Option, +} + +impl<'analysis> AnalysisFilter<'analysis> { + /// It creates a new filter with the set of [enabled rules](RuleFilter) passed as argument + pub fn from_enabled_rules(enabled_rules: &'analysis [RuleFilter<'analysis>]) -> Self { + Self { + enabled_rules: Some(enabled_rules), + ..AnalysisFilter::default() + } + } + + /// Return `true` if the category `C` matches this filter + pub fn match_category(&self) -> bool { + self.categories.contains(C::CATEGORY) + } + + /// Return `true` if the group `G` matches this filter + pub fn match_group(&self) -> bool { + self.match_category::() + && self.enabled_rules.map_or(true, |enabled_rules| { + enabled_rules.iter().any(|filter| filter.match_group::()) + }) + && !self + .disabled_rules + .iter() + .any(|filter| matches!(filter, RuleFilter::Group(_)) && filter.match_group::()) + } + + /// Return `true` if the rule `R` matches this filter + pub fn match_rule(&self) -> bool { + self.match_category::<::Category>() + && self.enabled_rules.map_or(true, |enabled_rules| { + enabled_rules.iter().any(|filter| filter.match_rule::()) + 
}) + && !self + .disabled_rules + .iter() + .any(|filter| filter.match_rule::()) + } +} + + + +impl<'a> RuleFilter<'a> { + // Returns the group name of this filter. + pub fn group(self) -> &'a str { + match self { + RuleFilter::Group(group) => group, + RuleFilter::Rule(group, _) => group, + } + } + /// Return `true` if the group `G` matches this filter + pub fn match_group(self) -> bool { + match self { + RuleFilter::Group(group) => group == G::NAME, + RuleFilter::Rule(group, _) => group == G::NAME, + } + } + + /// Return `true` if the rule `R` matches this filter + pub fn match_rule(self) -> bool + where + R: Rule, + { + match self { + RuleFilter::Group(group) => group == ::NAME, + RuleFilter::Rule(group, rule) => { + group == ::NAME && rule == R::METADATA.name + } + } + } +} + +impl<'a> Debug for RuleFilter<'a> { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + Display::fmt(self, f) + } +} + +impl<'a> Display for RuleFilter<'a> { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + match self { + RuleFilter::Group(group) => { + write!(f, "{group}") + } + RuleFilter::Rule(group, rule) => { + write!(f, "{group}/{rule}") + } + } + } +} + +impl<'a> pg_console::fmt::Display for RuleFilter<'a> { + fn fmt(&self, fmt: &mut pg_console::fmt::Formatter) -> std::io::Result<()> { + match self { + RuleFilter::Group(group) => { + write!(fmt, "{group}") + } + RuleFilter::Rule(group, rule) => { + write!(fmt, "{group}/{rule}") + } + } + } +} + diff --git a/crates/pg_analyse/src/lib.rs b/crates/pg_analyse/src/lib.rs new file mode 100644 index 000000000..d476b0575 --- /dev/null +++ b/crates/pg_analyse/src/lib.rs @@ -0,0 +1,34 @@ +mod categories; +mod context; +mod diagnostics; +mod filter; +mod matcher; +mod options; +mod registry; +mod rule; +mod signals; + +pub use crate::categories::{ + ActionCategory, RefactorKind, RuleCategories, RuleCategoriesBuilder, RuleCategory, + SourceActionKind, SUPPRESSION_ACTION_CATEGORY, +}; +// pub use 
crate::diagnostics::{AnalyzerDiagnostic, RuleError, SuppressionDiagnostic}; +pub use crate::matcher::RuleKey; +pub use crate::options::{AnalyzerConfiguration, AnalyzerOptions, AnalyzerRules}; +// pub use crate::query::{AddVisitor, QueryKey, QueryMatch, Queryable}; +pub use crate::registry::{ + MetadataRegistry, RegistryVisitor, + RuleRegistry, RuleRegistryBuilder, +}; +pub use crate::rule::{Rule}; +// pub use crate::rule::{ +// GroupCategory, Rule, RuleAction, RuleDiagnostic, RuleGroup, RuleMeta, RuleMetadata, RuleSource, +// RuleSourceKind, SuppressAction, +// }; + +/// Allow filtering a single rule or group of rules by their names +#[derive(Clone, Copy, Eq, Hash, Ord, PartialEq, PartialOrd)] +pub enum RuleFilter<'a> { + Group(&'a str), + Rule(&'a str, &'a str), +} diff --git a/crates/pg_analyse/src/matcher.rs b/crates/pg_analyse/src/matcher.rs new file mode 100644 index 000000000..b61375b7f --- /dev/null +++ b/crates/pg_analyse/src/matcher.rs @@ -0,0 +1,135 @@ +use std::fmt::{Display, Formatter, Debug}; + +use crate::rule::{Rule, RuleGroup}; + +/// Allow filtering a single rule or group of rules by their names +#[derive(Clone, Copy, Eq, Hash, Ord, PartialEq, PartialOrd)] +pub enum RuleFilter<'a> { + Group(&'a str), + Rule(&'a str, &'a str), +} + +impl<'a> RuleFilter<'a> { + // Returns the group name of this filter. 
+ pub fn group(self) -> &'a str { + match self { + RuleFilter::Group(group) => group, + RuleFilter::Rule(group, _) => group, + } + } + /// Return `true` if the group `G` matches this filter + pub fn match_group(self) -> bool { + match self { + RuleFilter::Group(group) => group == G::NAME, + RuleFilter::Rule(group, _) => group == G::NAME, + } + } + + /// Return `true` if the rule `R` matches this filter + pub fn match_rule(self) -> bool + where + R: Rule, + { + match self { + RuleFilter::Group(group) => group == ::NAME, + RuleFilter::Rule(group, rule) => { + group == ::NAME && rule == R::METADATA.name + } + } + } +} + +impl<'a> Debug for RuleFilter<'a> { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + Display::fmt(self, f) + } +} + +impl<'a> Display for RuleFilter<'a> { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + match self { + RuleFilter::Group(group) => { + write!(f, "{group}") + } + RuleFilter::Rule(group, rule) => { + write!(f, "{group}/{rule}") + } + } + } +} + +impl<'a> pg_console::fmt::Display for RuleFilter<'a> { + fn fmt(&self, fmt: &mut pg_console::fmt::Formatter) -> std::io::Result<()> { + match self { + RuleFilter::Group(group) => { + write!(fmt, "{group}") + } + RuleFilter::Rule(group, rule) => { + write!(fmt, "{group}/{rule}") + } + } + } +} + +/// Opaque identifier for a group of rule +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub struct GroupKey { + group: &'static str, +} + +impl GroupKey { + pub(crate) fn new(group: &'static str) -> Self { + Self { group } + } + + pub fn group() -> Self { + Self::new(G::NAME) + } +} + +impl From for RuleFilter<'static> { + fn from(key: GroupKey) -> Self { + RuleFilter::Group(key.group) + } +} + +/// Opaque identifier for a single rule +#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct RuleKey { + group: &'static str, + rule: &'static str, +} + +impl RuleKey { + pub fn new(group: &'static str, rule: &'static str) -> Self { + Self { group, rule } + } 
+ + pub fn rule() -> Self { + Self::new(::NAME, R::METADATA.name) + } + + pub fn group(&self) -> &'static str { + self.group + } + + pub fn rule_name(&self) -> &'static str { + self.rule + } +} + +impl From for RuleFilter<'static> { + fn from(key: RuleKey) -> Self { + RuleFilter::Rule(key.group, key.rule) + } +} + +impl PartialEq for RuleFilter<'static> { + fn eq(&self, other: &RuleKey) -> bool { + match *self { + RuleFilter::Group(group) => group == other.group, + RuleFilter::Rule(group, rule) => group == other.group && rule == other.rule, + } + } +} + diff --git a/crates/pg_analyse/src/options.rs b/crates/pg_analyse/src/options.rs new file mode 100644 index 000000000..11a2059e1 --- /dev/null +++ b/crates/pg_analyse/src/options.rs @@ -0,0 +1,74 @@ +use rustc_hash::FxHashMap; + +use crate::{Rule, RuleKey}; +use std::any::{Any, TypeId}; +use std::fmt::Debug; +use std::path::PathBuf; + +/// A convenient new type data structure to store the options that belong to a rule +#[derive(Debug)] +pub struct RuleOptions(TypeId, Box); + +impl RuleOptions { + /// Creates a new [RuleOptions] + pub fn new(options: O) -> Self { + Self(TypeId::of::(), Box::new(options)) + } + + /// It returns the deserialized rule option + pub fn value(&self) -> &O { + let RuleOptions(type_id, value)= &self; + let current_id = TypeId::of::(); + debug_assert_eq!(type_id, ¤t_id); + // SAFETY: the code should fail when asserting the types. 
+ // If the code throws an error here, it means that the developer didn't test + // the rule with the options + value.downcast_ref::().unwrap() + } +} + +/// A convenient new type data structure to insert and get rules +#[derive(Debug, Default)] +pub struct AnalyzerRules(FxHashMap); + +impl AnalyzerRules { + /// It tracks the options of a specific rule + pub fn push_rule(&mut self, rule_key: RuleKey, options: RuleOptions) { + self.0.insert(rule_key, options); + } + + /// It retrieves the options of a stored rule, given its name + pub fn get_rule_options(&self, rule_key: &RuleKey) -> Option<&O> { + self.0.get(rule_key).map(|o| o.value::()) + } +} + +/// A data structured derived from the `biome.json` file +#[derive(Debug, Default)] +pub struct AnalyzerConfiguration { + /// A list of rules and their options + pub rules: AnalyzerRules, +} + +/// A set of information useful to the analyzer infrastructure +#[derive(Debug, Default)] +pub struct AnalyzerOptions { + /// A data structured derived from the [`biome.json`] file + pub configuration: AnalyzerConfiguration, + + /// The file that is being analyzed + pub file_path: PathBuf, +} + +impl AnalyzerOptions { + pub fn rule_options(&self) -> Option + where + R: Rule + 'static, + { + self.configuration + .rules + .get_rule_options::(&RuleKey::rule::()) + .cloned() + } +} + diff --git a/crates/pg_analyse/src/registry.rs b/crates/pg_analyse/src/registry.rs new file mode 100644 index 000000000..ced6c72f7 --- /dev/null +++ b/crates/pg_analyse/src/registry.rs @@ -0,0 +1,208 @@ +use std::{borrow, collections::{BTreeSet, BinaryHeap}}; + +use pg_diagnostics::Error; + +use crate::{context::RuleContext, filter::AnalysisFilter, matcher::{GroupKey, RuleKey}, rule::{GroupCategory, Rule, RuleGroup}, signals::{RuleSignal, SignalEntry}, AnalyzerOptions}; + +pub trait RegistryVisitor { + /// Record the category `C` to this visitor + fn record_category(&mut self) { + C::record_groups(self); + } + + /// Record the group `G` to this visitor + 
fn record_group(&mut self) { + G::record_rules(self); + } + + /// Record the rule `R` to this visitor + fn record_rule(&mut self) + where + R: Rule + 'static; +} + + +/// Key struct for a rule in the metadata map, sorted alphabetically +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] +pub struct MetadataKey { + inner: (&'static str, &'static str), +} + +impl MetadataKey { + fn into_group_key(self) -> GroupKey { + let (group, _) = self.inner; + GroupKey::new(group) + } + + fn into_rule_key(self) -> RuleKey { + let (group, rule) = self.inner; + RuleKey::new(group, rule) + } +} + +impl<'a> borrow::Borrow<(&'a str, &'a str)> for MetadataKey { + fn borrow(&self) -> &(&'a str, &'a str) { + &self.inner + } +} + +impl borrow::Borrow for MetadataKey { + fn borrow(&self) -> &str { + self.inner.0 + } +} + + +/// Stores metadata information for all the rules in the registry, sorted +/// alphabetically +#[derive(Debug, Default)] +pub struct MetadataRegistry { + inner: BTreeSet, +} + +impl MetadataRegistry { + /// Return a unique identifier for a rule group if it's known by this registry + pub fn find_group(&self, group: &str) -> Option { + let key = self.inner.get(group)?; + Some(key.into_group_key()) + } + + /// Return a unique identifier for a rule if it's known by this registry + pub fn find_rule(&self, group: &str, rule: &str) -> Option { + let key = self.inner.get(&(group, rule))?; + Some(key.into_rule_key()) + } + + pub(crate) fn insert_rule(&mut self, group: &'static str, rule: &'static str) { + self.inner.insert(MetadataKey { + inner: (group, rule), + }); + } +} + +impl RegistryVisitor for MetadataRegistry { + fn record_rule(&mut self) + where + R: Rule + 'static, + { + self.insert_rule(::NAME, R::METADATA.name); + } +} + +pub struct RuleRegistryBuilder<'a> { + filter: &'a AnalysisFilter<'a>, + root: &'a pg_query_ext::NodeEnum, + // Rule Registry + registry: RuleRegistry, + diagnostics: Vec, +} + +// TOOD: add build, then run through rules in registry and 
emit signals +// i hope we can simplify this. +// - suppresions are on statement level anyways, so we can just run through the statements and check if they are suppressing certain rules +// - range is always the statement for now. we will add the weak ref thingy later +// - do we really need the RegistryRule stuff? the registry rule doesnt execute, it just provides +// the run function + +impl RegistryVisitor for RuleRegistryBuilder<'_> { + fn record_category(&mut self) { + if self.filter.match_category::() { + C::record_groups(self); + } + } + + fn record_group(&mut self) { + if self.filter.match_group::() { + G::record_rules(self); + } + } + + /// Add the rule `R` to the list of rules stored in this registry instance + fn record_rule(&mut self) + where + R: Rule + 'static, + { + if !self.filter.match_rule::() { + return; + } + + let rule = RegistryRule::new::(); + + self.registry.rules.push(rule); + } +} + +/// The rule registry holds type-erased instances of all active analysis rules +pub struct RuleRegistry { + rules: Vec, +} + +/// Internal representation of a single rule in the registry +#[derive(Copy, Clone)] +pub struct RegistryRule { + run: RuleExecutor, +} + + +pub struct RegistryRuleParams<'analyzer, 'query> { + pub root: &'analyzer pg_query_ext::NodeEnum, + pub signal_queue: &'query mut BinaryHeap>, + pub options: &'analyzer AnalyzerOptions, +} + + +/// Executor for rule as a generic function pointer +type RuleExecutor = fn(&mut RegistryRuleParams) -> Result<(), Error>; + +impl RegistryRule { + fn new() -> Self + where + R: Rule + 'static, + { + /// Generic implementation of RuleExecutor for any rule type R + fn run( + params: &mut RegistryRuleParams + ) -> Result<(), Error> + where + R: Rule + 'static, + { + let options = params.options.rule_options::().unwrap_or_default(); + let ctx = match RuleContext::new( + params.root, + ¶ms.options.file_path, + &options + ) { + Ok(ctx) => ctx, + Err(error) => return Err(error), + }; + + for result in R::run(&ctx) 
{ + // `None` means the entire range of the statement + let text_range = R::text_range(&ctx, &result); + + let signal = Box::new(RuleSignal::::new( + params.root, + result, + params.options, + )); + + params.signal_queue.push(SignalEntry { + signal, + rule: RuleKey::rule::(), + text_range, + }); + } + + Ok(()) + } + + Self { + run: run::, + } + } +} + + + + + diff --git a/crates/pg_analyse/src/rule.rs b/crates/pg_analyse/src/rule.rs new file mode 100644 index 000000000..8b9f3425d --- /dev/null +++ b/crates/pg_analyse/src/rule.rs @@ -0,0 +1,302 @@ +use text_size::TextRange; +use std::fmt::Debug; +use pg_console::fmt::Display; +use pg_console::{markup, MarkupBuf}; +use pg_diagnostics::advice::CodeSuggestionAdvice; +use pg_diagnostics::location::AsSpan; +use pg_diagnostics::{ + Advices, Category, Diagnostic, DiagnosticTags, Location, LogCategory, MessageAndDescription, Visit +}; + +use crate::{categories::RuleCategory, context::RuleContext, registry::RegistryVisitor}; + +// use crate::categories::RuleCategory; +// use crate::context::RuleContext; +// use crate::registry::RegistryVisitor; + +#[derive(Clone, Debug)] +#[cfg_attr(feature = "serde", derive(serde::Serialize))] +/// Static metadata containing information about a rule +pub struct RuleMetadata { + /// It marks if a rule is deprecated, and if so a reason has to be provided. 
+ pub deprecated: Option<&'static str>, + /// The version when the rule was implemented + pub version: &'static str, + /// The name of this rule, displayed in the diagnostics it emits + pub name: &'static str, + /// The content of the documentation comments for this rule + pub docs: &'static str, + /// Whether a rule is recommended or not + pub recommended: bool, +} + +impl RuleMetadata { + pub const fn new( + version: &'static str, + name: &'static str, + docs: &'static str + ) -> Self { + Self { + deprecated: None, + version, + name, + docs, + recommended: false + } + } + + pub const fn recommended(mut self, recommended: bool) -> Self { + self.recommended = recommended; + self + } + + pub const fn deprecated(mut self, deprecated: &'static str) -> Self { + self.deprecated = Some(deprecated); + self + } +} + +pub trait RuleMeta { + type Group: RuleGroup; + const METADATA: RuleMetadata; +} + +/// A rule group is a collection of rules under a given name, serving as a +/// "namespace" for lint rules and allowing the entire set of rules to be +/// disabled at once +pub trait RuleGroup { + type Category: GroupCategory; + /// The name of this group, displayed in the diagnostics emitted by its rules + const NAME: &'static str; + /// Register all the rules belonging to this group into `registry` + fn record_rules(registry: &mut V); +} + +/// A group category is a collection of rule groups under a given category ID, +/// serving as a broad classification on the kind of diagnostic or code action +/// these rule emit, and allowing whole categories of rules to be disabled at +/// once depending on the kind of analysis being performed +pub trait GroupCategory { + /// The category ID used for all groups and rule belonging to this category + const CATEGORY: RuleCategory; + /// Register all the groups belonging to this category into `registry` + fn record_groups(registry: &mut V); +} + +/// Trait implemented by all analysis rules: declares interest to a certain AstNode type, +/// 
and a callback function to be executed on all nodes matching the query to possibly +/// raise an analysis event +pub trait Rule: RuleMeta + Sized { + /// A generic type that will be kept in memory between a call to `run` and + /// subsequent executions of `diagnostic` or `action`, allows the rule to + /// hold some temporary state between the moment a signal is raised and + /// when a diagnostic or action needs to be built + type State; + /// An iterator type returned by `run` to yield zero or more signals to the + /// analyzer + type Signals: IntoIterator; + /// The options that belong to a rule + type Options: Default + Clone + Debug; + + /// This function is called once for each node matching `Query` in the tree + /// being analyzed. If it returns `Some` the state object will be wrapped + /// in a generic `AnalyzerSignal`, and the consumer of the analyzer may call + /// `diagnostic` or `action` on it + fn run(ctx: &RuleContext) -> Self::Signals; + + /// Used by the analyzer to associate a range of source text to a signal in + /// order to support suppression comments. 
+ /// + /// If this function returns [None], the range of the query node will be used instead + /// + /// The default implementation returns the range of `Self::diagnostic`, and + /// should return the correct value for most rules however you may want to + /// override this if generating a diagnostic for this rule requires heavy + /// processing and the range could be determined through a faster path + fn text_range(ctx: &RuleContext, state: &Self::State) -> Option { + Self::diagnostic(ctx, state).and_then(|diag| diag.span()) + } + + /// Called by the consumer of the analyzer to try to generate a diagnostic + /// from a signal raised by `run` + /// + /// The default implementation returns None + fn diagnostic(_ctx: &RuleContext, _state: &Self::State) -> Option { + None + } +} + + +/// Diagnostic object returned by a single analysis rule +#[derive(Debug, Diagnostic)] +pub struct RuleDiagnostic { + #[category] + pub(crate) category: &'static Category, + #[location(span)] + pub(crate) span: Option, + #[message] + #[description] + pub(crate) message: MessageAndDescription, + #[tags] + pub(crate) tags: DiagnosticTags, + #[advice] + pub(crate) rule_advice: RuleAdvice, +} + +#[derive(Debug, Default)] +/// It contains possible advices to show when printing a diagnostic that belong to the rule +pub struct RuleAdvice { + pub(crate) details: Vec, + pub(crate) notes: Vec<(LogCategory, MarkupBuf)>, + pub(crate) suggestion_list: Option, + pub(crate) code_suggestion_list: Vec>, +} + +#[derive(Debug, Default)] +pub struct SuggestionList { + pub(crate) message: MarkupBuf, + pub(crate) list: Vec, +} + +impl Advices for RuleAdvice { + fn record(&self, visitor: &mut dyn Visit) -> std::io::Result<()> { + for detail in &self.details { + visitor.record_log( + detail.log_category, + &markup! 
{ {detail.message} }.to_owned(), + )?; + visitor.record_frame(Location::builder().span(&detail.range).build())?; + } + // we then print notes + for (log_category, note) in &self.notes { + visitor.record_log(*log_category, &markup! { {note} }.to_owned())?; + } + + if let Some(suggestion_list) = &self.suggestion_list { + visitor.record_log( + LogCategory::Info, + &markup! { {suggestion_list.message} }.to_owned(), + )?; + let list: Vec<_> = suggestion_list + .list + .iter() + .map(|suggestion| suggestion as &dyn Display) + .collect(); + visitor.record_list(&list)?; + } + + // finally, we print possible code suggestions on how to fix the issue + for suggestion in &self.code_suggestion_list { + suggestion.record(visitor)?; + } + + Ok(()) + } +} + +#[derive(Debug)] +pub struct Detail { + pub log_category: LogCategory, + pub message: MarkupBuf, + pub range: Option, +} + +impl RuleDiagnostic { + /// Creates a new [`RuleDiagnostic`] with a severity and title that will be + /// used in a builder-like way to modify labels. + pub fn new(category: &'static Category, span: impl AsSpan, title: impl Display) -> Self { + let message = markup!({ title }).to_owned(); + Self { + category, + span: span.as_span(), + message: MessageAndDescription::from(message), + tags: DiagnosticTags::empty(), + rule_advice: RuleAdvice::default(), + } + } + + /// Set an explicit plain-text summary for this diagnostic. + pub fn description(mut self, summary: impl Into) -> Self { + self.message.set_description(summary.into()); + self + } + + /// Marks this diagnostic as deprecated code, which will + /// be displayed in the language server. + /// + /// This does not have any influence on the diagnostic rendering. + pub fn deprecated(mut self) -> Self { + self.tags |= DiagnosticTags::DEPRECATED_CODE; + self + } + + /// Marks this diagnostic as unnecessary code, which will + /// be displayed in the language server. + /// + /// This does not have any influence on the diagnostic rendering. 
+ pub fn unnecessary(mut self) -> Self { + self.tags |= DiagnosticTags::UNNECESSARY_CODE; + self + } + + /// Attaches a label to this [`RuleDiagnostic`]. + /// + /// The given span has to be in the file that was provided while creating this [`RuleDiagnostic`]. + pub fn label(mut self, span: impl AsSpan, msg: impl Display) -> Self { + self.rule_advice.details.push(Detail { + log_category: LogCategory::Info, + message: markup!({ msg }).to_owned(), + range: span.as_span(), + }); + self + } + + /// Attaches a detailed message to this [`RuleDiagnostic`]. + pub fn detail(self, span: impl AsSpan, msg: impl Display) -> Self { + self.label(span, msg) + } + + /// Adds a footer to this [`RuleDiagnostic`], which will be displayed under the actual error. + fn footer(mut self, log_category: LogCategory, msg: impl Display) -> Self { + self.rule_advice + .notes + .push((log_category, markup!({ msg }).to_owned())); + self + } + + /// Adds a footer to this [`RuleDiagnostic`], with the `Info` log category. + pub fn note(self, msg: impl Display) -> Self { + self.footer(LogCategory::Info, msg) + } + + /// It creates a new footer note which contains a message and a list of possible suggestions. + /// Useful when there's need to suggest a list of things inside a diagnostic. + pub fn footer_list(mut self, message: impl Display, list: &[impl Display]) -> Self { + if !list.is_empty() { + self.rule_advice.suggestion_list = Some(SuggestionList { + message: markup! { {message} }.to_owned(), + list: list + .iter() + .map(|msg| markup! { {msg} }.to_owned()) + .collect(), + }); + } + + self + } + + /// Adds a footer to this [`RuleDiagnostic`], with the `Warn` severity. 
+ pub fn warning(self, msg: impl Display) -> Self { + self.footer(LogCategory::Warn, msg) + } + + pub(crate) fn span(&self) -> Option { + self.span + } + + pub fn advices(&self) -> &RuleAdvice { + &self.rule_advice + } +} + + diff --git a/crates/pg_analyse/src/signals.rs b/crates/pg_analyse/src/signals.rs new file mode 100644 index 000000000..89004af7a --- /dev/null +++ b/crates/pg_analyse/src/signals.rs @@ -0,0 +1,123 @@ +use crate::diagnostics::AnalyzerDiagnostic; +use crate::rule::RuleGroup; +use crate::{AnalyzerOptions, RuleKey}; +use crate::{categories::ActionCategory, context::RuleContext, rule::Rule}; +use pg_console::MarkupBuf; +use pg_diagnostics::{advice::CodeSuggestionAdvice, Applicability, CodeSuggestion, Error}; +use text_size::{TextRange, TextSize}; +use std::borrow::Cow; +use std::cmp::Ordering; +use std::iter::FusedIterator; +use std::marker::PhantomData; +use std::vec::IntoIter; + +/// Event raised by the analyzer when a [Rule](crate::Rule) +/// emits a diagnostic, a code action, or both +pub trait AnalyzerSignal { + fn diagnostic(&self) -> Option; +} + +/// Simple implementation of [AnalyzerSignal] generating a [AnalyzerDiagnostic] +/// from a provided factory function. Optionally, this signal can be configured +/// to also emit a code action, by calling `.with_action` with a secondary +/// factory function for said action. 
+pub struct DiagnosticSignal { + diagnostic: D, + _diag: PhantomData, +} + +impl DiagnosticSignal +where + D: Fn() -> T, + Error: From, +{ + pub fn new(factory: D) -> Self { + Self { + diagnostic: factory, + _diag: PhantomData, + } + } +} + +impl AnalyzerSignal for DiagnosticSignal +where + D: Fn() -> T, + Error: From, +{ + fn diagnostic(&self) -> Option { + let diag = (self.diagnostic)(); + let error = Error::from(diag); + Some(AnalyzerDiagnostic::from_error(error)) + } +} + +/// Entry for a pending signal in the `signal_queue` +pub struct SignalEntry<'analyzer> { + /// Boxed analyzer signal to be emitted + pub signal: Box, + /// Unique identifier for the rule that emitted this signal + pub rule: RuleKey, + /// Text range in the statement this signal covers. If `None`, the signal covers the entire + /// statement + pub text_range: Option, +} + + +// SignalEntry is ordered based on the starting point of its `text_range` +impl<'analyzer> Ord for SignalEntry<'analyzer> { + fn cmp(&self, other: &Self) -> Ordering { + other.text_range.map(|x| x.start()).unwrap_or(TextSize::from(0)).cmp(&self.text_range.map(|x| x.start()).unwrap_or(TextSize::from(0))) + } +} + +impl<'a> PartialOrd for SignalEntry<'a> { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +impl<'a> Eq for SignalEntry<'a> {} + +impl<'a> PartialEq for SignalEntry<'a> { + fn eq(&self, other: &Self) -> bool { + self.text_range.map(|x| x.start()) == other.text_range.map(|x| x.start()) + } +} + + +/// Analyzer-internal implementation of [AnalyzerSignal] for a specific [Rule](crate::registry::Rule) +pub(crate) struct RuleSignal<'analyzer, R: Rule> { + root: &'analyzer pg_query_ext::NodeEnum, + state: R::State, + /// A list of strings that are considered "globals" inside the analyzer + options: &'analyzer AnalyzerOptions, +} + +impl<'analyzer, R> RuleSignal<'analyzer, R> +where + R: Rule + 'static, +{ + pub(crate) fn new( + root: &'analyzer pg_query_ext::NodeEnum, + state: R::State, 
+ options: &'analyzer AnalyzerOptions, + ) -> Self { + Self { + root, + state, + options, + } + } +} + +impl<'bag, R> AnalyzerSignal for RuleSignal<'bag, R> +where + R: Rule + 'static, +{ + fn diagnostic(&self) -> Option { + let options = self.options.rule_options::().unwrap_or_default(); + let ctx = RuleContext::new(self.root, &self.options.file_path, &options).ok()?; + + R::diagnostic(&ctx, &self.state).map(AnalyzerDiagnostic::from) + } +} diff --git a/crates/pg_lint/Cargo.toml b/crates/pg_lint/Cargo.toml index a4f593895..22cf92ce6 100644 --- a/crates/pg_lint/Cargo.toml +++ b/crates/pg_lint/Cargo.toml @@ -14,12 +14,15 @@ version = "0.0.0" [dependencies] lazy_static = "1.4.0" pg_base_db.workspace = true +pg_diagnostics.workspace = true +pg_console.workspace = true pg_query_ext.workspace = true pg_syntax.workspace = true serde.workspace = true serde_json.workspace = true serde_plain = "1.0" text-size.workspace = true +enumflags2.workspace = true [dev-dependencies] diff --git a/crates/pg_lint/src/rules/lint/safety.rs b/crates/pg_lint/src/rules/lint/safety.rs new file mode 100644 index 000000000..e69de29bb diff --git a/crates/pg_lint/src/rules/lint/safety/ban_drop_table.rs b/crates/pg_lint/src/rules/lint/safety/ban_drop_table.rs new file mode 100644 index 000000000..e69de29bb From 031e38d356a103499704d45c3a7753c7edffb840 Mon Sep 17 00:00:00 2001 From: psteinroe Date: Wed, 18 Dec 2024 09:19:30 +0100 Subject: [PATCH 02/27] make it simpleeeer --- Cargo.toml | 2 +- crates/pg_analyse/Cargo.toml | 17 ++- crates/pg_analyse/src/categories.rs | 4 - crates/pg_analyse/src/context.rs | 8 +- crates/pg_analyse/src/diagnostics.rs | 149 --------------------------- crates/pg_analyse/src/filter.rs | 77 +++++++++++++- crates/pg_analyse/src/lib.rs | 24 +---- crates/pg_analyse/src/matcher.rs | 135 ------------------------ crates/pg_analyse/src/options.rs | 3 +- crates/pg_analyse/src/registry.rs | 63 +++-------- crates/pg_analyse/src/rule.rs | 58 ++--------- 
crates/pg_analyse/src/signals.rs | 123 ---------------------- crates/pg_lint/Cargo.toml | 22 ++-- justfile | 3 + 14 files changed, 124 insertions(+), 564 deletions(-) delete mode 100644 crates/pg_analyse/src/diagnostics.rs delete mode 100644 crates/pg_analyse/src/matcher.rs delete mode 100644 crates/pg_analyse/src/signals.rs diff --git a/Cargo.toml b/Cargo.toml index bc6bff597..60dbd2159 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -46,8 +46,8 @@ tree_sitter_sql = { path = "./lib/tree_sitter_sql", version = "0.0.0" } unicode-width = "0.1.12" # postgres specific crates -pg_base_db = { path = "./crates/pg_base_db", version = "0.0.0" } pg_analyse = { path = "./crates/pg_analyse", version = "0.0.0" } +pg_base_db = { path = "./crates/pg_base_db", version = "0.0.0" } pg_cli = { path = "./crates/pg_cli", version = "0.0.0" } pg_commands = { path = "./crates/pg_commands", version = "0.0.0" } pg_completions = { path = "./crates/pg_completions", version = "0.0.0" } diff --git a/crates/pg_analyse/Cargo.toml b/crates/pg_analyse/Cargo.toml index b165ff749..823be2ef0 100644 --- a/crates/pg_analyse/Cargo.toml +++ b/crates/pg_analyse/Cargo.toml @@ -13,18 +13,17 @@ version = "0.0.0" [dependencies] -pg_schema_cache.workspace = true -pg_diagnostics.workspace = true -pg_console.workspace = true -pg_query_ext.workspace = true -rustc-hash = { workspace = true } +pg_console.workspace = true +pg_diagnostics.workspace = true +pg_query_ext.workspace = true +pg_schema_cache.workspace = true +rustc-hash = { workspace = true } -text-size.workspace = true -enumflags2.workspace = true -serde = { workspace = true, features = ["derive"], optional = true } biome_deserialize = { workspace = true, optional = true } biome_deserialize_macros = { workspace = true, optional = true } +enumflags2.workspace = true +serde = { workspace = true, features = ["derive"], optional = true } +text-size.workspace = true [features] serde = ["dep:serde", "dep:biome_deserialize", "dep:biome_deserialize_macros"] - diff 
--git a/crates/pg_analyse/src/categories.rs b/crates/pg_analyse/src/categories.rs index 66c7f884d..115af8fea 100644 --- a/crates/pg_analyse/src/categories.rs +++ b/crates/pg_analyse/src/categories.rs @@ -332,7 +332,3 @@ impl RuleCategoriesBuilder { RuleCategories(self.flags) } } - - - - diff --git a/crates/pg_analyse/src/context.rs b/crates/pg_analyse/src/context.rs index ab226b843..b8e67db11 100644 --- a/crates/pg_analyse/src/context.rs +++ b/crates/pg_analyse/src/context.rs @@ -1,7 +1,10 @@ use pg_diagnostics::{Error, Result}; use std::path::Path; -use crate::{categories::RuleCategory, rule::{GroupCategory, Rule, RuleGroup, RuleMetadata}}; +use crate::{ + categories::RuleCategory, + rule::{GroupCategory, Rule, RuleGroup, RuleMetadata}, +}; pub struct RuleContext<'a, R: Rule> { stmt: &'a pg_query_ext::NodeEnum, @@ -15,7 +18,7 @@ where { #[allow(clippy::too_many_arguments)] pub fn new( - stmt: &'a pg_query_ext::NodeEnum, + stmt: &'a pg_query_ext::NodeEnum, file_path: &'a Path, options: &'a R::Options, ) -> Result { @@ -86,4 +89,3 @@ where self.file_path } } - diff --git a/crates/pg_analyse/src/diagnostics.rs b/crates/pg_analyse/src/diagnostics.rs deleted file mode 100644 index b02a07574..000000000 --- a/crates/pg_analyse/src/diagnostics.rs +++ /dev/null @@ -1,149 +0,0 @@ -use pg_console::MarkupBuf; -use pg_diagnostics::{ - advice::CodeSuggestionAdvice, category, Advices, Category, Diagnostic, DiagnosticExt, - DiagnosticTags, Error, Location, Severity, Visit, -}; -use text_size::TextRange; -use std::borrow::Cow; -use std::fmt::{Debug, Display, Formatter}; - -use crate::rule::RuleDiagnostic; - -/// Small wrapper for diagnostics during the analysis phase. -/// -/// During these phases, analyzers can create various type diagnostics and some of them -/// don't have all the info to actually create a real [Diagnostic]. -/// -/// This wrapper serves as glue, which eventually is able to spit out full fledged diagnostics. 
-/// -#[derive(Debug)] -pub struct AnalyzerDiagnostic { - kind: DiagnosticKind, -} - -impl From for AnalyzerDiagnostic { - fn from(rule_diagnostic: RuleDiagnostic) -> Self { - Self { - kind: DiagnosticKind::Rule(rule_diagnostic), - } - } -} - -#[derive(Debug)] -enum DiagnosticKind { - /// It holds various info related to diagnostics emitted by the rules - Rule(RuleDiagnostic), - /// We have raw information to create a basic [Diagnostic] - Raw(Error), -} - -impl Diagnostic for AnalyzerDiagnostic { - fn category(&self) -> Option<&'static Category> { - match &self.kind { - DiagnosticKind::Rule(rule_diagnostic) => Some(rule_diagnostic.category), - DiagnosticKind::Raw(error) => error.category(), - } - } - fn description(&self, fmt: &mut Formatter<'_>) -> std::fmt::Result { - match &self.kind { - DiagnosticKind::Rule(rule_diagnostic) => Debug::fmt(&rule_diagnostic.message, fmt), - DiagnosticKind::Raw(error) => error.description(fmt), - } - } - - fn message(&self, fmt: &mut pg_console::fmt::Formatter<'_>) -> std::io::Result<()> { - match &self.kind { - DiagnosticKind::Rule(rule_diagnostic) => { - pg_console::fmt::Display::fmt(&rule_diagnostic.message, fmt) - } - DiagnosticKind::Raw(error) => error.message(fmt), - } - } - - fn severity(&self) -> Severity { - match &self.kind { - DiagnosticKind::Rule { .. 
} => Severity::Error, - DiagnosticKind::Raw(error) => error.severity(), - } - } - - fn tags(&self) -> DiagnosticTags { - match &self.kind { - DiagnosticKind::Rule(rule_diagnostic) => rule_diagnostic.tags, - DiagnosticKind::Raw(error) => error.tags(), - } - } - - fn location(&self) -> Location<'_> { - match &self.kind { - DiagnosticKind::Rule(rule_diagnostic) => { - Location::builder().span(&rule_diagnostic.span).build() - } - DiagnosticKind::Raw(error) => error.location(), - } - } - - fn advices(&self, visitor: &mut dyn Visit) -> std::io::Result<()> { - match &self.kind { - DiagnosticKind::Rule(rule_diagnostic) => rule_diagnostic.advices().record(visitor)?, - DiagnosticKind::Raw(error) => error.advices(visitor)?, - } - - Ok(()) - } -} - -impl AnalyzerDiagnostic { - /// Creates a diagnostic from a generic [Error] - pub fn from_error(error: Error) -> Self { - Self { - kind: DiagnosticKind::Raw(error), - } - } - - pub fn get_span(&self) -> Option { - match &self.kind { - DiagnosticKind::Rule(rule_diagnostic) => rule_diagnostic.span, - DiagnosticKind::Raw(error) => error.location().span, - } - } - - pub const fn is_raw(&self) -> bool { - matches!(self.kind, DiagnosticKind::Raw(_)) - } -} - -#[derive(Debug, Diagnostic, Clone)] -#[diagnostic(severity = Warning)] -pub struct SuppressionDiagnostic { - #[category] - category: &'static Category, - #[location(span)] - range: TextRange, - #[message] - #[description] - message: String, - #[tags] - tags: DiagnosticTags, -} - -impl SuppressionDiagnostic { - pub(crate) fn new( - category: &'static Category, - range: TextRange, - message: impl Display, - ) -> Self { - Self { - category, - range, - message: message.to_string(), - tags: DiagnosticTags::empty(), - } - } - - pub(crate) fn with_tags(mut self, tags: DiagnosticTags) -> Self { - self.tags |= tags; - self - } -} - diff --git a/crates/pg_analyse/src/filter.rs b/crates/pg_analyse/src/filter.rs index b4f004b69..800c9111b 100644 --- a/crates/pg_analyse/src/filter.rs +++ 
b/crates/pg_analyse/src/filter.rs @@ -1,8 +1,18 @@ -use std::fmt::{Display, Formatter, Debug}; +use std::fmt::{Debug, Display, Formatter}; use text_size::TextRange; -use crate::{categories::RuleCategories, rule::{GroupCategory, Rule, RuleGroup}, RuleFilter}; +use crate::{ + categories::RuleCategories, + rule::{GroupCategory, Rule, RuleGroup}, +}; + +/// Allow filtering a single rule or group of rules by their names +#[derive(Clone, Copy, Eq, Hash, Ord, PartialEq, PartialOrd)] +pub enum RuleFilter<'a> { + Group(&'a str), + Rule(&'a str, &'a str), +} /// Allows filtering the list of rules that will be executed in a run of the analyzer, /// and at what source code range signals (diagnostics or actions) may be raised @@ -58,8 +68,6 @@ impl<'analysis> AnalysisFilter<'analysis> { } } - - impl<'a> RuleFilter<'a> { // Returns the group name of this filter. pub fn group(self) -> &'a str { @@ -122,3 +130,64 @@ impl<'a> pg_console::fmt::Display for RuleFilter<'a> { } } +/// Opaque identifier for a group of rule +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub struct GroupKey { + group: &'static str, +} + +impl GroupKey { + pub(crate) fn new(group: &'static str) -> Self { + Self { group } + } + + pub fn group() -> Self { + Self::new(G::NAME) + } +} + +impl From for RuleFilter<'static> { + fn from(key: GroupKey) -> Self { + RuleFilter::Group(key.group) + } +} + +/// Opaque identifier for a single rule +#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct RuleKey { + group: &'static str, + rule: &'static str, +} + +impl RuleKey { + pub fn new(group: &'static str, rule: &'static str) -> Self { + Self { group, rule } + } + + pub fn rule() -> Self { + Self::new(::NAME, R::METADATA.name) + } + + pub fn group(&self) -> &'static str { + self.group + } + + pub fn rule_name(&self) -> &'static str { + self.rule + } +} + +impl From for RuleFilter<'static> { + fn from(key: RuleKey) -> Self { + RuleFilter::Rule(key.group, key.rule) + } +} + +impl PartialEq for 
RuleFilter<'static> { + fn eq(&self, other: &RuleKey) -> bool { + match *self { + RuleFilter::Group(group) => group == other.group, + RuleFilter::Rule(group, rule) => group == other.group && rule == other.rule, + } + } +} diff --git a/crates/pg_analyse/src/lib.rs b/crates/pg_analyse/src/lib.rs index d476b0575..bf268de83 100644 --- a/crates/pg_analyse/src/lib.rs +++ b/crates/pg_analyse/src/lib.rs @@ -1,34 +1,14 @@ mod categories; mod context; -mod diagnostics; mod filter; -mod matcher; mod options; mod registry; mod rule; -mod signals; pub use crate::categories::{ ActionCategory, RefactorKind, RuleCategories, RuleCategoriesBuilder, RuleCategory, SourceActionKind, SUPPRESSION_ACTION_CATEGORY, }; -// pub use crate::diagnostics::{AnalyzerDiagnostic, RuleError, SuppressionDiagnostic}; -pub use crate::matcher::RuleKey; pub use crate::options::{AnalyzerConfiguration, AnalyzerOptions, AnalyzerRules}; -// pub use crate::query::{AddVisitor, QueryKey, QueryMatch, Queryable}; -pub use crate::registry::{ - MetadataRegistry, RegistryVisitor, - RuleRegistry, RuleRegistryBuilder, -}; -pub use crate::rule::{Rule}; -// pub use crate::rule::{ -// GroupCategory, Rule, RuleAction, RuleDiagnostic, RuleGroup, RuleMeta, RuleMetadata, RuleSource, -// RuleSourceKind, SuppressAction, -// }; - -/// Allow filtering a single rule or group of rules by their names -#[derive(Clone, Copy, Eq, Hash, Ord, PartialEq, PartialOrd)] -pub enum RuleFilter<'a> { - Group(&'a str), - Rule(&'a str, &'a str), -} +pub use crate::registry::{MetadataRegistry, RegistryVisitor, RuleRegistry, RuleRegistryBuilder}; +pub use crate::rule::Rule; diff --git a/crates/pg_analyse/src/matcher.rs b/crates/pg_analyse/src/matcher.rs deleted file mode 100644 index b61375b7f..000000000 --- a/crates/pg_analyse/src/matcher.rs +++ /dev/null @@ -1,135 +0,0 @@ -use std::fmt::{Display, Formatter, Debug}; - -use crate::rule::{Rule, RuleGroup}; - -/// Allow filtering a single rule or group of rules by their names -#[derive(Clone, Copy, 
Eq, Hash, Ord, PartialEq, PartialOrd)] -pub enum RuleFilter<'a> { - Group(&'a str), - Rule(&'a str, &'a str), -} - -impl<'a> RuleFilter<'a> { - // Returns the group name of this filter. - pub fn group(self) -> &'a str { - match self { - RuleFilter::Group(group) => group, - RuleFilter::Rule(group, _) => group, - } - } - /// Return `true` if the group `G` matches this filter - pub fn match_group(self) -> bool { - match self { - RuleFilter::Group(group) => group == G::NAME, - RuleFilter::Rule(group, _) => group == G::NAME, - } - } - - /// Return `true` if the rule `R` matches this filter - pub fn match_rule(self) -> bool - where - R: Rule, - { - match self { - RuleFilter::Group(group) => group == ::NAME, - RuleFilter::Rule(group, rule) => { - group == ::NAME && rule == R::METADATA.name - } - } - } -} - -impl<'a> Debug for RuleFilter<'a> { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - Display::fmt(self, f) - } -} - -impl<'a> Display for RuleFilter<'a> { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - match self { - RuleFilter::Group(group) => { - write!(f, "{group}") - } - RuleFilter::Rule(group, rule) => { - write!(f, "{group}/{rule}") - } - } - } -} - -impl<'a> pg_console::fmt::Display for RuleFilter<'a> { - fn fmt(&self, fmt: &mut pg_console::fmt::Formatter) -> std::io::Result<()> { - match self { - RuleFilter::Group(group) => { - write!(fmt, "{group}") - } - RuleFilter::Rule(group, rule) => { - write!(fmt, "{group}/{rule}") - } - } - } -} - -/// Opaque identifier for a group of rule -#[derive(Copy, Clone, Debug, PartialEq, Eq)] -pub struct GroupKey { - group: &'static str, -} - -impl GroupKey { - pub(crate) fn new(group: &'static str) -> Self { - Self { group } - } - - pub fn group() -> Self { - Self::new(G::NAME) - } -} - -impl From for RuleFilter<'static> { - fn from(key: GroupKey) -> Self { - RuleFilter::Group(key.group) - } -} - -/// Opaque identifier for a single rule -#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, 
Hash)] -pub struct RuleKey { - group: &'static str, - rule: &'static str, -} - -impl RuleKey { - pub fn new(group: &'static str, rule: &'static str) -> Self { - Self { group, rule } - } - - pub fn rule() -> Self { - Self::new(::NAME, R::METADATA.name) - } - - pub fn group(&self) -> &'static str { - self.group - } - - pub fn rule_name(&self) -> &'static str { - self.rule - } -} - -impl From for RuleFilter<'static> { - fn from(key: RuleKey) -> Self { - RuleFilter::Rule(key.group, key.rule) - } -} - -impl PartialEq for RuleFilter<'static> { - fn eq(&self, other: &RuleKey) -> bool { - match *self { - RuleFilter::Group(group) => group == other.group, - RuleFilter::Rule(group, rule) => group == other.group && rule == other.rule, - } - } -} - diff --git a/crates/pg_analyse/src/options.rs b/crates/pg_analyse/src/options.rs index 11a2059e1..4544fcd5c 100644 --- a/crates/pg_analyse/src/options.rs +++ b/crates/pg_analyse/src/options.rs @@ -17,7 +17,7 @@ impl RuleOptions { /// It returns the deserialized rule option pub fn value(&self) -> &O { - let RuleOptions(type_id, value)= &self; + let RuleOptions(type_id, value) = &self; let current_id = TypeId::of::(); debug_assert_eq!(type_id, ¤t_id); // SAFETY: the code should fail when asserting the types. 
@@ -71,4 +71,3 @@ impl AnalyzerOptions { .cloned() } } - diff --git a/crates/pg_analyse/src/registry.rs b/crates/pg_analyse/src/registry.rs index ced6c72f7..1ed582bd5 100644 --- a/crates/pg_analyse/src/registry.rs +++ b/crates/pg_analyse/src/registry.rs @@ -1,8 +1,13 @@ -use std::{borrow, collections::{BTreeSet, BinaryHeap}}; +use std::{borrow, collections::BTreeSet}; use pg_diagnostics::Error; -use crate::{context::RuleContext, filter::AnalysisFilter, matcher::{GroupKey, RuleKey}, rule::{GroupCategory, Rule, RuleGroup}, signals::{RuleSignal, SignalEntry}, AnalyzerOptions}; +use crate::{ + context::RuleContext, + filter::{AnalysisFilter, GroupKey, RuleKey}, + rule::{GroupCategory, Rule, RuleDiagnostic, RuleGroup}, + AnalyzerOptions, +}; pub trait RegistryVisitor { /// Record the category `C` to this visitor @@ -21,7 +26,6 @@ pub trait RegistryVisitor { R: Rule + 'static; } - /// Key struct for a rule in the metadata map, sorted alphabetically #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] pub struct MetadataKey { @@ -52,7 +56,6 @@ impl borrow::Borrow for MetadataKey { } } - /// Stores metadata information for all the rules in the registry, sorted /// alphabetically #[derive(Debug, Default)] @@ -97,13 +100,6 @@ pub struct RuleRegistryBuilder<'a> { diagnostics: Vec, } -// TOOD: add build, then run through rules in registry and emit signals -// i hope we can simplify this. -// - suppresions are on statement level anyways, so we can just run through the statements and check if they are suppressing certain rules -// - range is always the statement for now. we will add the weak ref thingy later -// - do we really need the RegistryRule stuff? 
the registry rule doesnt execute, it just provides -// the run function - impl RegistryVisitor for RuleRegistryBuilder<'_> { fn record_category(&mut self) { if self.filter.match_category::() { @@ -143,16 +139,13 @@ pub struct RegistryRule { run: RuleExecutor, } - -pub struct RegistryRuleParams<'analyzer, 'query> { +pub struct RegistryRuleParams<'analyzer> { pub root: &'analyzer pg_query_ext::NodeEnum, - pub signal_queue: &'query mut BinaryHeap>, pub options: &'analyzer AnalyzerOptions, } - /// Executor for rule as a generic function pointer -type RuleExecutor = fn(&mut RegistryRuleParams) -> Result<(), Error>; +type RuleExecutor = fn(&mut RegistryRuleParams) -> Result, Error>; impl RegistryRule { fn new() -> Self @@ -160,49 +153,19 @@ impl RegistryRule { R: Rule + 'static, { /// Generic implementation of RuleExecutor for any rule type R - fn run( - params: &mut RegistryRuleParams - ) -> Result<(), Error> + fn run(params: &mut RegistryRuleParams) -> Result, Error> where R: Rule + 'static, { let options = params.options.rule_options::().unwrap_or_default(); - let ctx = match RuleContext::new( - params.root, - ¶ms.options.file_path, - &options - ) { + let ctx = match RuleContext::new(params.root, ¶ms.options.file_path, &options) { Ok(ctx) => ctx, Err(error) => return Err(error), }; - for result in R::run(&ctx) { - // `None` means the entire range of the statement - let text_range = R::text_range(&ctx, &result); - - let signal = Box::new(RuleSignal::::new( - params.root, - result, - params.options, - )); - - params.signal_queue.push(SignalEntry { - signal, - rule: RuleKey::rule::(), - text_range, - }); - } - - Ok(()) + Ok(R::run(&ctx)) } - Self { - run: run::, - } + Self { run: run:: } } } - - - - - diff --git a/crates/pg_analyse/src/rule.rs b/crates/pg_analyse/src/rule.rs index 8b9f3425d..336f2d437 100644 --- a/crates/pg_analyse/src/rule.rs +++ b/crates/pg_analyse/src/rule.rs @@ -1,19 +1,16 @@ -use text_size::TextRange; -use std::fmt::Debug; use 
pg_console::fmt::Display; use pg_console::{markup, MarkupBuf}; use pg_diagnostics::advice::CodeSuggestionAdvice; use pg_diagnostics::location::AsSpan; use pg_diagnostics::{ - Advices, Category, Diagnostic, DiagnosticTags, Location, LogCategory, MessageAndDescription, Visit + Advices, Category, Diagnostic, DiagnosticTags, Location, LogCategory, MessageAndDescription, + Visit, }; +use std::fmt::Debug; +use text_size::TextRange; use crate::{categories::RuleCategory, context::RuleContext, registry::RegistryVisitor}; -// use crate::categories::RuleCategory; -// use crate::context::RuleContext; -// use crate::registry::RegistryVisitor; - #[derive(Clone, Debug)] #[cfg_attr(feature = "serde", derive(serde::Serialize))] /// Static metadata containing information about a rule @@ -31,17 +28,13 @@ pub struct RuleMetadata { } impl RuleMetadata { - pub const fn new( - version: &'static str, - name: &'static str, - docs: &'static str - ) -> Self { + pub const fn new(version: &'static str, name: &'static str, docs: &'static str) -> Self { Self { deprecated: None, version, name, docs, - recommended: false + recommended: false, } } @@ -87,46 +80,11 @@ pub trait GroupCategory { /// and a callback function to be executed on all nodes matching the query to possibly /// raise an analysis event pub trait Rule: RuleMeta + Sized { - /// A generic type that will be kept in memory between a call to `run` and - /// subsequent executions of `diagnostic` or `action`, allows the rule to - /// hold some temporary state between the moment a signal is raised and - /// when a diagnostic or action needs to be built - type State; - /// An iterator type returned by `run` to yield zero or more signals to the - /// analyzer - type Signals: IntoIterator; - /// The options that belong to a rule type Options: Default + Clone + Debug; - /// This function is called once for each node matching `Query` in the tree - /// being analyzed. 
If it returns `Some` the state object will be wrapped - /// in a generic `AnalyzerSignal`, and the consumer of the analyzer may call - /// `diagnostic` or `action` on it - fn run(ctx: &RuleContext) -> Self::Signals; - - /// Used by the analyzer to associate a range of source text to a signal in - /// order to support suppression comments. - /// - /// If this function returns [None], the range of the query node will be used instead - /// - /// The default implementation returns the range of `Self::diagnostic`, and - /// should return the correct value for most rules however you may want to - /// override this if generating a diagnostic for this rule requires heavy - /// processing and the range could be determined through a faster path - fn text_range(ctx: &RuleContext, state: &Self::State) -> Option { - Self::diagnostic(ctx, state).and_then(|diag| diag.span()) - } - - /// Called by the consumer of the analyzer to try to generate a diagnostic - /// from a signal raised by `run` - /// - /// The default implementation returns None - fn diagnostic(_ctx: &RuleContext, _state: &Self::State) -> Option { - None - } + fn run(ctx: &RuleContext) -> Vec; } - /// Diagnostic object returned by a single analysis rule #[derive(Debug, Diagnostic)] pub struct RuleDiagnostic { @@ -298,5 +256,3 @@ impl RuleDiagnostic { &self.rule_advice } } - - diff --git a/crates/pg_analyse/src/signals.rs b/crates/pg_analyse/src/signals.rs deleted file mode 100644 index 89004af7a..000000000 --- a/crates/pg_analyse/src/signals.rs +++ /dev/null @@ -1,123 +0,0 @@ -use crate::diagnostics::AnalyzerDiagnostic; -use crate::rule::RuleGroup; -use crate::{AnalyzerOptions, RuleKey}; -use crate::{categories::ActionCategory, context::RuleContext, rule::Rule}; -use pg_console::MarkupBuf; -use pg_diagnostics::{advice::CodeSuggestionAdvice, Applicability, CodeSuggestion, Error}; -use text_size::{TextRange, TextSize}; -use std::borrow::Cow; -use std::cmp::Ordering; -use std::iter::FusedIterator; -use 
std::marker::PhantomData; -use std::vec::IntoIter; - -/// Event raised by the analyzer when a [Rule](crate::Rule) -/// emits a diagnostic, a code action, or both -pub trait AnalyzerSignal { - fn diagnostic(&self) -> Option; -} - -/// Simple implementation of [AnalyzerSignal] generating a [AnalyzerDiagnostic] -/// from a provided factory function. Optionally, this signal can be configured -/// to also emit a code action, by calling `.with_action` with a secondary -/// factory function for said action. -pub struct DiagnosticSignal { - diagnostic: D, - _diag: PhantomData, -} - -impl DiagnosticSignal -where - D: Fn() -> T, - Error: From, -{ - pub fn new(factory: D) -> Self { - Self { - diagnostic: factory, - _diag: PhantomData, - } - } -} - -impl AnalyzerSignal for DiagnosticSignal -where - D: Fn() -> T, - Error: From, -{ - fn diagnostic(&self) -> Option { - let diag = (self.diagnostic)(); - let error = Error::from(diag); - Some(AnalyzerDiagnostic::from_error(error)) - } -} - -/// Entry for a pending signal in the `signal_queue` -pub struct SignalEntry<'analyzer> { - /// Boxed analyzer signal to be emitted - pub signal: Box, - /// Unique identifier for the rule that emitted this signal - pub rule: RuleKey, - /// Text range in the statement this signal covers. 
If `None`, the signal covers the entire - /// statement - pub text_range: Option, -} - - -// SignalEntry is ordered based on the starting point of its `text_range` -impl<'analyzer> Ord for SignalEntry<'analyzer> { - fn cmp(&self, other: &Self) -> Ordering { - other.text_range.map(|x| x.start()).unwrap_or(TextSize::from(0)).cmp(&self.text_range.map(|x| x.start()).unwrap_or(TextSize::from(0))) - } -} - -impl<'a> PartialOrd for SignalEntry<'a> { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } -} - -impl<'a> Eq for SignalEntry<'a> {} - -impl<'a> PartialEq for SignalEntry<'a> { - fn eq(&self, other: &Self) -> bool { - self.text_range.map(|x| x.start()) == other.text_range.map(|x| x.start()) - } -} - - -/// Analyzer-internal implementation of [AnalyzerSignal] for a specific [Rule](crate::registry::Rule) -pub(crate) struct RuleSignal<'analyzer, R: Rule> { - root: &'analyzer pg_query_ext::NodeEnum, - state: R::State, - /// A list of strings that are considered "globals" inside the analyzer - options: &'analyzer AnalyzerOptions, -} - -impl<'analyzer, R> RuleSignal<'analyzer, R> -where - R: Rule + 'static, -{ - pub(crate) fn new( - root: &'analyzer pg_query_ext::NodeEnum, - state: R::State, - options: &'analyzer AnalyzerOptions, - ) -> Self { - Self { - root, - state, - options, - } - } -} - -impl<'bag, R> AnalyzerSignal for RuleSignal<'bag, R> -where - R: Rule + 'static, -{ - fn diagnostic(&self) -> Option { - let options = self.options.rule_options::().unwrap_or_default(); - let ctx = RuleContext::new(self.root, &self.options.file_path, &options).ok()?; - - R::diagnostic(&ctx, &self.state).map(AnalyzerDiagnostic::from) - } -} diff --git a/crates/pg_lint/Cargo.toml b/crates/pg_lint/Cargo.toml index 22cf92ce6..a349c57d1 100644 --- a/crates/pg_lint/Cargo.toml +++ b/crates/pg_lint/Cargo.toml @@ -12,17 +12,17 @@ version = "0.0.0" [dependencies] -lazy_static = "1.4.0" -pg_base_db.workspace = true -pg_diagnostics.workspace = true 
-pg_console.workspace = true -pg_query_ext.workspace = true -pg_syntax.workspace = true -serde.workspace = true -serde_json.workspace = true -serde_plain = "1.0" -text-size.workspace = true -enumflags2.workspace = true +enumflags2.workspace = true +lazy_static = "1.4.0" +pg_base_db.workspace = true +pg_console.workspace = true +pg_diagnostics.workspace = true +pg_query_ext.workspace = true +pg_syntax.workspace = true +serde.workspace = true +serde_json.workspace = true +serde_plain = "1.0" +text-size.workspace = true [dev-dependencies] diff --git a/justfile b/justfile index 55002e492..401e2a6aa 100644 --- a/justfile +++ b/justfile @@ -114,6 +114,9 @@ test-doc: lint: cargo clippy +lint-fix: + cargo clippy --fix + # When you finished coding, run this command to run the same commands in the CI. # ready: # git diff --exit-code --quiet From 60007d044555aaf1b3abf47f3100faf404bbb49b Mon Sep 17 00:00:00 2001 From: psteinroe Date: Wed, 18 Dec 2024 09:23:20 +0100 Subject: [PATCH 03/27] cleanup --- crates/pg_analyse/src/filter.rs | 4 ---- crates/pg_analyse/src/lib.rs | 1 + crates/pg_cli/src/commands/check.rs | 2 +- crates/pg_cli/src/commands/mod.rs | 11 ++++------- crates/pg_console/src/write/termcolor.rs | 10 +++++----- crates/pg_diagnostics/src/serde.rs | 2 +- crates/pg_lsp/src/db_connection.rs | 4 ++-- crates/pg_lsp/src/debouncer.rs | 2 +- crates/pg_lsp/src/main.rs | 2 +- crates/pg_lsp/src/server.rs | 2 +- crates/pg_lsp/src/session.rs | 3 +-- crates/pg_lsp_new/src/utils.rs | 6 +++--- xtask/src/install.rs | 2 +- 13 files changed, 22 insertions(+), 29 deletions(-) diff --git a/crates/pg_analyse/src/filter.rs b/crates/pg_analyse/src/filter.rs index 800c9111b..ad3c2df0d 100644 --- a/crates/pg_analyse/src/filter.rs +++ b/crates/pg_analyse/src/filter.rs @@ -1,7 +1,5 @@ use std::fmt::{Debug, Display, Formatter}; -use text_size::TextRange; - use crate::{ categories::RuleCategories, rule::{GroupCategory, Rule, RuleGroup}, @@ -25,8 +23,6 @@ pub struct AnalysisFilter<'a> { pub 
enabled_rules: Option<&'a [RuleFilter<'a>]>, /// Do not allow rules matching these names to emit signals pub disabled_rules: &'a [RuleFilter<'a>], - /// Only emit signals matching this text range - pub range: Option, } impl<'analysis> AnalysisFilter<'analysis> { diff --git a/crates/pg_analyse/src/lib.rs b/crates/pg_analyse/src/lib.rs index bf268de83..f394adfeb 100644 --- a/crates/pg_analyse/src/lib.rs +++ b/crates/pg_analyse/src/lib.rs @@ -10,5 +10,6 @@ pub use crate::categories::{ SourceActionKind, SUPPRESSION_ACTION_CATEGORY, }; pub use crate::options::{AnalyzerConfiguration, AnalyzerOptions, AnalyzerRules}; +pub use crate::filter::{RuleKey, GroupKey}; pub use crate::registry::{MetadataRegistry, RegistryVisitor, RuleRegistry, RuleRegistryBuilder}; pub use crate::rule::Rule; diff --git a/crates/pg_cli/src/commands/check.rs b/crates/pg_cli/src/commands/check.rs index bf09d5fa5..986060a6a 100644 --- a/crates/pg_cli/src/commands/check.rs +++ b/crates/pg_cli/src/commands/check.rs @@ -9,7 +9,7 @@ use pg_fs::FileSystem; use pg_workspace_new::{configuration::LoadedConfiguration, DynRef, Workspace, WorkspaceError}; use std::ffi::OsString; -use super::{determine_fix_file_mode, get_files_to_process_with_cli_options, CommandRunner}; +use super::{get_files_to_process_with_cli_options, CommandRunner}; pub(crate) struct CheckCommandPayload { pub(crate) write: bool, diff --git a/crates/pg_cli/src/commands/mod.rs b/crates/pg_cli/src/commands/mod.rs index 05c433a36..013fc19e2 100644 --- a/crates/pg_cli/src/commands/mod.rs +++ b/crates/pg_cli/src/commands/mod.rs @@ -1,6 +1,5 @@ use crate::changed::{get_changed_files, get_staged_files}; use crate::cli_options::{cli_options, CliOptions, CliReporter, ColorsArg}; -use crate::diagnostics::DeprecatedArgument; use crate::execute::Stdin; use crate::logging::LoggingKind; use crate::{ @@ -8,8 +7,7 @@ use crate::{ }; use bpaf::Bpaf; use pg_configuration::{partial_configuration, PartialConfiguration}; -use pg_console::{markup, Console, 
ConsoleExt}; -use pg_diagnostics::{Diagnostic, PrintDiagnostic}; +use pg_console::Console; use pg_fs::FileSystem; use pg_workspace_new::configuration::{load_configuration, LoadedConfiguration}; use pg_workspace_new::settings::PartialConfigurationExt; @@ -430,7 +428,7 @@ pub(crate) fn determine_fix_file_mode( check_fix_incompatible_arguments(options)?; let safe_fixes = write || fix; - let unsafe_fixes = ((write || safe_fixes) && unsafe_); + let unsafe_fixes = (write || safe_fixes) && unsafe_; if unsafe_fixes { Ok(Some(FixFileMode::SafeAndUnsafeFixes)) @@ -477,9 +475,8 @@ mod tests { #[test] fn incompatible_arguments() { - for (write, suppress, suppression_reason, fix, unsafe_) in [ - (true, false, None, true, false), // --write --fix - ] { + { + let (write, suppress, suppression_reason, fix, unsafe_) = (true, false, None, true, false); assert!(check_fix_incompatible_arguments(FixFileModeOptions { write, suppress, diff --git a/crates/pg_console/src/write/termcolor.rs b/crates/pg_console/src/write/termcolor.rs index fc94e7d3a..de6e3b2df 100644 --- a/crates/pg_console/src/write/termcolor.rs +++ b/crates/pg_console/src/write/termcolor.rs @@ -217,13 +217,13 @@ fn unicode_to_ascii(c: char) -> char { mod tests { use std::{fmt::Write, str::from_utf8}; - use pg_markup::markup; - use termcolor::Ansi; + + - use crate as pg_console; - use crate::fmt::Formatter; + + - use super::{SanitizeAdapter, Termcolor}; + use super::SanitizeAdapter; #[test] fn test_printing_complex_emojis() { diff --git a/crates/pg_diagnostics/src/serde.rs b/crates/pg_diagnostics/src/serde.rs index 8d02c2b03..f99a11956 100644 --- a/crates/pg_diagnostics/src/serde.rs +++ b/crates/pg_diagnostics/src/serde.rs @@ -358,7 +358,7 @@ impl<'de> Deserialize<'de> for DiagnosticTags { mod tests { use std::io; - use serde_json::{from_value, json, to_value, Value}; + use serde_json::{json, Value}; use text_size::{TextRange, TextSize}; use crate::{ diff --git a/crates/pg_lsp/src/db_connection.rs 
b/crates/pg_lsp/src/db_connection.rs index 6688bc1c9..ef1e79f83 100644 --- a/crates/pg_lsp/src/db_connection.rs +++ b/crates/pg_lsp/src/db_connection.rs @@ -40,7 +40,7 @@ impl DbConnection { res = listener.recv() => { match res { Ok(not) => { - if not.payload().to_string() == "reload schema" { + if not.payload() == "reload schema" { let schema_cache = SchemaCache::load(&cloned_pool).await; ide.write().await.set_schema_cache(schema_cache); }; @@ -62,7 +62,7 @@ impl DbConnection { Ok(Self { pool, - connection_string: connection_string, + connection_string, schema_update_handle, close_tx, }) diff --git a/crates/pg_lsp/src/debouncer.rs b/crates/pg_lsp/src/debouncer.rs index 61f17bf38..a970e2787 100644 --- a/crates/pg_lsp/src/debouncer.rs +++ b/crates/pg_lsp/src/debouncer.rs @@ -81,6 +81,6 @@ impl SimpleTokioDebouncer { self.shutdown_flag .store(true, std::sync::atomic::Ordering::Relaxed); - let _ = self.handle.abort(); // we don't care about any errors during shutdown + self.handle.abort(); // we don't care about any errors during shutdown } } diff --git a/crates/pg_lsp/src/main.rs b/crates/pg_lsp/src/main.rs index ea366fb64..a52026269 100644 --- a/crates/pg_lsp/src/main.rs +++ b/crates/pg_lsp/src/main.rs @@ -23,7 +23,7 @@ async fn main() -> anyhow::Result<()> { tracing::info!("Starting server."); - let (service, socket) = LspService::new(|client| LspServer::new(client)); + let (service, socket) = LspService::new(LspServer::new); Server::new(stdin, stdout, socket).serve(service).await; diff --git a/crates/pg_lsp/src/server.rs b/crates/pg_lsp/src/server.rs index bf21b0c20..3efb4f170 100644 --- a/crates/pg_lsp/src/server.rs +++ b/crates/pg_lsp/src/server.rs @@ -435,7 +435,7 @@ impl LanguageServer for LspServer { let completions = self.session.get_available_completions(path, position).await; - Ok(completions.map(|c| CompletionResponse::List(c))) + Ok(completions.map(CompletionResponse::List)) } #[tracing::instrument( diff --git a/crates/pg_lsp/src/session.rs 
b/crates/pg_lsp/src/session.rs index c9b634c4f..667f36b13 100644 --- a/crates/pg_lsp/src/session.rs +++ b/crates/pg_lsp/src/session.rs @@ -248,8 +248,7 @@ impl Session { tree: ide .tree_sitter .tree(&stmt) - .as_ref() - .and_then(|t| Some(t.as_ref())), + .as_ref().map(|t| t.as_ref()), schema: &schema_cache, }) .into_iter() diff --git a/crates/pg_lsp_new/src/utils.rs b/crates/pg_lsp_new/src/utils.rs index fb0bd196b..73bce0f70 100644 --- a/crates/pg_lsp_new/src/utils.rs +++ b/crates/pg_lsp_new/src/utils.rs @@ -304,12 +304,12 @@ pub(crate) fn apply_document_changes( #[cfg(test)] mod tests { - use super::apply_document_changes; + use pg_lsp_converters::line_index::LineIndex; - use pg_lsp_converters::{PositionEncoding, WideEncoding}; + use pg_lsp_converters::PositionEncoding; use pg_text_edit::TextEdit; use tower_lsp::lsp_types as lsp; - use tower_lsp::lsp_types::{Position, Range, TextDocumentContentChangeEvent}; + #[test] fn test_diff_1() { diff --git a/xtask/src/install.rs b/xtask/src/install.rs index c149bd5a3..b0367b2ec 100644 --- a/xtask/src/install.rs +++ b/xtask/src/install.rs @@ -12,7 +12,7 @@ impl flags::Install { if cfg!(target_os = "macos") { fix_path_for_mac(sh).context("Fix path for mac")?; } - if let Some(_) = self.server() { + if self.server().is_some() { install_server(sh).context("install server")?; } if let Some(client) = self.client() { From 271558642679ada6bde4b728333b0ce86a0b8de5 Mon Sep 17 00:00:00 2001 From: psteinroe Date: Wed, 18 Dec 2024 09:41:43 +0100 Subject: [PATCH 04/27] simplify more and implement runner --- crates/pg_analyse/src/context.rs | 6 ++-- crates/pg_analyse/src/lib.rs | 38 ++++++++++++++++++++++-- crates/pg_analyse/src/registry.rs | 25 +++++++++------- crates/pg_cli/src/commands/mod.rs | 3 +- crates/pg_console/src/write/termcolor.rs | 6 ---- crates/pg_lsp/src/session.rs | 5 +--- crates/pg_lsp_new/src/utils.rs | 3 +- 7 files changed, 57 insertions(+), 29 deletions(-) diff --git a/crates/pg_analyse/src/context.rs 
b/crates/pg_analyse/src/context.rs index b8e67db11..f2c1fd7a6 100644 --- a/crates/pg_analyse/src/context.rs +++ b/crates/pg_analyse/src/context.rs @@ -21,12 +21,12 @@ where stmt: &'a pg_query_ext::NodeEnum, file_path: &'a Path, options: &'a R::Options, - ) -> Result { - Ok(Self { + ) -> Self { + Self { stmt, file_path, options, - }) + } } /// Returns the group that belongs to the current rule diff --git a/crates/pg_analyse/src/lib.rs b/crates/pg_analyse/src/lib.rs index f394adfeb..87ed684bd 100644 --- a/crates/pg_analyse/src/lib.rs +++ b/crates/pg_analyse/src/lib.rs @@ -5,11 +5,45 @@ mod options; mod registry; mod rule; +use registry::RegistryRuleParams; + pub use crate::categories::{ ActionCategory, RefactorKind, RuleCategories, RuleCategoriesBuilder, RuleCategory, SourceActionKind, SUPPRESSION_ACTION_CATEGORY, }; +pub use crate::filter::{GroupKey, RuleKey}; pub use crate::options::{AnalyzerConfiguration, AnalyzerOptions, AnalyzerRules}; -pub use crate::filter::{RuleKey, GroupKey}; pub use crate::registry::{MetadataRegistry, RegistryVisitor, RuleRegistry, RuleRegistryBuilder}; -pub use crate::rule::Rule; +pub use crate::rule::{Rule, RuleDiagnostic}; + +pub struct Analyzer<'analyzer> { + /// Holds the metadata for all the rules statically known to the analyzer + /// we need this later when we add suppression support + #[allow(dead_code)] + metadata: &'analyzer MetadataRegistry, +} + +pub struct AnalyzerContext<'a> { + pub root: pg_query_ext::NodeEnum, + pub options: &'a AnalyzerOptions, + pub registry: RuleRegistry, +} + +impl<'analyzer> Analyzer<'analyzer> { + /// Construct a new instance of the analyzer with the given rule registry + pub fn new(metadata: &'analyzer MetadataRegistry) -> Self { + Self { metadata } + } + + pub fn run(self, ctx: AnalyzerContext) -> Vec { + let params = RegistryRuleParams { + root: &ctx.root, + options: ctx.options, + }; + + ctx.registry + .into_iter() + .flat_map(|rule| (rule.run)(¶ms)) + .collect::>() + } +} diff --git 
a/crates/pg_analyse/src/registry.rs b/crates/pg_analyse/src/registry.rs index 1ed582bd5..b2d1c212d 100644 --- a/crates/pg_analyse/src/registry.rs +++ b/crates/pg_analyse/src/registry.rs @@ -94,10 +94,8 @@ impl RegistryVisitor for MetadataRegistry { pub struct RuleRegistryBuilder<'a> { filter: &'a AnalysisFilter<'a>, - root: &'a pg_query_ext::NodeEnum, // Rule Registry registry: RuleRegistry, - diagnostics: Vec, } impl RegistryVisitor for RuleRegistryBuilder<'_> { @@ -133,10 +131,19 @@ pub struct RuleRegistry { rules: Vec, } +impl IntoIterator for RuleRegistry { + type Item = RegistryRule; + type IntoIter = std::vec::IntoIter; + + fn into_iter(self) -> Self::IntoIter { + self.rules.into_iter() + } +} + /// Internal representation of a single rule in the registry #[derive(Copy, Clone)] pub struct RegistryRule { - run: RuleExecutor, + pub(crate) run: RuleExecutor, } pub struct RegistryRuleParams<'analyzer> { @@ -145,7 +152,7 @@ pub struct RegistryRuleParams<'analyzer> { } /// Executor for rule as a generic function pointer -type RuleExecutor = fn(&mut RegistryRuleParams) -> Result, Error>; +type RuleExecutor = fn(&RegistryRuleParams) -> Vec; impl RegistryRule { fn new() -> Self @@ -153,17 +160,13 @@ impl RegistryRule { R: Rule + 'static, { /// Generic implementation of RuleExecutor for any rule type R - fn run(params: &mut RegistryRuleParams) -> Result, Error> + fn run(params: &RegistryRuleParams) -> Vec where R: Rule + 'static, { let options = params.options.rule_options::().unwrap_or_default(); - let ctx = match RuleContext::new(params.root, ¶ms.options.file_path, &options) { - Ok(ctx) => ctx, - Err(error) => return Err(error), - }; - - Ok(R::run(&ctx)) + let ctx = RuleContext::new(params.root, ¶ms.options.file_path, &options); + R::run(&ctx) } Self { run: run:: } diff --git a/crates/pg_cli/src/commands/mod.rs b/crates/pg_cli/src/commands/mod.rs index 013fc19e2..a0934a906 100644 --- a/crates/pg_cli/src/commands/mod.rs +++ b/crates/pg_cli/src/commands/mod.rs @@ 
-476,7 +476,8 @@ mod tests { #[test] fn incompatible_arguments() { { - let (write, suppress, suppression_reason, fix, unsafe_) = (true, false, None, true, false); + let (write, suppress, suppression_reason, fix, unsafe_) = + (true, false, None, true, false); assert!(check_fix_incompatible_arguments(FixFileModeOptions { write, suppress, diff --git a/crates/pg_console/src/write/termcolor.rs b/crates/pg_console/src/write/termcolor.rs index de6e3b2df..2364fde26 100644 --- a/crates/pg_console/src/write/termcolor.rs +++ b/crates/pg_console/src/write/termcolor.rs @@ -217,12 +217,6 @@ fn unicode_to_ascii(c: char) -> char { mod tests { use std::{fmt::Write, str::from_utf8}; - - - - - - use super::SanitizeAdapter; #[test] diff --git a/crates/pg_lsp/src/session.rs b/crates/pg_lsp/src/session.rs index 667f36b13..afb2017dc 100644 --- a/crates/pg_lsp/src/session.rs +++ b/crates/pg_lsp/src/session.rs @@ -245,10 +245,7 @@ impl Session { let completion_items: Vec = pg_completions::complete(CompletionParams { position: offset - range.start() - TextSize::from(1), text: &stmt.text, - tree: ide - .tree_sitter - .tree(&stmt) - .as_ref().map(|t| t.as_ref()), + tree: ide.tree_sitter.tree(&stmt).as_ref().map(|t| t.as_ref()), schema: &schema_cache, }) .into_iter() diff --git a/crates/pg_lsp_new/src/utils.rs b/crates/pg_lsp_new/src/utils.rs index 73bce0f70..33eef1f70 100644 --- a/crates/pg_lsp_new/src/utils.rs +++ b/crates/pg_lsp_new/src/utils.rs @@ -304,12 +304,11 @@ pub(crate) fn apply_document_changes( #[cfg(test)] mod tests { - + use pg_lsp_converters::line_index::LineIndex; use pg_lsp_converters::PositionEncoding; use pg_text_edit::TextEdit; use tower_lsp::lsp_types as lsp; - #[test] fn test_diff_1() { From ab8c4e4ae6d2de9af3f057acd3ddf8f0c6db4934 Mon Sep 17 00:00:00 2001 From: psteinroe Date: Wed, 18 Dec 2024 09:42:24 +0100 Subject: [PATCH 05/27] fix: lint --- crates/pg_analyse/src/context.rs | 1 - crates/pg_analyse/src/registry.rs | 1 - 2 files changed, 2 deletions(-) diff --git 
a/crates/pg_analyse/src/context.rs b/crates/pg_analyse/src/context.rs index f2c1fd7a6..56a549784 100644 --- a/crates/pg_analyse/src/context.rs +++ b/crates/pg_analyse/src/context.rs @@ -1,4 +1,3 @@ -use pg_diagnostics::{Error, Result}; use std::path::Path; use crate::{ diff --git a/crates/pg_analyse/src/registry.rs b/crates/pg_analyse/src/registry.rs index b2d1c212d..e217aae94 100644 --- a/crates/pg_analyse/src/registry.rs +++ b/crates/pg_analyse/src/registry.rs @@ -1,6 +1,5 @@ use std::{borrow, collections::BTreeSet}; -use pg_diagnostics::Error; use crate::{ context::RuleContext, From b4bb56c1dbd380c3224e267322163536439d1f22 Mon Sep 17 00:00:00 2001 From: psteinroe Date: Fri, 20 Dec 2024 15:50:50 +0100 Subject: [PATCH 06/27] add macros --- Cargo.lock | 8 + Cargo.toml | 1 + crates/pg_analyse/src/diagnostics.rs | 148 ++++++++++++++++++ crates/pg_analyse/src/lib.rs | 7 +- crates/pg_analyse/src/macros.rs | 122 +++++++++++++++ crates/pg_analyse/src/registry.rs | 1 - crates/pg_linter/Cargo.toml | 16 ++ crates/pg_linter/src/lib.rs | 1 + crates/pg_linter/src/lint.rs | 4 + crates/pg_linter/src/lint/performance.rs | 14 ++ .../src/lint/performance/prefer_text_field.rs | 42 +++++ 11 files changed, 362 insertions(+), 2 deletions(-) create mode 100644 crates/pg_analyse/src/diagnostics.rs create mode 100644 crates/pg_analyse/src/macros.rs create mode 100644 crates/pg_linter/Cargo.toml create mode 100644 crates/pg_linter/src/lib.rs create mode 100644 crates/pg_linter/src/lint.rs create mode 100644 crates/pg_linter/src/lint/performance.rs create mode 100644 crates/pg_linter/src/lint/performance/prefer_text_field.rs diff --git a/Cargo.lock b/Cargo.lock index c934feece..dc6adb026 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2456,6 +2456,14 @@ dependencies = [ "text-size", ] +[[package]] +name = "pg_linter" +version = "0.0.0" +dependencies = [ + "pg_analyse", + "pg_console", +] + [[package]] name = "pg_lsp" version = "0.0.0" diff --git a/Cargo.toml b/Cargo.toml index 
60dbd2159..0e27adb4f 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -63,6 +63,7 @@ pg_inlay_hints = { path = "./crates/pg_inlay_hints", version = "0.0.0 pg_lexer = { path = "./crates/pg_lexer", version = "0.0.0" } pg_lexer_codegen = { path = "./crates/pg_lexer_codegen", version = "0.0.0" } pg_lint = { path = "./crates/pg_lint", version = "0.0.0" } +pg_linter = { path = "./crates/pg_linter", version = "0.0.0" } pg_lsp_converters = { path = "./crates/pg_lsp_converters", version = "0.0.0" } pg_lsp_new = { path = "./crates/pg_lsp_new", version = "0.0.0" } pg_markup = { path = "./crates/pg_markup", version = "0.0.0" } diff --git a/crates/pg_analyse/src/diagnostics.rs b/crates/pg_analyse/src/diagnostics.rs new file mode 100644 index 000000000..ceba084dd --- /dev/null +++ b/crates/pg_analyse/src/diagnostics.rs @@ -0,0 +1,148 @@ +use pg_diagnostics::{ + category, Advices, Category, Diagnostic, + DiagnosticTags, Error, Location, Severity, Visit, +}; +use text_size::TextRange; +use std::borrow::Cow; +use std::fmt::{Debug, Display, Formatter}; + +use crate::rule::RuleDiagnostic; + +/// Small wrapper for diagnostics during the analysis phase. +/// +/// During these phases, analyzers can create various type diagnostics and some of them +/// don't have all the info to actually create a real [Diagnostic]. +/// +/// This wrapper serves as glue, which eventually is able to spit out full fledged diagnostics. 
+/// +#[derive(Debug)] +pub struct AnalyzerDiagnostic { + kind: DiagnosticKind, +} + +impl From for AnalyzerDiagnostic { + fn from(rule_diagnostic: RuleDiagnostic) -> Self { + Self { + kind: DiagnosticKind::Rule(rule_diagnostic), + } + } +} + +#[derive(Debug)] +enum DiagnosticKind { + /// It holds various info related to diagnostics emitted by the rules + Rule(RuleDiagnostic), + /// We have raw information to create a basic [Diagnostic] + Raw(Error), +} + +impl Diagnostic for AnalyzerDiagnostic { + fn category(&self) -> Option<&'static Category> { + match &self.kind { + DiagnosticKind::Rule(rule_diagnostic) => Some(rule_diagnostic.category), + DiagnosticKind::Raw(error) => error.category(), + } + } + fn description(&self, fmt: &mut Formatter<'_>) -> std::fmt::Result { + match &self.kind { + DiagnosticKind::Rule(rule_diagnostic) => Debug::fmt(&rule_diagnostic.message, fmt), + DiagnosticKind::Raw(error) => error.description(fmt), + } + } + + fn message(&self, fmt: &mut pg_console::fmt::Formatter<'_>) -> std::io::Result<()> { + match &self.kind { + DiagnosticKind::Rule(rule_diagnostic) => { + pg_console::fmt::Display::fmt(&rule_diagnostic.message, fmt) + } + DiagnosticKind::Raw(error) => error.message(fmt), + } + } + + fn severity(&self) -> Severity { + match &self.kind { + DiagnosticKind::Rule { .. 
} => Severity::Error, + DiagnosticKind::Raw(error) => error.severity(), + } + } + + fn tags(&self) -> DiagnosticTags { + match &self.kind { + DiagnosticKind::Rule(rule_diagnostic) => rule_diagnostic.tags, + DiagnosticKind::Raw(error) => error.tags(), + } + } + + fn location(&self) -> Location<'_> { + match &self.kind { + DiagnosticKind::Rule(rule_diagnostic) => { + Location::builder().span(&rule_diagnostic.span).build() + } + DiagnosticKind::Raw(error) => error.location(), + } + } + + fn advices(&self, visitor: &mut dyn Visit) -> std::io::Result<()> { + match &self.kind { + DiagnosticKind::Rule(rule_diagnostic) => rule_diagnostic.advices().record(visitor)?, + DiagnosticKind::Raw(error) => error.advices(visitor)?, + } + + Ok(()) + } +} + +impl AnalyzerDiagnostic { + /// Creates a diagnostic from a generic [Error] + pub fn from_error(error: Error) -> Self { + Self { + kind: DiagnosticKind::Raw(error), + } + } + + pub fn get_span(&self) -> Option { + match &self.kind { + DiagnosticKind::Rule(rule_diagnostic) => rule_diagnostic.span, + DiagnosticKind::Raw(error) => error.location().span, + } + } + + pub const fn is_raw(&self) -> bool { + matches!(self.kind, DiagnosticKind::Raw(_)) + } +} + +#[derive(Debug, Diagnostic, Clone)] +#[diagnostic(severity = Warning)] +pub struct SuppressionDiagnostic { + #[category] + category: &'static Category, + #[location(span)] + range: TextRange, + #[message] + #[description] + message: String, + #[tags] + tags: DiagnosticTags, +} + +impl SuppressionDiagnostic { + pub(crate) fn new( + category: &'static Category, + range: TextRange, + message: impl Display, + ) -> Self { + Self { + category, + range, + message: message.to_string(), + tags: DiagnosticTags::empty(), + } + } + + pub(crate) fn with_tags(mut self, tags: DiagnosticTags) -> Self { + self.tags |= tags; + self + } +} + diff --git a/crates/pg_analyse/src/lib.rs b/crates/pg_analyse/src/lib.rs index 87ed684bd..f9f216892 100644 --- a/crates/pg_analyse/src/lib.rs +++ 
b/crates/pg_analyse/src/lib.rs @@ -1,20 +1,25 @@ mod categories; mod context; mod filter; +pub mod macros; mod options; mod registry; mod rule; +// Re-exported for use in the `declare_group` macro +pub use pg_diagnostics::category_concat; + use registry::RegistryRuleParams; pub use crate::categories::{ ActionCategory, RefactorKind, RuleCategories, RuleCategoriesBuilder, RuleCategory, SourceActionKind, SUPPRESSION_ACTION_CATEGORY, }; +pub use crate::context::RuleContext; pub use crate::filter::{GroupKey, RuleKey}; pub use crate::options::{AnalyzerConfiguration, AnalyzerOptions, AnalyzerRules}; pub use crate::registry::{MetadataRegistry, RegistryVisitor, RuleRegistry, RuleRegistryBuilder}; -pub use crate::rule::{Rule, RuleDiagnostic}; +pub use crate::rule::{GroupCategory, Rule, RuleDiagnostic, RuleGroup, RuleMeta, RuleMetadata}; pub struct Analyzer<'analyzer> { /// Holds the metadata for all the rules statically known to the analyzer diff --git a/crates/pg_analyse/src/macros.rs b/crates/pg_analyse/src/macros.rs new file mode 100644 index 000000000..7d838981a --- /dev/null +++ b/crates/pg_analyse/src/macros.rs @@ -0,0 +1,122 @@ +use pg_diagnostics::Category; + +/// This macro is used to declare an analyzer rule type, and implement the +// [RuleMeta] trait for it +/// # Example +/// +/// The macro itself expect the following syntax: +/// +/// ```rust,ignore +///use pg_analyse::declare_rule; +/// +/// declare_lint_rule! { +/// /// Documentation +/// pub(crate) ExampleRule { +/// version: "1.0.0", +/// name: "rule-name", +/// recommended: false, +/// } +/// } +/// ``` +/// +/// Check [crate](module documentation) for a better +/// understanding of how the macro works +#[macro_export] +macro_rules! 
declare_lint_rule { + ( $( #[doc = $doc:literal] )+ $vis:vis $id:ident { + version: $version:literal, + name: $name:tt, + $( $key:ident: $value:expr, )* + } ) => { + + pg_analyse::declare_rule!( + $( #[doc = $doc] )* + $vis $id { + version: $version, + name: $name, + $( $key: $value, )* + } + ); + + // Declare a new `rule_category!` macro in the module context that + // expands to the category of this rule + // This is implemented by calling the `group_category!` macro from the + // parent module (that should be declared by a call to `declare_group!`) + // and providing it with the name of this rule as a string literal token + #[allow(unused_macros)] + macro_rules! rule_category { + () => { super::group_category!( $name ) }; + } + }; +} + +#[macro_export] +macro_rules! declare_rule { + ( $( #[doc = $doc:literal] )+ $vis:vis $id:ident { + version: $version:literal, + name: $name:tt, + $( $key:ident: $value:expr, )* + } ) => { + $( #[doc = $doc] )* + $vis enum $id {} + + impl $crate::RuleMeta for $id { + type Group = super::Group; + const METADATA: $crate::RuleMetadata = + $crate::RuleMetadata::new($version, $name, concat!( $( $doc, "\n", )* )) $( .$key($value) )*; + } + } +} + +/// This macro is used by the codegen script to declare an analyser rule group, +/// and implement the [RuleGroup] trait for it +#[macro_export] +macro_rules! declare_lint_group { + ( $vis:vis $id:ident { name: $name:tt, rules: [ $( $( $rule:ident )::* , )* ] } ) => { + $vis enum $id {} + + impl $crate::RuleGroup for $id { + type Category = super::Category; + + const NAME: &'static str = $name; + + fn record_rules(registry: &mut V) { + $( registry.record_rule::<$( $rule )::*>(); )* + } + } + + pub(self) use $id as Group; + + // Declare a `group_category!` macro in the context of this module (and + // all its children). This macro takes the name of a rule as a string + // literal token and expands to the category of the lint rule with this + // name within this group. 
+ // This is implemented by calling the `category_concat!` macro with the + // "lint" prefix, the name of this group, and the rule name argument + #[allow(unused_macros)] + macro_rules! group_category { + ( $rule_name:tt ) => { $crate::category_concat!( "lint", $name, $rule_name ) }; + } + + // Re-export the macro for child modules, so `declare_rule!` can access + // the category of its parent group by using the `super` module + pub(self) use group_category; + }; +} + +#[macro_export] +macro_rules! declare_category { + ( $vis:vis $id:ident { kind: $kind:ident, groups: [ $( $( $group:ident )::* , )* ] } ) => { + $vis enum $id {} + + impl $crate::GroupCategory for $id { + const CATEGORY: $crate::RuleCategory = $crate::RuleCategory::$kind; + + fn record_groups(registry: &mut V) { + $( registry.record_group::<$( $group )::*>(); )* + } + } + + pub(self) use $id as Category; + }; +} diff --git a/crates/pg_analyse/src/registry.rs b/crates/pg_analyse/src/registry.rs index e217aae94..1e8105604 100644 --- a/crates/pg_analyse/src/registry.rs +++ b/crates/pg_analyse/src/registry.rs @@ -1,6 +1,5 @@ use std::{borrow, collections::BTreeSet}; - use crate::{ context::RuleContext, filter::{AnalysisFilter, GroupKey, RuleKey}, diff --git a/crates/pg_linter/Cargo.toml b/crates/pg_linter/Cargo.toml new file mode 100644 index 000000000..bf9fe2ed4 --- /dev/null +++ b/crates/pg_linter/Cargo.toml @@ -0,0 +1,16 @@ + +[package] +authors.workspace = true +categories.workspace = true +description = "" +edition.workspace = true +homepage.workspace = true +keywords.workspace = true +license.workspace = true +name = "pg_linter" +repository.workspace = true +version = "0.0.0" + +[dependencies] +pg_analyse = { workspace = true } +pg_console = { workspace = true } diff --git a/crates/pg_linter/src/lib.rs b/crates/pg_linter/src/lib.rs new file mode 100644 index 000000000..35115db05 --- /dev/null +++ b/crates/pg_linter/src/lib.rs @@ -0,0 +1 @@ +mod lint; diff --git a/crates/pg_linter/src/lint.rs 
b/crates/pg_linter/src/lint.rs new file mode 100644 index 000000000..deb9acbbb --- /dev/null +++ b/crates/pg_linter/src/lint.rs @@ -0,0 +1,4 @@ +//! Generated file, do not edit by hand, see `xtask/codegen` + +pub mod performance; +::pg_analyse::declare_category! { pub Lint { kind : Lint , groups : [self :: performance :: Performance ,] } } diff --git a/crates/pg_linter/src/lint/performance.rs b/crates/pg_linter/src/lint/performance.rs new file mode 100644 index 000000000..1fcc4f57a --- /dev/null +++ b/crates/pg_linter/src/lint/performance.rs @@ -0,0 +1,14 @@ +//! Generated file, do not edit by hand, see `xtask/codegen` + +use pg_analyse::declare_lint_group; + +pub mod prefer_text_field; + +declare_lint_group! { + pub Performance { + name : "performance" , + rules : [ + self :: prefer_text_field :: PreferTextField , + ] + } +} diff --git a/crates/pg_linter/src/lint/performance/prefer_text_field.rs b/crates/pg_linter/src/lint/performance/prefer_text_field.rs new file mode 100644 index 000000000..bbd9588ca --- /dev/null +++ b/crates/pg_linter/src/lint/performance/prefer_text_field.rs @@ -0,0 +1,42 @@ +use pg_analyse::{Rule, RuleContext}; + +pg_analyse::declare_lint_rule! { + /// Changing the size of a varchar field requires an ACCESS EXCLUSIVE lock, that will prevent all reads and writes to the table. + /// + /// Use a text field with a CHECK CONSTRAINT makes it easier to change the max length. 
+ /// + /// ## Examples + /// + /// ### Invalid + /// + /// ```sql,expect_diagnostic + /// CREATE TABLE "app_user" ( + /// "id" serial NOT NULL PRIMARY KEY, + /// "email" varchar(100) NOT NULL + /// ); + /// ``` + /// + /// ### Valid + /// + /// ```sql + /// CREATE TABLE "app_user" ( + /// "id" serial NOT NULL PRIMARY KEY, + /// "email" TEXT NOT NULL + /// ); + /// ALTER TABLE "app_user" ADD CONSTRAINT "text_size" CHECK (LENGTH("email") <= 100); + /// ``` + /// + pub PreferTextField { + version: "0.0.1", + name: "prefer-text-field", + recommended: true, + } +} + +impl Rule for PreferTextField { + type Options = (); + + fn run(ctx: &RuleContext) -> Vec { + todo!() + } +} From 23f43bfbf1bcaf46ecc2f0da998100a5a94f31b4 Mon Sep 17 00:00:00 2001 From: psteinroe Date: Fri, 20 Dec 2024 16:34:11 +0100 Subject: [PATCH 07/27] feat: analyser --- Cargo.lock | 1 + crates/pg_analyse/src/lib.rs | 2 +- crates/pg_analyse/src/registry.rs | 17 ++++++++++++++++ crates/pg_linter/Cargo.toml | 5 +++-- crates/pg_linter/src/lib.rs | 34 +++++++++++++++++++++++++++++++ crates/pg_linter/src/options.rs | 6 ++++++ crates/pg_linter/src/registry.rs | 7 +++++++ 7 files changed, 69 insertions(+), 3 deletions(-) create mode 100644 crates/pg_linter/src/options.rs create mode 100644 crates/pg_linter/src/registry.rs diff --git a/Cargo.lock b/Cargo.lock index dc6adb026..bc87dfeed 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2462,6 +2462,7 @@ version = "0.0.0" dependencies = [ "pg_analyse", "pg_console", + "pg_query_ext", ] [[package]] diff --git a/crates/pg_analyse/src/lib.rs b/crates/pg_analyse/src/lib.rs index f9f216892..968c6192d 100644 --- a/crates/pg_analyse/src/lib.rs +++ b/crates/pg_analyse/src/lib.rs @@ -16,7 +16,7 @@ pub use crate::categories::{ SourceActionKind, SUPPRESSION_ACTION_CATEGORY, }; pub use crate::context::RuleContext; -pub use crate::filter::{GroupKey, RuleKey}; +pub use crate::filter::{AnalysisFilter, GroupKey, RuleKey}; pub use crate::options::{AnalyzerConfiguration, 
AnalyzerOptions, AnalyzerRules}; pub use crate::registry::{MetadataRegistry, RegistryVisitor, RuleRegistry, RuleRegistryBuilder}; pub use crate::rule::{GroupCategory, Rule, RuleDiagnostic, RuleGroup, RuleMeta, RuleMetadata}; diff --git a/crates/pg_analyse/src/registry.rs b/crates/pg_analyse/src/registry.rs index 1e8105604..207682ec7 100644 --- a/crates/pg_analyse/src/registry.rs +++ b/crates/pg_analyse/src/registry.rs @@ -144,6 +144,17 @@ pub struct RegistryRule { pub(crate) run: RuleExecutor, } +impl RuleRegistry { + pub fn builder<'a>(filter: &'a AnalysisFilter<'a>) -> RuleRegistryBuilder<'a> { + RuleRegistryBuilder { + filter, + registry: RuleRegistry { + rules: Default::default(), + }, + } + } +} + pub struct RegistryRuleParams<'analyzer> { pub root: &'analyzer pg_query_ext::NodeEnum, pub options: &'analyzer AnalyzerOptions, @@ -170,3 +181,9 @@ impl RegistryRule { Self { run: run:: } } } + +impl RuleRegistryBuilder<'_> { + pub fn build(self) -> RuleRegistry { + self.registry + } +} diff --git a/crates/pg_linter/Cargo.toml b/crates/pg_linter/Cargo.toml index bf9fe2ed4..0d1cc9710 100644 --- a/crates/pg_linter/Cargo.toml +++ b/crates/pg_linter/Cargo.toml @@ -12,5 +12,6 @@ repository.workspace = true version = "0.0.0" [dependencies] -pg_analyse = { workspace = true } -pg_console = { workspace = true } +pg_analyse = { workspace = true } +pg_console = { workspace = true } +pg_query_ext = { workspace = true } diff --git a/crates/pg_linter/src/lib.rs b/crates/pg_linter/src/lib.rs index 35115db05..43300a3e0 100644 --- a/crates/pg_linter/src/lib.rs +++ b/crates/pg_linter/src/lib.rs @@ -1 +1,35 @@ +use std::{ops::Deref, sync::LazyLock}; + +use pg_analyse::{AnalysisFilter, AnalyzerOptions, MetadataRegistry, RuleDiagnostic, RuleRegistry}; +use registry::visit_registry; + mod lint; +pub mod options; +mod registry; + +pub static METADATA: LazyLock = LazyLock::new(|| { + let mut metadata = MetadataRegistry::default(); + visit_registry(&mut metadata); + metadata +}); + +pub fn 
analyse<'a, B>( + root: &pg_query_ext::NodeEnum, + filter: AnalysisFilter, + options: &'a AnalyzerOptions, +) -> Vec +where + B: 'a, +{ + let mut builder = RuleRegistry::builder(&filter); + visit_registry(&mut builder); + let registry = builder.build(); + + let analyser = pg_analyse::Analyzer::new(METADATA.deref()); + + analyser.run(pg_analyse::AnalyzerContext { + root: root.clone(), + options, + registry, + }) +} diff --git a/crates/pg_linter/src/options.rs b/crates/pg_linter/src/options.rs new file mode 100644 index 000000000..b322850d8 --- /dev/null +++ b/crates/pg_linter/src/options.rs @@ -0,0 +1,6 @@ +//! Generated file, do not edit by hand, see `xtask/codegen` + +use crate::lint; + +pub type NoDescendingSpecificity = + ::Options; diff --git a/crates/pg_linter/src/registry.rs b/crates/pg_linter/src/registry.rs new file mode 100644 index 000000000..40a73ca1c --- /dev/null +++ b/crates/pg_linter/src/registry.rs @@ -0,0 +1,7 @@ +//! Generated file, do not edit by hand, see `xtask/codegen` + +use pg_analyse::RegistryVisitor; + +pub fn visit_registry(registry: &mut V) { + registry.record_category::(); +} From aa6bca3c6728d43de73a31b62c7a98c2abdab4d2 Mon Sep 17 00:00:00 2001 From: psteinroe Date: Fri, 20 Dec 2024 16:34:55 +0100 Subject: [PATCH 08/27] cleanup --- crates/pg_linter/src/lib.rs | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/crates/pg_linter/src/lib.rs b/crates/pg_linter/src/lib.rs index 43300a3e0..b9a3a9bde 100644 --- a/crates/pg_linter/src/lib.rs +++ b/crates/pg_linter/src/lib.rs @@ -13,14 +13,11 @@ pub static METADATA: LazyLock = LazyLock::new(|| { metadata }); -pub fn analyse<'a, B>( +pub fn analyse( root: &pg_query_ext::NodeEnum, filter: AnalysisFilter, - options: &'a AnalyzerOptions, -) -> Vec -where - B: 'a, -{ + options: &AnalyzerOptions, +) -> Vec { let mut builder = RuleRegistry::builder(&filter); visit_registry(&mut builder); let registry = builder.build(); From 469ea91b754fa4bad6b6380213648f54504886ce Mon Sep 17 
00:00:00 2001 From: psteinroe Date: Fri, 20 Dec 2024 17:15:56 +0100 Subject: [PATCH 09/27] feat: codegen new analyser rule --- Cargo.lock | 5 +- Cargo.toml | 1 + crates/pg_analyse/src/context.rs | 6 +- crates/pg_linter/src/lib.rs | 2 +- xtask/codegen/Cargo.toml | 5 +- .../codegen/src/generate_new_analyser_rule.rs | 173 ++++++++++++++++++ xtask/codegen/src/lib.rs | 14 ++ xtask/codegen/src/main.rs | 5 +- 8 files changed, 202 insertions(+), 9 deletions(-) create mode 100644 xtask/codegen/src/generate_new_analyser_rule.rs diff --git a/Cargo.lock b/Cargo.lock index bc87dfeed..6cf07ad07 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -500,9 +500,9 @@ dependencies = [ [[package]] name = "biome_string_case" -version = "0.5.7" +version = "0.5.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c28b4d0e08c2f13f1c9e0df4e7a8f9bfa03ef3803713d1bcd5110578cc5c67be" +checksum = "5868798da491b19a5b27a0bad5d8727e1e65060fa2dac360b382df00ff520774" [[package]] name = "biome_text_edit" @@ -4771,6 +4771,7 @@ dependencies = [ name = "xtask_codegen" version = "0.0.0" dependencies = [ + "biome_string_case", "bpaf", "xtask", ] diff --git a/Cargo.toml b/Cargo.toml index 0e27adb4f..b5b467386 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -17,6 +17,7 @@ rust-version = "1.82.0" anyhow = "1.0.92" biome_deserialize = "0.6.0" biome_deserialize_macros = "0.6.0" +biome_string_case = "0.5.8" bpaf = { version = "0.9.15", features = ["derive"] } crossbeam = "0.8.4" enumflags2 = "0.7.10" diff --git a/crates/pg_analyse/src/context.rs b/crates/pg_analyse/src/context.rs index 56a549784..ca526392d 100644 --- a/crates/pg_analyse/src/context.rs +++ b/crates/pg_analyse/src/context.rs @@ -38,9 +38,9 @@ where <::Category as GroupCategory>::CATEGORY } - /// Returns a clone of the AST root - pub fn stmt(&self) -> pg_query_ext::NodeEnum { - self.stmt.clone() + /// Returns the AST root + pub fn stmt(&self) -> &pg_query_ext::NodeEnum { + self.stmt } /// Returns the metadata of the rule diff --git 
a/crates/pg_linter/src/lib.rs b/crates/pg_linter/src/lib.rs index b9a3a9bde..75b4afbc6 100644 --- a/crates/pg_linter/src/lib.rs +++ b/crates/pg_linter/src/lib.rs @@ -13,7 +13,7 @@ pub static METADATA: LazyLock = LazyLock::new(|| { metadata }); -pub fn analyse( +pub fn lint( root: &pg_query_ext::NodeEnum, filter: AnalysisFilter, options: &AnalyzerOptions, diff --git a/xtask/codegen/Cargo.toml b/xtask/codegen/Cargo.toml index 7c5ba92b3..6fbdb6190 100644 --- a/xtask/codegen/Cargo.toml +++ b/xtask/codegen/Cargo.toml @@ -5,5 +5,6 @@ publish = false version = "0.0.0" [dependencies] -bpaf = { workspace = true, features = ["derive"] } -xtask = { path = '../', version = "0.0" } +biome_string_case = { workspace = true } +bpaf = { workspace = true, features = ["derive"] } +xtask = { path = '../', version = "0.0" } diff --git a/xtask/codegen/src/generate_new_analyser_rule.rs b/xtask/codegen/src/generate_new_analyser_rule.rs new file mode 100644 index 000000000..c69741df5 --- /dev/null +++ b/xtask/codegen/src/generate_new_analyser_rule.rs @@ -0,0 +1,173 @@ +use biome_string_case::Case; +use bpaf::Bpaf; +use std::str::FromStr; +use xtask::project_root; + +#[derive(Debug, Clone, Bpaf)] +pub enum Category { + /// Lint rules + Lint, +} + +impl FromStr for Category { + type Err = &'static str; + + fn from_str(s: &str) -> std::result::Result { + match s { + "lint" => Ok(Self::Lint), + _ => Err("Not supported"), + } + } +} + +fn generate_rule_template( + category: &Category, + rule_name_upper_camel: &str, + rule_name_lower_camel: &str, +) -> String { + let macro_name = match category { + Category::Lint => "declare_lint_rule", + }; + format!( + r#"use biome_analyze::{{ + context::RuleContext, {macro_name}, Rule, RuleDiagnostic, Ast +}}; +use biome_console::markup; +use biome_js_syntax::JsIdentifierBinding; +use biome_rowan::AstNode; + +{macro_name}! {{ + /// Succinct description of the rule. + /// + /// Put context and details about the rule. 
+ /// As a starting point, you can take the description of the corresponding _ESLint_ rule (if any). + /// + /// Try to stay consistent with the descriptions of implemented rules. + /// + /// ## Examples + /// + /// ### Invalid + /// + /// ```js,expect_diagnostic + /// var a = 1; + /// a = 2; + /// ``` + /// + /// ### Valid + /// + /// ```js + /// // var a = 1; + /// ``` + /// + pub {rule_name_upper_camel} {{ + version: "next", + name: "{rule_name_lower_camel}", + language: "js", + recommended: false, + }} +}} + +impl Rule for {rule_name_upper_camel} {{ + type Query = Ast; + type State = (); + type Signals = Option; + type Options = (); + + fn run(ctx: &RuleContext) -> Self::Signals {{ + let _binding = ctx.query(); + Some(()) + }} + + fn diagnostic(ctx: &RuleContext, _state: &Self::State) -> Option {{ + // + // Read our guidelines to write great diagnostics: + // https://docs.rs/biome_analyze/latest/biome_analyze/#what-a-rule-should-say-to-the-user + // + let node = ctx.query(); + Some( + RuleDiagnostic::new( + rule_category!(), + node.range(), + markup! {{ + "Variable is read here." + }}, + ) + .note(markup! {{ + "This note will give you more information." 
+ }}), + ) + }} +}} +"# + ) +} + +pub fn generate_new_analyser_rule(category: Category, rule_name: &str) { + let rule_name_camel = Case::Camel.convert(rule_name); + let crate_folder = project_root().join(format!("crates/pg_lint")); + let test_folder = crate_folder.join("tests/specs/nursery"); + let rule_folder = match &category { + Category::Lint => crate_folder.join("src/lint/nursery"), + }; + // Generate rule code + let code = generate_rule_template( + &category, + Case::Pascal.convert(rule_name).as_str(), + rule_name_camel.as_str(), + ); + if !rule_folder.exists() { + std::fs::create_dir(rule_folder.clone()).expect("To create the rule folder"); + } + let file_name = format!( + "{}/{}.rs", + rule_folder.display(), + Case::Snake.convert(rule_name) + ); + std::fs::write(file_name.clone(), code).unwrap_or_else(|_| panic!("To write {}", &file_name)); + + let categories_path = "crates/pg_diagnostics_categories/src/categories.rs"; + let mut categories = std::fs::read_to_string(categories_path).unwrap(); + + if !categories.contains(&rule_name_camel) { + let kebab_case_rule = Case::Kebab.convert(&rule_name_camel); + // We sort rules to reduce conflicts between contributions made in parallel. + let rule_line = match category { + Category::Lint => format!( + r#" "lint/nursery/{rule_name_camel}": "https://biomejs.dev/linter/rules/{kebab_case_rule}","# + ), + }; + let lint_start = match category { + Category::Lint => "define_categories! 
{\n", + }; + let lint_end = match category { + Category::Lint => "\n // end lint rules\n", + }; + debug_assert!(categories.contains(lint_start), "{}", lint_start); + debug_assert!(categories.contains(lint_end), "{}", lint_end); + let lint_start_index = categories.find(lint_start).unwrap() + lint_start.len(); + let lint_end_index = categories.find(lint_end).unwrap(); + let lint_rule_text = &categories[lint_start_index..lint_end_index]; + let mut lint_rules: Vec<_> = lint_rule_text.lines().chain(Some(&rule_line[..])).collect(); + lint_rules.sort_unstable(); + let new_lint_rule_text = lint_rules.join("\n"); + categories.replace_range(lint_start_index..lint_end_index, &new_lint_rule_text); + std::fs::write(categories_path, categories).unwrap(); + } + + // Generate test code + let tests_path = format!("{}/{rule_name_camel}", test_folder.display()); + let _ = std::fs::create_dir_all(tests_path); + + let test_file = format!("{}/{rule_name_camel}/valid.sql", test_folder.display()); + if std::fs::File::open(&test_file).is_err() { + let _ = std::fs::write( + test_file, + "/* should not generate diagnostics */\n// var a = 1;", + ); + } + + let test_file = format!("{}/{rule_name_camel}/invalid.sql", test_folder.display()); + if std::fs::File::open(&test_file).is_err() { + let _ = std::fs::write(test_file, "var a = 1;\na = 2;\na = 3;"); + } +} diff --git a/xtask/codegen/src/lib.rs b/xtask/codegen/src/lib.rs index d16bc644c..b4a4e2ea4 100644 --- a/xtask/codegen/src/lib.rs +++ b/xtask/codegen/src/lib.rs @@ -1,9 +1,12 @@ //! Codegen tools. 
Derived from Biome's codegen mod generate_crate; +mod generate_new_analyser_rule; pub use self::generate_crate::generate_crate; +pub use self::generate_new_analyser_rule::generate_new_analyser_rule; use bpaf::Bpaf; +use generate_new_analyser_rule::Category; #[derive(Debug, Clone, Bpaf)] #[bpaf(options)] @@ -15,4 +18,15 @@ pub enum TaskCommand { #[bpaf(long("name"), argument("STRING"))] name: String, }, + /// Creates a new lint rule + #[bpaf(command, long("new-lintrule"))] + NewRule { + /// Name of the rule + #[bpaf(long("name"))] + name: String, + + /// Name of the rule + #[bpaf(long("category"))] + category: Category, + }, } diff --git a/xtask/codegen/src/main.rs b/xtask/codegen/src/main.rs index 5d6246cc3..29e1363e1 100644 --- a/xtask/codegen/src/main.rs +++ b/xtask/codegen/src/main.rs @@ -1,6 +1,6 @@ use xtask::{project_root, pushd, Result}; -use xtask_codegen::{generate_crate, task_command, TaskCommand}; +use xtask_codegen::{generate_crate, generate_new_analyser_rule, task_command, TaskCommand}; fn main() -> Result<()> { let _d = pushd(project_root()); @@ -10,6 +10,9 @@ fn main() -> Result<()> { TaskCommand::NewCrate { name } => { generate_crate(name)?; } + TaskCommand::NewRule { name, category } => { + generate_new_analyser_rule(category, &name); + } } Ok(()) From fe7f2e078bed88191ee9d644cb6ff3e72073cff6 Mon Sep 17 00:00:00 2001 From: psteinroe Date: Fri, 20 Dec 2024 17:20:07 +0100 Subject: [PATCH 10/27] feat: codegen promote rule --- .../codegen/src/generate_new_analyser_rule.rs | 44 ++--------- xtask/codegen/src/lib.rs | 12 +++ xtask/codegen/src/main.rs | 7 +- xtask/codegen/src/promote_rule.rs | 79 +++++++++++++++++++ 4 files changed, 105 insertions(+), 37 deletions(-) create mode 100644 xtask/codegen/src/promote_rule.rs diff --git a/xtask/codegen/src/generate_new_analyser_rule.rs b/xtask/codegen/src/generate_new_analyser_rule.rs index c69741df5..022521dbc 100644 --- a/xtask/codegen/src/generate_new_analyser_rule.rs +++ 
b/xtask/codegen/src/generate_new_analyser_rule.rs @@ -29,12 +29,10 @@ fn generate_rule_template( Category::Lint => "declare_lint_rule", }; format!( - r#"use biome_analyze::{{ + r#"use pg_analyse::{{ context::RuleContext, {macro_name}, Rule, RuleDiagnostic, Ast }}; -use biome_console::markup; -use biome_js_syntax::JsIdentifierBinding; -use biome_rowan::AstNode; +use pg_console::markup; {macro_name}! {{ /// Succinct description of the rule. @@ -48,54 +46,28 @@ use biome_rowan::AstNode; /// /// ### Invalid /// - /// ```js,expect_diagnostic - /// var a = 1; - /// a = 2; + /// ```sql,expect_diagnostic + /// select 1; /// ``` /// /// ### Valid /// - /// ```js - /// // var a = 1; + /// ``sql` + /// select 2; /// ``` /// pub {rule_name_upper_camel} {{ version: "next", name: "{rule_name_lower_camel}", - language: "js", recommended: false, }} }} impl Rule for {rule_name_upper_camel} {{ - type Query = Ast; - type State = (); - type Signals = Option; type Options = (); - fn run(ctx: &RuleContext) -> Self::Signals {{ - let _binding = ctx.query(); - Some(()) - }} - - fn diagnostic(ctx: &RuleContext, _state: &Self::State) -> Option {{ - // - // Read our guidelines to write great diagnostics: - // https://docs.rs/biome_analyze/latest/biome_analyze/#what-a-rule-should-say-to-the-user - // - let node = ctx.query(); - Some( - RuleDiagnostic::new( - rule_category!(), - node.range(), - markup! {{ - "Variable is read here." - }}, - ) - .note(markup! {{ - "This note will give you more information." 
- }}), - ) + fn run(ctx: &RuleContext) -> Vec {{ + Vec::new() }} }} "# diff --git a/xtask/codegen/src/lib.rs b/xtask/codegen/src/lib.rs index b4a4e2ea4..ea8a96faa 100644 --- a/xtask/codegen/src/lib.rs +++ b/xtask/codegen/src/lib.rs @@ -2,9 +2,11 @@ mod generate_crate; mod generate_new_analyser_rule; +mod promote_rule; pub use self::generate_crate::generate_crate; pub use self::generate_new_analyser_rule::generate_new_analyser_rule; +pub use self::promote_rule::promote_rule; use bpaf::Bpaf; use generate_new_analyser_rule::Category; @@ -29,4 +31,14 @@ pub enum TaskCommand { #[bpaf(long("category"))] category: Category, }, + /// Promotes a nursery rule + #[bpaf(command, long("promote-rule"))] + PromoteRule { + /// Path of the rule + #[bpaf(long("name"), argument("STRING"))] + name: String, + /// Name of the rule + #[bpaf(long("group"), argument("STRING"))] + group: String, + }, } diff --git a/xtask/codegen/src/main.rs b/xtask/codegen/src/main.rs index 29e1363e1..0137fb047 100644 --- a/xtask/codegen/src/main.rs +++ b/xtask/codegen/src/main.rs @@ -1,6 +1,8 @@ use xtask::{project_root, pushd, Result}; -use xtask_codegen::{generate_crate, generate_new_analyser_rule, task_command, TaskCommand}; +use xtask_codegen::{ + generate_crate, generate_new_analyser_rule, promote_rule, task_command, TaskCommand, +}; fn main() -> Result<()> { let _d = pushd(project_root()); @@ -13,6 +15,9 @@ fn main() -> Result<()> { TaskCommand::NewRule { name, category } => { generate_new_analyser_rule(category, &name); } + TaskCommand::PromoteRule { name, group } => { + promote_rule(&name, &group); + } } Ok(()) diff --git a/xtask/codegen/src/promote_rule.rs b/xtask/codegen/src/promote_rule.rs new file mode 100644 index 000000000..f8112832f --- /dev/null +++ b/xtask/codegen/src/promote_rule.rs @@ -0,0 +1,79 @@ +use biome_string_case::Case; +use std::env; +use std::fs; +use std::path::PathBuf; + +const KNOWN_GROUPS: [&str; 1] = ["performance"]; + +const KNOWN_PATHS: &[&str] = &["crates/pg_lint"]; 
+pub fn promote_rule(rule_name: &str, new_group: &str) { + let current_dir = env::current_dir().ok().unwrap(); + + if !KNOWN_GROUPS.contains(&new_group) { + panic!( + "The group '{}' doesn't exist. Available groups: {}", + new_group, + KNOWN_GROUPS.join(", ") + ) + } + + let rule_name_snake = Case::Snake.convert(rule_name); + + // look for the rule in the source code + let mut rule_path = None; + let mut analyzers_path = None; + for known_path in KNOWN_PATHS { + let local_rule_path = current_dir + .join(known_path) + .join("src/lint/nursery") + .join(format!("{rule_name_snake}.rs")); + if local_rule_path.exists() { + rule_path = Some(local_rule_path); + analyzers_path = Some(PathBuf::from(known_path)); + break; + } + } + + if let (Some(rule_path), Some(analyzers_path)) = (rule_path, analyzers_path) { + // rule found! + let new_group_src_path = analyzers_path.join("src/lint").join(new_group); + let new_rule_path = new_group_src_path.join(format!("{rule_name_snake}.rs")); + let new_group_test_path = analyzers_path.join("tests/specs").join(new_group); + + let categories_path = "crates/pg_diagnostics_categories/src/categories.rs"; + let categories = std::fs::read_to_string(categories_path).unwrap(); + + let mut categories = categories.replace( + &format!("lint/nursery/{rule_name}"), + &format!("lint/{new_group}/{rule_name}"), + ); + + // We sort rules to reduce conflicts between contributions made in parallel. + let lint_start = "define_categories! 
{\n"; + let lint_end = "\n // end lint rules\n"; + debug_assert!(categories.contains(lint_start)); + debug_assert!(categories.contains(lint_end)); + let lint_start_index = categories.find(lint_start).unwrap() + lint_start.len(); + let lint_end_index = categories.find(lint_end).unwrap(); + let lint_rule_text = &categories[lint_start_index..lint_end_index]; + let mut lint_rules: Vec<_> = lint_rule_text.lines().collect(); + lint_rules.sort_unstable(); + let new_lint_rule_text = lint_rules.join("\n"); + categories.replace_range(lint_start_index..lint_end_index, &new_lint_rule_text); + + if !new_group_src_path.exists() { + fs::create_dir(&new_group_src_path).expect("To create the group source folder"); + } + fs::rename(&rule_path, &new_rule_path).expect("To move rule file"); + std::fs::write(categories_path, categories).unwrap(); + + if !new_group_test_path.exists() { + fs::create_dir(&new_group_test_path).expect("To create the group test folder"); + } + let old_test_path = analyzers_path.join("tests/specs/nursery").join(rule_name); + let new_test_path = new_group_test_path.join(rule_name); + fs::rename(old_test_path, new_test_path).expect("To move rule test folder"); + } else { + panic!("Couldn't find the rule {rule_name}"); + } +} From d33599dc357b55809331bdfc8506cf3a2071c231 Mon Sep 17 00:00:00 2001 From: psteinroe Date: Sat, 21 Dec 2024 14:55:55 +0100 Subject: [PATCH 11/27] add codegen for linter --- Cargo.lock | 3 + .../src/categories.rs | 9 +- crates/pg_linter/src/lint.rs | 3 +- crates/pg_linter/src/lint/nursery.rs | 5 + .../src/lint/nursery/ban_drop_column.rs | 38 +++ crates/pg_linter/src/lint/performance.rs | 11 +- .../src/lint/performance/prefer_text_field.rs | 2 +- crates/pg_linter/src/options.rs | 5 +- crates/pg_linter/src/registry.rs | 1 - .../specs/nursery/banDropColumn/invalid.sql | 1 + .../specs/nursery/banDropColumn/valid.sql | 2 + justfile | 24 +- xtask/codegen/Cargo.toml | 3 + xtask/codegen/src/generate_analyser.rs | 234 ++++++++++++++++++ 
.../codegen/src/generate_new_analyser_rule.rs | 11 +- xtask/codegen/src/lib.rs | 5 + xtask/codegen/src/main.rs | 6 +- 17 files changed, 328 insertions(+), 35 deletions(-) create mode 100644 crates/pg_linter/src/lint/nursery.rs create mode 100644 crates/pg_linter/src/lint/nursery/ban_drop_column.rs create mode 100644 crates/pg_linter/tests/specs/nursery/banDropColumn/invalid.sql create mode 100644 crates/pg_linter/tests/specs/nursery/banDropColumn/valid.sql create mode 100644 xtask/codegen/src/generate_analyser.rs diff --git a/Cargo.lock b/Cargo.lock index 6cf07ad07..6db8b24e2 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4771,8 +4771,11 @@ dependencies = [ name = "xtask_codegen" version = "0.0.0" dependencies = [ + "anyhow", "biome_string_case", "bpaf", + "proc-macro2", + "quote", "xtask", ] diff --git a/crates/pg_diagnostics_categories/src/categories.rs b/crates/pg_diagnostics_categories/src/categories.rs index 983406fdd..451117de0 100644 --- a/crates/pg_diagnostics_categories/src/categories.rs +++ b/crates/pg_diagnostics_categories/src/categories.rs @@ -13,8 +13,10 @@ // must be between `define_categories! {\n` and `\n ;\n`. define_categories! { - "somerule": "https://example.com/some-rule", + "lint/nursery/banDropColumn": "https://pglsp.dev/linter/rules/ban-drop-column", + // end lint rules ; + // General categories "stdin", "lint", "check", @@ -28,4 +30,9 @@ define_categories! { "internalError/panic", "syntax", "dummy", + + // Lint groups + "lint", + "lint/performance", + "lint/suspicious", } diff --git a/crates/pg_linter/src/lint.rs b/crates/pg_linter/src/lint.rs index deb9acbbb..35dd776b8 100644 --- a/crates/pg_linter/src/lint.rs +++ b/crates/pg_linter/src/lint.rs @@ -1,4 +1,5 @@ //! Generated file, do not edit by hand, see `xtask/codegen` +pub mod nursery; pub mod performance; -::pg_analyse::declare_category! { pub Lint { kind : Lint , groups : [self :: performance :: Performance ,] } } +::pg_analyse::declare_category! 
{ pub Lint { kind : Lint , groups : [self :: nursery :: Nursery , self :: performance :: Performance ,] } } diff --git a/crates/pg_linter/src/lint/nursery.rs b/crates/pg_linter/src/lint/nursery.rs new file mode 100644 index 000000000..a2fb62a80 --- /dev/null +++ b/crates/pg_linter/src/lint/nursery.rs @@ -0,0 +1,5 @@ +//! Generated file, do not edit by hand, see `xtask/codegen` + +use pg_analyse::declare_lint_group; +pub mod ban_drop_column; +declare_lint_group! { pub Nursery { name : "nursery" , rules : [self :: ban_drop_column :: BanDropColumn ,] } } diff --git a/crates/pg_linter/src/lint/nursery/ban_drop_column.rs b/crates/pg_linter/src/lint/nursery/ban_drop_column.rs new file mode 100644 index 000000000..9d95f7503 --- /dev/null +++ b/crates/pg_linter/src/lint/nursery/ban_drop_column.rs @@ -0,0 +1,38 @@ +use pg_analyse::{context::RuleContext, declare_lint_rule, Rule, RuleDiagnostic}; +use pg_console::markup; + +declare_lint_rule! { + /// Succinct description of the rule. + /// + /// Put context and details about the rule. + /// + /// Try to stay consistent with the descriptions of implemented rules. + /// + /// ## Examples + /// + /// ### Invalid + /// + /// ```sql,expect_diagnostic + /// select 1; + /// ``` + /// + /// ### Valid + /// + /// ``sql` + /// select 2; + /// ``` + /// + pub BanDropColumn { + version: "next", + name: "banDropColumn", + recommended: false, + } +} + +impl Rule for BanDropColumn { + type Options = (); + + fn run(ctx: &RuleContext) -> Vec { + Vec::new() + } +} diff --git a/crates/pg_linter/src/lint/performance.rs b/crates/pg_linter/src/lint/performance.rs index 1fcc4f57a..b019a62ba 100644 --- a/crates/pg_linter/src/lint/performance.rs +++ b/crates/pg_linter/src/lint/performance.rs @@ -1,14 +1,5 @@ //! Generated file, do not edit by hand, see `xtask/codegen` use pg_analyse::declare_lint_group; - pub mod prefer_text_field; - -declare_lint_group! 
{ - pub Performance { - name : "performance" , - rules : [ - self :: prefer_text_field :: PreferTextField , - ] - } -} +declare_lint_group! { pub Performance { name : "performance" , rules : [self :: prefer_text_field :: PreferTextField ,] } } diff --git a/crates/pg_linter/src/lint/performance/prefer_text_field.rs b/crates/pg_linter/src/lint/performance/prefer_text_field.rs index bbd9588ca..7912f1d27 100644 --- a/crates/pg_linter/src/lint/performance/prefer_text_field.rs +++ b/crates/pg_linter/src/lint/performance/prefer_text_field.rs @@ -28,7 +28,7 @@ pg_analyse::declare_lint_rule! { /// pub PreferTextField { version: "0.0.1", - name: "prefer-text-field", + name: "preferTextField", recommended: true, } } diff --git a/crates/pg_linter/src/options.rs b/crates/pg_linter/src/options.rs index b322850d8..8410d1af1 100644 --- a/crates/pg_linter/src/options.rs +++ b/crates/pg_linter/src/options.rs @@ -1,6 +1,7 @@ //! Generated file, do not edit by hand, see `xtask/codegen` use crate::lint; - -pub type NoDescendingSpecificity = +pub type BanDropColumn = + ::Options; +pub type PreferTextField = ::Options; diff --git a/crates/pg_linter/src/registry.rs b/crates/pg_linter/src/registry.rs index 40a73ca1c..27a5a4135 100644 --- a/crates/pg_linter/src/registry.rs +++ b/crates/pg_linter/src/registry.rs @@ -1,7 +1,6 @@ //! 
Generated file, do not edit by hand, see `xtask/codegen` use pg_analyse::RegistryVisitor; - pub fn visit_registry(registry: &mut V) { registry.record_category::(); } diff --git a/crates/pg_linter/tests/specs/nursery/banDropColumn/invalid.sql b/crates/pg_linter/tests/specs/nursery/banDropColumn/invalid.sql new file mode 100644 index 000000000..a14050d4b --- /dev/null +++ b/crates/pg_linter/tests/specs/nursery/banDropColumn/invalid.sql @@ -0,0 +1 @@ +select 2; \ No newline at end of file diff --git a/crates/pg_linter/tests/specs/nursery/banDropColumn/valid.sql b/crates/pg_linter/tests/specs/nursery/banDropColumn/valid.sql new file mode 100644 index 000000000..fa18a59c2 --- /dev/null +++ b/crates/pg_linter/tests/specs/nursery/banDropColumn/valid.sql @@ -0,0 +1,2 @@ +/* should not generate diagnostics */ +-- select 1; \ No newline at end of file diff --git a/justfile b/justfile index 401e2a6aa..d9adfbffc 100644 --- a/justfile +++ b/justfile @@ -31,23 +31,23 @@ upgrade-tools: # cargo codegen-bindings # Generates code generated files for the linter -# gen-lint: -# cargo run -p xtask_codegen -- analyzer -# cargo codegen-configuration -# cargo codegen-migrate -# just gen-bindings -# cargo run -p rules_check -# just format +gen-lint: + cargo run -p xtask_codegen -- analyser + # cargo codegen-configuration + # cargo codegen-migrate + # just gen-bindings + # cargo run -p rules_check + just format # Generates the linter documentation and Rust documentation # documentation: # RUSTDOCFLAGS='-D warnings' cargo documentation -# Creates a new lint rule in the given path, with the given name. Name has to be camel case. -# new-lintrule rulename: -# cargo run -p xtask_codegen -- new-lintrule --kind=js --category=lint --name={{rulename}} -# just gen-lint -# just documentation +# Creates a new lint rule in the given path, with the given name. Name has to be kebab case. 
+new-lintrule rulename: + cargo run -p xtask_codegen -- new-lintrule --category=lint --name={{rulename}} + just gen-lint + # just documentation # Creates a new lint rule in the given path, with the given name. Name has to be camel case. # new-assistrule rulename: diff --git a/xtask/codegen/Cargo.toml b/xtask/codegen/Cargo.toml index 6fbdb6190..ba7288b63 100644 --- a/xtask/codegen/Cargo.toml +++ b/xtask/codegen/Cargo.toml @@ -5,6 +5,9 @@ publish = false version = "0.0.0" [dependencies] +anyhow = { workspace = true } biome_string_case = { workspace = true } bpaf = { workspace = true, features = ["derive"] } +proc-macro2 = { workspace = true, features = ["span-locations"] } +quote = "1.0.36" xtask = { path = '../', version = "0.0" } diff --git a/xtask/codegen/src/generate_analyser.rs b/xtask/codegen/src/generate_analyser.rs new file mode 100644 index 000000000..6692094c8 --- /dev/null +++ b/xtask/codegen/src/generate_analyser.rs @@ -0,0 +1,234 @@ +use std::path::PathBuf; +use std::{collections::BTreeMap, path::Path}; + +use anyhow::{Context, Ok, Result}; +use biome_string_case::Case; +use proc_macro2::TokenStream; +use quote::{format_ident, quote}; +use xtask::{glue::fs2, project_root}; + +pub fn generate_analyser() -> Result<()> { + generate_linter()?; + Ok(()) +} + +fn generate_linter() -> Result<()> { + let base_path = project_root().join("crates/pg_linter/src"); + let mut analyzers = BTreeMap::new(); + generate_category("lint", &mut analyzers, &base_path)?; + + generate_options(&base_path)?; + + update_linter_registry_builder(analyzers) +} + +fn generate_options(base_path: &Path) -> Result<()> { + let mut rules_options = BTreeMap::new(); + let mut crates = vec![]; + for category in ["lint"] { + let category_path = base_path.join(category); + if !category_path.exists() { + continue; + } + let category_name = format_ident!("{}", filename(&category_path)?); + for group_path in list_entry_paths(&category_path)?.filter(|path| path.is_dir()) { + let group_name = 
format_ident!("{}", filename(&group_path)?.to_string()); + for rule_path in list_entry_paths(&group_path)?.filter(|path| !path.is_dir()) { + let rule_filename = filename(&rule_path)?; + let rule_name = Case::Pascal.convert(rule_filename); + let rule_module_name = format_ident!("{}", rule_filename); + let rule_name = format_ident!("{}", rule_name); + rules_options.insert(rule_filename.to_string(), quote! { + pub type #rule_name = <#category_name::#group_name::#rule_module_name::#rule_name as pg_analyse::Rule>::Options; + }); + } + } + if category == "lint" { + crates.push(quote! { + use crate::lint; + }) + } + } + let rules_options = rules_options.values(); + let tokens = xtask::reformat(quote! { + #( #crates )* + + #( #rules_options )* + })?; + fs2::write(base_path.join("options.rs"), tokens)?; + + Ok(()) +} + +fn generate_category( + name: &'static str, + entries: &mut BTreeMap<&'static str, TokenStream>, + base_path: &Path, +) -> Result<()> { + let path = base_path.join(name); + + let mut groups = BTreeMap::new(); + for entry in fs2::read_dir(path)? { + let entry = entry?; + if !entry.file_type()?.is_dir() { + continue; + } + + let entry = entry.path(); + let file_name = entry + .file_stem() + .context("path has no file name")? + .to_str() + .context("could not convert file name to string")?; + + generate_group(name, file_name, base_path)?; + + let module_name = format_ident!("{}", file_name); + let group_name = format_ident!("{}", Case::Pascal.convert(file_name)); + + groups.insert( + file_name.to_string(), + ( + quote! { + pub mod #module_name; + }, + quote! { + self::#module_name::#group_name + }, + ), + ); + } + + let key = name; + let module_name = format_ident!("{name}"); + + let category_name = Case::Pascal.convert(name); + let category_name = format_ident!("{category_name}"); + + let kind = match name { + "lint" => format_ident!("Lint"), + _ => panic!("unimplemented analyzer category {name:?}"), + }; + + entries.insert( + key, + quote! 
{ + registry.record_category::(); + }, + ); + + let (modules, paths): (Vec<_>, Vec<_>) = groups.into_values().unzip(); + let tokens = xtask::reformat(quote! { + #( #modules )* + ::pg_analyse::declare_category! { + pub #category_name { + kind: #kind, + groups: [ + #( #paths, )* + ] + } + } + })?; + + fs2::write(base_path.join(format!("{name}.rs")), tokens)?; + + Ok(()) +} + +fn generate_group(category: &'static str, group: &str, base_path: &Path) -> Result<()> { + let path = base_path.join(category).join(group); + + let mut rules = BTreeMap::new(); + for entry in fs2::read_dir(path)? { + let entry = entry?.path(); + let file_name = entry + .file_stem() + .context("path has no file name")? + .to_str() + .context("could not convert file name to string")?; + + let rule_type = Case::Pascal.convert(file_name); + + let key = rule_type.clone(); + let module_name = format_ident!("{}", file_name); + let rule_type = format_ident!("{}", rule_type); + + rules.insert( + key, + ( + quote! { + pub mod #module_name; + }, + quote! { + self::#module_name::#rule_type + }, + ), + ); + } + + let group_name = format_ident!("{}", Case::Pascal.convert(group)); + + let (rule_imports, rule_names): (Vec<_>, Vec<_>) = rules.into_values().unzip(); + + let (import_macro, use_macro) = match category { + "lint" => ( + quote!( + use pg_analyse::declare_lint_group; + ), + quote!(declare_lint_group), + ), + _ => panic!("Category not supported: {category}"), + }; + let tokens = xtask::reformat(quote! { + #import_macro + + #(#rule_imports)* + + #use_macro! { + pub #group_name { + name: #group, + rules: [ + #(#rule_names,)* + ] + } + } + })?; + + fs2::write(base_path.join(category).join(format!("{group}.rs")), tokens)?; + + Ok(()) +} + +fn update_linter_registry_builder(rules: BTreeMap<&'static str, TokenStream>) -> Result<()> { + let path = project_root().join("crates/pg_linter/src/registry.rs"); + + let categories = rules.into_values(); + + let tokens = xtask::reformat(quote! 
{ + use pg_analyse::RegistryVisitor; + + pub fn visit_registry(registry: &mut V) { + #( #categories )* + } + })?; + + fs2::write(path, tokens)?; + + Ok(()) +} + +/// Returns file paths of the given directory. +fn list_entry_paths(dir: &Path) -> Result> { + Ok(fs2::read_dir(dir) + .context("A directory is expected")? + .filter_map(|entry| entry.ok()) + .map(|entry| entry.path())) +} + +/// Returns filename if any. +fn filename(file: &Path) -> Result<&str> { + file.file_stem() + .context("path has no file name")? + .to_str() + .context("could not convert file name to string") +} diff --git a/xtask/codegen/src/generate_new_analyser_rule.rs b/xtask/codegen/src/generate_new_analyser_rule.rs index 022521dbc..9088ba136 100644 --- a/xtask/codegen/src/generate_new_analyser_rule.rs +++ b/xtask/codegen/src/generate_new_analyser_rule.rs @@ -30,7 +30,7 @@ fn generate_rule_template( }; format!( r#"use pg_analyse::{{ - context::RuleContext, {macro_name}, Rule, RuleDiagnostic, Ast + context::RuleContext, {macro_name}, Rule, RuleDiagnostic }}; use pg_console::markup; @@ -38,7 +38,6 @@ use pg_console::markup; /// Succinct description of the rule. /// /// Put context and details about the rule. - /// As a starting point, you can take the description of the corresponding _ESLint_ rule (if any). /// /// Try to stay consistent with the descriptions of implemented rules. 
/// @@ -76,7 +75,7 @@ impl Rule for {rule_name_upper_camel} {{ pub fn generate_new_analyser_rule(category: Category, rule_name: &str) { let rule_name_camel = Case::Camel.convert(rule_name); - let crate_folder = project_root().join(format!("crates/pg_lint")); + let crate_folder = project_root().join(format!("crates/pg_linter")); let test_folder = crate_folder.join("tests/specs/nursery"); let rule_folder = match &category { Category::Lint => crate_folder.join("src/lint/nursery"), @@ -105,7 +104,7 @@ pub fn generate_new_analyser_rule(category: Category, rule_name: &str) { // We sort rules to reduce conflicts between contributions made in parallel. let rule_line = match category { Category::Lint => format!( - r#" "lint/nursery/{rule_name_camel}": "https://biomejs.dev/linter/rules/{kebab_case_rule}","# + r#" "lint/nursery/{rule_name_camel}": "https://pglsp.dev/linter/rules/{kebab_case_rule}","# ), }; let lint_start = match category { @@ -134,12 +133,12 @@ pub fn generate_new_analyser_rule(category: Category, rule_name: &str) { if std::fs::File::open(&test_file).is_err() { let _ = std::fs::write( test_file, - "/* should not generate diagnostics */\n// var a = 1;", + "/* should not generate diagnostics */\n-- select 1;", ); } let test_file = format!("{}/{rule_name_camel}/invalid.sql", test_folder.display()); if std::fs::File::open(&test_file).is_err() { - let _ = std::fs::write(test_file, "var a = 1;\na = 2;\na = 3;"); + let _ = std::fs::write(test_file, "select 2;"); } } diff --git a/xtask/codegen/src/lib.rs b/xtask/codegen/src/lib.rs index ea8a96faa..a3f460e16 100644 --- a/xtask/codegen/src/lib.rs +++ b/xtask/codegen/src/lib.rs @@ -1,9 +1,11 @@ //! Codegen tools. 
Derived from Biome's codegen +mod generate_analyser; mod generate_crate; mod generate_new_analyser_rule; mod promote_rule; +pub use self::generate_analyser::generate_analyser; pub use self::generate_crate::generate_crate; pub use self::generate_new_analyser_rule::generate_new_analyser_rule; pub use self::promote_rule::promote_rule; @@ -13,6 +15,9 @@ use generate_new_analyser_rule::Category; #[derive(Debug, Clone, Bpaf)] #[bpaf(options)] pub enum TaskCommand { + /// Generate factory functions for the analyzer and the configuration of the analysers + #[bpaf(command)] + Analyser, /// Creates a new crate #[bpaf(command, long("new-crate"))] NewCrate { diff --git a/xtask/codegen/src/main.rs b/xtask/codegen/src/main.rs index 0137fb047..3ec90ebb4 100644 --- a/xtask/codegen/src/main.rs +++ b/xtask/codegen/src/main.rs @@ -1,7 +1,8 @@ use xtask::{project_root, pushd, Result}; use xtask_codegen::{ - generate_crate, generate_new_analyser_rule, promote_rule, task_command, TaskCommand, + generate_analyser, generate_crate, generate_new_analyser_rule, promote_rule, task_command, + TaskCommand, }; fn main() -> Result<()> { @@ -9,6 +10,9 @@ fn main() -> Result<()> { let result = task_command().fallback_to_usage().run(); match result { + TaskCommand::Analyser => { + generate_analyser()?; + } TaskCommand::NewCrate { name } => { generate_crate(name)?; } From 9b1493e6608d30ffe259d641afc9b13c6051eed4 Mon Sep 17 00:00:00 2001 From: psteinroe Date: Sat, 21 Dec 2024 18:06:00 +0100 Subject: [PATCH 12/27] finish codegen --- crates/pg_analyse/src/lib.rs | 3 +- crates/pg_analyse/src/rule.rs | 11 ++- .../src/categories.rs | 8 +- crates/pg_linter/src/lint.rs | 5 +- .../src/lint/nursery/ban_drop_column.rs | 38 --------- crates/pg_linter/src/lint/performance.rs | 5 -- .../src/lint/performance/prefer_text_field.rs | 42 ---------- .../src/lint/{nursery.rs => safety.rs} | 2 +- .../src/lint/safety/ban_drop_column.rs | 50 ++++++++++++ crates/pg_linter/src/options.rs | 4 +- 
.../specs/nursery/banDropColumn/invalid.sql | 1 - .../specs/nursery/banDropColumn/valid.sql | 2 - justfile | 15 +--- .../codegen/src/generate_new_analyser_rule.rs | 26 +----- xtask/codegen/src/lib.rs | 16 +--- xtask/codegen/src/main.rs | 14 ++-- xtask/codegen/src/promote_rule.rs | 79 ------------------- 17 files changed, 82 insertions(+), 239 deletions(-) delete mode 100644 crates/pg_linter/src/lint/nursery/ban_drop_column.rs delete mode 100644 crates/pg_linter/src/lint/performance.rs delete mode 100644 crates/pg_linter/src/lint/performance/prefer_text_field.rs rename crates/pg_linter/src/lint/{nursery.rs => safety.rs} (52%) create mode 100644 crates/pg_linter/src/lint/safety/ban_drop_column.rs delete mode 100644 crates/pg_linter/tests/specs/nursery/banDropColumn/invalid.sql delete mode 100644 crates/pg_linter/tests/specs/nursery/banDropColumn/valid.sql delete mode 100644 xtask/codegen/src/promote_rule.rs diff --git a/crates/pg_analyse/src/lib.rs b/crates/pg_analyse/src/lib.rs index 968c6192d..34b4bd7af 100644 --- a/crates/pg_analyse/src/lib.rs +++ b/crates/pg_analyse/src/lib.rs @@ -1,5 +1,5 @@ mod categories; -mod context; +pub mod context; mod filter; pub mod macros; mod options; @@ -15,7 +15,6 @@ pub use crate::categories::{ ActionCategory, RefactorKind, RuleCategories, RuleCategoriesBuilder, RuleCategory, SourceActionKind, SUPPRESSION_ACTION_CATEGORY, }; -pub use crate::context::RuleContext; pub use crate::filter::{AnalysisFilter, GroupKey, RuleKey}; pub use crate::options::{AnalyzerConfiguration, AnalyzerOptions, AnalyzerRules}; pub use crate::registry::{MetadataRegistry, RegistryVisitor, RuleRegistry, RuleRegistryBuilder}; diff --git a/crates/pg_analyse/src/rule.rs b/crates/pg_analyse/src/rule.rs index 336f2d437..e615ea77c 100644 --- a/crates/pg_analyse/src/rule.rs +++ b/crates/pg_analyse/src/rule.rs @@ -1,7 +1,6 @@ use pg_console::fmt::Display; use pg_console::{markup, MarkupBuf}; use pg_diagnostics::advice::CodeSuggestionAdvice; -use 
pg_diagnostics::location::AsSpan; use pg_diagnostics::{ Advices, Category, Diagnostic, DiagnosticTags, Location, LogCategory, MessageAndDescription, Visit, @@ -162,11 +161,11 @@ pub struct Detail { impl RuleDiagnostic { /// Creates a new [`RuleDiagnostic`] with a severity and title that will be /// used in a builder-like way to modify labels. - pub fn new(category: &'static Category, span: impl AsSpan, title: impl Display) -> Self { + pub fn new(category: &'static Category, span: Option, title: impl Display) -> Self { let message = markup!({ title }).to_owned(); Self { category, - span: span.as_span(), + span, message: MessageAndDescription::from(message), tags: DiagnosticTags::empty(), rule_advice: RuleAdvice::default(), @@ -200,17 +199,17 @@ impl RuleDiagnostic { /// Attaches a label to this [`RuleDiagnostic`]. /// /// The given span has to be in the file that was provided while creating this [`RuleDiagnostic`]. - pub fn label(mut self, span: impl AsSpan, msg: impl Display) -> Self { + pub fn label(mut self, span: Option, msg: impl Display) -> Self { self.rule_advice.details.push(Detail { log_category: LogCategory::Info, message: markup!({ msg }).to_owned(), - range: span.as_span(), + range: span, }); self } /// Attaches a detailed message to this [`RuleDiagnostic`]. - pub fn detail(self, span: impl AsSpan, msg: impl Display) -> Self { + pub fn detail(self, span: Option, msg: impl Display) -> Self { self.label(span, msg) } diff --git a/crates/pg_diagnostics_categories/src/categories.rs b/crates/pg_diagnostics_categories/src/categories.rs index 451117de0..12b25b732 100644 --- a/crates/pg_diagnostics_categories/src/categories.rs +++ b/crates/pg_diagnostics_categories/src/categories.rs @@ -13,12 +13,11 @@ // must be between `define_categories! {\n` and `\n ;\n`. define_categories! 
{ - "lint/nursery/banDropColumn": "https://pglsp.dev/linter/rules/ban-drop-column", + "lint/safety/banDropColumn": "https://pglsp.dev/linter/rules/ban-drop-column", // end lint rules ; // General categories "stdin", - "lint", "check", "configuration", "database/connection", @@ -31,8 +30,9 @@ define_categories! { "syntax", "dummy", - // Lint groups + // Lint groups start "lint", "lint/performance", - "lint/suspicious", + "lint/safety", + // Lint groups end } diff --git a/crates/pg_linter/src/lint.rs b/crates/pg_linter/src/lint.rs index 35dd776b8..0a2344ca2 100644 --- a/crates/pg_linter/src/lint.rs +++ b/crates/pg_linter/src/lint.rs @@ -1,5 +1,4 @@ //! Generated file, do not edit by hand, see `xtask/codegen` -pub mod nursery; -pub mod performance; -::pg_analyse::declare_category! { pub Lint { kind : Lint , groups : [self :: nursery :: Nursery , self :: performance :: Performance ,] } } +pub mod safety; +::pg_analyse::declare_category! { pub Lint { kind : Lint , groups : [self :: safety :: Safety ,] } } diff --git a/crates/pg_linter/src/lint/nursery/ban_drop_column.rs b/crates/pg_linter/src/lint/nursery/ban_drop_column.rs deleted file mode 100644 index 9d95f7503..000000000 --- a/crates/pg_linter/src/lint/nursery/ban_drop_column.rs +++ /dev/null @@ -1,38 +0,0 @@ -use pg_analyse::{context::RuleContext, declare_lint_rule, Rule, RuleDiagnostic}; -use pg_console::markup; - -declare_lint_rule! { - /// Succinct description of the rule. - /// - /// Put context and details about the rule. - /// - /// Try to stay consistent with the descriptions of implemented rules. 
- /// - /// ## Examples - /// - /// ### Invalid - /// - /// ```sql,expect_diagnostic - /// select 1; - /// ``` - /// - /// ### Valid - /// - /// ``sql` - /// select 2; - /// ``` - /// - pub BanDropColumn { - version: "next", - name: "banDropColumn", - recommended: false, - } -} - -impl Rule for BanDropColumn { - type Options = (); - - fn run(ctx: &RuleContext) -> Vec { - Vec::new() - } -} diff --git a/crates/pg_linter/src/lint/performance.rs b/crates/pg_linter/src/lint/performance.rs deleted file mode 100644 index b019a62ba..000000000 --- a/crates/pg_linter/src/lint/performance.rs +++ /dev/null @@ -1,5 +0,0 @@ -//! Generated file, do not edit by hand, see `xtask/codegen` - -use pg_analyse::declare_lint_group; -pub mod prefer_text_field; -declare_lint_group! { pub Performance { name : "performance" , rules : [self :: prefer_text_field :: PreferTextField ,] } } diff --git a/crates/pg_linter/src/lint/performance/prefer_text_field.rs b/crates/pg_linter/src/lint/performance/prefer_text_field.rs deleted file mode 100644 index 7912f1d27..000000000 --- a/crates/pg_linter/src/lint/performance/prefer_text_field.rs +++ /dev/null @@ -1,42 +0,0 @@ -use pg_analyse::{Rule, RuleContext}; - -pg_analyse::declare_lint_rule! { - /// Changing the size of a varchar field requires an ACCESS EXCLUSIVE lock, that will prevent all reads and writes to the table. - /// - /// Use a text field with a CHECK CONSTRAINT makes it easier to change the max length. 
- /// - /// ## Examples - /// - /// ### Invalid - /// - /// ```sql,expect_diagnostic - /// CREATE TABLE "app_user" ( - /// "id" serial NOT NULL PRIMARY KEY, - /// "email" varchar(100) NOT NULL - /// ); - /// ``` - /// - /// ### Valid - /// - /// ```sql - /// CREATE TABLE "app_user" ( - /// "id" serial NOT NULL PRIMARY KEY, - /// "email" TEXT NOT NULL - /// ); - /// ALTER TABLE "app_user" ADD CONSTRAINT "text_size" CHECK (LENGTH("email") <= 100); - /// ``` - /// - pub PreferTextField { - version: "0.0.1", - name: "preferTextField", - recommended: true, - } -} - -impl Rule for PreferTextField { - type Options = (); - - fn run(ctx: &RuleContext) -> Vec { - todo!() - } -} diff --git a/crates/pg_linter/src/lint/nursery.rs b/crates/pg_linter/src/lint/safety.rs similarity index 52% rename from crates/pg_linter/src/lint/nursery.rs rename to crates/pg_linter/src/lint/safety.rs index a2fb62a80..4d78797b5 100644 --- a/crates/pg_linter/src/lint/nursery.rs +++ b/crates/pg_linter/src/lint/safety.rs @@ -2,4 +2,4 @@ use pg_analyse::declare_lint_group; pub mod ban_drop_column; -declare_lint_group! { pub Nursery { name : "nursery" , rules : [self :: ban_drop_column :: BanDropColumn ,] } } +declare_lint_group! { pub Safety { name : "safety" , rules : [self :: ban_drop_column :: BanDropColumn ,] } } diff --git a/crates/pg_linter/src/lint/safety/ban_drop_column.rs b/crates/pg_linter/src/lint/safety/ban_drop_column.rs new file mode 100644 index 000000000..b0fb3d792 --- /dev/null +++ b/crates/pg_linter/src/lint/safety/ban_drop_column.rs @@ -0,0 +1,50 @@ +use pg_analyse::{context::RuleContext, declare_lint_rule, Rule, RuleDiagnostic}; +use pg_console::markup; + +declare_lint_rule! { + /// Dropping a column may break existing clients. + /// + /// Update your application code to no longer read or write the column. + /// + /// You can leave the column as nullable or delete the column once queries no longer select or modify the column. 
+ /// + /// ## Examples + /// + /// ### Invalid + /// + /// ```sql,expect_diagnostic + /// alter table test drop column id; + /// ``` + /// + pub BanDropColumn { + version: "next", + name: "banDropColumn", + recommended: true, + } +} + +impl Rule for BanDropColumn { + type Options = (); + + fn run(ctx: &RuleContext) -> Vec { + let mut diagnostics = Vec::new(); + + if let pg_query_ext::NodeEnum::AlterTableStmt(stmt) = &ctx.stmt() { + for cmd in &stmt.cmds { + if let Some(pg_query_ext::NodeEnum::AlterTableCmd(cmd)) = &cmd.node { + if cmd.subtype() == pg_query_ext::protobuf::AlterTableType::AtDropColumn { + diagnostics.push(RuleDiagnostic::new( + rule_category!(), + None, + markup! { + "Dropping a column may break existing clients." + }, + ).detail(None, "You can leave the column as nullable or delete the column once queries no longer select or modify the column.")); + } + } + } + } + + diagnostics + } +} diff --git a/crates/pg_linter/src/options.rs b/crates/pg_linter/src/options.rs index 8410d1af1..13e540681 100644 --- a/crates/pg_linter/src/options.rs +++ b/crates/pg_linter/src/options.rs @@ -2,6 +2,4 @@ use crate::lint; pub type BanDropColumn = - ::Options; -pub type PreferTextField = - ::Options; + ::Options; diff --git a/crates/pg_linter/tests/specs/nursery/banDropColumn/invalid.sql b/crates/pg_linter/tests/specs/nursery/banDropColumn/invalid.sql deleted file mode 100644 index a14050d4b..000000000 --- a/crates/pg_linter/tests/specs/nursery/banDropColumn/invalid.sql +++ /dev/null @@ -1 +0,0 @@ -select 2; \ No newline at end of file diff --git a/crates/pg_linter/tests/specs/nursery/banDropColumn/valid.sql b/crates/pg_linter/tests/specs/nursery/banDropColumn/valid.sql deleted file mode 100644 index fa18a59c2..000000000 --- a/crates/pg_linter/tests/specs/nursery/banDropColumn/valid.sql +++ /dev/null @@ -1,2 +0,0 @@ -/* should not generate diagnostics */ --- select 1; \ No newline at end of file diff --git a/justfile b/justfile index d9adfbffc..a2fec934d 100644 --- 
a/justfile +++ b/justfile @@ -43,9 +43,9 @@ gen-lint: # documentation: # RUSTDOCFLAGS='-D warnings' cargo documentation -# Creates a new lint rule in the given path, with the given name. Name has to be kebab case. -new-lintrule rulename: - cargo run -p xtask_codegen -- new-lintrule --category=lint --name={{rulename}} +# Creates a new lint rule in the given path, with the given name. Name has to be camel case. Group should be lowercase. +new-lintrule group rulename: + cargo run -p xtask_codegen -- new-lintrule --category=lint --name={{rulename}} --group={{group}} just gen-lint # just documentation @@ -55,15 +55,6 @@ new-lintrule rulename: # just gen-lint # just documentation -# Promotes a rule from the nursery group to a new group -# promote-rule rulename group: -# cargo run -p xtask_codegen -- promote-rule --name={{rulename}} --group={{group}} -# just gen-lint -# just documentation -# -cargo test -p pg_analyze -- {{snakecase(rulename)}} -# cargo insta accept - - # Format Rust files and TOML files format: cargo fmt diff --git a/xtask/codegen/src/generate_new_analyser_rule.rs b/xtask/codegen/src/generate_new_analyser_rule.rs index 9088ba136..67f6da030 100644 --- a/xtask/codegen/src/generate_new_analyser_rule.rs +++ b/xtask/codegen/src/generate_new_analyser_rule.rs @@ -73,12 +73,11 @@ impl Rule for {rule_name_upper_camel} {{ ) } -pub fn generate_new_analyser_rule(category: Category, rule_name: &str) { +pub fn generate_new_analyser_rule(category: Category, rule_name: &str, group: &str) { let rule_name_camel = Case::Camel.convert(rule_name); - let crate_folder = project_root().join(format!("crates/pg_linter")); - let test_folder = crate_folder.join("tests/specs/nursery"); + let crate_folder = project_root().join("crates/pg_linter"); let rule_folder = match &category { - Category::Lint => crate_folder.join("src/lint/nursery"), + Category::Lint => crate_folder.join(format!("src/lint/{group}")), }; // Generate rule code let code = generate_rule_template( @@ -104,7 +103,7 
@@ pub fn generate_new_analyser_rule(category: Category, rule_name: &str) { // We sort rules to reduce conflicts between contributions made in parallel. let rule_line = match category { Category::Lint => format!( - r#" "lint/nursery/{rule_name_camel}": "https://pglsp.dev/linter/rules/{kebab_case_rule}","# + r#" "lint/{group}/{rule_name_camel}": "https://pglsp.dev/linter/rules/{kebab_case_rule}","# ), }; let lint_start = match category { @@ -124,21 +123,4 @@ pub fn generate_new_analyser_rule(category: Category, rule_name: &str) { categories.replace_range(lint_start_index..lint_end_index, &new_lint_rule_text); std::fs::write(categories_path, categories).unwrap(); } - - // Generate test code - let tests_path = format!("{}/{rule_name_camel}", test_folder.display()); - let _ = std::fs::create_dir_all(tests_path); - - let test_file = format!("{}/{rule_name_camel}/valid.sql", test_folder.display()); - if std::fs::File::open(&test_file).is_err() { - let _ = std::fs::write( - test_file, - "/* should not generate diagnostics */\n-- select 1;", - ); - } - - let test_file = format!("{}/{rule_name_camel}/invalid.sql", test_folder.display()); - if std::fs::File::open(&test_file).is_err() { - let _ = std::fs::write(test_file, "select 2;"); - } } diff --git a/xtask/codegen/src/lib.rs b/xtask/codegen/src/lib.rs index a3f460e16..0af5dd2c8 100644 --- a/xtask/codegen/src/lib.rs +++ b/xtask/codegen/src/lib.rs @@ -3,12 +3,10 @@ mod generate_analyser; mod generate_crate; mod generate_new_analyser_rule; -mod promote_rule; pub use self::generate_analyser::generate_analyser; pub use self::generate_crate::generate_crate; pub use self::generate_new_analyser_rule::generate_new_analyser_rule; -pub use self::promote_rule::promote_rule; use bpaf::Bpaf; use generate_new_analyser_rule::Category; @@ -32,18 +30,12 @@ pub enum TaskCommand { #[bpaf(long("name"))] name: String, - /// Name of the rule + /// Category of the rule #[bpaf(long("category"))] category: Category, - }, - /// Promotes a nursery 
rule - #[bpaf(command, long("promote-rule"))] - PromoteRule { - /// Path of the rule - #[bpaf(long("name"), argument("STRING"))] - name: String, - /// Name of the rule - #[bpaf(long("group"), argument("STRING"))] + + /// Group of the rule + #[bpaf(long("group"))] group: String, }, } diff --git a/xtask/codegen/src/main.rs b/xtask/codegen/src/main.rs index 3ec90ebb4..78729485c 100644 --- a/xtask/codegen/src/main.rs +++ b/xtask/codegen/src/main.rs @@ -1,8 +1,7 @@ use xtask::{project_root, pushd, Result}; use xtask_codegen::{ - generate_analyser, generate_crate, generate_new_analyser_rule, promote_rule, task_command, - TaskCommand, + generate_analyser, generate_crate, generate_new_analyser_rule, task_command, TaskCommand, }; fn main() -> Result<()> { @@ -16,11 +15,12 @@ fn main() -> Result<()> { TaskCommand::NewCrate { name } => { generate_crate(name)?; } - TaskCommand::NewRule { name, category } => { - generate_new_analyser_rule(category, &name); - } - TaskCommand::PromoteRule { name, group } => { - promote_rule(&name, &group); + TaskCommand::NewRule { + name, + category, + group, + } => { + generate_new_analyser_rule(category, &name, &group); } } diff --git a/xtask/codegen/src/promote_rule.rs b/xtask/codegen/src/promote_rule.rs deleted file mode 100644 index f8112832f..000000000 --- a/xtask/codegen/src/promote_rule.rs +++ /dev/null @@ -1,79 +0,0 @@ -use biome_string_case::Case; -use std::env; -use std::fs; -use std::path::PathBuf; - -const KNOWN_GROUPS: [&str; 1] = ["performance"]; - -const KNOWN_PATHS: &[&str] = &["crates/pg_lint"]; -pub fn promote_rule(rule_name: &str, new_group: &str) { - let current_dir = env::current_dir().ok().unwrap(); - - if !KNOWN_GROUPS.contains(&new_group) { - panic!( - "The group '{}' doesn't exist. 
Available groups: {}", - new_group, - KNOWN_GROUPS.join(", ") - ) - } - - let rule_name_snake = Case::Snake.convert(rule_name); - - // look for the rule in the source code - let mut rule_path = None; - let mut analyzers_path = None; - for known_path in KNOWN_PATHS { - let local_rule_path = current_dir - .join(known_path) - .join("src/lint/nursery") - .join(format!("{rule_name_snake}.rs")); - if local_rule_path.exists() { - rule_path = Some(local_rule_path); - analyzers_path = Some(PathBuf::from(known_path)); - break; - } - } - - if let (Some(rule_path), Some(analyzers_path)) = (rule_path, analyzers_path) { - // rule found! - let new_group_src_path = analyzers_path.join("src/lint").join(new_group); - let new_rule_path = new_group_src_path.join(format!("{rule_name_snake}.rs")); - let new_group_test_path = analyzers_path.join("tests/specs").join(new_group); - - let categories_path = "crates/pg_diagnostics_categories/src/categories.rs"; - let categories = std::fs::read_to_string(categories_path).unwrap(); - - let mut categories = categories.replace( - &format!("lint/nursery/{rule_name}"), - &format!("lint/{new_group}/{rule_name}"), - ); - - // We sort rules to reduce conflicts between contributions made in parallel. - let lint_start = "define_categories! 
{\n"; - let lint_end = "\n // end lint rules\n"; - debug_assert!(categories.contains(lint_start)); - debug_assert!(categories.contains(lint_end)); - let lint_start_index = categories.find(lint_start).unwrap() + lint_start.len(); - let lint_end_index = categories.find(lint_end).unwrap(); - let lint_rule_text = &categories[lint_start_index..lint_end_index]; - let mut lint_rules: Vec<_> = lint_rule_text.lines().collect(); - lint_rules.sort_unstable(); - let new_lint_rule_text = lint_rules.join("\n"); - categories.replace_range(lint_start_index..lint_end_index, &new_lint_rule_text); - - if !new_group_src_path.exists() { - fs::create_dir(&new_group_src_path).expect("To create the group source folder"); - } - fs::rename(&rule_path, &new_rule_path).expect("To move rule file"); - std::fs::write(categories_path, categories).unwrap(); - - if !new_group_test_path.exists() { - fs::create_dir(&new_group_test_path).expect("To create the group test folder"); - } - let old_test_path = analyzers_path.join("tests/specs/nursery").join(rule_name); - let new_test_path = new_group_test_path.join(rule_name); - fs::rename(old_test_path, new_test_path).expect("To move rule test folder"); - } else { - panic!("Couldn't find the rule {rule_name}"); - } -} From 19a6214cb8154331ebaf4ac028322e3aeb0e4856 Mon Sep 17 00:00:00 2001 From: psteinroe Date: Sat, 21 Dec 2024 18:12:43 +0100 Subject: [PATCH 13/27] add rule source --- crates/pg_analyse/src/lib.rs | 4 +- crates/pg_analyse/src/rule.rs | 69 +++++++++++++++++++ .../src/lint/safety/ban_drop_column.rs | 3 +- 3 files changed, 74 insertions(+), 2 deletions(-) diff --git a/crates/pg_analyse/src/lib.rs b/crates/pg_analyse/src/lib.rs index 34b4bd7af..8da660764 100644 --- a/crates/pg_analyse/src/lib.rs +++ b/crates/pg_analyse/src/lib.rs @@ -18,7 +18,9 @@ pub use crate::categories::{ pub use crate::filter::{AnalysisFilter, GroupKey, RuleKey}; pub use crate::options::{AnalyzerConfiguration, AnalyzerOptions, AnalyzerRules}; pub use 
crate::registry::{MetadataRegistry, RegistryVisitor, RuleRegistry, RuleRegistryBuilder}; -pub use crate::rule::{GroupCategory, Rule, RuleDiagnostic, RuleGroup, RuleMeta, RuleMetadata}; +pub use crate::rule::{ + GroupCategory, Rule, RuleDiagnostic, RuleGroup, RuleMeta, RuleMetadata, RuleSource, +}; pub struct Analyzer<'analyzer> { /// Holds the metadata for all the rules statically known to the analyzer diff --git a/crates/pg_analyse/src/rule.rs b/crates/pg_analyse/src/rule.rs index e615ea77c..f159861db 100644 --- a/crates/pg_analyse/src/rule.rs +++ b/crates/pg_analyse/src/rule.rs @@ -5,6 +5,7 @@ use pg_diagnostics::{ Advices, Category, Diagnostic, DiagnosticTags, Location, LogCategory, MessageAndDescription, Visit, }; +use std::cmp::Ordering; use std::fmt::Debug; use text_size::TextRange; @@ -24,6 +25,8 @@ pub struct RuleMetadata { pub docs: &'static str, /// Whether a rule is recommended or not pub recommended: bool, + /// The source URL of the rule + pub sources: &'static [RuleSource], } impl RuleMetadata { @@ -33,6 +36,7 @@ impl RuleMetadata { version, name, docs, + sources: &[], recommended: false, } } @@ -46,6 +50,11 @@ impl RuleMetadata { self.deprecated = Some(deprecated); self } + + pub const fn sources(mut self, sources: &'static [RuleSource]) -> Self { + self.sources = sources; + self + } } pub trait RuleMeta { @@ -255,3 +264,63 @@ impl RuleDiagnostic { &self.rule_advice } } + +#[derive(Debug, Clone, Eq)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, schemars::JsonSchema))] +#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))] +pub enum RuleSource { + /// Rules from [Squawk](https://squawkhq.com) + Squawk(&'static str), +} + +impl PartialEq for RuleSource { + fn eq(&self, other: &Self) -> bool { + std::mem::discriminant(self) == std::mem::discriminant(other) + } +} + +impl std::fmt::Display for RuleSource { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::Squawk(_) => write!(f, "Squawk"), + 
} + } +} + +impl PartialOrd for RuleSource { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +impl Ord for RuleSource { + fn cmp(&self, other: &Self) -> Ordering { + let self_rule = self.as_rule_name(); + let other_rule = other.as_rule_name(); + self_rule.cmp(other_rule) + } +} + +impl RuleSource { + pub fn as_rule_name(&self) -> &'static str { + match self { + Self::Squawk(rule_name) => rule_name, + } + } + + pub fn to_namespaced_rule_name(&self) -> String { + match self { + Self::Squawk(rule_name) => format!("squawk/{rule_name}"), + } + } + + pub fn to_rule_url(&self) -> String { + match self { + Self::Squawk(rule_name) => format!("https://squawkhq.com/docs/{rule_name}"), + } + } + + pub fn as_url_and_rule_name(&self) -> (String, &'static str) { + (self.to_rule_url(), self.as_rule_name()) + } +} diff --git a/crates/pg_linter/src/lint/safety/ban_drop_column.rs b/crates/pg_linter/src/lint/safety/ban_drop_column.rs index b0fb3d792..fe081d00e 100644 --- a/crates/pg_linter/src/lint/safety/ban_drop_column.rs +++ b/crates/pg_linter/src/lint/safety/ban_drop_column.rs @@ -1,4 +1,4 @@ -use pg_analyse::{context::RuleContext, declare_lint_rule, Rule, RuleDiagnostic}; +use pg_analyse::{context::RuleContext, declare_lint_rule, Rule, RuleDiagnostic, RuleSource}; use pg_console::markup; declare_lint_rule! { @@ -20,6 +20,7 @@ declare_lint_rule! 
{ version: "next", name: "banDropColumn", recommended: true, + sources: &[RuleSource::Squawk("ban-drop-column")], } } From 2fc994f5aaa549fc28e75c6754ca240702557685 Mon Sep 17 00:00:00 2001 From: psteinroe Date: Sun, 22 Dec 2024 13:35:31 +0100 Subject: [PATCH 14/27] add codegen for configs --- Cargo.lock | 40 + crates/pg_analyse/src/lib.rs | 4 +- crates/pg_configuration/Cargo.toml | 3 + .../src/analyser/linter/mod.rs | 58 ++ .../src/analyser/linter/rules.rs | 236 ++++++ crates/pg_configuration/src/analyser/mod.rs | 389 +++++++++ crates/pg_configuration/src/generated.rs | 3 + .../pg_configuration/src/generated/linter.rs | 19 + crates/pg_configuration/src/lib.rs | 22 +- crates/pg_linter/src/lib.rs | 2 +- crates/pg_workspace_new/src/handlers/lint.rs | 19 + crates/pg_workspace_new/src/handlers/mod.rs | 1 + crates/pg_workspace_new/src/lib.rs | 1 + justfile | 8 +- xtask/codegen/Cargo.toml | 3 + xtask/codegen/src/generate_configuration.rs | 744 ++++++++++++++++++ xtask/codegen/src/lib.rs | 44 ++ xtask/codegen/src/main.rs | 7 +- 18 files changed, 1594 insertions(+), 9 deletions(-) create mode 100644 crates/pg_configuration/src/analyser/linter/mod.rs create mode 100644 crates/pg_configuration/src/analyser/linter/rules.rs create mode 100644 crates/pg_configuration/src/analyser/mod.rs create mode 100644 crates/pg_configuration/src/generated.rs create mode 100644 crates/pg_configuration/src/generated/linter.rs create mode 100644 crates/pg_workspace_new/src/handlers/lint.rs create mode 100644 crates/pg_workspace_new/src/handlers/mod.rs create mode 100644 xtask/codegen/src/generate_configuration.rs diff --git a/Cargo.lock b/Cargo.lock index 6db8b24e2..546950c33 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1298,6 +1298,15 @@ dependencies = [ "version_check", ] +[[package]] +name = "getopts" +version = "0.2.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14dbbfd5c71d70241ecf9e6f13737f7b5ce823821063188d7e46c41d371eebd5" +dependencies = [ + 
"unicode-width", +] + [[package]] name = "getrandom" version = "0.2.15" @@ -2306,8 +2315,11 @@ dependencies = [ "biome_deserialize", "biome_deserialize_macros", "bpaf", + "pg_analyse", "pg_console", "pg_diagnostics", + "pg_linter", + "rustc-hash 2.1.0", "schemars", "serde", "serde_json", @@ -2965,6 +2977,25 @@ dependencies = [ "thiserror 1.0.69", ] +[[package]] +name = "pulldown-cmark" +version = "0.12.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f86ba2052aebccc42cbbb3ed234b8b13ce76f75c3551a303cb2bcffcff12bb14" +dependencies = [ + "bitflags 2.6.0", + "getopts", + "memchr", + "pulldown-cmark-escape", + "unicase", +] + +[[package]] +name = "pulldown-cmark-escape" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "007d8adb5ddab6f8e3f491ac63566a7d5002cc7ed73901f72057943fa71ae1ae" + [[package]] name = "quick-junit" version = "0.5.1" @@ -4242,6 +4273,12 @@ version = "1.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" +[[package]] +name = "unicase" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e51b68083f157f853b6379db119d1c1be0e6e4dec98101079dec41f6f5cf6df" + [[package]] name = "unicode-bidi" version = "0.3.17" @@ -4774,7 +4811,10 @@ dependencies = [ "anyhow", "biome_string_case", "bpaf", + "pg_analyse", + "pg_linter", "proc-macro2", + "pulldown-cmark", "quote", "xtask", ] diff --git a/crates/pg_analyse/src/lib.rs b/crates/pg_analyse/src/lib.rs index 8da660764..44dd394eb 100644 --- a/crates/pg_analyse/src/lib.rs +++ b/crates/pg_analyse/src/lib.rs @@ -2,7 +2,7 @@ mod categories; pub mod context; mod filter; pub mod macros; -mod options; +pub mod options; mod registry; mod rule; @@ -15,7 +15,7 @@ pub use crate::categories::{ ActionCategory, RefactorKind, RuleCategories, RuleCategoriesBuilder, RuleCategory, SourceActionKind, 
SUPPRESSION_ACTION_CATEGORY, }; -pub use crate::filter::{AnalysisFilter, GroupKey, RuleKey}; +pub use crate::filter::{AnalysisFilter, GroupKey, RuleFilter, RuleKey}; pub use crate::options::{AnalyzerConfiguration, AnalyzerOptions, AnalyzerRules}; pub use crate::registry::{MetadataRegistry, RegistryVisitor, RuleRegistry, RuleRegistryBuilder}; pub use crate::rule::{ diff --git a/crates/pg_configuration/Cargo.toml b/crates/pg_configuration/Cargo.toml index 13139916b..c4a89a695 100644 --- a/crates/pg_configuration/Cargo.toml +++ b/crates/pg_configuration/Cargo.toml @@ -15,8 +15,11 @@ version = "0.0.0" biome_deserialize = { workspace = true } biome_deserialize_macros = { workspace = true } bpaf = { workspace = true } +pg_analyse = { workspace = true } pg_console = { workspace = true } pg_diagnostics = { workspace = true } +pg_linter = { workspace = true } +rustc-hash = { workspace = true } schemars = { workspace = true, features = ["indexmap1"], optional = true } serde = { workspace = true, features = ["derive"] } serde_json = { workspace = true, features = ["raw_value"] } diff --git a/crates/pg_configuration/src/analyser/linter/mod.rs b/crates/pg_configuration/src/analyser/linter/mod.rs new file mode 100644 index 000000000..20535a2e7 --- /dev/null +++ b/crates/pg_configuration/src/analyser/linter/mod.rs @@ -0,0 +1,58 @@ +mod rules; + +use biome_deserialize::StringSet; +use biome_deserialize_macros::{Merge, Partial}; +use bpaf::Bpaf; +pub use rules::*; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, Deserialize, Eq, Partial, PartialEq, Serialize)] +#[partial(derive(Bpaf, Clone, Eq, Merge, PartialEq))] +#[partial(cfg_attr(feature = "schema", derive(schemars::JsonSchema)))] +#[partial(serde(rename_all = "camelCase", default, deny_unknown_fields))] +pub struct LinterConfiguration { + /// if `false`, it disables the feature and the linter won't be executed. 
`true` by default + #[partial(bpaf(hide))] + pub enabled: bool, + + /// List of rules + #[partial(bpaf(pure(Default::default()), optional, hide))] + pub rules: Rules, + + /// A list of Unix shell style patterns. The formatter will ignore files/folders that will + /// match these patterns. + #[partial(bpaf(hide))] + pub ignore: StringSet, + + /// A list of Unix shell style patterns. The formatter will include files/folders that will + /// match these patterns. + #[partial(bpaf(hide))] + pub include: StringSet, +} + +impl LinterConfiguration { + pub const fn is_disabled(&self) -> bool { + !self.enabled + } +} + +impl Default for LinterConfiguration { + fn default() -> Self { + Self { + enabled: true, + rules: Default::default(), + ignore: Default::default(), + include: Default::default(), + } + } +} + +impl PartialLinterConfiguration { + pub const fn is_disabled(&self) -> bool { + matches!(self.enabled, Some(false)) + } + + pub fn get_rules(&self) -> Rules { + self.rules.clone().unwrap_or_default() + } +} diff --git a/crates/pg_configuration/src/analyser/linter/rules.rs b/crates/pg_configuration/src/analyser/linter/rules.rs new file mode 100644 index 000000000..918518e4f --- /dev/null +++ b/crates/pg_configuration/src/analyser/linter/rules.rs @@ -0,0 +1,236 @@ +//! 
Generated file, do not edit by hand, see `xtask/codegen` + +use crate::analyser::{RuleConfiguration, RulePlainConfiguration}; +use biome_deserialize_macros::Merge; +use pg_analyse::{options::RuleOptions, RuleFilter}; +use pg_diagnostics::{Category, Severity}; +use rustc_hash::FxHashSet; +#[cfg(feature = "schema")] +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; +#[derive( + Clone, + Copy, + Debug, + Eq, + Hash, + Merge, + Ord, + PartialEq, + PartialOrd, + serde :: Deserialize, + serde :: Serialize, +)] +#[cfg_attr(feature = "schema", derive(JsonSchema))] +#[serde(rename_all = "camelCase")] +pub enum RuleGroup { + Safety, +} +impl RuleGroup { + pub const fn as_str(self) -> &'static str { + match self { + Self::Safety => Safety::GROUP_NAME, + } + } +} +impl std::str::FromStr for RuleGroup { + type Err = &'static str; + fn from_str(s: &str) -> Result { + match s { + Safety::GROUP_NAME => Ok(Self::Safety), + _ => Err("This rule group doesn't exist."), + } + } +} +#[derive(Clone, Debug, Default, Deserialize, Eq, Merge, PartialEq, Serialize)] +#[cfg_attr(feature = "schema", derive(JsonSchema))] +#[serde(rename_all = "camelCase", deny_unknown_fields)] +pub struct Rules { + #[doc = r" It enables the lint rules recommended by Biome. `true` by default."] + #[serde(skip_serializing_if = "Option::is_none")] + pub recommended: Option, + #[doc = r" It enables ALL rules. 
The rules that belong to `nursery` won't be enabled."] + #[serde(skip_serializing_if = "Option::is_none")] + pub all: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub safety: Option, +} +impl Rules { + #[doc = r" Checks if the code coming from [pg_diagnostics::Diagnostic] corresponds to a rule."] + #[doc = r" Usually the code is built like {group}/{rule_name}"] + pub fn has_rule(group: RuleGroup, rule_name: &str) -> Option<&'static str> { + match group { + RuleGroup::Safety => Safety::has_rule(rule_name), + } + } + #[doc = r" Given a category coming from [Diagnostic](pg_diagnostics::Diagnostic), this function returns"] + #[doc = r" the [Severity](pg_diagnostics::Severity) associated to the rule, if the configuration changed it."] + #[doc = r" If the severity is off or not set, then the function returns the default severity of the rule:"] + #[doc = r" [Severity::Error] for recommended rules and [Severity::Warning] for other rules."] + #[doc = r""] + #[doc = r" If not, the function returns [None]."] + pub fn get_severity_from_code(&self, category: &Category) -> Option { + let mut split_code = category.name().split('/'); + let _lint = split_code.next(); + debug_assert_eq!(_lint, Some("lint")); + let group = ::from_str(split_code.next()?).ok()?; + let rule_name = split_code.next()?; + let rule_name = Self::has_rule(group, rule_name)?; + let severity = match group { + RuleGroup::Safety => self + .safety + .as_ref() + .and_then(|group| group.get_rule_configuration(rule_name)) + .filter(|(level, _)| !matches!(level, RulePlainConfiguration::Off)) + .map_or_else( + || { + if Safety::is_recommended_rule(rule_name) { + Severity::Error + } else { + Severity::Warning + } + }, + |(level, _)| level.into(), + ), + }; + Some(severity) + } + #[doc = r" Ensure that `recommended` is set to `true` or implied."] + pub fn set_recommended(&mut self) { + if self.all != Some(true) && self.recommended == Some(false) { + self.recommended = Some(true) + } + if let Some(group) 
= &mut self.safety { + group.recommended = None; + } + } + pub(crate) const fn is_recommended_false(&self) -> bool { + matches!(self.recommended, Some(false)) + } + pub(crate) const fn is_all_true(&self) -> bool { + matches!(self.all, Some(true)) + } + #[doc = r" It returns the enabled rules by default."] + #[doc = r""] + #[doc = r" The enabled rules are calculated from the difference with the disabled rules."] + pub fn as_enabled_rules(&self) -> FxHashSet> { + let mut enabled_rules = FxHashSet::default(); + let mut disabled_rules = FxHashSet::default(); + if let Some(group) = self.safety.as_ref() { + group.collect_preset_rules( + self.is_all_true(), + !self.is_recommended_false(), + &mut enabled_rules, + ); + enabled_rules.extend(&group.get_enabled_rules()); + disabled_rules.extend(&group.get_disabled_rules()); + } else if self.is_all_true() { + enabled_rules.extend(Safety::all_rules_as_filters()); + } else if !self.is_recommended_false() { + enabled_rules.extend(Safety::recommended_rules_as_filters()); + } + enabled_rules.difference(&disabled_rules).copied().collect() + } +} +#[derive(Clone, Debug, Default, Deserialize, Eq, Merge, PartialEq, Serialize)] +#[cfg_attr(feature = "schema", derive(JsonSchema))] +#[serde(rename_all = "camelCase", default, deny_unknown_fields)] +#[doc = r" A list of rules that belong to this group"] +pub struct Safety { + #[doc = r" It enables the recommended rules for this group"] + #[serde(skip_serializing_if = "Option::is_none")] + pub recommended: Option, + #[doc = r" It enables ALL rules for this group."] + #[serde(skip_serializing_if = "Option::is_none")] + pub all: Option, + #[doc = "Dropping a column may break existing clients."] + #[serde(skip_serializing_if = "Option::is_none")] + pub ban_drop_column: Option>, +} +impl Safety { + const GROUP_NAME: &'static str = "safety"; + pub(crate) const GROUP_RULES: &'static [&'static str] = &["banDropColumn"]; + const RECOMMENDED_RULES: &'static [&'static str] = &["banDropColumn"]; + const 
RECOMMENDED_RULES_AS_FILTERS: &'static [RuleFilter<'static>] = + &[RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[0])]; + const ALL_RULES_AS_FILTERS: &'static [RuleFilter<'static>] = + &[RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[0])]; + #[doc = r" Retrieves the recommended rules"] + pub(crate) fn is_recommended_true(&self) -> bool { + matches!(self.recommended, Some(true)) + } + pub(crate) fn is_recommended_unset(&self) -> bool { + self.recommended.is_none() + } + pub(crate) fn is_all_true(&self) -> bool { + matches!(self.all, Some(true)) + } + pub(crate) fn is_all_unset(&self) -> bool { + self.all.is_none() + } + pub(crate) fn get_enabled_rules(&self) -> FxHashSet> { + let mut index_set = FxHashSet::default(); + if let Some(rule) = self.ban_drop_column.as_ref() { + if rule.is_enabled() { + index_set.insert(RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[0])); + } + } + index_set + } + pub(crate) fn get_disabled_rules(&self) -> FxHashSet> { + let mut index_set = FxHashSet::default(); + if let Some(rule) = self.ban_drop_column.as_ref() { + if rule.is_disabled() { + index_set.insert(RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[0])); + } + } + index_set + } + #[doc = r" Checks if, given a rule name, matches one of the rules contained in this category"] + pub(crate) fn has_rule(rule_name: &str) -> Option<&'static str> { + Some(Self::GROUP_RULES[Self::GROUP_RULES.binary_search(&rule_name).ok()?]) + } + #[doc = r" Checks if, given a rule name, it is marked as recommended"] + pub(crate) fn is_recommended_rule(rule_name: &str) -> bool { + Self::RECOMMENDED_RULES.contains(&rule_name) + } + pub(crate) fn recommended_rules_as_filters() -> &'static [RuleFilter<'static>] { + Self::RECOMMENDED_RULES_AS_FILTERS + } + pub(crate) fn all_rules_as_filters() -> &'static [RuleFilter<'static>] { + Self::ALL_RULES_AS_FILTERS + } + #[doc = r" Select preset rules"] + pub(crate) fn collect_preset_rules( + &self, + parent_is_all: bool, + parent_is_recommended: 
bool, + enabled_rules: &mut FxHashSet>, + ) { + if self.is_all_true() || self.is_all_unset() && parent_is_all { + enabled_rules.extend(Self::all_rules_as_filters()); + } else if self.is_recommended_true() + || self.is_recommended_unset() && self.is_all_unset() && parent_is_recommended + { + enabled_rules.extend(Self::recommended_rules_as_filters()); + } + } + pub(crate) fn get_rule_configuration( + &self, + rule_name: &str, + ) -> Option<(RulePlainConfiguration, Option)> { + match rule_name { + "banDropColumn" => self + .ban_drop_column + .as_ref() + .map(|conf| (conf.level(), conf.get_options())), + _ => None, + } + } +} +#[test] +fn test_order() { + for items in Safety::GROUP_RULES.windows(2) { + assert!(items[0] < items[1], "{} < {}", items[0], items[1]); + } +} diff --git a/crates/pg_configuration/src/analyser/mod.rs b/crates/pg_configuration/src/analyser/mod.rs new file mode 100644 index 000000000..2273eff07 --- /dev/null +++ b/crates/pg_configuration/src/analyser/mod.rs @@ -0,0 +1,389 @@ +pub mod linter; + +pub use crate::analyser::linter::*; +use biome_deserialize::Merge; +use biome_deserialize_macros::Deserializable; +use pg_analyse::options::RuleOptions; +use pg_analyse::RuleFilter; +use pg_diagnostics::Severity; +#[cfg(feature = "schema")] +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; +use std::str::FromStr; + +#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)] +#[cfg_attr(feature = "schema", derive(JsonSchema))] +#[serde(rename_all = "camelCase", deny_unknown_fields, untagged)] +pub enum RuleConfiguration { + Plain(RulePlainConfiguration), + WithOptions(RuleWithOptions), +} +impl RuleConfiguration { + pub fn is_disabled(&self) -> bool { + matches!(self.level(), RulePlainConfiguration::Off) + } + pub fn is_enabled(&self) -> bool { + !self.is_disabled() + } + pub fn level(&self) -> RulePlainConfiguration { + match self { + Self::Plain(plain) => *plain, + Self::WithOptions(options) => options.level, + } + } + pub fn 
set_level(&mut self, level: RulePlainConfiguration) { + match self { + Self::Plain(plain) => *plain = level, + Self::WithOptions(options) => options.level = level, + } + } +} +// Rule configuration has a custom [Merge] implementation so that overriding the +// severity doesn't override the options. +impl Merge for RuleConfiguration { + fn merge_with(&mut self, other: Self) { + match self { + Self::Plain(_) => *self = other, + Self::WithOptions(this) => match other { + Self::Plain(level) => { + this.level = level; + } + Self::WithOptions(other) => { + this.merge_with(other); + } + }, + } + } +} +impl RuleConfiguration { + pub fn get_options(&self) -> Option { + match self { + Self::Plain(_) => None, + Self::WithOptions(options) => Some(RuleOptions::new(options.options.clone())), + } + } +} +impl Default for RuleConfiguration { + fn default() -> Self { + Self::Plain(RulePlainConfiguration::Error) + } +} + +#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)] +#[cfg_attr(feature = "schema", derive(JsonSchema))] +#[serde(rename_all = "camelCase", deny_unknown_fields, untagged)] +pub enum RuleFixConfiguration { + Plain(RulePlainConfiguration), + WithOptions(RuleWithFixOptions), +} +impl Default for RuleFixConfiguration { + fn default() -> Self { + Self::Plain(RulePlainConfiguration::Error) + } +} +impl RuleFixConfiguration { + pub fn is_disabled(&self) -> bool { + matches!(self.level(), RulePlainConfiguration::Off) + } + pub fn is_enabled(&self) -> bool { + !self.is_disabled() + } + pub fn level(&self) -> RulePlainConfiguration { + match self { + Self::Plain(plain) => *plain, + Self::WithOptions(options) => options.level, + } + } + pub fn set_level(&mut self, level: RulePlainConfiguration) { + match self { + Self::Plain(plain) => *plain = level, + Self::WithOptions(options) => options.level = level, + } + } +} +// Rule configuration has a custom [Merge] implementation so that overriding the +// severity doesn't override the options. 
+impl Merge for RuleFixConfiguration { + fn merge_with(&mut self, other: Self) { + match self { + Self::Plain(_) => *self = other, + Self::WithOptions(this) => match other { + Self::Plain(level) => { + this.level = level; + } + Self::WithOptions(other) => { + this.merge_with(other); + } + }, + } + } +} +impl RuleFixConfiguration { + pub fn get_options(&self) -> Option { + match self { + Self::Plain(_) => None, + Self::WithOptions(options) => Some(RuleOptions::new(options.options.clone())), + } + } +} +impl From<&RuleConfiguration> for Severity { + fn from(conf: &RuleConfiguration) -> Self { + match conf { + RuleConfiguration::Plain(p) => (*p).into(), + RuleConfiguration::WithOptions(conf) => { + let level = &conf.level; + (*level).into() + } + } + } +} +impl From for Severity { + fn from(conf: RulePlainConfiguration) -> Self { + match conf { + RulePlainConfiguration::Warn => Severity::Warning, + RulePlainConfiguration::Error => Severity::Error, + RulePlainConfiguration::Info => Severity::Information, + RulePlainConfiguration::Off => { + unreachable!("the rule is turned off, it should not step in here") + } + } + } +} +impl From for Severity { + fn from(conf: RuleAssistPlainConfiguration) -> Self { + match conf { + RuleAssistPlainConfiguration::On => Severity::Hint, + RuleAssistPlainConfiguration::Off => { + unreachable!("the rule is turned off, it should not step in here") + } + } + } +} + +#[derive(Clone, Copy, Debug, Default, Deserialize, Deserializable, Eq, PartialEq, Serialize)] +#[cfg_attr(feature = "schema", derive(JsonSchema))] +#[serde(rename_all = "camelCase")] +pub enum RulePlainConfiguration { + #[default] + Warn, + Error, + Info, + Off, +} + +#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)] +#[cfg_attr(feature = "schema", derive(JsonSchema))] +#[serde(rename_all = "camelCase", deny_unknown_fields, untagged)] +pub enum RuleAssistConfiguration { + Plain(RuleAssistPlainConfiguration), + WithOptions(RuleAssistWithOptions), +} +impl 
RuleAssistConfiguration { + pub fn is_disabled(&self) -> bool { + matches!(self.level(), RuleAssistPlainConfiguration::Off) + } + pub fn is_enabled(&self) -> bool { + !self.is_disabled() + } + pub fn level(&self) -> RuleAssistPlainConfiguration { + match self { + Self::Plain(plain) => *plain, + Self::WithOptions(options) => options.level, + } + } + pub fn set_level(&mut self, level: RuleAssistPlainConfiguration) { + match self { + Self::Plain(plain) => *plain = level, + Self::WithOptions(options) => options.level = level, + } + } +} +// Rule configuration has a custom [Merge] implementation so that overriding the +// severity doesn't override the options. +impl Merge for RuleAssistConfiguration { + fn merge_with(&mut self, other: Self) { + match self { + Self::Plain(_) => *self = other, + Self::WithOptions(this) => match other { + Self::Plain(level) => { + this.level = level; + } + Self::WithOptions(other) => { + this.merge_with(other); + } + }, + } + } +} +impl RuleAssistConfiguration { + pub fn get_options(&self) -> Option { + match self { + Self::Plain(_) => None, + Self::WithOptions(options) => Some(RuleOptions::new(options.options.clone())), + } + } +} +impl Default for RuleAssistConfiguration { + fn default() -> Self { + Self::Plain(RuleAssistPlainConfiguration::Off) + } +} + +#[derive(Clone, Copy, Debug, Default, Deserialize, Deserializable, Eq, PartialEq, Serialize)] +#[cfg_attr(feature = "schema", derive(JsonSchema))] +#[serde(rename_all = "camelCase")] +pub enum RuleAssistPlainConfiguration { + #[default] + On, + Off, +} +impl RuleAssistPlainConfiguration { + pub const fn is_enabled(&self) -> bool { + matches!(self, Self::On) + } + + pub const fn is_disabled(&self) -> bool { + matches!(self, Self::Off) + } +} +impl Merge for RuleAssistPlainConfiguration { + fn merge_with(&mut self, other: Self) { + *self = other; + } +} + +#[derive(Clone, Debug, Default, Deserialize, Deserializable, Eq, PartialEq, Serialize)] +#[cfg_attr(feature = "schema", 
derive(JsonSchema))] +#[serde(rename_all = "camelCase", deny_unknown_fields)] +pub struct RuleAssistWithOptions { + /// The severity of the emitted diagnostics by the rule + pub level: RuleAssistPlainConfiguration, + /// Rule's options + pub options: T, +} +impl Merge for RuleAssistWithOptions { + fn merge_with(&mut self, other: Self) { + self.level = other.level; + self.options = other.options; + } +} + +#[derive(Clone, Debug, Default, Deserialize, Deserializable, Eq, PartialEq, Serialize)] +#[cfg_attr(feature = "schema", derive(JsonSchema))] +#[serde(rename_all = "camelCase", deny_unknown_fields)] +pub struct RuleWithOptions { + /// The severity of the emitted diagnostics by the rule + pub level: RulePlainConfiguration, + /// Rule's options + pub options: T, +} +impl Merge for RuleWithOptions { + fn merge_with(&mut self, other: Self) { + self.level = other.level; + self.options = other.options; + } +} + +#[derive(Clone, Debug, Default, Deserialize, Deserializable, Eq, PartialEq, Serialize)] +#[cfg_attr(feature = "schema", derive(JsonSchema))] +#[serde(rename_all = "camelCase", deny_unknown_fields)] +pub struct RuleWithFixOptions { + /// The severity of the emitted diagnostics by the rule + pub level: RulePlainConfiguration, + /// Rule's options + pub options: T, +} + +impl Merge for RuleWithFixOptions { + fn merge_with(&mut self, other: Self) { + self.level = other.level; + self.options = other.options; + } +} + +#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] +pub enum RuleSelector { + Group(linter::RuleGroup), + Rule(linter::RuleGroup, &'static str), +} + +impl From for RuleFilter<'static> { + fn from(value: RuleSelector) -> Self { + match value { + RuleSelector::Group(group) => RuleFilter::Group(group.as_str()), + RuleSelector::Rule(group, name) => RuleFilter::Rule(group.as_str(), name), + } + } +} + +impl<'a> From<&'a RuleSelector> for RuleFilter<'static> { + fn from(value: &'a RuleSelector) -> Self { + match value { + RuleSelector::Group(group) => 
RuleFilter::Group(group.as_str()), + RuleSelector::Rule(group, name) => RuleFilter::Rule(group.as_str(), name), + } + } +} + +impl FromStr for RuleSelector { + type Err = &'static str; + fn from_str(selector: &str) -> Result { + let selector = selector.strip_prefix("lint/").unwrap_or(selector); + if let Some((group_name, rule_name)) = selector.split_once('/') { + let group = linter::RuleGroup::from_str(group_name)?; + if let Some(rule_name) = Rules::has_rule(group, rule_name) { + Ok(RuleSelector::Rule(group, rule_name)) + } else { + Err("This rule doesn't exist.") + } + } else { + match linter::RuleGroup::from_str(selector) { + Ok(group) => Ok(RuleSelector::Group(group)), + Err(_) => Err( + "This group doesn't exist. Use the syntax `/` to specify a rule.", + ), + } + } + } +} + +impl serde::Serialize for RuleSelector { + fn serialize(&self, serializer: S) -> Result { + match self { + RuleSelector::Group(group) => serializer.serialize_str(group.as_str()), + RuleSelector::Rule(group, rule_name) => { + let group_name = group.as_str(); + serializer.serialize_str(&format!("{group_name}/{rule_name}")) + } + } + } +} + +impl<'de> serde::Deserialize<'de> for RuleSelector { + fn deserialize>(deserializer: D) -> Result { + struct Visitor; + impl<'de> serde::de::Visitor<'de> for Visitor { + type Value = RuleSelector; + fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { + formatter.write_str("/") + } + fn visit_str(self, v: &str) -> Result { + match RuleSelector::from_str(v) { + Ok(result) => Ok(result), + Err(error) => Err(serde::de::Error::custom(error)), + } + } + } + deserializer.deserialize_str(Visitor) + } +} + +#[cfg(feature = "schema")] +impl schemars::JsonSchema for RuleSelector { + fn schema_name() -> String { + "RuleCode".to_string() + } + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + String::json_schema(gen) + } +} diff --git a/crates/pg_configuration/src/generated.rs 
b/crates/pg_configuration/src/generated.rs new file mode 100644 index 000000000..3bae7b808 --- /dev/null +++ b/crates/pg_configuration/src/generated.rs @@ -0,0 +1,3 @@ +mod linter; + +pub use linter::push_to_analyser_rules; diff --git a/crates/pg_configuration/src/generated/linter.rs b/crates/pg_configuration/src/generated/linter.rs new file mode 100644 index 000000000..324fe0635 --- /dev/null +++ b/crates/pg_configuration/src/generated/linter.rs @@ -0,0 +1,19 @@ +//! Generated file, do not edit by hand, see `xtask/codegen` + +use crate::analyser::linter::*; +use pg_analyse::{AnalyzerRules, MetadataRegistry}; +pub fn push_to_analyser_rules( + rules: &Rules, + metadata: &MetadataRegistry, + analyser_rules: &mut AnalyzerRules, +) { + if let Some(rules) = rules.safety.as_ref() { + for rule_name in Safety::GROUP_RULES { + if let Some((_, Some(rule_options))) = rules.get_rule_configuration(rule_name) { + if let Some(rule_key) = metadata.find_rule("safety", rule_name) { + analyser_rules.push_rule(rule_key, rule_options); + } + } + } + } +} diff --git a/crates/pg_configuration/src/lib.rs b/crates/pg_configuration/src/lib.rs index 4a089e59d..54ee39f75 100644 --- a/crates/pg_configuration/src/lib.rs +++ b/crates/pg_configuration/src/lib.rs @@ -1,18 +1,26 @@ -//! This module contains the configuration of `pg.json` +//! This module contains the configuration of `pglsp.toml` //! //! The configuration is divided by "tool", and then it's possible to further customise it //! by language. The language might further options divided by tool. 
+pub mod analyser; pub mod database; pub mod diagnostics; pub mod files; +pub mod generated; pub mod vcs; pub use crate::diagnostics::ConfigurationDiagnostic; use std::path::PathBuf; +pub use crate::generated::push_to_analyser_rules; use crate::vcs::{partial_vcs_configuration, PartialVcsConfiguration, VcsConfiguration}; +pub use analyser::{ + partial_linter_configuration, LinterConfiguration, PartialLinterConfiguration, + RuleConfiguration, RuleFixConfiguration, RulePlainConfiguration, RuleWithFixOptions, + RuleWithOptions, Rules, +}; use biome_deserialize_macros::Partial; use bpaf::Bpaf; use database::{ @@ -44,6 +52,10 @@ pub struct Configuration { )] pub files: FilesConfiguration, + /// The configuration for the linter + #[partial(type, bpaf(external(partial_linter_configuration), optional))] + pub linter: LinterConfiguration, + /// The configuration of the database connection #[partial( type, @@ -66,6 +78,14 @@ impl PartialConfiguration { use_ignore_file: Some(false), ..Default::default() }), + linter: Some(PartialLinterConfiguration { + enabled: Some(true), + rules: Some(Rules { + recommended: Some(true), + ..Default::default() + }), + ..Default::default() + }), db: Some(PartialDatabaseConfiguration { host: Some("127.0.0.1".to_string()), port: Some(5432), diff --git a/crates/pg_linter/src/lib.rs b/crates/pg_linter/src/lib.rs index 75b4afbc6..5abc05fd1 100644 --- a/crates/pg_linter/src/lib.rs +++ b/crates/pg_linter/src/lib.rs @@ -1,7 +1,7 @@ use std::{ops::Deref, sync::LazyLock}; use pg_analyse::{AnalysisFilter, AnalyzerOptions, MetadataRegistry, RuleDiagnostic, RuleRegistry}; -use registry::visit_registry; +pub use registry::visit_registry; mod lint; pub mod options; diff --git a/crates/pg_workspace_new/src/handlers/lint.rs b/crates/pg_workspace_new/src/handlers/lint.rs new file mode 100644 index 000000000..3c45891a0 --- /dev/null +++ b/crates/pg_workspace_new/src/handlers/lint.rs @@ -0,0 +1,19 @@ +use pg_fs::PgLspPath; + +#[derive(Debug)] +pub(crate) struct 
LintParams<'a> { + pub(crate) stmt: &'a pg_query_ext::NodeEnum, + pub(crate) workspace: &'a WorkspaceSettingsHandle<'a>, + pub(crate) max_diagnostics: u32, + pub(crate) path: &'a PgLspPath, + pub(crate) only: Vec, + pub(crate) skip: Vec, + pub(crate) categories: RuleCategories, + pub(crate) suppression_reason: Option, +} + +pub(crate) struct LintResults { + pub(crate) diagnostics: Vec, + pub(crate) errors: usize, + pub(crate) skipped_diagnostics: u32, +} diff --git a/crates/pg_workspace_new/src/handlers/mod.rs b/crates/pg_workspace_new/src/handlers/mod.rs new file mode 100644 index 000000000..e7034df69 --- /dev/null +++ b/crates/pg_workspace_new/src/handlers/mod.rs @@ -0,0 +1 @@ +pub mod lint; diff --git a/crates/pg_workspace_new/src/lib.rs b/crates/pg_workspace_new/src/lib.rs index 9467d1fb4..6ba1ca4c2 100644 --- a/crates/pg_workspace_new/src/lib.rs +++ b/crates/pg_workspace_new/src/lib.rs @@ -6,6 +6,7 @@ use pg_fs::{FileSystem, OsFileSystem}; pub mod configuration; pub mod diagnostics; pub mod dome; +pub mod handlers; pub mod matcher; pub mod settings; pub mod workspace; diff --git a/justfile b/justfile index a2fec934d..78029b20d 100644 --- a/justfile +++ b/justfile @@ -18,9 +18,9 @@ upgrade-tools: cargo binstall cargo-insta taplo-cli --force # Generate all files across crates and tools. You rarely want to use it locally. 
-# gen-all: -# cargo run -p xtask_codegen -- all -# cargo codegen-configuration +gen-all: + cargo run -p xtask_codegen -- all + # cargo codegen-configuration # cargo codegen-migrate # just gen-bindings # just format @@ -33,7 +33,7 @@ upgrade-tools: # Generates code generated files for the linter gen-lint: cargo run -p xtask_codegen -- analyser - # cargo codegen-configuration + cargo run -p xtask_codegen -- configuration # cargo codegen-migrate # just gen-bindings # cargo run -p rules_check diff --git a/xtask/codegen/Cargo.toml b/xtask/codegen/Cargo.toml index ba7288b63..17938fef0 100644 --- a/xtask/codegen/Cargo.toml +++ b/xtask/codegen/Cargo.toml @@ -8,6 +8,9 @@ version = "0.0.0" anyhow = { workspace = true } biome_string_case = { workspace = true } bpaf = { workspace = true, features = ["derive"] } +pg_analyse = { workspace = true } +pg_linter = { workspace = true } proc-macro2 = { workspace = true, features = ["span-locations"] } +pulldown-cmark = { version = "0.12.2" } quote = "1.0.36" xtask = { path = '../', version = "0.0" } diff --git a/xtask/codegen/src/generate_configuration.rs b/xtask/codegen/src/generate_configuration.rs new file mode 100644 index 000000000..5eb67a9aa --- /dev/null +++ b/xtask/codegen/src/generate_configuration.rs @@ -0,0 +1,744 @@ +use crate::{to_capitalized, update}; +use biome_string_case::Case; +use pg_analyse::{GroupCategory, RegistryVisitor, Rule, RuleCategory, RuleGroup, RuleMetadata}; +use proc_macro2::{Ident, Literal, Span, TokenStream}; +use pulldown_cmark::{Event, Parser, Tag, TagEnd}; +use quote::quote; +use std::collections::BTreeMap; +use std::path::Path; +use xtask::*; + +#[derive(Default)] +struct LintRulesVisitor { + groups: BTreeMap<&'static str, BTreeMap<&'static str, RuleMetadata>>, +} + +impl RegistryVisitor for LintRulesVisitor { + fn record_category(&mut self) { + if matches!(C::CATEGORY, RuleCategory::Lint) { + C::record_groups(self); + } + } + + fn record_rule(&mut self) + where + R: Rule + 'static, + { + 
self.groups + .entry(::NAME) + .or_default() + .insert(R::METADATA.name, R::METADATA); + } +} + +pub fn generate_rules_configuration(mode: Mode) -> Result<()> { + let linter_config_root = project_root().join("crates/pg_configuration/src/analyser/linter"); + let push_rules_directory = project_root().join("crates/pg_configuration/src/generated"); + + let mut lint_visitor = LintRulesVisitor::default(); + pg_linter::visit_registry(&mut lint_visitor); + + generate_for_groups( + lint_visitor.groups, + linter_config_root.as_path(), + push_rules_directory.as_path(), + &mode, + RuleCategory::Lint, + )?; + Ok(()) +} + +fn generate_for_groups( + groups: BTreeMap<&'static str, BTreeMap<&'static str, RuleMetadata>>, + root: &Path, + push_directory: &Path, + mode: &Mode, + kind: RuleCategory, +) -> Result<()> { + let mut struct_groups = Vec::with_capacity(groups.len()); + let mut group_pascal_idents = Vec::with_capacity(groups.len()); + let mut group_idents = Vec::with_capacity(groups.len()); + let mut group_strings = Vec::with_capacity(groups.len()); + let mut group_as_default_rules = Vec::with_capacity(groups.len()); + for (group, rules) in groups { + let group_pascal_ident = quote::format_ident!("{}", &Case::Pascal.convert(group)); + let group_ident = quote::format_ident!("{}", group); + + let (global_all, global_recommended) = { + ( + quote! { self.is_all_true() }, + quote! { !self.is_recommended_false() }, + ) + }; + group_as_default_rules.push(if kind == RuleCategory::Lint { + quote! { + if let Some(group) = self.#group_ident.as_ref() { + group.collect_preset_rules( + #global_all, + #global_recommended, + &mut enabled_rules, + ); + enabled_rules.extend(&group.get_enabled_rules()); + disabled_rules.extend(&group.get_disabled_rules()); + } else if #global_all { + enabled_rules.extend(#group_pascal_ident::all_rules_as_filters()); + } else if #global_recommended { + enabled_rules.extend(#group_pascal_ident::recommended_rules_as_filters()); + } + } + } else { + quote! 
{ + if let Some(group) = self.#group_ident.as_ref() { + enabled_rules.extend(&group.get_enabled_rules()); + } + } + }); + + group_pascal_idents.push(group_pascal_ident); + group_idents.push(group_ident); + group_strings.push(Literal::string(group)); + struct_groups.push(generate_group_struct(group, &rules, kind)); + } + + let severity_fn = if kind == RuleCategory::Action { + quote! { + /// Given a category coming from [Diagnostic](pg_diagnostics::Diagnostic), this function returns + /// the [Severity](pg_diagnostics::Severity) associated to the rule, if the configuration changed it. + /// If the severity is off or not set, then the function returns the default severity of the rule: + /// [Severity::Error] for recommended rules and [Severity::Warning] for other rules. + /// + /// If not, the function returns [None]. + pub fn get_severity_from_code(&self, category: &Category) -> Option { + let mut split_code = category.name().split('/'); + + let _lint = split_code.next(); + debug_assert_eq!(_lint, Some("assists")); + + let group = ::from_str(split_code.next()?).ok()?; + let rule_name = split_code.next()?; + let rule_name = Self::has_rule(group, rule_name)?; + match group { + #( + RuleGroup::#group_pascal_idents => self + .#group_idents + .as_ref() + .and_then(|group| group.get_rule_configuration(rule_name)) + .filter(|(level, _)| !matches!(level, RuleAssistPlainConfiguration::Off)) + .map(|(level, _)| level.into()) + )* + } + } + + } + } else { + quote! { + + /// Given a category coming from [Diagnostic](pg_diagnostics::Diagnostic), this function returns + /// the [Severity](pg_diagnostics::Severity) associated to the rule, if the configuration changed it. + /// If the severity is off or not set, then the function returns the default severity of the rule: + /// [Severity::Error] for recommended rules and [Severity::Warning] for other rules. + /// + /// If not, the function returns [None]. 
+ pub fn get_severity_from_code(&self, category: &Category) -> Option { + let mut split_code = category.name().split('/'); + + let _lint = split_code.next(); + debug_assert_eq!(_lint, Some("lint")); + + let group = ::from_str(split_code.next()?).ok()?; + let rule_name = split_code.next()?; + let rule_name = Self::has_rule(group, rule_name)?; + let severity = match group { + #( + RuleGroup::#group_pascal_idents => self + .#group_idents + .as_ref() + .and_then(|group| group.get_rule_configuration(rule_name)) + .filter(|(level, _)| !matches!(level, RulePlainConfiguration::Off)) + .map_or_else(|| { + if #group_pascal_idents::is_recommended_rule(rule_name) { + Severity::Error + } else { + Severity::Warning + } + }, |(level, _)| level.into()), + )* + }; + Some(severity) + } + + } + }; + + let use_rule_configuration = if kind == RuleCategory::Action { + quote! { + use crate::analyser::{RuleAssistConfiguration, RuleAssistPlainConfiguration}; + use pg_analyse::{options::RuleOptions, RuleFilter}; + } + } else { + quote! { + use crate::analyser::{RuleConfiguration, RulePlainConfiguration}; + use pg_analyse::{options::RuleOptions, RuleFilter}; + } + }; + + let groups = if kind == RuleCategory::Action { + quote! 
{ + #use_rule_configuration + use biome_deserialize_macros::Merge; + use pg_diagnostics::{Category, Severity}; + use rustc_hash::FxHashSet; + use serde::{Deserialize, Serialize}; + #[cfg(feature = "schema")] + use schemars::JsonSchema; + + #[derive(Clone, Copy, Debug, Eq, Hash, Merge, Ord, PartialEq, PartialOrd, serde::Deserialize, serde::Serialize)] + #[cfg_attr(feature = "schema", derive(JsonSchema))] + #[serde(rename_all = "camelCase")] + pub enum RuleGroup { + #( #group_pascal_idents ),* + } + impl RuleGroup { + pub const fn as_str(self) -> &'static str { + match self { + #( Self::#group_pascal_idents => #group_pascal_idents::GROUP_NAME, )* + } + } + } + impl std::str::FromStr for RuleGroup { + type Err = &'static str; + fn from_str(s: &str) -> Result { + match s { + #( #group_pascal_idents::GROUP_NAME => Ok(Self::#group_pascal_idents), )* + _ => Err("This rule group doesn't exist.") + } + } + } + + #[derive(Clone, Debug, Default, Deserialize, Eq, Merge, PartialEq, Serialize)] + #[cfg_attr(feature = "schema", derive(JsonSchema))] + #[serde(rename_all = "camelCase", deny_unknown_fields)] + pub struct Actions { + #( + #[serde(skip_serializing_if = "Option::is_none")] + pub #group_idents: Option<#group_pascal_idents>, + )* + } + + impl Actions { + /// Checks if the code coming from [pg_diagnostics::Diagnostic] corresponds to a rule. + /// Usually the code is built like {group}/{rule_name} + pub fn has_rule( + group: RuleGroup, + rule_name: &str, + ) -> Option<&'static str> { + match group { + #( + RuleGroup::#group_pascal_idents => #group_pascal_idents::has_rule(rule_name), + )* + } + } + + #severity_fn + + /// It returns the enabled rules by default. + /// + /// The enabled rules are calculated from the difference with the disabled rules. 
+ pub fn as_enabled_rules(&self) -> FxHashSet> { + let mut enabled_rules = FxHashSet::default(); + #( #group_as_default_rules )* + enabled_rules + } + } + + #( #struct_groups )* + + #[test] + fn test_order() { + #( + for items in #group_pascal_idents::GROUP_RULES.windows(2) { + assert!(items[0] < items[1], "{} < {}", items[0], items[1]); + } + )* + } + } + } else { + quote! { + #use_rule_configuration + use biome_deserialize_macros::Merge; + use pg_diagnostics::{Category, Severity}; + use rustc_hash::FxHashSet; + use serde::{Deserialize, Serialize}; + #[cfg(feature = "schema")] + use schemars::JsonSchema; + + #[derive(Clone, Copy, Debug, Eq, Hash, Merge, Ord, PartialEq, PartialOrd, serde::Deserialize, serde::Serialize)] + #[cfg_attr(feature = "schema", derive(JsonSchema))] + #[serde(rename_all = "camelCase")] + pub enum RuleGroup { + #( #group_pascal_idents ),* + } + impl RuleGroup { + pub const fn as_str(self) -> &'static str { + match self { + #( Self::#group_pascal_idents => #group_pascal_idents::GROUP_NAME, )* + } + } + } + impl std::str::FromStr for RuleGroup { + type Err = &'static str; + fn from_str(s: &str) -> Result { + match s { + #( #group_pascal_idents::GROUP_NAME => Ok(Self::#group_pascal_idents), )* + _ => Err("This rule group doesn't exist.") + } + } + } + + #[derive(Clone, Debug, Default, Deserialize, Eq, Merge, PartialEq, Serialize)] + #[cfg_attr(feature = "schema", derive(JsonSchema))] + #[serde(rename_all = "camelCase", deny_unknown_fields)] + pub struct Rules { + /// It enables the lint rules recommended by Biome. `true` by default. + #[serde(skip_serializing_if = "Option::is_none")] + pub recommended: Option, + + /// It enables ALL rules. The rules that belong to `nursery` won't be enabled. 
+ #[serde(skip_serializing_if = "Option::is_none")] + pub all: Option, + + #( + #[serde(skip_serializing_if = "Option::is_none")] + pub #group_idents: Option<#group_pascal_idents>, + )* + } + + impl Rules { + /// Checks if the code coming from [pg_diagnostics::Diagnostic] corresponds to a rule. + /// Usually the code is built like {group}/{rule_name} + pub fn has_rule( + group: RuleGroup, + rule_name: &str, + ) -> Option<&'static str> { + match group { + #( + RuleGroup::#group_pascal_idents => #group_pascal_idents::has_rule(rule_name), + )* + } + } + + #severity_fn + + /// Ensure that `recommended` is set to `true` or implied. + pub fn set_recommended(&mut self) { + if self.all != Some(true) && self.recommended == Some(false) { + self.recommended = Some(true) + } + #( + if let Some(group) = &mut self.#group_idents { + group.recommended = None; + } + )* + } + + // Note: In top level, it is only considered _not_ recommended + // when the recommended option is false + pub(crate) const fn is_recommended_false(&self) -> bool { + matches!(self.recommended, Some(false)) + } + + pub(crate) const fn is_all_true(&self) -> bool { + matches!(self.all, Some(true)) + } + + /// It returns the enabled rules by default. + /// + /// The enabled rules are calculated from the difference with the disabled rules. + pub fn as_enabled_rules(&self) -> FxHashSet> { + let mut enabled_rules = FxHashSet::default(); + let mut disabled_rules = FxHashSet::default(); + #( #group_as_default_rules )* + + enabled_rules.difference(&disabled_rules).copied().collect() + } + } + + #( #struct_groups )* + + #[test] + fn test_order() { + #( + for items in #group_pascal_idents::GROUP_RULES.windows(2) { + assert!(items[0] < items[1], "{} < {}", items[0], items[1]); + } + )* + } + } + }; + + let push_rules = match kind { + RuleCategory::Lint => { + quote! 
{ + use crate::analyser::linter::*; + use pg_analyse::{AnalyzerRules, MetadataRegistry}; + + pub fn push_to_analyser_rules( + rules: &Rules, + metadata: &MetadataRegistry, + analyser_rules: &mut AnalyzerRules, + ) { + #( + if let Some(rules) = rules.#group_idents.as_ref() { + for rule_name in #group_pascal_idents::GROUP_RULES { + if let Some((_, Some(rule_options))) = rules.get_rule_configuration(rule_name) { + if let Some(rule_key) = metadata.find_rule(#group_strings, rule_name) { + analyser_rules.push_rule(rule_key, rule_options); + } + } + } + } + )* + } + } + } + RuleCategory::Action => { + quote! { + use crate::analyser::assists::*; + use pg_analyse::{AnalyzerRules, MetadataRegistry}; + + pub fn push_to_analyser_assists( + rules: &Actions, + metadata: &MetadataRegistry, + analyser_rules: &mut AnalyzerRules, + ) { + #( + if let Some(rules) = rules.#group_idents.as_ref() { + for rule_name in #group_pascal_idents::GROUP_RULES { + if let Some((_, Some(rule_options))) = rules.get_rule_configuration(rule_name) { + if let Some(rule_key) = metadata.find_rule(#group_strings, rule_name) { + analyser_rules.push_rule(rule_key, rule_options); + } + } + } + } + )* + } + } + } + RuleCategory::Transformation => unimplemented!(), + }; + + let configuration = groups.to_string(); + let push_rules = push_rules.to_string(); + + let file_name = match kind { + RuleCategory::Lint => &push_directory.join("linter.rs"), + RuleCategory::Action => &push_directory.join("assists.rs"), + RuleCategory::Transformation => unimplemented!(), + }; + + let path = if kind == RuleCategory::Action { + &root.join("actions.rs") + } else { + &root.join("rules.rs") + }; + update(path, &xtask::reformat(configuration)?, mode)?; + update(file_name, &xtask::reformat(push_rules)?, mode)?; + + Ok(()) +} + +fn generate_group_struct( + group: &str, + rules: &BTreeMap<&'static str, RuleMetadata>, + kind: RuleCategory, +) -> TokenStream { + let mut lines_recommended_rule = Vec::new(); + let mut 
lines_recommended_rule_as_filter = Vec::new(); + let mut lines_all_rule_as_filter = Vec::new(); + let mut lines_rule = Vec::new(); + let mut schema_lines_rules = Vec::new(); + let mut rule_enabled_check_line = Vec::new(); + let mut rule_disabled_check_line = Vec::new(); + let mut get_rule_configuration_line = Vec::new(); + + for (index, (rule, metadata)) in rules.iter().enumerate() { + let summary = { + let mut docs = String::new(); + let parser = Parser::new(metadata.docs); + for event in parser { + match event { + Event::Text(text) => { + docs.push_str(text.as_ref()); + } + Event::Code(text) => { + // Escape `[` and `<` to obtain valid Markdown + docs.push_str(text.replace('[', "\\[").replace('<', "\\<").as_ref()); + } + Event::SoftBreak => { + docs.push(' '); + } + + Event::Start(Tag::Paragraph) => {} + Event::End(TagEnd::Paragraph) => { + break; + } + + Event::Start(tag) => match tag { + Tag::Strong | Tag::Paragraph => { + continue; + } + + _ => panic!("Unimplemented tag {:?}", { tag }), + }, + + Event::End(tag) => match tag { + TagEnd::Strong | TagEnd::Paragraph => { + continue; + } + _ => panic!("Unimplemented tag {:?}", { tag }), + }, + + _ => { + panic!("Unimplemented event {:?}", { event }) + } + } + } + docs + }; + + let rule_position = Literal::u8_unsuffixed(index as u8); + let rule_identifier = quote::format_ident!("{}", Case::Snake.convert(rule)); + let rule_config_type = quote::format_ident!( + "{}", + if kind == RuleCategory::Action { + "RuleAssistConfiguration" + } else { + "RuleConfiguration" + } + ); + let rule_name = Ident::new(&to_capitalized(rule), Span::call_site()); + if metadata.recommended { + lines_recommended_rule_as_filter.push(quote! { + RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[#rule_position]) + }); + + lines_recommended_rule.push(quote! { + #rule + }); + } + lines_all_rule_as_filter.push(quote! { + RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[#rule_position]) + }); + lines_rule.push(quote! 
{ + #rule + }); + let rule_option_type = quote! { + pg_linter::options::#rule_name + }; + let rule_option = if kind == RuleCategory::Action { + quote! { Option<#rule_config_type<#rule_option_type>> } + } else { + quote! { + Option<#rule_config_type<#rule_option_type>> + } + }; + schema_lines_rules.push(quote! { + #[doc = #summary] + #[serde(skip_serializing_if = "Option::is_none")] + pub #rule_identifier: #rule_option + }); + + rule_enabled_check_line.push(quote! { + if let Some(rule) = self.#rule_identifier.as_ref() { + if rule.is_enabled() { + index_set.insert(RuleFilter::Rule( + Self::GROUP_NAME, + Self::GROUP_RULES[#rule_position], + )); + } + } + }); + rule_disabled_check_line.push(quote! { + if let Some(rule) = self.#rule_identifier.as_ref() { + if rule.is_disabled() { + index_set.insert(RuleFilter::Rule( + Self::GROUP_NAME, + Self::GROUP_RULES[#rule_position], + )); + } + } + }); + + if kind == RuleCategory::Action { + get_rule_configuration_line.push(quote! { + #rule => self.#rule_identifier.as_ref().map(|conf| (conf.level(), conf.get_options())) + }); + } else { + get_rule_configuration_line.push(quote! { + #rule => self.#rule_identifier.as_ref().map(|conf| (conf.level(), conf.get_options())) + }); + } + } + + let group_pascal_ident = Ident::new(&to_capitalized(group), Span::call_site()); + + let get_configuration_function = if kind == RuleCategory::Action { + quote! { + pub(crate) fn get_rule_configuration(&self, rule_name: &str) -> Option<(RuleAssistPlainConfiguration, Option)> { + match rule_name { + #( #get_rule_configuration_line ),*, + _ => None + } + } + } + } else { + quote! { + pub(crate) fn get_rule_configuration(&self, rule_name: &str) -> Option<(RulePlainConfiguration, Option)> { + match rule_name { + #( #get_rule_configuration_line ),*, + _ => None + } + } + } + }; + + if kind == RuleCategory::Action { + quote! 
{ + #[derive(Clone, Debug, Default, Deserialize, Eq, Merge, PartialEq, Serialize)] + #[cfg_attr(feature = "schema", derive(JsonSchema))] + #[serde(rename_all = "camelCase", default, deny_unknown_fields)] + /// A list of rules that belong to this group + pub struct #group_pascal_ident { + + #( #schema_lines_rules ),* + } + + impl #group_pascal_ident { + + const GROUP_NAME: &'static str = #group; + pub(crate) const GROUP_RULES: &'static [&'static str] = &[ + #( #lines_rule ),* + ]; + + pub(crate) fn get_enabled_rules(&self) -> FxHashSet> { + let mut index_set = FxHashSet::default(); + #( #rule_enabled_check_line )* + index_set + } + + /// Checks if, given a rule name, matches one of the rules contained in this category + pub(crate) fn has_rule(rule_name: &str) -> Option<&'static str> { + Some(Self::GROUP_RULES[Self::GROUP_RULES.binary_search(&rule_name).ok()?]) + } + + #get_configuration_function + } + } + } else { + quote! { + #[derive(Clone, Debug, Default, Deserialize, Eq, Merge, PartialEq, Serialize)] + #[cfg_attr(feature = "schema", derive(JsonSchema))] + #[serde(rename_all = "camelCase", default, deny_unknown_fields)] + /// A list of rules that belong to this group + pub struct #group_pascal_ident { + /// It enables the recommended rules for this group + #[serde(skip_serializing_if = "Option::is_none")] + pub recommended: Option, + + /// It enables ALL rules for this group. 
+ #[serde(skip_serializing_if = "Option::is_none")] + pub all: Option, + + #( #schema_lines_rules ),* + } + + impl #group_pascal_ident { + + const GROUP_NAME: &'static str = #group; + pub(crate) const GROUP_RULES: &'static [&'static str] = &[ + #( #lines_rule ),* + ]; + + const RECOMMENDED_RULES: &'static [&'static str] = &[ + #( #lines_recommended_rule ),* + ]; + + const RECOMMENDED_RULES_AS_FILTERS: &'static [RuleFilter<'static>] = &[ + #( #lines_recommended_rule_as_filter ),* + ]; + + const ALL_RULES_AS_FILTERS: &'static [RuleFilter<'static>] = &[ + #( #lines_all_rule_as_filter ),* + ]; + + /// Retrieves the recommended rules + pub(crate) fn is_recommended_true(&self) -> bool { + // we should inject recommended rules only when they are set to "true" + matches!(self.recommended, Some(true)) + } + + pub(crate) fn is_recommended_unset(&self) -> bool { + self.recommended.is_none() + } + + pub(crate) fn is_all_true(&self) -> bool { + matches!(self.all, Some(true)) + } + + pub(crate) fn is_all_unset(&self) -> bool { + self.all.is_none() + } + + pub(crate) fn get_enabled_rules(&self) -> FxHashSet> { + let mut index_set = FxHashSet::default(); + #( #rule_enabled_check_line )* + index_set + } + + pub(crate) fn get_disabled_rules(&self) -> FxHashSet> { + let mut index_set = FxHashSet::default(); + #( #rule_disabled_check_line )* + index_set + } + + /// Checks if, given a rule name, matches one of the rules contained in this category + pub(crate) fn has_rule(rule_name: &str) -> Option<&'static str> { + Some(Self::GROUP_RULES[Self::GROUP_RULES.binary_search(&rule_name).ok()?]) + } + + /// Checks if, given a rule name, it is marked as recommended + pub(crate) fn is_recommended_rule(rule_name: &str) -> bool { + Self::RECOMMENDED_RULES.contains(&rule_name) + } + + pub(crate) fn recommended_rules_as_filters() -> &'static [RuleFilter<'static>] { + Self::RECOMMENDED_RULES_AS_FILTERS + } + + pub(crate) fn all_rules_as_filters() -> &'static [RuleFilter<'static>] { + 
Self::ALL_RULES_AS_FILTERS + } + + /// Select preset rules + // Preset rules shouldn't populate disabled rules + // because that will make specific rules cannot be enabled later. + pub(crate) fn collect_preset_rules( + &self, + parent_is_all: bool, + parent_is_recommended: bool, + enabled_rules: &mut FxHashSet>, + ) { + // The order of the if-else branches MATTERS! + if self.is_all_true() || self.is_all_unset() && parent_is_all { + enabled_rules.extend(Self::all_rules_as_filters()); + } else if self.is_recommended_true() || self.is_recommended_unset() && self.is_all_unset() && parent_is_recommended { + enabled_rules.extend(Self::recommended_rules_as_filters()); + } + } + + #get_configuration_function + } + } + } +} diff --git a/xtask/codegen/src/lib.rs b/xtask/codegen/src/lib.rs index 0af5dd2c8..3a0542357 100644 --- a/xtask/codegen/src/lib.rs +++ b/xtask/codegen/src/lib.rs @@ -1,14 +1,55 @@ //! Codegen tools. Derived from Biome's codegen mod generate_analyser; +mod generate_configuration; mod generate_crate; mod generate_new_analyser_rule; pub use self::generate_analyser::generate_analyser; +pub use self::generate_configuration::generate_rules_configuration; pub use self::generate_crate::generate_crate; pub use self::generate_new_analyser_rule::generate_new_analyser_rule; use bpaf::Bpaf; use generate_new_analyser_rule::Category; +use std::path::Path; +use xtask::{glue::fs2, Mode, Result}; + +pub enum UpdateResult { + NotUpdated, + Updated, +} + +/// A helper to update file on disk if it has changed. 
+/// With verify = false, +pub fn update(path: &Path, contents: &str, mode: &Mode) -> Result { + match fs2::read_to_string(path) { + Ok(old_contents) if old_contents == contents => { + return Ok(UpdateResult::NotUpdated); + } + _ => (), + } + + if *mode == Mode::Verify { + anyhow::bail!("`{}` is not up-to-date", path.display()); + } + + eprintln!("updating {}", path.display()); + if let Some(parent) = path.parent() { + if !parent.exists() { + fs2::create_dir_all(parent)?; + } + } + fs2::write(path, contents)?; + Ok(UpdateResult::Updated) +} + +pub fn to_capitalized(s: &str) -> String { + let mut c = s.chars(); + match c.next() { + None => String::new(), + Some(f) => f.to_uppercase().collect::() + c.as_str(), + } +} #[derive(Debug, Clone, Bpaf)] #[bpaf(options)] @@ -16,6 +57,9 @@ pub enum TaskCommand { /// Generate factory functions for the analyzer and the configuration of the analysers #[bpaf(command)] Analyser, + /// Generate the part of the configuration that depends on some metadata + #[bpaf(command)] + Configuration, /// Creates a new crate #[bpaf(command, long("new-crate"))] NewCrate { diff --git a/xtask/codegen/src/main.rs b/xtask/codegen/src/main.rs index 78729485c..c432c16ed 100644 --- a/xtask/codegen/src/main.rs +++ b/xtask/codegen/src/main.rs @@ -1,7 +1,9 @@ +use xtask::Mode::Overwrite; use xtask::{project_root, pushd, Result}; use xtask_codegen::{ - generate_analyser, generate_crate, generate_new_analyser_rule, task_command, TaskCommand, + generate_analyser, generate_crate, generate_new_analyser_rule, generate_rules_configuration, + task_command, TaskCommand, }; fn main() -> Result<()> { @@ -22,6 +24,9 @@ fn main() -> Result<()> { } => { generate_new_analyser_rule(category, &name, &group); } + TaskCommand::Configuration => { + generate_rules_configuration(Overwrite)?; + } } Ok(()) From 1b1216fa36a210b9a236042f880e39abb1b69421 Mon Sep 17 00:00:00 2001 From: psteinroe Date: Sun, 22 Dec 2024 13:36:20 +0100 Subject: [PATCH 15/27] cleanup --- 
crates/pg_workspace_new/src/handlers/lint.rs | 19 ------------------- crates/pg_workspace_new/src/handlers/mod.rs | 1 - 2 files changed, 20 deletions(-) delete mode 100644 crates/pg_workspace_new/src/handlers/lint.rs delete mode 100644 crates/pg_workspace_new/src/handlers/mod.rs diff --git a/crates/pg_workspace_new/src/handlers/lint.rs b/crates/pg_workspace_new/src/handlers/lint.rs deleted file mode 100644 index 3c45891a0..000000000 --- a/crates/pg_workspace_new/src/handlers/lint.rs +++ /dev/null @@ -1,19 +0,0 @@ -use pg_fs::PgLspPath; - -#[derive(Debug)] -pub(crate) struct LintParams<'a> { - pub(crate) stmt: &'a pg_query_ext::NodeEnum, - pub(crate) workspace: &'a WorkspaceSettingsHandle<'a>, - pub(crate) max_diagnostics: u32, - pub(crate) path: &'a PgLspPath, - pub(crate) only: Vec, - pub(crate) skip: Vec, - pub(crate) categories: RuleCategories, - pub(crate) suppression_reason: Option, -} - -pub(crate) struct LintResults { - pub(crate) diagnostics: Vec, - pub(crate) errors: usize, - pub(crate) skipped_diagnostics: u32, -} diff --git a/crates/pg_workspace_new/src/handlers/mod.rs b/crates/pg_workspace_new/src/handlers/mod.rs deleted file mode 100644 index e7034df69..000000000 --- a/crates/pg_workspace_new/src/handlers/mod.rs +++ /dev/null @@ -1 +0,0 @@ -pub mod lint; From 13425b159ae38fa9421f0d5809db7a389b17e629 Mon Sep 17 00:00:00 2001 From: psteinroe Date: Sat, 28 Dec 2024 15:01:12 +0100 Subject: [PATCH 16/27] finish the beast --- Cargo.lock | 6 + crates/pg_analyse/Cargo.toml | 3 +- crates/pg_analyse/src/categories.rs | 10 +- crates/pg_analyse/src/context.rs | 10 -- crates/pg_analyse/src/diagnostics.rs | 148 ------------------ crates/pg_analyse/src/lib.rs | 8 +- crates/pg_analyse/src/macros.rs | 2 - crates/pg_analyse/src/options.rs | 8 +- crates/pg_analyse/src/registry.rs | 2 +- crates/pg_analyse/src/rule.rs | 1 + crates/pg_cli/Cargo.toml | 1 + .../pg_cli/src/execute/process_file/check.rs | 12 +- crates/pg_configuration/src/lib.rs | 2 +- 
crates/pg_linter/src/lib.rs | 2 +- crates/pg_lsp_new/Cargo.toml | 1 + crates/pg_lsp_new/src/session.rs | 6 + crates/pg_workspace_new/Cargo.toml | 5 +- crates/pg_workspace_new/src/configuration.rs | 18 ++- crates/pg_workspace_new/src/lib.rs | 1 - crates/pg_workspace_new/src/settings.rs | 67 +++++++- crates/pg_workspace_new/src/workspace.rs | 21 +-- .../pg_workspace_new/src/workspace/server.rs | 17 +- .../src/workspace/server/analyser/lint.rs | 61 ++++++++ .../src/workspace/server/analyser/mod.rs | 135 ++++++++++++++++ .../src/workspace/server/pg_query.rs | 14 +- .../src/workspace/server/store.rs | 15 +- .../src/workspace/server/tree_sitter.rs | 2 +- test.sql | 2 + 28 files changed, 369 insertions(+), 211 deletions(-) delete mode 100644 crates/pg_analyse/src/diagnostics.rs create mode 100644 crates/pg_workspace_new/src/workspace/server/analyser/lint.rs create mode 100644 crates/pg_workspace_new/src/workspace/server/analyser/mod.rs diff --git a/Cargo.lock b/Cargo.lock index 546950c33..ac7a3782d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2237,6 +2237,7 @@ dependencies = [ "pg_query_ext", "pg_schema_cache", "rustc-hash 2.1.0", + "schemars", "serde", "text-size", ] @@ -2263,6 +2264,7 @@ dependencies = [ "libc", "mimalloc", "path-absolutize", + "pg_analyse", "pg_configuration", "pg_console", "pg_diagnostics", @@ -2527,6 +2529,7 @@ dependencies = [ "anyhow", "biome_deserialize", "futures", + "pg_analyse", "pg_configuration", "pg_console", "pg_diagnostics", @@ -2701,13 +2704,16 @@ dependencies = [ "dashmap 5.5.3", "futures", "ignore", + "pg_analyse", "pg_configuration", "pg_console", "pg_diagnostics", "pg_fs", + "pg_linter", "pg_query_ext", "pg_schema_cache", "pg_statement_splitter", + "rustc-hash 2.1.0", "serde", "serde_json", "sqlx", diff --git a/crates/pg_analyse/Cargo.toml b/crates/pg_analyse/Cargo.toml index 823be2ef0..3deee2961 100644 --- a/crates/pg_analyse/Cargo.toml +++ b/crates/pg_analyse/Cargo.toml @@ -24,6 +24,7 @@ biome_deserialize_macros = { workspace = 
true, optional = true } enumflags2.workspace = true serde = { workspace = true, features = ["derive"], optional = true } text-size.workspace = true +schemars = { workspace = true, optional = true } [features] -serde = ["dep:serde", "dep:biome_deserialize", "dep:biome_deserialize_macros"] +serde = ["dep:serde", "dep:schemars", "dep:biome_deserialize", "dep:biome_deserialize_macros"] diff --git a/crates/pg_analyse/src/categories.rs b/crates/pg_analyse/src/categories.rs index 115af8fea..79b897578 100644 --- a/crates/pg_analyse/src/categories.rs +++ b/crates/pg_analyse/src/categories.rs @@ -297,12 +297,13 @@ impl schemars::JsonSchema for RuleCategories { } } + #[derive(Debug, Default)] /// A convenient type create a [RuleCategories] type /// /// ``` -/// use biome_analyze::{RuleCategoriesBuilder, RuleCategory}; -/// let mut categories = RuleCategoriesBuilder::default().with_syntax().with_lint().build(); +/// use pg_analyse::{RuleCategoriesBuilder, RuleCategory}; +/// let mut categories = RuleCategoriesBuilder::default().with_lint().build(); /// /// assert!(categories.contains(RuleCategory::Lint)); /// assert!(!categories.contains(RuleCategory::Action)); @@ -328,6 +329,11 @@ impl RuleCategoriesBuilder { self } + pub fn all(mut self) -> Self { + self.flags = BitFlags::all(); + self + } + pub fn build(self) -> RuleCategories { RuleCategories(self.flags) } diff --git a/crates/pg_analyse/src/context.rs b/crates/pg_analyse/src/context.rs index ca526392d..53658411d 100644 --- a/crates/pg_analyse/src/context.rs +++ b/crates/pg_analyse/src/context.rs @@ -1,5 +1,3 @@ -use std::path::Path; - use crate::{ categories::RuleCategory, rule::{GroupCategory, Rule, RuleGroup, RuleMetadata}, @@ -7,7 +5,6 @@ use crate::{ pub struct RuleContext<'a, R: Rule> { stmt: &'a pg_query_ext::NodeEnum, - file_path: &'a Path, options: &'a R::Options, } @@ -18,12 +15,10 @@ where #[allow(clippy::too_many_arguments)] pub fn new( stmt: &'a pg_query_ext::NodeEnum, - file_path: &'a Path, options: &'a 
R::Options, ) -> Self { Self { stmt, - file_path, options, } } @@ -82,9 +77,4 @@ where pub fn options(&self) -> &R::Options { self.options } - - /// The file path of the current file - pub fn file_path(&self) -> &Path { - self.file_path - } } diff --git a/crates/pg_analyse/src/diagnostics.rs b/crates/pg_analyse/src/diagnostics.rs deleted file mode 100644 index ceba084dd..000000000 --- a/crates/pg_analyse/src/diagnostics.rs +++ /dev/null @@ -1,148 +0,0 @@ -use pg_diagnostics::{ - category, Advices, Category, Diagnostic, - DiagnosticTags, Error, Location, Severity, Visit, -}; -use text_size::TextRange; -use std::borrow::Cow; -use std::fmt::{Debug, Display, Formatter}; - -use crate::rule::RuleDiagnostic; - -/// Small wrapper for diagnostics during the analysis phase. -/// -/// During these phases, analyzers can create various type diagnostics and some of them -/// don't have all the info to actually create a real [Diagnostic]. -/// -/// This wrapper serves as glue, which eventually is able to spit out full fledged diagnostics. 
-/// -#[derive(Debug)] -pub struct AnalyzerDiagnostic { - kind: DiagnosticKind, -} - -impl From for AnalyzerDiagnostic { - fn from(rule_diagnostic: RuleDiagnostic) -> Self { - Self { - kind: DiagnosticKind::Rule(rule_diagnostic), - } - } -} - -#[derive(Debug)] -enum DiagnosticKind { - /// It holds various info related to diagnostics emitted by the rules - Rule(RuleDiagnostic), - /// We have raw information to create a basic [Diagnostic] - Raw(Error), -} - -impl Diagnostic for AnalyzerDiagnostic { - fn category(&self) -> Option<&'static Category> { - match &self.kind { - DiagnosticKind::Rule(rule_diagnostic) => Some(rule_diagnostic.category), - DiagnosticKind::Raw(error) => error.category(), - } - } - fn description(&self, fmt: &mut Formatter<'_>) -> std::fmt::Result { - match &self.kind { - DiagnosticKind::Rule(rule_diagnostic) => Debug::fmt(&rule_diagnostic.message, fmt), - DiagnosticKind::Raw(error) => error.description(fmt), - } - } - - fn message(&self, fmt: &mut pg_console::fmt::Formatter<'_>) -> std::io::Result<()> { - match &self.kind { - DiagnosticKind::Rule(rule_diagnostic) => { - pg_console::fmt::Display::fmt(&rule_diagnostic.message, fmt) - } - DiagnosticKind::Raw(error) => error.message(fmt), - } - } - - fn severity(&self) -> Severity { - match &self.kind { - DiagnosticKind::Rule { .. 
} => Severity::Error, - DiagnosticKind::Raw(error) => error.severity(), - } - } - - fn tags(&self) -> DiagnosticTags { - match &self.kind { - DiagnosticKind::Rule(rule_diagnostic) => rule_diagnostic.tags, - DiagnosticKind::Raw(error) => error.tags(), - } - } - - fn location(&self) -> Location<'_> { - match &self.kind { - DiagnosticKind::Rule(rule_diagnostic) => { - Location::builder().span(&rule_diagnostic.span).build() - } - DiagnosticKind::Raw(error) => error.location(), - } - } - - fn advices(&self, visitor: &mut dyn Visit) -> std::io::Result<()> { - match &self.kind { - DiagnosticKind::Rule(rule_diagnostic) => rule_diagnostic.advices().record(visitor)?, - DiagnosticKind::Raw(error) => error.advices(visitor)?, - } - - Ok(()) - } -} - -impl AnalyzerDiagnostic { - /// Creates a diagnostic from a generic [Error] - pub fn from_error(error: Error) -> Self { - Self { - kind: DiagnosticKind::Raw(error), - } - } - - pub fn get_span(&self) -> Option { - match &self.kind { - DiagnosticKind::Rule(rule_diagnostic) => rule_diagnostic.span, - DiagnosticKind::Raw(error) => error.location().span, - } - } - - pub const fn is_raw(&self) -> bool { - matches!(self.kind, DiagnosticKind::Raw(_)) - } -} - -#[derive(Debug, Diagnostic, Clone)] -#[diagnostic(severity = Warning)] -pub struct SuppressionDiagnostic { - #[category] - category: &'static Category, - #[location(span)] - range: TextRange, - #[message] - #[description] - message: String, - #[tags] - tags: DiagnosticTags, -} - -impl SuppressionDiagnostic { - pub(crate) fn new( - category: &'static Category, - range: TextRange, - message: impl Display, - ) -> Self { - Self { - category, - range, - message: message.to_string(), - tags: DiagnosticTags::empty(), - } - } - - pub(crate) fn with_tags(mut self, tags: DiagnosticTags) -> Self { - self.tags |= tags; - self - } -} - diff --git a/crates/pg_analyse/src/lib.rs b/crates/pg_analyse/src/lib.rs index 44dd394eb..55b9d82cd 100644 --- a/crates/pg_analyse/src/lib.rs +++ 
b/crates/pg_analyse/src/lib.rs @@ -12,8 +12,8 @@ pub use pg_diagnostics::category_concat; use registry::RegistryRuleParams; pub use crate::categories::{ - ActionCategory, RefactorKind, RuleCategories, RuleCategoriesBuilder, RuleCategory, - SourceActionKind, SUPPRESSION_ACTION_CATEGORY, + ActionCategory, RefactorKind, RuleCategories, RuleCategory, + SourceActionKind, SUPPRESSION_ACTION_CATEGORY, RuleCategoriesBuilder }; pub use crate::filter::{AnalysisFilter, GroupKey, RuleFilter, RuleKey}; pub use crate::options::{AnalyzerConfiguration, AnalyzerOptions, AnalyzerRules}; @@ -30,7 +30,7 @@ pub struct Analyzer<'analyzer> { } pub struct AnalyzerContext<'a> { - pub root: pg_query_ext::NodeEnum, + pub root: &'a pg_query_ext::NodeEnum, pub options: &'a AnalyzerOptions, pub registry: RuleRegistry, } @@ -43,7 +43,7 @@ impl<'analyzer> Analyzer<'analyzer> { pub fn run(self, ctx: AnalyzerContext) -> Vec { let params = RegistryRuleParams { - root: &ctx.root, + root: ctx.root, options: ctx.options, }; diff --git a/crates/pg_analyse/src/macros.rs b/crates/pg_analyse/src/macros.rs index 7d838981a..9a7d62c5f 100644 --- a/crates/pg_analyse/src/macros.rs +++ b/crates/pg_analyse/src/macros.rs @@ -1,5 +1,3 @@ -use pg_diagnostics::Category; - /// This macro is used to declare an analyzer rule type, and implement the // [RuleMeta] trait for it /// # Example diff --git a/crates/pg_analyse/src/options.rs b/crates/pg_analyse/src/options.rs index 4544fcd5c..1c1b15d66 100644 --- a/crates/pg_analyse/src/options.rs +++ b/crates/pg_analyse/src/options.rs @@ -3,7 +3,6 @@ use rustc_hash::FxHashMap; use crate::{Rule, RuleKey}; use std::any::{Any, TypeId}; use std::fmt::Debug; -use std::path::PathBuf; /// A convenient new type data structure to store the options that belong to a rule #[derive(Debug)] @@ -43,7 +42,7 @@ impl AnalyzerRules { } } -/// A data structured derived from the `biome.json` file +/// A data structured derived from the `pglsp.toml` file #[derive(Debug, Default)] pub struct 
AnalyzerConfiguration { /// A list of rules and their options @@ -53,11 +52,8 @@ pub struct AnalyzerConfiguration { /// A set of information useful to the analyzer infrastructure #[derive(Debug, Default)] pub struct AnalyzerOptions { - /// A data structured derived from the [`biome.json`] file + /// A data structured derived from the [`pglsp.toml`] file pub configuration: AnalyzerConfiguration, - - /// The file that is being analyzed - pub file_path: PathBuf, } impl AnalyzerOptions { diff --git a/crates/pg_analyse/src/registry.rs b/crates/pg_analyse/src/registry.rs index 207682ec7..178633315 100644 --- a/crates/pg_analyse/src/registry.rs +++ b/crates/pg_analyse/src/registry.rs @@ -174,7 +174,7 @@ impl RegistryRule { R: Rule + 'static, { let options = params.options.rule_options::().unwrap_or_default(); - let ctx = RuleContext::new(params.root, ¶ms.options.file_path, &options); + let ctx = RuleContext::new(params.root, &options); R::run(&ctx) } diff --git a/crates/pg_analyse/src/rule.rs b/crates/pg_analyse/src/rule.rs index f159861db..34827f7cc 100644 --- a/crates/pg_analyse/src/rule.rs +++ b/crates/pg_analyse/src/rule.rs @@ -95,6 +95,7 @@ pub trait Rule: RuleMeta + Sized { /// Diagnostic object returned by a single analysis rule #[derive(Debug, Diagnostic)] +#[diagnostic(severity = Warning)] pub struct RuleDiagnostic { #[category] pub(crate) category: &'static Category, diff --git a/crates/pg_cli/Cargo.toml b/crates/pg_cli/Cargo.toml index bbeab8ece..3279b3514 100644 --- a/crates/pg_cli/Cargo.toml +++ b/crates/pg_cli/Cargo.toml @@ -20,6 +20,7 @@ hdrhistogram = { version = "7.5.4", default-features = false } path-absolutize = { version = "3.1.1", optional = false, features = ["use_unix_paths_on_wasm"] } pg_configuration = { workspace = true } pg_console = { workspace = true } +pg_analyse = { workspace = true } pg_diagnostics = { workspace = true } pg_flags = { workspace = true } pg_fs = { workspace = true } diff --git 
a/crates/pg_cli/src/execute/process_file/check.rs b/crates/pg_cli/src/execute/process_file/check.rs index ff691a89f..134c5c4fb 100644 --- a/crates/pg_cli/src/execute/process_file/check.rs +++ b/crates/pg_cli/src/execute/process_file/check.rs @@ -1,3 +1,4 @@ +use pg_analyse::RuleCategoriesBuilder; use pg_diagnostics::{category, Error}; use crate::execute::diagnostics::ResultExt; @@ -24,10 +25,19 @@ pub(crate) fn check_with_guard<'ctx>( let input = workspace_file.input()?; let changed = false; + let (only, skip) = (Vec::new(), Vec::new()); + let max_diagnostics = ctx.remaining_diagnostics.load(Ordering::Relaxed); let pull_diagnostics_result = workspace_file .guard() - .pull_diagnostics(max_diagnostics) + .pull_diagnostics( + RuleCategoriesBuilder::default() + .all() + .build(), + max_diagnostics, + only, + skip, + ) .with_file_path_and_code( workspace_file.path.display().to_string(), category!("check"), diff --git a/crates/pg_configuration/src/lib.rs b/crates/pg_configuration/src/lib.rs index 54ee39f75..b053d89b1 100644 --- a/crates/pg_configuration/src/lib.rs +++ b/crates/pg_configuration/src/lib.rs @@ -19,7 +19,7 @@ use crate::vcs::{partial_vcs_configuration, PartialVcsConfiguration, VcsConfigur pub use analyser::{ partial_linter_configuration, LinterConfiguration, PartialLinterConfiguration, RuleConfiguration, RuleFixConfiguration, RulePlainConfiguration, RuleWithFixOptions, - RuleWithOptions, Rules, + RuleWithOptions, Rules, RuleSelector }; use biome_deserialize_macros::Partial; use bpaf::Bpaf; diff --git a/crates/pg_linter/src/lib.rs b/crates/pg_linter/src/lib.rs index 5abc05fd1..c016fb228 100644 --- a/crates/pg_linter/src/lib.rs +++ b/crates/pg_linter/src/lib.rs @@ -25,7 +25,7 @@ pub fn lint( let analyser = pg_analyse::Analyzer::new(METADATA.deref()); analyser.run(pg_analyse::AnalyzerContext { - root: root.clone(), + root, options, registry, }) diff --git a/crates/pg_lsp_new/Cargo.toml b/crates/pg_lsp_new/Cargo.toml index e9d1b30f2..f98aa9071 100644 --- 
a/crates/pg_lsp_new/Cargo.toml +++ b/crates/pg_lsp_new/Cargo.toml @@ -18,6 +18,7 @@ futures = "0.3.31" pg_configuration = { workspace = true } pg_console = { workspace = true } pg_diagnostics = { workspace = true } +pg_analyse = { workspace = true } pg_fs = { workspace = true } pg_lsp_converters = { workspace = true } pg_text_edit = { workspace = true } diff --git a/crates/pg_lsp_new/src/session.rs b/crates/pg_lsp_new/src/session.rs index 3908d0a01..204111f0e 100644 --- a/crates/pg_lsp_new/src/session.rs +++ b/crates/pg_lsp_new/src/session.rs @@ -4,6 +4,7 @@ use crate::utils; use anyhow::Result; use futures::stream::FuturesUnordered; use futures::StreamExt; +use pg_analyse::RuleCategoriesBuilder; use pg_configuration::ConfigurationPathHint; use pg_diagnostics::{DiagnosticExt, Error}; use pg_fs::{FileSystem, PgLspPath}; @@ -255,10 +256,15 @@ impl Session { .await; } + let categories = RuleCategoriesBuilder::default().all(); + let diagnostics: Vec = { let result = self.workspace.pull_diagnostics(PullDiagnosticsParams { path: pglsp_path.clone(), max_diagnostics: u64::MAX, + categories: categories.build(), + only: Vec::new(), + skip: Vec::new(), })?; tracing::trace!("biome diagnostics: {:#?}", result.diagnostics); diff --git a/crates/pg_workspace_new/Cargo.toml b/crates/pg_workspace_new/Cargo.toml index 520271f30..51bb03688 100644 --- a/crates/pg_workspace_new/Cargo.toml +++ b/crates/pg_workspace_new/Cargo.toml @@ -17,9 +17,12 @@ dashmap = "5.5.3" futures = "0.3.31" ignore = { workspace = true } pg_configuration = { workspace = true } +pg_analyse = { workspace = true, features = ["serde"] } pg_console = { workspace = true } pg_diagnostics = { workspace = true } +pg_linter = { workspace = true } pg_fs = { workspace = true, features = ["serde"] } +rustc-hash = { workspace = true } pg_query_ext = { workspace = true } pg_schema_cache = { workspace = true } pg_statement_splitter = { workspace = true } @@ -27,7 +30,7 @@ serde = { workspace = true, features = ["derive"] } 
serde_json = { workspace = true, features = ["raw_value"] } sqlx.workspace = true text-size.workspace = true -tokio = { workspace = true } +tokio = { workspace = true, features = ["rt", "rt-multi-thread"] } toml = { workspace = true } tracing = { workspace = true, features = ["attributes", "log"] } tree-sitter.workspace = true diff --git a/crates/pg_workspace_new/src/configuration.rs b/crates/pg_workspace_new/src/configuration.rs index 20c2e812d..671c487da 100644 --- a/crates/pg_workspace_new/src/configuration.rs +++ b/crates/pg_workspace_new/src/configuration.rs @@ -1,14 +1,14 @@ use std::{ - io::ErrorKind, - path::{Path, PathBuf}, + io::ErrorKind, ops::Deref, path::{Path, PathBuf} }; +use pg_analyse::AnalyzerRules; use pg_configuration::{ - ConfigurationDiagnostic, ConfigurationPathHint, ConfigurationPayload, PartialConfiguration, + push_to_analyser_rules, ConfigurationDiagnostic, ConfigurationPathHint, ConfigurationPayload, PartialConfiguration }; use pg_fs::{AutoSearchResult, ConfigName, FileSystem, OpenOptions}; -use crate::{DynRef, WorkspaceError}; +use crate::{settings::Settings, DynRef, WorkspaceError}; /// Information regarding the configuration that was found. 
/// @@ -174,3 +174,13 @@ pub fn create_config( Ok(()) } + +/// Returns the rules applied to a specific [Path], given the [Settings] +pub fn to_analyzer_rules(settings: &Settings) -> AnalyzerRules { + let mut analyzer_rules = AnalyzerRules::default(); + if let Some(rules) = settings.linter.rules.as_ref() { + push_to_analyser_rules(rules, pg_linter::METADATA.deref(), &mut analyzer_rules); + } + analyzer_rules +} + diff --git a/crates/pg_workspace_new/src/lib.rs b/crates/pg_workspace_new/src/lib.rs index 6ba1ca4c2..9467d1fb4 100644 --- a/crates/pg_workspace_new/src/lib.rs +++ b/crates/pg_workspace_new/src/lib.rs @@ -6,7 +6,6 @@ use pg_fs::{FileSystem, OsFileSystem}; pub mod configuration; pub mod diagnostics; pub mod dome; -pub mod handlers; pub mod matcher; pub mod settings; pub mod workspace; diff --git a/crates/pg_workspace_new/src/settings.rs b/crates/pg_workspace_new/src/settings.rs index d3abacdd9..7e878b393 100644 --- a/crates/pg_workspace_new/src/settings.rs +++ b/crates/pg_workspace_new/src/settings.rs @@ -1,14 +1,11 @@ use biome_deserialize::StringSet; use std::{ - num::NonZeroU64, - path::{Path, PathBuf}, - sync::{RwLock, RwLockReadGuard, RwLockWriteGuard}, + borrow::Cow, num::NonZeroU64, path::{Path, PathBuf}, sync::{RwLock, RwLockReadGuard, RwLockWriteGuard} }; use ignore::gitignore::{Gitignore, GitignoreBuilder}; use pg_configuration::{ - database::PartialDatabaseConfiguration, diagnostics::InvalidIgnorePattern, - files::FilesConfiguration, ConfigurationDiagnostic, PartialConfiguration, + database::PartialDatabaseConfiguration, diagnostics::InvalidIgnorePattern, files::FilesConfiguration, ConfigurationDiagnostic, LinterConfiguration, PartialConfiguration }; use pg_fs::FileSystem; @@ -22,6 +19,9 @@ pub struct Settings { /// Database settings for the workspace pub db: DatabaseSettings, + + /// Linter settings applied to all files in the workspace + pub linter: LinterSettings, } #[derive(Debug)] @@ -88,8 +88,38 @@ impl Settings { self.db = db.into() } + // 
linter part + if let Some(linter) = configuration.linter { + self.linter = + to_linter_settings(working_directory.clone(), LinterConfiguration::from(linter))?; + } + Ok(()) } + + /// Retrieves the settings of the linter + pub fn linter(&self) -> &LinterSettings { + &self.linter + } + + /// Returns linter rules. + pub fn as_linter_rules( + &self + ) -> Option> { + self.linter.rules.as_ref().map(Cow::Borrowed) + } +} + +fn to_linter_settings( + working_directory: Option, + conf: LinterConfiguration, +) -> Result { + Ok(LinterSettings { + enabled: conf.enabled, + rules: Some(conf.rules), + ignored_files: to_matcher(working_directory.clone(), Some(&conf.ignore))?, + included_files: to_matcher(working_directory.clone(), Some(&conf.include))?, + }) } fn to_file_settings( @@ -170,6 +200,33 @@ pub fn to_matcher( Ok(matcher) } +/// Linter settings for the entire workspace +#[derive(Debug)] +pub struct LinterSettings { + /// Enabled by default + pub enabled: bool, + + /// List of rules + pub rules: Option, + + /// List of ignored paths/files to match + pub ignored_files: Matcher, + + /// List of included paths/files to match + pub included_files: Matcher, +} + +impl Default for LinterSettings { + fn default() -> Self { + Self { + enabled: true, + rules: Some(pg_configuration::analyser::linter::Rules::default()), + ignored_files: Matcher::empty(), + included_files: Matcher::empty(), + } + } +} + /// Database settings for the entire workspace #[derive(Debug)] pub struct DatabaseSettings { diff --git a/crates/pg_workspace_new/src/workspace.rs b/crates/pg_workspace_new/src/workspace.rs index 20293e7f9..3688c064f 100644 --- a/crates/pg_workspace_new/src/workspace.rs +++ b/crates/pg_workspace_new/src/workspace.rs @@ -1,7 +1,8 @@ use std::{panic::RefUnwindSafe, path::PathBuf, sync::Arc}; pub use self::client::{TransportRequest, WorkspaceClient, WorkspaceTransport}; -use pg_configuration::PartialConfiguration; +use pg_analyse::RuleCategories; +use 
pg_configuration::{PartialConfiguration, RuleSelector}; use pg_fs::PgLspPath; use serde::{Deserialize, Serialize}; use text_size::TextRange; @@ -33,10 +34,10 @@ pub struct ChangeFileParams { #[derive(Debug, serde::Serialize, serde::Deserialize)] pub struct PullDiagnosticsParams { pub path: PgLspPath, - // pub categories: RuleCategories, + pub categories: RuleCategories, pub max_diagnostics: u64, - // pub only: Vec, - // pub skip: Vec, + pub only: Vec, + pub skip: Vec, } #[derive(Debug, serde::Serialize, serde::Deserialize)] @@ -188,17 +189,17 @@ impl<'app, W: Workspace + ?Sized> FileGuard<'app, W> { pub fn pull_diagnostics( &self, - // categories: RuleCategories, + categories: RuleCategories, max_diagnostics: u32, - // only: Vec, - // skip: Vec, + only: Vec, + skip: Vec, ) -> Result { self.workspace.pull_diagnostics(PullDiagnosticsParams { path: self.path.clone(), - // categories, + categories, max_diagnostics: max_diagnostics.into(), - // only, - // skip, + only, + skip, }) } // diff --git a/crates/pg_workspace_new/src/workspace/server.rs b/crates/pg_workspace_new/src/workspace/server.rs index 164a6b9fa..148fbee61 100644 --- a/crates/pg_workspace_new/src/workspace/server.rs +++ b/crates/pg_workspace_new/src/workspace/server.rs @@ -1,5 +1,6 @@ use std::{fs, future::Future, panic::RefUnwindSafe, path::Path, sync::RwLock}; +use analyser::lint::Linter; use change::StatementChange; use dashmap::{DashMap, DashSet}; use document::{Document, StatementRef}; @@ -29,6 +30,7 @@ mod change; mod document; mod pg_query; mod store; +mod analyser; mod tree_sitter; /// Simple helper to manage the db connection and the associated connection string @@ -312,13 +314,26 @@ impl Workspace for WorkspaceServer { .get(¶ms.path) .ok_or(WorkspaceError::not_found())?; + let linter = Linter::new( + analyser::lint::LinterParams { + settings: &self.settings(), + only: params.only, + skip: params.skip, + categories: Default::default(), + } + ); + let diagnostics: Vec = doc 
.statement_refs_with_ranges() .iter() .flat_map(|(stmt, r)| { let mut stmt_diagnostics = vec![]; - stmt_diagnostics.extend(self.pg_query.pull_diagnostics(stmt)); + stmt_diagnostics.extend(self.pg_query.diagnostics(stmt)); + let ast = self.pg_query.load(stmt); + if let Some(ast) = ast { + stmt_diagnostics.extend(linter.run(&ast).diagnostics); + } stmt_diagnostics .into_iter() diff --git a/crates/pg_workspace_new/src/workspace/server/analyser/lint.rs b/crates/pg_workspace_new/src/workspace/server/analyser/lint.rs new file mode 100644 index 000000000..d5e4cccd9 --- /dev/null +++ b/crates/pg_workspace_new/src/workspace/server/analyser/lint.rs @@ -0,0 +1,61 @@ +use pg_analyse::{AnalysisFilter, AnalyzerConfiguration, AnalyzerOptions, RuleCategories, RuleFilter}; +use pg_configuration::RuleSelector; +use pg_diagnostics::serde::Diagnostic; + +use crate::{configuration::to_analyzer_rules, settings::SettingsHandle}; + +use super::AnalyzerVisitorBuilder; + +#[derive(Debug)] +pub(crate) struct LinterParams<'a> { + pub(crate) settings: &'a SettingsHandle<'a>, + pub(crate) only: Vec, + pub(crate) skip: Vec, + pub(crate) categories: RuleCategories, +} + +pub(crate) struct LinterResults { + pub(crate) diagnostics: Vec, +} + +pub(crate) struct Linter<'a> { + categories: RuleCategories, + options: AnalyzerOptions, + enabled_rules: Vec>, + disabled_rules: Vec>, +} + +impl<'a> Linter<'a> { + pub fn new(params: LinterParams) -> Self { + let (enabled_rules, disabled_rules) = AnalyzerVisitorBuilder::new(params.settings.as_ref()) + .with_linter_rules(¶ms.only, ¶ms.skip) + .finish(); + + let configuration = AnalyzerConfiguration { + rules: to_analyzer_rules(params.settings.as_ref()), + }; + + let options = AnalyzerOptions { configuration }; + + Self { options, enabled_rules, disabled_rules, categories: params.categories } + } + + pub fn run(&self, stmt: &pg_query_ext::NodeEnum) -> LinterResults { + let filter = AnalysisFilter { + categories: self.categories, + enabled_rules: 
Some(self.enabled_rules.as_slice()), + disabled_rules: &self.disabled_rules, + }; + + + let diagnostics = pg_linter::lint(stmt, filter, &self.options); + + LinterResults { + diagnostics: diagnostics + .into_iter() + .map(Diagnostic::new) + .collect(), + } + } +} + diff --git a/crates/pg_workspace_new/src/workspace/server/analyser/mod.rs b/crates/pg_workspace_new/src/workspace/server/analyser/mod.rs new file mode 100644 index 000000000..b99a0a0f3 --- /dev/null +++ b/crates/pg_workspace_new/src/workspace/server/analyser/mod.rs @@ -0,0 +1,135 @@ +use pg_analyse::{GroupCategory, RegistryVisitor, Rule, RuleCategory, RuleFilter, RuleGroup}; +use pg_configuration::RuleSelector; +use rustc_hash::FxHashSet; + +use crate::settings::Settings; + +pub mod lint; + +pub(crate) struct AnalyzerVisitorBuilder<'a, 'b> { + lint: Option>, + settings: &'b Settings, +} + +impl<'a, 'b> AnalyzerVisitorBuilder<'a, 'b> { + pub(crate) fn new(settings: &'b Settings) -> Self { + Self { + settings, + lint: None, + } + } + #[must_use] + pub(crate) fn with_linter_rules( + mut self, + only: &'b [RuleSelector], + skip: &'b [RuleSelector], + ) -> Self { + self.lint = Some(LintVisitor::new(only, skip, self.settings)); + self + } + + #[must_use] + pub(crate) fn finish(self) -> (Vec>, Vec>) { + let mut disabled_rules = vec![]; + let mut enabled_rules = vec![]; + if let Some(mut lint) = self.lint { + pg_linter::visit_registry(&mut lint); + let (linter_enabled_rules, linter_disabled_rules) = lint.finish(); + enabled_rules.extend(linter_enabled_rules); + disabled_rules.extend(linter_disabled_rules); + } + + (enabled_rules, disabled_rules) + } +} + + +/// Type meant to register all the lint rules +#[derive(Debug)] +struct LintVisitor<'a, 'b> { + pub(crate) enabled_rules: FxHashSet>, + pub(crate) disabled_rules: FxHashSet>, + only: &'b [RuleSelector], + skip: &'b [RuleSelector], + settings: &'b Settings, +} + +impl<'a, 'b> LintVisitor<'a, 'b> { + pub(crate) fn new( + only: &'b [RuleSelector], + skip: &'b 
[RuleSelector], + settings: &'b Settings, + ) -> Self { + Self { + enabled_rules: Default::default(), + disabled_rules: Default::default(), + only, + skip, + settings, + } + } + + fn finish(mut self) -> (FxHashSet>, FxHashSet>) { + let has_only_filter = !self.only.is_empty(); + if !has_only_filter { + let enabled_rules = self + .settings.as_linter_rules() + .map(|rules| rules.as_enabled_rules()) + .unwrap_or_default(); + self.enabled_rules.extend(enabled_rules); + } + (self.enabled_rules, self.disabled_rules) + } + + fn push_rule(&mut self) + where + R: Rule + 'static, + { + // Do not report unused suppression comment diagnostics if: + // - it is a syntax-only analyzer pass, or + // - if a single rule is run. + for selector in self.only { + let filter = RuleFilter::from(selector); + if filter.match_rule::() { + self.enabled_rules.insert(filter); + } + } + for selector in self.skip { + let filter = RuleFilter::from(selector); + if filter.match_rule::() { + self.disabled_rules.insert(filter); + } + } + } +} + + +impl<'a, 'b> RegistryVisitor for LintVisitor<'a, 'b> { + fn record_category(&mut self) { + if C::CATEGORY == RuleCategory::Lint { + C::record_groups(self) + } + } + + fn record_group(&mut self) { + for selector in self.only { + if RuleFilter::from(selector).match_group::() { + G::record_rules(self) + } + } + + for selector in self.skip { + if RuleFilter::from(selector).match_group::() { + G::record_rules(self) + } + } + } + + fn record_rule(&mut self) + where + R: Rule + 'static, + { + self.push_rule::() + } +} + diff --git a/crates/pg_workspace_new/src/workspace/server/pg_query.rs b/crates/pg_workspace_new/src/workspace/server/pg_query.rs index 8d433c4d8..8751c17ed 100644 --- a/crates/pg_workspace_new/src/workspace/server/pg_query.rs +++ b/crates/pg_workspace_new/src/workspace/server/pg_query.rs @@ -45,16 +45,10 @@ impl PgQueryStore { diagnostics: DashMap::new(), } } - - pub fn pull_diagnostics(&self, ref_: &StatementRef) -> Vec { - self.diagnostics - 
.get(ref_) - .map_or_else(Vec::new, |err| vec![SDiagnostic::new(err.value().clone())]) - } } impl Store for PgQueryStore { - fn fetch(&self, statement: &StatementRef) -> Option> { + fn load(&self, statement: &StatementRef) -> Option> { self.ast_db.get(statement).map(|x| x.clone()) } @@ -80,4 +74,10 @@ impl Store for PgQueryStore { self.remove_statement(&change.old.ref_); self.add_statement(&change.new_statement()); } + + fn diagnostics(&self, stmt: &StatementRef) -> Vec { + self.diagnostics + .get(stmt) + .map_or_else(Vec::new, |err| vec![SDiagnostic::new(err.value().clone())]) + } } diff --git a/crates/pg_workspace_new/src/workspace/server/store.rs b/crates/pg_workspace_new/src/workspace/server/store.rs index 157c27731..472e1a84d 100644 --- a/crates/pg_workspace_new/src/workspace/server/store.rs +++ b/crates/pg_workspace_new/src/workspace/server/store.rs @@ -6,12 +6,19 @@ use super::{ }; pub(crate) trait Store { + fn diagnostics(&self, _stmt: &StatementRef) -> Vec { + Vec::new() + } + #[allow(dead_code)] - fn fetch(&self, statement: &StatementRef) -> Option>; + fn load(&self, _stmt: &StatementRef) -> Option> { + None + } - fn add_statement(&self, statement: &Statement); + fn add_statement(&self, _stmt: &Statement) {} - fn remove_statement(&self, statement: &StatementRef); + fn remove_statement(&self, _stmt: &StatementRef) {} - fn modify_statement(&self, change: &ChangedStatement); + fn modify_statement(&self, _change: &ChangedStatement) {} } + diff --git a/crates/pg_workspace_new/src/workspace/server/tree_sitter.rs b/crates/pg_workspace_new/src/workspace/server/tree_sitter.rs index 5518535a4..e0e8f1200 100644 --- a/crates/pg_workspace_new/src/workspace/server/tree_sitter.rs +++ b/crates/pg_workspace_new/src/workspace/server/tree_sitter.rs @@ -30,7 +30,7 @@ impl TreeSitterStore { } impl Store for TreeSitterStore { - fn fetch(&self, statement: &StatementRef) -> Option> { + fn load(&self, statement: &StatementRef) -> Option> { self.db.get(statement).map(|x| 
x.clone()) } diff --git a/test.sql b/test.sql index d67be9836..ea357c148 100644 --- a/test.sql +++ b/test.sql @@ -6,4 +6,6 @@ select * from test; alter tqjable test drop column id; +alter table test drop column id; + select lower(); From 1a8adef7e9b4def3979609fc237a9b4800416514 Mon Sep 17 00:00:00 2001 From: psteinroe Date: Fri, 3 Jan 2025 16:11:51 +0100 Subject: [PATCH 17/27] remove a few unnecessary layers of abstraction --- Cargo.lock | 24 +++---- Cargo.toml | 2 +- crates/pg_analyse/Cargo.toml | 2 +- crates/pg_analyse/src/categories.rs | 3 +- crates/pg_analyse/src/context.rs | 10 +-- crates/pg_analyse/src/filter.rs | 2 +- crates/pg_analyse/src/lib.rs | 44 ++---------- crates/pg_analyse/src/macros.rs | 2 +- crates/pg_analyse/src/options.rs | 22 ++---- crates/pg_analyse/src/registry.rs | 12 ++-- crates/{pg_linter => pg_analyser}/Cargo.toml | 2 +- crates/pg_analyser/src/lib.rs | 67 +++++++++++++++++++ crates/{pg_linter => pg_analyser}/src/lint.rs | 0 .../src/lint/safety.rs | 0 .../src/lint/safety/ban_drop_column.rs | 0 .../{pg_linter => pg_analyser}/src/options.rs | 0 .../src/registry.rs | 0 crates/pg_cli/Cargo.toml | 2 +- .../pg_cli/src/execute/process_file/check.rs | 4 +- crates/pg_configuration/Cargo.toml | 2 +- .../src/analyser/linter/rules.rs | 2 +- .../pg_configuration/src/generated/linter.rs | 4 +- crates/pg_configuration/src/lib.rs | 4 +- crates/pg_diagnostics_categories/build.rs | 2 +- crates/pg_linter/src/lib.rs | 32 --------- crates/pg_lsp_new/Cargo.toml | 2 +- crates/pg_workspace_new/Cargo.toml | 6 +- crates/pg_workspace_new/src/configuration.rs | 18 ++--- crates/pg_workspace_new/src/settings.rs | 12 ++-- .../pg_workspace_new/src/workspace/server.rs | 43 +++++++++--- .../server/{analyser/mod.rs => analyser.rs} | 18 ++--- .../src/workspace/server/analyser/lint.rs | 61 ----------------- .../src/workspace/server/store.rs | 1 - xtask/codegen/Cargo.toml | 2 +- xtask/codegen/src/generate_analyser.rs | 12 ++-- xtask/codegen/src/generate_configuration.rs | 12 
++-- .../codegen/src/generate_new_analyser_rule.rs | 2 +- xtask/codegen/src/lib.rs | 2 +- 38 files changed, 189 insertions(+), 246 deletions(-) rename crates/{pg_linter => pg_analyser}/Cargo.toml (91%) create mode 100644 crates/pg_analyser/src/lib.rs rename crates/{pg_linter => pg_analyser}/src/lint.rs (100%) rename crates/{pg_linter => pg_analyser}/src/lint/safety.rs (100%) rename crates/{pg_linter => pg_analyser}/src/lint/safety/ban_drop_column.rs (100%) rename crates/{pg_linter => pg_analyser}/src/options.rs (100%) rename crates/{pg_linter => pg_analyser}/src/registry.rs (100%) delete mode 100644 crates/pg_linter/src/lib.rs rename crates/pg_workspace_new/src/workspace/server/{analyser/mod.rs => analyser.rs} (92%) delete mode 100644 crates/pg_workspace_new/src/workspace/server/analyser/lint.rs diff --git a/Cargo.lock b/Cargo.lock index ac7a3782d..eb0e1c121 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2242,6 +2242,15 @@ dependencies = [ "text-size", ] +[[package]] +name = "pg_analyser" +version = "0.0.0" +dependencies = [ + "pg_analyse", + "pg_console", + "pg_query_ext", +] + [[package]] name = "pg_base_db" version = "0.0.0" @@ -2318,9 +2327,9 @@ dependencies = [ "biome_deserialize_macros", "bpaf", "pg_analyse", + "pg_analyser", "pg_console", "pg_diagnostics", - "pg_linter", "rustc-hash 2.1.0", "schemars", "serde", @@ -2470,15 +2479,6 @@ dependencies = [ "text-size", ] -[[package]] -name = "pg_linter" -version = "0.0.0" -dependencies = [ - "pg_analyse", - "pg_console", - "pg_query_ext", -] - [[package]] name = "pg_lsp" version = "0.0.0" @@ -2705,11 +2705,11 @@ dependencies = [ "futures", "ignore", "pg_analyse", + "pg_analyser", "pg_configuration", "pg_console", "pg_diagnostics", "pg_fs", - "pg_linter", "pg_query_ext", "pg_schema_cache", "pg_statement_splitter", @@ -4818,7 +4818,7 @@ dependencies = [ "biome_string_case", "bpaf", "pg_analyse", - "pg_linter", + "pg_analyser", "proc-macro2", "pulldown-cmark", "quote", diff --git a/Cargo.toml b/Cargo.toml index 
b5b467386..a79687421 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -48,6 +48,7 @@ unicode-width = "0.1.12" # postgres specific crates pg_analyse = { path = "./crates/pg_analyse", version = "0.0.0" } +pg_analyser = { path = "./crates/pg_analyser", version = "0.0.0" } pg_base_db = { path = "./crates/pg_base_db", version = "0.0.0" } pg_cli = { path = "./crates/pg_cli", version = "0.0.0" } pg_commands = { path = "./crates/pg_commands", version = "0.0.0" } @@ -64,7 +65,6 @@ pg_inlay_hints = { path = "./crates/pg_inlay_hints", version = "0.0.0 pg_lexer = { path = "./crates/pg_lexer", version = "0.0.0" } pg_lexer_codegen = { path = "./crates/pg_lexer_codegen", version = "0.0.0" } pg_lint = { path = "./crates/pg_lint", version = "0.0.0" } -pg_linter = { path = "./crates/pg_linter", version = "0.0.0" } pg_lsp_converters = { path = "./crates/pg_lsp_converters", version = "0.0.0" } pg_lsp_new = { path = "./crates/pg_lsp_new", version = "0.0.0" } pg_markup = { path = "./crates/pg_markup", version = "0.0.0" } diff --git a/crates/pg_analyse/Cargo.toml b/crates/pg_analyse/Cargo.toml index 3deee2961..464d5c506 100644 --- a/crates/pg_analyse/Cargo.toml +++ b/crates/pg_analyse/Cargo.toml @@ -22,9 +22,9 @@ rustc-hash = { workspace = true } biome_deserialize = { workspace = true, optional = true } biome_deserialize_macros = { workspace = true, optional = true } enumflags2.workspace = true +schemars = { workspace = true, optional = true } serde = { workspace = true, features = ["derive"], optional = true } text-size.workspace = true -schemars = { workspace = true, optional = true } [features] serde = ["dep:serde", "dep:schemars", "dep:biome_deserialize", "dep:biome_deserialize_macros"] diff --git a/crates/pg_analyse/src/categories.rs b/crates/pg_analyse/src/categories.rs index 79b897578..4f8dc4481 100644 --- a/crates/pg_analyse/src/categories.rs +++ b/crates/pg_analyse/src/categories.rs @@ -184,7 +184,7 @@ pub(crate) enum Categories { } #[derive(Debug, Copy, Clone)] -/// The categories 
supported by the analyzer. +/// The categories supported by the analyser. /// /// The default implementation of this type returns an instance with all the categories. /// @@ -297,7 +297,6 @@ impl schemars::JsonSchema for RuleCategories { } } - #[derive(Debug, Default)] /// A convenient type create a [RuleCategories] type /// diff --git a/crates/pg_analyse/src/context.rs b/crates/pg_analyse/src/context.rs index 53658411d..82e1f7c25 100644 --- a/crates/pg_analyse/src/context.rs +++ b/crates/pg_analyse/src/context.rs @@ -13,14 +13,8 @@ where R: Rule + Sized + 'static, { #[allow(clippy::too_many_arguments)] - pub fn new( - stmt: &'a pg_query_ext::NodeEnum, - options: &'a R::Options, - ) -> Self { - Self { - stmt, - options, - } + pub fn new(stmt: &'a pg_query_ext::NodeEnum, options: &'a R::Options) -> Self { + Self { stmt, options } } /// Returns the group that belongs to the current rule diff --git a/crates/pg_analyse/src/filter.rs b/crates/pg_analyse/src/filter.rs index ad3c2df0d..391831f39 100644 --- a/crates/pg_analyse/src/filter.rs +++ b/crates/pg_analyse/src/filter.rs @@ -12,7 +12,7 @@ pub enum RuleFilter<'a> { Rule(&'a str, &'a str), } -/// Allows filtering the list of rules that will be executed in a run of the analyzer, +/// Allows filtering the list of rules that will be executed in a run of the analyser, /// and at what source code range signals (diagnostics or actions) may be raised #[derive(Debug, Default, Clone, Copy)] pub struct AnalysisFilter<'a> { diff --git a/crates/pg_analyse/src/lib.rs b/crates/pg_analyse/src/lib.rs index 55b9d82cd..ccaf82b7d 100644 --- a/crates/pg_analyse/src/lib.rs +++ b/crates/pg_analyse/src/lib.rs @@ -9,47 +9,15 @@ mod rule; // Re-exported for use in the `declare_group` macro pub use pg_diagnostics::category_concat; -use registry::RegistryRuleParams; - pub use crate::categories::{ - ActionCategory, RefactorKind, RuleCategories, RuleCategory, - SourceActionKind, SUPPRESSION_ACTION_CATEGORY, RuleCategoriesBuilder + ActionCategory, 
RefactorKind, RuleCategories, RuleCategoriesBuilder, RuleCategory, + SourceActionKind, SUPPRESSION_ACTION_CATEGORY, }; pub use crate::filter::{AnalysisFilter, GroupKey, RuleFilter, RuleKey}; -pub use crate::options::{AnalyzerConfiguration, AnalyzerOptions, AnalyzerRules}; -pub use crate::registry::{MetadataRegistry, RegistryVisitor, RuleRegistry, RuleRegistryBuilder}; +pub use crate::options::{AnalyserOptions, AnalyserRules}; +pub use crate::registry::{ + MetadataRegistry, RegistryRuleParams, RegistryVisitor, RuleRegistry, RuleRegistryBuilder, +}; pub use crate::rule::{ GroupCategory, Rule, RuleDiagnostic, RuleGroup, RuleMeta, RuleMetadata, RuleSource, }; - -pub struct Analyzer<'analyzer> { - /// Holds the metadata for all the rules statically known to the analyzer - /// we need this later when we add suppression support - #[allow(dead_code)] - metadata: &'analyzer MetadataRegistry, -} - -pub struct AnalyzerContext<'a> { - pub root: &'a pg_query_ext::NodeEnum, - pub options: &'a AnalyzerOptions, - pub registry: RuleRegistry, -} - -impl<'analyzer> Analyzer<'analyzer> { - /// Construct a new instance of the analyzer with the given rule registry - pub fn new(metadata: &'analyzer MetadataRegistry) -> Self { - Self { metadata } - } - - pub fn run(self, ctx: AnalyzerContext) -> Vec { - let params = RegistryRuleParams { - root: ctx.root, - options: ctx.options, - }; - - ctx.registry - .into_iter() - .flat_map(|rule| (rule.run)(¶ms)) - .collect::>() - } -} diff --git a/crates/pg_analyse/src/macros.rs b/crates/pg_analyse/src/macros.rs index 9a7d62c5f..d97639a9f 100644 --- a/crates/pg_analyse/src/macros.rs +++ b/crates/pg_analyse/src/macros.rs @@ -1,4 +1,4 @@ -/// This macro is used to declare an analyzer rule type, and implement the +/// This macro is used to declare an analyser rule type, and implement the // [RuleMeta] trait for it /// # Example /// diff --git a/crates/pg_analyse/src/options.rs b/crates/pg_analyse/src/options.rs index 1c1b15d66..eaba5d37a 100644 --- 
a/crates/pg_analyse/src/options.rs +++ b/crates/pg_analyse/src/options.rs @@ -28,9 +28,9 @@ impl RuleOptions { /// A convenient new type data structure to insert and get rules #[derive(Debug, Default)] -pub struct AnalyzerRules(FxHashMap); +pub struct AnalyserRules(FxHashMap); -impl AnalyzerRules { +impl AnalyserRules { /// It tracks the options of a specific rule pub fn push_rule(&mut self, rule_key: RuleKey, options: RuleOptions) { self.0.insert(rule_key, options); @@ -42,27 +42,19 @@ impl AnalyzerRules { } } -/// A data structured derived from the `pglsp.toml` file +/// A set of information useful to the analyser infrastructure #[derive(Debug, Default)] -pub struct AnalyzerConfiguration { - /// A list of rules and their options - pub rules: AnalyzerRules, -} - -/// A set of information useful to the analyzer infrastructure -#[derive(Debug, Default)] -pub struct AnalyzerOptions { +pub struct AnalyserOptions { /// A data structured derived from the [`pglsp.toml`] file - pub configuration: AnalyzerConfiguration, + pub rules: AnalyserRules, } -impl AnalyzerOptions { +impl AnalyserOptions { pub fn rule_options(&self) -> Option where R: Rule + 'static, { - self.configuration - .rules + self.rules .get_rule_options::(&RuleKey::rule::()) .cloned() } diff --git a/crates/pg_analyse/src/registry.rs b/crates/pg_analyse/src/registry.rs index 178633315..b80de1cb4 100644 --- a/crates/pg_analyse/src/registry.rs +++ b/crates/pg_analyse/src/registry.rs @@ -4,7 +4,7 @@ use crate::{ context::RuleContext, filter::{AnalysisFilter, GroupKey, RuleKey}, rule::{GroupCategory, Rule, RuleDiagnostic, RuleGroup}, - AnalyzerOptions, + AnalyserOptions, }; pub trait RegistryVisitor { @@ -126,7 +126,7 @@ impl RegistryVisitor for RuleRegistryBuilder<'_> { /// The rule registry holds type-erased instances of all active analysis rules pub struct RuleRegistry { - rules: Vec, + pub rules: Vec, } impl IntoIterator for RuleRegistry { @@ -141,7 +141,7 @@ impl IntoIterator for RuleRegistry { /// Internal 
representation of a single rule in the registry #[derive(Copy, Clone)] pub struct RegistryRule { - pub(crate) run: RuleExecutor, + pub run: RuleExecutor, } impl RuleRegistry { @@ -155,9 +155,9 @@ impl RuleRegistry { } } -pub struct RegistryRuleParams<'analyzer> { - pub root: &'analyzer pg_query_ext::NodeEnum, - pub options: &'analyzer AnalyzerOptions, +pub struct RegistryRuleParams<'a> { + pub root: &'a pg_query_ext::NodeEnum, + pub options: &'a AnalyserOptions, } /// Executor for rule as a generic function pointer diff --git a/crates/pg_linter/Cargo.toml b/crates/pg_analyser/Cargo.toml similarity index 91% rename from crates/pg_linter/Cargo.toml rename to crates/pg_analyser/Cargo.toml index 0d1cc9710..9c56fbed7 100644 --- a/crates/pg_linter/Cargo.toml +++ b/crates/pg_analyser/Cargo.toml @@ -7,7 +7,7 @@ edition.workspace = true homepage.workspace = true keywords.workspace = true license.workspace = true -name = "pg_linter" +name = "pg_analyser" repository.workspace = true version = "0.0.0" diff --git a/crates/pg_analyser/src/lib.rs b/crates/pg_analyser/src/lib.rs new file mode 100644 index 000000000..d6c14d620 --- /dev/null +++ b/crates/pg_analyser/src/lib.rs @@ -0,0 +1,67 @@ +use std::{ops::Deref, sync::LazyLock}; + +use pg_analyse::{ + AnalyserOptions, AnalysisFilter, MetadataRegistry, RegistryRuleParams, RuleDiagnostic, + RuleRegistry, +}; +pub use registry::visit_registry; + +mod lint; +pub mod options; +mod registry; + +pub static METADATA: LazyLock = LazyLock::new(|| { + let mut metadata = MetadataRegistry::default(); + visit_registry(&mut metadata); + metadata +}); + +/// Main entry point to the analyser. 
+pub struct Analyser<'a> { + /// Holds the metadata for all the rules statically known to the analyser + /// we need this later when we add suppression support + #[allow(dead_code)] + metadata: &'a MetadataRegistry, + + /// Holds all rule options + options: &'a AnalyserOptions, + + /// Holds all rules + registry: RuleRegistry, +} + +pub struct AnalyserContext<'a> { + pub root: &'a pg_query_ext::NodeEnum, +} + +pub struct AnalyserConfig<'a> { + pub options: &'a AnalyserOptions, + pub filter: AnalysisFilter<'a>, +} + +impl<'a> Analyser<'a> { + pub fn new(conf: AnalyserConfig<'a>) -> Self { + let mut builder = RuleRegistry::builder(&conf.filter); + visit_registry(&mut builder); + let registry = builder.build(); + + Self { + metadata: METADATA.deref(), + registry, + options: conf.options, + } + } + + pub fn run(&self, ctx: AnalyserContext) -> Vec { + let params = RegistryRuleParams { + root: ctx.root, + options: self.options, + }; + + self.registry + .rules + .iter() + .flat_map(|rule| (rule.run)(¶ms)) + .collect::>() + } +} diff --git a/crates/pg_linter/src/lint.rs b/crates/pg_analyser/src/lint.rs similarity index 100% rename from crates/pg_linter/src/lint.rs rename to crates/pg_analyser/src/lint.rs diff --git a/crates/pg_linter/src/lint/safety.rs b/crates/pg_analyser/src/lint/safety.rs similarity index 100% rename from crates/pg_linter/src/lint/safety.rs rename to crates/pg_analyser/src/lint/safety.rs diff --git a/crates/pg_linter/src/lint/safety/ban_drop_column.rs b/crates/pg_analyser/src/lint/safety/ban_drop_column.rs similarity index 100% rename from crates/pg_linter/src/lint/safety/ban_drop_column.rs rename to crates/pg_analyser/src/lint/safety/ban_drop_column.rs diff --git a/crates/pg_linter/src/options.rs b/crates/pg_analyser/src/options.rs similarity index 100% rename from crates/pg_linter/src/options.rs rename to crates/pg_analyser/src/options.rs diff --git a/crates/pg_linter/src/registry.rs b/crates/pg_analyser/src/registry.rs similarity index 100% rename 
from crates/pg_linter/src/registry.rs rename to crates/pg_analyser/src/registry.rs diff --git a/crates/pg_cli/Cargo.toml b/crates/pg_cli/Cargo.toml index 3279b3514..6abad15ac 100644 --- a/crates/pg_cli/Cargo.toml +++ b/crates/pg_cli/Cargo.toml @@ -18,9 +18,9 @@ crossbeam = { workspace = true } dashmap = "5.5.3" hdrhistogram = { version = "7.5.4", default-features = false } path-absolutize = { version = "3.1.1", optional = false, features = ["use_unix_paths_on_wasm"] } +pg_analyse = { workspace = true } pg_configuration = { workspace = true } pg_console = { workspace = true } -pg_analyse = { workspace = true } pg_diagnostics = { workspace = true } pg_flags = { workspace = true } pg_fs = { workspace = true } diff --git a/crates/pg_cli/src/execute/process_file/check.rs b/crates/pg_cli/src/execute/process_file/check.rs index 134c5c4fb..fa5b522b5 100644 --- a/crates/pg_cli/src/execute/process_file/check.rs +++ b/crates/pg_cli/src/execute/process_file/check.rs @@ -31,9 +31,7 @@ pub(crate) fn check_with_guard<'ctx>( let pull_diagnostics_result = workspace_file .guard() .pull_diagnostics( - RuleCategoriesBuilder::default() - .all() - .build(), + RuleCategoriesBuilder::default().all().build(), max_diagnostics, only, skip, diff --git a/crates/pg_configuration/Cargo.toml b/crates/pg_configuration/Cargo.toml index c4a89a695..63e2f773d 100644 --- a/crates/pg_configuration/Cargo.toml +++ b/crates/pg_configuration/Cargo.toml @@ -16,9 +16,9 @@ biome_deserialize = { workspace = true } biome_deserialize_macros = { workspace = true } bpaf = { workspace = true } pg_analyse = { workspace = true } +pg_analyser = { workspace = true } pg_console = { workspace = true } pg_diagnostics = { workspace = true } -pg_linter = { workspace = true } rustc-hash = { workspace = true } schemars = { workspace = true, features = ["indexmap1"], optional = true } serde = { workspace = true, features = ["derive"] } diff --git a/crates/pg_configuration/src/analyser/linter/rules.rs 
b/crates/pg_configuration/src/analyser/linter/rules.rs index 918518e4f..cbd875ad8 100644 --- a/crates/pg_configuration/src/analyser/linter/rules.rs +++ b/crates/pg_configuration/src/analyser/linter/rules.rs @@ -145,7 +145,7 @@ pub struct Safety { pub all: Option, #[doc = "Dropping a column may break existing clients."] #[serde(skip_serializing_if = "Option::is_none")] - pub ban_drop_column: Option>, + pub ban_drop_column: Option>, } impl Safety { const GROUP_NAME: &'static str = "safety"; diff --git a/crates/pg_configuration/src/generated/linter.rs b/crates/pg_configuration/src/generated/linter.rs index 324fe0635..ecce64efd 100644 --- a/crates/pg_configuration/src/generated/linter.rs +++ b/crates/pg_configuration/src/generated/linter.rs @@ -1,11 +1,11 @@ //! Generated file, do not edit by hand, see `xtask/codegen` use crate::analyser::linter::*; -use pg_analyse::{AnalyzerRules, MetadataRegistry}; +use pg_analyse::{AnalyserRules, MetadataRegistry}; pub fn push_to_analyser_rules( rules: &Rules, metadata: &MetadataRegistry, - analyser_rules: &mut AnalyzerRules, + analyser_rules: &mut AnalyserRules, ) { if let Some(rules) = rules.safety.as_ref() { for rule_name in Safety::GROUP_RULES { diff --git a/crates/pg_configuration/src/lib.rs b/crates/pg_configuration/src/lib.rs index b053d89b1..6d2e5f600 100644 --- a/crates/pg_configuration/src/lib.rs +++ b/crates/pg_configuration/src/lib.rs @@ -18,8 +18,8 @@ pub use crate::generated::push_to_analyser_rules; use crate::vcs::{partial_vcs_configuration, PartialVcsConfiguration, VcsConfiguration}; pub use analyser::{ partial_linter_configuration, LinterConfiguration, PartialLinterConfiguration, - RuleConfiguration, RuleFixConfiguration, RulePlainConfiguration, RuleWithFixOptions, - RuleWithOptions, Rules, RuleSelector + RuleConfiguration, RuleFixConfiguration, RulePlainConfiguration, RuleSelector, + RuleWithFixOptions, RuleWithOptions, Rules, }; use biome_deserialize_macros::Partial; use bpaf::Bpaf; diff --git 
a/crates/pg_diagnostics_categories/build.rs b/crates/pg_diagnostics_categories/build.rs index d9fe0a9cd..dc2636642 100644 --- a/crates/pg_diagnostics_categories/build.rs +++ b/crates/pg_diagnostics_categories/build.rs @@ -104,7 +104,7 @@ pub fn main() -> io::Result<()> { /// The `category_concat!` macro is a variant of `category!` using a /// slightly different syntax, for use in the `declare_group` and - /// `declare_rule` macros in the analyzer + /// `declare_rule` macros in the analyser #[macro_export] macro_rules! category_concat { #( #concat_macro_arms )* diff --git a/crates/pg_linter/src/lib.rs b/crates/pg_linter/src/lib.rs deleted file mode 100644 index c016fb228..000000000 --- a/crates/pg_linter/src/lib.rs +++ /dev/null @@ -1,32 +0,0 @@ -use std::{ops::Deref, sync::LazyLock}; - -use pg_analyse::{AnalysisFilter, AnalyzerOptions, MetadataRegistry, RuleDiagnostic, RuleRegistry}; -pub use registry::visit_registry; - -mod lint; -pub mod options; -mod registry; - -pub static METADATA: LazyLock = LazyLock::new(|| { - let mut metadata = MetadataRegistry::default(); - visit_registry(&mut metadata); - metadata -}); - -pub fn lint( - root: &pg_query_ext::NodeEnum, - filter: AnalysisFilter, - options: &AnalyzerOptions, -) -> Vec { - let mut builder = RuleRegistry::builder(&filter); - visit_registry(&mut builder); - let registry = builder.build(); - - let analyser = pg_analyse::Analyzer::new(METADATA.deref()); - - analyser.run(pg_analyse::AnalyzerContext { - root, - options, - registry, - }) -} diff --git a/crates/pg_lsp_new/Cargo.toml b/crates/pg_lsp_new/Cargo.toml index f98aa9071..7b71c1758 100644 --- a/crates/pg_lsp_new/Cargo.toml +++ b/crates/pg_lsp_new/Cargo.toml @@ -15,10 +15,10 @@ version = "0.0.0" anyhow = { workspace = true } biome_deserialize = { workspace = true } futures = "0.3.31" +pg_analyse = { workspace = true } pg_configuration = { workspace = true } pg_console = { workspace = true } pg_diagnostics = { workspace = true } -pg_analyse = { workspace = true 
} pg_fs = { workspace = true } pg_lsp_converters = { workspace = true } pg_text_edit = { workspace = true } diff --git a/crates/pg_workspace_new/Cargo.toml b/crates/pg_workspace_new/Cargo.toml index 51bb03688..bb2bd7c88 100644 --- a/crates/pg_workspace_new/Cargo.toml +++ b/crates/pg_workspace_new/Cargo.toml @@ -16,16 +16,16 @@ biome_deserialize = "0.6.0" dashmap = "5.5.3" futures = "0.3.31" ignore = { workspace = true } +pg_analyse = { workspace = true, features = ["serde"] } +pg_analyser = { workspace = true } pg_configuration = { workspace = true } -pg_analyse = { workspace = true, features = ["serde"] } pg_console = { workspace = true } pg_diagnostics = { workspace = true } -pg_linter = { workspace = true } pg_fs = { workspace = true, features = ["serde"] } -rustc-hash = { workspace = true } pg_query_ext = { workspace = true } pg_schema_cache = { workspace = true } pg_statement_splitter = { workspace = true } +rustc-hash = { workspace = true } serde = { workspace = true, features = ["derive"] } serde_json = { workspace = true, features = ["raw_value"] } sqlx.workspace = true diff --git a/crates/pg_workspace_new/src/configuration.rs b/crates/pg_workspace_new/src/configuration.rs index 671c487da..481e99817 100644 --- a/crates/pg_workspace_new/src/configuration.rs +++ b/crates/pg_workspace_new/src/configuration.rs @@ -1,10 +1,13 @@ use std::{ - io::ErrorKind, ops::Deref, path::{Path, PathBuf} + io::ErrorKind, + ops::Deref, + path::{Path, PathBuf}, }; -use pg_analyse::AnalyzerRules; +use pg_analyse::AnalyserRules; use pg_configuration::{ - push_to_analyser_rules, ConfigurationDiagnostic, ConfigurationPathHint, ConfigurationPayload, PartialConfiguration + push_to_analyser_rules, ConfigurationDiagnostic, ConfigurationPathHint, ConfigurationPayload, + PartialConfiguration, }; use pg_fs::{AutoSearchResult, ConfigName, FileSystem, OpenOptions}; @@ -176,11 +179,10 @@ pub fn create_config( } /// Returns the rules applied to a specific [Path], given the [Settings] -pub fn 
to_analyzer_rules(settings: &Settings) -> AnalyzerRules { - let mut analyzer_rules = AnalyzerRules::default(); +pub fn to_analyser_rules(settings: &Settings) -> AnalyserRules { + let mut analyser_rules = AnalyserRules::default(); if let Some(rules) = settings.linter.rules.as_ref() { - push_to_analyser_rules(rules, pg_linter::METADATA.deref(), &mut analyzer_rules); + push_to_analyser_rules(rules, pg_analyser::METADATA.deref(), &mut analyser_rules); } - analyzer_rules + analyser_rules } - diff --git a/crates/pg_workspace_new/src/settings.rs b/crates/pg_workspace_new/src/settings.rs index 7e878b393..2eed36ac2 100644 --- a/crates/pg_workspace_new/src/settings.rs +++ b/crates/pg_workspace_new/src/settings.rs @@ -1,11 +1,15 @@ use biome_deserialize::StringSet; use std::{ - borrow::Cow, num::NonZeroU64, path::{Path, PathBuf}, sync::{RwLock, RwLockReadGuard, RwLockWriteGuard} + borrow::Cow, + num::NonZeroU64, + path::{Path, PathBuf}, + sync::{RwLock, RwLockReadGuard, RwLockWriteGuard}, }; use ignore::gitignore::{Gitignore, GitignoreBuilder}; use pg_configuration::{ - database::PartialDatabaseConfiguration, diagnostics::InvalidIgnorePattern, files::FilesConfiguration, ConfigurationDiagnostic, LinterConfiguration, PartialConfiguration + database::PartialDatabaseConfiguration, diagnostics::InvalidIgnorePattern, + files::FilesConfiguration, ConfigurationDiagnostic, LinterConfiguration, PartialConfiguration, }; use pg_fs::FileSystem; @@ -103,9 +107,7 @@ impl Settings { } /// Returns linter rules. 
- pub fn as_linter_rules( - &self - ) -> Option> { + pub fn as_linter_rules(&self) -> Option> { self.linter.rules.as_ref().map(Cow::Borrowed) } } diff --git a/crates/pg_workspace_new/src/workspace/server.rs b/crates/pg_workspace_new/src/workspace/server.rs index 148fbee61..1d3b5c72b 100644 --- a/crates/pg_workspace_new/src/workspace/server.rs +++ b/crates/pg_workspace_new/src/workspace/server.rs @@ -1,9 +1,11 @@ use std::{fs, future::Future, panic::RefUnwindSafe, path::Path, sync::RwLock}; -use analyser::lint::Linter; +use analyser::AnalyserVisitorBuilder; use change::StatementChange; use dashmap::{DashMap, DashSet}; use document::{Document, StatementRef}; +use pg_analyse::{AnalyserOptions, AnalysisFilter}; +use pg_analyser::{Analyser, AnalyserConfig, AnalyserContext}; use pg_diagnostics::{serde::Diagnostic as SDiagnostic, Diagnostic, DiagnosticExt, Severity}; use pg_fs::{ConfigName, PgLspPath}; use pg_query::PgQueryStore; @@ -16,6 +18,7 @@ use tracing::info; use tree_sitter::TreeSitterStore; use crate::{ + configuration::to_analyser_rules, settings::{Settings, SettingsHandle, SettingsHandleMut}, workspace::PullDiagnosticsResult, WorkspaceError, @@ -26,11 +29,11 @@ use super::{ Workspace, }; +mod analyser; mod change; mod document; mod pg_query; mod store; -mod analyser; mod tree_sitter; /// Simple helper to manage the db connection and the associated connection string @@ -314,14 +317,26 @@ impl Workspace for WorkspaceServer { .get(¶ms.path) .ok_or(WorkspaceError::not_found())?; - let linter = Linter::new( - analyser::lint::LinterParams { - settings: &self.settings(), - only: params.only, - skip: params.skip, - categories: Default::default(), - } - ); + // create analyser for this run + // first, collect enabled and disabled rules from the workspace settings + let (enabled_rules, disabled_rules) = AnalyserVisitorBuilder::new(self.settings().as_ref()) + .with_linter_rules(¶ms.only, ¶ms.skip) + .finish(); + // then, build a map that contains all options + let options 
= AnalyserOptions { + rules: to_analyser_rules(self.settings().as_ref()), + }; + // next, build the analysis filter which will be used to match rules + let filter = AnalysisFilter { + categories: params.categories, + enabled_rules: Some(enabled_rules.as_slice()), + disabled_rules: &disabled_rules, + }; + // finally, create the analyser that will be used during this run + let analyser = Analyser::new(AnalyserConfig { + options: &options, + filter, + }); let diagnostics: Vec = doc .statement_refs_with_ranges() @@ -332,7 +347,13 @@ impl Workspace for WorkspaceServer { stmt_diagnostics.extend(self.pg_query.diagnostics(stmt)); let ast = self.pg_query.load(stmt); if let Some(ast) = ast { - stmt_diagnostics.extend(linter.run(&ast).diagnostics); + stmt_diagnostics.extend( + analyser + .run(AnalyserContext { root: &ast }) + .into_iter() + .map(SDiagnostic::new) + .collect::>(), + ); } stmt_diagnostics diff --git a/crates/pg_workspace_new/src/workspace/server/analyser/mod.rs b/crates/pg_workspace_new/src/workspace/server/analyser.rs similarity index 92% rename from crates/pg_workspace_new/src/workspace/server/analyser/mod.rs rename to crates/pg_workspace_new/src/workspace/server/analyser.rs index b99a0a0f3..7f6aa443b 100644 --- a/crates/pg_workspace_new/src/workspace/server/analyser/mod.rs +++ b/crates/pg_workspace_new/src/workspace/server/analyser.rs @@ -4,14 +4,12 @@ use rustc_hash::FxHashSet; use crate::settings::Settings; -pub mod lint; - -pub(crate) struct AnalyzerVisitorBuilder<'a, 'b> { +pub(crate) struct AnalyserVisitorBuilder<'a, 'b> { lint: Option>, settings: &'b Settings, } -impl<'a, 'b> AnalyzerVisitorBuilder<'a, 'b> { +impl<'a, 'b> AnalyserVisitorBuilder<'a, 'b> { pub(crate) fn new(settings: &'b Settings) -> Self { Self { settings, @@ -33,7 +31,7 @@ impl<'a, 'b> AnalyzerVisitorBuilder<'a, 'b> { let mut disabled_rules = vec![]; let mut enabled_rules = vec![]; if let Some(mut lint) = self.lint { - pg_linter::visit_registry(&mut lint); + 
pg_analyser::visit_registry(&mut lint); let (linter_enabled_rules, linter_disabled_rules) = lint.finish(); enabled_rules.extend(linter_enabled_rules); disabled_rules.extend(linter_disabled_rules); @@ -43,7 +41,6 @@ impl<'a, 'b> AnalyzerVisitorBuilder<'a, 'b> { } } - /// Type meant to register all the lint rules #[derive(Debug)] struct LintVisitor<'a, 'b> { @@ -73,7 +70,8 @@ impl<'a, 'b> LintVisitor<'a, 'b> { let has_only_filter = !self.only.is_empty(); if !has_only_filter { let enabled_rules = self - .settings.as_linter_rules() + .settings + .as_linter_rules() .map(|rules| rules.as_enabled_rules()) .unwrap_or_default(); self.enabled_rules.extend(enabled_rules); @@ -85,9 +83,7 @@ impl<'a, 'b> LintVisitor<'a, 'b> { where R: Rule + 'static, { - // Do not report unused suppression comment diagnostics if: - // - it is a syntax-only analyzer pass, or - // - if a single rule is run. + // Do not report unused suppression comment diagnostics if a single rule is run. for selector in self.only { let filter = RuleFilter::from(selector); if filter.match_rule::() { @@ -103,7 +99,6 @@ impl<'a, 'b> LintVisitor<'a, 'b> { } } - impl<'a, 'b> RegistryVisitor for LintVisitor<'a, 'b> { fn record_category(&mut self) { if C::CATEGORY == RuleCategory::Lint { @@ -132,4 +127,3 @@ impl<'a, 'b> RegistryVisitor for LintVisitor<'a, 'b> { self.push_rule::() } } - diff --git a/crates/pg_workspace_new/src/workspace/server/analyser/lint.rs b/crates/pg_workspace_new/src/workspace/server/analyser/lint.rs deleted file mode 100644 index d5e4cccd9..000000000 --- a/crates/pg_workspace_new/src/workspace/server/analyser/lint.rs +++ /dev/null @@ -1,61 +0,0 @@ -use pg_analyse::{AnalysisFilter, AnalyzerConfiguration, AnalyzerOptions, RuleCategories, RuleFilter}; -use pg_configuration::RuleSelector; -use pg_diagnostics::serde::Diagnostic; - -use crate::{configuration::to_analyzer_rules, settings::SettingsHandle}; - -use super::AnalyzerVisitorBuilder; - -#[derive(Debug)] -pub(crate) struct LinterParams<'a> { - 
pub(crate) settings: &'a SettingsHandle<'a>, - pub(crate) only: Vec, - pub(crate) skip: Vec, - pub(crate) categories: RuleCategories, -} - -pub(crate) struct LinterResults { - pub(crate) diagnostics: Vec, -} - -pub(crate) struct Linter<'a> { - categories: RuleCategories, - options: AnalyzerOptions, - enabled_rules: Vec>, - disabled_rules: Vec>, -} - -impl<'a> Linter<'a> { - pub fn new(params: LinterParams) -> Self { - let (enabled_rules, disabled_rules) = AnalyzerVisitorBuilder::new(params.settings.as_ref()) - .with_linter_rules(¶ms.only, ¶ms.skip) - .finish(); - - let configuration = AnalyzerConfiguration { - rules: to_analyzer_rules(params.settings.as_ref()), - }; - - let options = AnalyzerOptions { configuration }; - - Self { options, enabled_rules, disabled_rules, categories: params.categories } - } - - pub fn run(&self, stmt: &pg_query_ext::NodeEnum) -> LinterResults { - let filter = AnalysisFilter { - categories: self.categories, - enabled_rules: Some(self.enabled_rules.as_slice()), - disabled_rules: &self.disabled_rules, - }; - - - let diagnostics = pg_linter::lint(stmt, filter, &self.options); - - LinterResults { - diagnostics: diagnostics - .into_iter() - .map(Diagnostic::new) - .collect(), - } - } -} - diff --git a/crates/pg_workspace_new/src/workspace/server/store.rs b/crates/pg_workspace_new/src/workspace/server/store.rs index 472e1a84d..0891a974b 100644 --- a/crates/pg_workspace_new/src/workspace/server/store.rs +++ b/crates/pg_workspace_new/src/workspace/server/store.rs @@ -21,4 +21,3 @@ pub(crate) trait Store { fn modify_statement(&self, _change: &ChangedStatement) {} } - diff --git a/xtask/codegen/Cargo.toml b/xtask/codegen/Cargo.toml index 17938fef0..d571f7dfb 100644 --- a/xtask/codegen/Cargo.toml +++ b/xtask/codegen/Cargo.toml @@ -9,7 +9,7 @@ anyhow = { workspace = true } biome_string_case = { workspace = true } bpaf = { workspace = true, features = ["derive"] } pg_analyse = { workspace = true } -pg_linter = { workspace = true } +pg_analyser = { 
workspace = true } proc-macro2 = { workspace = true, features = ["span-locations"] } pulldown-cmark = { version = "0.12.2" } quote = "1.0.36" diff --git a/xtask/codegen/src/generate_analyser.rs b/xtask/codegen/src/generate_analyser.rs index 6692094c8..6ea909076 100644 --- a/xtask/codegen/src/generate_analyser.rs +++ b/xtask/codegen/src/generate_analyser.rs @@ -13,13 +13,13 @@ pub fn generate_analyser() -> Result<()> { } fn generate_linter() -> Result<()> { - let base_path = project_root().join("crates/pg_linter/src"); - let mut analyzers = BTreeMap::new(); - generate_category("lint", &mut analyzers, &base_path)?; + let base_path = project_root().join("crates/pg_analyser/src"); + let mut analysers = BTreeMap::new(); + generate_category("lint", &mut analysers, &base_path)?; generate_options(&base_path)?; - update_linter_registry_builder(analyzers) + update_linter_registry_builder(analysers) } fn generate_options(base_path: &Path) -> Result<()> { @@ -107,7 +107,7 @@ fn generate_category( let kind = match name { "lint" => format_ident!("Lint"), - _ => panic!("unimplemented analyzer category {name:?}"), + _ => panic!("unimplemented analyser category {name:?}"), }; entries.insert( @@ -200,7 +200,7 @@ fn generate_group(category: &'static str, group: &str, base_path: &Path) -> Resu } fn update_linter_registry_builder(rules: BTreeMap<&'static str, TokenStream>) -> Result<()> { - let path = project_root().join("crates/pg_linter/src/registry.rs"); + let path = project_root().join("crates/pg_analyser/src/registry.rs"); let categories = rules.into_values(); diff --git a/xtask/codegen/src/generate_configuration.rs b/xtask/codegen/src/generate_configuration.rs index 5eb67a9aa..ba54a3267 100644 --- a/xtask/codegen/src/generate_configuration.rs +++ b/xtask/codegen/src/generate_configuration.rs @@ -36,7 +36,7 @@ pub fn generate_rules_configuration(mode: Mode) -> Result<()> { let push_rules_directory = project_root().join("crates/pg_configuration/src/generated"); let mut lint_visitor 
= LintRulesVisitor::default(); - pg_linter::visit_registry(&mut lint_visitor); + pg_analyser::visit_registry(&mut lint_visitor); generate_for_groups( lint_visitor.groups, @@ -380,12 +380,12 @@ fn generate_for_groups( RuleCategory::Lint => { quote! { use crate::analyser::linter::*; - use pg_analyse::{AnalyzerRules, MetadataRegistry}; + use pg_analyse::{AnalyserRules, MetadataRegistry}; pub fn push_to_analyser_rules( rules: &Rules, metadata: &MetadataRegistry, - analyser_rules: &mut AnalyzerRules, + analyser_rules: &mut AnalyserRules, ) { #( if let Some(rules) = rules.#group_idents.as_ref() { @@ -404,12 +404,12 @@ fn generate_for_groups( RuleCategory::Action => { quote! { use crate::analyser::assists::*; - use pg_analyse::{AnalyzerRules, MetadataRegistry}; + use pg_analyse::{AnalyserRules, MetadataRegistry}; pub fn push_to_analyser_assists( rules: &Actions, metadata: &MetadataRegistry, - analyser_rules: &mut AnalyzerRules, + analyser_rules: &mut AnalyserRules, ) { #( if let Some(rules) = rules.#group_idents.as_ref() { @@ -534,7 +534,7 @@ fn generate_group_struct( #rule }); let rule_option_type = quote! { - pg_linter::options::#rule_name + pg_analyser::options::#rule_name }; let rule_option = if kind == RuleCategory::Action { quote! 
{ Option<#rule_config_type<#rule_option_type>> } diff --git a/xtask/codegen/src/generate_new_analyser_rule.rs b/xtask/codegen/src/generate_new_analyser_rule.rs index 67f6da030..8760bc606 100644 --- a/xtask/codegen/src/generate_new_analyser_rule.rs +++ b/xtask/codegen/src/generate_new_analyser_rule.rs @@ -75,7 +75,7 @@ impl Rule for {rule_name_upper_camel} {{ pub fn generate_new_analyser_rule(category: Category, rule_name: &str, group: &str) { let rule_name_camel = Case::Camel.convert(rule_name); - let crate_folder = project_root().join("crates/pg_linter"); + let crate_folder = project_root().join("crates/pg_analyser"); let rule_folder = match &category { Category::Lint => crate_folder.join(format!("src/lint/{group}")), }; diff --git a/xtask/codegen/src/lib.rs b/xtask/codegen/src/lib.rs index 3a0542357..c24bf7c52 100644 --- a/xtask/codegen/src/lib.rs +++ b/xtask/codegen/src/lib.rs @@ -54,7 +54,7 @@ pub fn to_capitalized(s: &str) -> String { #[derive(Debug, Clone, Bpaf)] #[bpaf(options)] pub enum TaskCommand { - /// Generate factory functions for the analyzer and the configuration of the analysers + /// Generate factory functions for the analyser and the configuration of the analysers #[bpaf(command)] Analyser, /// Generate the part of the configuration that depends on some metadata From a3be0d0f516fbca8a3943aaefa22d843f7efec85 Mon Sep 17 00:00:00 2001 From: psteinroe Date: Fri, 3 Jan 2025 18:29:33 +0000 Subject: [PATCH 18/27] test and fix options --- Cargo.lock | 1 + crates/pg_analyse/src/rule.rs | 1 - crates/pg_analyser/Cargo.toml | 1 + .../src/lint/safety/ban_drop_column.rs | 11 +++++++-- crates/pg_workspace_new/src/settings.rs | 15 ++++++++++++ .../pg_workspace_new/src/workspace/server.rs | 24 ++++++++++++++++--- pglsp.toml | 12 ++++++++++ 7 files changed, 59 insertions(+), 6 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index eb0e1c121..d16f861ec 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2249,6 +2249,7 @@ dependencies = [ "pg_analyse", "pg_console", 
"pg_query_ext", + "serde", ] [[package]] diff --git a/crates/pg_analyse/src/rule.rs b/crates/pg_analyse/src/rule.rs index 34827f7cc..f159861db 100644 --- a/crates/pg_analyse/src/rule.rs +++ b/crates/pg_analyse/src/rule.rs @@ -95,7 +95,6 @@ pub trait Rule: RuleMeta + Sized { /// Diagnostic object returned by a single analysis rule #[derive(Debug, Diagnostic)] -#[diagnostic(severity = Warning)] pub struct RuleDiagnostic { #[category] pub(crate) category: &'static Category, diff --git a/crates/pg_analyser/Cargo.toml b/crates/pg_analyser/Cargo.toml index 9c56fbed7..6e3ce4c5e 100644 --- a/crates/pg_analyser/Cargo.toml +++ b/crates/pg_analyser/Cargo.toml @@ -15,3 +15,4 @@ version = "0.0.0" pg_analyse = { workspace = true } pg_console = { workspace = true } pg_query_ext = { workspace = true } +serde = { workspace = true } diff --git a/crates/pg_analyser/src/lint/safety/ban_drop_column.rs b/crates/pg_analyser/src/lint/safety/ban_drop_column.rs index fe081d00e..9f20227d0 100644 --- a/crates/pg_analyser/src/lint/safety/ban_drop_column.rs +++ b/crates/pg_analyser/src/lint/safety/ban_drop_column.rs @@ -1,6 +1,13 @@ use pg_analyse::{context::RuleContext, declare_lint_rule, Rule, RuleDiagnostic, RuleSource}; use pg_console::markup; +#[derive(Clone, Debug, Default, Eq, PartialEq, serde::Deserialize, serde::Serialize)] +// #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] +#[serde(rename_all = "camelCase", deny_unknown_fields, default)] +pub struct Options { + test: String, +} + declare_lint_rule! { /// Dropping a column may break existing clients. /// @@ -25,7 +32,7 @@ declare_lint_rule! { } impl Rule for BanDropColumn { - type Options = (); + type Options = Options; fn run(ctx: &RuleContext) -> Vec { let mut diagnostics = Vec::new(); @@ -40,7 +47,7 @@ impl Rule for BanDropColumn { markup! { "Dropping a column may break existing clients." 
}, - ).detail(None, "You can leave the column as nullable or delete the column once queries no longer select or modify the column.")); + ).detail(None, format!("[{}] You can leave the column as nullable or delete the column once queries no longer select or modify the column.", ctx.options().test))); } } } diff --git a/crates/pg_workspace_new/src/settings.rs b/crates/pg_workspace_new/src/settings.rs index 2eed36ac2..1950ef8d2 100644 --- a/crates/pg_workspace_new/src/settings.rs +++ b/crates/pg_workspace_new/src/settings.rs @@ -1,4 +1,5 @@ use biome_deserialize::StringSet; +use pg_diagnostics::Category; use std::{ borrow::Cow, num::NonZeroU64, @@ -110,6 +111,20 @@ impl Settings { pub fn as_linter_rules(&self) -> Option> { self.linter.rules.as_ref().map(Cow::Borrowed) } + + /// It retrieves the severity based on the `code` of the rule and the current configuration. + /// + /// The code of the has the following pattern: `{group}/{rule_name}`. + /// + /// It returns [None] if the `code` doesn't match any rule. 
+ pub fn get_severity_from_rule_code(&self, code: &Category) -> Option { + let rules = self.linter.rules.as_ref(); + if let Some(rules) = rules { + rules.get_severity_from_code(code) + } else { + None + } + } } fn to_linter_settings( diff --git a/crates/pg_workspace_new/src/workspace/server.rs b/crates/pg_workspace_new/src/workspace/server.rs index 1d3b5c72b..2f70fbff3 100644 --- a/crates/pg_workspace_new/src/workspace/server.rs +++ b/crates/pg_workspace_new/src/workspace/server.rs @@ -317,14 +317,16 @@ impl Workspace for WorkspaceServer { .get(¶ms.path) .ok_or(WorkspaceError::not_found())?; + let settings = self.settings(); + // create analyser for this run // first, collect enabled and disabled rules from the workspace settings - let (enabled_rules, disabled_rules) = AnalyserVisitorBuilder::new(self.settings().as_ref()) + let (enabled_rules, disabled_rules) = AnalyserVisitorBuilder::new(settings.as_ref()) .with_linter_rules(¶ms.only, ¶ms.skip) .finish(); // then, build a map that contains all options let options = AnalyserOptions { - rules: to_analyser_rules(self.settings().as_ref()), + rules: to_analyser_rules(settings.as_ref()), }; // next, build the analysis filter which will be used to match rules let filter = AnalysisFilter { @@ -359,9 +361,25 @@ impl Workspace for WorkspaceServer { stmt_diagnostics .into_iter() .map(|d| { + // We do now check if the severity of the diagnostics should be changed. + // The configuration allows to change the severity of the diagnostics emitted by rules. 
+ let severity = d + .category() + .filter(|category| category.name().starts_with("lint/")) + .map_or_else( + || d.severity(), + |category| { + settings + .as_ref() + .get_severity_from_rule_code(category) + .unwrap_or(Severity::Warning) + }, + ); + SDiagnostic::new( d.with_file_path(params.path.as_path().display().to_string()) - .with_file_span(r), + .with_file_span(r) + .with_severity(severity), ) }) .collect::>() diff --git a/pglsp.toml b/pglsp.toml index 6055ed23e..7e6bcb8ee 100644 --- a/pglsp.toml +++ b/pglsp.toml @@ -12,3 +12,15 @@ port = 54322 username = "postgres" password = "postgres" database = "postgres" + +[linter] +enabled = true + +[linter.rules] +recommended = true + +[linter.rules.safety.banDropColumn] +level = "warn" +options = { test = "HELLO" } + + From 24f6e40f03485c90b22306d3a7e4f542820ebaa1 Mon Sep 17 00:00:00 2001 From: psteinroe Date: Fri, 3 Jan 2025 19:12:26 +0000 Subject: [PATCH 19/27] add rules_check script --- Cargo.lock | 17 ++ Cargo.toml | 2 +- crates/pg_query_ext/Cargo.toml | 2 + crates/pg_query_ext/src/diagnostics.rs | 25 ++ crates/pg_query_ext/src/lib.rs | 1 + .../src/workspace/server/pg_query.rs | 24 +- justfile | 2 +- xtask/rules_check/Cargo.toml | 17 ++ xtask/rules_check/src/lib.rs | 234 ++++++++++++++++++ xtask/rules_check/src/main.rs | 5 + 10 files changed, 304 insertions(+), 25 deletions(-) create mode 100644 crates/pg_query_ext/src/diagnostics.rs create mode 100644 xtask/rules_check/Cargo.toml create mode 100644 xtask/rules_check/src/lib.rs create mode 100644 xtask/rules_check/src/main.rs diff --git a/Cargo.lock b/Cargo.lock index d16f861ec..0e86f9ca7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2577,9 +2577,11 @@ name = "pg_query_ext" version = "0.0.0" dependencies = [ "petgraph", + "pg_diagnostics", "pg_lexer", "pg_query", "pg_query_ext_codegen", + "text-size", ] [[package]] @@ -3185,6 +3187,21 @@ dependencies = [ "zeroize", ] +[[package]] +name = "rules_check" +version = "0.0.0" +dependencies = [ + "anyhow", + 
"pg_analyse", + "pg_analyser", + "pg_console", + "pg_diagnostics", + "pg_query_ext", + "pg_statement_splitter", + "pg_workspace_new", + "pulldown-cmark", +] + [[package]] name = "rustc-demangle" version = "0.1.24" diff --git a/Cargo.toml b/Cargo.toml index a79687421..61d6e2c34 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,5 +1,5 @@ [workspace] -members = ["crates/*", "lib/*", "xtask/codegen"] +members = ["crates/*", "lib/*", "xtask/codegen", "xtask/rules_check"] resolver = "2" [workspace.package] diff --git a/crates/pg_query_ext/Cargo.toml b/crates/pg_query_ext/Cargo.toml index 8e9868e9d..4ef2ef879 100644 --- a/crates/pg_query_ext/Cargo.toml +++ b/crates/pg_query_ext/Cargo.toml @@ -15,8 +15,10 @@ version = "0.0.0" petgraph = "0.6.4" pg_query = "0.8" +pg_diagnostics.workspace = true pg_lexer.workspace = true pg_query_ext_codegen.workspace = true +text-size.workspace = true [lib] doctest = false diff --git a/crates/pg_query_ext/src/diagnostics.rs b/crates/pg_query_ext/src/diagnostics.rs new file mode 100644 index 000000000..2096f9cf7 --- /dev/null +++ b/crates/pg_query_ext/src/diagnostics.rs @@ -0,0 +1,25 @@ +use pg_diagnostics::{Diagnostic, MessageAndDescription}; +use text_size::TextRange; + +/// A specialized diagnostic for the libpg_query parser. +/// +/// Parser diagnostics are always **errors**. +#[derive(Clone, Debug, Diagnostic)] +#[diagnostic(category = "syntax", severity = Error)] +pub struct SyntaxDiagnostic { + /// The location where the error is occurred + #[location(span)] + span: Option, + #[message] + #[description] + pub message: MessageAndDescription, +} + +impl From for SyntaxDiagnostic { + fn from(err: pg_query::Error) -> Self { + SyntaxDiagnostic { + span: None, + message: MessageAndDescription::from(err.to_string()), + } + } +} diff --git a/crates/pg_query_ext/src/lib.rs b/crates/pg_query_ext/src/lib.rs index 92a9bc8c4..8cbbd2d7d 100644 --- a/crates/pg_query_ext/src/lib.rs +++ b/crates/pg_query_ext/src/lib.rs @@ -10,6 +10,7 @@ //! 
- `get_nodes` to get all the nodes in the AST as a petgraph tree //! - `ChildrenIterator` to iterate over the children of a node mod codegen; +pub mod diagnostics; pub use pg_query::protobuf; pub use pg_query::{Error, NodeEnum, Result}; diff --git a/crates/pg_workspace_new/src/workspace/server/pg_query.rs b/crates/pg_workspace_new/src/workspace/server/pg_query.rs index 8751c17ed..e14e0a270 100644 --- a/crates/pg_workspace_new/src/workspace/server/pg_query.rs +++ b/crates/pg_workspace_new/src/workspace/server/pg_query.rs @@ -2,6 +2,7 @@ use std::sync::Arc; use dashmap::DashMap; use pg_diagnostics::{serde::Diagnostic as SDiagnostic, Diagnostic, MessageAndDescription}; +use pg_query_ext::diagnostics::*; use text_size::TextRange; use super::{ @@ -10,34 +11,11 @@ use super::{ store::Store, }; -/// A specialized diagnostic for the libpg_query parser. -/// -/// Parser diagnostics are always **errors**. -#[derive(Clone, Debug, Diagnostic)] -#[diagnostic(category = "syntax", severity = Error)] -pub struct SyntaxDiagnostic { - /// The location where the error is occurred - #[location(span)] - span: Option, - #[message] - #[description] - pub message: MessageAndDescription, -} - pub struct PgQueryStore { ast_db: DashMap>, diagnostics: DashMap, } -impl From for SyntaxDiagnostic { - fn from(err: pg_query_ext::Error) -> Self { - SyntaxDiagnostic { - span: None, - message: MessageAndDescription::from(err.to_string()), - } - } -} - impl PgQueryStore { pub fn new() -> PgQueryStore { PgQueryStore { diff --git a/justfile b/justfile index 78029b20d..a4455fb14 100644 --- a/justfile +++ b/justfile @@ -36,7 +36,7 @@ gen-lint: cargo run -p xtask_codegen -- configuration # cargo codegen-migrate # just gen-bindings - # cargo run -p rules_check + cargo run -p rules_check just format # Generates the linter documentation and Rust documentation diff --git a/xtask/rules_check/Cargo.toml b/xtask/rules_check/Cargo.toml new file mode 100644 index 000000000..f436855fd --- /dev/null +++ 
b/xtask/rules_check/Cargo.toml @@ -0,0 +1,17 @@ +[package] +description = "Internal script to make sure that the metadata or the rules are correct" +edition = "2021" +name = "rules_check" +publish = false +version = "0.0.0" + +[dependencies] +anyhow = { workspace = true } +pg_analyse = { workspace = true } +pg_analyser = { workspace = true } +pg_console = { workspace = true } +pg_diagnostics = { workspace = true } +pg_query_ext = { workspace = true } +pg_statement_splitter = { workspace = true } +pg_workspace_new = { workspace = true } +pulldown-cmark = "0.12.2" diff --git a/xtask/rules_check/src/lib.rs b/xtask/rules_check/src/lib.rs new file mode 100644 index 000000000..ed589252e --- /dev/null +++ b/xtask/rules_check/src/lib.rs @@ -0,0 +1,234 @@ +use std::collections::BTreeMap; +use std::str::FromStr; +use std::{fmt::Write, slice}; + +use anyhow::bail; +use pg_analyse::{ + AnalyserOptions, AnalysisFilter, GroupCategory, RegistryVisitor, Rule, RuleCategory, + RuleFilter, RuleGroup, RuleMetadata, +}; +use pg_analyser::{Analyser, AnalyserConfig}; +use pg_console::{markup, Console}; +use pg_diagnostics::{Diagnostic, DiagnosticExt, PrintDiagnostic}; +use pg_query_ext::diagnostics::SyntaxDiagnostic; +use pg_workspace_new::settings::Settings; +use pulldown_cmark::{CodeBlockKind, Event, Parser, Tag, TagEnd}; + +pub fn check_rules() -> anyhow::Result<()> { + #[derive(Default)] + struct LintRulesVisitor { + groups: BTreeMap<&'static str, BTreeMap<&'static str, RuleMetadata>>, + } + + impl LintRulesVisitor { + fn push_rule(&mut self) + where + R: Rule + 'static, + { + self.groups + .entry(::NAME) + .or_default() + .insert(R::METADATA.name, R::METADATA); + } + } + + impl RegistryVisitor for LintRulesVisitor { + fn record_category(&mut self) { + if matches!(C::CATEGORY, RuleCategory::Lint) { + C::record_groups(self); + } + } + + fn record_rule(&mut self) + where + R: Rule + 'static, + { + self.push_rule::() + } + } + + let mut visitor = LintRulesVisitor::default(); + 
pg_analyser::visit_registry(&mut visitor); + + let LintRulesVisitor { groups } = visitor; + + for (group, rules) in groups { + for (_, meta) in rules { + parse_documentation(group, meta.name, meta.docs)?; + } + } + + Ok(()) +} + +/// Parse and analyze the provided code block, and asserts that it emits +/// exactly zero or one diagnostic depending on the value of `expect_diagnostic`. +/// That diagnostic is then emitted as text into the `content` buffer +fn assert_lint( + group: &'static str, + rule: &'static str, + test: &CodeBlockTest, + code: &str, +) -> anyhow::Result<()> { + let file_path = format!("code-block.{}", test.tag); + let mut diagnostic_count = 0; + let mut all_diagnostics = vec![]; + let mut has_error = false; + let mut write_diagnostic = |code: &str, diag: pg_diagnostics::Error| { + all_diagnostics.push(diag); + // Fail the test if the analysis returns more diagnostics than expected + if test.expect_diagnostic { + // Print all diagnostics to help the user + if all_diagnostics.len() > 1 { + let mut console = pg_console::EnvConsole::default(); + for diag in all_diagnostics.iter() { + console.println( + pg_console::LogLevel::Error, + markup! { + {PrintDiagnostic::verbose(diag)} + }, + ); + } + has_error = true; + bail!("Analysis of '{group}/{rule}' on the following code block returned multiple diagnostics.\n\n{code}"); + } + } else { + // Print all diagnostics to help the user + let mut console = pg_console::EnvConsole::default(); + for diag in all_diagnostics.iter() { + console.println( + pg_console::LogLevel::Error, + markup! 
{ + {PrintDiagnostic::verbose(diag)} + }, + ); + } + has_error = true; + bail!("Analysis of '{group}/{rule}' on the following code block returned an unexpected diagnostic.\n\n{code}"); + } + diagnostic_count += 1; + Ok(()) + }; + + if test.ignore { + return Ok(()); + } + + let rule_filter = RuleFilter::Rule(group, rule); + let filter = AnalysisFilter { + enabled_rules: Some(slice::from_ref(&rule_filter)), + ..AnalysisFilter::default() + }; + let settings = Settings::default(); + let options = AnalyserOptions::default(); + let analyser = Analyser::new(AnalyserConfig { + options: &options, + filter, + }); + + // split and parse each statement + let stmts = pg_statement_splitter::split(code); + for stmt in stmts.ranges { + match pg_query_ext::parse(&code[stmt]) { + Ok(ast) => { + for rule_diag in analyser.run(pg_analyser::AnalyserContext { root: &ast }) { + let diag = pg_diagnostics::serde::Diagnostic::new(rule_diag); + + let category = diag.category().expect("linter diagnostic has no code"); + let severity = settings.get_severity_from_rule_code(category).expect( + "If you see this error, it means you need to run cargo codegen-configuration", + ); + + let error = diag + .with_severity(severity) + .with_file_path(&file_path) + .with_file_source_code(code); + + write_diagnostic(code, error)?; + } + } + Err(e) => { + let error = SyntaxDiagnostic::from(e) + .with_file_path(&file_path) + .with_file_source_code(code); + write_diagnostic(code, error)?; + } + }; + } + + Ok(()) +} + +struct CodeBlockTest { + tag: String, + expect_diagnostic: bool, + ignore: bool, +} + +impl FromStr for CodeBlockTest { + type Err = anyhow::Error; + + fn from_str(input: &str) -> anyhow::Result { + // This is based on the parsing logic for code block languages in `rustdoc`: + // https://github.com/rust-lang/rust/blob/6ac8adad1f7d733b5b97d1df4e7f96e73a46db42/src/librustdoc/html/markdown.rs#L873 + let tokens = input + .split([',', ' ', '\t']) + .map(str::trim) + .filter(|token| !token.is_empty()); 
+ + let mut test = CodeBlockTest { + tag: String::new(), + expect_diagnostic: false, + ignore: false, + }; + + for token in tokens { + match token { + // Other attributes + "expect_diagnostic" => test.expect_diagnostic = true, + "ignore" => test.ignore = true, + // Regard as language tags, last one wins + _ => test.tag = token.to_string(), + } + } + + Ok(test) + } +} + +/// Parse the documentation fragment for a lint rule (in markdown) and lint the code blcoks. +fn parse_documentation( + group: &'static str, + rule: &'static str, + docs: &'static str, +) -> anyhow::Result<()> { + let parser = Parser::new(docs); + + // Tracks the content of the current code block if it's using a + // language supported for analysis + let mut language = None; + for event in parser { + match event { + // CodeBlock-specific handling + Event::Start(Tag::CodeBlock(CodeBlockKind::Fenced(meta))) => { + // Track the content of code blocks to pass them through the analyser + let test = CodeBlockTest::from_str(meta.as_ref())?; + language = Some((test, String::new())); + } + Event::End(TagEnd::CodeBlock) => { + if let Some((test, block)) = language.take() { + assert_lint(group, rule, &test, &block)?; + } + } + Event::Text(text) => { + if let Some((_, block)) = &mut language { + write!(block, "{text}")?; + } + } + // We don't care other events + _ => {} + } + } + + Ok(()) +} diff --git a/xtask/rules_check/src/main.rs b/xtask/rules_check/src/main.rs new file mode 100644 index 000000000..1de34236f --- /dev/null +++ b/xtask/rules_check/src/main.rs @@ -0,0 +1,5 @@ +use rules_check::check_rules; + +fn main() -> anyhow::Result<()> { + check_rules() +} From dcfc9a54628a77c71e5f01ff75cbf5d0d9d651b1 Mon Sep 17 00:00:00 2001 From: psteinroe Date: Fri, 3 Jan 2025 19:32:07 +0000 Subject: [PATCH 20/27] chore: add contribution guide for analyser --- crates/pg_analyser/CONTRIBUTING.md | 332 +++++++++++++++++++++++++++++ justfile | 2 +- 2 files changed, 333 insertions(+), 1 deletion(-) create mode 100644 
crates/pg_analyser/CONTRIBUTING.md diff --git a/crates/pg_analyser/CONTRIBUTING.md b/crates/pg_analyser/CONTRIBUTING.md new file mode 100644 index 000000000..200be87cd --- /dev/null +++ b/crates/pg_analyser/CONTRIBUTING.md @@ -0,0 +1,332 @@ +# Analyser + +## Creating a rule + +When creating or updating a lint rule, you need to be aware that there's a lot of generated code inside our toolchain. +Our CI ensures that this code is not out of sync and fails otherwise. +See the [code generation section](#code-generation) for more details. + +To create a new rule, you have to create and update several files. +Because it is a bit tedious, we provide an easy way to create and test your rule using [Just](https://just.systems/man/en/). +_Just_ is not part of the rust toolchain, you have to install it with [a package manager](https://just.systems/man/en/chapter_4.html). + +### Choose a name + +We follow a naming convention according to what the rule does: + +1. Forbid a concept + + ```block + no + ``` + + When a rule's sole intention is to **forbid a single concept** the rule should be named using the `no` prefix. + +1. Mandate a concept + + ```block + use + ``` + + When a rule's sole intention is to **mandate a single concept** the rule should be named using the `use` prefix. + +### Explain a rule to the user + +A rule should be informative to the user, and give as much explanation as possible. + +When writing a rule, you must adhere to the following **pillars**: +1. Explain to the user the error. Generally, this is the message of the diagnostic. +1. Explain to the user **why** the error is triggered. Generally, this is implemented with an additional node. +1. Tell the user what they should do. Generally, this is implemented using a code action. If a code action is not applicable a note should tell the user what they should do to fix the error. + +### Create and implement the rule + +> [!TIP] +> As a developer, you aren't forced to make a rule perfect in one PR. 
Instead, you are encouraged to lay out a plan and to split the work into multiple PRs. +> +> If you aren't familiar with the APIs, this is an option that you have. If you decide to use this option, you should make sure to describe your plan in an issue. + +Let's say we want to create a new **lint** rule called `useMyRuleName`, follow these steps: + +1. Run the command + + ```shell + just new-lintrule safety useMyRuleName + ``` + The script will generate a bunch of files inside the `pg_analyser` crate. + Among the other files, you'll find a file called `use_my_new_rule_name.rs` inside the `pg_analyser/lib/src/lint/safety` folder. You'll implement your rule in this file. + +1. The `Option` type doesn't have to be used, so it can be considered optional. However, it has to be defined as `type Option = ()`. +1. Implement the `run` function: The function is called for every statement, and should return zero or more diagnostics. Follow the [pillars](#explain-a-rule-to-the-user) when writing the message of a diagnostic + +Don't forget to format your code with `just f` and lint with `just l`. + +That's it! Now, let's test the rule. + +### Rule configuration + +Some rules may allow customization using options. +We try to keep rule options to a minimum and only when needed. +Before adding an option, it's worth a discussion. + +Let's assume that the rule we implement support the following options: + +- `behavior`: a string among `"A"`, `"B"`, and `"C"`; +- `threshold`: an integer between 0 and 255; +- `behaviorExceptions`: an array of strings. + +We would like to set the options in the `pglsp.toml` configuration file: + +```toml +[linter.rules.safety.myRule] +level = "warn" +options = { + behavior = "A" + threshold = 20 + behaviorExceptions = ["one", "two"] +} +``` + +The first step is to create the Rust data representation of the rule's options. 
+ +```rust +#[derive(Clone, Debug, Default)] +pub struct MyRuleOptions { + behavior: Behavior, + threshold: u8, + behavior_exceptions: Box<[Box]> +} + +#[derive(Clone, Debug, Defaul)] +pub enum Behavior { + #[default] + A, + B, + C, +} +``` + +Note that we use a boxed slice `Box<[Box]>` instead of `Vec`. +This allows saving memory: [boxed slices and boxed str use 2 words instead of three words](https://nnethercote.github.io/perf-book/type-sizes.html#boxed-slices). + +With these types in place, you can set the associated type `Options` of the rule: + +```rust +impl Rule for MyRule { + type Options = MyRuleOptions; +} +``` + +A rule can retrieve its options with: + +```rust +let options = ctx.options(); +``` + +The compiler should warn you that `MyRuleOptions` does not implement some required types. +We currently require implementing _serde_'s traits `Deserialize`/`Serialize`. + +Also, we use other `serde` macros to adjust the JSON configuration: +- `rename_all = "snake_case"`: it renames all fields in camel-case, so they are in line with the naming style of the `pglsp.toml`. +- `deny_unknown_fields`: it raises an error if the configuration contains extraneous fields. +- `default`: it uses the `Default` value when the field is missing from `pglsp.toml`. This macro makes the field optional. 
+
+You can simply use derive macros:
+
+```rust
+#[derive(Debug, Default, Clone, Serialize, Deserialize)]
+#[cfg_attr(feature = "schemars", derive(JsonSchema))]
+#[serde(rename_all = "snake_case", deny_unknown_fields, default)]
+pub struct MyRuleOptions {
+    #[serde(default, skip_serializing_if = "is_default")]
+    main_behavior: Behavior,
+
+    #[serde(default, skip_serializing_if = "is_default")]
+    extra_behaviors: Vec<Behavior>,
+}
+
+#[derive(Debug, Default, Clone, Serialize, Deserialize)]
+#[cfg_attr(feature = "schemars", derive(JsonSchema))]
+pub enum Behavior {
+    #[default]
+    A,
+    B,
+    C,
+}
+```
+
+### Coding the rule
+
+Below, there are many tips and guidelines on how to create a lint rule using our infrastructure.
+
+
+#### `declare_lint_rule`
+
+This macro is used to declare an analyzer rule type, and implement the [RuleMeta] trait for it.
+
+The macro itself expects the following syntax:
+
+```rust
+use pg_analyse::declare_lint_rule;
+
+declare_lint_rule! {
+    /// Documentation
+    pub(crate) ExampleRule {
+        version: "next",
+        name: "myRuleName",
+        recommended: false,
+    }
+}
+```
+
+##### Lint rules inspired by other lint rules
+
+If a **lint** rule is inspired by an existing rule from other ecosystems (Squawk etc.), you can add a new metadata field to the macro called `sources`. Its value is `&'static [RuleSource]`, which is a reference to a slice of `RuleSource` elements, each representing a different source.
+
+If you're implementing a lint rule that matches the behaviour of the Squawk rule `ban-drop-column`, you'll use the variant `::Squawk` and pass the name of the rule:
+
+```rust
+use pg_analyse::{declare_lint_rule, RuleSource};
+
+declare_lint_rule! {
+    /// Documentation
+    pub(crate) ExampleRule {
+        version: "next",
+        name: "myRuleName",
+        recommended: false,
+        sources: &[RuleSource::Squawk("ban-drop-column")],
+    }
+}
+```
+
+#### Category Macro
+
+Declaring a rule using `declare_lint_rule!` will cause a new `rule_category!`
+macro to be declared in the surrounding module. This macro can be used to
+refer to the corresponding diagnostic category for this lint rule, if it
+has one. Using this macro instead of getting the category for a diagnostic
+by dynamically parsing its string name has the advantage of statically
+injecting the category at compile time and checking that it is correctly
+registered to the `pg_diagnostics` library.
+
+```rust
+declare_lint_rule! {
+    /// Documentation
+    pub(crate) ExampleRule {
+        version: "next",
+        name: "myRuleName",
+        recommended: false,
+    }
+}
+
+impl Rule for ExampleRule {
+    type Options = Options;
+
+    fn run(ctx: &RuleContext<Self>) -> Vec<RuleDiagnostic> {
+        vec![RuleDiagnostic::new(
+            rule_category!(),
+            None,
+            "message",
+        )]
+    }
+}
+```
+
+### Document the rule
+
+The documentation needs to adhere to the following rules:
+- The **first** paragraph of the documentation is used as brief description of the rule, and it **must** be written in one single line. Breaking the paragraph in multiple lines will break the table content of the rules page.
+- The next paragraphs can be used to further document the rule with as many details as you see fit.
+- The documentation must have a `## Examples` header, followed by two headers: `### Invalid` and `### Valid`. `### Invalid` must go first because we need to show when the rule is triggered.
+- Rule options if any, must be documented in the `## Options` section.
+- Each code block must have `sql` set as its language.
+- When adding _invalid_ snippets in the `### Invalid` section, you must use the `expect_diagnostic` code block property. We use this property to generate a diagnostic and attach it to the snippet. A snippet **must emit only ONE diagnostic**.
+- When adding _valid_ snippets in the `### Valid` section, you can use one single snippet.
+- You can use the code block property `ignore` to tell the code generation script to **not generate a diagnostic for an invalid snippet**.
+
+Here's an example of how the documentation could look:
+
+```rust
+declare_lint_rule! {
+    /// Dropping a column may break existing clients.
+    ///
+    /// Update your application code to no longer read or write the column.
+    ///
+    /// You can leave the column as nullable or delete the column once queries no longer select or modify the column.
+    ///
+    /// ## Examples
+    ///
+    /// ### Invalid
+    ///
+    /// ```sql,expect_diagnostic
+    /// alter table test drop column id;
+    /// ```
+    ///
+    pub BanDropColumn {
+        version: "next",
+        name: "banDropColumn",
+        recommended: true,
+        sources: &[RuleSource::Squawk("ban-drop-column")],
+    }
+}
+```
+
+This will cause the documentation generator to ensure the rule does emit
+exactly one diagnostic for this code, and to include a snapshot for the
+diagnostic in the resulting documentation page.
+
+### Code generation
+
+For simplicity, use `just` to run all the commands with:
+
+```shell
+just gen-lint
+```
+
+### Commit your work
+
+Once the rule is implemented, tested, and documented, you are ready to open a pull request!
+
+Stage and commit your changes:
+
+```shell
+> git add -A
+> git commit -m 'feat(pg_analyser): myRuleName'
+```
+
+
+### Deprecate a rule
+
+There are occasions when a rule must be deprecated, to avoid breaking changes. There can be
+multiple reasons for deprecation.
+
+In order to do so, the macro allows adding an additional field to document the reason for deprecation:
+
+```rust
+use pg_analyse::declare_lint_rule;
+
+declare_lint_rule! {
+    /// Dropping a column may break existing clients.
+    ///
+    /// Update your application code to no longer read or write the column.
+    ///
+    /// You can leave the column as nullable or delete the column once queries no longer select or modify the column.
+ /// + /// ## Examples + /// + /// ### Invalid + /// + /// ```sql,expect_diagnostic + /// alter table test drop column id; + /// ``` + /// + pub BanDropColumn { + version: "next", + name: "banDropColumn", + recommended: true, + deprecated: true, + sources: &[RuleSource::Squawk("ban-drop-column")], + } +} +``` + diff --git a/justfile b/justfile index a4455fb14..fb81c8ec7 100644 --- a/justfile +++ b/justfile @@ -4,7 +4,7 @@ _default: alias f := format alias t := test # alias r := ready -# alias l := lint +alias l := lint # alias qt := test-quick # Installs the tools needed to develop From 272521fa8ef007f6c060e9dbafcae303a081ad3c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Philipp=20Steinr=C3=B6tter?= Date: Fri, 3 Jan 2025 21:45:05 +0000 Subject: [PATCH 21/27] Update .env Co-authored-by: Julian Domke <68325451+juleswritescode@users.noreply.github.com> --- .env | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.env b/.env index 88b3b55c5..dda717435 100644 --- a/.env +++ b/.env @@ -1 +1 @@ -DATABASE_URL=postgresql://postgres:postgres@127.0.0.1:54322/postgres +DATABASE_URL=postgresql://postgres:postgres@127.0.0.1:5432/postgres From 5cff1583feae7e7932073e036b118aff030e07ef Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Philipp=20Steinr=C3=B6tter?= Date: Fri, 3 Jan 2025 21:46:17 +0000 Subject: [PATCH 22/27] Update crates/pg_analyse/src/context.rs Co-authored-by: Julian Domke <68325451+juleswritescode@users.noreply.github.com> --- crates/pg_analyse/src/context.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/crates/pg_analyse/src/context.rs b/crates/pg_analyse/src/context.rs index 82e1f7c25..4e43b1dfd 100644 --- a/crates/pg_analyse/src/context.rs +++ b/crates/pg_analyse/src/context.rs @@ -43,7 +43,6 @@ where /// pub(crate) Foo { /// version: "0.0.0", /// name: "foo", - /// language: "js", /// recommended: true, /// } /// } From 89296baddb598350c1848693e619db7bb6895f5e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Philipp=20Steinr=C3=B6tter?= Date: Fri, 3 Jan 2025 
20:46:41 -0100 Subject: [PATCH 23/27] Update xtask/codegen/src/lib.rs Co-authored-by: Julian Domke <68325451+juleswritescode@users.noreply.github.com> --- xtask/codegen/src/lib.rs | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/xtask/codegen/src/lib.rs b/xtask/codegen/src/lib.rs index c24bf7c52..bfdb156d8 100644 --- a/xtask/codegen/src/lib.rs +++ b/xtask/codegen/src/lib.rs @@ -22,11 +22,8 @@ pub enum UpdateResult { /// A helper to update file on disk if it has changed. /// With verify = false, pub fn update(path: &Path, contents: &str, mode: &Mode) -> Result { - match fs2::read_to_string(path) { - Ok(old_contents) if old_contents == contents => { - return Ok(UpdateResult::NotUpdated); - } - _ => (), + if fs2::read_to_string(path).is_ok_and(|old_contents| old_contents == contents) { + return Ok(UpdateResult::NotUpdated); } if *mode == Mode::Verify { From b9fc6eb39c547d0f61f0b3b18dab43d00e9591b8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Philipp=20Steinr=C3=B6tter?= Date: Fri, 3 Jan 2025 20:46:52 -0100 Subject: [PATCH 24/27] Update xtask/codegen/src/lib.rs Co-authored-by: Julian Domke <68325451+juleswritescode@users.noreply.github.com> --- xtask/codegen/src/lib.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/xtask/codegen/src/lib.rs b/xtask/codegen/src/lib.rs index bfdb156d8..8cf075901 100644 --- a/xtask/codegen/src/lib.rs +++ b/xtask/codegen/src/lib.rs @@ -20,7 +20,8 @@ pub enum UpdateResult { } /// A helper to update file on disk if it has changed. -/// With verify = false, +/// With verify = false, the contents of the file will be updated to the passed in contents. +/// With verify = true, an Err will be returned if the contents of the file do not match the passed-in contents. 
pub fn update(path: &Path, contents: &str, mode: &Mode) -> Result { if fs2::read_to_string(path).is_ok_and(|old_contents| old_contents == contents) { return Ok(UpdateResult::NotUpdated); From 0dc1f982f313953e38511ca882ed6b78be225c50 Mon Sep 17 00:00:00 2001 From: psteinroe Date: Fri, 3 Jan 2025 20:47:57 -0100 Subject: [PATCH 25/27] chore: add merge-main script --- justfile | 3 +++ 1 file changed, 3 insertions(+) diff --git a/justfile b/justfile index fb81c8ec7..11d6748f8 100644 --- a/justfile +++ b/justfile @@ -137,3 +137,6 @@ clear-branches: reset-git: git checkout main && git pull && pnpm run clear-branches + +merge-main: + git fetch origin main:main && git merge main From 65dbd0908d82064c53906f889239ae92c48fb1dc Mon Sep 17 00:00:00 2001 From: psteinroe Date: Fri, 3 Jan 2025 20:51:58 -0100 Subject: [PATCH 26/27] chore: remove unstable is_none_or --- crates/pg_schema_cache/src/schema_cache.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/pg_schema_cache/src/schema_cache.rs b/crates/pg_schema_cache/src/schema_cache.rs index 8d73e6319..6deac6f11 100644 --- a/crates/pg_schema_cache/src/schema_cache.rs +++ b/crates/pg_schema_cache/src/schema_cache.rs @@ -66,7 +66,7 @@ impl SchemaCache { self.columns.iter().find(|c| { c.name.as_str() == name && c.table_name.as_str() == table - && schema.is_none_or(|s| s == c.schema_name.as_str()) + && schema.map_or(true, |s| s == c.schema_name.as_str()) }) } From 87f3a86d0c6c05ab0b084525b1e3709ae3002c00 Mon Sep 17 00:00:00 2001 From: psteinroe Date: Fri, 3 Jan 2025 21:03:44 -0100 Subject: [PATCH 27/27] fix: docs test --- crates/pg_analyse/src/categories.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/pg_analyse/src/categories.rs b/crates/pg_analyse/src/categories.rs index 4f8dc4481..11117e25a 100644 --- a/crates/pg_analyse/src/categories.rs +++ b/crates/pg_analyse/src/categories.rs @@ -55,7 +55,7 @@ impl ActionCategory { /// /// ``` /// use std::borrow::Cow; - /// use 
biome_analyze::{ActionCategory, RefactorKind}; + /// use pg_analyse::{ActionCategory, RefactorKind}; /// /// assert!(ActionCategory::QuickFix(Cow::from("quickfix")).matches("quickfix")); ///