From 7ed42b631aecb66df3ff9de9d2e4d780a9615e81 Mon Sep 17 00:00:00 2001 From: psteinroe Date: Thu, 13 Feb 2025 17:57:53 +0100 Subject: [PATCH 01/14] chore(docs): initial commit --- docs/index.md | 88 +++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 88 insertions(+) create mode 100644 docs/index.md diff --git a/docs/index.md b/docs/index.md new file mode 100644 index 000000000..8365e19d9 --- /dev/null +++ b/docs/index.md @@ -0,0 +1,88 @@ +![Postgres Language Server](images/pls-github.png) + +# Postgres Language Server + +A collection of language tools and a Language Server Protocol (LSP) implementation for Postgres, focusing on developer experience and reliable SQL tooling. + +--- + +**Source Code**: https://github.com/supabase-community/postgres_lsp + +--- + +## Overview + +This project provides a toolchain for Postgres development, built on Postgres' own parser `libpg_query` to ensure 100% syntax compatibility. It is built on a Server-Client architecture with a transport-agnostic design. This means all features can be accessed not only through the [Language Server Protocol](https://microsoft.github.io/language-server-protocol/), but also through other interfaces like a CLI, HTTP APIs, or a WebAssembly module. The goal is to make all the great Postgres tooling out there as accessible as possible, and to build anything that is missing ourselves. + +Currently, the following features are implemented: +- Autocompletion +- Syntax Error Highlighting +- Type-checking (via `EXPLAIN` error insights) +- Linter, inspired by [Squawk](https://squawkhq.com) + +Our current focus is on refining and enhancing these core features while building a robust and easily accessible infrastructure. For future plans and opportunities to contribute, please check out the issues and discussions. Any contributions are welcome! + +## Installation + +> [!NOTE] +> We will update this section once we have published the binaries. + +## Configuration + +We recommend that you create a `pglt.toml` configuration file for each project. This eliminates the need to repeat the CLI options each time you run a command, and ensures that we use the same configuration in your editor. Some options are also only available from a configuration file. If you are happy with the defaults, you don’t need to create a configuration file. To create the `pglt.toml` file, run the `init` command in the root folder of your project: + +```sh +pglt init +``` + +After running the `init` command, you’ll have a new `pglt.toml` file in your directory: + +```toml +[vcs] +enabled = false +client_kind = "git" +use_ignore_file = false + +[files] +ignore = [] + +[linter] +enabled = true + +[linter.rules] +recommended = true + +[db] +host = "127.0.0.1" +port = 5432 +username = "postgres" +password = "postgres" +database = "postgres" +conn_timeout_secs = 10 +``` + +Make sure to point the database connection settings at your local development database. To see what else can be configured, run `--help`. + +## Usage + +You can check SQL files using the `check` command: + +```sh +pglt check myfile.sql +``` + +Make sure to check out the other options. We will provide guides for specific use cases like linting migration files soon. + +## Install an Editor Plugin + +We recommend installing an editor plugin to get the most out of Postgres Language Tools. + +> [!NOTE] +> We will update this section once we have published the binaries. + + +## CI Setup + +> [!NOTE] +> We will update this section once we have published the binaries. 
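To make the `check` workflow above concrete, here is a minimal, hypothetical session. The directory, file name, and SQL statement are illustrative and not part of this repository; with the default recommended rules, `pglt check` should surface a `lint/safety/banDropColumn` diagnostic for the dropped column and exit with a non-zero status.

```sh
# Illustrative only: create a migration that drops a column
# (the schema and file name are hypothetical).
mkdir -p migrations
cat > migrations/20250214_drop_email.sql <<'SQL'
alter table users drop column email;
SQL

# Lint just that file. With the default recommended rules this should
# report a lint/safety/banDropColumn diagnostic and fail the command.
pglt check migrations/20250214_drop_email.sql
```

Because the `check` command accepts a single file, a single path, or a list of paths, the same pattern extends to linting a whole migrations directory, for example as a CI step.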
+ From 9c200d4493115d4bb77aba8c10a3ab8c491277e2 Mon Sep 17 00:00:00 2001 From: psteinroe Date: Fri, 14 Feb 2025 09:28:41 +0100 Subject: [PATCH 02/14] feat(docs): poc for codegen --- Cargo.lock | 13 + Cargo.toml | 7 +- docs/cli_reference.md | 289 ++++++++++++++++++++++ docs/codegen/Cargo.toml | 23 ++ docs/codegen/src/cli_doc.rs | 17 ++ docs/codegen/src/default_configuration.rs | 22 ++ docs/codegen/src/env_variables.rs | 44 ++++ docs/codegen/src/lib.rs | 4 + docs/codegen/src/main.rs | 22 ++ docs/codegen/src/utils.rs | 15 ++ docs/env_variables.md | 19 ++ docs/index.md | 4 + 12 files changed, 475 insertions(+), 4 deletions(-) create mode 100644 docs/cli_reference.md create mode 100644 docs/codegen/Cargo.toml create mode 100644 docs/codegen/src/cli_doc.rs create mode 100644 docs/codegen/src/default_configuration.rs create mode 100644 docs/codegen/src/env_variables.rs create mode 100644 docs/codegen/src/lib.rs create mode 100644 docs/codegen/src/main.rs create mode 100644 docs/codegen/src/utils.rs create mode 100644 docs/env_variables.md diff --git a/Cargo.lock b/Cargo.lock index b133b902e..1f5f23cec 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -975,6 +975,19 @@ dependencies = [ "syn 2.0.90", ] +[[package]] +name = "docs_codegen" +version = "0.0.0" +dependencies = [ + "anyhow", + "bpaf", + "pglt_cli", + "pglt_configuration", + "pglt_flags", + "regex", + "toml", +] + [[package]] name = "dotenv" version = "0.15.0" diff --git a/Cargo.toml b/Cargo.toml index 902ef8902..674dfa518 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,5 +1,5 @@ [workspace] -members = ["crates/*", "lib/*", "xtask/codegen", "xtask/rules_check"] +members = ["crates/*", "lib/*", "xtask/codegen", "xtask/rules_check", "docs/codegen"] resolver = "2" [workspace.package] @@ -28,6 +28,7 @@ pg_query = "6.0.0" proc-macro2 = "1.0.66" quote = "1.0.33" rayon = "1.10.0" +regex = "1.11.1" rustc-hash = "2.0.0" schemars = { version = "0.8.21", features = ["indexmap2", "smallvec"] } serde = "1.0.195" @@ -79,10 +80,8 @@ pglt_typecheck = { path = "./crates/pglt_typecheck", version = "0.0 pglt_workspace = { path = "./crates/pglt_workspace", version = "0.0.0" } pglt_test_utils = { path = "./crates/pglt_test_utils" } -# parser = { path = "./crates/parser", version = "0.0.0" } -# sql_parser = { path = "./crates/sql_parser", version = "0.0.0" } -# sql_parser_codegen = { path = "./crates/sql_parser_codegen", version = "0.0.0" } +docs_codegen = { path = "./docs/codegen", version = "0.0.0" } [profile.dev.package] insta.opt-level = 3 diff --git a/docs/cli_reference.md b/docs/cli_reference.md new file mode 100644 index 000000000..a4248d3c8 --- /dev/null +++ b/docs/cli_reference.md @@ -0,0 +1,289 @@ +## CLI Reference + +[//]: # (BEGIN CLI_REF) + + + +# Command summary + + * [`pglt`↴](#pglt) + * [`pglt version`↴](#pglt-version) + * [`pglt check`↴](#pglt-check) + * [`pglt start`↴](#pglt-start) + * [`pglt stop`↴](#pglt-stop) + * [`pglt init`↴](#pglt-init) + * [`pglt lsp-proxy`↴](#pglt-lsp-proxy) + * [`pglt clean`↴](#pglt-clean) + +## pglt + +PgLT official CLI. Use it to check the health of your project or run it to check single files. + +**Usage**: **`pglt`** _`COMMAND ...`_ + +**Available options:** +- **`-h`**, **`--help`** — + Prints help information +- **`-V`**, **`--version`** — + Prints version information + + + +**Available commands:** +- **`version`** — + Shows the version information and quit. +- **`check`** — + Runs everything to the requested files. +- **`start`** — + Starts the daemon server process. +- **`stop`** — + Stops the daemon server process. 
+- **`init`** — + Bootstraps a new project. Creates a configuration file with some defaults. +- **`lsp-proxy`** — + Acts as a server for the Language Server Protocol over stdin/stdout. +- **`clean`** — + Cleans the logs emitted by the daemon. + + +## pglt version + +Shows the version information and quit. + +**Usage**: **`pglt`** **`version`** + +**Global options applied to all commands** +- **` --colors`**=_``_ — + Set the formatting mode for markup: "off" prints everything as plain text, "force" forces the formatting of markup using ANSI even if the console output is determined to be incompatible +- **` --use-server`** — + Connect to a running instance of the daemon server. +- **` --skip-db`** — + Skip connecting to the database and only run checks that don't require a database connection. +- **` --verbose`** — + Print additional diagnostics, and some diagnostics show more information. Also, print out what files were processed and which ones were modified. +- **` --config-path`**=_`PATH`_ — + Set the file path to the configuration file, or the directory path to find `pglt.toml`. If used, it disables the default configuration file resolution. +- **` --max-diagnostics`**=_`>`_ — + Cap the amount of diagnostics displayed. When `none` is provided, the limit is lifted. + + [default: 20] +- **` --skip-errors`** — + Skip over files containing syntax errors instead of emitting an error diagnostic. +- **` --no-errors-on-unmatched`** — + Silence errors that would be emitted in case no files were processed during the execution of the command. +- **` --error-on-warnings`** — + Tell PGLSP to exit with an error code if some diagnostics emit warnings. +- **` --reporter`**=_``_ — + Allows to change how diagnostics and summary are reported. +- **` --log-level`**=_``_ — + The level of logging. In order, from the most verbose to the least verbose: debug, info, warn, error. + + The value `none` won't show any logging. + + [default: none] +- **` --log-kind`**=_``_ — + How the log should look like. + + [default: pretty] +- **` --diagnostic-level`**=_``_ — + The level of diagnostics to show. In order, from the lowest to the most important: info, warn, error. Passing `--diagnostic-level=error` will cause PGLSP to print only diagnostics that contain only errors. + + [default: info] + + + +**Available options:** +- **`-h`**, **`--help`** — + Prints help information + + +## pglt check + +Runs everything to the requested files. + +**Usage**: **`pglt`** **`check`** \[**`--staged`**\] \[**`--changed`**\] \[**`--since`**=_`REF`_\] \[_`PATH`_\]... + +**The configuration that is contained inside the configuration file.** +- **` --vcs-enabled`**=_``_ — + Whether we should integrate itself with the VCS client +- **` --vcs-client-kind`**=_``_ — + The kind of client. +- **` --vcs-use-ignore-file`**=_``_ — + Whether we should use the VCS ignore file. When [true], we will ignore the files specified in the ignore file. +- **` --vcs-root`**=_`PATH`_ — + The folder where we should check for VCS files. By default, we will use the same folder where `pglt.toml` was found. + + If we can't find the configuration, it will attempt to use the current working directory. If no current working directory can't be found, we won't use the VCS integration, and a diagnostic will be emitted +- **` --vcs-default-branch`**=_`BRANCH`_ — + The main branch of the project +- **` --files-max-size`**=_`NUMBER`_ — + The maximum allowed size for source code files in bytes. Files above this limit will be ignored for performance reasons. 
Defaults to 1 MiB +- **` --migrations-dir`**=_`ARG`_ — + The directory where the migration files are stored +- **` --after`**=_`ARG`_ — + Ignore any migrations before this timestamp +- **` --host`**=_`ARG`_ — + The host of the database. +- **` --port`**=_`ARG`_ — + The port of the database. +- **` --username`**=_`ARG`_ — + The username to connect to the database. +- **` --password`**=_`ARG`_ — + The password to connect to the database. +- **` --database`**=_`ARG`_ — + The name of the database. +- **` --conn_timeout_secs`**=_`ARG`_ — + The connection timeout in seconds. + + [default: Some(10)] + + + +**Global options applied to all commands** +- **` --colors`**=_``_ — + Set the formatting mode for markup: "off" prints everything as plain text, "force" forces the formatting of markup using ANSI even if the console output is determined to be incompatible +- **` --use-server`** — + Connect to a running instance of the daemon server. +- **` --skip-db`** — + Skip connecting to the database and only run checks that don't require a database connection. +- **` --verbose`** — + Print additional diagnostics, and some diagnostics show more information. Also, print out what files were processed and which ones were modified. +- **` --config-path`**=_`PATH`_ — + Set the file path to the configuration file, or the directory path to find `pglt.toml`. If used, it disables the default configuration file resolution. +- **` --max-diagnostics`**=_`>`_ — + Cap the amount of diagnostics displayed. When `none` is provided, the limit is lifted. + + [default: 20] +- **` --skip-errors`** — + Skip over files containing syntax errors instead of emitting an error diagnostic. +- **` --no-errors-on-unmatched`** — + Silence errors that would be emitted in case no files were processed during the execution of the command. +- **` --error-on-warnings`** — + Tell PGLSP to exit with an error code if some diagnostics emit warnings. +- **` --reporter`**=_``_ — + Allows to change how diagnostics and summary are reported. +- **` --log-level`**=_``_ — + The level of logging. In order, from the most verbose to the least verbose: debug, info, warn, error. + + The value `none` won't show any logging. + + [default: none] +- **` --log-kind`**=_``_ — + How the log should look like. + + [default: pretty] +- **` --diagnostic-level`**=_``_ — + The level of diagnostics to show. In order, from the lowest to the most important: info, warn, error. Passing `--diagnostic-level=error` will cause PGLSP to print only diagnostics that contain only errors. + + [default: info] + + + +**Available positional items:** +- _`PATH`_ — + Single file, single path or list of paths + + + +**Available options:** +- **` --stdin-file-path`**=_`PATH`_ — + Use this option when you want to format code piped from `stdin`, and print the output to `stdout`. + + The file doesn't need to exist on disk, what matters is the extension of the file. Based on the extension, we know how to check the code. + + Example: `echo 'let a;' | pglt_cli check --stdin-file-path=test.sql` +- **` --staged`** — + When set to true, only the files that have been staged (the ones prepared to be committed) will be linted. This option should be used when working locally. +- **` --changed`** — + When set to true, only the files that have been changed compared to your `defaultBranch` configuration will be linted. This option should be used in CI environments. 
+- **` --since`**=_`REF`_ — + Use this to specify the base branch to compare against when you're using the --changed flag and the `defaultBranch` is not set in your `pglt.toml` +- **`-h`**, **`--help`** — + Prints help information + + +## pglt start + +Starts the daemon server process. + +**Usage**: **`pglt`** **`start`** \[**`--config-path`**=_`PATH`_\] + +**Available options:** +- **` --log-prefix-name`**=_`STRING`_ — + Allows to change the prefix applied to the file name of the logs. + + Uses environment variable **`PGLSP_LOG_PREFIX_NAME`** + + [default: server.log] +- **` --log-path`**=_`PATH`_ — + Allows to change the folder where logs are stored. + + Uses environment variable **`PGLSP_LOG_PATH`** +- **` --config-path`**=_`PATH`_ — + Allows to set a custom file path to the configuration file, or a custom directory path to find `pglt.toml` + + Uses environment variable **`PGLSP_LOG_PREFIX_NAME`** +- **`-h`**, **`--help`** — + Prints help information + + +## pglt stop + +Stops the daemon server process. + +**Usage**: **`pglt`** **`stop`** + +**Available options:** +- **`-h`**, **`--help`** — + Prints help information + + +## pglt init + +Bootstraps a new project. Creates a configuration file with some defaults. + +**Usage**: **`pglt`** **`init`** + +**Available options:** +- **`-h`**, **`--help`** — + Prints help information + + +## pglt lsp-proxy + +Acts as a server for the Language Server Protocol over stdin/stdout. + +**Usage**: **`pglt`** **`lsp-proxy`** \[**`--config-path`**=_`PATH`_\] + +**Available options:** +- **` --log-prefix-name`**=_`STRING`_ — + Allows to change the prefix applied to the file name of the logs. + + Uses environment variable **`PGLSP_LOG_PREFIX_NAME`** + + [default: server.log] +- **` --log-path`**=_`PATH`_ — + Allows to change the folder where logs are stored. + + Uses environment variable **`PGLSP_LOG_PATH`** +- **` --config-path`**=_`PATH`_ — + Allows to set a custom file path to the configuration file, or a custom directory path to find `pglt.toml` + + Uses environment variable **`PGLSP_CONFIG_PATH`** +- **`-h`**, **`--help`** — + Prints help information + + +## pglt clean + +Cleans the logs emitted by the daemon. 
+ +**Usage**: **`pglt`** **`clean`** + +**Available options:** +- **`-h`**, **`--help`** — + Prints help information + + + +[//]: # (END CLI_REF) diff --git a/docs/codegen/Cargo.toml b/docs/codegen/Cargo.toml new file mode 100644 index 000000000..34f9e29d0 --- /dev/null +++ b/docs/codegen/Cargo.toml @@ -0,0 +1,23 @@ + +[package] +authors.workspace = true +categories.workspace = true +description = "" +edition.workspace = true +homepage.workspace = true +keywords.workspace = true +license.workspace = true +name = "docs_codegen" +repository.workspace = true +version = "0.0.0" + +[dependencies] +regex = { workspace = true } +toml = { workspace = true } +anyhow = { workspace = true } +bpaf = { workspace = true, features = ["docgen"] } + +pglt_configuration = { workspace = true } +pglt_flags = { workspace = true } +pglt_cli = { workspace = true } + diff --git a/docs/codegen/src/cli_doc.rs b/docs/codegen/src/cli_doc.rs new file mode 100644 index 000000000..ad299605d --- /dev/null +++ b/docs/codegen/src/cli_doc.rs @@ -0,0 +1,17 @@ +use pglt_cli::pglt_command; +use std::{fs, path::Path}; + +use crate::utils; + +pub fn generate_cli_doc(docs_dir: &Path) -> anyhow::Result<()> { + let file_path = docs_dir.join("cli_reference.md"); + + let content = fs::read_to_string(&file_path)?; + + let new_content = + utils::replace_section(&content, "CLI_REF", &pglt_command().render_markdown("pglt")); + + fs::write(file_path, &new_content)?; + + Ok(()) +} diff --git a/docs/codegen/src/default_configuration.rs b/docs/codegen/src/default_configuration.rs new file mode 100644 index 000000000..12f305b82 --- /dev/null +++ b/docs/codegen/src/default_configuration.rs @@ -0,0 +1,22 @@ +use std::{fs, path::Path}; + +use crate::utils::replace_section; + +use pglt_configuration::PartialConfiguration; + +pub fn generate_default_configuration(docs_dir: &Path) -> anyhow::Result<()> { + let index_path = docs_dir.join("index.md"); + + let printed_config = format!( + "\n```toml\n{}```\n", + toml::ser::to_string_pretty(&PartialConfiguration::init())? 
+ ); + + let data = fs::read_to_string(&index_path)?; + + let new_data = replace_section(&data, "DEFAULT_CONFIGURATION", &printed_config); + + fs::write(&index_path, new_data)?; + + Ok(()) +} diff --git a/docs/codegen/src/env_variables.rs b/docs/codegen/src/env_variables.rs new file mode 100644 index 000000000..26839b5f7 --- /dev/null +++ b/docs/codegen/src/env_variables.rs @@ -0,0 +1,44 @@ +use anyhow::Result; +use std::fs; +use std::io::Write; +use std::path::Path; + +use crate::utils::replace_section; + +pub fn generate_env_variables(docs_dir: &Path) -> Result<()> { + let file_path = docs_dir.join("env_variables.md"); + + let mut content = vec![]; + + let env = pglt_flags::pglt_env(); + + writeln!(content, "\n",)?; + + writeln!( + content, + "### `{}`\n\n {}\n", + env.pglt_log_path.name(), + env.pglt_log_path.description() + )?; + writeln!( + content, + "### `{}`\n\n {}\n", + env.pglt_log_prefix.name(), + env.pglt_log_prefix.description() + )?; + writeln!( + content, + "### `{}`\n\n {}\n", + env.pglt_config_path.name(), + env.pglt_config_path.description() + )?; + + let data = fs::read_to_string(&file_path)?; + + let conent_str = String::from_utf8(content)?; + let new_data = replace_section(&data, "ENV_VARS", &conent_str); + + fs::write(file_path, new_data)?; + + Ok(()) +} diff --git a/docs/codegen/src/lib.rs b/docs/codegen/src/lib.rs new file mode 100644 index 000000000..2899d08b6 --- /dev/null +++ b/docs/codegen/src/lib.rs @@ -0,0 +1,4 @@ +pub mod cli_doc; +pub mod default_configuration; +pub mod env_variables; +mod utils; diff --git a/docs/codegen/src/main.rs b/docs/codegen/src/main.rs new file mode 100644 index 000000000..cac99db88 --- /dev/null +++ b/docs/codegen/src/main.rs @@ -0,0 +1,22 @@ +use std::env; +use std::path::{Path, PathBuf}; + +use docs_codegen::cli_doc::generate_cli_doc; +use docs_codegen::default_configuration::generate_default_configuration; +use docs_codegen::env_variables::generate_env_variables; + +fn docs_root() -> PathBuf { + let dir = + env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| env!("CARGO_MANIFEST_DIR").to_owned()); + Path::new(&dir).parent().unwrap().to_path_buf() +} + +fn main() -> anyhow::Result<()> { + let docs_root = docs_root(); + + generate_default_configuration(&docs_root)?; + generate_env_variables(&docs_root)?; + generate_cli_doc(&docs_root)?; + + Ok(()) +} diff --git a/docs/codegen/src/utils.rs b/docs/codegen/src/utils.rs new file mode 100644 index 000000000..4c179f7c0 --- /dev/null +++ b/docs/codegen/src/utils.rs @@ -0,0 +1,15 @@ +use regex::Regex; + +pub(crate) fn replace_section( + content: &str, + section_identifier: &str, + replacement: &str, +) -> String { + let pattern = format!( + r"(\[//\]: # \(BEGIN {}\)\n)(?s).*?(\n\[//\]: # \(END {}\))", + section_identifier, section_identifier + ); + let re = Regex::new(&pattern).unwrap(); + re.replace_all(content, format!("${{1}}{}${{2}}", replacement)) + .to_string() +} diff --git a/docs/env_variables.md b/docs/env_variables.md new file mode 100644 index 000000000..8b80b862c --- /dev/null +++ b/docs/env_variables.md @@ -0,0 +1,19 @@ +## Environment Variables + +[//]: # (BEGIN ENV_VARS) + + +### `PGLSP_LOG_PATH` + + The directory where the Daemon logs will be saved. + +### `PGLSP_LOG_PREFIX_NAME` + + A prefix that's added to the name of the log. 
Default: `server.log.` + +### `PGLSP_CONFIG_PATH` + + A path to the configuration file + + +[//]: # (END ENV_VARS) diff --git a/docs/index.md b/docs/index.md index 8365e19d9..5e1317c88 100644 --- a/docs/index.md +++ b/docs/index.md @@ -37,6 +37,8 @@ pglt init After running the `init` command, you’ll have a new `pglt.toml` file in your directory: +[//]: # (BEGIN DEFAULT_CONFIGURATION) + ```toml [vcs] enabled = false @@ -61,6 +63,8 @@ database = "postgres" conn_timeout_secs = 10 ``` +[//]: # (END DEFAULT_CONFIGURATION) + Make sure to point the database connection settings at your local development database. To see what else can be configured, run `--help`. ## Usage From 918fee7555240bd995222ca6d1161de45d55afcd Mon Sep 17 00:00:00 2001 From: psteinroe Date: Fri, 14 Feb 2025 13:16:51 +0100 Subject: [PATCH 03/14] docs: troubleshooting --- docs/troubleshooting.md | 9 +++++++++ 1 file changed, 9 insertions(+) create mode 100644 docs/troubleshooting.md diff --git a/docs/troubleshooting.md b/docs/troubleshooting.md new file mode 100644 index 000000000..f9d91561e --- /dev/null +++ b/docs/troubleshooting.md @@ -0,0 +1,9 @@ +## Troubleshooting + +This guide describes how to resolve common issues with Postgres Language Tools. + +### Incorrect and / or misplaced diagnostics + +We are employing pragmatic solutions to split a SQL file into statements, and they might be incorrect in certain cases. If you see diagnostics like `Unexpected token` in the middle of a valid statement, make sure to either end all statements with a semicolon, or put two double newlines between them. If there are still issues, its most likely a bug in the change handler that is gone after reopening the file. But please file an issue with sample code so we can fix the root cause. + + From bfd177453653515479c2d00cd32d2ed1b7ae4eaa Mon Sep 17 00:00:00 2001 From: psteinroe Date: Sun, 16 Feb 2025 17:28:30 +0100 Subject: [PATCH 04/14] feat(docs): finish codegen --- Cargo.lock | 9 + crates/pglt_cli/src/cli_options.rs | 4 +- crates/pglt_cli/src/commands/daemon.rs | 2 +- crates/pglt_cli/src/commands/mod.rs | 18 +- crates/pglt_cli/src/diagnostics.rs | 2 +- crates/pglt_cli/src/lib.rs | 2 +- .../src/analyser/linter/rules.rs | 2 +- crates/pglt_configuration/src/lib.rs | 2 +- crates/pglt_flags/src/lib.rs | 14 +- crates/pglt_fs/src/fs.rs | 6 +- crates/pglt_fs/src/path.rs | 4 +- crates/pglt_workspace/src/workspace/server.rs | 4 +- docs/cli_reference.md | 20 +- docs/codegen/Cargo.toml | 9 + docs/codegen/src/lib.rs | 4 + docs/codegen/src/main.rs | 6 + docs/codegen/src/rules_docs.rs | 469 ++++++++++++++++++ docs/codegen/src/rules_index.rs | 124 +++++ docs/codegen/src/rules_sources.rs | 112 +++++ docs/codegen/src/utils.rs | 40 ++ docs/env_variables.md | 6 +- docs/rule_sources.md | 2 + docs/rules.md | 19 + docs/rules/ban-drop-column.md | 32 ++ docs/rules/ban-drop-not-null.md | 32 ++ xtask/codegen/src/generate_configuration.rs | 2 +- 26 files changed, 902 insertions(+), 44 deletions(-) create mode 100644 docs/codegen/src/rules_docs.rs create mode 100644 docs/codegen/src/rules_index.rs create mode 100644 docs/codegen/src/rules_sources.rs create mode 100644 docs/rule_sources.md create mode 100644 docs/rules.md create mode 100644 docs/rules/ban-drop-column.md create mode 100644 docs/rules/ban-drop-not-null.md diff --git a/Cargo.lock b/Cargo.lock index 1f5f23cec..74cbce8f7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -980,10 +980,19 @@ name = "docs_codegen" version = "0.0.0" dependencies = [ "anyhow", + "biome_string_case", "bpaf", + "pglt_analyse", + 
"pglt_analyser", "pglt_cli", "pglt_configuration", + "pglt_console", + "pglt_diagnostics", "pglt_flags", + "pglt_query_ext", + "pglt_statement_splitter", + "pglt_workspace", + "pulldown-cmark", "regex", "toml", ] diff --git a/crates/pglt_cli/src/cli_options.rs b/crates/pglt_cli/src/cli_options.rs index c49f6f798..d61fcd102 100644 --- a/crates/pglt_cli/src/cli_options.rs +++ b/crates/pglt_cli/src/cli_options.rs @@ -48,7 +48,7 @@ pub struct CliOptions { #[bpaf(long("no-errors-on-unmatched"), switch)] pub no_errors_on_unmatched: bool, - /// Tell PGLSP to exit with an error code if some diagnostics emit warnings. + /// Tell PgLT to exit with an error code if some diagnostics emit warnings. #[bpaf(long("error-on-warnings"), switch)] pub error_on_warnings: bool, @@ -86,7 +86,7 @@ pub struct CliOptions { fallback(Severity::default()), display_fallback )] - /// The level of diagnostics to show. In order, from the lowest to the most important: info, warn, error. Passing `--diagnostic-level=error` will cause PGLSP to print only diagnostics that contain only errors. + /// The level of diagnostics to show. In order, from the lowest to the most important: info, warn, error. Passing `--diagnostic-level=error` will cause PgLT to print only diagnostics that contain only errors. pub diagnostic_level: Severity, } diff --git a/crates/pglt_cli/src/commands/daemon.rs b/crates/pglt_cli/src/commands/daemon.rs index 43f96ddcf..336b11fb9 100644 --- a/crates/pglt_cli/src/commands/daemon.rs +++ b/crates/pglt_cli/src/commands/daemon.rs @@ -230,7 +230,7 @@ fn setup_tracing_subscriber(log_path: Option, log_file_name_prefix: Opt } pub fn default_pglt_log_path() -> PathBuf { - match env::var_os("PGLSP_LOG_PATH") { + match env::var_os("PGLT_LOG_PATH") { Some(directory) => PathBuf::from(directory), None => pglt_fs::ensure_cache_dir().join("pglt-logs"), } diff --git a/crates/pglt_cli/src/commands/mod.rs b/crates/pglt_cli/src/commands/mod.rs index db8f53c31..90f48321d 100644 --- a/crates/pglt_cli/src/commands/mod.rs +++ b/crates/pglt_cli/src/commands/mod.rs @@ -72,7 +72,7 @@ pub enum PgltCommand { Start { /// Allows to change the prefix applied to the file name of the logs. #[bpaf( - env("PGLSP_LOG_PREFIX_NAME"), + env("PGLT_LOG_PREFIX_NAME"), long("log-prefix-name"), argument("STRING"), hide_usage, @@ -83,7 +83,7 @@ pub enum PgltCommand { /// Allows to change the folder where logs are stored. #[bpaf( - env("PGLSP_LOG_PATH"), + env("PGLT_LOG_PATH"), long("log-path"), argument("PATH"), hide_usage, @@ -92,7 +92,7 @@ pub enum PgltCommand { log_path: PathBuf, /// Allows to set a custom file path to the configuration file, /// or a custom directory path to find `pglt.toml` - #[bpaf(env("PGLSP_LOG_PREFIX_NAME"), long("config-path"), argument("PATH"))] + #[bpaf(env("PGLT_LOG_PREFIX_NAME"), long("config-path"), argument("PATH"))] config_path: Option, }, @@ -109,7 +109,7 @@ pub enum PgltCommand { LspProxy { /// Allows to change the prefix applied to the file name of the logs. #[bpaf( - env("PGLSP_LOG_PREFIX_NAME"), + env("PGLT_LOG_PREFIX_NAME"), long("log-prefix-name"), argument("STRING"), hide_usage, @@ -119,7 +119,7 @@ pub enum PgltCommand { log_prefix_name: String, /// Allows to change the folder where logs are stored. 
#[bpaf( - env("PGLSP_LOG_PATH"), + env("PGLT_LOG_PATH"), long("log-path"), argument("PATH"), hide_usage, @@ -128,7 +128,7 @@ pub enum PgltCommand { log_path: PathBuf, /// Allows to set a custom file path to the configuration file, /// or a custom directory path to find `pglt.toml` - #[bpaf(env("PGLSP_CONFIG_PATH"), long("config-path"), argument("PATH"))] + #[bpaf(env("PGLT_CONFIG_PATH"), long("config-path"), argument("PATH"))] config_path: Option, /// Bogus argument to make the command work with vscode-languageclient #[bpaf(long("stdio"), hide, hide_usage, switch)] @@ -143,7 +143,7 @@ pub enum PgltCommand { RunServer { /// Allows to change the prefix applied to the file name of the logs. #[bpaf( - env("PGLSP_LOG_PREFIX_NAME"), + env("PGLT_LOG_PREFIX_NAME"), long("log-prefix-name"), argument("STRING"), hide_usage, @@ -153,7 +153,7 @@ pub enum PgltCommand { log_prefix_name: String, /// Allows to change the folder where logs are stored. #[bpaf( - env("PGLSP_LOG_PATH"), + env("PGLT_LOG_PATH"), long("log-path"), argument("PATH"), hide_usage, @@ -165,7 +165,7 @@ pub enum PgltCommand { stop_on_disconnect: bool, /// Allows to set a custom file path to the configuration file, /// or a custom directory path to find `pglt.toml` - #[bpaf(env("PGLSP_CONFIG_PATH"), long("config-path"), argument("PATH"))] + #[bpaf(env("PGLT_CONFIG_PATH"), long("config-path"), argument("PATH"))] config_path: Option, }, #[bpaf(command("__print_socket"), hide)] diff --git a/crates/pglt_cli/src/diagnostics.rs b/crates/pglt_cli/src/diagnostics.rs index fa9b7ed2d..07e43b7dc 100644 --- a/crates/pglt_cli/src/diagnostics.rs +++ b/crates/pglt_cli/src/diagnostics.rs @@ -15,7 +15,7 @@ fn command_name() -> String { .unwrap_or_else(|| String::from("pglt")) } -/// A diagnostic that is emitted when running PGLSP via CLI. +/// A diagnostic that is emitted when running PgLT via CLI. /// /// When displaying the diagnostic, #[derive(Debug, Diagnostic)] diff --git a/crates/pglt_cli/src/lib.rs b/crates/pglt_cli/src/lib.rs index 72585813e..d4e966ae6 100644 --- a/crates/pglt_cli/src/lib.rs +++ b/crates/pglt_cli/src/lib.rs @@ -32,7 +32,7 @@ pub use panic::setup_panic_handler; pub use reporter::{DiagnosticsPayload, Reporter, ReporterVisitor, TraversalSummary}; pub use service::{open_transport, SocketTransport}; -pub(crate) const VERSION: &str = match option_env!("PGLSP_VERSION") { +pub(crate) const VERSION: &str = match option_env!("PGLT_VERSION") { Some(version) => version, None => env!("CARGO_PKG_VERSION"), }; diff --git a/crates/pglt_configuration/src/analyser/linter/rules.rs b/crates/pglt_configuration/src/analyser/linter/rules.rs index 8e9544008..4d4b0ad18 100644 --- a/crates/pglt_configuration/src/analyser/linter/rules.rs +++ b/crates/pglt_configuration/src/analyser/linter/rules.rs @@ -46,7 +46,7 @@ impl std::str::FromStr for RuleGroup { #[cfg_attr(feature = "schema", derive(JsonSchema))] #[serde(rename_all = "camelCase", deny_unknown_fields)] pub struct Rules { - #[doc = r" It enables the lint rules recommended by PGLSP. `true` by default."] + #[doc = r" It enables the lint rules recommended by PgLT. `true` by default."] #[serde(skip_serializing_if = "Option::is_none")] pub recommended: Option, #[doc = r" It enables ALL rules. 
The rules that belong to `nursery` won't be enabled."] diff --git a/crates/pglt_configuration/src/lib.rs b/crates/pglt_configuration/src/lib.rs index d505baf66..297feed46 100644 --- a/crates/pglt_configuration/src/lib.rs +++ b/crates/pglt_configuration/src/lib.rs @@ -34,7 +34,7 @@ use migrations::{ use serde::{Deserialize, Serialize}; use vcs::VcsClientKind; -pub const VERSION: &str = match option_env!("PGLSP_VERSION") { +pub const VERSION: &str = match option_env!("PGLT_VERSION") { Some(version) => version, None => "0.0.0", }; diff --git a/crates/pglt_flags/src/lib.rs b/crates/pglt_flags/src/lib.rs index 162db2f7d..5a8d7efbc 100644 --- a/crates/pglt_flags/src/lib.rs +++ b/crates/pglt_flags/src/lib.rs @@ -8,11 +8,11 @@ use std::sync::{LazyLock, OnceLock}; /// Returns `true` if this is an unstable build of PgLT pub fn is_unstable() -> bool { - PGLSP_VERSION.deref().is_none() + PGLT_VERSION.deref().is_none() } /// The internal version of PgLT. This is usually supplied during the CI build -pub static PGLSP_VERSION: LazyLock> = LazyLock::new(|| option_env!("PGLSP_VERSION")); +pub static PGLT_VERSION: LazyLock> = LazyLock::new(|| option_env!("PGLT_VERSION")); pub struct PgLTEnv { pub pglt_log_path: PgLTEnvVariable, @@ -20,21 +20,21 @@ pub struct PgLTEnv { pub pglt_config_path: PgLTEnvVariable, } -pub static PGLSP_ENV: OnceLock = OnceLock::new(); +pub static PGLT_ENV: OnceLock = OnceLock::new(); impl PgLTEnv { fn new() -> Self { Self { pglt_log_path: PgLTEnvVariable::new( - "PGLSP_LOG_PATH", + "PGLT_LOG_PATH", "The directory where the Daemon logs will be saved.", ), pglt_log_prefix: PgLTEnvVariable::new( - "PGLSP_LOG_PREFIX_NAME", + "PGLT_LOG_PREFIX_NAME", "A prefix that's added to the name of the log. Default: `server.log.`", ), pglt_config_path: PgLTEnvVariable::new( - "PGLSP_CONFIG_PATH", + "PGLT_CONFIG_PATH", "A path to the configuration file", ), } @@ -71,7 +71,7 @@ impl PgLTEnvVariable { } pub fn pglt_env() -> &'static PgLTEnv { - PGLSP_ENV.get_or_init(PgLTEnv::new) + PGLT_ENV.get_or_init(PgLTEnv::new) } impl Display for PgLTEnv { diff --git a/crates/pglt_fs/src/fs.rs b/crates/pglt_fs/src/fs.rs index 055d5eb5a..76f20d9aa 100644 --- a/crates/pglt_fs/src/fs.rs +++ b/crates/pglt_fs/src/fs.rs @@ -18,14 +18,14 @@ mod os; pub struct ConfigName; impl ConfigName { - const PGLSP_TOML: [&'static str; 1] = ["pglt.toml"]; + const PGLT_TOML: [&'static str; 1] = ["pglt.toml"]; pub const fn pglt_toml() -> &'static str { - Self::PGLSP_TOML[0] + Self::PGLT_TOML[0] } pub const fn file_names() -> [&'static str; 1] { - Self::PGLSP_TOML + Self::PGLT_TOML } } diff --git a/crates/pglt_fs/src/path.rs b/crates/pglt_fs/src/path.rs index b1411c53d..1194246d1 100644 --- a/crates/pglt_fs/src/path.rs +++ b/crates/pglt_fs/src/path.rs @@ -91,7 +91,7 @@ impl From for FileKinds { )] pub struct PgLTPath { path: PathBuf, - /// Determines the kind of the file inside PGLSP. Some files are considered as configuration files, others as manifest files, and others as files to handle + /// Determines the kind of the file inside PgLT. Some files are considered as configuration files, others as manifest files, and others as files to handle kind: FileKinds, /// Whether this path (usually a file) was fixed as a result of a format/lint/check command with the `--write` filag. 
was_written: bool, @@ -164,7 +164,7 @@ impl PgLTPath { /// Returns the contents of a file, if it exists /// /// ## Error - /// If PGLSP doesn't have permissions to read the file + /// If PgLT doesn't have permissions to read the file pub fn get_buffer_from_file(&mut self) -> String { // we assume we have permissions read_to_string(&self.path).expect("cannot read the file to format") diff --git a/crates/pglt_workspace/src/workspace/server.rs b/crates/pglt_workspace/src/workspace/server.rs index 31ad999d5..45ffb1987 100644 --- a/crates/pglt_workspace/src/workspace/server.rs +++ b/crates/pglt_workspace/src/workspace/server.rs @@ -111,7 +111,7 @@ impl WorkspaceServer { /// Check whether a file is ignored in the top-level config `files.ignore`/`files.include` fn is_ignored(&self, path: &Path) -> bool { let file_name = path.file_name().and_then(|s| s.to_str()); - // Never ignore PGLSP's config file regardless `include`/`ignore` + // Never ignore PgLT's config file regardless `include`/`ignore` (file_name != Some(ConfigName::pglt_toml())) && // Apply top-level `include`/`ignore (self.is_ignored_by_top_level_config(path) || self.is_ignored_by_migration_config(path)) @@ -130,7 +130,7 @@ impl WorkspaceServer { // `matched_path_or_any_parents` panics if `source` is not under the gitignore root. // This checks excludes absolute paths that are not a prefix of the base root. if !path.has_root() || path.starts_with(ignore.path()) { - // Because PGLSP passes a list of paths, + // Because PgLT passes a list of paths, // we use `matched_path_or_any_parents` instead of `matched`. ignore .matched_path_or_any_parents(path, path.is_dir()) diff --git a/docs/cli_reference.md b/docs/cli_reference.md index a4248d3c8..2dfe7ee6d 100644 --- a/docs/cli_reference.md +++ b/docs/cli_reference.md @@ -72,7 +72,7 @@ Shows the version information and quit. - **` --no-errors-on-unmatched`** — Silence errors that would be emitted in case no files were processed during the execution of the command. - **` --error-on-warnings`** — - Tell PGLSP to exit with an error code if some diagnostics emit warnings. + Tell PgLT to exit with an error code if some diagnostics emit warnings. - **` --reporter`**=_``_ — Allows to change how diagnostics and summary are reported. - **` --log-level`**=_``_ — @@ -86,7 +86,7 @@ Shows the version information and quit. [default: pretty] - **` --diagnostic-level`**=_``_ — - The level of diagnostics to show. In order, from the lowest to the most important: info, warn, error. Passing `--diagnostic-level=error` will cause PGLSP to print only diagnostics that contain only errors. + The level of diagnostics to show. In order, from the lowest to the most important: info, warn, error. Passing `--diagnostic-level=error` will cause PgLT to print only diagnostics that contain only errors. [default: info] @@ -159,7 +159,7 @@ Runs everything to the requested files. - **` --no-errors-on-unmatched`** — Silence errors that would be emitted in case no files were processed during the execution of the command. - **` --error-on-warnings`** — - Tell PGLSP to exit with an error code if some diagnostics emit warnings. + Tell PgLT to exit with an error code if some diagnostics emit warnings. - **` --reporter`**=_``_ — Allows to change how diagnostics and summary are reported. - **` --log-level`**=_``_ — @@ -173,7 +173,7 @@ Runs everything to the requested files. [default: pretty] - **` --diagnostic-level`**=_``_ — - The level of diagnostics to show. In order, from the lowest to the most important: info, warn, error. 
Passing `--diagnostic-level=error` will cause PGLSP to print only diagnostics that contain only errors. + The level of diagnostics to show. In order, from the lowest to the most important: info, warn, error. Passing `--diagnostic-level=error` will cause PgLT to print only diagnostics that contain only errors. [default: info] @@ -212,17 +212,17 @@ Starts the daemon server process. - **` --log-prefix-name`**=_`STRING`_ — Allows to change the prefix applied to the file name of the logs. - Uses environment variable **`PGLSP_LOG_PREFIX_NAME`** + Uses environment variable **`PGLT_LOG_PREFIX_NAME`** [default: server.log] - **` --log-path`**=_`PATH`_ — Allows to change the folder where logs are stored. - Uses environment variable **`PGLSP_LOG_PATH`** + Uses environment variable **`PGLT_LOG_PATH`** - **` --config-path`**=_`PATH`_ — Allows to set a custom file path to the configuration file, or a custom directory path to find `pglt.toml` - Uses environment variable **`PGLSP_LOG_PREFIX_NAME`** + Uses environment variable **`PGLT_LOG_PREFIX_NAME`** - **`-h`**, **`--help`** — Prints help information @@ -259,17 +259,17 @@ Acts as a server for the Language Server Protocol over stdin/stdout. - **` --log-prefix-name`**=_`STRING`_ — Allows to change the prefix applied to the file name of the logs. - Uses environment variable **`PGLSP_LOG_PREFIX_NAME`** + Uses environment variable **`PGLT_LOG_PREFIX_NAME`** [default: server.log] - **` --log-path`**=_`PATH`_ — Allows to change the folder where logs are stored. - Uses environment variable **`PGLSP_LOG_PATH`** + Uses environment variable **`PGLT_LOG_PATH`** - **` --config-path`**=_`PATH`_ — Allows to set a custom file path to the configuration file, or a custom directory path to find `pglt.toml` - Uses environment variable **`PGLSP_CONFIG_PATH`** + Uses environment variable **`PGLT_CONFIG_PATH`** - **`-h`**, **`--help`** — Prints help information diff --git a/docs/codegen/Cargo.toml b/docs/codegen/Cargo.toml index 34f9e29d0..c7898c7a2 100644 --- a/docs/codegen/Cargo.toml +++ b/docs/codegen/Cargo.toml @@ -20,4 +20,13 @@ bpaf = { workspace = true, features = ["docgen"] } pglt_configuration = { workspace = true } pglt_flags = { workspace = true } pglt_cli = { workspace = true } +pglt_analyse = { workspace = true } +pglt_analyser = { workspace = true } +pglt_diagnostics = { workspace = true } +pglt_query_ext = { workspace = true } +pglt_workspace = { workspace = true } +pglt_statement_splitter = { workspace = true } +pglt_console = { workspace = true } +biome_string_case = { workspace = true } +pulldown-cmark = "0.12.2" diff --git a/docs/codegen/src/lib.rs b/docs/codegen/src/lib.rs index 2899d08b6..6ff084603 100644 --- a/docs/codegen/src/lib.rs +++ b/docs/codegen/src/lib.rs @@ -1,4 +1,8 @@ pub mod cli_doc; pub mod default_configuration; pub mod env_variables; +pub mod rules_docs; +pub mod rules_index; +pub mod rules_sources; + mod utils; diff --git a/docs/codegen/src/main.rs b/docs/codegen/src/main.rs index cac99db88..a03a1bf05 100644 --- a/docs/codegen/src/main.rs +++ b/docs/codegen/src/main.rs @@ -4,6 +4,9 @@ use std::path::{Path, PathBuf}; use docs_codegen::cli_doc::generate_cli_doc; use docs_codegen::default_configuration::generate_default_configuration; use docs_codegen::env_variables::generate_env_variables; +use docs_codegen::rules_docs::generate_rules_docs; +use docs_codegen::rules_index::generate_rules_index; +use docs_codegen::rules_sources::generate_rule_sources; fn docs_root() -> PathBuf { let dir = @@ -17,6 +20,9 @@ fn main() -> anyhow::Result<()> { 
generate_default_configuration(&docs_root)?; generate_env_variables(&docs_root)?; generate_cli_doc(&docs_root)?; + generate_rules_docs(&docs_root)?; + generate_rules_index(&docs_root)?; + generate_rule_sources(&docs_root)?; Ok(()) } diff --git a/docs/codegen/src/rules_docs.rs b/docs/codegen/src/rules_docs.rs new file mode 100644 index 000000000..353021a44 --- /dev/null +++ b/docs/codegen/src/rules_docs.rs @@ -0,0 +1,469 @@ +use anyhow::{bail, Result}; +use biome_string_case::Case; +use pglt_analyse::{AnalyserOptions, AnalysisFilter, RuleFilter, RuleMetadata}; +use pglt_analyser::{Analyser, AnalyserConfig}; +use pglt_console::fmt::{Formatter, HTML}; +use pglt_console::markup; +use pglt_diagnostics::{Diagnostic, DiagnosticExt, PrintDiagnostic}; +use pglt_query_ext::diagnostics::SyntaxDiagnostic; +use pglt_workspace::settings::Settings; +use pulldown_cmark::{CodeBlockKind, Event, LinkType, Parser, Tag, TagEnd}; +use std::{ + fmt::Write as _, + fs, + io::{self, Write as _}, + path::Path, + slice, + str::{self, FromStr}, +}; + +/// +/// Generates the documentation page for each lint rule. +/// +/// * `docs_dir`: Path to the docs directory. +pub fn generate_rules_docs(docs_dir: &Path) -> anyhow::Result<()> { + let rules_dir = docs_dir.join("rules"); + + if rules_dir.exists() { + fs::remove_dir_all(&rules_dir)?; + } + fs::create_dir_all(&rules_dir)?; + + let mut visitor = crate::utils::LintRulesVisitor::default(); + pglt_analyser::visit_registry(&mut visitor); + + let crate::utils::LintRulesVisitor { groups } = visitor; + + for (group, rules) in groups { + for (rule, metadata) in rules { + let content = generate_rule_doc(group, rule, metadata)?; + let dashed_rule = Case::Kebab.convert(rule); + fs::write(rules_dir.join(format!("{}.md", dashed_rule)), content)?; + } + } + + Ok(()) +} + +fn generate_rule_doc( + group: &'static str, + rule: &'static str, + meta: RuleMetadata, +) -> Result { + let mut content = Vec::new(); + + writeln!(content, "# {rule}")?; + + writeln!( + content, + "**Diagnostic Category: `lint/{}/{}`**", + group, rule + )?; + + let is_recommended = meta.recommended; + + // add deprecation notice + if let Some(reason) = &meta.deprecated { + writeln!(content, ":::caution[Deprecated]")?; + writeln!(content, "This rule is deprecated and will be removed in the next major release.\n**Reason**: {reason}")?; + writeln!(content, ":::")?; + } + + writeln!(content, "**Since**: `v{}`", meta.version)?; + + // add recommended notice + if is_recommended { + writeln!(content, ":::note")?; + writeln!( + content, + "- This rule is recommended. A diagnostic error will appear when linting your code." + )?; + writeln!(content, ":::")?; + } + + // add source information + if !meta.sources.is_empty() { + writeln!(content, "Sources: ")?; + + for source in meta.sources { + let rule_name = source.to_namespaced_rule_name(); + let source_rule_url = source.to_rule_url(); + write!(content, "- Inspired from: ")?; + writeln!( + content, + "{rule_name}" + )?; + } + writeln!(content)?; + } + + write_documentation(group, rule, meta.docs, &mut content)?; + + write_how_to_configure(group, rule, &mut content)?; + + Ok(String::from_utf8(content)?) 
+} + +fn write_how_to_configure( + group: &'static str, + rule: &'static str, + content: &mut Vec, +) -> io::Result<()> { + writeln!(content, "## How to configure")?; + let toml = format!( + r#"[linter.rules.{group}] +{rule} = "error" +"# + ); + + writeln!(content, "```toml title=\"pglt.toml\"")?; + writeln!(content, "{}", toml)?; + writeln!(content, "```")?; + + Ok(()) +} + +/// Parse the documentation fragment for a lint rule (in markdown) and generates +/// the content for the corresponding documentation page +fn write_documentation( + group: &'static str, + rule: &'static str, + docs: &'static str, + content: &mut Vec, +) -> Result<()> { + writeln!(content, "## Description")?; + + let parser = Parser::new(docs); + + // Tracks the content of the current code block if it's using a + // language supported for analysis + let mut language = None; + let mut list_order = None; + let mut list_indentation = 0; + + // Tracks the type and metadata of the link + let mut start_link_tag: Option = None; + + for event in parser { + match event { + // CodeBlock-specific handling + Event::Start(Tag::CodeBlock(CodeBlockKind::Fenced(meta))) => { + // Track the content of code blocks to pass them through the analyzer + let test = CodeBlockTest::from_str(meta.as_ref())?; + + // Erase the lintdoc-specific attributes in the output by + // re-generating the language ID from the source type + write!(content, "```{}", &test.tag)?; + writeln!(content)?; + + language = Some((test, String::new())); + } + + Event::End(TagEnd::CodeBlock) => { + writeln!(content, "```")?; + writeln!(content)?; + + if let Some((test, block)) = language.take() { + if test.expect_diagnostic { + write!( + content, + "
"
+                        )?;
+                    }
+
+                    print_diagnostics(group, rule, &test, &block, content)?;
+
+                    if test.expect_diagnostic {
+                        writeln!(content, "
")?; + writeln!(content)?; + } + } + } + + Event::Text(text) => { + let mut hide_line = false; + + if let Some((test, block)) = &mut language { + if let Some(inner_text) = text.strip_prefix("# ") { + // Lines prefixed with "# " are hidden from the public documentation + write!(block, "{inner_text}")?; + hide_line = true; + test.hidden_lines.push(test.line_count); + } else { + write!(block, "{text}")?; + } + test.line_count += 1; + } + + if hide_line { + // Line should not be emitted into the output + } else if matches!(text.as_ref(), "`" | "*" | "_") { + write!(content, "\\{text}")?; + } else { + write!(content, "{text}")?; + } + } + + // Other markdown events are emitted as-is + Event::Start(Tag::Heading { level, .. }) => { + write!(content, "{} ", "#".repeat(level as usize))?; + } + Event::End(TagEnd::Heading { .. }) => { + writeln!(content)?; + writeln!(content)?; + } + + Event::Start(Tag::Paragraph) => { + continue; + } + Event::End(TagEnd::Paragraph) => { + writeln!(content)?; + writeln!(content)?; + } + + Event::Code(text) => { + write!(content, "`{text}`")?; + } + Event::Start(ref link_tag @ Tag::Link { link_type, .. }) => { + start_link_tag = Some(link_tag.clone()); + match link_type { + LinkType::Autolink => { + write!(content, "<")?; + } + LinkType::Inline | LinkType::Reference | LinkType::Shortcut => { + write!(content, "[")?; + } + _ => { + panic!("unimplemented link type") + } + } + } + Event::End(TagEnd::Link) => { + if let Some(Tag::Link { + link_type, + dest_url, + title, + .. + }) = start_link_tag + { + match link_type { + LinkType::Autolink => { + write!(content, ">")?; + } + LinkType::Inline | LinkType::Reference | LinkType::Shortcut => { + write!(content, "]({dest_url}")?; + if !title.is_empty() { + write!(content, " \"{title}\"")?; + } + write!(content, ")")?; + } + _ => { + panic!("unimplemented link type") + } + } + start_link_tag = None; + } else { + panic!("missing start link tag"); + } + } + + Event::SoftBreak => { + writeln!(content)?; + } + + Event::HardBreak => { + writeln!(content, "
")?; + } + + Event::Start(Tag::List(num)) => { + list_indentation += 1; + if let Some(num) = num { + list_order = Some(num); + } + if list_indentation > 1 { + writeln!(content)?; + } + } + + Event::End(TagEnd::List(_)) => { + list_order = None; + list_indentation -= 1; + writeln!(content)?; + } + Event::Start(Tag::Item) => { + write!(content, "{}", " ".repeat(list_indentation - 1))?; + if let Some(num) = list_order { + write!(content, "{num}. ")?; + } else { + write!(content, "- ")?; + } + } + + Event::End(TagEnd::Item) => { + list_order = list_order.map(|item| item + 1); + writeln!(content)?; + } + + Event::Start(Tag::Strong) => { + write!(content, "**")?; + } + + Event::End(TagEnd::Strong) => { + write!(content, "**")?; + } + + Event::Start(Tag::Emphasis) => { + write!(content, "_")?; + } + + Event::End(TagEnd::Emphasis) => { + write!(content, "_")?; + } + + Event::Start(Tag::Strikethrough) => { + write!(content, "~")?; + } + + Event::End(TagEnd::Strikethrough) => { + write!(content, "~")?; + } + + Event::Start(Tag::BlockQuote(_)) => { + write!(content, ">")?; + } + + Event::End(TagEnd::BlockQuote(_)) => { + writeln!(content)?; + } + + _ => { + bail!("unimplemented event {event:?}") + } + } + } + + Ok(()) +} + +struct CodeBlockTest { + /// The language tag of this code block. + tag: String, + + /// True if this is an invalid example that should trigger a diagnostic. + expect_diagnostic: bool, + + /// Whether to ignore this code block. + ignore: bool, + + /// The number of lines in this code block. + line_count: u32, + + // The indices of lines that should be hidden from the public documentation. + hidden_lines: Vec, +} + +impl FromStr for CodeBlockTest { + type Err = anyhow::Error; + + fn from_str(input: &str) -> Result { + // This is based on the parsing logic for code block languages in `rustdoc`: + // https://github.com/rust-lang/rust/blob/6ac8adad1f7d733b5b97d1df4e7f96e73a46db42/src/librustdoc/html/markdown.rs#L873 + let tokens = input + .split([',', ' ', '\t']) + .map(str::trim) + .filter(|token| !token.is_empty()); + + let mut test = CodeBlockTest { + tag: String::new(), + expect_diagnostic: false, + ignore: false, + line_count: 0, + hidden_lines: vec![], + }; + + for token in tokens { + match token { + // Other attributes + "expect_diagnostic" => test.expect_diagnostic = true, + "ignore" => test.ignore = true, + // Regard as language tags, last one wins + _ => test.tag = token.to_string(), + } + } + + Ok(test) + } +} + +/// Prints diagnostics documentation from a gode block into the content buffer. +/// +/// * `group`: The group of the rule. +/// * `rule`: The rule name. +/// * `test`: The code block test. +/// * `code`: The code block content. +/// * `content`: The buffer to write the documentation to. +fn print_diagnostics( + group: &'static str, + rule: &'static str, + test: &CodeBlockTest, + code: &str, + content: &mut Vec, +) -> Result<()> { + let file_path = format!("code-block.{}", test.tag); + + let mut write = HTML::new(content).with_mdx(); + + let mut write_diagnostic = |_: &str, diag: pglt_diagnostics::Error| -> Result<()> { + Formatter::new(&mut write).write_markup(markup! 
{ + {PrintDiagnostic::verbose(&diag)} + })?; + Ok(()) + }; + if test.ignore { + return Ok(()); + } + + let rule_filter = RuleFilter::Rule(group, rule); + let filter = AnalysisFilter { + enabled_rules: Some(slice::from_ref(&rule_filter)), + ..AnalysisFilter::default() + }; + let settings = Settings::default(); + let options = AnalyserOptions::default(); + let analyser = Analyser::new(AnalyserConfig { + options: &options, + filter, + }); + + // split and parse each statement + let stmts = pglt_statement_splitter::split(code); + for stmt in stmts.ranges { + match pglt_query_ext::parse(&code[stmt]) { + Ok(ast) => { + for rule_diag in analyser.run(pglt_analyser::AnalyserContext { root: &ast }) { + let diag = pglt_diagnostics::serde::Diagnostic::new(rule_diag); + + let category = diag.category().expect("linter diagnostic has no code"); + let severity = settings.get_severity_from_rule_code(category).expect( + "If you see this error, it means you need to run cargo codegen-configuration", + ); + + let error = diag + .with_severity(severity) + .with_file_path(&file_path) + .with_file_source_code(code); + + write_diagnostic(code, error)?; + } + } + Err(e) => { + let error = SyntaxDiagnostic::from(e) + .with_file_path(&file_path) + .with_file_source_code(code); + write_diagnostic(code, error)?; + } + }; + } + + Ok(()) +} diff --git a/docs/codegen/src/rules_index.rs b/docs/codegen/src/rules_index.rs new file mode 100644 index 000000000..a00ab365c --- /dev/null +++ b/docs/codegen/src/rules_index.rs @@ -0,0 +1,124 @@ +use biome_string_case::Case; +use pglt_analyse::RuleMetadata; +use pglt_console::fmt::{Formatter, HTML}; +use pglt_console::{markup, Markup}; +use pulldown_cmark::{Event, Parser, Tag, TagEnd}; +use std::{ + collections::BTreeMap, + fs, + io::{self}, + path::Path, + str::{self}, +}; + +use crate::utils; + +/// Generates the lint rules index. +/// +/// * `docs_dir`: Path to the docs directory. 
+pub fn generate_rules_index(docs_dir: &Path) -> anyhow::Result<()> { + let index_file = docs_dir.join("rules.md"); + + let mut visitor = crate::utils::LintRulesVisitor::default(); + pglt_analyser::visit_registry(&mut visitor); + + let crate::utils::LintRulesVisitor { groups } = visitor; + + let mut content = Vec::new(); + + for (group, rules) in groups { + generate_group(group, rules, &mut content)?; + } + + let new_content = String::from_utf8(content)?; + + let file_content = fs::read_to_string(&index_file)?; + + let new_content = utils::replace_section(&file_content, "RULES_INDEX", &new_content); + + fs::write(index_file, new_content)?; + + Ok(()) +} + +fn generate_group( + group: &'static str, + rules: BTreeMap<&'static str, RuleMetadata>, + content: &mut dyn io::Write, +) -> io::Result<()> { + let (group_name, description) = extract_group_metadata(group); + + writeln!(content, "\n## {group_name}")?; + writeln!(content)?; + write_markup_to_string(content, description)?; + writeln!(content)?; + writeln!(content, "| Rule name | Description | Properties |")?; + writeln!(content, "| --- | --- | --- |")?; + + for (rule_name, rule_metadata) in rules { + let is_recommended = rule_metadata.recommended; + let dashed_rule = Case::Kebab.convert(rule_name); + + let mut properties = String::new(); + if is_recommended { + properties.push_str(""); + } + + let summary = generate_rule_summary(rule_metadata.docs)?; + + write!( + content, + "| [{rule_name}](./rules/{dashed_rule}) | {summary} | {properties} |" + )?; + + writeln!(content)?; + } + + Ok(()) +} + +fn extract_group_metadata(group: &str) -> (&str, Markup) { + match group { + "safety" => ( + "Safety", + markup! { + "Rules that detect potential safety issues in your code." + }, + ), + _ => panic!("Unknown group ID {group:?}"), + } +} + +fn write_markup_to_string(buffer: &mut dyn io::Write, markup: Markup) -> io::Result<()> { + let mut write = HTML::new(buffer).with_mdx(); + let mut fmt = Formatter::new(&mut write); + fmt.write_markup(markup) +} + +/// Parsed the rule documentation to extract the summary. +/// The summary is the first paragraph in the rule documentation. 
+fn generate_rule_summary(docs: &'static str) -> io::Result { + let parser = Parser::new(docs); + + let mut buffer = String::new(); + + for event in parser { + match event { + Event::Start(Tag::Paragraph) => { + continue; + } + Event::Text(text) => { + buffer.push_str(&text); + } + Event::Code(code) => { + buffer.push_str(format!("`{}`", code).as_str()); + } + Event::End(TagEnd::Paragraph) => { + return Ok(buffer); + } + _ => {} + } + } + + panic!("No summary found in rule documentation"); +} diff --git a/docs/codegen/src/rules_sources.rs b/docs/codegen/src/rules_sources.rs new file mode 100644 index 000000000..1473809db --- /dev/null +++ b/docs/codegen/src/rules_sources.rs @@ -0,0 +1,112 @@ +use anyhow::Result; +use biome_string_case::Case; +use pglt_analyse::RuleMetadata; +use std::cmp::Ordering; +use std::collections::{BTreeMap, BTreeSet}; +use std::fs; +use std::io::Write; +use std::path::Path; + +#[derive(Debug, Eq, PartialEq)] +struct SourceSet { + source_rule_name: String, + source_link: String, + rule_name: String, + link: String, +} + +impl Ord for SourceSet { + fn cmp(&self, other: &Self) -> Ordering { + self.source_rule_name.cmp(&other.source_rule_name) + } +} + +impl PartialOrd for SourceSet { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +pub fn generate_rule_sources(docs_dir: &Path) -> anyhow::Result<()> { + let rule_sources_file = docs_dir.join("rule_sources.md"); + + let mut visitor = crate::utils::LintRulesVisitor::default(); + pglt_analyser::visit_registry(&mut visitor); + + let crate::utils::LintRulesVisitor { groups } = visitor; + + let mut buffer = Vec::new(); + + let rules = groups + .into_iter() + .flat_map(|(_, rule)| rule) + .collect::>(); + + let mut rules_by_source = BTreeMap::>::new(); + let mut exclusive_rules = BTreeSet::<(String, String)>::new(); + + for (rule_name, metadata) in rules { + if metadata.version == "next" { + continue; + } + let kebab_rule_name = Case::Kebab.convert(rule_name); + if metadata.sources.is_empty() { + exclusive_rules.insert((rule_name.to_string(), format!("./rules/{kebab_rule_name}"))); + } else { + for source in metadata.sources { + let source_set = SourceSet { + rule_name: rule_name.to_string(), + link: format!("./rules/{kebab_rule_name}"), + source_link: source.to_rule_url(), + source_rule_name: source.as_rule_name().to_string(), + }; + + if let Some(set) = rules_by_source.get_mut(&format!("{source}")) { + set.insert(source_set); + } else { + let mut set = BTreeSet::new(); + set.insert(source_set); + rules_by_source.insert(format!("{source}"), set); + } + } + } + } + + writeln!(buffer, "## Exclusive rules",)?; + for (rule, link) in exclusive_rules { + writeln!(buffer, "- [{rule}]({link}) ")?; + } + + writeln!(buffer, "## Rules from other sources",)?; + + for (source, rules) in rules_by_source { + writeln!(buffer, "### {source}")?; + writeln!(buffer, r#"| {source} Rule Name | Rule Name |"#)?; + writeln!(buffer, r#"| ---- | ---- |"#)?; + + push_to_table(rules, &mut buffer)?; + } + + let new_content = String::from_utf8(buffer)?; + + fs::write(rule_sources_file, new_content)?; + + Ok(()) +} + +fn push_to_table(source_set: BTreeSet, buffer: &mut Vec) -> Result<()> { + for source_set in source_set { + write!( + buffer, + "| [{}]({}) |[{}]({})", + source_set.source_rule_name, + source_set.source_link, + source_set.rule_name, + source_set.link + )?; + + writeln!(buffer, " |")?; + } + + Ok(()) +} diff --git a/docs/codegen/src/utils.rs b/docs/codegen/src/utils.rs index 4c179f7c0..5646468eb 100644 --- 
a/docs/codegen/src/utils.rs
+++ b/docs/codegen/src/utils.rs
@@ -1,4 +1,6 @@
+use pglt_analyse::{GroupCategory, RegistryVisitor, Rule, RuleCategory, RuleGroup, RuleMetadata};
 use regex::Regex;
+use std::collections::BTreeMap;
 
 pub(crate) fn replace_section(
     content: &str,
@@ -13,3 +15,41 @@ pub(crate) fn replace_section(
     re.replace_all(content, format!("${{1}}{}${{2}}", replacement))
         .to_string()
 }
+
+#[derive(Default)]
+pub(crate) struct LintRulesVisitor {
+    /// This is mapped to:
+    /// - group (correctness) -> list of rules
+    /// - list or rules is mapped to
+    ///   - rule name -> metadata
+    pub(crate) groups: BTreeMap<&'static str, BTreeMap<&'static str, RuleMetadata>>,
+}
+
+impl LintRulesVisitor {
+    fn push_rule<R>(&mut self)
+    where
+        R: Rule + 'static,
+    {
+        let group = self
+            .groups
+            .entry(<R::Group as RuleGroup>::NAME)
+            .or_default();
+
+        group.insert(R::METADATA.name, R::METADATA);
+    }
+}
+
+impl RegistryVisitor for LintRulesVisitor {
+    fn record_category<C: GroupCategory>(&mut self) {
+        if matches!(C::CATEGORY, RuleCategory::Lint) {
+            C::record_groups(self);
+        }
+    }
+
+    fn record_rule<R>(&mut self)
+    where
+        R: Rule + 'static,
+    {
+        self.push_rule::<R>()
+    }
+}
diff --git a/docs/env_variables.md b/docs/env_variables.md
index 8b80b862c..e8f2d493c 100644
--- a/docs/env_variables.md
+++ b/docs/env_variables.md
@@ -3,15 +3,15 @@
 
 [//]: # (BEGIN ENV_VARS)
 
-### `PGLSP_LOG_PATH`
+### `PGLT_LOG_PATH`
 
 The directory where the Daemon logs will be saved.
 
-### `PGLSP_LOG_PREFIX_NAME`
+### `PGLT_LOG_PREFIX_NAME`
 
 A prefix that's added to the name of the log. Default: `server.log.`
 
-### `PGLSP_CONFIG_PATH`
+### `PGLT_CONFIG_PATH`
 
 A path to the configuration file
 
diff --git a/docs/rule_sources.md b/docs/rule_sources.md
new file mode 100644
index 000000000..87aed41c0
--- /dev/null
+++ b/docs/rule_sources.md
@@ -0,0 +1,2 @@
+## Exclusive rules
+## Rules from other sources
diff --git a/docs/rules.md b/docs/rules.md
new file mode 100644
index 000000000..cf933b4ba
--- /dev/null
+++ b/docs/rules.md
@@ -0,0 +1,19 @@
+# Rules
+
+Below is the list of rules supported by Postgres Language Tools, divided by group. Here's a legend of the emojis:
+
+- The icon indicates that the rule is part of the recommended rules.
+
+[//]: # (BEGIN RULES_INDEX)
+
+## Safety
+
+Rules that detect potential safety issues in your code.
+| Rule name | Description | Properties |
+| --- | --- | --- |
+| [banDropColumn](./rules/ban-drop-column) | Dropping a column may break existing clients. | |
+| [banDropNotNull](./rules/ban-drop-not-null) | Dropping a NOT NULL constraint may break existing clients. | |
+
+[//]: # (END RULES_INDEX)
+
+
diff --git a/docs/rules/ban-drop-column.md b/docs/rules/ban-drop-column.md
new file mode 100644
index 000000000..c8bbead99
--- /dev/null
+++ b/docs/rules/ban-drop-column.md
@@ -0,0 +1,32 @@
+# banDropColumn
+**Diagnostic Category: `lint/safety/banDropColumn`**
+**Since**: `vnext`
+:::note
+- This rule is recommended. A diagnostic error will appear when linting your code.
+:::
+Sources: 
+- Inspired from: squawk/ban-drop-column
+
+## Description
+Dropping a column may break existing clients.
+
+Update your application code to no longer read or write the column.
+
+You can leave the column as nullable or delete the column once queries no longer select or modify the column.
+
+## Examples
+
+### Invalid
+
+```sql
+alter table test drop column id;
+```
+
+
code-block.sql lint/safety/banDropColumn ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━

Dropping a column may break existing clients.

You can leave the column as nullable or delete the column once queries no longer select or modify the column.

+ +## How to configure +```toml title="pglt.toml" +[linter.rules.safety] +banDropColumn = "error" + +``` diff --git a/docs/rules/ban-drop-not-null.md b/docs/rules/ban-drop-not-null.md new file mode 100644 index 000000000..be2b808c0 --- /dev/null +++ b/docs/rules/ban-drop-not-null.md @@ -0,0 +1,32 @@ +# banDropNotNull +**Diagnostic Category: `lint/safety/banDropNotNull`** +**Since**: `vnext` +:::note +- This rule is recommended. A diagnostic error will appear when linting your code. +::: +Sources: +- Inspired from: squawk/ban-drop-not-null + +## Description +Dropping a NOT NULL constraint may break existing clients. + +Application code or code written in procedural languages like PL/SQL or PL/pgSQL may not expect NULL values for the column that was previously guaranteed to be NOT NULL and therefore may fail to process them correctly. + +You can consider using a marker value that represents NULL. Alternatively, create a new table allowing NULL values, copy the data from the old table, and create a view that filters NULL values. + +## Examples + +### Invalid + +```sql +alter table users alter column email drop not null; +``` + +
code-block.sql lint/safety/banDropNotNull ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━

Dropping a NOT NULL constraint may break existing clients.

Consider using a marker value that represents NULL. Alternatively, create a new table allowing NULL values, copy the data from the old table, and create a view that filters NULL values.

+ +## How to configure +```toml title="pglt.toml" +[linter.rules.safety] +banDropNotNull = "error" + +``` diff --git a/xtask/codegen/src/generate_configuration.rs b/xtask/codegen/src/generate_configuration.rs index 398d461ae..cecf1a045 100644 --- a/xtask/codegen/src/generate_configuration.rs +++ b/xtask/codegen/src/generate_configuration.rs @@ -299,7 +299,7 @@ fn generate_for_groups( #[cfg_attr(feature = "schema", derive(JsonSchema))] #[serde(rename_all = "camelCase", deny_unknown_fields)] pub struct Rules { - /// It enables the lint rules recommended by PGLSP. `true` by default. + /// It enables the lint rules recommended by PgLT. `true` by default. #[serde(skip_serializing_if = "Option::is_none")] pub recommended: Option, From b7a9009e1a483f26a0cba524d1cd23c505016d6c Mon Sep 17 00:00:00 2001 From: psteinroe Date: Sun, 16 Feb 2025 17:34:36 +0100 Subject: [PATCH 05/14] fix: docs --- docs/codegen/src/rules_docs.rs | 14 +++++++------- docs/rules/ban-drop-column.md | 7 ++++--- docs/rules/ban-drop-not-null.md | 7 ++++--- 3 files changed, 15 insertions(+), 13 deletions(-) diff --git a/docs/codegen/src/rules_docs.rs b/docs/codegen/src/rules_docs.rs index 353021a44..ea97aff41 100644 --- a/docs/codegen/src/rules_docs.rs +++ b/docs/codegen/src/rules_docs.rs @@ -17,7 +17,6 @@ use std::{ str::{self, FromStr}, }; -/// /// Generates the documentation page for each lint rule. /// /// * `docs_dir`: Path to the docs directory. @@ -64,23 +63,24 @@ fn generate_rule_doc( // add deprecation notice if let Some(reason) = &meta.deprecated { - writeln!(content, ":::caution[Deprecated]")?; - writeln!(content, "This rule is deprecated and will be removed in the next major release.\n**Reason**: {reason}")?; - writeln!(content, ":::")?; + writeln!(content, "> [!WARNING]")?; + writeln!(content, "> This rule is deprecated and will be removed in the next major release.\n**Reason**: {reason}")?; } + writeln!(content)?; writeln!(content, "**Since**: `v{}`", meta.version)?; // add recommended notice if is_recommended { - writeln!(content, ":::note")?; + writeln!(content, "> [!NOTE]")?; writeln!( content, - "- This rule is recommended. A diagnostic error will appear when linting your code." + "> - This rule is recommended. A diagnostic error will appear when linting your code." )?; - writeln!(content, ":::")?; } + writeln!(content)?; + // add source information if !meta.sources.is_empty() { writeln!(content, "Sources: ")?; diff --git a/docs/rules/ban-drop-column.md b/docs/rules/ban-drop-column.md index c8bbead99..b93a4acfc 100644 --- a/docs/rules/ban-drop-column.md +++ b/docs/rules/ban-drop-column.md @@ -1,9 +1,10 @@ # banDropColumn **Diagnostic Category: `lint/safety/banDropColumn`** + **Since**: `vnext` -:::note -- This rule is recommended. A diagnostic error will appear when linting your code. -::: +> [!NOTE] +> - This rule is recommended. A diagnostic error will appear when linting your code. + Sources: - Inspired from: squawk/ban-drop-column diff --git a/docs/rules/ban-drop-not-null.md b/docs/rules/ban-drop-not-null.md index be2b808c0..be2c7c2e2 100644 --- a/docs/rules/ban-drop-not-null.md +++ b/docs/rules/ban-drop-not-null.md @@ -1,9 +1,10 @@ # banDropNotNull **Diagnostic Category: `lint/safety/banDropNotNull`** + **Since**: `vnext` -:::note -- This rule is recommended. A diagnostic error will appear when linting your code. -::: +> [!NOTE] +> - This rule is recommended. A diagnostic error will appear when linting your code. 
+
 Sources: 
 - Inspired from: squawk/ban-drop-not-null

From 5fcd0526b00509fa041771b12994cf96f18f0bf9 Mon Sep 17 00:00:00 2001
From: psteinroe
Date: Sun, 16 Feb 2025 17:43:47 +0100
Subject: [PATCH 06/14] fix: diagnostics in docs

---
 docs/codegen/src/rules_docs.rs  | 43 +++++++++++++++++++++++----------
 docs/rules/ban-drop-column.md   | 12 +++++++--
 docs/rules/ban-drop-not-null.md | 12 +++++++--
 3 files changed, 50 insertions(+), 17 deletions(-)

diff --git a/docs/codegen/src/rules_docs.rs b/docs/codegen/src/rules_docs.rs
index ea97aff41..0a16edca3 100644
--- a/docs/codegen/src/rules_docs.rs
+++ b/docs/codegen/src/rules_docs.rs
@@ -2,8 +2,8 @@ use anyhow::{bail, Result};
 use biome_string_case::Case;
 use pglt_analyse::{AnalyserOptions, AnalysisFilter, RuleFilter, RuleMetadata};
 use pglt_analyser::{Analyser, AnalyserConfig};
-use pglt_console::fmt::{Formatter, HTML};
-use pglt_console::markup;
+use pglt_console::fmt::{Display, Formatter, Termcolor};
+use pglt_diagnostics::termcolor::NoColor;
 use pglt_diagnostics::{Diagnostic, DiagnosticExt, PrintDiagnostic};
 use pglt_query_ext::diagnostics::SyntaxDiagnostic;
 use pglt_workspace::settings::Settings;
@@ -17,6 +17,28 @@ use std::{
     str::{self, FromStr},
 };
 
+/// TODO: get this from jules pr
+/// Adapter type providing a std::fmt::Display implementation for any type that
+/// implements pglt_console::fmt::Display.
+pub struct StdDisplay<T: Display>(pub T);
+
+impl<T> std::fmt::Display for StdDisplay<T>
+where
+    T: Display,
+{
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        let mut buffer: Vec<u8> = Vec::new();
+        let mut termcolor = Termcolor(NoColor::new(&mut buffer));
+        let mut formatter = Formatter::new(&mut termcolor);
+
+        self.0.fmt(&mut formatter).map_err(|_| std::fmt::Error)?;
+
+        let content = String::from_utf8(buffer).map_err(|_| std::fmt::Error)?;
+
+        f.write_str(content.as_str())
+    }
+}
+
 /// Generates the documentation page for each lint rule.
 ///
 /// * `docs_dir`: Path to the docs directory.
@@ -75,7 +97,7 @@ fn generate_rule_doc(
         writeln!(content, "> [!NOTE]")?;
         writeln!(
             content,
-            "> - This rule is recommended. A diagnostic error will appear when linting your code."
+            "> This rule is recommended. A diagnostic error will appear when linting your code."
        )?;
     }
 
@@ -165,16 +187,13 @@ fn write_documentation(
 
                 if let Some((test, block)) = language.take() {
                     if test.expect_diagnostic {
-                        write!(
-                            content,
-                            "<pre class=\"language-text\"><code class=\"language-text\">"
-                        )?;
+                        writeln!(content, "```sh")?;
                     }
 
                     print_diagnostics(group, rule, &test, &block, content)?;
 
                     if test.expect_diagnostic {
-                        writeln!(content, "
")?; + writeln!(content, "```")?; writeln!(content)?; } } @@ -411,12 +430,10 @@ fn print_diagnostics( ) -> Result<()> { let file_path = format!("code-block.{}", test.tag); - let mut write = HTML::new(content).with_mdx(); - let mut write_diagnostic = |_: &str, diag: pglt_diagnostics::Error| -> Result<()> { - Formatter::new(&mut write).write_markup(markup! { - {PrintDiagnostic::verbose(&diag)} - })?; + let printer = PrintDiagnostic::simple(&diag); + writeln!(content, "{}", StdDisplay(printer)).unwrap(); + Ok(()) }; if test.ignore { diff --git a/docs/rules/ban-drop-column.md b/docs/rules/ban-drop-column.md index b93a4acfc..8909fdc11 100644 --- a/docs/rules/ban-drop-column.md +++ b/docs/rules/ban-drop-column.md @@ -3,7 +3,7 @@ **Since**: `vnext` > [!NOTE] -> - This rule is recommended. A diagnostic error will appear when linting your code. +> This rule is recommended. A diagnostic error will appear when linting your code. Sources: - Inspired from: squawk/ban-drop-column @@ -23,7 +23,15 @@ You can leave the column as nullable or delete the column once queries no longer alter table test drop column id; ``` -
code-block.sql lint/safety/banDropColumn ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━

Dropping a column may break existing clients.

You can leave the column as nullable or delete the column once queries no longer select or modify the column.

+```sh +code-block.sql lint/safety/banDropColumn ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ + + × Dropping a column may break existing clients. + + i You can leave the column as nullable or delete the column once queries no longer select or modify the column. + + +``` ## How to configure ```toml title="pglt.toml" diff --git a/docs/rules/ban-drop-not-null.md b/docs/rules/ban-drop-not-null.md index be2c7c2e2..28f61dce0 100644 --- a/docs/rules/ban-drop-not-null.md +++ b/docs/rules/ban-drop-not-null.md @@ -3,7 +3,7 @@ **Since**: `vnext` > [!NOTE] -> - This rule is recommended. A diagnostic error will appear when linting your code. +> This rule is recommended. A diagnostic error will appear when linting your code. Sources: - Inspired from: squawk/ban-drop-not-null @@ -23,7 +23,15 @@ You can consider using a marker value that represents NULL. Alternatively, creat alter table users alter column email drop not null; ``` -
code-block.sql lint/safety/banDropNotNull ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━

Dropping a NOT NULL constraint may break existing clients.

Consider using a marker value that represents NULL. Alternatively, create a new table allowing NULL values, copy the data from the old table, and create a view that filters NULL values.

+```sh +code-block.sql lint/safety/banDropNotNull ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ + + × Dropping a NOT NULL constraint may break existing clients. + + i Consider using a marker value that represents NULL. Alternatively, create a new table allowing NULL values, copy the data from the old table, and create a view that filters NULL values. + + +``` ## How to configure ```toml title="pglt.toml" From 09d86edd1e75a2267426dd88e1b4983c8f188013 Mon Sep 17 00:00:00 2001 From: psteinroe Date: Sun, 16 Feb 2025 17:46:50 +0100 Subject: [PATCH 07/14] fix: docs --- docs/codegen/src/rules_sources.rs | 3 --- docs/rule_sources.md | 5 +++++ 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/docs/codegen/src/rules_sources.rs b/docs/codegen/src/rules_sources.rs index 1473809db..00bdfd361 100644 --- a/docs/codegen/src/rules_sources.rs +++ b/docs/codegen/src/rules_sources.rs @@ -46,9 +46,6 @@ pub fn generate_rule_sources(docs_dir: &Path) -> anyhow::Result<()> { let mut exclusive_rules = BTreeSet::<(String, String)>::new(); for (rule_name, metadata) in rules { - if metadata.version == "next" { - continue; - } let kebab_rule_name = Case::Kebab.convert(rule_name); if metadata.sources.is_empty() { exclusive_rules.insert((rule_name.to_string(), format!("./rules/{kebab_rule_name}"))); diff --git a/docs/rule_sources.md b/docs/rule_sources.md index 87aed41c0..5036ae51f 100644 --- a/docs/rule_sources.md +++ b/docs/rule_sources.md @@ -1,2 +1,7 @@ ## Exclusive rules ## Rules from other sources +### Squawk +| Squawk Rule Name | Rule Name | +| ---- | ---- | +| [ban-drop-column](https://squawkhq.com/docs/ban-drop-column) |[banDropColumn](./rules/ban-drop-column) | +| [ban-drop-not-null](https://squawkhq.com/docs/ban-drop-not-null) |[banDropNotNull](./rules/ban-drop-not-null) | From 3cd60fa42d978edc4b60c7485ddc08c9cab86681 Mon Sep 17 00:00:00 2001 From: psteinroe Date: Sun, 16 Feb 2025 17:47:58 +0100 Subject: [PATCH 08/14] fix: docs --- docs/codegen/src/rules_docs.rs | 3 ++- docs/rules/ban-drop-column.md | 3 ++- docs/rules/ban-drop-not-null.md | 3 ++- 3 files changed, 6 insertions(+), 3 deletions(-) diff --git a/docs/codegen/src/rules_docs.rs b/docs/codegen/src/rules_docs.rs index 0a16edca3..21fc518fa 100644 --- a/docs/codegen/src/rules_docs.rs +++ b/docs/codegen/src/rules_docs.rs @@ -91,6 +91,7 @@ fn generate_rule_doc( writeln!(content)?; writeln!(content, "**Since**: `v{}`", meta.version)?; + writeln!(content)?; // add recommended notice if is_recommended { @@ -105,7 +106,7 @@ fn generate_rule_doc( // add source information if !meta.sources.is_empty() { - writeln!(content, "Sources: ")?; + writeln!(content, "**Sources**: ")?; for source in meta.sources { let rule_name = source.to_namespaced_rule_name(); diff --git a/docs/rules/ban-drop-column.md b/docs/rules/ban-drop-column.md index 8909fdc11..75cabd8bd 100644 --- a/docs/rules/ban-drop-column.md +++ b/docs/rules/ban-drop-column.md @@ -2,10 +2,11 @@ **Diagnostic Category: `lint/safety/banDropColumn`** **Since**: `vnext` + > [!NOTE] > This rule is recommended. A diagnostic error will appear when linting your code. -Sources: +**Sources**: - Inspired from: squawk/ban-drop-column ## Description diff --git a/docs/rules/ban-drop-not-null.md b/docs/rules/ban-drop-not-null.md index 28f61dce0..3722534cb 100644 --- a/docs/rules/ban-drop-not-null.md +++ b/docs/rules/ban-drop-not-null.md @@ -2,10 +2,11 @@ **Diagnostic Category: `lint/safety/banDropNotNull`** **Since**: `vnext` + > [!NOTE] > This rule is recommended. 
A diagnostic error will appear when linting your code. -Sources: +**Sources**: - Inspired from: squawk/ban-drop-not-null ## Description From ae5656f44b1b7eeab8be4c3a1ad5d71235a03447 Mon Sep 17 00:00:00 2001 From: psteinroe Date: Mon, 17 Feb 2025 09:16:01 +0100 Subject: [PATCH 09/14] fix: lint --- crates/pglt_analyser/tests/rules_tests.rs | 12 ++++-------- crates/pglt_test_macros/src/lib.rs | 2 +- xtask/codegen/src/generate_new_analyser_rule.rs | 2 +- 3 files changed, 6 insertions(+), 10 deletions(-) diff --git a/crates/pglt_analyser/tests/rules_tests.rs b/crates/pglt_analyser/tests/rules_tests.rs index a18dd0573..1c7b31bdb 100644 --- a/crates/pglt_analyser/tests/rules_tests.rs +++ b/crates/pglt_analyser/tests/rules_tests.rs @@ -23,7 +23,7 @@ fn rule_test(full_path: &'static str, _: &str, _: &str) { }; let query = - read_to_string(full_path).expect(format!("Failed to read file: {} ", full_path).as_str()); + read_to_string(full_path).unwrap_or_else(|_| panic!("Failed to read file: {} ", full_path)); let ast = pglt_query_ext::parse(&query).expect("failed to parse SQL"); let options = AnalyserOptions::default(); @@ -51,7 +51,6 @@ fn rule_test(full_path: &'static str, _: &str, _: &str) { fn parse_test_path(path: &Path) -> (String, String, String) { let mut comps: Vec<&str> = path .components() - .into_iter() .map(|c| c.as_os_str().to_str().unwrap()) .collect(); @@ -97,13 +96,10 @@ impl Expectation { } fn assert(&self, diagnostics: &[RuleDiagnostic]) { - match self { - Self::NoDiagnostics => { - if !diagnostics.is_empty() { - panic!("This test should not have any diagnostics."); - } + if let Self::NoDiagnostics = self { + if !diagnostics.is_empty() { + panic!("This test should not have any diagnostics."); } - _ => {} } } } diff --git a/crates/pglt_test_macros/src/lib.rs b/crates/pglt_test_macros/src/lib.rs index 30c0f7a76..4157467e0 100644 --- a/crates/pglt_test_macros/src/lib.rs +++ b/crates/pglt_test_macros/src/lib.rs @@ -227,7 +227,7 @@ impl TryFrom for Variables { Ok(Variables { test_name: test_name.into(), test_fullpath: test_fullpath.into(), - test_expected_fullpath: test_expected_fullpath.into(), + test_expected_fullpath, test_dir: test_dir.into(), }) } diff --git a/xtask/codegen/src/generate_new_analyser_rule.rs b/xtask/codegen/src/generate_new_analyser_rule.rs index e5e6b0eff..d4782d15c 100644 --- a/xtask/codegen/src/generate_new_analyser_rule.rs +++ b/xtask/codegen/src/generate_new_analyser_rule.rs @@ -73,7 +73,7 @@ impl Rule for {rule_name_upper_camel} {{ ) } -static EXAMPLE_SQL: &'static str = r#" +static EXAMPLE_SQL: &str = r#" -- select 1; "#; From 143d5133b97378d302f4184ff89131286cb3903d Mon Sep 17 00:00:00 2001 From: psteinroe Date: Mon, 17 Feb 2025 09:17:41 +0100 Subject: [PATCH 10/14] feat: integrate docs codegen into ci --- .github/workflows/pull_request.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index e816e584d..3d6f94023 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -174,6 +174,8 @@ jobs: run: cargo run -p xtask_codegen -- analyser - name: Run the configuration codegen run: cargo run -p xtask_codegen -- configuration + - name: Run the docs codegen + run: cargo run -p docs_codegen - name: Check for git diff run: | if [[ $(git status --porcelain) ]]; then From c6c58bbdb83dbf3cf78025552e11855a3da7c00b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Philipp=20Steinr=C3=B6tter?= Date: Tue, 18 Feb 2025 09:59:50 +0100 Subject: [PATCH 11/14] Update 
 docs/codegen/src/utils.rs

Co-authored-by: Julian Domke <68325451+juleswritescode@users.noreply.github.com>
---
 docs/codegen/src/utils.rs | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/docs/codegen/src/utils.rs b/docs/codegen/src/utils.rs
index 5646468eb..7770a5085 100644
--- a/docs/codegen/src/utils.rs
+++ b/docs/codegen/src/utils.rs
@@ -19,9 +19,9 @@ pub(crate) fn replace_section(
 #[derive(Default)]
 pub(crate) struct LintRulesVisitor {
     /// This is mapped to:
-    /// - group (correctness) -> list of rules
-    /// - list or rules is mapped to
-    ///   - rule name -> metadata
+    /// group (e.g. "safety") -> <list of rules>
+    /// where <list of rules> is:
+    /// <rule name> -> metadata
     pub(crate) groups: BTreeMap<&'static str, BTreeMap<&'static str, RuleMetadata>>,
 }
 

From c4bb45f2db1168d97ce74a6b5e725aeba16ce522 Mon Sep 17 00:00:00 2001
From: psteinroe
Date: Tue, 18 Feb 2025 10:04:33 +0100
Subject: [PATCH 12/14] chore: merge main

---
 crates/pglt_analyse/src/rule.rs               |  5 ++
 crates/pglt_analyser/tests/rules_tests.rs     | 35 ++++++++--
 .../specs/safety/banDropColumn/basic.sql      |  1 +
 .../specs/safety/banDropColumn/basic.sql.snap |  1 +
 .../specs/safety/banDropNotNull/basic.sql     |  1 +
 .../safety/banDropNotNull/basic.sql.snap      |  1 +
 crates/pglt_test_macros/README.md             | 67 +++++++++++++++++++
 crates/pglt_test_macros/src/lib.rs            | 38 +++++++----
 .../codegen/src/generate_new_analyser_rule.rs | 15 +++--
 9 files changed, 141 insertions(+), 23 deletions(-)
 create mode 100644 crates/pglt_test_macros/README.md

diff --git a/crates/pglt_analyse/src/rule.rs b/crates/pglt_analyse/src/rule.rs
index 59c892db4..217d9baf0 100644
--- a/crates/pglt_analyse/src/rule.rs
+++ b/crates/pglt_analyse/src/rule.rs
@@ -263,6 +263,11 @@ impl RuleDiagnostic {
     pub fn advices(&self) -> &RuleAdvice {
         &self.rule_advice
     }
+
+    /// Will return the rule's category name as defined via `define_categories! { .. }`. 
+    pub fn get_category_name(&self) -> &'static str {
+        self.category.name()
+    }
 }
 
 #[derive(Debug, Clone, Eq)]
diff --git a/crates/pglt_analyser/tests/rules_tests.rs b/crates/pglt_analyser/tests/rules_tests.rs
index 1c7b31bdb..62aa3ae29 100644
--- a/crates/pglt_analyser/tests/rules_tests.rs
+++ b/crates/pglt_analyser/tests/rules_tests.rs
@@ -82,24 +82,51 @@ fn write_snapshot(snapshot: &mut String, query: &str, diagnostics: &[RuleDiagnos
 enum Expectation {
     NoDiagnostics,
     AnyDiagnostics,
+    OnlyOne(String),
 }
 
 impl Expectation {
     fn from_file(content: &str) -> Self {
         for line in content.lines() {
-            if line.contains("expect-no-diagnostics") {
+            if line.contains("expect_no_diagnostics") {
                 return Self::NoDiagnostics;
             }
+
+            if line.contains("expect_only_") {
+                let kind = line
+                    .splitn(3, "_")
+                    .last()
+                    .expect("Use pattern: `-- expect_only_`")
+                    .trim();
+
+                return Self::OnlyOne(kind.into());
+            }
         }
 
         Self::AnyDiagnostics
     }
 
     fn assert(&self, diagnostics: &[RuleDiagnostic]) {
-        if let Self::NoDiagnostics = self {
-            if !diagnostics.is_empty() {
-                panic!("This test should not have any diagnostics.");
+        match self {
+            Self::NoDiagnostics => {
+                if !diagnostics.is_empty() {
+                    panic!("This test should not have any diagnostics.");
+                }
+            }
+            Self::OnlyOne(category) => {
+                let found_kinds = diagnostics
+                    .iter()
+                    .map(|d| d.get_category_name())
+                    .collect::<Vec<_>>()
+                    .join(", ");
+
+                if diagnostics.len() != 1 || diagnostics[0].get_category_name() != category {
+                    panic!(
+                        "This test should only have one diagnostic of kind: {category}\nReceived: {found_kinds}"
+                    );
+                }
             }
+            Self::AnyDiagnostics => {}
         }
     }
 }
diff --git a/crates/pglt_analyser/tests/specs/safety/banDropColumn/basic.sql b/crates/pglt_analyser/tests/specs/safety/banDropColumn/basic.sql
index 2cb89e2a6..16d3b4769 100644
--- a/crates/pglt_analyser/tests/specs/safety/banDropColumn/basic.sql
+++ b/crates/pglt_analyser/tests/specs/safety/banDropColumn/basic.sql
@@ -1,2 +1,3 @@
+-- expect_only_lint/safety/banDropColumn
 alter table test
 drop column id;
\ No newline at end of file
diff --git a/crates/pglt_analyser/tests/specs/safety/banDropColumn/basic.sql.snap b/crates/pglt_analyser/tests/specs/safety/banDropColumn/basic.sql.snap
index db2f93ae6..81905b610 100644
--- a/crates/pglt_analyser/tests/specs/safety/banDropColumn/basic.sql.snap
+++ b/crates/pglt_analyser/tests/specs/safety/banDropColumn/basic.sql.snap
@@ -4,6 +4,7 @@ expression: snapshot
 ---
 # Input
 ```
+-- expect_only_lint/safety/banDropColumn
 alter table test
 drop column id;
 ```
diff --git a/crates/pglt_analyser/tests/specs/safety/banDropNotNull/basic.sql b/crates/pglt_analyser/tests/specs/safety/banDropNotNull/basic.sql
index 994eb07af..1e1fc8796 100644
--- a/crates/pglt_analyser/tests/specs/safety/banDropNotNull/basic.sql
+++ b/crates/pglt_analyser/tests/specs/safety/banDropNotNull/basic.sql
@@ -1,3 +1,4 @@
+-- expect_only_lint/safety/banDropNotNull
 alter table users
 alter column id
 drop not null;
\ No newline at end of file
diff --git a/crates/pglt_analyser/tests/specs/safety/banDropNotNull/basic.sql.snap b/crates/pglt_analyser/tests/specs/safety/banDropNotNull/basic.sql.snap
index 44b3b5c8b..ec9214935 100644
--- a/crates/pglt_analyser/tests/specs/safety/banDropNotNull/basic.sql.snap
+++ b/crates/pglt_analyser/tests/specs/safety/banDropNotNull/basic.sql.snap
@@ -4,6 +4,7 @@ expression: snapshot
 ---
 # Input
 ```
+-- expect_only_lint/safety/banDropNotNull
 alter table users
 alter column id
 drop not null;
diff --git a/crates/pglt_test_macros/README.md b/crates/pglt_test_macros/README.md
new file mode
100644 index 000000000..aa05cd821 --- /dev/null +++ b/crates/pglt_test_macros/README.md @@ -0,0 +1,67 @@ +# Tests macros + +Macros to help auto-generate tests based on files. + +## Usage + +Pass a glob pattern that'll identify your files and a test-function that'll run for each file. The glob pattern has to start at the root of your crate. + +You can add a `.expected.` file next to your test file. Its path will be passed to your test function so you can make outcome-based assertions. (Alternatively, write snapshot tests.) + +Given the following file structure: + +```txt +crate/ +|-- src/ +|-- tests/ + |-- queries/ + |-- test.sql + |-- test.expected.sql + |-- querytest.rs +``` + +You can generate tests like so: + +```rust + // crate/tests/querytest.rs + + tests_macros::gen_tests!{ + "tests/queries/*.sql", + crate::run_test // use `crate::` if the linter complains. + } + + fn run_test( + test_path: &str, // absolute path on the machine + expected_path: &str, // absolute path of .expected file + test_dir: &str // absolute path of the test file's parent + ) { + // your logic + } +``` + +Given a `crate/tests/queries/some_test_abc.sql` file, this will generate the following: + +```rust +#[test] +pub fn some_test_abc() +{ + let test_file = "/tests/queries/some_test_abc.sql"; + let test_expected_file = "/tests/queries/some_test_abc.expected.sql"; + let parent = "/tests/queries"; + run_test(test_file, test_expected_file, parent); +} +``` + +This will be replicated for each file matched by the glob pattern. + +## Pitfalls + +- If you use a Rust-keyword as a file name, this'll result in invalid syntax for the generated tests. +- You might get linting errors if your test files aren't snake case. +- All files of the glob-pattern must (currently) be `.sql` files. +- The `.expected.sql` file-name will always be passed, even if the file doesn't exist. +- The macro will wrap your tests in a `mod tests { .. }` module. If you need multiple generations, wrap them in modules like so: `mod some_test { tests_macros::gen_tests! { .. } }`. + +## How to run + +Simply run your `cargo test` commands as usual. diff --git a/crates/pglt_test_macros/src/lib.rs b/crates/pglt_test_macros/src/lib.rs index 4157467e0..e9c5c296f 100644 --- a/crates/pglt_test_macros/src/lib.rs +++ b/crates/pglt_test_macros/src/lib.rs @@ -197,38 +197,50 @@ struct Variables { impl TryFrom for Variables { type Error = &'static str; - fn try_from(path: PathBuf) -> Result { - let test_name = path + fn try_from(mut path: PathBuf) -> Result { + let test_name: String = path .file_stem() .ok_or("Cannot get file stem.")? .to_str() - .ok_or("Cannot convert file stem to string.")?; + .ok_or("Cannot convert file stem to string.")? + .into(); - let ext = path + let ext: String = path .extension() .ok_or("Cannot get extension.")? .to_str() - .ok_or("Cannot convert extension to string.")?; + .ok_or("Cannot convert extension to string.")? + .into(); assert_eq!(ext, "sql", "Expected .sql extension but received: {}", ext); - let test_dir = path + let test_dir: String = path .parent() .ok_or("Cannot get parent directory.")? .to_str() - .ok_or("Cannot convert parent directory to string.")?; + .ok_or("Cannot convert parent directory to string.")? + .into(); - let test_fullpath = path.to_str().ok_or("Cannot convert path to string.")?; + let test_fullpath: String = path + .as_os_str() + .to_str() + .ok_or("Cannot convert file stem to string.")? 
+ .into(); + + path.set_extension(OsStr::new("")); - let mut without_ext = test_fullpath.to_string(); - without_ext.pop(); + let without_ext: String = path + .as_os_str() + .to_str() + .ok_or("Cannot convert file stem to string.")? + .into(); let test_expected_fullpath = format!("{}.expected.{}", without_ext, ext); Ok(Variables { - test_name: test_name.into(), - test_fullpath: test_fullpath.into(), + test_name, + test_fullpath, test_expected_fullpath, - test_dir: test_dir.into(), + test_dir, }) } } diff --git a/xtask/codegen/src/generate_new_analyser_rule.rs b/xtask/codegen/src/generate_new_analyser_rule.rs index d4782d15c..f5618114e 100644 --- a/xtask/codegen/src/generate_new_analyser_rule.rs +++ b/xtask/codegen/src/generate_new_analyser_rule.rs @@ -73,9 +73,9 @@ impl Rule for {rule_name_upper_camel} {{ ) } -static EXAMPLE_SQL: &str = r#" - -- select 1; -"#; +fn gen_sql(category_name: &str) -> String { + format!("-- expect_only_{category_name}\n-- select 1;").into() +} pub fn generate_new_analyser_rule(category: Category, rule_name: &str, group: &str) { let rule_name_camel = Case::Camel.convert(rule_name); @@ -143,7 +143,10 @@ pub fn generate_new_analyser_rule(category: Category, rule_name: &str, group: &s std::fs::create_dir(test_folder.clone()).expect("To create the test rule folder"); } - let test_file_name = format!("{}/query.sql", test_folder.display()); - std::fs::write(test_file_name.clone(), EXAMPLE_SQL) - .unwrap_or_else(|_| panic!("To write {}", &test_file_name)); + let test_file_name = format!("{}/basic.sql", test_folder.display()); + std::fs::write( + test_file_name.clone(), + gen_sql(format!("lint/{group}/{rule_name_camel}").as_str()), + ) + .unwrap_or_else(|_| panic!("To write {}", &test_file_name)); } From 6591dbb55a372f76097462f4cf1369a958590a71 Mon Sep 17 00:00:00 2001 From: psteinroe Date: Tue, 18 Feb 2025 10:05:35 +0100 Subject: [PATCH 13/14] chore: lint fix --- xtask/codegen/src/generate_new_analyser_rule.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/xtask/codegen/src/generate_new_analyser_rule.rs b/xtask/codegen/src/generate_new_analyser_rule.rs index f5618114e..e847fd550 100644 --- a/xtask/codegen/src/generate_new_analyser_rule.rs +++ b/xtask/codegen/src/generate_new_analyser_rule.rs @@ -74,7 +74,7 @@ impl Rule for {rule_name_upper_camel} {{ } fn gen_sql(category_name: &str) -> String { - format!("-- expect_only_{category_name}\n-- select 1;").into() + format!("-- expect_only_{category_name}\n-- select 1;") } pub fn generate_new_analyser_rule(category: Category, rule_name: &str, group: &str) { From 14766884be5fce4a90c8c96f33ae6a11480f0b69 Mon Sep 17 00:00:00 2001 From: psteinroe Date: Tue, 18 Feb 2025 10:10:10 +0100 Subject: [PATCH 14/14] chore: lint fix --- xtask/codegen/src/generate_new_analyser_rule.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/xtask/codegen/src/generate_new_analyser_rule.rs b/xtask/codegen/src/generate_new_analyser_rule.rs index f5618114e..e847fd550 100644 --- a/xtask/codegen/src/generate_new_analyser_rule.rs +++ b/xtask/codegen/src/generate_new_analyser_rule.rs @@ -74,7 +74,7 @@ impl Rule for {rule_name_upper_camel} {{ } fn gen_sql(category_name: &str) -> String { - format!("-- expect_only_{category_name}\n-- select 1;").into() + format!("-- expect_only_{category_name}\n-- select 1;") } pub fn generate_new_analyser_rule(category: Category, rule_name: &str, group: &str) {