diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index e816e584d..3d6f94023 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -174,6 +174,8 @@ jobs: run: cargo run -p xtask_codegen -- analyser - name: Run the configuration codegen run: cargo run -p xtask_codegen -- configuration + - name: Run the docs codegen + run: cargo run -p docs_codegen - name: Check for git diff run: | if [[ $(git status --porcelain) ]]; then diff --git a/Cargo.lock b/Cargo.lock index e7026ffa6..10a46ca40 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -975,6 +975,28 @@ dependencies = [ "syn 2.0.90", ] +[[package]] +name = "docs_codegen" +version = "0.0.0" +dependencies = [ + "anyhow", + "biome_string_case", + "bpaf", + "pglt_analyse", + "pglt_analyser", + "pglt_cli", + "pglt_configuration", + "pglt_console", + "pglt_diagnostics", + "pglt_flags", + "pglt_query_ext", + "pglt_statement_splitter", + "pglt_workspace", + "pulldown-cmark", + "regex", + "toml", +] + [[package]] name = "dotenv" version = "0.15.0" diff --git a/Cargo.toml b/Cargo.toml index 4fcea3115..b2424f097 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,5 +1,5 @@ [workspace] -members = ["crates/*", "lib/*", "xtask/codegen", "xtask/rules_check"] +members = ["crates/*", "lib/*", "xtask/codegen", "xtask/rules_check", "docs/codegen"] resolver = "2" [workspace.package] @@ -28,6 +28,7 @@ pg_query = "6.0.0" proc-macro2 = "1.0.66" quote = "1.0.33" rayon = "1.10.0" +regex = "1.11.1" rustc-hash = "2.0.0" schemars = { version = "0.8.21", features = ["indexmap2", "smallvec"] } serde = "1.0.195" @@ -80,6 +81,8 @@ pglt_workspace = { path = "./crates/pglt_workspace", version = "0.0 pglt_test_macros = { path = "./crates/pglt_test_macros" } pglt_test_utils = { path = "./crates/pglt_test_utils" } -# parser = { path = "./crates/parser", version = "0.0.0" } -# sql_parser = { path = "./crates/sql_parser", version = "0.0.0" } -# sql_parser_codegen = { path = 
"./crates/sql_parser_codegen", version = "0.0.0" } + +docs_codegen = { path = "./docs/codegen", version = "0.0.0" } + +[profile.dev.package] +insta.opt-level = 3 diff --git a/crates/pglt_analyser/tests/rules_tests.rs b/crates/pglt_analyser/tests/rules_tests.rs index 0b68c7532..62aa3ae29 100644 --- a/crates/pglt_analyser/tests/rules_tests.rs +++ b/crates/pglt_analyser/tests/rules_tests.rs @@ -23,7 +23,7 @@ fn rule_test(full_path: &'static str, _: &str, _: &str) { }; let query = - read_to_string(full_path).expect(format!("Failed to read file: {} ", full_path).as_str()); + read_to_string(full_path).unwrap_or_else(|_| panic!("Failed to read file: {} ", full_path)); let ast = pglt_query_ext::parse(&query).expect("failed to parse SQL"); let options = AnalyserOptions::default(); @@ -51,7 +51,6 @@ fn rule_test(full_path: &'static str, _: &str, _: &str) { fn parse_test_path(path: &Path) -> (String, String, String) { let mut comps: Vec<&str> = path .components() - .into_iter() .map(|c| c.as_os_str().to_str().unwrap()) .collect(); diff --git a/crates/pglt_cli/src/cli_options.rs b/crates/pglt_cli/src/cli_options.rs index c49f6f798..d61fcd102 100644 --- a/crates/pglt_cli/src/cli_options.rs +++ b/crates/pglt_cli/src/cli_options.rs @@ -48,7 +48,7 @@ pub struct CliOptions { #[bpaf(long("no-errors-on-unmatched"), switch)] pub no_errors_on_unmatched: bool, - /// Tell PGLSP to exit with an error code if some diagnostics emit warnings. + /// Tell PgLT to exit with an error code if some diagnostics emit warnings. #[bpaf(long("error-on-warnings"), switch)] pub error_on_warnings: bool, @@ -86,7 +86,7 @@ pub struct CliOptions { fallback(Severity::default()), display_fallback )] - /// The level of diagnostics to show. In order, from the lowest to the most important: info, warn, error. Passing `--diagnostic-level=error` will cause PGLSP to print only diagnostics that contain only errors. + /// The level of diagnostics to show. 
In order, from the lowest to the most important: info, warn, error. Passing `--diagnostic-level=error` will cause PgLT to print only diagnostics that contain only errors. pub diagnostic_level: Severity, } diff --git a/crates/pglt_cli/src/commands/daemon.rs b/crates/pglt_cli/src/commands/daemon.rs index 43f96ddcf..336b11fb9 100644 --- a/crates/pglt_cli/src/commands/daemon.rs +++ b/crates/pglt_cli/src/commands/daemon.rs @@ -230,7 +230,7 @@ fn setup_tracing_subscriber(log_path: Option, log_file_name_prefix: Opt } pub fn default_pglt_log_path() -> PathBuf { - match env::var_os("PGLSP_LOG_PATH") { + match env::var_os("PGLT_LOG_PATH") { Some(directory) => PathBuf::from(directory), None => pglt_fs::ensure_cache_dir().join("pglt-logs"), } diff --git a/crates/pglt_cli/src/commands/mod.rs b/crates/pglt_cli/src/commands/mod.rs index db8f53c31..90f48321d 100644 --- a/crates/pglt_cli/src/commands/mod.rs +++ b/crates/pglt_cli/src/commands/mod.rs @@ -72,7 +72,7 @@ pub enum PgltCommand { Start { /// Allows to change the prefix applied to the file name of the logs. #[bpaf( - env("PGLSP_LOG_PREFIX_NAME"), + env("PGLT_LOG_PREFIX_NAME"), long("log-prefix-name"), argument("STRING"), hide_usage, @@ -83,7 +83,7 @@ pub enum PgltCommand { /// Allows to change the folder where logs are stored. #[bpaf( - env("PGLSP_LOG_PATH"), + env("PGLT_LOG_PATH"), long("log-path"), argument("PATH"), hide_usage, @@ -92,7 +92,7 @@ pub enum PgltCommand { log_path: PathBuf, /// Allows to set a custom file path to the configuration file, /// or a custom directory path to find `pglt.toml` - #[bpaf(env("PGLSP_LOG_PREFIX_NAME"), long("config-path"), argument("PATH"))] + #[bpaf(env("PGLT_LOG_PREFIX_NAME"), long("config-path"), argument("PATH"))] config_path: Option, }, @@ -109,7 +109,7 @@ pub enum PgltCommand { LspProxy { /// Allows to change the prefix applied to the file name of the logs. 
#[bpaf( - env("PGLSP_LOG_PREFIX_NAME"), + env("PGLT_LOG_PREFIX_NAME"), long("log-prefix-name"), argument("STRING"), hide_usage, @@ -119,7 +119,7 @@ pub enum PgltCommand { log_prefix_name: String, /// Allows to change the folder where logs are stored. #[bpaf( - env("PGLSP_LOG_PATH"), + env("PGLT_LOG_PATH"), long("log-path"), argument("PATH"), hide_usage, @@ -128,7 +128,7 @@ pub enum PgltCommand { log_path: PathBuf, /// Allows to set a custom file path to the configuration file, /// or a custom directory path to find `pglt.toml` - #[bpaf(env("PGLSP_CONFIG_PATH"), long("config-path"), argument("PATH"))] + #[bpaf(env("PGLT_CONFIG_PATH"), long("config-path"), argument("PATH"))] config_path: Option, /// Bogus argument to make the command work with vscode-languageclient #[bpaf(long("stdio"), hide, hide_usage, switch)] @@ -143,7 +143,7 @@ pub enum PgltCommand { RunServer { /// Allows to change the prefix applied to the file name of the logs. #[bpaf( - env("PGLSP_LOG_PREFIX_NAME"), + env("PGLT_LOG_PREFIX_NAME"), long("log-prefix-name"), argument("STRING"), hide_usage, @@ -153,7 +153,7 @@ pub enum PgltCommand { log_prefix_name: String, /// Allows to change the folder where logs are stored. 
#[bpaf( - env("PGLSP_LOG_PATH"), + env("PGLT_LOG_PATH"), long("log-path"), argument("PATH"), hide_usage, @@ -165,7 +165,7 @@ pub enum PgltCommand { stop_on_disconnect: bool, /// Allows to set a custom file path to the configuration file, /// or a custom directory path to find `pglt.toml` - #[bpaf(env("PGLSP_CONFIG_PATH"), long("config-path"), argument("PATH"))] + #[bpaf(env("PGLT_CONFIG_PATH"), long("config-path"), argument("PATH"))] config_path: Option, }, #[bpaf(command("__print_socket"), hide)] diff --git a/crates/pglt_cli/src/diagnostics.rs b/crates/pglt_cli/src/diagnostics.rs index fa9b7ed2d..07e43b7dc 100644 --- a/crates/pglt_cli/src/diagnostics.rs +++ b/crates/pglt_cli/src/diagnostics.rs @@ -15,7 +15,7 @@ fn command_name() -> String { .unwrap_or_else(|| String::from("pglt")) } -/// A diagnostic that is emitted when running PGLSP via CLI. +/// A diagnostic that is emitted when running PgLT via CLI. /// /// When displaying the diagnostic, #[derive(Debug, Diagnostic)] diff --git a/crates/pglt_cli/src/lib.rs b/crates/pglt_cli/src/lib.rs index 72585813e..d4e966ae6 100644 --- a/crates/pglt_cli/src/lib.rs +++ b/crates/pglt_cli/src/lib.rs @@ -32,7 +32,7 @@ pub use panic::setup_panic_handler; pub use reporter::{DiagnosticsPayload, Reporter, ReporterVisitor, TraversalSummary}; pub use service::{open_transport, SocketTransport}; -pub(crate) const VERSION: &str = match option_env!("PGLSP_VERSION") { +pub(crate) const VERSION: &str = match option_env!("PGLT_VERSION") { Some(version) => version, None => env!("CARGO_PKG_VERSION"), }; diff --git a/crates/pglt_configuration/src/analyser/linter/rules.rs b/crates/pglt_configuration/src/analyser/linter/rules.rs index 8e9544008..4d4b0ad18 100644 --- a/crates/pglt_configuration/src/analyser/linter/rules.rs +++ b/crates/pglt_configuration/src/analyser/linter/rules.rs @@ -46,7 +46,7 @@ impl std::str::FromStr for RuleGroup { #[cfg_attr(feature = "schema", derive(JsonSchema))] #[serde(rename_all = "camelCase", deny_unknown_fields)] 
pub struct Rules { - #[doc = r" It enables the lint rules recommended by PGLSP. `true` by default."] + #[doc = r" It enables the lint rules recommended by PgLT. `true` by default."] #[serde(skip_serializing_if = "Option::is_none")] pub recommended: Option, #[doc = r" It enables ALL rules. The rules that belong to `nursery` won't be enabled."] diff --git a/crates/pglt_configuration/src/lib.rs b/crates/pglt_configuration/src/lib.rs index d505baf66..297feed46 100644 --- a/crates/pglt_configuration/src/lib.rs +++ b/crates/pglt_configuration/src/lib.rs @@ -34,7 +34,7 @@ use migrations::{ use serde::{Deserialize, Serialize}; use vcs::VcsClientKind; -pub const VERSION: &str = match option_env!("PGLSP_VERSION") { +pub const VERSION: &str = match option_env!("PGLT_VERSION") { Some(version) => version, None => "0.0.0", }; diff --git a/crates/pglt_flags/src/lib.rs b/crates/pglt_flags/src/lib.rs index 162db2f7d..5a8d7efbc 100644 --- a/crates/pglt_flags/src/lib.rs +++ b/crates/pglt_flags/src/lib.rs @@ -8,11 +8,11 @@ use std::sync::{LazyLock, OnceLock}; /// Returns `true` if this is an unstable build of PgLT pub fn is_unstable() -> bool { - PGLSP_VERSION.deref().is_none() + PGLT_VERSION.deref().is_none() } /// The internal version of PgLT. 
This is usually supplied during the CI build -pub static PGLSP_VERSION: LazyLock> = LazyLock::new(|| option_env!("PGLSP_VERSION")); +pub static PGLT_VERSION: LazyLock> = LazyLock::new(|| option_env!("PGLT_VERSION")); pub struct PgLTEnv { pub pglt_log_path: PgLTEnvVariable, @@ -20,21 +20,21 @@ pub struct PgLTEnv { pub pglt_config_path: PgLTEnvVariable, } -pub static PGLSP_ENV: OnceLock = OnceLock::new(); +pub static PGLT_ENV: OnceLock = OnceLock::new(); impl PgLTEnv { fn new() -> Self { Self { pglt_log_path: PgLTEnvVariable::new( - "PGLSP_LOG_PATH", + "PGLT_LOG_PATH", "The directory where the Daemon logs will be saved.", ), pglt_log_prefix: PgLTEnvVariable::new( - "PGLSP_LOG_PREFIX_NAME", + "PGLT_LOG_PREFIX_NAME", "A prefix that's added to the name of the log. Default: `server.log.`", ), pglt_config_path: PgLTEnvVariable::new( - "PGLSP_CONFIG_PATH", + "PGLT_CONFIG_PATH", "A path to the configuration file", ), } @@ -71,7 +71,7 @@ impl PgLTEnvVariable { } pub fn pglt_env() -> &'static PgLTEnv { - PGLSP_ENV.get_or_init(PgLTEnv::new) + PGLT_ENV.get_or_init(PgLTEnv::new) } impl Display for PgLTEnv { diff --git a/crates/pglt_fs/src/fs.rs b/crates/pglt_fs/src/fs.rs index 055d5eb5a..76f20d9aa 100644 --- a/crates/pglt_fs/src/fs.rs +++ b/crates/pglt_fs/src/fs.rs @@ -18,14 +18,14 @@ mod os; pub struct ConfigName; impl ConfigName { - const PGLSP_TOML: [&'static str; 1] = ["pglt.toml"]; + const PGLT_TOML: [&'static str; 1] = ["pglt.toml"]; pub const fn pglt_toml() -> &'static str { - Self::PGLSP_TOML[0] + Self::PGLT_TOML[0] } pub const fn file_names() -> [&'static str; 1] { - Self::PGLSP_TOML + Self::PGLT_TOML } } diff --git a/crates/pglt_fs/src/path.rs b/crates/pglt_fs/src/path.rs index b1411c53d..1194246d1 100644 --- a/crates/pglt_fs/src/path.rs +++ b/crates/pglt_fs/src/path.rs @@ -91,7 +91,7 @@ impl From for FileKinds { )] pub struct PgLTPath { path: PathBuf, - /// Determines the kind of the file inside PGLSP. 
Some files are considered as configuration files, others as manifest files, and others as files to handle + /// Determines the kind of the file inside PgLT. Some files are considered as configuration files, others as manifest files, and others as files to handle kind: FileKinds, /// Whether this path (usually a file) was fixed as a result of a format/lint/check command with the `--write` filag. was_written: bool, @@ -164,7 +164,7 @@ impl PgLTPath { /// Returns the contents of a file, if it exists /// /// ## Error - /// If PGLSP doesn't have permissions to read the file + /// If PgLT doesn't have permissions to read the file pub fn get_buffer_from_file(&mut self) -> String { // we assume we have permissions read_to_string(&self.path).expect("cannot read the file to format") diff --git a/crates/pglt_workspace/src/workspace/server.rs b/crates/pglt_workspace/src/workspace/server.rs index 31ad999d5..45ffb1987 100644 --- a/crates/pglt_workspace/src/workspace/server.rs +++ b/crates/pglt_workspace/src/workspace/server.rs @@ -111,7 +111,7 @@ impl WorkspaceServer { /// Check whether a file is ignored in the top-level config `files.ignore`/`files.include` fn is_ignored(&self, path: &Path) -> bool { let file_name = path.file_name().and_then(|s| s.to_str()); - // Never ignore PGLSP's config file regardless `include`/`ignore` + // Never ignore PgLT's config file regardless `include`/`ignore` (file_name != Some(ConfigName::pglt_toml())) && // Apply top-level `include`/`ignore (self.is_ignored_by_top_level_config(path) || self.is_ignored_by_migration_config(path)) @@ -130,7 +130,7 @@ impl WorkspaceServer { // `matched_path_or_any_parents` panics if `source` is not under the gitignore root. // This checks excludes absolute paths that are not a prefix of the base root. if !path.has_root() || path.starts_with(ignore.path()) { - // Because PGLSP passes a list of paths, + // Because PgLT passes a list of paths, // we use `matched_path_or_any_parents` instead of `matched`. 
ignore .matched_path_or_any_parents(path, path.is_dir()) diff --git a/docs/cli_reference.md b/docs/cli_reference.md new file mode 100644 index 000000000..2dfe7ee6d --- /dev/null +++ b/docs/cli_reference.md @@ -0,0 +1,289 @@ +## CLI Reference + +[//]: # (BEGIN CLI_REF) + + + +# Command summary + + * [`pglt`↴](#pglt) + * [`pglt version`↴](#pglt-version) + * [`pglt check`↴](#pglt-check) + * [`pglt start`↴](#pglt-start) + * [`pglt stop`↴](#pglt-stop) + * [`pglt init`↴](#pglt-init) + * [`pglt lsp-proxy`↴](#pglt-lsp-proxy) + * [`pglt clean`↴](#pglt-clean) + +## pglt + +PgLT official CLI. Use it to check the health of your project or run it to check single files. + +**Usage**: **`pglt`** _`COMMAND ...`_ + +**Available options:** +- **`-h`**, **`--help`** — + Prints help information +- **`-V`**, **`--version`** — + Prints version information + + + +**Available commands:** +- **`version`** — + Shows the version information and quit. +- **`check`** — + Runs everything to the requested files. +- **`start`** — + Starts the daemon server process. +- **`stop`** — + Stops the daemon server process. +- **`init`** — + Bootstraps a new project. Creates a configuration file with some defaults. +- **`lsp-proxy`** — + Acts as a server for the Language Server Protocol over stdin/stdout. +- **`clean`** — + Cleans the logs emitted by the daemon. + + +## pglt version + +Shows the version information and quit. + +**Usage**: **`pglt`** **`version`** + +**Global options applied to all commands** +- **` --colors`**=_``_ — + Set the formatting mode for markup: "off" prints everything as plain text, "force" forces the formatting of markup using ANSI even if the console output is determined to be incompatible +- **` --use-server`** — + Connect to a running instance of the daemon server. +- **` --skip-db`** — + Skip connecting to the database and only run checks that don't require a database connection. 
+- **` --verbose`** — + Print additional diagnostics, and some diagnostics show more information. Also, print out what files were processed and which ones were modified. +- **` --config-path`**=_`PATH`_ — + Set the file path to the configuration file, or the directory path to find `pglt.toml`. If used, it disables the default configuration file resolution. +- **` --max-diagnostics`**=_`>`_ — + Cap the amount of diagnostics displayed. When `none` is provided, the limit is lifted. + + [default: 20] +- **` --skip-errors`** — + Skip over files containing syntax errors instead of emitting an error diagnostic. +- **` --no-errors-on-unmatched`** — + Silence errors that would be emitted in case no files were processed during the execution of the command. +- **` --error-on-warnings`** — + Tell PgLT to exit with an error code if some diagnostics emit warnings. +- **` --reporter`**=_``_ — + Allows to change how diagnostics and summary are reported. +- **` --log-level`**=_``_ — + The level of logging. In order, from the most verbose to the least verbose: debug, info, warn, error. + + The value `none` won't show any logging. + + [default: none] +- **` --log-kind`**=_``_ — + How the log should look like. + + [default: pretty] +- **` --diagnostic-level`**=_``_ — + The level of diagnostics to show. In order, from the lowest to the most important: info, warn, error. Passing `--diagnostic-level=error` will cause PgLT to print only diagnostics that contain only errors. + + [default: info] + + + +**Available options:** +- **`-h`**, **`--help`** — + Prints help information + + +## pglt check + +Runs everything to the requested files. + +**Usage**: **`pglt`** **`check`** \[**`--staged`**\] \[**`--changed`**\] \[**`--since`**=_`REF`_\] \[_`PATH`_\]... + +**The configuration that is contained inside the configuration file.** +- **` --vcs-enabled`**=_``_ — + Whether we should integrate itself with the VCS client +- **` --vcs-client-kind`**=_``_ — + The kind of client. 
+- **` --vcs-use-ignore-file`**=_``_ — + Whether we should use the VCS ignore file. When [true], we will ignore the files specified in the ignore file. +- **` --vcs-root`**=_`PATH`_ — + The folder where we should check for VCS files. By default, we will use the same folder where `pglt.toml` was found. + + If we can't find the configuration, it will attempt to use the current working directory. If no current working directory can't be found, we won't use the VCS integration, and a diagnostic will be emitted +- **` --vcs-default-branch`**=_`BRANCH`_ — + The main branch of the project +- **` --files-max-size`**=_`NUMBER`_ — + The maximum allowed size for source code files in bytes. Files above this limit will be ignored for performance reasons. Defaults to 1 MiB +- **` --migrations-dir`**=_`ARG`_ — + The directory where the migration files are stored +- **` --after`**=_`ARG`_ — + Ignore any migrations before this timestamp +- **` --host`**=_`ARG`_ — + The host of the database. +- **` --port`**=_`ARG`_ — + The port of the database. +- **` --username`**=_`ARG`_ — + The username to connect to the database. +- **` --password`**=_`ARG`_ — + The password to connect to the database. +- **` --database`**=_`ARG`_ — + The name of the database. +- **` --conn_timeout_secs`**=_`ARG`_ — + The connection timeout in seconds. + + [default: Some(10)] + + + +**Global options applied to all commands** +- **` --colors`**=_``_ — + Set the formatting mode for markup: "off" prints everything as plain text, "force" forces the formatting of markup using ANSI even if the console output is determined to be incompatible +- **` --use-server`** — + Connect to a running instance of the daemon server. +- **` --skip-db`** — + Skip connecting to the database and only run checks that don't require a database connection. +- **` --verbose`** — + Print additional diagnostics, and some diagnostics show more information. Also, print out what files were processed and which ones were modified. 
+- **` --config-path`**=_`PATH`_ — + Set the file path to the configuration file, or the directory path to find `pglt.toml`. If used, it disables the default configuration file resolution. +- **` --max-diagnostics`**=_`>`_ — + Cap the amount of diagnostics displayed. When `none` is provided, the limit is lifted. + + [default: 20] +- **` --skip-errors`** — + Skip over files containing syntax errors instead of emitting an error diagnostic. +- **` --no-errors-on-unmatched`** — + Silence errors that would be emitted in case no files were processed during the execution of the command. +- **` --error-on-warnings`** — + Tell PgLT to exit with an error code if some diagnostics emit warnings. +- **` --reporter`**=_``_ — + Allows to change how diagnostics and summary are reported. +- **` --log-level`**=_``_ — + The level of logging. In order, from the most verbose to the least verbose: debug, info, warn, error. + + The value `none` won't show any logging. + + [default: none] +- **` --log-kind`**=_``_ — + How the log should look like. + + [default: pretty] +- **` --diagnostic-level`**=_``_ — + The level of diagnostics to show. In order, from the lowest to the most important: info, warn, error. Passing `--diagnostic-level=error` will cause PgLT to print only diagnostics that contain only errors. + + [default: info] + + + +**Available positional items:** +- _`PATH`_ — + Single file, single path or list of paths + + + +**Available options:** +- **` --stdin-file-path`**=_`PATH`_ — + Use this option when you want to format code piped from `stdin`, and print the output to `stdout`. + + The file doesn't need to exist on disk, what matters is the extension of the file. Based on the extension, we know how to check the code. + + Example: `echo 'let a;' | pglt_cli check --stdin-file-path=test.sql` +- **` --staged`** — + When set to true, only the files that have been staged (the ones prepared to be committed) will be linted. This option should be used when working locally. 
+- **` --changed`** — + When set to true, only the files that have been changed compared to your `defaultBranch` configuration will be linted. This option should be used in CI environments. +- **` --since`**=_`REF`_ — + Use this to specify the base branch to compare against when you're using the --changed flag and the `defaultBranch` is not set in your `pglt.toml` +- **`-h`**, **`--help`** — + Prints help information + + +## pglt start + +Starts the daemon server process. + +**Usage**: **`pglt`** **`start`** \[**`--config-path`**=_`PATH`_\] + +**Available options:** +- **` --log-prefix-name`**=_`STRING`_ — + Allows to change the prefix applied to the file name of the logs. + + Uses environment variable **`PGLT_LOG_PREFIX_NAME`** + + [default: server.log] +- **` --log-path`**=_`PATH`_ — + Allows to change the folder where logs are stored. + + Uses environment variable **`PGLT_LOG_PATH`** +- **` --config-path`**=_`PATH`_ — + Allows to set a custom file path to the configuration file, or a custom directory path to find `pglt.toml` + + Uses environment variable **`PGLT_LOG_PREFIX_NAME`** +- **`-h`**, **`--help`** — + Prints help information + + +## pglt stop + +Stops the daemon server process. + +**Usage**: **`pglt`** **`stop`** + +**Available options:** +- **`-h`**, **`--help`** — + Prints help information + + +## pglt init + +Bootstraps a new project. Creates a configuration file with some defaults. + +**Usage**: **`pglt`** **`init`** + +**Available options:** +- **`-h`**, **`--help`** — + Prints help information + + +## pglt lsp-proxy + +Acts as a server for the Language Server Protocol over stdin/stdout. + +**Usage**: **`pglt`** **`lsp-proxy`** \[**`--config-path`**=_`PATH`_\] + +**Available options:** +- **` --log-prefix-name`**=_`STRING`_ — + Allows to change the prefix applied to the file name of the logs. 
+ + Uses environment variable **`PGLT_LOG_PREFIX_NAME`** + + [default: server.log] +- **` --log-path`**=_`PATH`_ — + Allows to change the folder where logs are stored. + + Uses environment variable **`PGLT_LOG_PATH`** +- **` --config-path`**=_`PATH`_ — + Allows to set a custom file path to the configuration file, or a custom directory path to find `pglt.toml` + + Uses environment variable **`PGLT_CONFIG_PATH`** +- **`-h`**, **`--help`** — + Prints help information + + +## pglt clean + +Cleans the logs emitted by the daemon. + +**Usage**: **`pglt`** **`clean`** + +**Available options:** +- **`-h`**, **`--help`** — + Prints help information + + + +[//]: # (END CLI_REF) diff --git a/docs/codegen/Cargo.toml b/docs/codegen/Cargo.toml new file mode 100644 index 000000000..c7898c7a2 --- /dev/null +++ b/docs/codegen/Cargo.toml @@ -0,0 +1,32 @@ + +[package] +authors.workspace = true +categories.workspace = true +description = "" +edition.workspace = true +homepage.workspace = true +keywords.workspace = true +license.workspace = true +name = "docs_codegen" +repository.workspace = true +version = "0.0.0" + +[dependencies] +regex = { workspace = true } +toml = { workspace = true } +anyhow = { workspace = true } +bpaf = { workspace = true, features = ["docgen"] } + +pglt_configuration = { workspace = true } +pglt_flags = { workspace = true } +pglt_cli = { workspace = true } +pglt_analyse = { workspace = true } +pglt_analyser = { workspace = true } +pglt_diagnostics = { workspace = true } +pglt_query_ext = { workspace = true } +pglt_workspace = { workspace = true } +pglt_statement_splitter = { workspace = true } +pglt_console = { workspace = true } +biome_string_case = { workspace = true } +pulldown-cmark = "0.12.2" + diff --git a/docs/codegen/src/cli_doc.rs b/docs/codegen/src/cli_doc.rs new file mode 100644 index 000000000..ad299605d --- /dev/null +++ b/docs/codegen/src/cli_doc.rs @@ -0,0 +1,17 @@ +use pglt_cli::pglt_command; +use std::{fs, path::Path}; + +use crate::utils; + 
+pub fn generate_cli_doc(docs_dir: &Path) -> anyhow::Result<()> { + let file_path = docs_dir.join("cli_reference.md"); + + let content = fs::read_to_string(&file_path)?; + + let new_content = + utils::replace_section(&content, "CLI_REF", &pglt_command().render_markdown("pglt")); + + fs::write(file_path, &new_content)?; + + Ok(()) +} diff --git a/docs/codegen/src/default_configuration.rs b/docs/codegen/src/default_configuration.rs new file mode 100644 index 000000000..12f305b82 --- /dev/null +++ b/docs/codegen/src/default_configuration.rs @@ -0,0 +1,22 @@ +use std::{fs, path::Path}; + +use crate::utils::replace_section; + +use pglt_configuration::PartialConfiguration; + +pub fn generate_default_configuration(docs_dir: &Path) -> anyhow::Result<()> { + let index_path = docs_dir.join("index.md"); + + let printed_config = format!( + "\n```toml\n{}```\n", + toml::ser::to_string_pretty(&PartialConfiguration::init())? + ); + + let data = fs::read_to_string(&index_path)?; + + let new_data = replace_section(&data, "DEFAULT_CONFIGURATION", &printed_config); + + fs::write(&index_path, new_data)?; + + Ok(()) +} diff --git a/docs/codegen/src/env_variables.rs b/docs/codegen/src/env_variables.rs new file mode 100644 index 000000000..26839b5f7 --- /dev/null +++ b/docs/codegen/src/env_variables.rs @@ -0,0 +1,44 @@ +use anyhow::Result; +use std::fs; +use std::io::Write; +use std::path::Path; + +use crate::utils::replace_section; + +pub fn generate_env_variables(docs_dir: &Path) -> Result<()> { + let file_path = docs_dir.join("env_variables.md"); + + let mut content = vec![]; + + let env = pglt_flags::pglt_env(); + + writeln!(content, "\n",)?; + + writeln!( + content, + "### `{}`\n\n {}\n", + env.pglt_log_path.name(), + env.pglt_log_path.description() + )?; + writeln!( + content, + "### `{}`\n\n {}\n", + env.pglt_log_prefix.name(), + env.pglt_log_prefix.description() + )?; + writeln!( + content, + "### `{}`\n\n {}\n", + env.pglt_config_path.name(), + env.pglt_config_path.description() 
)?; + + let data = fs::read_to_string(&file_path)?; + + let content_str = String::from_utf8(content)?; + let new_data = replace_section(&data, "ENV_VARS", &content_str); + + fs::write(file_path, new_data)?; + + Ok(()) +} diff --git a/docs/codegen/src/lib.rs b/docs/codegen/src/lib.rs new file mode 100644 index 000000000..6ff084603 --- /dev/null +++ b/docs/codegen/src/lib.rs @@ -0,0 +1,8 @@ +pub mod cli_doc; +pub mod default_configuration; +pub mod env_variables; +pub mod rules_docs; +pub mod rules_index; +pub mod rules_sources; + +mod utils; diff --git a/docs/codegen/src/main.rs b/docs/codegen/src/main.rs new file mode 100644 index 000000000..a03a1bf05 --- /dev/null +++ b/docs/codegen/src/main.rs @@ -0,0 +1,28 @@ +use std::env; +use std::path::{Path, PathBuf}; + +use docs_codegen::cli_doc::generate_cli_doc; +use docs_codegen::default_configuration::generate_default_configuration; +use docs_codegen::env_variables::generate_env_variables; +use docs_codegen::rules_docs::generate_rules_docs; +use docs_codegen::rules_index::generate_rules_index; +use docs_codegen::rules_sources::generate_rule_sources; + +fn docs_root() -> PathBuf { + let dir = + env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| env!("CARGO_MANIFEST_DIR").to_owned()); + Path::new(&dir).parent().unwrap().to_path_buf() +} + +fn main() -> anyhow::Result<()> { + let docs_root = docs_root(); + + generate_default_configuration(&docs_root)?; + generate_env_variables(&docs_root)?; + generate_cli_doc(&docs_root)?; + generate_rules_docs(&docs_root)?; + generate_rules_index(&docs_root)?; + generate_rule_sources(&docs_root)?; + + Ok(()) +} diff --git a/docs/codegen/src/rules_docs.rs b/docs/codegen/src/rules_docs.rs new file mode 100644 index 000000000..12160699a --- /dev/null +++ b/docs/codegen/src/rules_docs.rs @@ -0,0 +1,464 @@ +use anyhow::{bail, Result}; +use biome_string_case::Case; +use pglt_analyse::{AnalyserOptions, AnalysisFilter, RuleFilter, RuleMetadata}; +use pglt_analyser::{Analyser, AnalyserConfig};
+use pglt_console::StdDisplay; +use pglt_diagnostics::{Diagnostic, DiagnosticExt, PrintDiagnostic}; +use pglt_query_ext::diagnostics::SyntaxDiagnostic; +use pglt_workspace::settings::Settings; +use pulldown_cmark::{CodeBlockKind, Event, LinkType, Parser, Tag, TagEnd}; +use std::{ + fmt::Write as _, + fs, + io::{self, Write as _}, + path::Path, + slice, + str::{self, FromStr}, +}; + +/// Generates the documentation page for each lint rule. +/// +/// * `docs_dir`: Path to the docs directory. +pub fn generate_rules_docs(docs_dir: &Path) -> anyhow::Result<()> { + let rules_dir = docs_dir.join("rules"); + + if rules_dir.exists() { + fs::remove_dir_all(&rules_dir)?; + } + fs::create_dir_all(&rules_dir)?; + + let mut visitor = crate::utils::LintRulesVisitor::default(); + pglt_analyser::visit_registry(&mut visitor); + + let crate::utils::LintRulesVisitor { groups } = visitor; + + for (group, rules) in groups { + for (rule, metadata) in rules { + let content = generate_rule_doc(group, rule, metadata)?; + let dashed_rule = Case::Kebab.convert(rule); + fs::write(rules_dir.join(format!("{}.md", dashed_rule)), content)?; + } + } + + Ok(()) +} + +fn generate_rule_doc( + group: &'static str, + rule: &'static str, + meta: RuleMetadata, +) -> Result { + let mut content = Vec::new(); + + writeln!(content, "# {rule}")?; + + writeln!( + content, + "**Diagnostic Category: `lint/{}/{}`**", + group, rule + )?; + + let is_recommended = meta.recommended; + + // add deprecation notice + if let Some(reason) = &meta.deprecated { + writeln!(content, "> [!WARNING]")?; + writeln!(content, "> This rule is deprecated and will be removed in the next major release.\n**Reason**: {reason}")?; + } + + writeln!(content)?; + writeln!(content, "**Since**: `v{}`", meta.version)?; + writeln!(content)?; + + // add recommended notice + if is_recommended { + writeln!(content, "> [!NOTE]")?; + writeln!( + content, + "> This rule is recommended. A diagnostic error will appear when linting your code." 
+ )?; + } + + writeln!(content)?; + + // add source information + if !meta.sources.is_empty() { + writeln!(content, "**Sources**: ")?; + + for source in meta.sources { + let rule_name = source.to_namespaced_rule_name(); + let source_rule_url = source.to_rule_url(); + write!(content, "- Inspired from: ")?; + writeln!( + content, + "{rule_name}" + )?; + } + writeln!(content)?; + } + + write_documentation(group, rule, meta.docs, &mut content)?; + + write_how_to_configure(group, rule, &mut content)?; + + Ok(String::from_utf8(content)?) +} + +fn write_how_to_configure( + group: &'static str, + rule: &'static str, + content: &mut Vec, +) -> io::Result<()> { + writeln!(content, "## How to configure")?; + let toml = format!( + r#"[linter.rules.{group}] +{rule} = "error" +"# + ); + + writeln!(content, "```toml title=\"pglt.toml\"")?; + writeln!(content, "{}", toml)?; + writeln!(content, "```")?; + + Ok(()) +} + +/// Parse the documentation fragment for a lint rule (in markdown) and generates +/// the content for the corresponding documentation page +fn write_documentation( + group: &'static str, + rule: &'static str, + docs: &'static str, + content: &mut Vec, +) -> Result<()> { + writeln!(content, "## Description")?; + + let parser = Parser::new(docs); + + // Tracks the content of the current code block if it's using a + // language supported for analysis + let mut language = None; + let mut list_order = None; + let mut list_indentation = 0; + + // Tracks the type and metadata of the link + let mut start_link_tag: Option = None; + + for event in parser { + match event { + // CodeBlock-specific handling + Event::Start(Tag::CodeBlock(CodeBlockKind::Fenced(meta))) => { + // Track the content of code blocks to pass them through the analyzer + let test = CodeBlockTest::from_str(meta.as_ref())?; + + // Erase the lintdoc-specific attributes in the output by + // re-generating the language ID from the source type + write!(content, "```{}", &test.tag)?; + writeln!(content)?; + + 
language = Some((test, String::new())); + } + + Event::End(TagEnd::CodeBlock) => { + writeln!(content, "```")?; + writeln!(content)?; + + if let Some((test, block)) = language.take() { + if test.expect_diagnostic { + writeln!(content, "```sh")?; + } + + print_diagnostics(group, rule, &test, &block, content)?; + + if test.expect_diagnostic { + writeln!(content, "```")?; + writeln!(content)?; + } + } + } + + Event::Text(text) => { + let mut hide_line = false; + + if let Some((test, block)) = &mut language { + if let Some(inner_text) = text.strip_prefix("# ") { + // Lines prefixed with "# " are hidden from the public documentation + write!(block, "{inner_text}")?; + hide_line = true; + test.hidden_lines.push(test.line_count); + } else { + write!(block, "{text}")?; + } + test.line_count += 1; + } + + if hide_line { + // Line should not be emitted into the output + } else if matches!(text.as_ref(), "`" | "*" | "_") { + write!(content, "\\{text}")?; + } else { + write!(content, "{text}")?; + } + } + + // Other markdown events are emitted as-is + Event::Start(Tag::Heading { level, .. }) => { + write!(content, "{} ", "#".repeat(level as usize))?; + } + Event::End(TagEnd::Heading { .. }) => { + writeln!(content)?; + writeln!(content)?; + } + + Event::Start(Tag::Paragraph) => { + continue; + } + Event::End(TagEnd::Paragraph) => { + writeln!(content)?; + writeln!(content)?; + } + + Event::Code(text) => { + write!(content, "`{text}`")?; + } + Event::Start(ref link_tag @ Tag::Link { link_type, .. }) => { + start_link_tag = Some(link_tag.clone()); + match link_type { + LinkType::Autolink => { + write!(content, "<")?; + } + LinkType::Inline | LinkType::Reference | LinkType::Shortcut => { + write!(content, "[")?; + } + _ => { + panic!("unimplemented link type") + } + } + } + Event::End(TagEnd::Link) => { + if let Some(Tag::Link { + link_type, + dest_url, + title, + .. 
+ }) = start_link_tag + { + match link_type { + LinkType::Autolink => { + write!(content, ">")?; + } + LinkType::Inline | LinkType::Reference | LinkType::Shortcut => { + write!(content, "]({dest_url}")?; + if !title.is_empty() { + write!(content, " \"{title}\"")?; + } + write!(content, ")")?; + } + _ => { + panic!("unimplemented link type") + } + } + start_link_tag = None; + } else { + panic!("missing start link tag"); + } + } + + Event::SoftBreak => { + writeln!(content)?; + } + + Event::HardBreak => { + writeln!(content, "
")?; + } + + Event::Start(Tag::List(num)) => { + list_indentation += 1; + if let Some(num) = num { + list_order = Some(num); + } + if list_indentation > 1 { + writeln!(content)?; + } + } + + Event::End(TagEnd::List(_)) => { + list_order = None; + list_indentation -= 1; + writeln!(content)?; + } + Event::Start(Tag::Item) => { + write!(content, "{}", " ".repeat(list_indentation - 1))?; + if let Some(num) = list_order { + write!(content, "{num}. ")?; + } else { + write!(content, "- ")?; + } + } + + Event::End(TagEnd::Item) => { + list_order = list_order.map(|item| item + 1); + writeln!(content)?; + } + + Event::Start(Tag::Strong) => { + write!(content, "**")?; + } + + Event::End(TagEnd::Strong) => { + write!(content, "**")?; + } + + Event::Start(Tag::Emphasis) => { + write!(content, "_")?; + } + + Event::End(TagEnd::Emphasis) => { + write!(content, "_")?; + } + + Event::Start(Tag::Strikethrough) => { + write!(content, "~")?; + } + + Event::End(TagEnd::Strikethrough) => { + write!(content, "~")?; + } + + Event::Start(Tag::BlockQuote(_)) => { + write!(content, ">")?; + } + + Event::End(TagEnd::BlockQuote(_)) => { + writeln!(content)?; + } + + _ => { + bail!("unimplemented event {event:?}") + } + } + } + + Ok(()) +} + +struct CodeBlockTest { + /// The language tag of this code block. + tag: String, + + /// True if this is an invalid example that should trigger a diagnostic. + expect_diagnostic: bool, + + /// Whether to ignore this code block. + ignore: bool, + + /// The number of lines in this code block. + line_count: u32, + + // The indices of lines that should be hidden from the public documentation. 
+ hidden_lines: Vec<u32>, +} + +impl FromStr for CodeBlockTest { + type Err = anyhow::Error; + + fn from_str(input: &str) -> Result<Self> { + // This is based on the parsing logic for code block languages in `rustdoc`: + // https://github.com/rust-lang/rust/blob/6ac8adad1f7d733b5b97d1df4e7f96e73a46db42/src/librustdoc/html/markdown.rs#L873 + let tokens = input + .split([',', ' ', '\t']) + .map(str::trim) + .filter(|token| !token.is_empty()); + + let mut test = CodeBlockTest { + tag: String::new(), + expect_diagnostic: false, + ignore: false, + line_count: 0, + hidden_lines: vec![], + }; + + for token in tokens { + match token { + // Other attributes + "expect_diagnostic" => test.expect_diagnostic = true, + "ignore" => test.ignore = true, + // Regard as language tags, last one wins + _ => test.tag = token.to_string(), + } + } + + Ok(test) + } +} + +/// Prints diagnostics documentation from a code block into the content buffer. +/// +/// * `group`: The group of the rule. +/// * `rule`: The rule name. +/// * `test`: The code block test. +/// * `code`: The code block content. +/// * `content`: The buffer to write the documentation to. 
+fn print_diagnostics( + group: &'static str, + rule: &'static str, + test: &CodeBlockTest, + code: &str, + content: &mut Vec, +) -> Result<()> { + let file_path = format!("code-block.{}", test.tag); + + let mut write_diagnostic = |_: &str, diag: pglt_diagnostics::Error| -> Result<()> { + let printer = PrintDiagnostic::simple(&diag); + writeln!(content, "{}", StdDisplay(printer)).unwrap(); + + Ok(()) + }; + if test.ignore { + return Ok(()); + } + + let rule_filter = RuleFilter::Rule(group, rule); + let filter = AnalysisFilter { + enabled_rules: Some(slice::from_ref(&rule_filter)), + ..AnalysisFilter::default() + }; + let settings = Settings::default(); + let options = AnalyserOptions::default(); + let analyser = Analyser::new(AnalyserConfig { + options: &options, + filter, + }); + + // split and parse each statement + let stmts = pglt_statement_splitter::split(code); + for stmt in stmts.ranges { + match pglt_query_ext::parse(&code[stmt]) { + Ok(ast) => { + for rule_diag in analyser.run(pglt_analyser::AnalyserContext { root: &ast }) { + let diag = pglt_diagnostics::serde::Diagnostic::new(rule_diag); + + let category = diag.category().expect("linter diagnostic has no code"); + let severity = settings.get_severity_from_rule_code(category).expect( + "If you see this error, it means you need to run cargo codegen-configuration", + ); + + let error = diag + .with_severity(severity) + .with_file_path(&file_path) + .with_file_source_code(code); + + write_diagnostic(code, error)?; + } + } + Err(e) => { + let error = SyntaxDiagnostic::from(e) + .with_file_path(&file_path) + .with_file_source_code(code); + write_diagnostic(code, error)?; + } + }; + } + + Ok(()) +} diff --git a/docs/codegen/src/rules_index.rs b/docs/codegen/src/rules_index.rs new file mode 100644 index 000000000..a00ab365c --- /dev/null +++ b/docs/codegen/src/rules_index.rs @@ -0,0 +1,124 @@ +use biome_string_case::Case; +use pglt_analyse::RuleMetadata; +use pglt_console::fmt::{Formatter, HTML}; +use 
pglt_console::{markup, Markup}; +use pulldown_cmark::{Event, Parser, Tag, TagEnd}; +use std::{ + collections::BTreeMap, + fs, + io::{self}, + path::Path, + str::{self}, +}; + +use crate::utils; + +/// Generates the lint rules index. +/// +/// * `docs_dir`: Path to the docs directory. +pub fn generate_rules_index(docs_dir: &Path) -> anyhow::Result<()> { + let index_file = docs_dir.join("rules.md"); + + let mut visitor = crate::utils::LintRulesVisitor::default(); + pglt_analyser::visit_registry(&mut visitor); + + let crate::utils::LintRulesVisitor { groups } = visitor; + + let mut content = Vec::new(); + + for (group, rules) in groups { + generate_group(group, rules, &mut content)?; + } + + let new_content = String::from_utf8(content)?; + + let file_content = fs::read_to_string(&index_file)?; + + let new_content = utils::replace_section(&file_content, "RULES_INDEX", &new_content); + + fs::write(index_file, new_content)?; + + Ok(()) +} + +fn generate_group( + group: &'static str, + rules: BTreeMap<&'static str, RuleMetadata>, + content: &mut dyn io::Write, +) -> io::Result<()> { + let (group_name, description) = extract_group_metadata(group); + + writeln!(content, "\n## {group_name}")?; + writeln!(content)?; + write_markup_to_string(content, description)?; + writeln!(content)?; + writeln!(content, "| Rule name | Description | Properties |")?; + writeln!(content, "| --- | --- | --- |")?; + + for (rule_name, rule_metadata) in rules { + let is_recommended = rule_metadata.recommended; + let dashed_rule = Case::Kebab.convert(rule_name); + + let mut properties = String::new(); + if is_recommended { + properties.push_str(""); + } + + let summary = generate_rule_summary(rule_metadata.docs)?; + + write!( + content, + "| [{rule_name}](./rules/{dashed_rule}) | {summary} | {properties} |" + )?; + + writeln!(content)?; + } + + Ok(()) +} + +fn extract_group_metadata(group: &str) -> (&str, Markup) { + match group { + "safety" => ( + "Safety", + markup! 
{ + "Rules that detect potential safety issues in your code." + }, + ), + _ => panic!("Unknown group ID {group:?}"), + } +} + +fn write_markup_to_string(buffer: &mut dyn io::Write, markup: Markup) -> io::Result<()> { + let mut write = HTML::new(buffer).with_mdx(); + let mut fmt = Formatter::new(&mut write); + fmt.write_markup(markup) +} + +/// Parses the rule documentation to extract the summary. +/// The summary is the first paragraph in the rule documentation. +fn generate_rule_summary(docs: &'static str) -> io::Result<String> { + let parser = Parser::new(docs); + + let mut buffer = String::new(); + + for event in parser { + match event { + Event::Start(Tag::Paragraph) => { + continue; + } + Event::Text(text) => { + buffer.push_str(&text); + } + Event::Code(code) => { + buffer.push_str(format!("`{}`", code).as_str()); + } + Event::End(TagEnd::Paragraph) => { + return Ok(buffer); + } + _ => {} + } + } + + panic!("No summary found in rule documentation"); +} diff --git a/docs/codegen/src/rules_sources.rs b/docs/codegen/src/rules_sources.rs new file mode 100644 index 000000000..00bdfd361 --- /dev/null +++ b/docs/codegen/src/rules_sources.rs @@ -0,0 +1,109 @@ +use anyhow::Result; +use biome_string_case::Case; +use pglt_analyse::RuleMetadata; +use std::cmp::Ordering; +use std::collections::{BTreeMap, BTreeSet}; +use std::fs; +use std::io::Write; +use std::path::Path; + +#[derive(Debug, Eq, PartialEq)] +struct SourceSet { + source_rule_name: String, + source_link: String, + rule_name: String, + link: String, +} + +impl Ord for SourceSet { + fn cmp(&self, other: &Self) -> Ordering { + self.source_rule_name.cmp(&other.source_rule_name) + } +} + +impl PartialOrd for SourceSet { + fn partial_cmp(&self, other: &Self) -> Option<Ordering> { + Some(self.cmp(other)) + } +} + +pub fn generate_rule_sources(docs_dir: &Path) -> anyhow::Result<()> { + let rule_sources_file = docs_dir.join("rule_sources.md"); + + let mut visitor = crate::utils::LintRulesVisitor::default(); + 
pglt_analyser::visit_registry(&mut visitor); + + let crate::utils::LintRulesVisitor { groups } = visitor; + + let mut buffer = Vec::new(); + + let rules = groups + .into_iter() + .flat_map(|(_, rule)| rule) + .collect::<BTreeMap<&str, RuleMetadata>>(); + + let mut rules_by_source = BTreeMap::<String, BTreeSet<SourceSet>>::new(); + let mut exclusive_rules = BTreeSet::<(String, String)>::new(); + + for (rule_name, metadata) in rules { + let kebab_rule_name = Case::Kebab.convert(rule_name); + if metadata.sources.is_empty() { + exclusive_rules.insert((rule_name.to_string(), format!("./rules/{kebab_rule_name}"))); + } else { + for source in metadata.sources { + let source_set = SourceSet { + rule_name: rule_name.to_string(), + link: format!("./rules/{kebab_rule_name}"), + source_link: source.to_rule_url(), + source_rule_name: source.as_rule_name().to_string(), + }; + + if let Some(set) = rules_by_source.get_mut(&format!("{source}")) { + set.insert(source_set); + } else { + let mut set = BTreeSet::new(); + set.insert(source_set); + rules_by_source.insert(format!("{source}"), set); + } + } + } + } + + writeln!(buffer, "## Exclusive rules",)?; + for (rule, link) in exclusive_rules { + writeln!(buffer, "- [{rule}]({link}) ")?; + } + + writeln!(buffer, "## Rules from other sources",)?; + + for (source, rules) in rules_by_source { + writeln!(buffer, "### {source}")?; + writeln!(buffer, r#"| {source} Rule Name | Rule Name |"#)?; + writeln!(buffer, r#"| ---- | ---- |"#)?; + + push_to_table(rules, &mut buffer)?; + } + + let new_content = String::from_utf8(buffer)?; + + fs::write(rule_sources_file, new_content)?; + + Ok(()) +} + +fn push_to_table(source_set: BTreeSet<SourceSet>, buffer: &mut Vec<u8>) -> Result<()> { + for source_set in source_set { + write!( + buffer, + "| [{}]({}) |[{}]({})", + source_set.source_rule_name, + source_set.source_link, + source_set.rule_name, + source_set.link + )?; + + writeln!(buffer, " |")?; + } + + Ok(()) +} diff --git a/docs/codegen/src/utils.rs b/docs/codegen/src/utils.rs new file mode 100644 index 
000000000..7770a5085 --- /dev/null +++ b/docs/codegen/src/utils.rs @@ -0,0 +1,55 @@ +use pglt_analyse::{GroupCategory, RegistryVisitor, Rule, RuleCategory, RuleGroup, RuleMetadata}; +use regex::Regex; +use std::collections::BTreeMap; + +pub(crate) fn replace_section( + content: &str, + section_identifier: &str, + replacement: &str, +) -> String { + let pattern = format!( + r"(\[//\]: # \(BEGIN {}\)\n)(?s).*?(\n\[//\]: # \(END {}\))", + section_identifier, section_identifier + ); + let re = Regex::new(&pattern).unwrap(); + re.replace_all(content, format!("${{1}}{}${{2}}", replacement)) + .to_string() +} + +#[derive(Default)] +pub(crate) struct LintRulesVisitor { + /// This is mapped to: + /// group (e.g. "safety") -> <rules> + /// where <rules> is: + /// <rule_name> -> metadata + pub(crate) groups: BTreeMap<&'static str, BTreeMap<&'static str, RuleMetadata>>, +} + +impl LintRulesVisitor { + fn push_rule<R>(&mut self) + where + R: Rule + 'static, + { + let group = self + .groups + .entry(<R::Group as RuleGroup>::NAME) + .or_default(); + + group.insert(R::METADATA.name, R::METADATA); + } +} + +impl RegistryVisitor for LintRulesVisitor { + fn record_category<C: GroupCategory>(&mut self) { + if matches!(C::CATEGORY, RuleCategory::Lint) { + C::record_groups(self); + } + } + + fn record_rule<R>(&mut self) + where + R: Rule + 'static, + { + self.push_rule::<R>() + } +} diff --git a/docs/env_variables.md b/docs/env_variables.md new file mode 100644 index 000000000..e8f2d493c --- /dev/null +++ b/docs/env_variables.md @@ -0,0 +1,19 @@ +## Environment Variables + +[//]: # (BEGIN ENV_VARS) + + +### `PGLT_LOG_PATH` + + The directory where the Daemon logs will be saved. + +### `PGLT_LOG_PREFIX_NAME` + + A prefix that's added to the name of the log. 
Default: `server.log.` + +### `PGLT_CONFIG_PATH` + + A path to the configuration file + + +[//]: # (END ENV_VARS) diff --git a/docs/index.md b/docs/index.md new file mode 100644 index 000000000..5e1317c88 --- /dev/null +++ b/docs/index.md @@ -0,0 +1,92 @@ +![Postgres Language Server](images/pls-github.png) + +# Postgres Language Server + +A collection of language tools and a Language Server Protocol (LSP) implementation for Postgres, focusing on developer experience and reliable SQL tooling. + +--- + +**Source Code**: https://github.com/supabase-community/postgres_lsp + +--- + +## Overview + +This project provides a toolchain for Postgres development, built on Postgres' own parser `libpg_query` to ensure 100% syntax compatibility. It is built on a Server-Client architecture with a transport-agnostic design. This means all features can be accessed not only through the [Language Server Protocol](https://microsoft.github.io/language-server-protocol/), but also through other interfaces like a CLI, HTTP APIs, or a WebAssembly module. The goal is to make all the great Postgres tooling out there as accessible as possible, and to build anything that is missing ourselves. + +Currently, the following features are implemented: +- Autocompletion +- Syntax Error Highlighting +- Type-checking (via `EXPLAIN` error insights) +- Linter, inspired by [Squawk](https://squawkhq.com) + +Our current focus is on refining and enhancing these core features while building a robust and easily accessible infrastructure. For future plans and opportunities to contribute, please check out the issues and discussions. Any contributions are welcome! + +## Installation + +> [!NOTE] +> We will update this section once we have published the binaries. + +## Configuration + +We recommend that you create a `pglt.toml` configuration file for each project. This eliminates the need to repeat the CLI options each time you run a command, and ensures that we use the same configuration in your editor. 
Some options are also only available from a configuration file. If you are happy with the defaults, you don’t need to create a configuration file. To create the `pglt.toml` file, run the `init` command in the root folder of your project: + +```sh +pglt init +``` + +After running the `init` command, you’ll have a new `pglt.toml` file in your directory: + +[//]: # (BEGIN DEFAULT_CONFIGURATION) + +```toml +[vcs] +enabled = false +client_kind = "git" +use_ignore_file = false + +[files] +ignore = [] + +[linter] +enabled = true + +[linter.rules] +recommended = true + +[db] +host = "127.0.0.1" +port = 5432 +username = "postgres" +password = "postgres" +database = "postgres" +conn_timeout_secs = 10 +``` + +[//]: # (END DEFAULT_CONFIGURATION) + +Make sure to point the database connection settings at your local development database. To see what else can be configured, run `--help`. + +## Usage + +You can check SQL files using the `check` command: + +```sh +pglt check myfile.sql +``` + +Make sure to check out the other options. We will provide guides for specific use cases like linting migration files soon. + +## Install an Editor Plugin + +We recommend installing an editor plugin to get the most out of Postgres Language Tools. + +> [!NOTE] +> We will update this section once we have published the binaries. + + +## CI Setup + +> [!NOTE] +> We will update this section once we have published the binaries. 
+ diff --git a/docs/rule_sources.md b/docs/rule_sources.md new file mode 100644 index 000000000..5036ae51f --- /dev/null +++ b/docs/rule_sources.md @@ -0,0 +1,7 @@ +## Exclusive rules +## Rules from other sources +### Squawk +| Squawk Rule Name | Rule Name | +| ---- | ---- | +| [ban-drop-column](https://squawkhq.com/docs/ban-drop-column) |[banDropColumn](./rules/ban-drop-column) | +| [ban-drop-not-null](https://squawkhq.com/docs/ban-drop-not-null) |[banDropNotNull](./rules/ban-drop-not-null) | diff --git a/docs/rules.md b/docs/rules.md new file mode 100644 index 000000000..cf933b4ba --- /dev/null +++ b/docs/rules.md @@ -0,0 +1,19 @@ +# Rules + +Below the list of rules supported by Postgres Language Tools, divided by group. Here's a legend of the emojis: + +- The icon indicates that the rule is part of the recommended rules. + +[//]: # (BEGIN RULES_INDEX) + +## Safety + +Rules that detect potential safety issues in your code. +| Rule name | Description | Properties | +| --- | --- | --- | +| [banDropColumn](./rules/ban-drop-column) | Dropping a column may break existing clients. | | +| [banDropNotNull](./rules/ban-drop-not-null) | Dropping a NOT NULL constraint may break existing clients. | | + +[//]: # (END RULES_INDEX) + + diff --git a/docs/rules/ban-drop-column.md b/docs/rules/ban-drop-column.md new file mode 100644 index 000000000..75cabd8bd --- /dev/null +++ b/docs/rules/ban-drop-column.md @@ -0,0 +1,42 @@ +# banDropColumn +**Diagnostic Category: `lint/safety/banDropColumn`** + +**Since**: `vnext` + +> [!NOTE] +> This rule is recommended. A diagnostic error will appear when linting your code. + +**Sources**: +- Inspired from: squawk/ban-drop-column + +## Description +Dropping a column may break existing clients. + +Update your application code to no longer read or write the column. + +You can leave the column as nullable or delete the column once queries no longer select or modify the column. 
+ +## Examples + +### Invalid + +```sql +alter table test drop column id; +``` + +```sh +code-block.sql lint/safety/banDropColumn ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ + + × Dropping a column may break existing clients. + + i You can leave the column as nullable or delete the column once queries no longer select or modify the column. + + +``` + +## How to configure +```toml title="pglt.toml" +[linter.rules.safety] +banDropColumn = "error" + +``` diff --git a/docs/rules/ban-drop-not-null.md b/docs/rules/ban-drop-not-null.md new file mode 100644 index 000000000..3722534cb --- /dev/null +++ b/docs/rules/ban-drop-not-null.md @@ -0,0 +1,42 @@ +# banDropNotNull +**Diagnostic Category: `lint/safety/banDropNotNull`** + +**Since**: `vnext` + +> [!NOTE] +> This rule is recommended. A diagnostic error will appear when linting your code. + +**Sources**: +- Inspired from: squawk/ban-drop-not-null + +## Description +Dropping a NOT NULL constraint may break existing clients. + +Application code or code written in procedural languages like PL/SQL or PL/pgSQL may not expect NULL values for the column that was previously guaranteed to be NOT NULL and therefore may fail to process them correctly. + +You can consider using a marker value that represents NULL. Alternatively, create a new table allowing NULL values, copy the data from the old table, and create a view that filters NULL values. + +## Examples + +### Invalid + +```sql +alter table users alter column email drop not null; +``` + +```sh +code-block.sql lint/safety/banDropNotNull ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ + + × Dropping a NOT NULL constraint may break existing clients. + + i Consider using a marker value that represents NULL. Alternatively, create a new table allowing NULL values, copy the data from the old table, and create a view that filters NULL values. 
+ + +``` + +## How to configure +```toml title="pglt.toml" +[linter.rules.safety] +banDropNotNull = "error" + +``` diff --git a/docs/troubleshooting.md b/docs/troubleshooting.md new file mode 100644 index 000000000..f9d91561e --- /dev/null +++ b/docs/troubleshooting.md @@ -0,0 +1,9 @@ +## Troubleshooting + +This guide describes how to resolve common issues with Postgres Language Tools. + +### Incorrect and/or misplaced diagnostics + +We are employing pragmatic solutions to split a SQL file into statements, and they might be incorrect in certain cases. If you see diagnostics like `Unexpected token` in the middle of a valid statement, make sure to either end all statements with a semicolon, or put two double newlines between them. If there are still issues, it's most likely a bug in the change handler that is gone after reopening the file. But please file an issue with sample code so we can fix the root cause. + + diff --git a/xtask/codegen/src/generate_configuration.rs b/xtask/codegen/src/generate_configuration.rs index 398d461ae..cecf1a045 100644 --- a/xtask/codegen/src/generate_configuration.rs +++ b/xtask/codegen/src/generate_configuration.rs @@ -299,7 +299,7 @@ fn generate_for_groups( #[cfg_attr(feature = "schema", derive(JsonSchema))] #[serde(rename_all = "camelCase", deny_unknown_fields)] pub struct Rules { - /// It enables the lint rules recommended by PGLSP. `true` by default. + /// It enables the lint rules recommended by PgLT. `true` by default. 
#[serde(skip_serializing_if = "Option::is_none")] pub recommended: Option, diff --git a/xtask/codegen/src/generate_new_analyser_rule.rs b/xtask/codegen/src/generate_new_analyser_rule.rs index f5618114e..e847fd550 100644 --- a/xtask/codegen/src/generate_new_analyser_rule.rs +++ b/xtask/codegen/src/generate_new_analyser_rule.rs @@ -74,7 +74,7 @@ impl Rule for {rule_name_upper_camel} {{ } fn gen_sql(category_name: &str) -> String { - format!("-- expect_only_{category_name}\n-- select 1;").into() + format!("-- expect_only_{category_name}\n-- select 1;") } pub fn generate_new_analyser_rule(category: Category, rule_name: &str, group: &str) {