diff --git a/.github/bench/target.js b/.github/bench/target.js index 7d74ab6..15beaa4 100644 --- a/.github/bench/target.js +++ b/.github/bench/target.js @@ -44,6 +44,10 @@ function buildStaticApp(createApp, label) { const app = createApp(); if (label === "http-native") { + /** [Auto generated by http-native] + * [http-native optimization] static-fast-path + * This route is served by the static fast path and avoids generic bridge dispatch. + */ app.get("/", (req, res) => { res.json({ ok: true, diff --git a/CLAUDE.md b/CLAUDE.md new file mode 100644 index 0000000..f9e3946 --- /dev/null +++ b/CLAUDE.md @@ -0,0 +1,68 @@ +# CLAUDE.md + +This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository. + +## Project Overview + +`@http-native/core` — a fast, Express-like HTTP framework for JavaScript powered by a Rust native module via napi-rs. The Rust layer (monoio-based async TCP server) handles routing, connection management, TLS, sessions, rate limiting, and response caching. JavaScript handles route registration, middleware execution, and handler dispatch. The two layers communicate through a custom binary protocol (bridge.js). + +## Build & Development Commands + +```bash +bun run build # Debug build of the Rust native module (.node binary) +bun run build:release # Release build (LTO, stripped symbols) +bun run test # Build + run all tests +bun run dev # Dev server with hot reload (via CLI) +``` + +Individual test files can be run directly after building: +```bash +bun .github/tests/test.js +bun .github/tests/test-dev.js +bun .github/tests/test-rate-limit.js +``` + +The native `.node` binary is output to the project root as `http-native.node`. It can be overridden via `HTTP_NATIVE_NATIVE_PATH` env var. + +## Architecture + +### Rust Native Layer (`rsrc/src/`) + +- **lib.rs** — NAPI entry point. 
Runs monoio event loop on worker threads, accepts TCP connections, parses HTTP/1.1 requests, performs routing, and dispatches to JS via `ThreadsafeFunction`. Handles keep-alive, TLS (via rustls), streaming, static route responses, and native response caching entirely in Rust without crossing the JS bridge. +- **router.rs** — O(1) exact-match HashMap router + O(M) radix-tree router for parameterized routes. Handles static response routes, WebSocket upgrade detection, and dynamic fast-path responses. +- **analyzer.rs** — Static analysis of JS handler source code at compile time. Generates fast-path response templates so Rust can serve certain dynamic routes without calling into JS. +- **manifest.rs** — Deserializes the JSON manifest from JS that describes routes, middlewares, session config, TLS, and server settings. +- **session.rs** — Rust-backed in-memory session store (HMAC-SHA256 signed cookies). +- **rate_limit.rs** — Native sliding-window rate limiter exposed via NAPI. +- **websocket.rs** — WebSocket frame encoding/decoding. + +### JavaScript Layer (`src/`) + +- **index.js** — `createApp()` factory. Express-like API for routes, middleware, groups, error handlers, static routes, WebSocket, and the chainable `listen().port().tls().hot()` builder. Compiles routes into a manifest, creates the dispatcher, and starts the native server. +- **bridge.js** — Binary protocol codec between Rust and JS. Encodes/decodes request/response envelopes, static-analyzes handler source to build access plans (determines which request fields to materialize), and manages object pooling for zero-allocation hot paths. +- **native.js** — Loads the compiled `.node` binary via `createRequire`. +- **session.js** — JS-side session middleware that integrates with the Rust session store. +- **cors.js** — CORS middleware. +- **validate.js** — Request validation middleware (works with Zod schemas). +- **rate-limit.js** — JS wrapper for the native rate limiter. 
+- **cli.js** — CLI entry point (`http-native` binary). Supports `dev`, `setup`, and `start` subcommands. +- **dev/** — Dev server with hot reload and route source-annotation comments. +- **opt/** — Runtime optimization tracking (dispatch timing, route analysis, optimization summaries). + +### Key Design Pattern: Binary Bridge Protocol + +Rust and JS communicate via a custom binary envelope format (bridge version 2). Requests are encoded as: `version | methodCode | flags | handlerId | lengths... | url | path | ip | params | headers | body`. Responses use: `status | headerCount | bodyLen | headers | body`. This avoids JSON serialization overhead on every request. + +### Key Design Pattern: Access Plans + +Handler source code is statically analyzed (`analyzeRequestAccess`) to determine which request fields (params, query, headers, method, path, url) are actually accessed. Fields that are never read are never materialized from the binary envelope — this is the primary zero-copy optimization. + +## Commit Convention + +Prefix commits with type: `opt:`, `chore:`, `rm:`, `other:`. Types can be combined with `&` (e.g., `opt&chore:`). See CONTRIBUTING.md. + +## Runtime Requirements + +- Bun (primary) or Node.js for the JS layer +- Rust toolchain for building the native module +- The native binary must exist at project root (or `HTTP_NATIVE_NATIVE_PATH`) before tests or the server can run diff --git a/Cargo.lock b/Cargo.lock index a2a5f9b..d664c28 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2,6 +2,27 @@ # It is not intended for manual editing. 
version = 4 +[[package]] +name = "adler2" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa" + +[[package]] +name = "alloc-no-stdlib" +version = "2.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc7bb162ec39d46ab1ca8c77bf72e890535becd1751bb45f64c597edb4c8c6b3" + +[[package]] +name = "alloc-stdlib" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94fb8275041c72129eb51b7d0322c29b8387a0386127718b096429201a5d6ece" +dependencies = [ + "alloc-no-stdlib", +] + [[package]] name = "anyhow" version = "1.0.102" @@ -83,6 +104,27 @@ dependencies = [ "generic-array", ] +[[package]] +name = "brotli" +version = "8.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4bd8b9603c7aa97359dbd97ecf258968c95f3adddd6db2f7e7a5bef101c84560" +dependencies = [ + "alloc-no-stdlib", + "alloc-stdlib", + "brotli-decompressor", +] + +[[package]] +name = "brotli-decompressor" +version = "5.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "874bb8112abecc98cbd6d81ea4fa7e94fb9449648c93cc89aa40c81c24d7de03" +dependencies = [ + "alloc-no-stdlib", + "alloc-stdlib", +] + [[package]] name = "bumpalo" version = "3.20.2" @@ -146,6 +188,15 @@ dependencies = [ "libc", ] +[[package]] +name = "crc32fast" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9481c1c90cbf2ac953f07c8d4a58aa3945c425b7185c9154d67a65e4230da511" +dependencies = [ + "cfg-if", +] + [[package]] name = "crossbeam-utils" version = "0.8.21" @@ -241,6 +292,16 @@ version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5baebc0774151f905a1a2cc41989300b1e6fbb29aff0ceffa1064fdd3088d582" +[[package]] +name = "flate2" +version = "1.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "843fba2746e448b37e26a819579957415c8cef339bf08564fe8b7ddbd959573c" +dependencies = [ + "crc32fast", + "miniz_oxide", +] + [[package]] name = "flume" version = "0.11.1" @@ -428,8 +489,10 @@ dependencies = [ "anyhow", "arc-swap", "base64", + "brotli", "bytes", "dashmap", + "flate2", "flume", "getrandom 0.2.17", "hmac", @@ -661,6 +724,16 @@ dependencies = [ "autocfg", ] +[[package]] +name = "miniz_oxide" +version = "0.8.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316" +dependencies = [ + "adler2", + "simd-adler32", +] + [[package]] name = "mio" version = "0.8.11" @@ -1110,6 +1183,12 @@ version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" +[[package]] +name = "simd-adler32" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "703d5c7ef118737c72f1af64ad2f6f8c5e1921f818cdcb97b8fe6fc69bf66214" + [[package]] name = "slab" version = "0.4.12" diff --git a/Cargo.toml b/Cargo.toml index 0a6f819..98f0611 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -34,6 +34,8 @@ serde_json = "1.0" sha2 = "0.10" socket2 = { version = "0.5", features = ["all"] } url = "2.5" +flate2 = "1.1" +brotli = "8" [build-dependencies] napi-build = "2" diff --git a/package.json b/package.json index 299c78f..a29a0c4 100644 --- a/package.json +++ b/package.json @@ -30,6 +30,7 @@ }, "./hot": "./src/hot.js", "./session": "./src/session.js", + "./compress": "./src/compress.js", "./rate-limit": { "types": "./src/rate-limit.d.ts", "default": "./src/rate-limit.js" diff --git a/rsrc/src/compress.rs b/rsrc/src/compress.rs new file mode 100644 index 0000000..bce8761 --- /dev/null +++ b/rsrc/src/compress.rs @@ -0,0 +1,270 @@ +use std::io::Write; + +use flate2::write::GzEncoder; +use flate2::Compression; + +use crate::manifest::CompressionConfigInput; + +// ─── 
Configuration ───────────────────── + +#[derive(Clone)] +pub struct CompressionConfig { + pub min_size: usize, + pub brotli_quality: u32, + pub gzip_level: u32, + /// Per-content-type quality overrides, checked in order. + pub quality_map: Vec, +} + +#[derive(Clone)] +pub struct ContentTypeQuality { + /// Either an exact media-type prefix like "image/svg+xml" or a wildcard like "text/" + pattern: ContentTypePattern, + brotli_quality: Option, + gzip_level: Option, +} + +#[derive(Clone)] +enum ContentTypePattern { + /// Matches "type/" prefix, e.g. "text/*" stored as "text/" + Wildcard(Box<[u8]>), + /// Exact media-type match (case-insensitive) + Exact(Box<[u8]>), +} + +impl CompressionConfig { + pub fn from_manifest(input: Option<&CompressionConfigInput>) -> Option { + let cfg = input?; + if !cfg.enabled { + return None; + } + + let quality_map = cfg.quality_map.iter().map(|entry| { + let pattern = if entry.pattern.ends_with("/*") { + // "text/*" → match prefix "text/" + let prefix = entry.pattern[..entry.pattern.len() - 1].to_ascii_lowercase(); + ContentTypePattern::Wildcard(prefix.into_bytes().into_boxed_slice()) + } else if entry.pattern.ends_with('/') { + ContentTypePattern::Wildcard(entry.pattern.to_ascii_lowercase().into_bytes().into_boxed_slice()) + } else { + ContentTypePattern::Exact(entry.pattern.to_ascii_lowercase().into_bytes().into_boxed_slice()) + }; + ContentTypeQuality { + pattern, + brotli_quality: entry.brotli_quality.map(|q| q.min(11)), + gzip_level: entry.gzip_level.map(|l| l.min(9)), + } + }).collect(); + + Some(Self { + min_size: cfg.min_size, + brotli_quality: cfg.brotli_quality.min(11), + gzip_level: cfg.gzip_level.min(9), + quality_map, + }) + } + + /// Resolve brotli quality for a given content-type, checking quality_map first. 
+ fn brotli_quality_for(&self, content_type: Option<&[u8]>) -> u32 { + if let Some(ct) = content_type { + if let Some(q) = self.find_override(ct, |e| e.brotli_quality) { + return q; + } + } + self.brotli_quality + } + + /// Resolve gzip level for a given content-type, checking quality_map first. + fn gzip_level_for(&self, content_type: Option<&[u8]>) -> u32 { + if let Some(ct) = content_type { + if let Some(l) = self.find_override(ct, |e| e.gzip_level) { + return l; + } + } + self.gzip_level + } + + fn find_override(&self, content_type: &[u8], getter: fn(&ContentTypeQuality) -> Option) -> Option { + // Extract media type (before ;charset= etc.) + let media = content_type + .split(|&b| b == b';') + .next() + .map(trim_ascii) + .unwrap_or(content_type); + + for entry in &self.quality_map { + let matched = match &entry.pattern { + ContentTypePattern::Wildcard(prefix) => { + media.len() >= prefix.len() + && media[..prefix.len()].eq_ignore_ascii_case(prefix) + } + ContentTypePattern::Exact(exact) => { + media.eq_ignore_ascii_case(exact) + } + }; + if matched { + return getter(entry); + } + } + None + } +} + +// ─── Accepted Encoding ───────────────── + +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub enum AcceptedEncoding { + Identity, + Gzip, + Brotli, +} + +/// Parse the Accept-Encoding header value and return the best encoding. +/// Preference order: Brotli > Gzip > Identity. 
+pub fn parse_accept_encoding(value: &[u8]) -> AcceptedEncoding { + let mut best = AcceptedEncoding::Identity; + for part in value.split(|&b| b == b',') { + let trimmed = trim_ascii(part); + // Extract the encoding name (before any ;q= weight) + let name = trimmed + .split(|&b| b == b';') + .next() + .map(trim_ascii) + .unwrap_or(trimmed); + if name.eq_ignore_ascii_case(b"br") { + return AcceptedEncoding::Brotli; // Best possible — return immediately + } + if name.eq_ignore_ascii_case(b"gzip") { + best = AcceptedEncoding::Gzip; + } + } + best +} + +// ─── Compression Decision ────────────── + +/// Determine whether compression should be applied. +/// Returns the encoding to use, or None if compression should be skipped. +pub fn should_compress( + config: &CompressionConfig, + encoding: AcceptedEncoding, + body_len: usize, + content_type: Option<&[u8]>, + has_existing_content_encoding: bool, +) -> Option { + if encoding == AcceptedEncoding::Identity { + return None; + } + if body_len < config.min_size { + return None; + } + if has_existing_content_encoding { + return None; + } + if let Some(ct) = content_type { + if !is_compressible_content_type(ct) { + return None; + } + } + Some(encoding) +} + +/// Check whether a content-type is eligible for compression. +fn is_compressible_content_type(content_type: &[u8]) -> bool { + // Extract the media type (before any ;charset= etc.) 
+ let media = content_type + .split(|&b| b == b';') + .next() + .map(trim_ascii) + .unwrap_or(content_type); + + // text/* — always compressible + if media.len() >= 5 && media[..5].eq_ignore_ascii_case(b"text/") { + return true; + } + + // application/* subtypes + if media.len() >= 12 && media[..12].eq_ignore_ascii_case(b"application/") { + let subtype = &media[12..]; + if subtype.eq_ignore_ascii_case(b"json") + || subtype.eq_ignore_ascii_case(b"javascript") + || subtype.eq_ignore_ascii_case(b"xml") + || subtype.eq_ignore_ascii_case(b"xhtml+xml") + || subtype.eq_ignore_ascii_case(b"x-javascript") + || subtype.eq_ignore_ascii_case(b"ld+json") + || subtype.eq_ignore_ascii_case(b"graphql+json") + || subtype.eq_ignore_ascii_case(b"manifest+json") + || subtype.eq_ignore_ascii_case(b"vnd.api+json") + { + return true; + } + } + + // image/svg+xml + if media.eq_ignore_ascii_case(b"image/svg+xml") { + return true; + } + + false +} + +// ─── Compression ─────────────────────── + +/// Compress a body with the given encoding. +/// Returns None if compression produces output >= the original size. +/// When `content_type` is provided, per-content-type quality overrides are applied. 
+pub fn compress_body( + body: &[u8], + encoding: AcceptedEncoding, + config: &CompressionConfig, + content_type: Option<&[u8]>, +) -> Option> { + match encoding { + AcceptedEncoding::Brotli => compress_brotli(body, config.brotli_quality_for(content_type)), + AcceptedEncoding::Gzip => compress_gzip(body, config.gzip_level_for(content_type)), + AcceptedEncoding::Identity => None, + } +} + +fn compress_brotli(body: &[u8], quality: u32) -> Option> { + let mut output = Vec::with_capacity(body.len()); + let params = brotli::enc::BrotliEncoderParams { + quality: quality as i32, + ..Default::default() + }; + let mut cursor = std::io::Cursor::new(body); + brotli::BrotliCompress(&mut cursor, &mut output, ¶ms).ok()?; + if output.len() < body.len() { + Some(output) + } else { + None + } +} + +fn compress_gzip(body: &[u8], level: u32) -> Option> { + let mut encoder = GzEncoder::new(Vec::with_capacity(body.len()), Compression::new(level)); + encoder.write_all(body).ok()?; + let output = encoder.finish().ok()?; + if output.len() < body.len() { + Some(output) + } else { + None + } +} + +/// Returns the Content-Encoding header value for the given encoding. 
+pub fn encoding_header_value(encoding: AcceptedEncoding) -> &'static [u8] { + match encoding { + AcceptedEncoding::Brotli => b"br", + AcceptedEncoding::Gzip => b"gzip", + AcceptedEncoding::Identity => b"identity", + } +} + +// ─── Helpers ─────────────────────────── + +fn trim_ascii(bytes: &[u8]) -> &[u8] { + let start = bytes.iter().position(|b| !b.is_ascii_whitespace()).unwrap_or(bytes.len()); + let end = bytes.iter().rposition(|b| !b.is_ascii_whitespace()).map_or(start, |p| p + 1); + &bytes[start..end] +} diff --git a/rsrc/src/lib.rs b/rsrc/src/lib.rs index 734e3f5..845407e 100644 --- a/rsrc/src/lib.rs +++ b/rsrc/src/lib.rs @@ -1,4 +1,5 @@ mod analyzer; +pub mod compress; mod manifest; mod rate_limit; mod router; @@ -113,6 +114,7 @@ struct HttpServerConfig { header_connection_prefix: Vec, header_content_length_prefix: Vec, header_transfer_encoding_prefix: Vec, + compression: Option, } impl HttpServerConfig { @@ -175,6 +177,9 @@ impl HttpServerConfig { FALLBACK_HEADER_TRANSFER_ENCODING_PREFIX, ) .into_bytes(), + compression: compress::CompressionConfig::from_manifest( + manifest.compression.as_ref(), + ), }) } } @@ -229,7 +234,8 @@ impl NativeServerHandle { pub fn reload(&self, manifest_json: String) -> napi::Result<()> { let manifest: ManifestInput = serde_json::from_str(&manifest_json).map_err(to_napi_error)?; validate_manifest(&manifest).map_err(to_napi_error)?; - let next_router = Arc::new(Router::from_manifest(&manifest).map_err(to_napi_error)?); + let comp_config = compress::CompressionConfig::from_manifest(manifest.compression.as_ref()); + let next_router = Arc::new(Router::from_manifest(&manifest, comp_config.as_ref()).map_err(to_napi_error)?); let next_namespaces = next_router.cache_namespaces(); { @@ -581,7 +587,7 @@ pub fn start_server( validate_manifest(&manifest).map_err(to_napi_error)?; let server_config = Arc::new(HttpServerConfig::from_manifest(&manifest).map_err(to_napi_error)?); - let router = 
Arc::new(Router::from_manifest(&manifest).map_err(to_napi_error)?); + let router = Arc::new(Router::from_manifest(&manifest, server_config.compression.as_ref()).map_err(to_napi_error)?); let registered_cache_namespaces = router.cache_namespaces(); register_cache_namespaces(®istered_cache_namespaces); let live_router = Arc::new(LiveRouter { @@ -887,6 +893,8 @@ struct ParsedRequest<'a> { is_websocket_upgrade: bool, /// The Sec-WebSocket-Key header value, if present ws_key: Option<&'a str>, + /// Best accepted encoding from Accept-Encoding header + accepted_encoding: compress::AcceptedEncoding, } use monoio::time::timeout; @@ -1007,6 +1015,7 @@ where let keep_alive = parsed.keep_alive; let has_body = parsed.has_body; let content_length = parsed.content_length; + let accepted_encoding = parsed.accepted_encoding; // Security (S1): reject requests with non-identity Transfer-Encoding if parsed.has_chunked_te { @@ -1031,7 +1040,7 @@ where if let Some(static_route) = router.exact_get_root() { drop(parsed); drain_consumed_bytes(buffer, header_bytes); - write_exact_static_response(stream, static_route, keep_alive).await?; + write_exact_static_response(stream, static_route, keep_alive, accepted_encoding).await?; if !keep_alive { stream.shutdown().await?; return Ok(()); @@ -1042,7 +1051,7 @@ where if let Some(static_route) = router.exact_static_route(parsed.method, parsed.path) { drop(parsed); drain_consumed_bytes(buffer, header_bytes); - write_exact_static_response(stream, static_route, keep_alive).await?; + write_exact_static_response(stream, static_route, keep_alive, accepted_encoding).await?; if !keep_alive { stream.shutdown().await?; return Ok(()); @@ -1075,7 +1084,7 @@ where // String/Vec allocations for method, target, path, and headers. 
if !has_body { let dispatch_decision = - build_dispatch_decision_zero_copy(router.as_ref(), &parsed, &[], peer_ip)?; + build_dispatch_decision_zero_copy(router.as_ref(), &parsed, &[], peer_ip, accepted_encoding, server_config.compression.as_ref())?; // Extract session before dropping parsed let (session_id, is_new_session) = resolve_session(session_store, parsed.cookie_header); @@ -1085,7 +1094,7 @@ where match dispatch_decision { DispatchDecision::BridgeRequest(request, cache_insertion, handler_id, cache_namespace, url_bytes) => { - write_dynamic_dispatch_response(stream, dispatcher, request, keep_alive, cache_insertion, handler_id, cache_namespace, &url_bytes, session_store, session_id, is_new_session) + write_dynamic_dispatch_response(stream, dispatcher, request, keep_alive, cache_insertion, handler_id, cache_namespace, &url_bytes, session_store, session_id, is_new_session, accepted_encoding, server_config.compression.as_ref()) .await?; } DispatchDecision::SpecializedResponse(response) => { @@ -1184,11 +1193,12 @@ where &headers_owned, &body_bytes, peer_ip, + accepted_encoding, )?; match dispatch_decision_owned { DispatchDecision::BridgeRequest(request, cache_insertion, handler_id, cache_namespace, url_bytes) => { - write_dynamic_dispatch_response(stream, dispatcher, request, keep_alive, cache_insertion, handler_id, cache_namespace, &url_bytes, session_store, session_id_body, is_new_session_body).await?; + write_dynamic_dispatch_response(stream, dispatcher, request, keep_alive, cache_insertion, handler_id, cache_namespace, &url_bytes, session_store, session_id_body, is_new_session_body, accepted_encoding, server_config.compression.as_ref()).await?; } DispatchDecision::SpecializedResponse(response) => { let (write_result, _) = stream.write_all(response).await; @@ -1242,6 +1252,7 @@ fn parse_request_httparse(bytes: &[u8]) -> Option> { let mut cookie_header: Option<&str> = None; let mut is_websocket_upgrade = false; let mut ws_key: Option<&str> = None; + let mut 
accepted_encoding = compress::AcceptedEncoding::Identity; let mut headers = Vec::with_capacity(req.headers.len()); for header in req.headers.iter() { @@ -1305,6 +1316,13 @@ fn parse_request_httparse(bytes: &[u8]) -> Option> { ws_key = Some(value); } + // Compression: parse Accept-Encoding + if accepted_encoding != compress::AcceptedEncoding::Brotli + && name.eq_ignore_ascii_case("accept-encoding") + { + accepted_encoding = compress::parse_accept_encoding(value.as_bytes()); + } + headers.push((name, value)); } @@ -1321,6 +1339,7 @@ fn parse_request_httparse(bytes: &[u8]) -> Option> { cookie_header, is_websocket_upgrade, ws_key, + accepted_encoding, }) } @@ -1344,6 +1363,7 @@ fn parse_hot_root_request( let header_end = find_header_end(bytes)?; let mut keep_alive = keep_alive; let mut has_body = false; + let mut accepted_encoding = compress::AcceptedEncoding::Identity; let mut line_start = bytes.iter().position(|b| *b == b'\n')? + 1; while line_start + 2 <= header_end { @@ -1383,6 +1403,12 @@ fn parse_hot_root_request( if !value.is_empty() && !value.eq_ignore_ascii_case(b"identity") { has_body = true; } + } else if accepted_encoding != compress::AcceptedEncoding::Brotli + && line.len() >= 17 + && line[..16].eq_ignore_ascii_case(b"accept-encoding:") + { + let value = trim_ascii_spaces(&line[16..]); + accepted_encoding = compress::parse_accept_encoding(value); } line_start = next_end + 2; @@ -1401,6 +1427,7 @@ fn parse_hot_root_request( cookie_header: None, // Hot path doesn't parse cookies is_websocket_upgrade: false, ws_key: None, + accepted_encoding, }) } @@ -1425,6 +1452,8 @@ fn build_dispatch_decision_zero_copy( parsed: &ParsedRequest<'_>, body: &[u8], peer_ip: Option<&str>, + accepted_encoding: compress::AcceptedEncoding, + compression_config: Option<&compress::CompressionConfig>, ) -> Result { let method_code = method_code_from_bytes(parsed.method).unwrap_or(UNKNOWN_METHOD_CODE); let path_cow = String::from_utf8_lossy(parsed.path); @@ -1465,21 +1494,22 @@ fn 
build_dispatch_decision_zero_copy( let mut cache_insertion = None; if let Some(cfg) = matched_route.cache_config { - let key = crate::router::interpolate_cache_key(cfg, parsed, url_str, matched_route.param_names, &matched_route.param_values); + let base_key = crate::router::interpolate_cache_key(cfg, parsed, url_str, matched_route.param_names, &matched_route.param_values); + let key = vary_cache_key_by_encoding(base_key, accepted_encoding); if let Some(cached_response) = crate::router::get_cached_response(matched_route.cache_namespace, key, parsed.keep_alive) { return Ok(DispatchDecision::CachedResponse(cached_response)); } cache_insertion = Some((matched_route.cache_namespace, key, cfg.max_entries, cfg.ttl_secs)); } else { // ncache lookup: check if a previous res.ncache() call cached this response - let ncache_key = compute_ncache_key(parsed.target); + let ncache_key = vary_cache_key_by_encoding(compute_ncache_key(parsed.target), accepted_encoding); if let Some(cached_response) = crate::router::get_cached_response(matched_route.cache_namespace, ncache_key, parsed.keep_alive) { return Ok(DispatchDecision::CachedResponse(cached_response)); } } if let Some(response) = - build_dynamic_fast_path_response(&matched_route, url_str, &parsed.headers, parsed.keep_alive)? + build_dynamic_fast_path_response(&matched_route, url_str, &parsed.headers, parsed.keep_alive, accepted_encoding, compression_config)? 
{ return Ok(DispatchDecision::SpecializedResponse(response)); }; @@ -1513,6 +1543,7 @@ fn build_dispatch_decision_owned( headers: &[(String, String)], body: &[u8], peer_ip: Option<&str>, + accepted_encoding: compress::AcceptedEncoding, ) -> Result { let method_code = method_code_from_bytes(method).unwrap_or(UNKNOWN_METHOD_CODE); @@ -1573,12 +1604,14 @@ fn build_dispatch_decision_owned( cookie_header: None, is_websocket_upgrade: false, ws_key: None, + accepted_encoding: compress::AcceptedEncoding::Identity, }; - let key = crate::router::interpolate_cache_key(cfg, &mock_parsed, url_str, matched_route.param_names, &matched_route.param_values); + let base_key = crate::router::interpolate_cache_key(cfg, &mock_parsed, url_str, matched_route.param_names, &matched_route.param_values); + let key = vary_cache_key_by_encoding(base_key, accepted_encoding); cache_insertion = Some((matched_route.cache_namespace, key, cfg.max_entries, cfg.ttl_secs)); } else { // ncache lookup: check if a previous res.ncache() call cached this response - let ncache_key = compute_ncache_key(target); + let ncache_key = vary_cache_key_by_encoding(compute_ncache_key(target), accepted_encoding); if let Some(cached_response) = crate::router::get_cached_response(matched_route.cache_namespace, ncache_key, false) { return Ok(DispatchDecision::CachedResponse(cached_response)); } @@ -1782,6 +1815,8 @@ fn build_dynamic_fast_path_response( url: &str, headers: &[(&str, &str)], keep_alive: bool, + encoding: compress::AcceptedEncoding, + compression_config: Option<&compress::CompressionConfig>, ) -> Result>> { let Some(fast_path) = matched_route.fast_path else { return Ok(None); @@ -1802,6 +1837,8 @@ fn build_dynamic_fast_path_response( fast_path.headers.as_ref(), &body, keep_alive, + encoding, + compression_config, ))) } @@ -2085,14 +2122,45 @@ fn build_response_bytes_fast( headers: &[(Box, Box)], body: &[u8], keep_alive: bool, + encoding: compress::AcceptedEncoding, + compression_config: 
Option<&compress::CompressionConfig>, ) -> Vec { + // ── Scan headers for content-type / content-encoding ── + let mut content_type: Option<&[u8]> = None; + let mut has_content_encoding = false; + for (name, value) in headers { + if name.eq_ignore_ascii_case("content-type") { + content_type = Some(value.as_bytes()); + } else if name.eq_ignore_ascii_case("content-encoding") { + has_content_encoding = true; + } + } + + // ── Attempt compression ── + let compressed = compression_config.and_then(|config| { + compress::should_compress(config, encoding, body.len(), content_type, has_content_encoding) + .and_then(|enc| compress::compress_body(body, enc, config, content_type).map(|data| (data, enc))) + }); + + let (final_body, applied_encoding): (&[u8], Option) = + match &compressed { + Some((data, enc)) => (data.as_slice(), Some(*enc)), + None => (body, None), + }; + + // ── Build HTTP response ── let reason = status_reason(status); let connection = if keep_alive { "keep-alive" } else { "close" }; - let body_len = body.len(); + let body_len = final_body.len(); let mut total_size = 9 + 3 + 1 + reason.len() + 2 + 16 + count_digits(body_len) + 2 + 12 + connection.len() + 2; + if applied_encoding.is_some() { + // "content-encoding: br\r\nvary: accept-encoding\r\n" worst case ~50 bytes + total_size += 50; + } + for (name, value) in headers { if name.eq_ignore_ascii_case("content-length") || name.eq_ignore_ascii_case("connection") { continue; @@ -2122,6 +2190,13 @@ fn build_response_bytes_fast( output.extend_from_slice(connection.as_bytes()); output.extend_from_slice(b"\r\n"); + // Compression headers + if let Some(enc) = applied_encoding { + output.extend_from_slice(b"content-encoding: "); + output.extend_from_slice(compress::encoding_header_value(enc)); + output.extend_from_slice(b"\r\nvary: accept-encoding\r\n"); + } + for (name, value) in headers { if name.eq_ignore_ascii_case("content-length") || name.eq_ignore_ascii_case("connection") { continue; @@ -2140,7 +2215,7 @@ fn 
build_response_bytes_fast( } output.extend_from_slice(b"\r\n"); - output.extend_from_slice(body); + output.extend_from_slice(final_body); output } @@ -2150,14 +2225,34 @@ async fn write_exact_static_response( stream: &mut S, static_route: &ExactStaticRoute, keep_alive: bool, + encoding: compress::AcceptedEncoding, ) -> Result<()> where S: AsyncWriteRent + Unpin, { - let response = if keep_alive { - static_route.keep_alive_response.clone() - } else { - static_route.close_response.clone() + let response = match (encoding, keep_alive) { + (compress::AcceptedEncoding::Brotli, true) => { + static_route.keep_alive_response_br.clone() + .unwrap_or_else(|| static_route.keep_alive_response.clone()) + } + (compress::AcceptedEncoding::Brotli, false) => { + static_route.close_response_br.clone() + .unwrap_or_else(|| static_route.close_response.clone()) + } + (compress::AcceptedEncoding::Gzip, true) => { + static_route.keep_alive_response_gzip.clone() + .unwrap_or_else(|| static_route.keep_alive_response.clone()) + } + (compress::AcceptedEncoding::Gzip, false) => { + static_route.close_response_gzip.clone() + .unwrap_or_else(|| static_route.close_response.clone()) + } + (compress::AcceptedEncoding::Identity, true) => { + static_route.keep_alive_response.clone() + } + (compress::AcceptedEncoding::Identity, false) => { + static_route.close_response.clone() + } }; let (write_result, _) = stream.write_all(response).await; @@ -2256,6 +2351,17 @@ fn compute_ncache_key(url_bytes: &[u8]) -> u64 { hasher.finish() } +/// Mix accepted encoding into a cache key so that identity/br/gzip +/// requests are stored and looked up independently (Vary: Accept-Encoding). 
+#[inline] +fn vary_cache_key_by_encoding(key: u64, encoding: compress::AcceptedEncoding) -> u64 { + match encoding { + compress::AcceptedEncoding::Identity => key, + compress::AcceptedEncoding::Gzip => key ^ 0x9E3779B97F4A7C15, + compress::AcceptedEncoding::Brotli => key ^ 0x517CC1B727220A95, + } +} + // ─── Session Trailer Extraction ──────── // // Extracts session write instructions from the response envelope trailer. @@ -2377,6 +2483,8 @@ async fn write_dynamic_dispatch_response( session_store: Option<&session::SessionStore>, session_id: Option<[u8; session::SESSION_ID_BYTES]>, is_new_session: bool, + accepted_encoding: compress::AcceptedEncoding, + compression_config: Option<&compress::CompressionConfig>, ) -> Result<()> where S: AsyncWriteRent + Unpin, @@ -2491,7 +2599,7 @@ where return Ok(()); } - match build_http_response_from_dispatch(response.as_ref(), keep_alive) { + match build_http_response_from_dispatch(response.as_ref(), keep_alive, accepted_encoding, compression_config) { Ok(mut http_response) => { if let Some((cache_namespace, cache_key, max_entries, ttl_secs)) = cache_insertion { // Route-level cache insertion (takes precedence over ncache) @@ -2517,7 +2625,7 @@ where if let Some((ncache_ttl, ncache_max_entries)) = extract_ncache_trailer(response.as_ref()) { if ncache_ttl > 0 { if let Some(cache_namespace) = cache_namespace { - let ncache_key = compute_ncache_key(url_bytes); + let ncache_key = vary_cache_key_by_encoding(compute_ncache_key(url_bytes), accepted_encoding); let response_bytes_close: bytes::Bytes = if !keep_alive { http_response.clone().into() @@ -2628,17 +2736,66 @@ where /// Build HTTP response bytes directly from the binary dispatch envelope, /// avoiding all intermediate String/Bytes allocations. 
-fn build_http_response_from_dispatch(dispatch_bytes: &[u8], keep_alive: bool) -> Result<Vec<u8>> {
+fn build_http_response_from_dispatch(
+    dispatch_bytes: &[u8],
+    keep_alive: bool,
+    encoding: compress::AcceptedEncoding,
+    compression_config: Option<&compress::CompressionConfig>,
+) -> Result<Vec<u8>> {
     let mut offset = 0usize;
 
     let status = read_u16(dispatch_bytes, &mut offset)?;
     let header_count = read_u16(dispatch_bytes, &mut offset)? as usize;
     let body_length = read_u32(dispatch_bytes, &mut offset)? as usize;
 
+    // ── Parse headers (collect refs, note content-type / content-encoding) ──
+    let mut headers: Vec<(&[u8], &[u8])> = Vec::with_capacity(header_count);
+    let mut content_type: Option<&[u8]> = None;
+    let mut has_content_encoding = false;
+
+    for _ in 0..header_count {
+        let name_len = read_u8(dispatch_bytes, &mut offset)? as usize;
+        let value_len = read_u16(dispatch_bytes, &mut offset)? as usize;
+
+        if offset + name_len + value_len > dispatch_bytes.len() {
+            return Err(anyhow!("response envelope truncated"));
+        }
+
+        let name_bytes = &dispatch_bytes[offset..offset + name_len];
+        offset += name_len;
+        let value_bytes = &dispatch_bytes[offset..offset + value_len];
+        offset += value_len;
+
+        if name_bytes.eq_ignore_ascii_case(b"content-type") {
+            content_type = Some(value_bytes);
+        } else if name_bytes.eq_ignore_ascii_case(b"content-encoding") {
+            has_content_encoding = true;
+        }
+
+        headers.push((name_bytes, value_bytes));
+    }
+
+    // ── Extract body ──
+    if offset + body_length > dispatch_bytes.len() {
+        return Err(anyhow!("response body truncated"));
+    }
+    let body = &dispatch_bytes[offset..offset + body_length];
+
+    // ── Attempt compression ──
+    let compressed = compression_config.and_then(|config| {
+        compress::should_compress(config, encoding, body.len(), content_type, has_content_encoding)
+            .and_then(|enc| compress::compress_body(body, enc, config, content_type).map(|data| (data, enc)))
+    });
+
+    let (final_body, applied_encoding): (&[u8], Option<compress::AcceptedEncoding>) =
+        match 
&compressed { + Some((data, enc)) => (data.as_slice(), Some(*enc)), + None => (body, None), + }; + + // ── Assemble HTTP response ── let reason = status_reason(status); let connection = if keep_alive { "keep-alive" } else { "close" }; - - // Conservative estimate: framing overhead + all dispatch bytes - let mut output = Vec::with_capacity(dispatch_bytes.len() + 128); + let mut output = Vec::with_capacity(final_body.len() + 128); // Status line output.extend_from_slice(b"HTTP/1.1 "); @@ -2647,28 +2804,23 @@ fn build_http_response_from_dispatch(dispatch_bytes: &[u8], keep_alive: bool) -> output.extend_from_slice(reason.as_bytes()); output.extend_from_slice(b"\r\n"); - // Mandatory headers + // Content-Length (uses final — possibly compressed — body size) output.extend_from_slice(b"content-length: "); - write_usize(&mut output, body_length); + write_usize(&mut output, final_body.len()); output.extend_from_slice(b"\r\n"); output.extend_from_slice(b"connection: "); output.extend_from_slice(connection.as_bytes()); output.extend_from_slice(b"\r\n"); - // User headers — read directly from binary without String allocation - for _ in 0..header_count { - let name_len = read_u8(dispatch_bytes, &mut offset)? as usize; - let value_len = read_u16(dispatch_bytes, &mut offset)? 
as usize;
-
-        if offset + name_len + value_len > dispatch_bytes.len() {
-            return Err(anyhow!("response envelope truncated"));
-        }
-
-        let name_bytes = &dispatch_bytes[offset..offset + name_len];
-        offset += name_len;
-        let value_bytes = &dispatch_bytes[offset..offset + value_len];
-        offset += value_len;
+    // Compression headers
+    if let Some(enc) = applied_encoding {
+        output.extend_from_slice(b"content-encoding: ");
+        output.extend_from_slice(compress::encoding_header_value(enc));
+        output.extend_from_slice(b"\r\nvary: accept-encoding\r\n");
+    }
 
+    // User headers
+    for (name_bytes, value_bytes) in &headers {
         // Skip headers we already wrote
         if name_bytes.eq_ignore_ascii_case(b"content-length")
             || name_bytes.eq_ignore_ascii_case(b"connection")
@@ -2690,12 +2842,7 @@ fn build_http_response_from_dispatch(dispatch_bytes: &[u8], keep_alive: bool) ->
     }
 
     output.extend_from_slice(b"\r\n");
-
-    // Body
-    if offset + body_length > dispatch_bytes.len() {
-        return Err(anyhow!("response body truncated"));
-    }
-    output.extend_from_slice(&dispatch_bytes[offset..offset + body_length]);
+    output.extend_from_slice(final_body);
 
     Ok(output)
 }
diff --git a/rsrc/src/manifest.rs b/rsrc/src/manifest.rs
index b1814ab..cbe5878 100644
--- a/rsrc/src/manifest.rs
+++ b/rsrc/src/manifest.rs
@@ -1,6 +1,7 @@
 use std::collections::HashMap;
 
 use serde::Deserialize;
+use serde::de;
 
 #[derive(Debug, Clone, Deserialize)]
 #[serde(rename_all = "camelCase")]
@@ -15,6 +16,8 @@ pub struct ManifestInput {
     pub ws_routes: Vec<WsRouteInput>,
     #[serde(default)]
     pub session: Option<SessionConfigInput>,
+    #[serde(default)]
+    pub compression: Option<CompressionConfigInput>,
 }
 
 #[derive(Debug, Clone, Deserialize)]
@@ -137,3 +140,39 @@ pub struct WsRouteInput {
     pub path: String,
     pub handler_id: u32,
 }
+
+#[derive(Debug, Clone, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct CompressionConfigInput {
+    #[serde(default = "default_true")]
+    pub enabled: bool,
+    #[serde(default = "default_min_size")]
+    pub min_size: usize,
+    #[serde(default = "default_brotli_quality")]
+    pub brotli_quality: u32,
+    #[serde(default = "default_gzip_level")]
+    pub gzip_level: u32,
+    #[serde(default, deserialize_with = "deserialize_quality_map")]
+    pub quality_map: Vec<ContentTypeQualityInput>,
+}
+
+#[derive(Debug, Clone, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct ContentTypeQualityInput {
+    /// Content-type pattern, e.g. "image/svg+xml", "application/json", "text/*"
+    pub pattern: String,
+    pub brotli_quality: Option<u32>,
+    pub gzip_level: Option<u32>,
+}
+
+fn deserialize_quality_map<'de, D>(deserializer: D) -> Result<Vec<ContentTypeQualityInput>, D::Error>
+where
+    D: de::Deserializer<'de>,
+{
+    let opt: Option<Vec<ContentTypeQualityInput>> = Option::deserialize(deserializer)?;
+    Ok(opt.unwrap_or_default())
+}
+
+fn default_min_size() -> usize { 1024 }
+fn default_brotli_quality() -> u32 { 4 }
+fn default_gzip_level() -> u32 { 6 }
diff --git a/rsrc/src/router.rs b/rsrc/src/router.rs
index 6f170e1..4193e2f 100644
--- a/rsrc/src/router.rs
+++ b/rsrc/src/router.rs
@@ -35,6 +35,10 @@ pub struct Router {
 pub struct ExactStaticRoute {
     pub close_response: Bytes,
     pub keep_alive_response: Bytes,
+    pub close_response_br: Option<Bytes>,
+    pub keep_alive_response_br: Option<Bytes>,
+    pub close_response_gzip: Option<Bytes>,
+    pub keep_alive_response_gzip: Option<Bytes>,
 }
 
 pub struct MatchedRoute<'a, 'b> {
@@ -207,7 +211,7 @@ impl RadixNode {
 // ─── Router Implementation ──────────────
 
 impl Router {
-    pub fn from_manifest(manifest: &ManifestInput) -> Result<Self> {
+    pub fn from_manifest(manifest: &ManifestInput, compression_config: Option<&crate::compress::CompressionConfig>) -> Result<Self> {
         let mut exact_get_root = None;
         let mut dynamic_exact_routes = HashMap::new();
         let mut exact_static_routes = HashMap::new();
@@ -221,7 +225,7 @@ impl Router {
                 continue;
             };
 
-            let exact_route = build_exact_static_route(static_response);
+            let exact_route = build_exact_static_route(static_response, compression_config);
 
             if method_key == MethodKey::Get && path == "/" {
                 exact_get_root = Some(exact_route);
@@ -242,18 +246,9 @@
                 continue;
             };
 
-            let exact_route = ExactStaticRoute
{ - close_response: Bytes::from(build_close_response( - spec.status, - &spec.headers, - &spec.body, - )), - keep_alive_response: Bytes::from(build_keep_alive_response( - spec.status, - &spec.headers, - &spec.body, - )), - }; + let exact_route = build_exact_static_route_from_spec( + spec.status, &spec.headers, &spec.body, compression_config, + ); if method_key == MethodKey::Get && path == "/" { exact_get_root = Some(exact_route); @@ -488,19 +483,162 @@ fn compile_dynamic_route_spec(route: &RouteInput, middlewares: &[MiddlewareInput } } -fn build_exact_static_route(static_response: &StaticResponseInput) -> ExactStaticRoute { +fn build_exact_static_route( + static_response: &StaticResponseInput, + compression_config: Option<&crate::compress::CompressionConfig>, +) -> ExactStaticRoute { let body = static_response.body.as_bytes(); + + let identity_close = build_close_response(static_response.status, &static_response.headers, body); + let identity_ka = build_keep_alive_response(static_response.status, &static_response.headers, body); + + // Pre-compress if compression is configured and the body qualifies + let content_type: Option<&[u8]> = static_response.headers.get("content-type").map(|v| v.as_bytes()); + let has_content_encoding = static_response.headers.contains_key("content-encoding"); + + let (br_close, br_ka, gz_close, gz_ka) = match compression_config { + Some(config) if !has_content_encoding => { + let br = crate::compress::should_compress( + config, crate::compress::AcceptedEncoding::Brotli, body.len(), content_type, false, + ).and_then(|_| crate::compress::compress_body(body, crate::compress::AcceptedEncoding::Brotli, config, content_type)); + + let gz = crate::compress::should_compress( + config, crate::compress::AcceptedEncoding::Gzip, body.len(), content_type, false, + ).and_then(|_| crate::compress::compress_body(body, crate::compress::AcceptedEncoding::Gzip, config, content_type)); + + let br_variants = br.map(|compressed_body| { + let close = 
build_compressed_response_bytes(
+                    static_response.status, &static_response.headers, &compressed_body, false,
+                    crate::compress::AcceptedEncoding::Brotli,
+                );
+                let ka = build_compressed_response_bytes(
+                    static_response.status, &static_response.headers, &compressed_body, true,
+                    crate::compress::AcceptedEncoding::Brotli,
+                );
+                (Bytes::from(close), Bytes::from(ka))
+            });
+
+            let gz_variants = gz.map(|compressed_body| {
+                let close = build_compressed_response_bytes(
+                    static_response.status, &static_response.headers, &compressed_body, false,
+                    crate::compress::AcceptedEncoding::Gzip,
+                );
+                let ka = build_compressed_response_bytes(
+                    static_response.status, &static_response.headers, &compressed_body, true,
+                    crate::compress::AcceptedEncoding::Gzip,
+                );
+                (Bytes::from(close), Bytes::from(ka))
+            });
+
+            (
+                br_variants.as_ref().map(|(c, _)| c.clone()),
+                br_variants.map(|(_, k)| k),
+                gz_variants.as_ref().map(|(c, _)| c.clone()),
+                gz_variants.map(|(_, k)| k),
+            )
+        }
+        _ => (None, None, None, None),
+    };
+
+    ExactStaticRoute {
+        close_response: Bytes::from(identity_close),
+        keep_alive_response: Bytes::from(identity_ka),
+        close_response_br: br_close,
+        keep_alive_response_br: br_ka,
+        close_response_gzip: gz_close,
+        keep_alive_response_gzip: gz_ka,
+    }
+}
+
+fn build_compressed_response_bytes(
+    status: u16,
+    headers: &HashMap<String, String>,
+    compressed_body: &[u8],
+    keep_alive: bool,
+    encoding: crate::compress::AcceptedEncoding,
+) -> Vec<u8> {
+    let mut response = format!(
+        "HTTP/1.1 {} {}\r\ncontent-length: {}\r\nconnection: {}\r\ncontent-encoding: {}\r\nvary: accept-encoding\r\n",
+        status,
+        status_reason(status),
+        compressed_body.len(),
+        if keep_alive { "keep-alive" } else { "close" },
+        std::str::from_utf8(crate::compress::encoding_header_value(encoding)).unwrap_or("identity"),
+    )
+    .into_bytes();
+
+    for (name, value) in headers {
+        if name.eq_ignore_ascii_case("content-length")
+            || name.eq_ignore_ascii_case("connection")
+            || 
name.eq_ignore_ascii_case("content-encoding")
+        {
+            continue;
+        }
+        if name.contains('\r') || name.contains('\n') || value.contains('\r') || value.contains('\n') {
+            continue;
+        }
+        response.extend_from_slice(name.as_bytes());
+        response.extend_from_slice(b": ");
+        response.extend_from_slice(value.as_bytes());
+        response.extend_from_slice(b"\r\n");
+    }
+
+    response.extend_from_slice(b"\r\n");
+    response.extend_from_slice(compressed_body);
+    response
+}
+
+fn build_exact_static_route_from_spec(
+    status: u16,
+    headers: &HashMap<String, String>,
+    body: &[u8],
+    compression_config: Option<&crate::compress::CompressionConfig>,
+) -> ExactStaticRoute {
+    let identity_close = build_response_bytes(status, headers, body, false);
+    let identity_ka = build_response_bytes(status, headers, body, true);
+
+    let content_type: Option<&[u8]> = headers.get("content-type").map(|v| v.as_bytes());
+    let has_content_encoding = headers.contains_key("content-encoding");
+
+    let (br_close, br_ka, gz_close, gz_ka) = match compression_config {
+        Some(config) if !has_content_encoding => {
+            let br = crate::compress::should_compress(
+                config, crate::compress::AcceptedEncoding::Brotli, body.len(), content_type, false,
+            ).and_then(|_| crate::compress::compress_body(body, crate::compress::AcceptedEncoding::Brotli, config, content_type));
+
+            let gz = crate::compress::should_compress(
+                config, crate::compress::AcceptedEncoding::Gzip, body.len(), content_type, false,
+            ).and_then(|_| crate::compress::compress_body(body, crate::compress::AcceptedEncoding::Gzip, config, content_type));
+
+            let br_variants = br.map(|compressed_body| {
+                let close = build_compressed_response_bytes(status, headers, &compressed_body, false, crate::compress::AcceptedEncoding::Brotli);
+                let ka = build_compressed_response_bytes(status, headers, &compressed_body, true, crate::compress::AcceptedEncoding::Brotli);
+                (Bytes::from(close), Bytes::from(ka))
+            });
+
+            let gz_variants = gz.map(|compressed_body| {
+                let close = 
build_compressed_response_bytes(status, headers, &compressed_body, false, crate::compress::AcceptedEncoding::Gzip); + let ka = build_compressed_response_bytes(status, headers, &compressed_body, true, crate::compress::AcceptedEncoding::Gzip); + (Bytes::from(close), Bytes::from(ka)) + }); + + ( + br_variants.as_ref().map(|(c, _)| c.clone()), + br_variants.map(|(_, k)| k), + gz_variants.as_ref().map(|(c, _)| c.clone()), + gz_variants.map(|(_, k)| k), + ) + } + _ => (None, None, None, None), + }; + ExactStaticRoute { - close_response: Bytes::from(build_close_response( - static_response.status, - &static_response.headers, - body, - )), - keep_alive_response: Bytes::from(build_keep_alive_response( - static_response.status, - &static_response.headers, - body, - )), + close_response: Bytes::from(identity_close), + keep_alive_response: Bytes::from(identity_ka), + close_response_br: br_close, + keep_alive_response_br: br_ka, + close_response_gzip: gz_close, + keep_alive_response_gzip: gz_ka, } } diff --git a/src/compress.js b/src/compress.js new file mode 100644 index 0000000..c680fe5 --- /dev/null +++ b/src/compress.js @@ -0,0 +1,37 @@ +/** + * http-native response compression middleware. + * + * All compression happens in the Rust native layer — this middleware + * is a no-op that carries configuration to the manifest. + * + * Usage: + * import { compress } from "@http-native/core/compress"; + * app.use(compress()); + * app.use(compress({ minSize: 512, brotliQuality: 6 })); + */ + +/** + * Create a compression middleware. 
+ * + * @param {Object} [options] + * @param {number} [options.minSize=1024] - Minimum body size in bytes to compress + * @param {number} [options.brotliQuality=4] - Brotli quality (0-11, default 4) + * @param {number} [options.gzipLevel=6] - Gzip compression level (0-9, default 6) + * @param {Array} [options.qualityMap] - Per-content-type quality overrides + * @returns {Function} Middleware function + */ +export function compress(options = {}) { + const config = { + minSize: options.minSize ?? 1024, + brotliQuality: options.brotliQuality ?? 4, + gzipLevel: options.gzipLevel ?? 6, + qualityMap: options.qualityMap ?? [], + }; + + function compressionMiddleware(req, res, next) { + return next(); + } + + compressionMiddleware._compressionConfig = config; + return compressionMiddleware; +} diff --git a/src/index.d.ts b/src/index.d.ts index 86b924f..5c7cd09 100644 --- a/src/index.d.ts +++ b/src/index.d.ts @@ -419,6 +419,31 @@ export interface CorsOptions { /** Create a CORS middleware */ export function cors(options?: CorsOptions): Middleware; +// ─── Compression Types ──────────────────── + +export interface ContentTypeQualityOption { + /** Content-type pattern, e.g. 
"image/svg+xml", "application/json", "text/*" */ + pattern: string; + /** Brotli quality override for this content-type (0-11) */ + brotliQuality?: number; + /** Gzip level override for this content-type (0-9) */ + gzipLevel?: number; +} + +export interface CompressOptions { + /** Minimum body size in bytes to compress (default 1024) */ + minSize?: number; + /** Brotli quality level 0-11 (default 4) */ + brotliQuality?: number; + /** Gzip compression level 0-9 (default 6) */ + gzipLevel?: number; + /** Per-content-type quality overrides, checked in order */ + qualityMap?: ContentTypeQualityOption[]; +} + +/** Create a compression middleware (Brotli + Gzip, handled in native layer) */ +export function compress(options?: CompressOptions): Middleware; + // ─── Validation Types ─────────────────── export interface ValidationSchema { diff --git a/src/index.js b/src/index.js index 540dc6d..eee5b54 100644 --- a/src/index.js +++ b/src/index.js @@ -1657,6 +1657,20 @@ function buildCompiledApplication(app, normalizedOptions) { }; } + const compressionMiddleware = app._middlewares.find( + (middleware) => middleware.handler._compressionConfig, + ); + if (compressionMiddleware) { + const cfg = compressionMiddleware.handler._compressionConfig; + manifest.compression = { + enabled: true, + minSize: cfg.minSize, + brotliQuality: cfg.brotliQuality, + gzipLevel: cfg.gzipLevel, + qualityMap: cfg.qualityMap, + }; + } + const runtimeOptimizer = createRuntimeOptimizer( compiledRoutes, compiledMiddlewares,