Implement Content-Encoding aware query decompression support #995

Open · wants to merge 6 commits into master

6 changes: 6 additions & 0 deletions Cargo.toml
@@ -26,6 +26,7 @@ string_number = []
tokio-sync = ["tokio"]
tracing = ["tracinglib", "tracing-futures"]
unblock = ["blocking"]
compression = ["brotli", "flate2", "zstd"]

[dependencies]
async-graphql-derive = { path = "derive", version = "4.0.13" }
@@ -103,6 +104,11 @@ serde_cbor = { version = "0.11.1", optional = true }
sha2 = { version = "0.10.2", optional = true }
zxcvbn = { version = "2.1.2", optional = true }

# compression feature
brotli = { version = "3.3.4", optional = true }
flate2 = { version = "1.0.24", optional = true }
zstd = { version = "0.11.2", optional = true }

[dev-dependencies]
futures-channel = "0.3.13"
tokio = { version = "1.4.0", features = [
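
The three optional crates above back the new `compression` feature. The decoding itself happens in async-graphql's shared `http` module, which is not part of the visible diff, so the snippet below is only a hypothetical sketch of how a Content-Encoding value could be mapped onto a decompressing reader with these dependencies; the function name `decode_body` and the use of synchronous `std::io::Read` (rather than the async readers the crate actually works with) are assumptions made to keep the sketch self-contained.

```rust
use std::io::Read;

// Hypothetical sketch: pick a decompressing reader based on Content-Encoding.
// Mapping "deflate" to a zlib decoder mirrors the test helpers in this PR,
// which compress with flate2::write::ZlibEncoder.
pub fn decode_body<R: Read + 'static>(
    content_encoding: Option<&str>,
    body: R,
) -> std::io::Result<Box<dyn Read>> {
    Ok(match content_encoding {
        Some("gzip") => Box::new(flate2::read::GzDecoder::new(body)),
        Some("deflate") => Box::new(flate2::read::ZlibDecoder::new(body)),
        // 4096 matches the buffer size used by the test helpers below.
        Some("br") => Box::new(brotli::Decompressor::new(body, 4096)),
        Some("zstd") => Box::new(zstd::stream::read::Decoder::new(body)?),
        // Absent or unknown encodings are passed through untouched.
        _ => Box::new(body),
    })
}
```
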
4 changes: 4 additions & 0 deletions integrations/actix-web/Cargo.toml
@@ -31,6 +31,10 @@ cbor = ["serde_cbor"]
default = []

[dev-dependencies]
async-graphql = { path = "../..", version = "4.0.6", default-features = false, features = [ "compression" ] }
actix-rt = "2.6.0"
async-mutex = "1.4.0"
serde = { version = "1", features = ["derive"] }
brotli = { version = "3.3.4" }
flate2 = { version = "1.0.24" }
zstd = { version = "0.11.2" }
7 changes: 7 additions & 0 deletions integrations/actix-web/src/request.rs
@@ -84,6 +84,12 @@ impl FromRequest for GraphQLBatchRequest {
.and_then(|value| value.to_str().ok())
.map(|value| value.to_string());

let content_encoding = req
.headers()
.get(http::header::CONTENT_ENCODING)
.and_then(|value| value.to_str().ok())
.map(|value| value.to_string());

let (tx, rx) = async_channel::bounded(16);

// Payload is !Send so we create indirection with a channel
@@ -100,6 +106,7 @@
Ok(GraphQLBatchRequest(
async_graphql::http::receive_batch_body(
content_type,
content_encoding,
rx.map_err(|e| match e {
PayloadError::Incomplete(Some(e)) | PayloadError::Io(e) => e,
PayloadError::Incomplete(None) => {
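
With the Content-Encoding header now forwarded to `receive_batch_body`, a client only has to compress its POST body and label it accordingly. The following is a hedged, self-contained sketch of such a client; reqwest, tokio, and the address 127.0.0.1:8000 are assumptions for illustration, not dependencies or values taken from this PR.

```rust
use std::io::Write;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let query = r#"{"query":"{ add(a: 10, b: 20) }"}"#;

    // Gzip-compress the JSON body before sending it.
    let mut encoder =
        flate2::write::GzEncoder::new(Vec::new(), flate2::Compression::default());
    encoder.write_all(query.as_bytes())?;
    let compressed = encoder.finish()?;

    // Label the body so the server knows how to decode it.
    let resp = reqwest::Client::new()
        .post("http://127.0.0.1:8000/") // placeholder address
        .header("Content-Type", "application/json")
        .header("Content-Encoding", "gzip")
        .body(compressed)
        .send()
        .await?;

    println!("{}", resp.text().await?);
    Ok(())
}
```
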
45 changes: 45 additions & 0 deletions integrations/actix-web/tests/graphql.rs
@@ -266,3 +266,48 @@ async fn test_cbor() {
}
);
}

#[actix_rt::test]
async fn test_compression() {
let srv = test::init_service(
App::new()
.app_data(Data::new(Schema::new(
AddQueryRoot,
EmptyMutation,
EmptySubscription,
)))
.service(
web::resource("/")
.guard(guard::Post())
.to(gql_handle_schema::<AddQueryRoot, EmptyMutation, EmptySubscription>),
),
)
.await;

for &encoding in ContentEncoding::ALL {
let response = srv
.call(
test::TestRequest::with_uri("/")
.method(Method::POST)
.set_payload(compress_query(
r#"{"query":"{ add(a: 10, b: 20) }"}"#,
encoding,
))
.insert_header((actix_http::header::ACCEPT, "application/json"))
.insert_header((actix_web::http::header::CONTENT_ENCODING, encoding.header()))
.to_request(),
)
.await
.unwrap();

assert!(response.status().is_success(), "using {:?}", encoding);
let body = response.into_body();

assert_eq!(
actix_web::body::to_bytes(body).await.unwrap(),
json!({"data": {"add": 30}}).to_string().into_bytes(),
"using {:?}",
encoding
);
}
}
60 changes: 60 additions & 0 deletions integrations/actix-web/tests/test_utils.rs
@@ -1,3 +1,5 @@
use std::io::Write;

use actix_web::{web, HttpRequest, HttpResponse};
use async_graphql::{
http::{playground_source, GraphQLPlaygroundConfig},
@@ -89,3 +91,61 @@ pub async fn gql_handle_schema_with_header<T: ObjectType + 'static>(
}
schema.execute(request).await.into()
}

#[derive(Debug, Clone, Copy)]
pub enum ContentEncoding {
Gzip,
Deflate,
Br,
Zstd,
}

impl ContentEncoding {
pub const fn header(&self) -> &'static str {
match self {
ContentEncoding::Gzip => "gzip",
ContentEncoding::Deflate => "deflate",
ContentEncoding::Br => "br",
ContentEncoding::Zstd => "zstd",
}
}

pub const ALL: &'static [ContentEncoding] = &[
ContentEncoding::Gzip,
ContentEncoding::Deflate,
ContentEncoding::Br,
ContentEncoding::Zstd,
];
}

// #[cfg(feature = "compression")]
pub fn compress_query(data: impl AsRef<str>, algo: ContentEncoding) -> Vec<u8> {
match algo {
ContentEncoding::Gzip => {
let mut encoder =
flate2::write::GzEncoder::new(Vec::new(), flate2::Compression::default());
encoder.write_all(data.as_ref().as_bytes()).unwrap();
encoder.finish().unwrap()
}
ContentEncoding::Deflate => {
let mut encoder =
flate2::write::ZlibEncoder::new(Vec::new(), flate2::Compression::default());
encoder.write_all(data.as_ref().as_bytes()).unwrap();
encoder.finish().unwrap()
}
ContentEncoding::Br => {
let mut buff = Vec::new();
let mut encoder =
brotli::CompressorWriter::with_params(&mut buff, 4096, &Default::default());
encoder.write_all(data.as_ref().as_bytes()).unwrap();
encoder.flush().unwrap();
encoder.into_inner().to_vec()
}
ContentEncoding::Zstd => {
let mut buff = Vec::new();
let mut encoder = zstd::stream::Encoder::new(&mut buff, 9).unwrap();
encoder.write_all(data.as_ref().as_bytes()).unwrap();
encoder.finish().unwrap().to_vec()
}
}
}
9 changes: 9 additions & 0 deletions integrations/axum/src/extract.rs
@@ -120,6 +120,13 @@
.get(http::header::CONTENT_TYPE)
.and_then(|value| value.to_str().ok())
.map(ToString::to_string);

let content_encoding = req
.headers()
.get(http::header::CONTENT_ENCODING)
.and_then(|value| value.to_str().ok())
.map(ToString::to_string);

let body_stream = BodyStream::from_request(req)
.await
.map_err(|_| {
@@ -129,10 +136,12 @@
))
})?
.map_err(|err| std::io::Error::new(ErrorKind::Other, err.to_string()));

let body_reader = tokio_util::io::StreamReader::new(body_stream).compat();
Ok(Self(
async_graphql::http::receive_batch_body(
content_type,
content_encoding,
body_reader,
MultipartOptions::default(),
)
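
Because the axum extractor now reads Content-Encoding itself, a handler built on `async_graphql_axum::GraphQLRequest` should accept compressed bodies with no further changes. The sketch below follows the usual async-graphql-axum handler shape; the router layout and bind address are assumptions for illustration and are not taken from this PR.

```rust
use async_graphql::{EmptyMutation, EmptySubscription, Object, Schema};
use async_graphql_axum::{GraphQLRequest, GraphQLResponse};
use axum::{extract::Extension, routing::post, Router};

struct Query;

#[Object]
impl Query {
    /// Returns the sum of a and b
    async fn add(&self, a: i32, b: i32) -> i32 {
        a + b
    }
}

type AppSchema = Schema<Query, EmptyMutation, EmptySubscription>;

// The handler is unchanged by this PR; it simply benefits from the extractor
// decoding compressed bodies before the request reaches it.
async fn graphql_handler(
    Extension(schema): Extension<AppSchema>,
    req: GraphQLRequest,
) -> GraphQLResponse {
    schema.execute(req.into_inner()).await.into()
}

#[tokio::main]
async fn main() {
    let schema = Schema::new(Query, EmptyMutation, EmptySubscription);
    let app = Router::new()
        .route("/", post(graphql_handler))
        .layer(Extension(schema));

    axum::Server::bind(&"127.0.0.1:8000".parse().unwrap())
        .serve(app.into_make_service())
        .await
        .unwrap();
}
```
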
8 changes: 8 additions & 0 deletions integrations/poem/src/extractor.rs
@@ -76,9 +76,17 @@ impl<'a> FromRequest<'a> for GraphQLBatchRequest {
.get(header::CONTENT_TYPE)
.and_then(|value| value.to_str().ok())
.map(ToString::to_string);

let content_encoding = req
.headers()
.get(header::CONTENT_ENCODING)
.and_then(|value| value.to_str().ok())
.map(ToString::to_string);

Ok(Self(
async_graphql::http::receive_batch_body(
content_type,
content_encoding,
body.take()?.into_async_read().compat(),
MultipartOptions::default(),
)
1 change: 1 addition & 0 deletions integrations/rocket/src/lib.rs
@@ -63,6 +63,7 @@ impl<'r> FromData<'r> for GraphQLBatchRequest {

let request = async_graphql::http::receive_batch_body(
req.headers().get_one("Content-Type"),
req.headers().get_one("Content-Encoding"),
data.open(
req.limits()
.get("graphql")
4 changes: 4 additions & 0 deletions integrations/tide/Cargo.toml
@@ -28,10 +28,14 @@ tide = { version = "0.16.0", default-features = false, features = [
tide-websockets = { version = "0.4.0", optional = true }

[dev-dependencies]
async-graphql = { path = "../..", version = "4.0.6", default-features = false, features = [ "compression" ] }
# Surf lacks multipart support
async-std = { version = "1.9.0", features = ["attributes", "tokio1"] }
reqwest = { version = "0.11.2", default-features = false, features = [
"json",
"multipart",
] }
serde_json = "1.0.64"
brotli = { version = "3.3.4" }
flate2 = { version = "1.0.24" }
zstd = { version = "0.11.2" }
8 changes: 7 additions & 1 deletion integrations/tide/src/lib.rs
@@ -135,12 +135,18 @@ pub async fn receive_batch_request_opts<State: Clone + Send + Sync + 'static>(
request.query::<async_graphql::Request>().map(Into::into)
} else if request.method() == Method::Post {
let body = request.take_body();

let content_type = request
.header(headers::CONTENT_TYPE)
.and_then(|values| values.get(0))
.map(HeaderValue::as_str);

async_graphql::http::receive_batch_body(content_type, body, opts)
let content_encoding = request
.header(headers::CONTENT_ENCODING)
.and_then(|values| values.get(0))
.map(HeaderValue::as_str);

async_graphql::http::receive_batch_body(content_type, content_encoding, body, opts)
.await
.map_err(|e| {
tide::Error::new(
56 changes: 56 additions & 0 deletions integrations/tide/tests/graphql.rs
@@ -5,6 +5,9 @@ use std::io::Read;
use async_graphql::*;
use reqwest::{header, StatusCode};
use serde_json::json;
use test_utils::ContentEncoding;

use crate::test_utils::compress_query;

type Result<T> = std::result::Result<T, Box<dyn std::error::Error + Send + Sync>>;

@@ -215,3 +218,56 @@ async fn upload() -> Result<()> {

Ok(())
}

#[async_std::test]
async fn compression() -> Result<()> {
let listen_addr = "127.0.0.1:8081";

async_std::task::spawn(async move {
struct QueryRoot;
#[Object]
impl QueryRoot {
/// Returns the sum of a and b
async fn add(&self, a: i32, b: i32) -> i32 {
a + b
}
}

let schema = Schema::build(QueryRoot, EmptyMutation, EmptySubscription).finish();

let mut app = tide::new();
let endpoint = async_graphql_tide::graphql(schema);
app.at("/").post(endpoint.clone()).get(endpoint);
app.listen(listen_addr).await
});

test_utils::wait_server_ready().await;

let client = test_utils::client();

for &encoding in ContentEncoding::ALL {
let resp = client
.post(&format!("http://{}", listen_addr))
.header("Content-Type", "application/json")
.header("Content-Encoding", encoding.header())
.body(compress_query(
r#"{"query":"{ add(a: 10, b: 20) }"}"#,
encoding,
))
.send()
.await?;

assert_eq!(resp.status(), StatusCode::OK, "using {:?}", encoding);
let string = resp.text().await?;
println!("via post {}", string);

assert_eq!(
string,
json!({"data": {"add": 30}}).to_string(),
"using {:?}",
encoding
);
}

Ok(())
}
60 changes: 59 additions & 1 deletion integrations/tide/tests/test_utils.rs
@@ -1,4 +1,4 @@
use std::time::Duration;
use std::{io::Write, time::Duration};

use reqwest::Client;

@@ -9,3 +9,61 @@ pub fn client() -> Client {
pub async fn wait_server_ready() {
async_std::task::sleep(Duration::from_secs(1)).await;
}

#[derive(Debug, Clone, Copy)]
pub enum ContentEncoding {
Gzip,
Deflate,
Br,
Zstd,
}

impl ContentEncoding {
pub const fn header(&self) -> &'static str {
match self {
ContentEncoding::Gzip => "gzip",
ContentEncoding::Deflate => "deflate",
ContentEncoding::Br => "br",
ContentEncoding::Zstd => "zstd",
}
}

pub const ALL: &'static [ContentEncoding] = &[
ContentEncoding::Gzip,
ContentEncoding::Deflate,
ContentEncoding::Br,
ContentEncoding::Zstd,
];
}

// #[cfg(feature = "compression")]
pub fn compress_query(data: impl AsRef<str>, algo: ContentEncoding) -> Vec<u8> {
match algo {
ContentEncoding::Gzip => {
let mut encoder =
flate2::write::GzEncoder::new(Vec::new(), flate2::Compression::default());
encoder.write_all(data.as_ref().as_bytes()).unwrap();
encoder.finish().unwrap()
}
ContentEncoding::Deflate => {
let mut encoder =
flate2::write::ZlibEncoder::new(Vec::new(), flate2::Compression::default());
encoder.write_all(data.as_ref().as_bytes()).unwrap();
encoder.finish().unwrap()
}
ContentEncoding::Br => {
let mut buff = Vec::new();
let mut encoder =
brotli::CompressorWriter::with_params(&mut buff, 4096, &Default::default());
encoder.write_all(data.as_ref().as_bytes()).unwrap();
encoder.flush().unwrap();
encoder.into_inner().to_vec()
}
ContentEncoding::Zstd => {
let mut buff = Vec::new();
let mut encoder = zstd::stream::Encoder::new(&mut buff, 9).unwrap();
encoder.write_all(data.as_ref().as_bytes()).unwrap();
encoder.finish().unwrap().to_vec()
}
}
}