From 04190c4c69be3b70514dc4e79ce1bfd6bec877bb Mon Sep 17 00:00:00 2001 From: DreaMer963 Date: Sat, 11 Dec 2021 23:49:58 +0800 Subject: [PATCH] Fix: stack overflow --- datafusion/Cargo.toml | 1 + datafusion/src/error.rs | 5 + datafusion/src/lib.rs | 1 + datafusion/src/logical_plan/expr.rs | 245 ++++++++++++++-------------- datafusion/src/safe_stack.rs | 84 ++++++++++ datafusion/src/sql/planner.rs | 31 +++- datafusion/tests/sql.rs | 42 +++++ 7 files changed, 288 insertions(+), 121 deletions(-) create mode 100644 datafusion/src/safe_stack.rs diff --git a/datafusion/Cargo.toml b/datafusion/Cargo.toml index a8c075ea7a83..339fa4a6467a 100644 --- a/datafusion/Cargo.toml +++ b/datafusion/Cargo.toml @@ -77,6 +77,7 @@ rand = "0.8" avro-rs = { version = "0.13", features = ["snappy"], optional = true } num-traits = { version = "0.2", optional = true } pyo3 = { version = "0.14", optional = true } +stacker = { version = "0.1.14"} [dev-dependencies] criterion = "0.3" diff --git a/datafusion/src/error.rs b/datafusion/src/error.rs index 6b6bb1381111..6d4c0e646ba4 100644 --- a/datafusion/src/error.rs +++ b/datafusion/src/error.rs @@ -61,6 +61,8 @@ pub enum DataFusionError { /// Error returned during execution of the query. /// Examples include files not found, errors in parsing certain types. 
Execution(String), + /// Error returned if recursion exceeded limit + RecursionLimitErr(usize), } impl DataFusionError { @@ -129,6 +131,9 @@ impl Display for DataFusionError { DataFusionError::Execution(ref desc) => { write!(f, "Execution error: {}", desc) } + DataFusionError::RecursionLimitErr(ref desc) => { + write!(f, "Recursion exceeded limit: {}", desc) + } } } } diff --git a/datafusion/src/lib.rs b/datafusion/src/lib.rs index 4f4cd664fd41..1e5ccc20e1fb 100644 --- a/datafusion/src/lib.rs +++ b/datafusion/src/lib.rs @@ -232,6 +232,7 @@ pub use arrow; pub use parquet; pub(crate) mod field_util; +pub(crate) mod safe_stack; #[cfg(feature = "pyarrow")] mod pyarrow; diff --git a/datafusion/src/logical_plan/expr.rs b/datafusion/src/logical_plan/expr.rs index e7801e35f039..25c7fba18096 100644 --- a/datafusion/src/logical_plan/expr.rs +++ b/datafusion/src/logical_plan/expr.rs @@ -27,6 +27,7 @@ use crate::physical_plan::{ aggregates, expressions::binary_operator_data_type, functions, udf::ScalarUDF, window_functions, }; +use crate::safe_stack::maybe_grow; use crate::{physical_plan::udaf::AggregateUDF, scalar::ScalarValue}; use aggregates::{AccumulatorFunctionImplementation, StateTypeFunction}; use arrow::{compute::can_cast_types, datatypes::DataType}; @@ -379,7 +380,7 @@ impl Expr { /// This happens when e.g. the expression refers to a column that does not exist in the schema, or when /// the expression is incorrectly typed (e.g. `[utf8] + [bool]`). pub fn get_type(&self, schema: &DFSchema) -> Result { - match self { + maybe_grow(|| match self { Expr::Alias(expr, _) => expr.get_type(schema), Expr::Column(c) => Ok(schema.field_from_column(c)?.data_type().clone()), Expr::ScalarVariable(_) => Ok(DataType::Utf8), @@ -446,7 +447,7 @@ impl Expr { get_indexed_field(&data_type, key).map(|x| x.data_type().clone()) } - } + }) } /// Returns the nullability of the expression based on [arrow::datatypes::Schema]. 
@@ -456,7 +457,7 @@ impl Expr { /// This function errors when it is not possible to compute its nullability. /// This happens when the expression refers to a column that does not exist in the schema. pub fn nullable(&self, input_schema: &DFSchema) -> Result { - match self { + maybe_grow(|| match self { Expr::Alias(expr, _) => expr.nullable(input_schema), Expr::Column(c) => Ok(input_schema.field_from_column(c)?.is_nullable()), Expr::Literal(value) => Ok(value.is_null()), @@ -505,7 +506,7 @@ impl Expr { let data_type = expr.get_type(input_schema)?; get_indexed_field(&data_type, key).map(|x| x.is_nullable()) } - } + }) } /// Returns the name of this expression based on [crate::logical_plan::DFSchema]. @@ -702,7 +703,7 @@ impl Expr { }; // recurse (and cover all expression types) - let visitor = match self { + let visitor = maybe_grow(|| match self { Expr::Alias(expr, _) => expr.accept(visitor), Expr::Column(_) => Ok(visitor), Expr::ScalarVariable(..) => Ok(visitor), @@ -784,7 +785,7 @@ impl Expr { } Expr::Wildcard => Ok(visitor), Expr::GetIndexedField { ref expr, .. 
} => expr.accept(visitor), - }?; + })?; visitor.post_visit(self) } @@ -834,121 +835,125 @@ impl Expr { }; // recurse into all sub expressions(and cover all expression types) - let expr = match self { - Expr::Alias(expr, name) => Expr::Alias(rewrite_boxed(expr, rewriter)?, name), - Expr::Column(_) => self.clone(), - Expr::ScalarVariable(names) => Expr::ScalarVariable(names), - Expr::Literal(value) => Expr::Literal(value), - Expr::BinaryExpr { left, op, right } => Expr::BinaryExpr { - left: rewrite_boxed(left, rewriter)?, - op, - right: rewrite_boxed(right, rewriter)?, - }, - Expr::Not(expr) => Expr::Not(rewrite_boxed(expr, rewriter)?), - Expr::IsNotNull(expr) => Expr::IsNotNull(rewrite_boxed(expr, rewriter)?), - Expr::IsNull(expr) => Expr::IsNull(rewrite_boxed(expr, rewriter)?), - Expr::Negative(expr) => Expr::Negative(rewrite_boxed(expr, rewriter)?), - Expr::Between { - expr, - low, - high, - negated, - } => Expr::Between { - expr: rewrite_boxed(expr, rewriter)?, - low: rewrite_boxed(low, rewriter)?, - high: rewrite_boxed(high, rewriter)?, - negated, - }, - Expr::Case { - expr, - when_then_expr, - else_expr, - } => { - let expr = rewrite_option_box(expr, rewriter)?; - let when_then_expr = when_then_expr - .into_iter() - .map(|(when, then)| { - Ok(( - rewrite_boxed(when, rewriter)?, - rewrite_boxed(then, rewriter)?, - )) - }) - .collect::>>()?; - - let else_expr = rewrite_option_box(else_expr, rewriter)?; - + let expr = maybe_grow(|| { + Ok::<_, DataFusionError>(match self { + Expr::Alias(expr, name) => { + Expr::Alias(rewrite_boxed(expr, rewriter)?, name) + } + Expr::Column(_) => self.clone(), + Expr::ScalarVariable(names) => Expr::ScalarVariable(names), + Expr::Literal(value) => Expr::Literal(value), + Expr::BinaryExpr { left, op, right } => Expr::BinaryExpr { + left: rewrite_boxed(left, rewriter)?, + op, + right: rewrite_boxed(right, rewriter)?, + }, + Expr::Not(expr) => Expr::Not(rewrite_boxed(expr, rewriter)?), + Expr::IsNotNull(expr) => 
Expr::IsNotNull(rewrite_boxed(expr, rewriter)?), + Expr::IsNull(expr) => Expr::IsNull(rewrite_boxed(expr, rewriter)?), + Expr::Negative(expr) => Expr::Negative(rewrite_boxed(expr, rewriter)?), + Expr::Between { + expr, + low, + high, + negated, + } => Expr::Between { + expr: rewrite_boxed(expr, rewriter)?, + low: rewrite_boxed(low, rewriter)?, + high: rewrite_boxed(high, rewriter)?, + negated, + }, Expr::Case { expr, when_then_expr, else_expr, + } => { + let expr = rewrite_option_box(expr, rewriter)?; + let when_then_expr = when_then_expr + .into_iter() + .map(|(when, then)| { + Ok(( + rewrite_boxed(when, rewriter)?, + rewrite_boxed(then, rewriter)?, + )) + }) + .collect::>>()?; + + let else_expr = rewrite_option_box(else_expr, rewriter)?; + + Expr::Case { + expr, + when_then_expr, + else_expr, + } } - } - Expr::Cast { expr, data_type } => Expr::Cast { - expr: rewrite_boxed(expr, rewriter)?, - data_type, - }, - Expr::TryCast { expr, data_type } => Expr::TryCast { - expr: rewrite_boxed(expr, rewriter)?, - data_type, - }, - Expr::Sort { - expr, - asc, - nulls_first, - } => Expr::Sort { - expr: rewrite_boxed(expr, rewriter)?, - asc, - nulls_first, - }, - Expr::ScalarFunction { args, fun } => Expr::ScalarFunction { - args: rewrite_vec(args, rewriter)?, - fun, - }, - Expr::ScalarUDF { args, fun } => Expr::ScalarUDF { - args: rewrite_vec(args, rewriter)?, - fun, - }, - Expr::WindowFunction { - args, - fun, - partition_by, - order_by, - window_frame, - } => Expr::WindowFunction { - args: rewrite_vec(args, rewriter)?, - fun, - partition_by: rewrite_vec(partition_by, rewriter)?, - order_by: rewrite_vec(order_by, rewriter)?, - window_frame, - }, - Expr::AggregateFunction { - args, - fun, - distinct, - } => Expr::AggregateFunction { - args: rewrite_vec(args, rewriter)?, - fun, - distinct, - }, - Expr::AggregateUDF { args, fun } => Expr::AggregateUDF { - args: rewrite_vec(args, rewriter)?, - fun, - }, - Expr::InList { - expr, - list, - negated, - } => Expr::InList { - expr: 
rewrite_boxed(expr, rewriter)?, - list: rewrite_vec(list, rewriter)?, - negated, - }, - Expr::Wildcard => Expr::Wildcard, - Expr::GetIndexedField { expr, key } => Expr::GetIndexedField { - expr: rewrite_boxed(expr, rewriter)?, - key, - }, - }; + Expr::Cast { expr, data_type } => Expr::Cast { + expr: rewrite_boxed(expr, rewriter)?, + data_type, + }, + Expr::TryCast { expr, data_type } => Expr::TryCast { + expr: rewrite_boxed(expr, rewriter)?, + data_type, + }, + Expr::Sort { + expr, + asc, + nulls_first, + } => Expr::Sort { + expr: rewrite_boxed(expr, rewriter)?, + asc, + nulls_first, + }, + Expr::ScalarFunction { args, fun } => Expr::ScalarFunction { + args: rewrite_vec(args, rewriter)?, + fun, + }, + Expr::ScalarUDF { args, fun } => Expr::ScalarUDF { + args: rewrite_vec(args, rewriter)?, + fun, + }, + Expr::WindowFunction { + args, + fun, + partition_by, + order_by, + window_frame, + } => Expr::WindowFunction { + args: rewrite_vec(args, rewriter)?, + fun, + partition_by: rewrite_vec(partition_by, rewriter)?, + order_by: rewrite_vec(order_by, rewriter)?, + window_frame, + }, + Expr::AggregateFunction { + args, + fun, + distinct, + } => Expr::AggregateFunction { + args: rewrite_vec(args, rewriter)?, + fun, + distinct, + }, + Expr::AggregateUDF { args, fun } => Expr::AggregateUDF { + args: rewrite_vec(args, rewriter)?, + fun, + }, + Expr::InList { + expr, + list, + negated, + } => Expr::InList { + expr: rewrite_boxed(expr, rewriter)?, + list: rewrite_vec(list, rewriter)?, + negated, + }, + Expr::Wildcard => Expr::Wildcard, + Expr::GetIndexedField { expr, key } => Expr::GetIndexedField { + expr: rewrite_boxed(expr, rewriter)?, + key, + }, + }) + })?; // now rewrite this expression itself if need_mutate { @@ -1722,7 +1727,7 @@ fn fmt_function( impl fmt::Debug for Expr { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { + maybe_grow(|| match self { Expr::Alias(expr, alias) => write!(f, "{:?} AS {}", expr, alias), Expr::Column(c) => write!(f, "{}", 
c), Expr::ScalarVariable(var_names) => write!(f, "{}", var_names.join(".")), @@ -1841,7 +1846,7 @@ impl fmt::Debug for Expr { Expr::GetIndexedField { ref expr, key } => { write!(f, "({:?})[{}]", expr, key) } - } + }) } } @@ -1865,7 +1870,7 @@ fn create_function_name( /// Returns a readable name of an expression based on the input schema. /// This function recursively transverses the expression for names such as "CAST(a > 2)". fn create_name(e: &Expr, input_schema: &DFSchema) -> Result { - match e { + maybe_grow(|| match e { Expr::Alias(_, name) => Ok(name.clone()), Expr::Column(c) => Ok(c.flat_name()), Expr::ScalarVariable(variable_names) => Ok(variable_names.join(".")), @@ -2002,7 +2007,7 @@ fn create_name(e: &Expr, input_schema: &DFSchema) -> Result { Expr::Wildcard => Err(DataFusionError::Internal( "Create name does not support wildcard".to_string(), )), - } + }) } /// Create field meta-data from an expression, for use in a result set schema diff --git a/datafusion/src/safe_stack.rs b/datafusion/src/safe_stack.rs new file mode 100644 index 000000000000..9ed27a6a2b21 --- /dev/null +++ b/datafusion/src/safe_stack.rs @@ -0,0 +1,84 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +use crate::error::{DataFusionError, Result}; +use std::cell::RefCell; + +#[derive(Default, Debug, Clone)] +/// Record recursion depth +pub struct ProtectRecursion { + /// the depth of recursion + depth: RefCell, + /// the limit of recursion + limit: usize, +} + +impl ProtectRecursion { + /// Make a protect recursion with specific limit + pub fn new_with_limit(limit: usize) -> ProtectRecursion { + ProtectRecursion { + depth: RefCell::new(0), + limit, + } + } + + fn depth_ascend(&self) { + *self.depth.borrow_mut() -= 1; + } + + fn depth_descend(&self) -> Result<()> { + let mut depth = self.depth.borrow_mut(); + if *depth >= self.limit { + return Err(DataFusionError::RecursionLimitErr(self.limit)); + } + *depth += 1; + Ok(()) + } +} + +/// Bytes available in the current stack +pub const STACKER_RED_ZONE: usize = { + 64 << 10 // 64KB +}; + +/// Allocate a new stack of at least stack_size bytes. +pub const STACKER_SIZE: usize = { + 4 << 20 // 4MB +}; + +/// The trait is used to prevent stack overflow panic +pub trait SafeRecursion { + fn protect_recursion(&self) -> &ProtectRecursion; + + fn safe_recursion(&self, f: F) -> Result + where + F: FnOnce() -> Result, + { + self.protect_recursion().depth_descend().unwrap(); + let result = maybe_grow(f); + self.protect_recursion().depth_ascend(); + result + } +} + +/// Wrap stacker::maybe_grow with STACKER_RED_ZONE and STACKER_SIZE +pub fn maybe_grow(f: F) -> R +where + F: FnOnce() -> R, +{ + stacker::maybe_grow(STACKER_RED_ZONE, STACKER_SIZE, f) +} diff --git a/datafusion/src/sql/planner.rs b/datafusion/src/sql/planner.rs index 3558d6ca4e23..e8d38d7194c2 100644 --- a/datafusion/src/sql/planner.rs +++ b/datafusion/src/sql/planner.rs @@ -68,6 +68,7 @@ use super::{ }; use crate::logical_plan::builder::project_with_alias; use crate::logical_plan::plan::{Analyze, Explain}; +use crate::safe_stack::{ProtectRecursion, SafeRecursion}; /// The ContextProvider trait allows the query planner to obtain meta-data about tables and /// 
functions referenced in SQL statements @@ -83,6 +84,13 @@ pub trait ContextProvider { /// SQL query planner pub struct SqlToRel<'a, S: ContextProvider> { schema_provider: &'a S, + project_recursion: ProtectRecursion, +} + +impl<'a, S: ContextProvider> SafeRecursion for SqlToRel<'_, S> { + fn protect_recursion(&self) -> &ProtectRecursion { + &self.project_recursion + } } fn plan_key(key: Value) -> ScalarValue { @@ -114,7 +122,10 @@ fn plan_indexed(expr: Expr, mut keys: Vec) -> Expr { impl<'a, S: ContextProvider> SqlToRel<'a, S> { /// Create a new query planner pub fn new(schema_provider: &'a S) -> Self { - SqlToRel { schema_provider } + SqlToRel { + schema_provider, + project_recursion: ProtectRecursion::new_with_limit(1024), + } } /// Generate a logical plan from an DataFusion SQL statement @@ -210,6 +221,16 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> { query: &Query, alias: Option, ctes: &mut HashMap, + ) -> Result { + self.safe_recursion(|| self.query_to_plan_with_alias_inner(query, alias, ctes)) + } + + /// The inner of query_to_plan_with_alias + pub fn query_to_plan_with_alias_inner( + &self, + query: &Query, + alias: Option, + ctes: &mut HashMap, ) -> Result { let set_expr = &query.body; if let Some(with) = &query.with { @@ -1291,6 +1312,14 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> { } fn sql_expr_to_logical_expr(&self, sql: &SQLExpr, schema: &DFSchema) -> Result { + self.safe_recursion(|| self.sql_expr_to_logical_expr_inner(sql, schema)) + } + + fn sql_expr_to_logical_expr_inner( + &self, + sql: &SQLExpr, + schema: &DFSchema, + ) -> Result { match sql { SQLExpr::Value(Value::Number(n, _)) => parse_sql_number(n), SQLExpr::Value(Value::SingleQuotedString(ref s)) => Ok(lit(s.clone())), diff --git a/datafusion/tests/sql.rs b/datafusion/tests/sql.rs index 945bb7ebc2eb..9e043f8dfcd2 100644 --- a/datafusion/tests/sql.rs +++ b/datafusion/tests/sql.rs @@ -6512,3 +6512,45 @@ async fn csv_query_with_decimal_by_sql() -> Result<()> { assert_batches_eq!(expected, 
&actual);
    Ok(())
}

#[tokio::test]
async fn test_query_with_many_conditions() -> Result<()> {
    // Single nullable UTF-8 column with two rows.
    let schema = Arc::new(Schema::new(vec![Field::new("c1", DataType::Utf8, true)]));
    let data = RecordBatch::try_new(
        schema.clone(),
        vec![Arc::new(StringArray::from(vec!["foo", "bar"]))],
    )
    .unwrap();

    let table = MemTable::try_new(schema, vec![vec![data]])?;

    let mut ctx = ExecutionContext::new();
    ctx.register_table("test", Arc::new(table)).unwrap();

    // A WHERE clause with many OR-ed predicates produces a deep expression
    // tree; planning this used to overflow the stack before recursion was
    // guarded.
    let num_conditions = 100;
    let where_clause = (0..num_conditions)
        .map(|i| format!("c1 = 'value{:?}'", i))
        .collect::<Vec<_>>()
        .join(" OR ");
    let sql = format!("SELECT * from test where {};", where_clause);
    let actual = execute_to_batches(&mut ctx, &sql).await;
    // None of the synthetic values match, so the result set is empty.
    assert!(actual.is_empty());
    Ok(())
}

#[tokio::test]
async fn test_select_with_many_projections() -> Result<()> {
    let mut ctx = ExecutionContext::new();
    // Build a very deep binary-expression tree equivalent to the original
    // hard-coded literal: "SELECT 1 + 2 + ... + 266 + 1 + 2 + ... + 266".
    // Generating it keeps the test readable and the depth explicit.
    let terms = (1..=266)
        .chain(1..=266)
        .map(|i| i.to_string())
        .collect::<Vec<_>>()
        .join(" + ");
    let sql = format!("SELECT {}", terms);
    let actual = execute_to_batches(&mut ctx, &sql).await;
    // BUGFIX: the original expectation asserted a "| num | letter |" table
    // pasted from an unrelated test, which this query can never produce.
    // The correct result is one row, one column: 2 * (1 + ... + 266) = 71022.
    assert_eq!(1, actual.len());
    let batch = &actual[0];
    assert_eq!(1, batch.num_rows());
    let col = batch
        .column(0)
        .as_any()
        .downcast_ref::<Int64Array>()
        .expect("sum of integer literals should plan as Int64");
    assert_eq!(71022, col.value(0));
    Ok(())
}