4 changes: 2 additions & 2 deletions datafusion/core/src/datasource/file_format/json.rs
@@ -256,7 +256,7 @@ mod tests {
projection: Option<Vec<usize>>,
limit: Option<usize>,
) -> Result<Arc<dyn ExecutionPlan>> {
let filename = "tests/jsons/2.json";
let filename = "tests/data/2.json";
let format = JsonFormat::default();
scan_format(state, &format, ".", filename, projection, limit).await
}
@@ -266,7 +266,7 @@ mod tests {
let session = SessionContext::new();
let ctx = session.state();
let store = Arc::new(LocalFileSystem::new()) as _;
let filename = "tests/jsons/schema_infer_limit.json";
let filename = "tests/data/schema_infer_limit.json";
let format = JsonFormat::default().with_schema_infer_max_rec(Some(3));

let file_schema = format
2 changes: 1 addition & 1 deletion datafusion/core/src/physical_plan/file_format/csv.rs
@@ -785,7 +785,7 @@ mod tests {
let options = CsvReadOptions::default()
.schema_infer_max_records(2)
.has_header(true);
let df = ctx.read_csv("tests/csv/corrupt.csv", options).await?;
let df = ctx.read_csv("tests/data/corrupt.csv", options).await?;
let tmp_dir = TempDir::new()?;
let out_dir = tmp_dir.as_ref().to_str().unwrap().to_string() + "/out";
let e = df
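This corrupt-CSV hunk is repeated verbatim in the json.rs and parquet.rs tests below. As a consolidated reference, here is a minimal, hypothetical sketch of that shared setup, using only the calls visible in the diff; the exact imports and the use of the tempfile crate are assumptions, and the assertion that follows `let e = df` is truncated in the hunk, so it is not reproduced.

use datafusion::error::Result;
use datafusion::prelude::{CsvReadOptions, SessionContext};
use tempfile::TempDir;

// Sketch of the shared setup: read the relocated fixture with a small
// schema-inference window, then prepare a temporary output directory.
async fn corrupt_csv_setup() -> Result<()> {
    let ctx = SessionContext::new();
    let options = CsvReadOptions::default()
        .schema_infer_max_records(2)
        .has_header(true);
    // Path is relative to the datafusion/core crate root after the move to tests/data.
    let _df = ctx.read_csv("tests/data/corrupt.csv", options).await?;
    let tmp_dir = TempDir::new()?;
    let _out_dir = tmp_dir.as_ref().to_str().unwrap().to_string() + "/out";
    // The original tests go on to write the DataFrame into `_out_dir` and assert
    // on the resulting error; that part is elided in the hunk above.
    Ok(())
}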
4 changes: 2 additions & 2 deletions datafusion/core/src/physical_plan/file_format/json.rs
@@ -309,7 +309,7 @@ mod tests {

use super::*;

const TEST_DATA_BASE: &str = "tests/jsons";
const TEST_DATA_BASE: &str = "tests/data";

async fn prepare_store(
state: &SessionState,
@@ -707,7 +707,7 @@ mod tests {
let options = CsvReadOptions::default()
.schema_infer_max_records(2)
.has_header(true);
let df = ctx.read_csv("tests/csv/corrupt.csv", options).await?;
let df = ctx.read_csv("tests/data/corrupt.csv", options).await?;
let tmp_dir = TempDir::new()?;
let out_dir = tmp_dir.as_ref().to_str().unwrap().to_string() + "/out";
let e = df
2 changes: 1 addition & 1 deletion datafusion/core/src/physical_plan/file_format/parquet.rs
@@ -940,7 +940,7 @@ mod tests {
let options = CsvReadOptions::default()
.schema_infer_max_records(2)
.has_header(true);
let df = ctx.read_csv("tests/csv/corrupt.csv", options).await?;
let df = ctx.read_csv("tests/data/corrupt.csv", options).await?;
let tmp_dir = TempDir::new()?;
let out_dir = tmp_dir.as_ref().to_str().unwrap().to_string() + "/out";
let e = df
File renamed without changes.
File renamed without changes.
File renamed without changes.
4 changes: 2 additions & 2 deletions datafusion/core/tests/sql/json.rs
@@ -17,7 +17,7 @@

use super::*;

const TEST_DATA_BASE: &str = "tests/jsons";
const TEST_DATA_BASE: &str = "tests/data";

#[tokio::test]
async fn json_query() {
@@ -92,7 +92,7 @@ async fn json_explain() {
\n CoalescePartitionsExec\
\n AggregateExec: mode=Partial, gby=[], aggr=[COUNT(UInt8(1))]\
\n RepartitionExec: partitioning=RoundRobinBatch(NUM_CORES), input_partitions=1\
- \n JsonExec: file_groups={1 group: [[WORKING_DIR/tests/jsons/2.json]]}, projection=[a]\n",
+ \n JsonExec: file_groups={1 group: [[WORKING_DIR/tests/data/2.json]]}, projection=[a]\n",
],
];
assert_eq!(expected, actual);
2 changes: 1 addition & 1 deletion datafusion/core/tests/sql/order.rs
@@ -25,7 +25,7 @@ use test_utils::{batches_to_vec, partitions_to_sorted_vec};
#[tokio::test]
async fn sort_with_lots_of_repetition_values() -> Result<()> {
let ctx = SessionContext::new();
let filename = "tests/parquet/data/repeat_much.snappy.parquet";
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

the parquet one was particularly confusing because tests/parquet also has a bunch of rust code as well

let filename = "tests/data/repeat_much.snappy.parquet";

ctx.register_parquet("rep", filename, ParquetReadOptions::default())
.await?;
4 changes: 2 additions & 2 deletions datafusion/core/tests/sql/parquet.rs
@@ -151,7 +151,7 @@ async fn fixed_size_binary_columns() {
let ctx = SessionContext::new();
ctx.register_parquet(
"t0",
"tests/parquet/data/test_binary.parquet",
"tests/data/test_binary.parquet",
ParquetReadOptions::default(),
)
.await
@@ -170,7 +170,7 @@ async fn window_fn_timestamp_tz() {
let ctx = SessionContext::new();
ctx.register_parquet(
"t0",
"tests/parquet/data/timestamp_with_tz.parquet",
"tests/data/timestamp_with_tz.parquet",
ParquetReadOptions::default(),
)
.await
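For completeness, a minimal, hypothetical sketch of how the parquet-backed tests now reference the relocated fixtures, assuming only the register_parquet and ParquetReadOptions calls shown in the hunks above; the table name "t0" comes from the diff, while the SQL query and show() call are illustrative.

use datafusion::error::Result;
use datafusion::prelude::{ParquetReadOptions, SessionContext};

// Register one of the moved fixtures under a table name and run an illustrative query.
async fn query_relocated_fixture() -> Result<()> {
    let ctx = SessionContext::new();
    // The file now lives under datafusion/core/tests/data/ rather than tests/parquet/data/.
    ctx.register_parquet(
        "t0",
        "tests/data/test_binary.parquet",
        ParquetReadOptions::default(),
    )
    .await?;
    let df = ctx.sql("SELECT * FROM t0 LIMIT 5").await?;
    df.show().await?;
    Ok(())
}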