Skip to content

Commit

Permalink
Providing the path to the file now conforms to Spark defaults
Browse files Browse the repository at this point in the history
  • Loading branch information
nightscape committed Aug 16, 2017
1 parent b95bd8a commit 6da6955
Show file tree
Hide file tree
Showing 4 changed files with 4 additions and 4 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
0.8.6
=====
- Change: Providing the path to the file is now done either via `.option("path", thePath)` or via `.load(thePath)`
- Change: Some previously required parameters are now optional and have a default

0.8.5
Expand Down
3 changes: 1 addition & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -40,8 +40,6 @@ import org.apache.spark.sql.SQLContext
val sqlContext = new SQLContext(sc)
val df = sqlContext.read
.format("com.crealytics.spark.excel")
.option("location", "Worktime.xlsx")
.load()
.option("sheetName", "Daily") // Required
.option("useHeader", "true") // Required
.option("treatEmptyValuesAsNulls", "false") // Optional, default: true
Expand All @@ -50,6 +48,7 @@ val df = sqlContext.read
.option("startColumn", 0) // Optional, default: 0
.option("endColumn", 99) // Optional, default: Int.MaxValue
.schema(myCustomSchema) // Optional, default: Either inferred schema, or all columns are Strings
.load("Worktime.xlsx")
```

## Building From Source
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ class DefaultSource
schema: StructType
): ExcelRelation = {
ExcelRelation(
location = checkParameter(parameters, "location"),
location = checkParameter(parameters, "path"),
sheetName = parameters.get("sheetName"),
useHeader = checkParameter(parameters, "useHeader").toBoolean,
treatEmptyValuesAsNulls = parameters.get("treatEmptyValuesAsNulls").fold(true)(_.toBoolean),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -95,8 +95,8 @@ class IntegrationSuite extends FunSuite with PropertyChecks with DataFrameSuiteB
.option("inferSchema", "false")
.option("addColorColumns", "false")
.schema(exampleDataSchema)
.load()
val expected = spark.createDataset(rows).toDF
.load(fileName)
assertDataFrameEquals(expected, result)
}
}
Expand Down

0 comments on commit 6da6955

Please sign in to comment.