Skip to content

Commit

Permalink
Use official release
Browse files — browse the repository at this point in the history
  • Branch information
dongjoon-hyun committed Sep 16, 2021
1 parent 72857b7 commit ac7e61d
Show file tree
Hide file tree
Showing 3 changed files with 3 additions and 9 deletions.
5 changes: 0 additions & 5 deletions pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -290,11 +290,6 @@
<enabled>false</enabled>
</snapshots>
</repository>
<repository>
<id>apache.staging</id>
<name>Apache Staging Repository</name>
<url>https://repository.apache.org/content/repositories/orgapacheorc-1050</url>
</repository>
</repositories>
<pluginRepositories>
<pluginRepository>
Expand Down
1 change: 0 additions & 1 deletion project/SparkBuild.scala
Original file line number Diff line number Diff line change
Expand Up @@ -270,7 +270,6 @@ object SparkBuild extends PomBuild {
"gcs-maven-central-mirror" at "https://maven-central.storage-download.googleapis.com/maven2/",
DefaultMavenRepository,
Resolver.mavenLocal,
"Apache Staging Repository" at "https://repository.apache.org/content/repositories/orgapacheorc-1050",
Resolver.file("ivyLocal", file(Path.userHome.absolutePath + "/.ivy2/local"))(Resolver.ivyStylePatterns)
),
externalResolvers := resolvers.value,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -693,9 +693,9 @@ class FileBasedDataSourceSuite extends QueryTest
}

test("SPARK-22790,SPARK-27668: spark.sql.sources.compressionFactor takes effect") {
Seq(1.0, 0.4).foreach { compressionFactor =>
Seq(1.0, 0.5).foreach { compressionFactor =>
withSQLConf(SQLConf.FILE_COMPRESSION_FACTOR.key -> compressionFactor.toString,
SQLConf.AUTO_BROADCASTJOIN_THRESHOLD.key -> "250") {
SQLConf.AUTO_BROADCASTJOIN_THRESHOLD.key -> "350") {
withTempPath { workDir =>
// the file size is 504 bytes
val workDirPath = workDir.getAbsolutePath
Expand All @@ -706,7 +706,7 @@ class FileBasedDataSourceSuite extends QueryTest
data2.write.orc(workDirPath + "/data2")
val df2FromFile = spark.read.orc(workDirPath + "/data2")
val joinedDF = df1FromFile.join(df2FromFile, Seq("count"))
if (compressionFactor == 0.4) {
if (compressionFactor == 0.5) {
val bJoinExec = collect(joinedDF.queryExecution.executedPlan) {
case bJoin: BroadcastHashJoinExec => bJoin
}
Expand Down

0 comments on commit ac7e61d

Please sign in to comment.