Skip to content

Commit

Permalink
[SPARK-39371][DOCS][CORE] Review and fix issues in Scala/Java API docs of Core module
Browse files Browse the repository at this point in the history

### What changes were proposed in this pull request?

Compare the 3.3.0 API docs with those of the latest release version, 3.2.1. Fix the following issues:

* Add missing Since annotation for new APIs
* Remove the leaking class/object in API doc

### Why are the changes needed?

Improve API docs

### Does this PR introduce _any_ user-facing change?

No

### How was this patch tested?

Existing unit tests

Closes #36757 from xuanyuanking/doc.

Authored-by: Yuanjian Li <yuanjian.li@databricks.com>
Signed-off-by: Hyukjin Kwon <gurwls223@apache.org>
  • Loading branch information
xuanyuanking authored and HyukjinKwon committed Jun 3, 2022
1 parent bed0b90 commit 1fbb1d4
Show file tree
Hide file tree
Showing 7 changed files with 11 additions and 9 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ import org.apache.spark.storage.{BlockId, BlockManagerId, BlockNotFoundException
/**
* Object for grouping error messages from (most) exceptions thrown during query execution.
*/
object SparkCoreErrors {
private[spark] object SparkCoreErrors {
def unexpectedPy4JServerError(other: Object): Throwable = {
new RuntimeException(s"Unexpected Py4J server ${other.getClass}")
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,5 +17,5 @@

package org.apache.spark.storage

class BlockSavedOnDecommissionedBlockManagerException(blockId: BlockId)
private[spark] class BlockSavedOnDecommissionedBlockManagerException(blockId: BlockId)
extends Exception(s"Block $blockId cannot be saved on decommissioned executor")
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@
/**
* Base class for launcher implementations.
*
* @since Spark 2.3.0
* @since 2.3.0
*/
public abstract class AbstractLauncher<T extends AbstractLauncher<T>> {

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@
* driver memory or configs which modify the driver's class path) do not take effect. Logging
* configuration is also inherited from the parent application.
*
* @since Spark 2.3.0
* @since 2.3.0
*/
public class InProcessLauncher extends AbstractLauncher<InProcessLauncher> {

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,8 @@
* This helper class is used to place the all `--add-opens` options
* required by Spark when using Java 17. `DEFAULT_MODULE_OPTIONS` has added
* `-XX:+IgnoreUnrecognizedVMOptions` to be compatible with Java 8 and Java 11.
*
* @since 3.3.0
*/
public class JavaModuleOptions {
private static final String[] DEFAULT_MODULE_OPTIONS = {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ import org.apache.spark.status.{ElementTrackingStore, KVUtils}
*
* @param kvStore used to store the diagnostic information
*/
class DiagnosticListener(
private[spark] class DiagnosticListener(
conf: SparkConf,
kvStore: ElementTrackingStore) extends SparkListener {

Expand Down Expand Up @@ -107,6 +107,6 @@ class DiagnosticListener(
}
}

object DiagnosticListener {
private[spark] object DiagnosticListener {
val QUEUE_NAME = "diagnostics"
}
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ import org.apache.spark.util.kvstore.{KVIndex, KVStore}
* information. There's no state kept in this class, so it's ok to have multiple instances
* of it in an application.
*/
class DiagnosticStore(store: KVStore) {
private[spark] class DiagnosticStore(store: KVStore) {

def diagnosticsList(offset: Int, length: Int): Seq[ExecutionDiagnosticData] = {
KVUtils.viewToSeq(store.view(classOf[ExecutionDiagnosticData]).skip(offset).max(length))
Expand All @@ -51,15 +51,15 @@ class DiagnosticStore(store: KVStore) {
}

/* Represents the diagnostic data of a SQL execution */
class ExecutionDiagnosticData(
private[spark] class ExecutionDiagnosticData(
@KVIndexParam val executionId: Long,
val physicalPlan: String,
val submissionTime: Long,
val completionTime: Option[Long],
val errorMessage: Option[String])

/* Represents the plan change of an adaptive execution */
class AdaptiveExecutionUpdate(
private[spark] class AdaptiveExecutionUpdate(
@KVIndexParam("id")
val executionId: Long,
@KVIndexParam(value = "updateTime", parent = "id")
Expand Down

0 comments on commit 1fbb1d4

Please sign in to comment.