diff --git a/core/src/main/scala/org/apache/spark/deploy/master/Master.scala b/core/src/main/scala/org/apache/spark/deploy/master/Master.scala
index d5de1366ac053..63d981c5fde82 100644
--- a/core/src/main/scala/org/apache/spark/deploy/master/Master.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/Master.scala
@@ -79,6 +79,7 @@ private[deploy] class Master(
   private val addressToApp = new HashMap[RpcAddress, ApplicationInfo]
   private val completedApps = new ArrayBuffer[ApplicationInfo]
   private var nextAppNumber = 0
+  private val moduloAppNumber = conf.get(APP_NUMBER_MODULO).getOrElse(0)
 
   private val drivers = new HashSet[DriverInfo]
   private val completedDrivers = new ArrayBuffer[DriverInfo]
@@ -1156,6 +1157,9 @@ private[deploy] class Master(
   private def newApplicationId(submitDate: Date): String = {
     val appId = appIdPattern.format(createDateFormat.format(submitDate), nextAppNumber)
     nextAppNumber += 1
+    if (moduloAppNumber > 0) {
+      nextAppNumber %= moduloAppNumber
+    }
     appId
   }
 
diff --git a/core/src/main/scala/org/apache/spark/internal/config/Deploy.scala b/core/src/main/scala/org/apache/spark/internal/config/Deploy.scala
index c6ccf9550bc91..906ec0fc99737 100644
--- a/core/src/main/scala/org/apache/spark/internal/config/Deploy.scala
+++ b/core/src/main/scala/org/apache/spark/internal/config/Deploy.scala
@@ -82,6 +82,16 @@ private[spark] object Deploy {
     .checkValue(_ > 0, "The maximum number of running drivers should be positive.")
     .createWithDefault(Int.MaxValue)
 
+  val APP_NUMBER_MODULO = ConfigBuilder("spark.deploy.appNumberModulo")
+    .doc("The modulo for app number. By default, the next of `app-yyyyMMddHHmmss-9999` is " +
+      "`app-yyyyMMddHHmmss-10000`. If we have 10000 as modulo, it will be " +
+      "`app-yyyyMMddHHmmss-0000`. In most cases, the prefix `app-yyyyMMddHHmmss` is increased " +
+      "already during creating 10000 applications.")
+    .version("4.0.0")
+    .intConf
+    .checkValue(_ >= 1000, "The modulo for app number should be greater than or equal to 1000.")
+    .createOptional
+
   val DRIVER_ID_PATTERN = ConfigBuilder("spark.deploy.driverIdPattern")
     .doc("The pattern for driver ID generation based on Java `String.format` method. " +
       "The default value is `driver-%s-%04d` which represents the existing driver id string " +
diff --git a/core/src/test/scala/org/apache/spark/deploy/master/MasterSuite.scala b/core/src/test/scala/org/apache/spark/deploy/master/MasterSuite.scala
index e8615cdbdd559..4f8457f930e4a 100644
--- a/core/src/test/scala/org/apache/spark/deploy/master/MasterSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/master/MasterSuite.scala
@@ -1266,6 +1266,15 @@ class MasterSuite extends SparkFunSuite
     }.getMessage
     assert(m.contains("Whitespace is not allowed"))
   }
+
+  test("SPARK-45785: Rotate app num with modulo operation") {
+    val conf = new SparkConf().set(APP_ID_PATTERN, "%2$d").set(APP_NUMBER_MODULO, 1000)
+    val master = makeMaster(conf)
+    val submitDate = new Date()
+    (0 to 2000).foreach { i =>
+      assert(master.invokePrivate(_newApplicationId(submitDate)) === s"${i % 1000}")
+    }
+  }
 }
 
 private class FakeRecoveryModeFactory(conf: SparkConf, ser: serializer.Serializer)