{code:java}
java.util.concurrent.CompletionException: java.lang.OutOfMemoryError: Java heap space
at java.util.concurrent.CompletableFuture.encodeThrowable(CompletableFuture.java:273) ~[?:1.8.0_392]
at java.util.concurrent.CompletableFuture.uniComposeStage(CompletableFuture.java:1005) ~[?:1.8.0_392]
at java.util.concurrent.CompletableFuture.thenCompose(CompletableFuture.java:2137) ~[?:1.8.0_392]
at io.javalin.http.JavalinServletHandler.queueNextTaskOrFinish$javalin(JavalinServletHandler.kt:85) ~[javalin-4.6.7.jar:4.6.7]
at io.javalin.http.JavalinServlet.service(JavalinServlet.kt:89) ~[javalin-4.6.7.jar:4.6.7]
at javax.servlet.http.HttpServlet.service(HttpServlet.java:790) ~[javax.servlet-api-3.1.0.jar:3.1.0]
at io.javalin.jetty.JavalinJettyServlet.service(JavalinJettyServlet.kt:58) ~[javalin-4.6.7.jar:4.6.7]
at javax.servlet.http.HttpServlet.service(HttpServlet.java:790) ~[javax.servlet-api-3.1.0.jar:3.1.0]
at org.eclipse.jetty.servlet.ServletHolder.handle(ServletHolder.java:799) ~[jetty-servlet-9.4.48.v20220622.jar:9.4.48.v20220622]
at org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:554) ~[jetty-servlet-9.4.48.v20220622.jar:9.4.48.v20220622]
at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:233) ~[jetty-server-9.4.48.v20220622.jar:9.4.48.v20220622]
at org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:1624) ~[jetty-server-9.4.48.v20220622.jar:9.4.48.v20220622]
at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:233) ~[jetty-server-9.4.48.v20220622.jar:9.4.48.v20220622]
at io.javalin.jetty.JettyServer$start$wsAndHttpHandler$1.doHandle(JettyServer.kt:52) ~[javalin-4.6.7.jar:4.6.7]
at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:188) ~[jetty-server-9.4.48.v20220622.jar:9.4.48.v20220622]
at org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:505) ~[jetty-servlet-9.4.48.v20220622.jar:9.4.48.v20220622]
at org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:1594) ~[jetty-server-9.4.48.v20220622.jar:9.4.48.v20220622]
at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:186) ~[jetty-server-9.4.48.v20220622.jar:9.4.48.v20220622]
at org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1355) ~[jetty-server-9.4.48.v20220622.jar:9.4.48.v20220622]
at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:141) ~[jetty-server-9.4.48.v20220622.jar:9.4.48.v20220622]
at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127) ~[jetty-server-9.4.48.v20220622.jar:9.4.48.v20220622]
at org.eclipse.jetty.server.Server.handle(Server.java:516) ~[jetty-server-9.4.48.v20220622.jar:9.4.48.v20220622]
at org.eclipse.jetty.server.HttpChannel.lambda$handle$1(HttpChannel.java:487) ~[jetty-server-9.4.48.v20220622.jar:9.4.48.v20220622]
at org.eclipse.jetty.server.HttpChannel.dispatch(HttpChannel.java:732) ~[jetty-server-9.4.48.v20220622.jar:9.4.48.v20220622]
at org.eclipse.jetty.server.HttpChannel.handle(HttpChannel.java:479) ~[jetty-server-9.4.48.v20220622.jar:9.4.48.v20220622]
at org.eclipse.jetty.server.HttpConnection.onFillable(HttpConnection.java:277) ~[jetty-server-9.4.48.v20220622.jar:9.4.48.v20220622]
at org.eclipse.jetty.io.AbstractConnection$ReadCallback.succeeded(AbstractConnection.java:311) ~[jetty-io-9.4.48.v20220622.jar:9.4.48.v20220622]
at org.eclipse.jetty.io.FillInterest.fillable(FillInterest.java:105) ~[jetty-io-9.4.48.v20220622.jar:9.4.48.v20220622]
at org.eclipse.jetty.io.ChannelEndPoint$1.run(ChannelEndPoint.java:104) ~[jetty-io-9.4.48.v20220622.jar:9.4.48.v20220622]
at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.runTask(EatWhatYouKill.java:338) ~[jetty-util-9.4.48.v20220622.jar:9.4.48.v20220622]
at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.doProduce(EatWhatYouKill.java:315) ~[jetty-util-9.4.48.v20220622.jar:9.4.48.v20220622]
at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.tryProduce(EatWhatYouKill.java:173) ~[jetty-util-9.4.48.v20220622.jar:9.4.48.v20220622]
at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.run(EatWhatYouKill.java:131) ~[jetty-util-9.4.48.v20220622.jar:9.4.48.v20220622]
at org.eclipse.jetty.util.thread.ReservedThreadExecutor$ReservedThread.run(ReservedThreadExecutor.java:409) ~[jetty-util-9.4.48.v20220622.jar:9.4.48.v20220622]
at org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:883) ~[jetty-util-9.4.48.v20220622.jar:9.4.48.v20220622]
at org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:1034) ~[jetty-util-9.4.48.v20220622.jar:9.4.48.v20220622]
at java.lang.Thread.run(Thread.java:750) ~[?:1.8.0_392]
Caused by: java.lang.OutOfMemoryError: Java heap space
at java.io.BufferedInputStream.&lt;init&gt;(BufferedInputStream.java:203) ~[?:1.8.0_392]
at org.apache.hadoop.fs.BufferedFSInputStream.&lt;init&gt;(BufferedFSInputStream.java:50) ~[hadoop-common-2.10.2.jar:?]
at org.apache.hadoop.fs.RawLocalFileSystem.open(RawLocalFileSystem.java:212) ~[hadoop-common-2.10.2.jar:?]
at org.apache.hadoop.fs.ChecksumFileSystem$ChecksumFSInputChecker.&lt;init&gt;(ChecksumFileSystem.java:147) ~[hadoop-common-2.10.2.jar:?]
at org.apache.hadoop.fs.ChecksumFileSystem.open(ChecksumFileSystem.java:347) ~[hadoop-common-2.10.2.jar:?]
at org.apache.hudi.common.fs.HoodieWrapperFileSystem.open(HoodieWrapperFileSystem.java:203) ~[hudi-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.common.table.log.HoodieLogFileReader.getFSDataInputStream(HoodieLogFileReader.java:499) ~[hudi-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.common.table.log.HoodieLogFileReader.&lt;init&gt;(HoodieLogFileReader.java:120) ~[hudi-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.common.table.log.HoodieLogFormatReader.hasNext(HoodieLogFormatReader.java:110) ~[hudi-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.common.table.log.AbstractHoodieLogRecordReader.scanInternalV1(AbstractHoodieLogRecordReader.java:247) ~[hudi-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.common.table.log.AbstractHoodieLogRecordReader.scanInternal(AbstractHoodieLogRecordReader.java:220) ~[hudi-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.common.table.log.HoodieMergedLogRecordScanner.performScan(HoodieMergedLogRecordScanner.java:201) ~[hudi-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.common.table.log.HoodieMergedLogRecordScanner.&lt;init&gt;(HoodieMergedLogRecordScanner.java:117) ~[hudi-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.common.table.log.HoodieMergedLogRecordScanner.&lt;init&gt;(HoodieMergedLogRecordScanner.java:76) ~[hudi-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.common.table.log.HoodieMergedLogRecordScanner$Builder.build(HoodieMergedLogRecordScanner.java:466) ~[hudi-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.metadata.HoodieMetadataLogRecordReader$Builder.build(HoodieMetadataLogRecordReader.java:219) ~[hudi-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.metadata.HoodieBackedTableMetadata.getLogRecordScanner(HoodieBackedTableMetadata.java:501) ~[hudi-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.metadata.HoodieBackedTableMetadata.openReaders(HoodieBackedTableMetadata.java:432) ~[hudi-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.metadata.HoodieBackedTableMetadata.lambda$getOrCreateReaders$10(HoodieBackedTableMetadata.java:415) ~[hudi-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.metadata.HoodieBackedTableMetadata$$Lambda$3800/1949887064.apply(Unknown Source) ~[?:?]
at java.util.concurrent.ConcurrentHashMap.computeIfAbsent(ConcurrentHashMap.java:1660) ~[?:1.8.0_392]
at org.apache.hudi.metadata.HoodieBackedTableMetadata.getOrCreateReaders(HoodieBackedTableMetadata.java:415) ~[hudi-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.metadata.HoodieBackedTableMetadata.lookupKeysFromFileSlice(HoodieBackedTableMetadata.java:294) ~[hudi-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.metadata.HoodieBackedTableMetadata.getRecordsByKeys(HoodieBackedTableMetadata.java:258) ~[hudi-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.metadata.HoodieBackedTableMetadata.getRecordByKey(HoodieBackedTableMetadata.java:148) ~[hudi-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.metadata.BaseTableMetadata.fetchAllPartitionPaths(BaseTableMetadata.java:316) ~[hudi-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.metadata.BaseTableMetadata.getAllPartitionPaths(BaseTableMetadata.java:125) ~[hudi-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.metadata.HoodieMetadataFileSystemView.getAllPartitionPaths(HoodieMetadataFileSystemView.java:72) ~[hudi-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.common.table.view.AbstractTableFileSystemView.ensureAllPartitionsLoadedCorrectly(AbstractTableFileSystemView.java:336) ~[hudi-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.common.table.view.AbstractTableFileSystemView.loadAllPartitions(AbstractTableFileSystemView.java:807) ~[hudi-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.timeline.service.handlers.FileSliceHandler.loadAllPartitions(FileSliceHandler.java:163) ~[hudi-timeline-service-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.timeline.service.RequestHandler.lambda$registerFileSlicesAPI$59(RequestHandler.java:450) ~[hudi-timeline-service-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
3385303 [stream execution thread for [id = 0499fd07-0abb-4e74-9d3a-685633ff49bc, runId = 754fdc56-cea1-43f4-9f8d-3d5bfe7bfe5c]] ERROR org.apache.hudi.common.table.view.PriorityBasedFileSystemView [] - Got error running preferred function. Trying secondary
org.apache.hudi.exception.HoodieRemoteException: Server Error
at org.apache.hudi.common.table.view.RemoteHoodieTableFileSystemView.loadAllPartitions(RemoteHoodieTableFileSystemView.java:535) ~[hudi-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.common.table.view.PriorityBasedFileSystemView.execute(PriorityBasedFileSystemView.java:69) ~[hudi-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.common.table.view.PriorityBasedFileSystemView.loadAllPartitions(PriorityBasedFileSystemView.java:172) ~[hudi-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.table.action.clean.CleanPlanner.&lt;init&gt;(CleanPlanner.java:110) ~[hudi-client-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.table.action.clean.CleanPlanActionExecutor.requestClean(CleanPlanActionExecutor.java:107) ~[hudi-client-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.table.action.clean.CleanPlanActionExecutor.requestClean(CleanPlanActionExecutor.java:159) ~[hudi-client-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.table.action.clean.CleanPlanActionExecutor.execute(CleanPlanActionExecutor.java:185) ~[hudi-client-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.table.HoodieSparkCopyOnWriteTable.scheduleCleaning(HoodieSparkCopyOnWriteTable.java:217) ~[hudi-spark-client-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.client.BaseHoodieTableServiceClient.scheduleTableServiceInternal(BaseHoodieTableServiceClient.java:631) ~[hudi-client-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.client.BaseHoodieTableServiceClient.clean(BaseHoodieTableServiceClient.java:754) ~[hudi-client-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.client.BaseHoodieWriteClient.clean(BaseHoodieWriteClient.java:862) ~[hudi-client-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.client.BaseHoodieWriteClient.clean(BaseHoodieWriteClient.java:835) ~[hudi-client-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.client.BaseHoodieWriteClient.clean(BaseHoodieWriteClient.java:866) ~[hudi-client-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.client.BaseHoodieWriteClient.autoCleanOnCommit(BaseHoodieWriteClient.java:600) ~[hudi-client-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.client.BaseHoodieWriteClient.mayBeCleanAndArchive(BaseHoodieWriteClient.java:579) ~[hudi-client-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.client.BaseHoodieWriteClient.commitStats(BaseHoodieWriteClient.java:248) ~[hudi-client-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.client.SparkRDDWriteClient.commit(SparkRDDWriteClient.java:104) ~[hudi-spark-client-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.HoodieSparkSqlWriterInternal.commitAndPerformPostOperations(HoodieSparkSqlWriter.scala:965) ~[classes/:1.0.0-SNAPSHOT]
at org.apache.hudi.HoodieSparkSqlWriterInternal.writeInternal(HoodieSparkSqlWriter.scala:530) ~[classes/:1.0.0-SNAPSHOT]
at org.apache.hudi.HoodieSparkSqlWriterInternal.write(HoodieSparkSqlWriter.scala:203) ~[classes/:1.0.0-SNAPSHOT]
at org.apache.hudi.HoodieSparkSqlWriter$.write(HoodieSparkSqlWriter.scala:120) ~[classes/:1.0.0-SNAPSHOT]
at org.apache.hudi.DefaultSource.createRelation(DefaultSource.scala:145) ~[classes/:1.0.0-SNAPSHOT]
at org.apache.spark.sql.execution.datasources.SaveIntoDataSourceCommand.run(SaveIntoDataSourceCommand.scala:46) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:70) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:68) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.execution.command.ExecutedCommandExec.doExecute(commands.scala:90) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:180) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151) ~[spark-core_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:176) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:127) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:126) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.DataFrameWriter.$anonfun$runCommand$1(DataFrameWriter.scala:962) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:100) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:160) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:87) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:764) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.DataFrameWriter.runCommand(DataFrameWriter.scala:962) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.DataFrameWriter.saveToV1Source(DataFrameWriter.scala:414) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:398) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:287) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.hudi.functional.cdc.TestCDCStreamingSuite.$anonfun$cdcStreaming$2(TestCDCStreamingSuite.scala:156) ~[test-classes/:?]
at org.apache.hudi.functional.cdc.TestCDCStreamingSuite.$anonfun$cdcStreaming$2$adapted(TestCDCStreamingSuite.scala:123) ~[test-classes/:?]
at org.apache.spark.sql.execution.streaming.sources.ForeachBatchSink.addBatch(ForeachBatchSink.scala:36) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$runBatch$16(MicroBatchExecution.scala:583) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:100) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:160) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:87) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:764) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$runBatch$15(MicroBatchExecution.scala:581) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken(ProgressReporter.scala:352) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken$(ProgressReporter.scala:350) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.execution.streaming.StreamExecution.reportTimeTaken(StreamExecution.scala:69) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.execution.streaming.MicroBatchExecution.runBatch(MicroBatchExecution.scala:581) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$runActivatedStream$2(MicroBatchExecution.scala:223) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23) ~[scala-library-2.12.10.jar:?]
at org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken(ProgressReporter.scala:352) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken$(ProgressReporter.scala:350) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.execution.streaming.StreamExecution.reportTimeTaken(StreamExecution.scala:69) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$runActivatedStream$1(MicroBatchExecution.scala:191) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.execution.streaming.ProcessingTimeExecutor.execute(TriggerExecutor.scala:57) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.execution.streaming.MicroBatchExecution.runActivatedStream(MicroBatchExecution.scala:185) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.execution.streaming.StreamExecution.org$apache$spark$sql$execution$streaming$StreamExecution$$runStream(StreamExecution.scala:334) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.execution.streaming.StreamExecution$$anon$1.run(StreamExecution.scala:245) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
Caused by: org.apache.http.client.HttpResponseException: Server Error
at org.apache.http.impl.client.AbstractResponseHandler.handleResponse(AbstractResponseHandler.java:69) ~[httpclient-4.4.1.jar:4.4.1]
at org.apache.http.client.fluent.Response.handleResponse(Response.java:90) ~[fluent-hc-4.4.1.jar:4.4.1]
at org.apache.http.client.fluent.Response.returnContent(Response.java:97) ~[fluent-hc-4.4.1.jar:4.4.1]
at org.apache.hudi.common.table.view.RemoteHoodieTableFileSystemView.executeRequest(RemoteHoodieTableFileSystemView.java:208) ~[hudi-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.common.table.view.RemoteHoodieTableFileSystemView.loadAllPartitions(RemoteHoodieTableFileSystemView.java:532) ~[hudi-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
... 66 more
3387347 [stream execution thread for [id = 0499fd07-0abb-4e74-9d3a-685633ff49bc, runId = 754fdc56-cea1-43f4-9f8d-3d5bfe7bfe5c]] WARN org.apache.hudi.HoodieSparkSqlWriterInternal [] - Closing write client
3387351 [stream execution thread for [id = 0499fd07-0abb-4e74-9d3a-685633ff49bc, runId = 754fdc56-cea1-43f4-9f8d-3d5bfe7bfe5c]] ERROR org.apache.spark.sql.execution.streaming.MicroBatchExecution [] - Query [id = 0499fd07-0abb-4e74-9d3a-685633ff49bc, runId = 754fdc56-cea1-43f4-9f8d-3d5bfe7bfe5c] terminated with error {code}
Stack trace attached to the JIRA issue (same failure as above):
{code:java}
java.util.concurrent.CompletionException: java.lang.OutOfMemoryError: Java heap space
at java.util.concurrent.CompletableFuture.encodeThrowable(CompletableFuture.java:273) ~[?:1.8.0_392]
at java.util.concurrent.CompletableFuture.uniComposeStage(CompletableFuture.java:1005) ~[?:1.8.0_392]
at java.util.concurrent.CompletableFuture.thenCompose(CompletableFuture.java:2137) ~[?:1.8.0_392]
at io.javalin.http.JavalinServletHandler.queueNextTaskOrFinish$javalin(JavalinServletHandler.kt:85) ~[javalin-4.6.7.jar:4.6.7]
at io.javalin.http.JavalinServlet.service(JavalinServlet.kt:89) ~[javalin-4.6.7.jar:4.6.7]
at javax.servlet.http.HttpServlet.service(HttpServlet.java:790) ~[javax.servlet-api-3.1.0.jar:3.1.0]
at io.javalin.jetty.JavalinJettyServlet.service(JavalinJettyServlet.kt:58) ~[javalin-4.6.7.jar:4.6.7]
at javax.servlet.http.HttpServlet.service(HttpServlet.java:790) ~[javax.servlet-api-3.1.0.jar:3.1.0]
at org.eclipse.jetty.servlet.ServletHolder.handle(ServletHolder.java:799) ~[jetty-servlet-9.4.48.v20220622.jar:9.4.48.v20220622]
at org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:554) ~[jetty-servlet-9.4.48.v20220622.jar:9.4.48.v20220622]
at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:233) ~[jetty-server-9.4.48.v20220622.jar:9.4.48.v20220622]
at org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:1624) ~[jetty-server-9.4.48.v20220622.jar:9.4.48.v20220622]
at org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:233) ~[jetty-server-9.4.48.v20220622.jar:9.4.48.v20220622]
at io.javalin.jetty.JettyServer$start$wsAndHttpHandler$1.doHandle(JettyServer.kt:52) ~[javalin-4.6.7.jar:4.6.7]
at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:188) ~[jetty-server-9.4.48.v20220622.jar:9.4.48.v20220622]
at org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:505) ~[jetty-servlet-9.4.48.v20220622.jar:9.4.48.v20220622]
at org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:1594) ~[jetty-server-9.4.48.v20220622.jar:9.4.48.v20220622]
at org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:186) ~[jetty-server-9.4.48.v20220622.jar:9.4.48.v20220622]
at org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1355) ~[jetty-server-9.4.48.v20220622.jar:9.4.48.v20220622]
at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:141) ~[jetty-server-9.4.48.v20220622.jar:9.4.48.v20220622]
at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127) ~[jetty-server-9.4.48.v20220622.jar:9.4.48.v20220622]
at org.eclipse.jetty.server.Server.handle(Server.java:516) ~[jetty-server-9.4.48.v20220622.jar:9.4.48.v20220622]
at org.eclipse.jetty.server.HttpChannel.lambda$handle$1(HttpChannel.java:487) ~[jetty-server-9.4.48.v20220622.jar:9.4.48.v20220622]
at org.eclipse.jetty.server.HttpChannel.dispatch(HttpChannel.java:732) ~[jetty-server-9.4.48.v20220622.jar:9.4.48.v20220622]
at org.eclipse.jetty.server.HttpChannel.handle(HttpChannel.java:479) ~[jetty-server-9.4.48.v20220622.jar:9.4.48.v20220622]
at org.eclipse.jetty.server.HttpConnection.onFillable(HttpConnection.java:277) ~[jetty-server-9.4.48.v20220622.jar:9.4.48.v20220622]
at org.eclipse.jetty.io.AbstractConnection$ReadCallback.succeeded(AbstractConnection.java:311) ~[jetty-io-9.4.48.v20220622.jar:9.4.48.v20220622]
at org.eclipse.jetty.io.FillInterest.fillable(FillInterest.java:105) ~[jetty-io-9.4.48.v20220622.jar:9.4.48.v20220622]
at org.eclipse.jetty.io.ChannelEndPoint$1.run(ChannelEndPoint.java:104) ~[jetty-io-9.4.48.v20220622.jar:9.4.48.v20220622]
at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.runTask(EatWhatYouKill.java:338) ~[jetty-util-9.4.48.v20220622.jar:9.4.48.v20220622]
at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.doProduce(EatWhatYouKill.java:315) ~[jetty-util-9.4.48.v20220622.jar:9.4.48.v20220622]
at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.tryProduce(EatWhatYouKill.java:173) ~[jetty-util-9.4.48.v20220622.jar:9.4.48.v20220622]
at org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.run(EatWhatYouKill.java:131) ~[jetty-util-9.4.48.v20220622.jar:9.4.48.v20220622]
at org.eclipse.jetty.util.thread.ReservedThreadExecutor$ReservedThread.run(ReservedThreadExecutor.java:409) ~[jetty-util-9.4.48.v20220622.jar:9.4.48.v20220622]
at org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:883) ~[jetty-util-9.4.48.v20220622.jar:9.4.48.v20220622]
at org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:1034) ~[jetty-util-9.4.48.v20220622.jar:9.4.48.v20220622]
at java.lang.Thread.run(Thread.java:750) ~[?:1.8.0_392]
Caused by: java.lang.OutOfMemoryError: Java heap space
at java.io.BufferedInputStream.&lt;init&gt;(BufferedInputStream.java:203) ~[?:1.8.0_392]
at org.apache.hadoop.fs.BufferedFSInputStream.&lt;init&gt;(BufferedFSInputStream.java:50) ~[hadoop-common-2.10.2.jar:?]
at org.apache.hadoop.fs.RawLocalFileSystem.open(RawLocalFileSystem.java:212) ~[hadoop-common-2.10.2.jar:?]
at org.apache.hadoop.fs.ChecksumFileSystem$ChecksumFSInputChecker.&lt;init&gt;(ChecksumFileSystem.java:147) ~[hadoop-common-2.10.2.jar:?]
at org.apache.hadoop.fs.ChecksumFileSystem.open(ChecksumFileSystem.java:347) ~[hadoop-common-2.10.2.jar:?]
at org.apache.hudi.common.fs.HoodieWrapperFileSystem.open(HoodieWrapperFileSystem.java:203) ~[hudi-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.common.table.log.HoodieLogFileReader.getFSDataInputStream(HoodieLogFileReader.java:499) ~[hudi-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.common.table.log.HoodieLogFileReader.&lt;init&gt;(HoodieLogFileReader.java:120) ~[hudi-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.common.table.log.HoodieLogFormatReader.hasNext(HoodieLogFormatReader.java:110) ~[hudi-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.common.table.log.AbstractHoodieLogRecordReader.scanInternalV1(AbstractHoodieLogRecordReader.java:247) ~[hudi-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.common.table.log.AbstractHoodieLogRecordReader.scanInternal(AbstractHoodieLogRecordReader.java:220) ~[hudi-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.common.table.log.HoodieMergedLogRecordScanner.performScan(HoodieMergedLogRecordScanner.java:201) ~[hudi-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.common.table.log.HoodieMergedLogRecordScanner.&lt;init&gt;(HoodieMergedLogRecordScanner.java:117) ~[hudi-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.common.table.log.HoodieMergedLogRecordScanner.&lt;init&gt;(HoodieMergedLogRecordScanner.java:76) ~[hudi-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.common.table.log.HoodieMergedLogRecordScanner$Builder.build(HoodieMergedLogRecordScanner.java:466) ~[hudi-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.metadata.HoodieMetadataLogRecordReader$Builder.build(HoodieMetadataLogRecordReader.java:219) ~[hudi-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.metadata.HoodieBackedTableMetadata.getLogRecordScanner(HoodieBackedTableMetadata.java:501) ~[hudi-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.metadata.HoodieBackedTableMetadata.openReaders(HoodieBackedTableMetadata.java:432) ~[hudi-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.metadata.HoodieBackedTableMetadata.lambda$getOrCreateReaders$10(HoodieBackedTableMetadata.java:415) ~[hudi-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.metadata.HoodieBackedTableMetadata$$Lambda$3800/1949887064.apply(Unknown Source) ~[?:?]
at java.util.concurrent.ConcurrentHashMap.computeIfAbsent(ConcurrentHashMap.java:1660) ~[?:1.8.0_392]
at org.apache.hudi.metadata.HoodieBackedTableMetadata.getOrCreateReaders(HoodieBackedTableMetadata.java:415) ~[hudi-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.metadata.HoodieBackedTableMetadata.lookupKeysFromFileSlice(HoodieBackedTableMetadata.java:294) ~[hudi-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.metadata.HoodieBackedTableMetadata.getRecordsByKeys(HoodieBackedTableMetadata.java:258) ~[hudi-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.metadata.HoodieBackedTableMetadata.getRecordByKey(HoodieBackedTableMetadata.java:148) ~[hudi-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.metadata.BaseTableMetadata.fetchAllPartitionPaths(BaseTableMetadata.java:316) ~[hudi-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.metadata.BaseTableMetadata.getAllPartitionPaths(BaseTableMetadata.java:125) ~[hudi-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.metadata.HoodieMetadataFileSystemView.getAllPartitionPaths(HoodieMetadataFileSystemView.java:72) ~[hudi-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.common.table.view.AbstractTableFileSystemView.ensureAllPartitionsLoadedCorrectly(AbstractTableFileSystemView.java:336) ~[hudi-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.common.table.view.AbstractTableFileSystemView.loadAllPartitions(AbstractTableFileSystemView.java:807) ~[hudi-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.timeline.service.handlers.FileSliceHandler.loadAllPartitions(FileSliceHandler.java:163) ~[hudi-timeline-service-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.timeline.service.RequestHandler.lambda$registerFileSlicesAPI$59(RequestHandler.java:450) ~[hudi-timeline-service-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
3385303 [stream execution thread for [id = 0499fd07-0abb-4e74-9d3a-685633ff49bc, runId = 754fdc56-cea1-43f4-9f8d-3d5bfe7bfe5c]] ERROR org.apache.hudi.common.table.view.PriorityBasedFileSystemView [] - Got error running preferred function. Trying secondary
org.apache.hudi.exception.HoodieRemoteException: Server Error
at org.apache.hudi.common.table.view.RemoteHoodieTableFileSystemView.loadAllPartitions(RemoteHoodieTableFileSystemView.java:535) ~[hudi-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.common.table.view.PriorityBasedFileSystemView.execute(PriorityBasedFileSystemView.java:69) ~[hudi-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.common.table.view.PriorityBasedFileSystemView.loadAllPartitions(PriorityBasedFileSystemView.java:172) ~[hudi-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.table.action.clean.CleanPlanner.&lt;init&gt;(CleanPlanner.java:110) ~[hudi-client-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.table.action.clean.CleanPlanActionExecutor.requestClean(CleanPlanActionExecutor.java:107) ~[hudi-client-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.table.action.clean.CleanPlanActionExecutor.requestClean(CleanPlanActionExecutor.java:159) ~[hudi-client-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.table.action.clean.CleanPlanActionExecutor.execute(CleanPlanActionExecutor.java:185) ~[hudi-client-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.table.HoodieSparkCopyOnWriteTable.scheduleCleaning(HoodieSparkCopyOnWriteTable.java:217) ~[hudi-spark-client-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.client.BaseHoodieTableServiceClient.scheduleTableServiceInternal(BaseHoodieTableServiceClient.java:631) ~[hudi-client-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.client.BaseHoodieTableServiceClient.clean(BaseHoodieTableServiceClient.java:754) ~[hudi-client-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.client.BaseHoodieWriteClient.clean(BaseHoodieWriteClient.java:862) ~[hudi-client-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.client.BaseHoodieWriteClient.clean(BaseHoodieWriteClient.java:835) ~[hudi-client-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.client.BaseHoodieWriteClient.clean(BaseHoodieWriteClient.java:866) ~[hudi-client-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.client.BaseHoodieWriteClient.autoCleanOnCommit(BaseHoodieWriteClient.java:600) ~[hudi-client-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.client.BaseHoodieWriteClient.mayBeCleanAndArchive(BaseHoodieWriteClient.java:579) ~[hudi-client-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.client.BaseHoodieWriteClient.commitStats(BaseHoodieWriteClient.java:248) ~[hudi-client-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.client.SparkRDDWriteClient.commit(SparkRDDWriteClient.java:104) ~[hudi-spark-client-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.HoodieSparkSqlWriterInternal.commitAndPerformPostOperations(HoodieSparkSqlWriter.scala:965) ~[classes/:1.0.0-SNAPSHOT]
at org.apache.hudi.HoodieSparkSqlWriterInternal.writeInternal(HoodieSparkSqlWriter.scala:530) ~[classes/:1.0.0-SNAPSHOT]
at org.apache.hudi.HoodieSparkSqlWriterInternal.write(HoodieSparkSqlWriter.scala:203) ~[classes/:1.0.0-SNAPSHOT]
at org.apache.hudi.HoodieSparkSqlWriter$.write(HoodieSparkSqlWriter.scala:120) ~[classes/:1.0.0-SNAPSHOT]
at org.apache.hudi.DefaultSource.createRelation(DefaultSource.scala:145) ~[classes/:1.0.0-SNAPSHOT]
at org.apache.spark.sql.execution.datasources.SaveIntoDataSourceCommand.run(SaveIntoDataSourceCommand.scala:46) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:70) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:68) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.execution.command.ExecutedCommandExec.doExecute(commands.scala:90) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:180) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151) ~[spark-core_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:176) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:127) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:126) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.DataFrameWriter.$anonfun$runCommand$1(DataFrameWriter.scala:962) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:100) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:160) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:87) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:764) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.DataFrameWriter.runCommand(DataFrameWriter.scala:962) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.DataFrameWriter.saveToV1Source(DataFrameWriter.scala:414) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:398) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:287) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.hudi.functional.cdc.TestCDCStreamingSuite.$anonfun$cdcStreaming$2(TestCDCStreamingSuite.scala:156) ~[test-classes/:?]
at org.apache.hudi.functional.cdc.TestCDCStreamingSuite.$anonfun$cdcStreaming$2$adapted(TestCDCStreamingSuite.scala:123) ~[test-classes/:?]
at org.apache.spark.sql.execution.streaming.sources.ForeachBatchSink.addBatch(ForeachBatchSink.scala:36) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$runBatch$16(MicroBatchExecution.scala:583) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:100) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:160) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:87) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:764) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$runBatch$15(MicroBatchExecution.scala:581) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken(ProgressReporter.scala:352) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken$(ProgressReporter.scala:350) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.execution.streaming.StreamExecution.reportTimeTaken(StreamExecution.scala:69) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.execution.streaming.MicroBatchExecution.runBatch(MicroBatchExecution.scala:581) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$runActivatedStream$2(MicroBatchExecution.scala:223) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23) ~[scala-library-2.12.10.jar:?]
at org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken(ProgressReporter.scala:352) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken$(ProgressReporter.scala:350) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.execution.streaming.StreamExecution.reportTimeTaken(StreamExecution.scala:69) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$runActivatedStream$1(MicroBatchExecution.scala:191) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.execution.streaming.ProcessingTimeExecutor.execute(TriggerExecutor.scala:57) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.execution.streaming.MicroBatchExecution.runActivatedStream(MicroBatchExecution.scala:185) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.execution.streaming.StreamExecution.org$apache$spark$sql$execution$streaming$StreamExecution$$runStream(StreamExecution.scala:334) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
at org.apache.spark.sql.execution.streaming.StreamExecution$$anon$1.run(StreamExecution.scala:245) ~[spark-sql_2.12-3.0.2.jar:3.0.2]
Caused by: org.apache.http.client.HttpResponseException: Server Error
at org.apache.http.impl.client.AbstractResponseHandler.handleResponse(AbstractResponseHandler.java:69) ~[httpclient-4.4.1.jar:4.4.1]
at org.apache.http.client.fluent.Response.handleResponse(Response.java:90) ~[fluent-hc-4.4.1.jar:4.4.1]
at org.apache.http.client.fluent.Response.returnContent(Response.java:97) ~[fluent-hc-4.4.1.jar:4.4.1]
at org.apache.hudi.common.table.view.RemoteHoodieTableFileSystemView.executeRequest(RemoteHoodieTableFileSystemView.java:208) ~[hudi-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at org.apache.hudi.common.table.view.RemoteHoodieTableFileSystemView.loadAllPartitions(RemoteHoodieTableFileSystemView.java:532) ~[hudi-common-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
... 66 more
3387347 [stream execution thread for [id = 0499fd07-0abb-4e74-9d3a-685633ff49bc, runId = 754fdc56-cea1-43f4-9f8d-3d5bfe7bfe5c]] WARN org.apache.hudi.HoodieSparkSqlWriterInternal [] - Closing write client
3387351 [stream execution thread for [id = 0499fd07-0abb-4e74-9d3a-685633ff49bc, runId = 754fdc56-cea1-43f4-9f8d-3d5bfe7bfe5c]] ERROR org.apache.spark.sql.execution.streaming.MicroBatchExecution [] - Query [id = 0499fd07-0abb-4e74-9d3a-685633ff49bc, runId = 754fdc56-cea1-43f4-9f8d-3d5bfe7bfe5c] terminated with error {code}
JIRA info