Details
Description
I've seen this failure more than once, so let's record it here for now.
https://github.com/apache/spark/actions/runs/5875252243/job/15931264374
[info] - SPARK-35896: metrics in StateOperatorProgress are output correctly (RocksDBStateStore) *** FAILED *** (1 minute, 1 second)
[info]   Timed out waiting for stream: The code passed to failAfter did not complete within 60 seconds.
[info]   java.lang.Thread.getStackTrace(Thread.java:1564)
[info]   org.scalatest.concurrent.TimeLimits$.failAfterImpl(TimeLimits.scala:277)
[info]   org.scalatest.concurrent.TimeLimits.failAfter(TimeLimits.scala:231)
[info]   org.scalatest.concurrent.TimeLimits.failAfter$(TimeLimits.scala:230)
[info]   org.apache.spark.SparkFunSuite.failAfter(SparkFunSuite.scala:69)
[info]   org.apache.spark.sql.streaming.StreamTest.$anonfun$testStream$7(StreamTest.scala:481)
[info]   org.apache.spark.sql.streaming.StreamTest.$anonfun$testStream$7$adapted(StreamTest.scala:480)
[info]   scala.collection.mutable.HashMap.$anonfun$foreach$1(HashMap.scala:149)
[info]   scala.collection.mutable.HashTable.foreachEntry(HashTable.scala:237)
[info]   scala.collection.mutable.HashTable.foreachEntry$(HashTable.scala:230)
[info]
[info]   Caused by: null
[info]   java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.reportInterruptAfterWait(AbstractQueuedSynchronizer.java:2014)
[info]   java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2173)
[info]   org.apache.spark.sql.execution.streaming.StreamExecution.awaitOffset(StreamExecution.scala:481)
[info]   org.apache.spark.sql.streaming.StreamTest.$anonfun$testStream$8(StreamTest.scala:482)
[info]   scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
[info]   org.scalatest.enablers.Timed$$anon$1.timeoutAfter(Timed.scala:127)
[info]   org.scalatest.concurrent.TimeLimits$.failAfterImpl(TimeLimits.scala:282)
[info]   org.scalatest.concurrent.TimeLimits.failAfter(TimeLimits.scala:231)
[info]   org.scalatest.concurrent.TimeLimits.failAfter$(TimeLimits.scala:230)
[info]   org.apache.spark.SparkFunSuite.failAfter(SparkFunSuite.scala:69)
[info]
[info]
[info]   == Progress ==
[info]      StartStream(ProcessingTimeTrigger(0),org.apache.spark.util.SystemClock@432e877b,Map(spark.sql.shuffle.partitions -> 3),null)
[info]      AddData to MemoryStream[value#19338]: 3,2,1,3
[info]   => CheckLastBatch: [3,2],[2,1],[1,1]
[info]      AssertOnQuery(<condition>, Check total state rows = List(3), updated state rows = List(3), rows dropped by watermark = List(0), removed state rows = Some(List(0)))
[info]      AddData to MemoryStream[value#19338]: 1,4
[info]      CheckLastBatch: [3,2],[2,1],[1,2],[4,1]
[info]      AssertOnQuery(<condition>, Check operator progress metrics: operatorName = stateStoreSave, numShufflePartitions = 3, numStateStoreInstances = 3)
[info]
[info]   == Stream ==
[info]   Output Mode: Complete
[info]   Stream state: {}
[info]   Thread state: alive
[info]   Thread stack trace: sun.misc.Unsafe.park(Native Method)
[info]   java.util.concurrent.locks.LockSupport.park(LockSupport.java:175)
[info]   java.util.concurrent.locks.AbstractQueuedSynchronizer.parkAndCheckInterrupt(AbstractQueuedSynchronizer.java:836)
[info]   java.util.concurrent.locks.AbstractQueuedSynchronizer.doAcquireSharedInterruptibly(AbstractQueuedSynchronizer.java:997)
[info]   java.util.concurrent.locks.AbstractQueuedSynchronizer.acquireSharedInterruptibly(AbstractQueuedSynchronizer.java:1304)
[info]   scala.concurrent.impl.Promise$DefaultPromise.tryAwait(Promise.scala:242)
[info]   scala.concurrent.impl.Promise$DefaultPromise.ready(Promise.scala:258)
[info]   scala.concurrent.impl.Promise$DefaultPromise.ready(Promise.scala:187)
[info]   org.apache.spark.util.ThreadUtils$.awaitReady(ThreadUtils.scala:342)
[info]   org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:974)
[info]   org.apache.spark.SparkContext.runJob(SparkContext.scala:2405)
[info]   org.apache.spark.sql.execution.datasources.v2.V2TableWriteExec.writeWithV2(WriteToDataSourceV2Exec.scala:385)
[info]   org.apache.spark.sql.execution.datasources.v2.V2TableWriteExec.writeWithV2$(WriteToDataSourceV2Exec.scala:359)
[info]   org.apache.spark.sql.execution.datasources.v2.WriteToDataSourceV2Exec.writeWithV2(WriteToDataSourceV2Exec.scala:307)
[info]   org.apache.spark.sql.execution.datasources.v2.WriteToDataSourceV2Exec.run(WriteToDataSourceV2Exec.scala:318)
[info]   org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result$lzycompute(V2CommandExec.scala:43)
[info]   org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result(V2CommandExec.scala:43)
[info]   org.apache.spark.sql.execution.datasources.v2.V2CommandExec.executeCollect(V2CommandExec.scala:49)
[info]   org.apache.spark.sql.Dataset.collectFromPlan(Dataset.scala:4344)
[info]   org.apache.spark.sql.Dataset.$anonfun$collect$1(Dataset.scala:3585)
[info]   org.apache.spark.sql.Dataset$$Lambda$2415/1714895648.apply(Unknown Source)
[info]   org.apache.spark.sql.Dataset.$anonfun$withAction$2(Dataset.scala:4334)
[info]   org.apache.spark.sql.Dataset$$Lambda$2442/14653745.apply(Unknown Source)
[info]   org.apache.spark.sql.execution.QueryExecution$.withInternalError(QueryExecution.scala:546)
[info]   org.apache.spark.sql.Dataset.$anonfun$withAction$1(Dataset.scala:4332)
[info]   org.apache.spark.sql.Dataset$$Lambda$2416/619810288.apply(Unknown Source)
[info]   org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$6(SQLExecution.scala:132)
[info]   org.apache.spark.sql.execution.SQLExecution$$$Lambda$2366/419259595.apply(Unknown Source)
[info]   org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:209)
[info]   org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:115)
[info]   org.apache.spark.sql.execution.SQLExecution$$$Lambda$2353/1369125389.apply(Unknown Source)
[info]   org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:900)
[info]   org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:68)
[info]   org.apache.spark.sql.Dataset.withAction(Dataset.scala:4332)
[info]   org.apache.spark.sql.Dataset.collect(Dataset.scala:3585)
[info]   org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$runBatch$17(MicroBatchExecution.scala:741)
[info]   org.apache.spark.sql.execution.streaming.MicroBatchExecution$$Lambda$2352/335540119.apply(Unknown Source)
[info]   org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$6(SQLExecution.scala:132)
[info]   org.apache.spark.sql.execution.SQLExecution$$$Lambda$2366/419259595.apply(Unknown Source)
[info]   org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:209)
[info]   org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:115)
[info]   org.apache.spark.sql.execution.SQLExecution$$$Lambda$2353/1369125389.apply(Unknown Source)
[info]   org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:900)
[info]   org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:68)
[info]   org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$runBatch$16(MicroBatchExecution.scala:729)
[info]   org.apache.spark.sql.execution.streaming.MicroBatchExecution$$Lambda$2351/1752953868.apply(Unknown Source)
[info]   org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken(ProgressReporter.scala:405)
[info]   org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken$(ProgressReporter.scala:403)
[info]   org.apache.spark.sql.execution.streaming.StreamExecution.reportTimeTaken(StreamExecution.scala:67)
[info]   org.apache.spark.sql.execution.streaming.MicroBatchExecution.runBatch(MicroBatchExecution.scala:729)
[info]   org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$runActivatedStream$2(MicroBatchExecution.scala:286)
[info]   org.apache.spark.sql.execution.streaming.MicroBatchExecution$$Lambda$2053/1800928896.apply$mcV$sp(Unknown Source)
[info]   scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
[info]   org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken(ProgressReporter.scala:405)
06:49:14.167 WARN org.apache.spark.sql.execution.streaming.ResolveWriteToStream: spark.sql.adaptive.enabled is not supported in streaming DataFrames/Datasets and will be disabled.
[info]   org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken$(ProgressReporter.scala:403)
[info]   org.apache.spark.sql.execution.streaming.StreamExecution.reportTimeTaken(StreamExecution.scala:67)
[info]   org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$runActivatedStream$1(MicroBatchExecution.scala:249)
[info]   org.apache.spark.sql.execution.streaming.MicroBatchExecution$$Lambda$2050/1379075459.apply$mcZ$sp(Unknown Source)
[info]   org.apache.spark.sql.execution.streaming.ProcessingTimeExecutor.execute(TriggerExecutor.scala:67)
[info]   org.apache.spark.sql.execution.streaming.MicroBatchExecution.runActivatedStream(MicroBatchExecution.scala:239)
[info]   org.apache.spark.sql.execution.streaming.StreamExecution.$anonfun$runStream$1(StreamExecution.scala:311)
[info]   org.apache.spark.sql.execution.streaming.StreamExecution$$Lambda$2042/403495914.apply$mcV$sp(Unknown Source)
[info]   scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
[info]   org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:900)
[info]   org.apache.spark.sql.execution.streaming.StreamExecution.org$apache$spark$sql$execution$streaming$StreamExecution$$runStream(StreamExecution.scala:289)
[info]   org.apache.spark.sql.execution.streaming.StreamExecution$$anon$1.$anonfun$run$1(StreamExecution.scala:211)
[info]   org.apache.spark.sql.execution.streaming.StreamExecution$$anon$1$$Lambda$2038/346934547.apply$mcV$sp(Unknown Source)
[info]   scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
[info]   org.apache.spark.JobArtifactSet$.withActiveJobArtifactState(JobArtifactSet.scala:94)
[info]   org.apache.spark.sql.execution.streaming.StreamExecution$$anon$1.run(StreamExecution.scala:211)
[info]
[info]
[info]   == Sink ==
[info]
[info]
[info]
[info]   == Plan ==
[info]   == Parsed Logical Plan ==
[info]   WriteToMicroBatchDataSource MemorySink, 0214edd5-ddb7-4c00-befd-a225a61a800e, Complete, 0
[info]   +- Aggregate [value#19338], [value#19338, count(1) AS count(1)#19343L]
[info]      +- StreamingDataSourceV2Relation [value#19338], org.apache.spark.sql.execution.streaming.MemoryStreamScanBuilder@65f877f4, MemoryStream[value#19338], -1, 0
[info]
[info]   == Analyzed Logical Plan ==
[info]   WriteToMicroBatchDataSource MemorySink, 0214edd5-ddb7-4c00-befd-a225a61a800e, Complete, 0
[info]   +- Aggregate [value#19338], [value#19338, count(1) AS count(1)#19343L]
[info]      +- StreamingDataSourceV2Relation [value#19338], org.apache.spark.sql.execution.streaming.MemoryStreamScanBuilder@65f877f4, MemoryStream[value#19338], -1, 0
[info]
[info]   == Optimized Logical Plan ==
[info]   WriteToDataSourceV2 MicroBatchWrite[epoch: 0, writer: org.apache.spark.sql.execution.streaming.sources.MemoryStreamingWrite@26d87efa]
[info]   +- Aggregate [value#19338], [value#19338, count(1) AS count(1)#19343L]
[info]      +- StreamingDataSourceV2Relation [value#19338], org.apache.spark.sql.execution.streaming.MemoryStreamScanBuilder@65f877f4, MemoryStream[value#19338], -1, 0
[info]
[info]   == Physical Plan ==
[info]   WriteToDataSourceV2 MicroBatchWrite[epoch: 0, writer: org.apache.spark.sql.execution.streaming.sources.MemoryStreamingWrite@26d87efa], org.apache.spark.sql.execution.datasources.v2.DataSourceV2Strategy$$Lambda$2290/663905427@7505e54c
[info]   +- *(4) HashAggregate(keys=[value#19338], functions=[count(1)], output=[value#19338, count(1)#19343L])
[info]      +- StateStoreSave [value#19338], state info [ checkpoint = file:/home/runner/work/spark/spark/target/tmp/streaming.metadata-9b48d443-954d-4338-85cf-fdf0d8ccb029/state, runId = 61c94e63-d753-41bf-b5b3-cdbe2d844d26, opId = 0, ver = 0, numPartitions = 3], Complete, 0, 0, 2
[info]         +- *(3) HashAggregate(keys=[value#19338], functions=[merge_count(1)], output=[value#19338, count#19372L])
[info]            +- StateStoreRestore [value#19338], state info [ checkpoint = file:/home/runner/work/spark/spark/target/tmp/streaming.metadata-9b48d443-954d-4338-85cf-fdf0d8ccb029/state, runId = 61c94e63-d753-41bf-b5b3-cdbe2d844d26, opId = 0, ver = 0, numPartitions = 3], 2
[info]               +- *(2) HashAggregate(keys=[value#19338], functions=[merge_count(1)], output=[value#19338, count#19372L])
[info]                  +- Exchange hashpartitioning(value#19338, 3), ENSURE_REQUIREMENTS, [plan_id=89400]
[info]                     +- *(1) HashAggregate(keys=[value#19338], functions=[partial_count(1)], output=[value#19338, count#19372L])
[info]                        +- *(1) Project [value#19338]
[info]                           +- MicroBatchScan[value#19338] MemoryStreamDataSource (StreamTest.scala:462)
[info]   org.scalatest.exceptions.TestFailedException:
[info]   at org.scalatest.Assertions.newAssertionFailedException(Assertions.scala:472)
[info]   at org.scalatest.Assertions.newAssertionFailedException$(Assertions.scala:471)
[info]   at org.scalatest.funsuite.AnyFunSuite.newAssertionFailedException(AnyFunSuite.scala:1564)
[info]   at org.scalatest.Assertions.fail(Assertions.scala:933)
[info]   at org.scalatest.Assertions.fail$(Assertions.scala:929)
[info]   at org.scalatest.funsuite.AnyFunSuite.fail(AnyFunSuite.scala:1564)
[info]   at org.apache.spark.sql.streaming.StreamTest.failTest$1(StreamTest.scala:462)
[info]   at org.apache.spark.sql.streaming.StreamTest.liftedTree1$1(StreamTest.scala:800)
[info]   at org.apache.spark.sql.streaming.StreamTest.testStream(StreamTest.scala:776)
[info]   at org.apache.spark.sql.streaming.StreamTest.testStream$(StreamTest.scala:342)
[info]   at org.apache.spark.sql.streaming.StreamingAggregationSuite.testStream(StreamingAggregationSuite.scala:54)
[info]   at org.apache.spark.sql.streaming.StreamingAggregationSuite.$anonfun$new$86(StreamingAggregationSuite.scala:849)
[info]   at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
[info]   at org.apache.spark.sql.streaming.RocksDBStateStoreTest.$anonfun$test$2(RocksDBStateStoreTest.scala:39)
[info]   at org.apache.spark.sql.catalyst.plans.SQLHelper.withSQLConf(SQLHelper.scala:54)
[info]   at org.apache.spark.sql.catalyst.plans.SQLHelper.withSQLConf$(SQLHelper.scala:38)
[info]   at org.apache.spark.sql.streaming.StreamingAggregationSuite.org$apache$spark$sql$test$SQLTestUtilsBase$$super$withSQLConf(StreamingAggregationSuite.scala:54)
[info]   at org.apache.spark.sql.test.SQLTestUtilsBase.withSQLConf(SQLTestUtils.scala:247)
[info]   at org.apache.spark.sql.test.SQLTestUtilsBase.withSQLConf$(SQLTestUtils.scala:245)
[info]   at org.apache.spark.sql.streaming.StreamingAggregationSuite.withSQLConf(StreamingAggregationSuite.scala:54)
[info]   at org.apache.spark.sql.streaming.RocksDBStateStoreTest.$anonfun$test$1(RocksDBStateStoreTest.scala:39)
[info]   at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
[info]   at org.scalatest.enablers.Timed$$anon$1.timeoutAfter(Timed.scala:127)
[info]   at org.scalatest.concurrent.TimeLimits$.failAfterImpl(TimeLimits.scala:282)
[info]   at org.scalatest.concurrent.TimeLimits.failAfter(TimeLimits.scala:231)
[info]   at org.scalatest.concurrent.TimeLimits.failAfter$(TimeLimits.scala:230)
[info]   at org.apache.spark.SparkFunSuite.failAfter(SparkFunSuite.scala:69)
[info]   at org.apache.spark.SparkFunSuite.$anonfun$test$2(SparkFunSuite.scala:155)
[info]   at org.scalatest.OutcomeOf.outcomeOf(OutcomeOf.scala:85)
[info]   at org.scalatest.OutcomeOf.outcomeOf$(OutcomeOf.scala:83)
[info]   at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
[info]   at org.scalatest.Transformer.apply(Transformer.scala:22)
[info]   at org.scalatest.Transformer.apply(Transformer.scala:20)
[info]   at org.scalatest.funsuite.AnyFunSuiteLike$$anon$1.apply(AnyFunSuiteLike.scala:226)
[info]   at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:227)
[info]   at org.scalatest.funsuite.AnyFunSuiteLike.invokeWithFixture$1(AnyFunSuiteLike.scala:224)
[info]   at org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$runTest$1(AnyFunSuiteLike.scala:236)
[info]   at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
[info]   at org.scalatest.funsuite.AnyFunSuiteLike.runTest(AnyFunSuiteLike.scala:236)
[info]   at org.scalatest.funsuite.AnyFunSuiteLike.runTest$(AnyFunSuiteLike.scala:218)
[info]   at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(SparkFunSuite.scala:69)
[info]   at org.scalatest.BeforeAndAfterEach.runTest(BeforeAndAfterEach.scala:234)
[info]   at org.scalatest.BeforeAndAfterEach.runTest$(BeforeAndAfterEach.scala:227)
[info]   at org.apache.spark.SparkFunSuite.runTest(SparkFunSuite.scala:69)
[info]   at org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$runTests$1(AnyFunSuiteLike.scala:269)
[info]   at org.scalatest.SuperEngine.$anonfun$runTestsInBranch$1(Engine.scala:413)
[info]   at scala.collection.immutable.List.foreach(List.scala:431)
[info]   at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
[info]   at org.scalatest.SuperEngine.runTestsInBranch(Engine.scala:396)
[info]   at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:475)
[info]   at org.scalatest.funsuite.AnyFunSuiteLike.runTests(AnyFunSuiteLike.scala:269)
[info]   at org.scalatest.funsuite.AnyFunSuiteLike.runTests$(AnyFunSuiteLike.scala:268)
[info]   at org.scalatest.funsuite.AnyFunSuite.runTests(AnyFunSuite.scala:1564)
[info]   at org.scalatest.Suite.run(Suite.scala:1114)
[info]   at org.scalatest.Suite.run$(Suite.scala:1096)
[info]   at org.scalatest.funsuite.AnyFunSuite.org$scalatest$funsuite$AnyFunSuiteLike$$super$run(AnyFunSuite.scala:1564)
[info]   at org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$run$1(AnyFunSuiteLike.scala:273)
[info]   at org.scalatest.SuperEngine.runImpl(Engine.scala:535)
[info]   at org.scalatest.funsuite.AnyFunSuiteLike.run(AnyFunSuiteLike.scala:273)
[info]   at org.scalatest.funsuite.AnyFunSuiteLike.run$(AnyFunSuiteLike.scala:272)
[info]   at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:69)
[info]   at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:213)
[info]   at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
[info]   at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
[info]   at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:69)
[info]   at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:321)
[info]   at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:517)
[info]   at sbt.ForkMain$Run.lambda$runTest$1(ForkMain.java:414)
[info]   at java.util.concurrent.FutureTask.run(FutureTask.java:266)
[info]   at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
[info]   at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
[info]   at java.lang.Thread.run(Thread.java:750)
06:50:14.232 WARN org.apache.spark.sql.execution.streaming.state.RocksDB StateStoreId(opId=0,partId=0,name=default): Error closing RocksDB
org.apache.spark.SparkException: [CANNOT_LOAD_STATE_STORE.UNRELEASED_THREAD_ERROR] An error occurred during loading state. StateStoreId(opId=0,partId=0,name=default): RocksDB instance could not be acquired by [ThreadId: Some(2023)] as it was not released by [ThreadId: Some(2020), task: partition 0.0 in stage 541.0, TID 1401] after 120010 ms.
Thread holding the lock has trace: org.apache.spark.sql.execution.streaming.state.StateStore$.getStateStoreProvider(StateStore.scala:555)
org.apache.spark.sql.execution.streaming.state.StateStore$.get(StateStore.scala:544)
org.apache.spark.sql.execution.streaming.state.StateStoreRDD.compute(StateStoreRDD.scala:126)
org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:364)
org.apache.spark.rdd.RDD.iterator(RDD.scala:328)
org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:364)
org.apache.spark.rdd.RDD.iterator(RDD.scala:328)
org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:93)
org.apache.spark.TaskContext.runTaskWithListeners(TaskContext.scala:161)
org.apache.spark.scheduler.Task.run(Task.scala:141)
org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$4(Executor.scala:620)
org.apache.spark.executor.Executor$TaskRunner$$Lambda$2925/1900945640.apply(Unknown Source)
org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally(SparkErrorUtils.scala:64)
org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally$(SparkErrorUtils.scala:61)
org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:95)
org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:623)
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
java.lang.Thread.run(Thread.java:750)
	at org.apache.spark.sql.errors.QueryExecutionErrors$.unreleasedThreadError(QueryExecutionErrors.scala:2637)
	at org.apache.spark.sql.execution.streaming.state.RocksDB.acquire(RocksDB.scala:562)
	at org.apache.spark.sql.execution.streaming.state.RocksDB.close(RocksDB.scala:467)
	at org.apache.spark.sql.execution.streaming.state.RocksDBStateStoreProvider.close(RocksDBStateStoreProvider.scala:222)
	at org.apache.spark.sql.execution.streaming.state.StateStore$.$anonfun$unload$1(StateStore.scala:606)
	at org.apache.spark.sql.execution.streaming.state.StateStore$.$anonfun$unload$1$adapted(StateStore.scala:606)
	at scala.Option.foreach(Option.scala:407)
	at org.apache.spark.sql.execution.streaming.state.StateStore$.unload(StateStore.scala:606)
	at org.apache.spark.sql.execution.streaming.state.StateStore$.$anonfun$startMaintenanceIfNeeded$4(StateStore.scala:663)
	at org.apache.spark.sql.execution.streaming.state.StateStore$.$anonfun$startMaintenanceIfNeeded$4$adapted(StateStore.scala:663)
	at scala.collection.mutable.HashMap$$anon$1.$anonfun$foreach$2(HashMap.scala:153)
	at scala.collection.mutable.HashTable.foreachEntry(HashTable.scala:237)
	at scala.collection.mutable.HashTable.foreachEntry$(HashTable.scala:230)
	at scala.collection.mutable.HashMap.foreachEntry(HashMap.scala:44)
	at scala.collection.mutable.HashMap$$anon$1.foreach(HashMap.scala:153)
	at org.apache.spark.sql.execution.streaming.state.StateStore$.$anonfun$startMaintenanceIfNeeded$2(StateStore.scala:663)
	at org.apache.spark.sql.execution.streaming.state.StateStore$MaintenanceTask$$anon$1.run(StateStore.scala:471)
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
	at java.util.concurrent.FutureTask.runAndReset(FutureTask.java:308)
	at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$301(ScheduledThreadPoolExecutor.java:180)
	at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:294)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:750)
06:50:14.253 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 2.0 in stage 541.0 (TID 1403) (localhost executor driver): TaskKilled (Stage cancelled: Job 274 cancelled part of cancelled job group 61c94e63-d753-41bf-b5b3-cdbe2d844d26)
06:50:14.268 WARN org.apache.spark.scheduler.TaskSetManager: Lost task 0.0 in stage 541.0 (TID 1401) (localhost executor driver): TaskKilled (Stage cancelled: Job 274 cancelled part of cancelled job group 61c94e63-d753-41bf-b5b3-cdbe2d844d26)
06:50:14.294 ERROR org.apache.spark.sql.execution.datasources.v2.WriteToDataSourceV2Exec: Data source write support MicroBatchWrite[epoch: 0, writer: org.apache.spark.sql.execution.streaming.sources.MemoryStreamingWrite@77a1d641] is aborting.
06:50:14.294 ERROR org.apache.spark.sql.execution.datasources.v2.WriteToDataSourceV2Exec: Data source write support MicroBatchWrite[epoch: 0, writer: org.apache.spark.sql.execution.streaming.sources.MemoryStreamingWrite@77a1d641] aborted.
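For context, the failing test drives a streaming aggregation over a MemoryStream with the RocksDB state store provider and then checks the StateOperatorProgress metrics added in SPARK-35896. The following is only a rough standalone sketch of that shape, not the suite's actual code; the object name, master/app settings, and query name are illustrative assumptions.

// Minimal sketch (assumptions: local[2] master, illustrative names) of the scenario
// covered by the failing test: a groupBy/count over a MemoryStream, RocksDB-backed
// state, and a read of StateOperatorProgress from the query's last progress.
import org.apache.spark.sql.{SQLContext, SparkSession}
import org.apache.spark.sql.execution.streaming.MemoryStream
import org.apache.spark.sql.streaming.{OutputMode, Trigger}

object StateOperatorProgressSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[2]")
      .appName("StateOperatorProgressSketch")
      .config("spark.sql.shuffle.partitions", "3")
      .config("spark.sql.streaming.stateStore.providerClass",
        "org.apache.spark.sql.execution.streaming.state.RocksDBStateStoreProvider")
      .getOrCreate()
    import spark.implicits._
    implicit val sqlContext: SQLContext = spark.sqlContext

    // Same shape as the test's == Progress ==: count values arriving on a MemoryStream.
    val input = MemoryStream[Int]
    val counts = input.toDF().groupBy("value").count()

    val query = counts.writeStream
      .format("memory")
      .queryName("agg")
      .outputMode(OutputMode.Complete())
      .trigger(Trigger.ProcessingTime(0))
      .start()

    input.addData(3, 2, 1, 3)
    query.processAllAvailable()

    // The metrics the test asserts on live in StateOperatorProgress.
    val op = query.lastProgress.stateOperators(0)
    println(s"operatorName=${op.operatorName}, " +
      s"numShufflePartitions=${op.numShufflePartitions}, " +
      s"numStateStoreInstances=${op.numStateStoreInstances}, " +
      s"numRowsTotal=${op.numRowsTotal}, numRowsUpdated=${op.numRowsUpdated}")

    query.stop()
    spark.stop()
  }
}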
Attachments
Issue Links
- links to