chunked], null=[HTTP/1.1 404 The specified blob does not exist.], x-ms-version=[2017-07-29], Server=[Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0], x-ms-error-code=[BlobNotFound], x-ms-request-id=[f2d06f41-601e-002a-13db-fe258b000000], Date=[Fri, 20 Mar 2020 17:18:00 GMT]} 20/03/20 17:18:01 INFO AzureNativeFileSystemStore: WASB request HEAD [https://dbstorageixbipxu7r3yby.blob.core.windows.net/root/1207841053205226/local_disk0/tmp/temporary-42032832-6196-4cf9-8873-077a4124fcd4/sources/1?sig=REDACTED_AZURE_SAS_SIGNATURE&api-version=2017-07-29&st=2020-03-20T16%3A35%3A58Z&se=2020-03-21T12%3A35%3A58Z&sv=2017-07-29&spr=https&sp=racwdl&sr=c] failed; status: 404, msg: The specified blob does not exist., request date: Fri, 20 Mar 2020 17:18:00 GMT, start date: Fri Mar 20 17:18:01 UTC 2020, stop date: Fri Mar 20 17:18:01 UTC 2020, service request id: f2d06f48-601e-002a-1adb-fe258b000000, etag: null, md5: null, target: PRIMARY, headers: {Transfer-Encoding=[chunked], null=[HTTP/1.1 404 The specified blob does not exist.], x-ms-version=[2017-07-29], Server=[Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0], x-ms-error-code=[BlobNotFound], x-ms-request-id=[f2d06f48-601e-002a-1adb-fe258b000000], Date=[Fri, 20 Mar 2020 17:18:00 GMT]} 20/03/20 17:18:01 INFO AzureNativeFileSystemStore: WASB request HEAD [https://dbstorageixbipxu7r3yby.blob.core.windows.net/root/1207841053205226/local_disk0/tmp/temporary-42032832-6196-4cf9-8873-077a4124fcd4/sources/1/0?sig=REDACTED_AZURE_SAS_SIGNATURE&api-version=2017-07-29&st=2020-03-20T16%3A35%3A58Z&se=2020-03-21T12%3A35%3A58Z&sv=2017-07-29&spr=https&sp=racwdl&sr=c] failed; status: 404, msg: The specified blob does not exist., request date: Fri, 20 Mar 2020 17:18:00 GMT, start date: Fri Mar 20 17:18:01 UTC 2020, stop date: Fri Mar 20 17:18:01 UTC 2020, service request id: f2d06f76-601e-002a-42db-fe258b000000, etag: null, md5: null, target: PRIMARY, headers: {Transfer-Encoding=[chunked], null=[HTTP/1.1 404 The specified blob does not exist.], x-ms-version=[2017-07-29], Server=[Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0], x-ms-error-code=[BlobNotFound], x-ms-request-id=[f2d06f76-601e-002a-42db-fe258b000000], Date=[Fri, 20 Mar 2020 17:18:00 GMT]} 20/03/20 17:18:01 INFO EventHubsClient: translate: useStart is set to true. 
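Annotation: the translate entries that follow show how the Event Hubs connector resolves the stream's starting point for this first batch: useStart is true, no per-partition positions are configured, and the default position EventPosition(-1,-1,null,true) is applied to every partition of taxi-fare-eh, i.e. read from the start of the stream. A minimal sketch of a source configured that way with azure-eventhubs-spark 2.3.x (the connection string below is a placeholder, not a value from this job):

    import org.apache.spark.eventhubs.{ConnectionStringBuilder, EventHubsConf, EventPosition}

    // Placeholder connection string; the real value should come from a secret scope.
    val connectionString = ConnectionStringBuilder(
        "Endpoint=sb://<namespace>.servicebus.windows.net/;" +
        "SharedAccessKeyName=<policy>;SharedAccessKey=<key>;EntityPath=taxi-fare-eh").build

    // fromStartOfStream is what the log prints as EventPosition(-1,-1,null,true):
    // every partition begins at sequence number 0 on the first micro-batch.
    val ehConf = EventHubsConf(connectionString)
      .setStartingPosition(EventPosition.fromStartOfStream)

    val fares = spark.readStream      // 'spark' is the active SparkSession
      .format("eventhubs")
      .options(ehConf.toMap)
      .load()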
20/03/20 17:18:01 INFO EventHubsClient: translate: PerPartitionPositions = ParMap() 20/03/20 17:18:01 INFO EventHubsClient: translate: Default position = EventPosition(-1,-1,null,true) 20/03/20 17:18:01 INFO EventHubsClient: translate: needsTranslation = ArrayBuffer(({"ehName":"taxi-fare-eh","partitionId":2},EventPosition(-1,-1,null,true)), ({"ehName":"taxi-fare-eh","partitionId":1},EventPosition(-1,-1,null,true)), ({"ehName":"taxi-fare-eh","partitionId":3},EventPosition(-1,-1,null,true)), ({"ehName":"taxi-fare-eh","partitionId":5},EventPosition(-1,-1,null,true)), ({"ehName":"taxi-fare-eh","partitionId":6},EventPosition(-1,-1,null,true)), ({"ehName":"taxi-fare-eh","partitionId":7},EventPosition(-1,-1,null,true)), ({"ehName":"taxi-fare-eh","partitionId":0},EventPosition(-1,-1,null,true)), ({"ehName":"taxi-fare-eh","partitionId":4},EventPosition(-1,-1,null,true))) 20/03/20 17:18:01 INFO AzureNativeFileSystemStore: WASB request HEAD [https://dbstorageixbipxu7r3yby.blob.core.windows.net/root/1207841053205226/local_disk0/tmp/temporary-42032832-6196-4cf9-8873-077a4124fcd4/sources/1/0?sig=REDACTED_AZURE_SAS_SIGNATURE&api-version=2017-07-29&st=2020-03-20T16%3A35%3A58Z&se=2020-03-21T12%3A35%3A58Z&sv=2017-07-29&spr=https&sp=racwdl&sr=c] failed; status: 404, msg: The specified blob does not exist., request date: Fri, 20 Mar 2020 17:18:00 GMT, start date: Fri Mar 20 17:18:01 UTC 2020, stop date: Fri Mar 20 17:18:01 UTC 2020, service request id: f2d06f7b-601e-002a-45db-fe258b000000, etag: null, md5: null, target: PRIMARY, headers: {Transfer-Encoding=[chunked], null=[HTTP/1.1 404 The specified blob does not exist.], x-ms-version=[2017-07-29], Server=[Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0], x-ms-error-code=[BlobNotFound], x-ms-request-id=[f2d06f7b-601e-002a-45db-fe258b000000], Date=[Fri, 20 Mar 2020 17:18:00 GMT]} 20/03/20 17:18:01 INFO NativeAzureFileSystem: FS_OP_CREATE FILE[/1207841053205226/local_disk0/tmp/temporary-42032832-6196-4cf9-8873-077a4124fcd4/sources/1/.0.b30e2560-6cbe-40ad-982b-d8a114fcce6b.tmp] Creating output stream; permission: rw-r--r--, overwrite: true, bufferSize: 65536 20/03/20 17:18:01 INFO AzureNativeFileSystemStore: WASB request HEAD [https://dbstorageixbipxu7r3yby.blob.core.windows.net/root/1207841053205226/local_disk0/tmp/temporary-42032832-6196-4cf9-8873-077a4124fcd4/sources/1/.0.b30e2560-6cbe-40ad-982b-d8a114fcce6b.tmp?sig=REDACTED_AZURE_SAS_SIGNATURE&api-version=2017-07-29&st=2020-03-20T16%3A35%3A58Z&se=2020-03-21T12%3A35%3A58Z&sv=2017-07-29&spr=https&sp=racwdl&sr=c] failed; status: 404, msg: The specified blob does not exist., request date: Fri, 20 Mar 2020 17:18:00 GMT, start date: Fri Mar 20 17:18:01 UTC 2020, stop date: Fri Mar 20 17:18:01 UTC 2020, service request id: f2d06f80-601e-002a-4adb-fe258b000000, etag: null, md5: null, target: PRIMARY, headers: {Transfer-Encoding=[chunked], null=[HTTP/1.1 404 The specified blob does not exist.], x-ms-version=[2017-07-29], Server=[Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0], x-ms-error-code=[BlobNotFound], x-ms-request-id=[f2d06f80-601e-002a-4adb-fe258b000000], Date=[Fri, 20 Mar 2020 17:18:00 GMT]} 20/03/20 17:18:01 INFO AzureCheckpointFileManager: Writing atomically to dbfs:/local_disk0/tmp/temporary-42032832-6196-4cf9-8873-077a4124fcd4/sources/1/0 using temp file dbfs:/local_disk0/tmp/temporary-42032832-6196-4cf9-8873-077a4124fcd4/sources/1/.0.b30e2560-6cbe-40ad-982b-d8a114fcce6b.tmp 20/03/20 17:18:01 INFO NativeAzureFileSystem: FS_OP_CREATE 
FILE[1207841053205226/local_disk0/tmp/temporary-42032832-6196-4cf9-8873-077a4124fcd4/sources/1/.0.b30e2560-6cbe-40ad-982b-d8a114fcce6b.tmp] Closing stream; size: 70 20/03/20 17:18:01 INFO NativeAzureFileSystem: FS_OP_CREATE FILE[1207841053205226/local_disk0/tmp/temporary-42032832-6196-4cf9-8873-077a4124fcd4/sources/1/.0.b30e2560-6cbe-40ad-982b-d8a114fcce6b.tmp] Upload complete; size: 70 20/03/20 17:18:01 INFO NativeAzureFileSystem: FS_OP_RENAME [/1207841053205226/local_disk0/tmp/temporary-42032832-6196-4cf9-8873-077a4124fcd4/sources/1/.0.b30e2560-6cbe-40ad-982b-d8a114fcce6b.tmp] to [/1207841053205226/local_disk0/tmp/temporary-42032832-6196-4cf9-8873-077a4124fcd4/sources/1/0] Starting rename. Issuing rename operation. 20/03/20 17:18:01 INFO AzureNativeFileSystemStore: WASB request HEAD [https://dbstorageixbipxu7r3yby.blob.core.windows.net/root/1207841053205226/local_disk0/tmp/temporary-42032832-6196-4cf9-8873-077a4124fcd4/sources/1/0?sig=REDACTED_AZURE_SAS_SIGNATURE&api-version=2017-07-29&st=2020-03-20T16%3A35%3A58Z&se=2020-03-21T12%3A35%3A58Z&sv=2017-07-29&spr=https&sp=racwdl&sr=c] failed; status: 404, msg: The specified blob does not exist., request date: Fri, 20 Mar 2020 17:18:00 GMT, start date: Fri Mar 20 17:18:01 UTC 2020, stop date: Fri Mar 20 17:18:01 UTC 2020, service request id: f2d06fae-601e-002a-73db-fe258b000000, etag: null, md5: null, target: PRIMARY, headers: {Transfer-Encoding=[chunked], null=[HTTP/1.1 404 The specified blob does not exist.], x-ms-version=[2017-07-29], Server=[Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0], x-ms-error-code=[BlobNotFound], x-ms-request-id=[f2d06fae-601e-002a-73db-fe258b000000], Date=[Fri, 20 Mar 2020 17:18:00 GMT]} 20/03/20 17:18:01 INFO NativeAzureFileSystem: FS_OP_RENAME [/1207841053205226/local_disk0/tmp/temporary-42032832-6196-4cf9-8873-077a4124fcd4/sources/1/.0.b30e2560-6cbe-40ad-982b-d8a114fcce6b.tmp] to [/1207841053205226/local_disk0/tmp/temporary-42032832-6196-4cf9-8873-077a4124fcd4/sources/1/0] Rename successful. 
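Annotation: the sequence above (create a hidden .tmp blob, upload it, then rename it to the final name) is how AzureCheckpointFileManager makes checkpoint writes atomic on WASB; the preceding HEAD requests that fail with 404 BlobNotFound are the driver checking whether the destination exists before it writes, not failures of the job. A rough sketch of the same create-then-rename pattern using the Hadoop FileSystem API (paths and payload are illustrative, not the exact checkpoint contents):

    import org.apache.hadoop.fs.{FileSystem, Path}

    val fs   = FileSystem.get(spark.sparkContext.hadoopConfiguration)
    val tmp  = new Path("dbfs:/checkpoint/sources/1/.0.tmp")   // hidden temp file
    val dest = new Path("dbfs:/checkpoint/sources/1/0")        // final checkpoint file

    val out = fs.create(tmp, true)                      // overwrite any stale temp file
    out.write("placeholder offsets".getBytes("UTF-8"))  // stand-in for the serialized offsets
    out.close()
    fs.rename(tmp, dest)   // readers only ever observe a fully written checkpoint file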
20/03/20 17:18:01 INFO AzureCheckpointFileManager: Renamed temp file dbfs:/local_disk0/tmp/temporary-42032832-6196-4cf9-8873-077a4124fcd4/sources/1/.0.b30e2560-6cbe-40ad-982b-d8a114fcce6b.tmp to dbfs:/local_disk0/tmp/temporary-42032832-6196-4cf9-8873-077a4124fcd4/sources/1/0 20/03/20 17:18:01 INFO EventHubsSource: Initial sequence numbers: Map({"ehName":"taxi-fare-eh","partitionId":2} -> 0, {"ehName":"taxi-fare-eh","partitionId":0} -> 0, {"ehName":"taxi-fare-eh","partitionId":6} -> 0, {"ehName":"taxi-fare-eh","partitionId":7} -> 0, {"ehName":"taxi-fare-eh","partitionId":4} -> 0, {"ehName":"taxi-fare-eh","partitionId":1} -> 0, {"ehName":"taxi-fare-eh","partitionId":3} -> 0, {"ehName":"taxi-fare-eh","partitionId":5} -> 0) 20/03/20 17:18:01 INFO AzureNativeFileSystemStore: WASB request HEAD [https://dbstorageixbipxu7r3yby.blob.core.windows.net/root/1207841053205226/local_disk0/tmp/temporary-42032832-6196-4cf9-8873-077a4124fcd4/offsets/0?sig=REDACTED_AZURE_SAS_SIGNATURE&api-version=2017-07-29&st=2020-03-20T16%3A35%3A58Z&se=2020-03-21T12%3A35%3A58Z&sv=2017-07-29&spr=https&sp=racwdl&sr=c] failed; status: 404, msg: The specified blob does not exist., request date: Fri, 20 Mar 2020 17:18:01 GMT, start date: Fri Mar 20 17:18:01 UTC 2020, stop date: Fri Mar 20 17:18:01 UTC 2020, service request id: f2d06fff-601e-002a-33db-fe258b000000, etag: null, md5: null, target: PRIMARY, headers: {Transfer-Encoding=[chunked], null=[HTTP/1.1 404 The specified blob does not exist.], x-ms-version=[2017-07-29], Server=[Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0], x-ms-error-code=[BlobNotFound], x-ms-request-id=[f2d06fff-601e-002a-33db-fe258b000000], Date=[Fri, 20 Mar 2020 17:18:01 GMT]} 20/03/20 17:18:01 INFO NativeAzureFileSystem: FS_OP_CREATE FILE[/1207841053205226/local_disk0/tmp/temporary-42032832-6196-4cf9-8873-077a4124fcd4/offsets/.0.0ddc55e0-db6d-45b6-ae68-075b7a6baf4c.tmp] Creating output stream; permission: rw-r--r--, overwrite: true, bufferSize: 65536 20/03/20 17:18:01 INFO AzureNativeFileSystemStore: WASB request HEAD [https://dbstorageixbipxu7r3yby.blob.core.windows.net/root/1207841053205226/local_disk0/tmp/temporary-42032832-6196-4cf9-8873-077a4124fcd4/offsets/.0.0ddc55e0-db6d-45b6-ae68-075b7a6baf4c.tmp?sig=REDACTED_AZURE_SAS_SIGNATURE&api-version=2017-07-29&st=2020-03-20T16%3A35%3A58Z&se=2020-03-21T12%3A35%3A58Z&sv=2017-07-29&spr=https&sp=racwdl&sr=c] failed; status: 404, msg: The specified blob does not exist., request date: Fri, 20 Mar 2020 17:18:01 GMT, start date: Fri Mar 20 17:18:01 UTC 2020, stop date: Fri Mar 20 17:18:01 UTC 2020, service request id: f2d07007-601e-002a-3bdb-fe258b000000, etag: null, md5: null, target: PRIMARY, headers: {Transfer-Encoding=[chunked], null=[HTTP/1.1 404 The specified blob does not exist.], x-ms-version=[2017-07-29], Server=[Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0], x-ms-error-code=[BlobNotFound], x-ms-request-id=[f2d07007-601e-002a-3bdb-fe258b000000], Date=[Fri, 20 Mar 2020 17:18:01 GMT]} 20/03/20 17:18:01 INFO AzureCheckpointFileManager: Writing atomically to dbfs:/local_disk0/tmp/temporary-42032832-6196-4cf9-8873-077a4124fcd4/offsets/0 using temp file dbfs:/local_disk0/tmp/temporary-42032832-6196-4cf9-8873-077a4124fcd4/offsets/.0.0ddc55e0-db6d-45b6-ae68-075b7a6baf4c.tmp 20/03/20 17:18:01 INFO NativeAzureFileSystem: FS_OP_CREATE FILE[1207841053205226/local_disk0/tmp/temporary-42032832-6196-4cf9-8873-077a4124fcd4/offsets/.0.0ddc55e0-db6d-45b6-ae68-075b7a6baf4c.tmp] Closing stream; size: 580 20/03/20 17:18:02 INFO NativeAzureFileSystem: FS_OP_CREATE 
FILE[1207841053205226/local_disk0/tmp/temporary-42032832-6196-4cf9-8873-077a4124fcd4/offsets/.0.0ddc55e0-db6d-45b6-ae68-075b7a6baf4c.tmp] Upload complete; size: 580
20/03/20 17:18:02 INFO NativeAzureFileSystem: FS_OP_RENAME [/1207841053205226/local_disk0/tmp/temporary-42032832-6196-4cf9-8873-077a4124fcd4/offsets/.0.0ddc55e0-db6d-45b6-ae68-075b7a6baf4c.tmp] to [/1207841053205226/local_disk0/tmp/temporary-42032832-6196-4cf9-8873-077a4124fcd4/offsets/0] Starting rename. Issuing rename operation.
20/03/20 17:18:02 INFO AzureNativeFileSystemStore: WASB request HEAD [https://dbstorageixbipxu7r3yby.blob.core.windows.net/root/1207841053205226/local_disk0/tmp/temporary-42032832-6196-4cf9-8873-077a4124fcd4/offsets/0?sig=REDACTED_AZURE_SAS_SIGNATURE&api-version=2017-07-29&st=2020-03-20T16%3A35%3A58Z&se=2020-03-21T12%3A35%3A58Z&sv=2017-07-29&spr=https&sp=racwdl&sr=c] failed; status: 404, msg: The specified blob does not exist., request date: Fri, 20 Mar 2020 17:18:01 GMT, start date: Fri Mar 20 17:18:02 UTC 2020, stop date: Fri Mar 20 17:18:02 UTC 2020, service request id: f2d07039-601e-002a-67db-fe258b000000, etag: null, md5: null, target: PRIMARY, headers: {Transfer-Encoding=[chunked], null=[HTTP/1.1 404 The specified blob does not exist.], x-ms-version=[2017-07-29], Server=[Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0], x-ms-error-code=[BlobNotFound], x-ms-request-id=[f2d07039-601e-002a-67db-fe258b000000], Date=[Fri, 20 Mar 2020 17:18:01 GMT]}
20/03/20 17:18:02 INFO NativeAzureFileSystem: FS_OP_RENAME [/1207841053205226/local_disk0/tmp/temporary-42032832-6196-4cf9-8873-077a4124fcd4/offsets/.0.0ddc55e0-db6d-45b6-ae68-075b7a6baf4c.tmp] to [/1207841053205226/local_disk0/tmp/temporary-42032832-6196-4cf9-8873-077a4124fcd4/offsets/0] Rename successful.
20/03/20 17:18:02 INFO AzureCheckpointFileManager: Renamed temp file dbfs:/local_disk0/tmp/temporary-42032832-6196-4cf9-8873-077a4124fcd4/offsets/.0.0ddc55e0-db6d-45b6-ae68-075b7a6baf4c.tmp to dbfs:/local_disk0/tmp/temporary-42032832-6196-4cf9-8873-077a4124fcd4/offsets/0
20/03/20 17:18:02 INFO MicroBatchExecution: Committed offsets for batch 0. Metadata OffsetSeqMetadata(0,1584724681910,Map(spark.sql.streaming.stateStore.providerClass -> org.apache.spark.sql.execution.streaming.state.HDFSBackedStateStoreProvider, spark.sql.streaming.flatMapGroupsWithState.stateFormatVersion -> 2, spark.sql.streaming.multipleWatermarkPolicy -> min, spark.sql.streaming.aggregation.stateFormatVersion -> 2, spark.sql.shuffle.partitions -> 200))
20/03/20 17:18:02 INFO EventHubsSource: getBatch called with start = None and end = {"taxi-ride-eh":{"2":1004,"5":981,"4":1013,"7":988,"1":1035,"3":1002,"6":982,"0":992}}
20/03/20 17:18:02 WARN EventHubsConf: eventhubs.connectionstring has already been set to Endpoint=sb://taxieventhubs3.servicebus.windows.net/;SharedAccessKeyName=taxi-ride-eh-ap;SharedAccessKey=REDACTED_SHARED_ACCESS_KEY;EntityPath=taxi-ride-eh. Overwriting with Endpoint=sb://taxieventhubs3.servicebus.windows.net/;SharedAccessKeyName=taxi-ride-eh-ap;SharedAccessKey=REDACTED_SHARED_ACCESS_KEY;EntityPath=taxi-ride-eh
20/03/20 17:18:02 INFO EventHubsSource: GetBatch generating RDD of offset range: OffsetRange(partition: 0 | fromSeqNo: 0 | untilSeqNo: 992), OffsetRange(partition: 1 | fromSeqNo: 0 | untilSeqNo: 1035), OffsetRange(partition: 2 | fromSeqNo: 0 | untilSeqNo: 1004), OffsetRange(partition: 3 | fromSeqNo: 0 | untilSeqNo: 1002), OffsetRange(partition: 4 | fromSeqNo: 0 | untilSeqNo: 1013), OffsetRange(partition: 5 | fromSeqNo: 0 | untilSeqNo: 981), OffsetRange(partition: 6 | fromSeqNo: 0 | untilSeqNo: 982), OffsetRange(partition: 7 | fromSeqNo: 0 | untilSeqNo: 988)
20/03/20 17:18:02 INFO EventHubsSource: getBatch called with start = None and end = {"taxi-fare-eh":{"2":1004,"5":981,"4":1013,"7":988,"1":1035,"3":1002,"6":982,"0":992}}
20/03/20 17:18:02 WARN EventHubsConf: eventhubs.connectionstring has already been set to Endpoint=sb://taxieventhubs3.servicebus.windows.net/;SharedAccessKeyName=taxi-fare-eh-ap;SharedAccessKey=REDACTED_SHARED_ACCESS_KEY;EntityPath=taxi-fare-eh. Overwriting with Endpoint=sb://taxieventhubs3.servicebus.windows.net/;SharedAccessKeyName=taxi-fare-eh-ap;SharedAccessKey=REDACTED_SHARED_ACCESS_KEY;EntityPath=taxi-fare-eh
20/03/20 17:18:02 INFO EventHubsSource: GetBatch generating RDD of offset range: OffsetRange(partition: 0 | fromSeqNo: 0 | untilSeqNo: 992), OffsetRange(partition: 1 | fromSeqNo: 0 | untilSeqNo: 1035), OffsetRange(partition: 2 | fromSeqNo: 0 | untilSeqNo: 1004), OffsetRange(partition: 3 | fromSeqNo: 0 | untilSeqNo: 1002), OffsetRange(partition: 4 | fromSeqNo: 0 | untilSeqNo: 1013), OffsetRange(partition: 5 | fromSeqNo: 0 | untilSeqNo: 981), OffsetRange(partition: 6 | fromSeqNo: 0 | untilSeqNo: 982), OffsetRange(partition: 7 | fromSeqNo: 0 | untilSeqNo: 988)
20/03/20 17:18:02 INFO LogicalPlanStats: Setting LogicalPlanStats visitor to com.databricks.sql.optimizer.statsEstimation.DatabricksLogicalPlanStatsVisitor$
20/03/20 17:18:02 INFO MemoryStore: Block broadcast_0 stored as values in memory (estimated size 424.4 KB, free 3.3 GB)
20/03/20 17:18:02 INFO MemoryStore: Block broadcast_1 stored as values in memory (estimated size 172.9 KB, free 3.3 GB)
20/03/20 17:18:02 INFO MemoryStore: Block broadcast_1_piece0 stored as bytes in memory (estimated size 21.3 KB, free 3.3 GB)
20/03/20 17:18:02 INFO BlockManagerInfo: Added broadcast_1_piece0 in memory on 10.139.64.5:38325 (size: 21.3 KB, free: 3.3 GB)
20/03/20 17:18:03 INFO SparkContext: Created broadcast 1 from start at TaxiCabReader.scala:202
20/03/20 17:18:03 INFO MemoryStore: Block broadcast_0_piece0 stored as bytes in memory (estimated size 21.7 KB, free 3.3 GB)
20/03/20 17:18:03 INFO BlockManagerInfo: Added broadcast_0_piece0 in memory on 10.139.64.5:38325 (size: 21.7 KB, free: 3.3 GB)
20/03/20 17:18:03 INFO SparkContext: Created broadcast 0 from start at TaxiCabReader.scala:202
20/03/20 17:18:03 INFO MemoryStore: Block broadcast_2 stored as values in memory (estimated size 424.4 KB, free 3.3 GB)
20/03/20 17:18:03 INFO MemoryStore: Block broadcast_2_piece0 stored as bytes in memory (estimated size 21.7 KB, free 3.3 GB)
20/03/20 17:18:03 INFO BlockManagerInfo: Added broadcast_2_piece0 in memory on 10.139.64.5:38325 (size: 21.7 KB, free: 3.3 GB)
20/03/20 17:18:03 INFO SparkContext: Created broadcast 2 from start at TaxiCabReader.scala:202
20/03/20 17:18:03 INFO MemoryStore: Block
broadcast_3 stored as values in memory (estimated size 424.4 KB, free 3.3 GB) 20/03/20 17:18:03 INFO MemoryStore: Block broadcast_3_piece0 stored as bytes in memory (estimated size 21.7 KB, free 3.3 GB) 20/03/20 17:18:03 INFO BlockManagerInfo: Added broadcast_3_piece0 in memory on 10.139.64.5:38325 (size: 21.7 KB, free: 3.3 GB) 20/03/20 17:18:03 INFO SparkContext: Created broadcast 3 from start at TaxiCabReader.scala:202 20/03/20 17:18:03 INFO MemoryStore: Block broadcast_4 stored as values in memory (estimated size 424.4 KB, free 3.3 GB) 20/03/20 17:18:03 INFO MemoryStore: Block broadcast_4_piece0 stored as bytes in memory (estimated size 21.7 KB, free 3.3 GB) 20/03/20 17:18:03 INFO BlockManagerInfo: Added broadcast_4_piece0 in memory on 10.139.64.5:38325 (size: 21.7 KB, free: 3.3 GB) 20/03/20 17:18:03 INFO SparkContext: Created broadcast 4 from start at TaxiCabReader.scala:202 20/03/20 17:18:03 INFO MemoryStore: Block broadcast_5 stored as values in memory (estimated size 424.4 KB, free 3.3 GB) 20/03/20 17:18:03 INFO MemoryStore: Block broadcast_5_piece0 stored as bytes in memory (estimated size 21.7 KB, free 3.3 GB) 20/03/20 17:18:03 INFO BlockManagerInfo: Added broadcast_5_piece0 in memory on 10.139.64.5:38325 (size: 21.7 KB, free: 3.3 GB) 20/03/20 17:18:03 INFO SparkContext: Created broadcast 5 from start at TaxiCabReader.scala:202 20/03/20 17:18:03 INFO MemoryStore: Block broadcast_6 stored as values in memory (estimated size 424.4 KB, free 3.3 GB) 20/03/20 17:18:03 INFO MemoryStore: Block broadcast_6_piece0 stored as bytes in memory (estimated size 21.7 KB, free 3.3 GB) 20/03/20 17:18:03 INFO BlockManagerInfo: Added broadcast_6_piece0 in memory on 10.139.64.5:38325 (size: 21.7 KB, free: 3.3 GB) 20/03/20 17:18:03 INFO SparkContext: Created broadcast 6 from start at TaxiCabReader.scala:202 20/03/20 17:18:03 INFO MemoryStore: Block broadcast_7 stored as values in memory (estimated size 424.4 KB, free 3.3 GB) 20/03/20 17:18:03 INFO MemoryStore: Block broadcast_7_piece0 stored as bytes in memory (estimated size 21.7 KB, free 3.3 GB) 20/03/20 17:18:03 INFO BlockManagerInfo: Added broadcast_7_piece0 in memory on 10.139.64.5:38325 (size: 21.7 KB, free: 3.3 GB) 20/03/20 17:18:03 INFO SparkContext: Created broadcast 7 from start at TaxiCabReader.scala:202 20/03/20 17:18:03 INFO MemoryStore: Block broadcast_8 stored as values in memory (estimated size 424.4 KB, free 3.3 GB) 20/03/20 17:18:03 INFO MemoryStore: Block broadcast_8_piece0 stored as bytes in memory (estimated size 21.7 KB, free 3.3 GB) 20/03/20 17:18:03 INFO BlockManagerInfo: Added broadcast_8_piece0 in memory on 10.139.64.5:38325 (size: 21.7 KB, free: 3.3 GB) 20/03/20 17:18:03 INFO SparkContext: Created broadcast 8 from start at TaxiCabReader.scala:202 20/03/20 17:18:03 WARN Utils: Truncated the string representation of a plan since it was too large. This behavior can be adjusted by setting 'spark.debug.maxToStringFields' in SparkEnv.conf. 
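Annotation: the Utils warning above refers to spark.debug.maxToStringFields, which (as the message itself notes) is read from the SparkConf behind SparkEnv, so it only takes effect if set before the context starts; on Databricks that means the cluster's Spark config rather than a runtime spark.conf.set. A sketch with an arbitrary limit of 200:

    import org.apache.spark.sql.SparkSession

    // Must be in place before the SparkContext exists; on an already-running Databricks
    // cluster, add "spark.debug.maxToStringFields 200" to the cluster Spark config instead.
    val spark = SparkSession.builder()
      .config("spark.debug.maxToStringFields", "200")
      .getOrCreate()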
20/03/20 17:18:03 INFO ContextCleaner: Cleaned accumulator 3 (name: ) 20/03/20 17:18:03 INFO ContextCleaner: Cleaned accumulator 10 (name: ) 20/03/20 17:18:03 INFO ContextCleaner: Cleaned accumulator 2 (name: ) 20/03/20 17:18:03 INFO ContextCleaner: Cleaned accumulator 8 (name: ) 20/03/20 17:18:03 INFO BlockManagerInfo: Removed broadcast_0_piece0 on 10.139.64.5:38325 in memory (size: 21.7 KB, free: 3.3 GB) 20/03/20 17:18:03 INFO BlockManagerInfo: Removed broadcast_4_piece0 on 10.139.64.5:38325 in memory (size: 21.7 KB, free: 3.3 GB) 20/03/20 17:18:03 INFO BlockManagerInfo: Removed broadcast_5_piece0 on 10.139.64.5:38325 in memory (size: 21.7 KB, free: 3.3 GB) 20/03/20 17:18:03 INFO BlockManagerInfo: Removed broadcast_7_piece0 on 10.139.64.5:38325 in memory (size: 21.7 KB, free: 3.3 GB) 20/03/20 17:18:03 INFO BlockManagerInfo: Removed broadcast_2_piece0 on 10.139.64.5:38325 in memory (size: 21.7 KB, free: 3.3 GB) 20/03/20 17:18:03 INFO CodeGenerator: Code generated in 245.882249 ms 20/03/20 17:18:03 INFO HashAggregateExec: spark.sql.codegen.aggregate.map.twolevel.enabled is set to true, but current version of codegened fast hashmap does not support this aggregate. 20/03/20 17:18:03 INFO CodeGenerator: Code generated in 32.544569 ms 20/03/20 17:18:03 INFO HashAggregateExec: spark.sql.codegen.aggregate.map.twolevel.enabled is set to true, but current version of codegened fast hashmap does not support this aggregate. 20/03/20 17:18:03 INFO CodeGenerator: Code generated in 31.240879 ms 20/03/20 17:18:03 INFO HashAggregateExec: spark.sql.codegen.aggregate.map.twolevel.enabled is set to true, but current version of codegened fast hashmap does not support this aggregate. 20/03/20 17:18:03 INFO CodeGenerator: Code generated in 40.392617 ms 20/03/20 17:18:04 INFO CodeGenerator: Code generated in 81.144959 ms 20/03/20 17:18:04 INFO CodeGenerator: Code generated in 17.06639 ms 20/03/20 17:18:04 INFO CodeGenerator: Code generated in 62.430053 ms 20/03/20 17:18:04 INFO CodeGenerator: Code generated in 14.266695 ms 20/03/20 17:18:04 INFO MemoryStore: Block broadcast_9 stored as values in memory (estimated size 424.4 KB, free 3.3 GB) 20/03/20 17:18:04 INFO MemoryStore: Block broadcast_9_piece0 stored as bytes in memory (estimated size 21.7 KB, free 3.3 GB) 20/03/20 17:18:04 INFO BlockManagerInfo: Added broadcast_9_piece0 in memory on 10.139.64.5:38325 (size: 21.7 KB, free: 3.3 GB) 20/03/20 17:18:04 INFO SparkContext: Created broadcast 9 from start at TaxiCabReader.scala:202 20/03/20 17:18:04 INFO MemoryStore: Block broadcast_10 stored as values in memory (estimated size 424.4 KB, free 3.3 GB) 20/03/20 17:18:04 INFO MemoryStore: Block broadcast_10_piece0 stored as bytes in memory (estimated size 21.7 KB, free 3.3 GB) 20/03/20 17:18:04 INFO BlockManagerInfo: Added broadcast_10_piece0 in memory on 10.139.64.5:38325 (size: 21.7 KB, free: 3.3 GB) 20/03/20 17:18:04 INFO SparkContext: Created broadcast 10 from start at TaxiCabReader.scala:202 20/03/20 17:18:04 INFO WriteToDataSourceV2Exec: Start processing data source writer: org.apache.spark.sql.execution.streaming.sources.MicroBatchWriter@24e2d1bf. The input RDD has 200 partitions. 
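Annotation: the 200 input partitions reported by the writer above mirror the spark.sql.shuffle.partitions -> 200 recorded in the batch-0 offset metadata earlier; for micro-batches drawn from 8 Event Hubs partitions this is far more shuffle tasks than data, so the value is often reduced. This is a tuning observation only, unrelated to the failure that follows:

    // Illustrative value; match it to the actual per-batch data volume.
    spark.conf.set("spark.sql.shuffle.partitions", "8")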
20/03/20 17:18:04 INFO SparkContext: Starting job: start at TaxiCabReader.scala:202 20/03/20 17:18:04 INFO DAGScheduler: Registering RDD 7 (start at TaxiCabReader.scala:202) as input to shuffle 0 20/03/20 17:18:04 INFO DAGScheduler: Registering RDD 12 (start at TaxiCabReader.scala:202) as input to shuffle 1 20/03/20 17:18:04 INFO DAGScheduler: Registering RDD 16 (start at TaxiCabReader.scala:202) as input to shuffle 2 20/03/20 17:18:04 INFO DAGScheduler: Got job 0 (start at TaxiCabReader.scala:202) with 200 output partitions 20/03/20 17:18:04 INFO DAGScheduler: Final stage: ResultStage 3 (start at TaxiCabReader.scala:202) 20/03/20 17:18:04 INFO DAGScheduler: Parents of final stage: List(ShuffleMapStage 2) 20/03/20 17:18:04 INFO DAGScheduler: Missing parents: List(ShuffleMapStage 2) 20/03/20 17:18:04 INFO DAGScheduler: Submitting ShuffleMapStage 0 (MapPartitionsRDD[7] at start at TaxiCabReader.scala:202), which has no missing parents 20/03/20 17:18:04 INFO MemoryStore: Block broadcast_11 stored as values in memory (estimated size 57.6 KB, free 3.3 GB) 20/03/20 17:18:04 INFO MemoryStore: Block broadcast_11_piece0 stored as bytes in memory (estimated size 18.8 KB, free 3.3 GB) 20/03/20 17:18:04 INFO BlockManagerInfo: Added broadcast_11_piece0 in memory on 10.139.64.5:38325 (size: 18.8 KB, free: 3.3 GB) 20/03/20 17:18:04 INFO SparkContext: Created broadcast 11 from broadcast at DAGScheduler.scala:1484 20/03/20 17:18:04 INFO DAGScheduler: Submitting 8 missing tasks from ShuffleMapStage 0 (MapPartitionsRDD[7] at start at TaxiCabReader.scala:202) (first 15 tasks are for partitions Vector(0, 1, 2, 3, 4, 5, 6, 7)) 20/03/20 17:18:04 INFO TaskSchedulerImpl: Adding task set 0.0 with 8 tasks 20/03/20 17:18:04 INFO TaskSetManager: Jars for session None: Map(spark://10.139.64.5:43591/jars/addedFile3442831348035386856jsr_275_1_0_beta_2-80e90.jar -> 1584723970909, spark://10.139.64.5:43591/jars/addedFile3996962909963972797gt_referencing_19_2-00822.jar -> 1584723969059, spark://10.139.64.5:43591/jars/addedFile5583007216404072865azure_eventhubs_spark_2_11_2_3_5-3c889.jar -> 1584723972400, spark://10.139.64.5:43591/jars/addedFile8687218278313632636netty_all_4_0_33_Final-a6cb2.jar -> 1584723969311, spark://10.139.64.5:43591/jars/addedFile8227762715456086999gt_opengis_19_2-45b06.jar -> 1584723969558, spark://10.139.64.5:43591/jars/addedFile2570353732580087347commons_collections_3_2_2-8ae5d.jar -> 1584723970304, spark://10.139.64.5:43591/jars/addedFile3548734869675393587gt_api_19_2-ceffd.jar -> 1584723970533, spark://10.139.64.5:43591/jars/addedFile8229105525908896490commons_pool_1_5_4-59fa8.jar -> 1584723969878, spark://10.139.64.5:43591/jars/addedFile4290775979797428014qpid_proton_j_extensions_1_1_0-fce6d.jar -> 1584723971087, spark://10.139.64.5:43591/jars/addedFile575577840276477859joda_convert_1_2-84d7c.jar -> 1584723972306, spark://10.139.64.5:43591/jars/addedFile8955604004617450696gt_data_19_2-265c5.jar -> 1584723971517, spark://10.139.64.5:43591/jars/addedFile1516452932377227106azure_cosmos_cassandra_spark_helper_1_0_0-9e3cb.jar -> 1584723971007, spark://10.139.64.5:43591/jars/addedFile8184319509357117868GeographicLib_Java_1_44-bb367.jar -> 1584723969381, spark://10.139.64.5:43591/jars/addedFile7505194908477353323gt_shapefile_19_2-5444b.jar -> 1584723970686, spark://10.139.64.5:43591/jars/addedFile1628453214926378049scala_java8_compat_2_11_0_9_0-4b418.jar -> 1584723969691, spark://10.139.64.5:43591/jars/addedFile8748790935058486782jai_core_1_1_3-a9912.jar -> 1584723968169, 
spark://10.139.64.5:43591/jars/addedFile2595472171217879663slf4j_api_1_7_25-784af.jar -> 1584723971149, spark://10.139.64.5:43591/jars/addedFile7846286548968320981ejml_core_0_32-f12aa.jar -> 1584723972232, spark://10.139.64.5:43591/jars/addedFile2606987707659581323spark_cassandra_connector_2_11_2_0_6-0cc93.jar -> 1584723972119, spark://10.139.64.5:43591/jars/addedFile8034574630270231092gt_metadata_19_2-839f0.jar -> 1584723972481, spark://10.139.64.5:43591/jars/addedFile2209756209979350798ejml_ddense_0_32-c7585.jar -> 1584723971604, spark://10.139.64.5:43591/jars/addedFile6018328576080761864e578b547_b793_4b2c_9b7c_96bdcf532275_azure_databricks_job-daac6.jar -> 1584724672046, spark://10.139.64.5:43591/jars/addedFile8834878213653999262jsr166e_1_1_0-f65fb.jar -> 1584723969994, spark://10.139.64.5:43591/jars/addedFile1824498792517533364azure_eventhubs_1_2_0-a5543.jar -> 1584723970832, spark://10.139.64.5:43591/jars/addedFile7586850005984797614gt_main_19_2-65791.jar -> 1584723971296, spark://10.139.64.5:43591/jars/addedFile6541188595061640819jts_core_1_14_0-ede36.jar -> 1584723971451, spark://10.139.64.5:43591/jars/addedFile2211216886605987311jdom2_2_0_6-02f67.jar -> 1584723970452, spark://10.139.64.5:43591/jars/addedFile289444791567269712proton_j_0_29_0-cd8ac.jar -> 1584723968884, spark://10.139.64.5:43591/jars/addedFile8059279970000382056commons_beanutils_1_9_3-b22d6.jar -> 1584723969463, spark://10.139.64.5:43591/jars/addedFile7348553714132626780jgridshift_1_0-b8c6e.jar -> 1584723970192, spark://10.139.64.5:43591/jars/addedFile1453776487616863254joda_time_2_3-fb321.jar -> 1584723969807) 20/03/20 17:18:04 INFO TaskSetManager: Files for session None: Map(spark://10.139.64.5:43591/files/addedFile8834878213653999262jsr166e_1_1_0-f65fb.jar -> 1584723969987, spark://10.139.64.5:43591/files/addedFile1628453214926378049scala_java8_compat_2_11_0_9_0-4b418.jar -> 1584723969683, spark://10.139.64.5:43591/files/addedFile2211216886605987311jdom2_2_0_6-02f67.jar -> 1584723970446, spark://10.139.64.5:43591/files/addedFile6541188595061640819jts_core_1_14_0-ede36.jar -> 1584723971444, spark://10.139.64.5:43591/files/addedFile1453776487616863254joda_time_2_3-fb321.jar -> 1584723969800, spark://10.139.64.5:43591/files/addedFile8184319509357117868GeographicLib_Java_1_44-bb367.jar -> 1584723969375, spark://10.139.64.5:43591/files/addedFile6018328576080761864e578b547_b793_4b2c_9b7c_96bdcf532275_azure_databricks_job-daac6.jar -> 1584724672040, spark://10.139.64.5:43591/files/addedFile2570353732580087347commons_collections_3_2_2-8ae5d.jar -> 1584723970297, spark://10.139.64.5:43591/files/addedFile3548734869675393587gt_api_19_2-ceffd.jar -> 1584723970518, spark://10.139.64.5:43591/files/addedFile1516452932377227106azure_cosmos_cassandra_spark_helper_1_0_0-9e3cb.jar -> 1584723971000, spark://10.139.64.5:43591/files/addedFile8687218278313632636netty_all_4_0_33_Final-a6cb2.jar -> 1584723969298, spark://10.139.64.5:43591/files/addedFile1824498792517533364azure_eventhubs_1_2_0-a5543.jar -> 1584723970826, spark://10.139.64.5:43591/files/addedFile8227762715456086999gt_opengis_19_2-45b06.jar -> 1584723969549, spark://10.139.64.5:43591/files/addedFile3442831348035386856jsr_275_1_0_beta_2-80e90.jar -> 1584723970903, spark://10.139.64.5:43591/files/addedFile8748790935058486782jai_core_1_1_3-a9912.jar -> 1584723968122, spark://10.139.64.5:43591/files/addedFile7505194908477353323gt_shapefile_19_2-5444b.jar -> 1584723970678, spark://10.139.64.5:43591/files/addedFile8955604004617450696gt_data_19_2-265c5.jar -> 1584723971512, 
spark://10.139.64.5:43591/files/addedFile7846286548968320981ejml_core_0_32-f12aa.jar -> 1584723972224, spark://10.139.64.5:43591/files/addedFile5583007216404072865azure_eventhubs_spark_2_11_2_3_5-3c889.jar -> 1584723972394, spark://10.139.64.5:43591/files/addedFile2209756209979350798ejml_ddense_0_32-c7585.jar -> 1584723971596, spark://10.139.64.5:43591/files/addedFile8034574630270231092gt_metadata_19_2-839f0.jar -> 1584723972474, spark://10.139.64.5:43591/files/addedFile289444791567269712proton_j_0_29_0-cd8ac.jar -> 1584723968876, spark://10.139.64.5:43591/files/addedFile2606987707659581323spark_cassandra_connector_2_11_2_0_6-0cc93.jar -> 1584723972106, spark://10.139.64.5:43591/files/addedFile8059279970000382056commons_beanutils_1_9_3-b22d6.jar -> 1584723969457, spark://10.139.64.5:43591/files/addedFile2595472171217879663slf4j_api_1_7_25-784af.jar -> 1584723971144, spark://10.139.64.5:43591/files/addedFile7348553714132626780jgridshift_1_0-b8c6e.jar -> 1584723970185, spark://10.139.64.5:43591/files/addedFile8229105525908896490commons_pool_1_5_4-59fa8.jar -> 1584723969872, spark://10.139.64.5:43591/files/addedFile575577840276477859joda_convert_1_2-84d7c.jar -> 1584723972300, spark://10.139.64.5:43591/files/addedFile3996962909963972797gt_referencing_19_2-00822.jar -> 1584723969051, spark://10.139.64.5:43591/files/addedFile7586850005984797614gt_main_19_2-65791.jar -> 1584723971288, spark://10.139.64.5:43591/files/addedFile4290775979797428014qpid_proton_j_extensions_1_1_0-fce6d.jar -> 1584723971069) 20/03/20 17:18:04 WARN FairSchedulableBuilder: A job was submitted with scheduler pool 8809067319516496537, which has not been configured. This can happen when the file that pools are read from isn't set, or when that file doesn't contain 8809067319516496537. 
Created 8809067319516496537 with default configuration (schedulingMode: FIFO, minShare: 0, weight: 1) 20/03/20 17:18:04 INFO FairSchedulableBuilder: Added task set TaskSet_0.0 tasks to pool 8809067319516496537 20/03/20 17:18:04 INFO DAGScheduler: Submitting ShuffleMapStage 1 (MapPartitionsRDD[12] at start at TaxiCabReader.scala:202), which has no missing parents 20/03/20 17:18:04 INFO MemoryStore: Block broadcast_12 stored as values in memory (estimated size 93.3 KB, free 3.3 GB) 20/03/20 17:18:04 INFO MemoryStore: Block broadcast_12_piece0 stored as bytes in memory (estimated size 35.3 KB, free 3.3 GB) 20/03/20 17:18:04 INFO BlockManagerInfo: Added broadcast_12_piece0 in memory on 10.139.64.5:38325 (size: 35.3 KB, free: 3.3 GB) 20/03/20 17:18:04 INFO SparkContext: Created broadcast 12 from broadcast at DAGScheduler.scala:1484 20/03/20 17:18:04 INFO DAGScheduler: Submitting 8 missing tasks from ShuffleMapStage 1 (MapPartitionsRDD[12] at start at TaxiCabReader.scala:202) (first 15 tasks are for partitions Vector(0, 1, 2, 3, 4, 5, 6, 7)) 20/03/20 17:18:04 INFO TaskSchedulerImpl: Adding task set 1.0 with 8 tasks 20/03/20 17:18:04 INFO TaskSetManager: Starting task 0.0 in stage 0.0 (TID 0, 10.139.64.4, executor 0, partition 0, PROCESS_LOCAL, 5508 bytes) 20/03/20 17:18:04 INFO TaskSetManager: Starting task 7.0 in stage 0.0 (TID 1, 10.139.64.6, executor 1, partition 7, PROCESS_LOCAL, 5508 bytes) 20/03/20 17:18:04 INFO TaskSetManager: Starting task 1.0 in stage 0.0 (TID 2, 10.139.64.4, executor 0, partition 1, PROCESS_LOCAL, 5508 bytes) 20/03/20 17:18:04 INFO TaskSetManager: Starting task 2.0 in stage 0.0 (TID 3, 10.139.64.4, executor 0, partition 2, PROCESS_LOCAL, 5508 bytes) 20/03/20 17:18:04 INFO TaskSetManager: Starting task 3.0 in stage 0.0 (TID 4, 10.139.64.4, executor 0, partition 3, PROCESS_LOCAL, 5508 bytes) 20/03/20 17:18:04 INFO TaskSetManager: Jars for session None: Map(spark://10.139.64.5:43591/jars/addedFile3442831348035386856jsr_275_1_0_beta_2-80e90.jar -> 1584723970909, spark://10.139.64.5:43591/jars/addedFile3996962909963972797gt_referencing_19_2-00822.jar -> 1584723969059, spark://10.139.64.5:43591/jars/addedFile5583007216404072865azure_eventhubs_spark_2_11_2_3_5-3c889.jar -> 1584723972400, spark://10.139.64.5:43591/jars/addedFile8687218278313632636netty_all_4_0_33_Final-a6cb2.jar -> 1584723969311, spark://10.139.64.5:43591/jars/addedFile8227762715456086999gt_opengis_19_2-45b06.jar -> 1584723969558, spark://10.139.64.5:43591/jars/addedFile2570353732580087347commons_collections_3_2_2-8ae5d.jar -> 1584723970304, spark://10.139.64.5:43591/jars/addedFile3548734869675393587gt_api_19_2-ceffd.jar -> 1584723970533, spark://10.139.64.5:43591/jars/addedFile8229105525908896490commons_pool_1_5_4-59fa8.jar -> 1584723969878, spark://10.139.64.5:43591/jars/addedFile4290775979797428014qpid_proton_j_extensions_1_1_0-fce6d.jar -> 1584723971087, spark://10.139.64.5:43591/jars/addedFile575577840276477859joda_convert_1_2-84d7c.jar -> 1584723972306, spark://10.139.64.5:43591/jars/addedFile8955604004617450696gt_data_19_2-265c5.jar -> 1584723971517, spark://10.139.64.5:43591/jars/addedFile1516452932377227106azure_cosmos_cassandra_spark_helper_1_0_0-9e3cb.jar -> 1584723971007, spark://10.139.64.5:43591/jars/addedFile8184319509357117868GeographicLib_Java_1_44-bb367.jar -> 1584723969381, spark://10.139.64.5:43591/jars/addedFile7505194908477353323gt_shapefile_19_2-5444b.jar -> 1584723970686, spark://10.139.64.5:43591/jars/addedFile1628453214926378049scala_java8_compat_2_11_0_9_0-4b418.jar -> 1584723969691, 
spark://10.139.64.5:43591/jars/addedFile8748790935058486782jai_core_1_1_3-a9912.jar -> 1584723968169, spark://10.139.64.5:43591/jars/addedFile2595472171217879663slf4j_api_1_7_25-784af.jar -> 1584723971149, spark://10.139.64.5:43591/jars/addedFile7846286548968320981ejml_core_0_32-f12aa.jar -> 1584723972232, spark://10.139.64.5:43591/jars/addedFile2606987707659581323spark_cassandra_connector_2_11_2_0_6-0cc93.jar -> 1584723972119, spark://10.139.64.5:43591/jars/addedFile8034574630270231092gt_metadata_19_2-839f0.jar -> 1584723972481, spark://10.139.64.5:43591/jars/addedFile2209756209979350798ejml_ddense_0_32-c7585.jar -> 1584723971604, spark://10.139.64.5:43591/jars/addedFile6018328576080761864e578b547_b793_4b2c_9b7c_96bdcf532275_azure_databricks_job-daac6.jar -> 1584724672046, spark://10.139.64.5:43591/jars/addedFile8834878213653999262jsr166e_1_1_0-f65fb.jar -> 1584723969994, spark://10.139.64.5:43591/jars/addedFile1824498792517533364azure_eventhubs_1_2_0-a5543.jar -> 1584723970832, spark://10.139.64.5:43591/jars/addedFile7586850005984797614gt_main_19_2-65791.jar -> 1584723971296, spark://10.139.64.5:43591/jars/addedFile6541188595061640819jts_core_1_14_0-ede36.jar -> 1584723971451, spark://10.139.64.5:43591/jars/addedFile2211216886605987311jdom2_2_0_6-02f67.jar -> 1584723970452, spark://10.139.64.5:43591/jars/addedFile289444791567269712proton_j_0_29_0-cd8ac.jar -> 1584723968884, spark://10.139.64.5:43591/jars/addedFile8059279970000382056commons_beanutils_1_9_3-b22d6.jar -> 1584723969463, spark://10.139.64.5:43591/jars/addedFile7348553714132626780jgridshift_1_0-b8c6e.jar -> 1584723970192, spark://10.139.64.5:43591/jars/addedFile1453776487616863254joda_time_2_3-fb321.jar -> 1584723969807) 20/03/20 17:18:04 INFO TaskSetManager: Files for session None: Map(spark://10.139.64.5:43591/files/addedFile8834878213653999262jsr166e_1_1_0-f65fb.jar -> 1584723969987, spark://10.139.64.5:43591/files/addedFile1628453214926378049scala_java8_compat_2_11_0_9_0-4b418.jar -> 1584723969683, spark://10.139.64.5:43591/files/addedFile2211216886605987311jdom2_2_0_6-02f67.jar -> 1584723970446, spark://10.139.64.5:43591/files/addedFile6541188595061640819jts_core_1_14_0-ede36.jar -> 1584723971444, spark://10.139.64.5:43591/files/addedFile1453776487616863254joda_time_2_3-fb321.jar -> 1584723969800, spark://10.139.64.5:43591/files/addedFile8184319509357117868GeographicLib_Java_1_44-bb367.jar -> 1584723969375, spark://10.139.64.5:43591/files/addedFile6018328576080761864e578b547_b793_4b2c_9b7c_96bdcf532275_azure_databricks_job-daac6.jar -> 1584724672040, spark://10.139.64.5:43591/files/addedFile2570353732580087347commons_collections_3_2_2-8ae5d.jar -> 1584723970297, spark://10.139.64.5:43591/files/addedFile3548734869675393587gt_api_19_2-ceffd.jar -> 1584723970518, spark://10.139.64.5:43591/files/addedFile1516452932377227106azure_cosmos_cassandra_spark_helper_1_0_0-9e3cb.jar -> 1584723971000, spark://10.139.64.5:43591/files/addedFile8687218278313632636netty_all_4_0_33_Final-a6cb2.jar -> 1584723969298, spark://10.139.64.5:43591/files/addedFile1824498792517533364azure_eventhubs_1_2_0-a5543.jar -> 1584723970826, spark://10.139.64.5:43591/files/addedFile8227762715456086999gt_opengis_19_2-45b06.jar -> 1584723969549, spark://10.139.64.5:43591/files/addedFile3442831348035386856jsr_275_1_0_beta_2-80e90.jar -> 1584723970903, spark://10.139.64.5:43591/files/addedFile8748790935058486782jai_core_1_1_3-a9912.jar -> 1584723968122, spark://10.139.64.5:43591/files/addedFile7505194908477353323gt_shapefile_19_2-5444b.jar -> 1584723970678, 
spark://10.139.64.5:43591/files/addedFile8955604004617450696gt_data_19_2-265c5.jar -> 1584723971512, spark://10.139.64.5:43591/files/addedFile7846286548968320981ejml_core_0_32-f12aa.jar -> 1584723972224, spark://10.139.64.5:43591/files/addedFile5583007216404072865azure_eventhubs_spark_2_11_2_3_5-3c889.jar -> 1584723972394, spark://10.139.64.5:43591/files/addedFile2209756209979350798ejml_ddense_0_32-c7585.jar -> 1584723971596, spark://10.139.64.5:43591/files/addedFile8034574630270231092gt_metadata_19_2-839f0.jar -> 1584723972474, spark://10.139.64.5:43591/files/addedFile289444791567269712proton_j_0_29_0-cd8ac.jar -> 1584723968876, spark://10.139.64.5:43591/files/addedFile2606987707659581323spark_cassandra_connector_2_11_2_0_6-0cc93.jar -> 1584723972106, spark://10.139.64.5:43591/files/addedFile8059279970000382056commons_beanutils_1_9_3-b22d6.jar -> 1584723969457, spark://10.139.64.5:43591/files/addedFile2595472171217879663slf4j_api_1_7_25-784af.jar -> 1584723971144, spark://10.139.64.5:43591/files/addedFile7348553714132626780jgridshift_1_0-b8c6e.jar -> 1584723970185, spark://10.139.64.5:43591/files/addedFile8229105525908896490commons_pool_1_5_4-59fa8.jar -> 1584723969872, spark://10.139.64.5:43591/files/addedFile575577840276477859joda_convert_1_2-84d7c.jar -> 1584723972300, spark://10.139.64.5:43591/files/addedFile3996962909963972797gt_referencing_19_2-00822.jar -> 1584723969051, spark://10.139.64.5:43591/files/addedFile7586850005984797614gt_main_19_2-65791.jar -> 1584723971288, spark://10.139.64.5:43591/files/addedFile4290775979797428014qpid_proton_j_extensions_1_1_0-fce6d.jar -> 1584723971069) 20/03/20 17:18:04 INFO FairSchedulableBuilder: Added task set TaskSet_1.0 tasks to pool 8809067319516496537 20/03/20 17:18:04 INFO TaskSetManager: Starting task 1.0 in stage 1.0 (TID 5, 10.139.64.6, executor 1, partition 1, PROCESS_LOCAL, 5508 bytes) 20/03/20 17:18:04 INFO TaskSetManager: Starting task 2.0 in stage 1.0 (TID 6, 10.139.64.6, executor 1, partition 2, PROCESS_LOCAL, 5508 bytes) 20/03/20 17:18:04 INFO TaskSetManager: Starting task 3.0 in stage 1.0 (TID 7, 10.139.64.6, executor 1, partition 3, PROCESS_LOCAL, 5508 bytes) 20/03/20 17:18:05 INFO BlockManagerInfo: Added broadcast_11_piece0 in memory on 10.139.64.6:40557 (size: 18.8 KB, free: 3.6 GB) 20/03/20 17:18:05 INFO BlockManagerInfo: Added broadcast_12_piece0 in memory on 10.139.64.6:40557 (size: 35.3 KB, free: 3.6 GB) 20/03/20 17:18:05 INFO BlockManagerInfo: Added broadcast_11_piece0 in memory on 10.139.64.4:42179 (size: 18.8 KB, free: 3.6 GB) 20/03/20 17:18:09 INFO TaskSetManager: Starting task 6.0 in stage 1.0 (TID 8, 10.139.64.6, executor 1, partition 6, PROCESS_LOCAL, 5508 bytes) 20/03/20 17:18:09 INFO TaskSetManager: Starting task 7.0 in stage 1.0 (TID 9, 10.139.64.6, executor 1, partition 7, PROCESS_LOCAL, 5508 bytes) 20/03/20 17:18:09 INFO TaskSetManager: Finished task 2.0 in stage 1.0 (TID 6) in 4736 ms on 10.139.64.6 (executor 1) (1/8) 20/03/20 17:18:09 INFO TaskSetManager: Finished task 3.0 in stage 1.0 (TID 7) in 4742 ms on 10.139.64.6 (executor 1) (2/8) 20/03/20 17:18:09 INFO TaskSetManager: Finished task 1.0 in stage 1.0 (TID 5) in 4751 ms on 10.139.64.6 (executor 1) (3/8) 20/03/20 17:18:09 INFO TaskSetManager: Starting task 4.0 in stage 0.0 (TID 10, 10.139.64.4, executor 0, partition 4, PROCESS_LOCAL, 5508 bytes) 20/03/20 17:18:09 INFO TaskSetManager: Starting task 5.0 in stage 0.0 (TID 11, 10.139.64.4, executor 0, partition 5, PROCESS_LOCAL, 5508 bytes) 20/03/20 17:18:09 INFO TaskSetManager: Starting task 6.0 in stage 
0.0 (TID 12, 10.139.64.4, executor 0, partition 6, PROCESS_LOCAL, 5508 bytes)
20/03/20 17:18:09 INFO TaskSetManager: Starting task 0.0 in stage 1.0 (TID 13, 10.139.64.4, executor 0, partition 0, PROCESS_LOCAL, 5508 bytes)
20/03/20 17:18:09 WARN TaskSetManager: Lost task 2.0 in stage 0.0 (TID 3, 10.139.64.4, executor 0): org.apache.spark.SparkException: Failed to execute user defined function(anonfun$1: (double, double) => string)
    at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIteratorForCodegenStage2.processNext(Unknown Source)
    at org.apache.spark.sql.execution.BufferedRowIterator.hasNext(BufferedRowIterator.java:43)
    at org.apache.spark.sql.execution.WholeStageCodegenExec$$anonfun$13$$anon$1.hasNext(WholeStageCodegenExec.scala:640)
    at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:409)
    at org.apache.spark.shuffle.sort.BypassMergeSortShuffleWriter.write(BypassMergeSortShuffleWriter.java:125)
    at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:99)
    at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:55)
    at org.apache.spark.scheduler.Task.doRunTask(Task.scala:140)
    at org.apache.spark.scheduler.Task.run(Task.scala:113)
    at org.apache.spark.executor.Executor$TaskRunner$$anonfun$13.apply(Executor.scala:537)
    at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1541)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:543)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
    at java.lang.Thread.run(Thread.java:748)
Caused by: java.lang.NullPointerException
    at com.microsoft.pnp.GeoFinder.getNeighborhood(GeoFinder.java:52)
    at com.microsoft.pnp.TaxiCabReader$$anonfun$1.apply(TaxiCabReader.scala:76)
    at com.microsoft.pnp.TaxiCabReader$$anonfun$1.apply(TaxiCabReader.scala:75)
    ...
15 more 20/03/20 17:18:09 INFO TaskSetManager: Starting task 2.1 in stage 0.0 (TID 14, 10.139.64.6, executor 1, partition 2, ANY, 5508 bytes) 20/03/20 17:18:09 INFO TaskSetManager: Lost task 1.0 in stage 0.0 (TID 2) on 10.139.64.4, executor 0: org.apache.spark.SparkException (Failed to execute user defined function(anonfun$1: (double, double) => string)) [duplicate 1] 20/03/20 17:18:09 INFO TaskSetManager: Lost task 3.0 in stage 0.0 (TID 4) on 10.139.64.4, executor 0: org.apache.spark.SparkException (Failed to execute user defined function(anonfun$1: (double, double) => string)) [duplicate 2] 20/03/20 17:18:09 INFO TaskSetManager: Lost task 0.0 in stage 0.0 (TID 0) on 10.139.64.4, executor 0: org.apache.spark.SparkException (Failed to execute user defined function(anonfun$1: (double, double) => string)) [duplicate 3] 20/03/20 17:18:10 INFO BlockManagerInfo: Added broadcast_12_piece0 in memory on 10.139.64.4:42179 (size: 35.3 KB, free: 3.6 GB) 20/03/20 17:18:10 INFO TaskSetManager: Starting task 0.1 in stage 0.0 (TID 15, 10.139.64.6, executor 1, partition 0, ANY, 5508 bytes) 20/03/20 17:18:10 INFO TaskSetManager: Finished task 6.0 in stage 1.0 (TID 8) in 554 ms on 10.139.64.6 (executor 1) (4/8) 20/03/20 17:18:10 INFO TaskSetManager: Starting task 3.1 in stage 0.0 (TID 16, 10.139.64.6, executor 1, partition 3, ANY, 5508 bytes) 20/03/20 17:18:10 INFO TaskSetManager: Finished task 7.0 in stage 1.0 (TID 9) in 735 ms on 10.139.64.6 (executor 1) (5/8) 20/03/20 17:18:10 INFO TaskSetManager: Starting task 1.1 in stage 0.0 (TID 17, 10.139.64.4, executor 0, partition 1, PROCESS_LOCAL, 5508 bytes) 20/03/20 17:18:10 INFO TaskSetManager: Lost task 6.0 in stage 0.0 (TID 12) on 10.139.64.4, executor 0: org.apache.spark.SparkException (Failed to execute user defined function(anonfun$1: (double, double) => string)) [duplicate 4] 20/03/20 17:18:10 INFO TaskSetManager: Starting task 6.1 in stage 0.0 (TID 18, 10.139.64.4, executor 0, partition 6, PROCESS_LOCAL, 5508 bytes) 20/03/20 17:18:10 INFO TaskSetManager: Lost task 5.0 in stage 0.0 (TID 11) on 10.139.64.4, executor 0: org.apache.spark.SparkException (Failed to execute user defined function(anonfun$1: (double, double) => string)) [duplicate 5] 20/03/20 17:18:10 INFO TaskSetManager: Starting task 5.1 in stage 0.0 (TID 19, 10.139.64.4, executor 0, partition 5, PROCESS_LOCAL, 5508 bytes) 20/03/20 17:18:10 INFO TaskSetManager: Lost task 4.0 in stage 0.0 (TID 10) on 10.139.64.4, executor 0: org.apache.spark.SparkException (Failed to execute user defined function(anonfun$1: (double, double) => string)) [duplicate 6] 20/03/20 17:18:10 INFO TaskSetManager: Starting task 4.1 in stage 0.0 (TID 20, 10.139.64.4, executor 0, partition 4, PROCESS_LOCAL, 5508 bytes) 20/03/20 17:18:10 INFO TaskSetManager: Lost task 6.1 in stage 0.0 (TID 18) on 10.139.64.4, executor 0: org.apache.spark.SparkException (Failed to execute user defined function(anonfun$1: (double, double) => string)) [duplicate 7] 20/03/20 17:18:10 INFO TaskSetManager: Starting task 6.2 in stage 0.0 (TID 21, 10.139.64.4, executor 0, partition 6, PROCESS_LOCAL, 5508 bytes) 20/03/20 17:18:10 INFO TaskSetManager: Lost task 5.1 in stage 0.0 (TID 19) on 10.139.64.4, executor 0: org.apache.spark.SparkException (Failed to execute user defined function(anonfun$1: (double, double) => string)) [duplicate 8] 20/03/20 17:18:10 INFO TaskSetManager: Starting task 5.2 in stage 0.0 (TID 22, 10.139.64.4, executor 0, partition 5, PROCESS_LOCAL, 5508 bytes) 20/03/20 17:18:10 INFO TaskSetManager: Lost task 1.1 in stage 0.0 (TID 
17) on 10.139.64.4, executor 0: org.apache.spark.SparkException (Failed to execute user defined function(anonfun$1: (double, double) => string)) [duplicate 9] 20/03/20 17:18:10 INFO TaskSetManager: Lost task 3.1 in stage 0.0 (TID 16) on 10.139.64.6, executor 1: org.apache.spark.SparkException (Failed to execute user defined function(anonfun$1: (double, double) => string)) [duplicate 10] 20/03/20 17:18:10 INFO TaskSetManager: Lost task 2.1 in stage 0.0 (TID 14) on 10.139.64.6, executor 1: org.apache.spark.SparkException (Failed to execute user defined function(anonfun$1: (double, double) => string)) [duplicate 11] 20/03/20 17:18:10 INFO TaskSetManager: Lost task 7.0 in stage 0.0 (TID 1) on 10.139.64.6, executor 1: org.apache.spark.SparkException (Failed to execute user defined function(anonfun$1: (double, double) => string)) [duplicate 12] 20/03/20 17:18:10 INFO TaskSetManager: Lost task 0.1 in stage 0.0 (TID 15) on 10.139.64.6, executor 1: org.apache.spark.SparkException (Failed to execute user defined function(anonfun$1: (double, double) => string)) [duplicate 13] 20/03/20 17:18:10 INFO TaskSetManager: Starting task 7.1 in stage 0.0 (TID 23, 10.139.64.6, executor 1, partition 7, PROCESS_LOCAL, 5508 bytes) 20/03/20 17:18:10 INFO TaskSetManager: Starting task 0.2 in stage 0.0 (TID 24, 10.139.64.4, executor 0, partition 0, PROCESS_LOCAL, 5508 bytes) 20/03/20 17:18:10 INFO TaskSetManager: Lost task 4.1 in stage 0.0 (TID 20) on 10.139.64.4, executor 0: org.apache.spark.SparkException (Failed to execute user defined function(anonfun$1: (double, double) => string)) [duplicate 14] 20/03/20 17:18:11 INFO TaskSetManager: Lost task 7.1 in stage 0.0 (TID 23) on 10.139.64.6, executor 1: org.apache.spark.SparkException (Failed to execute user defined function(anonfun$1: (double, double) => string)) [duplicate 15] 20/03/20 17:18:11 INFO TaskSetManager: Starting task 7.2 in stage 0.0 (TID 25, 10.139.64.6, executor 1, partition 7, PROCESS_LOCAL, 5508 bytes) 20/03/20 17:18:11 INFO TaskSetManager: Starting task 4.2 in stage 0.0 (TID 26, 10.139.64.4, executor 0, partition 4, PROCESS_LOCAL, 5508 bytes) 20/03/20 17:18:11 INFO TaskSetManager: Lost task 5.2 in stage 0.0 (TID 22) on 10.139.64.4, executor 0: org.apache.spark.SparkException (Failed to execute user defined function(anonfun$1: (double, double) => string)) [duplicate 16] 20/03/20 17:18:11 INFO TaskSetManager: Starting task 5.3 in stage 0.0 (TID 27, 10.139.64.4, executor 0, partition 5, PROCESS_LOCAL, 5508 bytes) 20/03/20 17:18:11 INFO TaskSetManager: Lost task 6.2 in stage 0.0 (TID 21) on 10.139.64.4, executor 0: org.apache.spark.SparkException (Failed to execute user defined function(anonfun$1: (double, double) => string)) [duplicate 17] 20/03/20 17:18:11 INFO TaskSetManager: Lost task 7.2 in stage 0.0 (TID 25) on 10.139.64.6, executor 1: org.apache.spark.SparkException (Failed to execute user defined function(anonfun$1: (double, double) => string)) [duplicate 18] 20/03/20 17:18:11 INFO TaskSetManager: Starting task 7.3 in stage 0.0 (TID 28, 10.139.64.6, executor 1, partition 7, PROCESS_LOCAL, 5508 bytes) 20/03/20 17:18:11 INFO TaskSetManager: Starting task 6.3 in stage 0.0 (TID 29, 10.139.64.4, executor 0, partition 6, PROCESS_LOCAL, 5508 bytes) 20/03/20 17:18:11 INFO TaskSetManager: Lost task 0.2 in stage 0.0 (TID 24) on 10.139.64.4, executor 0: org.apache.spark.SparkException (Failed to execute user defined function(anonfun$1: (double, double) => string)) [duplicate 19] 20/03/20 17:18:11 INFO TaskSetManager: Starting task 0.3 in stage 0.0 (TID 30, 
10.139.64.4, executor 0, partition 0, PROCESS_LOCAL, 5508 bytes) 20/03/20 17:18:11 INFO TaskSetManager: Lost task 4.2 in stage 0.0 (TID 26) on 10.139.64.4, executor 0: org.apache.spark.SparkException (Failed to execute user defined function(anonfun$1: (double, double) => string)) [duplicate 20] 20/03/20 17:18:11 INFO TaskSetManager: Starting task 4.3 in stage 0.0 (TID 31, 10.139.64.4, executor 0, partition 4, PROCESS_LOCAL, 5508 bytes) 20/03/20 17:18:11 INFO TaskSetManager: Lost task 6.3 in stage 0.0 (TID 29) on 10.139.64.4, executor 0: org.apache.spark.SparkException (Failed to execute user defined function(anonfun$1: (double, double) => string)) [duplicate 21] 20/03/20 17:18:11 ERROR TaskSetManager: Task 6 in stage 0.0 failed 4 times; aborting job 20/03/20 17:18:11 INFO TaskSchedulerImpl: Cancelling stage 0 20/03/20 17:18:11 INFO TaskSchedulerImpl: Killing all running tasks in stage 0: Stage cancelled 20/03/20 17:18:11 INFO TaskSchedulerImpl: Stage 0 was cancelled 20/03/20 17:18:11 INFO DAGScheduler: ShuffleMapStage 0 (start at TaxiCabReader.scala:202) failed in 6.969 s due to Job aborted due to stage failure: Task 6 in stage 0.0 failed 4 times, most recent failure: Lost task 6.3 in stage 0.0 (TID 29, 10.139.64.4, executor 0): org.apache.spark.SparkException: Failed to execute user defined function(anonfun$1: (double, double) => string) at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIteratorForCodegenStage2.processNext(Unknown Source) at org.apache.spark.sql.execution.BufferedRowIterator.hasNext(BufferedRowIterator.java:43) at org.apache.spark.sql.execution.WholeStageCodegenExec$$anonfun$13$$anon$1.hasNext(WholeStageCodegenExec.scala:640) at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:409) at org.apache.spark.shuffle.sort.BypassMergeSortShuffleWriter.write(BypassMergeSortShuffleWriter.java:125) at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:99) at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:55) at org.apache.spark.scheduler.Task.doRunTask(Task.scala:140) at org.apache.spark.scheduler.Task.run(Task.scala:113) at org.apache.spark.executor.Executor$TaskRunner$$anonfun$13.apply(Executor.scala:537) at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1541) at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:543) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748) Caused by: java.lang.NullPointerException at com.microsoft.pnp.GeoFinder.getNeighborhood(GeoFinder.java:52) at com.microsoft.pnp.TaxiCabReader$$anonfun$1.apply(TaxiCabReader.scala:76) at com.microsoft.pnp.TaxiCabReader$$anonfun$1.apply(TaxiCabReader.scala:75) ... 
15 more Driver stacktrace: 20/03/20 17:18:11 INFO TaskSchedulerImpl: Cancelling stage 1 20/03/20 17:18:11 INFO TaskSchedulerImpl: Killing all running tasks in stage 1: Stage cancelled 20/03/20 17:18:11 INFO TaskSchedulerImpl: Stage 1 was cancelled 20/03/20 17:18:11 INFO DAGScheduler: ShuffleMapStage 1 (start at TaxiCabReader.scala:202) failed in 6.878 s due to Job aborted due to stage failure: Task 6 in stage 0.0 failed 4 times, most recent failure: Lost task 6.3 in stage 0.0 (TID 29, 10.139.64.4, executor 0): org.apache.spark.SparkException: Failed to execute user defined function(anonfun$1: (double, double) => string) at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIteratorForCodegenStage2.processNext(Unknown Source) at org.apache.spark.sql.execution.BufferedRowIterator.hasNext(BufferedRowIterator.java:43) at org.apache.spark.sql.execution.WholeStageCodegenExec$$anonfun$13$$anon$1.hasNext(WholeStageCodegenExec.scala:640) at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:409) at org.apache.spark.shuffle.sort.BypassMergeSortShuffleWriter.write(BypassMergeSortShuffleWriter.java:125) at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:99) at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:55) at org.apache.spark.scheduler.Task.doRunTask(Task.scala:140) at org.apache.spark.scheduler.Task.run(Task.scala:113) at org.apache.spark.executor.Executor$TaskRunner$$anonfun$13.apply(Executor.scala:537) at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1541) at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:543) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748) Caused by: java.lang.NullPointerException at com.microsoft.pnp.GeoFinder.getNeighborhood(GeoFinder.java:52) at com.microsoft.pnp.TaxiCabReader$$anonfun$1.apply(TaxiCabReader.scala:76) at com.microsoft.pnp.TaxiCabReader$$anonfun$1.apply(TaxiCabReader.scala:75) ... 15 more Driver stacktrace: 20/03/20 17:18:11 WARN TaskSetManager: Lost task 7.3 in stage 0.0 (TID 28, 10.139.64.6, executor 1): TaskKilled (Stage cancelled) 20/03/20 17:18:11 INFO DAGScheduler: Job 0 failed: start at TaxiCabReader.scala:202, took 7.058312 s 20/03/20 17:18:11 ERROR WriteToDataSourceV2Exec: Data source writer org.apache.spark.sql.execution.streaming.sources.MicroBatchWriter@24e2d1bf is aborting. 20/03/20 17:18:11 ERROR WriteToDataSourceV2Exec: Data source writer org.apache.spark.sql.execution.streaming.sources.MicroBatchWriter@24e2d1bf aborted. 
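Annotation: every task failure above has the same root cause, a NullPointerException thrown at GeoFinder.getNeighborhood(GeoFinder.java:52) inside the (double, double) => string UDF applied at TaxiCabReader.scala:75-76. Because tasks on every partition and both executors fail identically, this points at the lookup's shared reference data (or the lookup object itself) rather than at a single bad input row; once any task fails four times the micro-batch, and with it the query, is aborted. A hypothetical defensive wrapper for such a lookup is sketched below; lookupNeighborhood and the "Unknown" fallback are stand-ins, not the project's actual code:

    import org.apache.spark.sql.functions.udf

    // Stand-in for whatever GeoFinder.getNeighborhood does; it may return null or
    // throw if its reference data is missing on the executor.
    def lookupNeighborhood(longitude: Double, latitude: Double): String = ???

    // A failed or null lookup becomes a sentinel value instead of an uncaught
    // NullPointerException that fails the task (and, after four retries, the job).
    val toNeighborhood = udf { (longitude: Double, latitude: Double) =>
      try Option(lookupNeighborhood(longitude, latitude)).getOrElse("Unknown")
      catch { case _: NullPointerException => "Unknown" }
    }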
20/03/20 17:18:11 WARN TaskSetManager: Lost task 5.3 in stage 0.0 (TID 27, 10.139.64.4, executor 0): TaskKilled (Stage cancelled) 20/03/20 17:18:11 WARN TaskSetManager: Lost task 0.0 in stage 1.0 (TID 13, 10.139.64.4, executor 0): TaskKilled (Stage cancelled) 20/03/20 17:18:11 INFO TaskSchedulerImpl: Removed TaskSet 1.0, whose tasks have all completed, from pool 8809067319516496537 20/03/20 17:18:11 WARN TaskSetManager: Lost task 4.3 in stage 0.0 (TID 31, 10.139.64.4, executor 0): TaskKilled (Stage cancelled) 20/03/20 17:18:11 INFO TaskSchedulerImpl: Removed TaskSet 0.0, whose tasks have all completed, from pool 8809067319516496537 20/03/20 17:18:11 WARN TaskSetManager: Lost task 0.3 in stage 0.0 (TID 30, 10.139.64.4, executor 0): TaskKilled (Stage cancelled) 20/03/20 17:18:11 INFO TaskSchedulerImpl: Removed TaskSet 0.0, whose tasks have all completed, from pool 8809067319516496537 20/03/20 17:18:11 ERROR MicroBatchExecution: Query maxAvgFarePerNeighborhood_cassandra_insert [id = 24c3bfd5-3204-406d-99ec-4ae30abce0ea, runId = e10d28b9-3587-4a97-b7bd-e69ffa34fc12] terminated with error org.apache.spark.SparkException: Writing job aborted. at org.apache.spark.sql.execution.datasources.v2.WriteToDataSourceV2Exec.doExecute(WriteToDataSourceV2Exec.scala:92) at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:147) at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:135) at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$5.apply(SparkPlan.scala:188) at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151) at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:184) at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:135) at org.apache.spark.sql.execution.collect.Collector$.collect(Collector.scala:77) at org.apache.spark.sql.execution.collect.Collector$.collect(Collector.scala:86) at org.apache.spark.sql.execution.ResultCacheManager.getOrComputeResult(ResultCacheManager.scala:508) at org.apache.spark.sql.execution.ResultCacheManager.getOrComputeResult(ResultCacheManager.scala:480) at org.apache.spark.sql.execution.SparkPlan.executeCollectResult(SparkPlan.scala:325) at org.apache.spark.sql.Dataset.org$apache$spark$sql$Dataset$$collectResult(Dataset.scala:2890) at org.apache.spark.sql.Dataset.org$apache$spark$sql$Dataset$$collectFromPlan(Dataset.scala:3508) at org.apache.spark.sql.Dataset$$anonfun$collect$1.apply(Dataset.scala:2857) at org.apache.spark.sql.Dataset$$anonfun$collect$1.apply(Dataset.scala:2857) at org.apache.spark.sql.Dataset$$anonfun$54.apply(Dataset.scala:3492) at org.apache.spark.sql.Dataset$$anonfun$54.apply(Dataset.scala:3487) at org.apache.spark.sql.execution.SQLExecution$$anonfun$withCustomExecutionEnv$1.apply(SQLExecution.scala:113) at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:242) at org.apache.spark.sql.execution.SQLExecution$.withCustomExecutionEnv(SQLExecution.scala:99) at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:172) at org.apache.spark.sql.Dataset.org$apache$spark$sql$Dataset$$withAction(Dataset.scala:3487) at org.apache.spark.sql.Dataset.collect(Dataset.scala:2857) at org.apache.spark.sql.execution.streaming.MicroBatchExecution$$anonfun$org$apache$spark$sql$execution$streaming$MicroBatchExecution$$runBatch$5$$anonfun$apply$17.apply(MicroBatchExecution.scala:572) at 
org.apache.spark.sql.execution.SQLExecution$$anonfun$withCustomExecutionEnv$1.apply(SQLExecution.scala:113) at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:242) at org.apache.spark.sql.execution.SQLExecution$.withCustomExecutionEnv(SQLExecution.scala:99) at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:172) at org.apache.spark.sql.execution.streaming.MicroBatchExecution$$anonfun$org$apache$spark$sql$execution$streaming$MicroBatchExecution$$runBatch$5.apply(MicroBatchExecution.scala:567) at org.apache.spark.sql.execution.streaming.ProgressReporter$class.reportTimeTaken(ProgressReporter.scala:263) at org.apache.spark.sql.execution.streaming.StreamExecution.reportTimeTaken(StreamExecution.scala:61) at org.apache.spark.sql.execution.streaming.MicroBatchExecution.org$apache$spark$sql$execution$streaming$MicroBatchExecution$$runBatch(MicroBatchExecution.scala:566) at org.apache.spark.sql.execution.streaming.MicroBatchExecution$$anonfun$runActivatedStream$1$$anonfun$apply$mcZ$sp$1.apply$mcV$sp(MicroBatchExecution.scala:208) at org.apache.spark.sql.execution.streaming.MicroBatchExecution$$anonfun$runActivatedStream$1$$anonfun$apply$mcZ$sp$1.apply(MicroBatchExecution.scala:176) at org.apache.spark.sql.execution.streaming.MicroBatchExecution$$anonfun$runActivatedStream$1$$anonfun$apply$mcZ$sp$1.apply(MicroBatchExecution.scala:176) at org.apache.spark.sql.execution.streaming.ProgressReporter$class.reportTimeTaken(ProgressReporter.scala:263) at org.apache.spark.sql.execution.streaming.StreamExecution.reportTimeTaken(StreamExecution.scala:61) at org.apache.spark.sql.execution.streaming.MicroBatchExecution$$anonfun$runActivatedStream$1.apply$mcZ$sp(MicroBatchExecution.scala:176) at org.apache.spark.sql.execution.streaming.ProcessingTimeExecutor.execute(TriggerExecutor.scala:56) at org.apache.spark.sql.execution.streaming.MicroBatchExecution.runActivatedStream(MicroBatchExecution.scala:170) at org.apache.spark.sql.execution.streaming.StreamExecution.org$apache$spark$sql$execution$streaming$StreamExecution$$runStream(StreamExecution.scala:296) at org.apache.spark.sql.execution.streaming.StreamExecution$$anon$1.run(StreamExecution.scala:208) Caused by: org.apache.spark.SparkException: Job aborted due to stage failure: Task 6 in stage 0.0 failed 4 times, most recent failure: Lost task 6.3 in stage 0.0 (TID 29, 10.139.64.4, executor 0): org.apache.spark.SparkException: Failed to execute user defined function(anonfun$1: (double, double) => string) at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIteratorForCodegenStage2.processNext(Unknown Source) at org.apache.spark.sql.execution.BufferedRowIterator.hasNext(BufferedRowIterator.java:43) at org.apache.spark.sql.execution.WholeStageCodegenExec$$anonfun$13$$anon$1.hasNext(WholeStageCodegenExec.scala:640) at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:409) at org.apache.spark.shuffle.sort.BypassMergeSortShuffleWriter.write(BypassMergeSortShuffleWriter.java:125) at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:99) at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:55) at org.apache.spark.scheduler.Task.doRunTask(Task.scala:140) at org.apache.spark.scheduler.Task.run(Task.scala:113) at org.apache.spark.executor.Executor$TaskRunner$$anonfun$13.apply(Executor.scala:537) at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1541) at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:543) 
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748) Caused by: java.lang.NullPointerException at com.microsoft.pnp.GeoFinder.getNeighborhood(GeoFinder.java:52) at com.microsoft.pnp.TaxiCabReader$$anonfun$1.apply(TaxiCabReader.scala:76) at com.microsoft.pnp.TaxiCabReader$$anonfun$1.apply(TaxiCabReader.scala:75) ... 15 more Driver stacktrace: at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:2362) at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:2350) at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:2349) at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59) at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48) at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:2349) at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:1102) at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:1102) at scala.Option.foreach(Option.scala:257) at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:1102) at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:2582) at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2529) at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2517) at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:49) at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:897) at org.apache.spark.SparkContext.runJob(SparkContext.scala:2280) at org.apache.spark.sql.execution.datasources.v2.WriteToDataSourceV2Exec.doExecute(WriteToDataSourceV2Exec.scala:64) ... 
42 more Caused by: org.apache.spark.SparkException: Failed to execute user defined function(anonfun$1: (double, double) => string) at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIteratorForCodegenStage2.processNext(Unknown Source) at org.apache.spark.sql.execution.BufferedRowIterator.hasNext(BufferedRowIterator.java:43) at org.apache.spark.sql.execution.WholeStageCodegenExec$$anonfun$13$$anon$1.hasNext(WholeStageCodegenExec.scala:640) at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:409) at org.apache.spark.shuffle.sort.BypassMergeSortShuffleWriter.write(BypassMergeSortShuffleWriter.java:125) at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:99) at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:55) at org.apache.spark.scheduler.Task.doRunTask(Task.scala:140) at org.apache.spark.scheduler.Task.run(Task.scala:113) at org.apache.spark.executor.Executor$TaskRunner$$anonfun$13.apply(Executor.scala:537) at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1541) at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:543) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748) Caused by: java.lang.NullPointerException at com.microsoft.pnp.GeoFinder.getNeighborhood(GeoFinder.java:52) at com.microsoft.pnp.TaxiCabReader$$anonfun$1.apply(TaxiCabReader.scala:76) at com.microsoft.pnp.TaxiCabReader$$anonfun$1.apply(TaxiCabReader.scala:75) ... 15 more 20/03/20 17:18:11 INFO EventHubsClient: close: Closing EventHubsClient. 20/03/20 17:18:11 INFO ClientConnectionPool: Client returned. EventHub name: taxi-ride-eh. Total clients: 1. Available clients: 1 20/03/20 17:18:11 INFO EventHubsClient: close: Closing EventHubsClient. 20/03/20 17:18:11 INFO ClientConnectionPool: Client returned. EventHub name: taxi-fare-eh. Total clients: 1. Available clients: 1 20/03/20 17:18:11 INFO DatabricksStreamingQueryListener: Query termination received for [id=24c3bfd5-3204-406d-99ec-4ae30abce0ea, runId=e10d28b9-3587-4a97-b7bd-e69ffa34fc12] 20/03/20 17:18:11 ERROR ScalaDriverLocal: User Code Stack Trace: org.apache.spark.sql.streaming.StreamingQueryException: Writing job aborted. 
=== Streaming Query === Identifier: maxAvgFarePerNeighborhood_cassandra_insert [id = 24c3bfd5-3204-406d-99ec-4ae30abce0ea, runId = e10d28b9-3587-4a97-b7bd-e69ffa34fc12] Current Committed Offsets: {} Current Available Offsets: {org.apache.spark.sql.eventhubs.EventHubsSource@462a8889: {"taxi-ride-eh":{"2":1004,"5":981,"4":1013,"7":988,"1":1035,"3":1002,"6":982,"0":992}},org.apache.spark.sql.eventhubs.EventHubsSource@59a6f691: {"taxi-fare-eh":{"2":1004,"5":981,"4":1013,"7":988,"1":1035,"3":1002,"6":982,"0":992}}} Current State: ACTIVE Thread State: RUNNABLE Logical Plan: Project [window#313-T180000ms.start AS start#350, window#313-T180000ms.end AS end#351, pickupNeighborhood#186, rideCount#339L, totalFareAmount#341, totalTipAmount#343] +- Aggregate [window#344-T180000ms, pickupNeighborhood#186], [window#344-T180000ms AS window#313-T180000ms, pickupNeighborhood#186, count(1) AS rideCount#339L, sum(fareAmount#246) AS totalFareAmount#341, sum(tipAmount#249) AS totalTipAmount#343] +- Filter isnotnull(pickupTime#201-T180000ms) +- Project [named_struct(start, precisetimestampconversion(((((CASE WHEN (cast(CEIL((cast((precisetimestampconversion(pickupTime#201-T180000ms, TimestampType, LongType) - 0) as double) / cast(60000000 as double))) as double) = (cast((precisetimestampconversion(pickupTime#201-T180000ms, TimestampType, LongType) - 0) as double) / cast(60000000 as double))) THEN (CEIL((cast((precisetimestampconversion(pickupTime#201-T180000ms, TimestampType, LongType) - 0) as double) / cast(60000000 as double))) + cast(1 as bigint)) ELSE CEIL((cast((precisetimestampconversion(pickupTime#201-T180000ms, TimestampType, LongType) - 0) as double) / cast(60000000 as double))) END + cast(0 as bigint)) - cast(1 as bigint)) * 60000000) + 0), LongType, TimestampType), end, precisetimestampconversion((((((CASE WHEN (cast(CEIL((cast((precisetimestampconversion(pickupTime#201-T180000ms, TimestampType, LongType) - 0) as double) / cast(60000000 as double))) as double) = (cast((precisetimestampconversion(pickupTime#201-T180000ms, TimestampType, LongType) - 0) as double) / cast(60000000 as double))) THEN (CEIL((cast((precisetimestampconversion(pickupTime#201-T180000ms, TimestampType, LongType) - 0) as double) / cast(60000000 as double))) + cast(1 as bigint)) ELSE CEIL((cast((precisetimestampconversion(pickupTime#201-T180000ms, TimestampType, LongType) - 0) as double) / cast(60000000 as double))) END + cast(0 as bigint)) - cast(1 as bigint)) * 60000000) + 0) + 60000000), LongType, TimestampType)) AS window#344-T180000ms, medallion#198L, hackLicense#199L, vendorId#200, pickupTime#201-T180000ms, rateCode#188, storeAndForwardFlag#189, dropoffTime#190, passengerCount#191, tripTimeInSeconds#192, tripDistanceInMiles#193, pickupLon#194, pickupLat#195, dropoffLon#196, dropoffLat#197, paymentType#245, fareAmount#246, surcharge#247, mtaTax#248, tipAmount#249, tollsAmount#250, totalAmount#251, pickupNeighborhood#186, dropoffNeighborhood#187] +- Project [medallion#198L, hackLicense#199L, vendorId#200, pickupTime#201-T180000ms, rateCode#188, storeAndForwardFlag#189, dropoffTime#190, passengerCount#191, tripTimeInSeconds#192, tripDistanceInMiles#193, pickupLon#194, pickupLat#195, dropoffLon#196, dropoffLat#197, paymentType#245, fareAmount#246, surcharge#247, mtaTax#248, tipAmount#249, tollsAmount#250, totalAmount#251, pickupNeighborhood#186, dropoffNeighborhood#187] +- Project [medallion#198L, hackLicense#199L, vendorId#200, pickupTime#201-T180000ms, rateCode#188, storeAndForwardFlag#189, dropoffTime#190, passengerCount#191, 
tripTimeInSeconds#192, tripDistanceInMiles#193, pickupLon#194, pickupLat#195, dropoffLon#196, dropoffLat#197, errorMessage#202, messageData#203, pickupNeighborhood#186, dropoffNeighborhood#187, pickupTimeString#244, paymentType#245, fareAmount#246, surcharge#247, mtaTax#248, tipAmount#249, ... 2 more fields] +- Join Inner, ((((medallion#198L = medallion#241L) && (hackLicense#199L = hackLicense#242L)) && (vendorId#200 = vendorId#243)) && (pickupTime#201-T180000ms = pickupTime#235-T180000ms)) :- EventTimeWatermark pickupTime#201: timestamp, interval 3 minutes : +- Project [ride#179.rateCode AS rateCode#188, ride#179.storeAndForwardFlag AS storeAndForwardFlag#189, ride#179.dropoffTime AS dropoffTime#190, ride#179.passengerCount AS passengerCount#191, ride#179.tripTimeInSeconds AS tripTimeInSeconds#192, ride#179.tripDistanceInMiles AS tripDistanceInMiles#193, ride#179.pickupLon AS pickupLon#194, ride#179.pickupLat AS pickupLat#195, ride#179.dropoffLon AS dropoffLon#196, ride#179.dropoffLat AS dropoffLat#197, ride#179.medallion AS medallion#198L, ride#179.hackLicense AS hackLicense#199L, ride#179.vendorId AS vendorId#200, ride#179.pickupTime AS pickupTime#201, ride#179.errorMessage AS errorMessage#202, ride#179.messageData AS messageData#203, if ((isnull(ride#179.pickupLon) || isnull(ride#179.pickupLat))) null else UDF:neighborhoodFinder(ride#179.pickupLon, ride#179.pickupLat) AS pickupNeighborhood#186, if ((isnull(ride#179.dropoffLon) || isnull(ride#179.dropoffLat))) null else UDF:neighborhoodFinder(ride#179.dropoffLon, ride#179.dropoffLat) AS dropoffNeighborhood#187] : +- TypedFilter , interface org.apache.spark.sql.Row, [StructField(messageData,StringType,true), StructField(ride,StructType(StructField(rateCode,IntegerType,true), StructField(storeAndForwardFlag,StringType,true), StructField(dropoffTime,TimestampType,true), StructField(passengerCount,IntegerType,true), StructField(tripTimeInSeconds,DoubleType,true), StructField(tripDistanceInMiles,DoubleType,true), StructField(pickupLon,DoubleType,true), StructField(pickupLat,DoubleType,true), StructField(dropoffLon,DoubleType,true), StructField(dropoffLat,DoubleType,true), StructField(medallion,LongType,true), StructField(hackLicense,LongType,true), StructField(vendorId,StringType,true), StructField(pickupTime,TimestampType,true), StructField(errorMessage,StringType,true), StructField(messageData,StringType,true)),true), StructField(errorMessage,StringType,true)], createexternalrow(messageData#178.toString, if (isnull(ride#179)) null else createexternalrow(if (ride#179.isNullAt) null else ride#179.rateCode, if (ride#179.isNullAt) null else ride#179.storeAndForwardFlag.toString, if (ride#179.isNullAt) null else staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, ObjectType(class java.sql.Timestamp), toJavaTimestamp, ride#179.dropoffTime, true, false), if (ride#179.isNullAt) null else ride#179.passengerCount, if (ride#179.isNullAt) null else ride#179.tripTimeInSeconds, if (ride#179.isNullAt) null else ride#179.tripDistanceInMiles, if (ride#179.isNullAt) null else ride#179.pickupLon, if (ride#179.isNullAt) null else ride#179.pickupLat, if (ride#179.isNullAt) null else ride#179.dropoffLon, if (ride#179.isNullAt) null else ride#179.dropoffLat, if (ride#179.isNullAt) null else ride#179.medallion, if (ride#179.isNullAt) null else ride#179.hackLicense, if (ride#179.isNullAt) null else ride#179.vendorId.toString, if (ride#179.isNullAt) null else staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, ObjectType(class 
java.sql.Timestamp), toJavaTimestamp, ride#179.pickupTime, true, false), if (ride#179.isNullAt) null else ride#179.errorMessage.toString, if (ride#179.isNullAt) null else ride#179.messageData.toString, StructField(rateCode,IntegerType,true), StructField(storeAndForwardFlag,StringType,true), StructField(dropoffTime,TimestampType,true), StructField(passengerCount,IntegerType,true), StructField(tripTimeInSeconds,DoubleType,true), StructField(tripDistanceInMiles,DoubleType,true), StructField(pickupLon,DoubleType,true), StructField(pickupLat,DoubleType,true), ... 8 more fields), errorMessage#182.toString, StructField(messageData,StringType,true), StructField(ride,StructType(StructField(rateCode,IntegerType,true), StructField(storeAndForwardFlag,StringType,true), StructField(dropoffTime,TimestampType,true), StructField(passengerCount,IntegerType,true), StructField(tripTimeInSeconds,DoubleType,true), StructField(tripDistanceInMiles,DoubleType,true), StructField(pickupLon,DoubleType,true), StructField(pickupLat,DoubleType,true), StructField(dropoffLon,DoubleType,true), StructField(dropoffLat,DoubleType,true), StructField(medallion,LongType,true), StructField(hackLicense,LongType,true), StructField(vendorId,StringType,true), StructField(pickupTime,TimestampType,true), StructField(errorMessage,StringType,true), StructField(messageData,StringType,true)),true), StructField(errorMessage,StringType,true)) : +- Project [messageData#178, ride#179, CASE WHEN isnull(ride#179) THEN Error decoding JSON ELSE cast(null as string) END AS errorMessage#182] : +- Project [cast(body#133 as string) AS messageData#178, jsontostructs(StructField(rateCode,IntegerType,true), StructField(storeAndForwardFlag,StringType,true), StructField(dropoffTime,TimestampType,true), StructField(passengerCount,IntegerType,true), StructField(tripTimeInSeconds,DoubleType,true), StructField(tripDistanceInMiles,DoubleType,true), StructField(pickupLon,DoubleType,true), StructField(pickupLat,DoubleType,true), StructField(dropoffLon,DoubleType,true), StructField(dropoffLat,DoubleType,true), StructField(medallion,LongType,true), StructField(hackLicense,LongType,true), StructField(vendorId,StringType,true), StructField(pickupTime,TimestampType,true), StructField(errorMessage,StringType,true), StructField(messageData,StringType,true), cast(body#133 as string), Some(Etc/UTC)) AS ride#179] : +- StreamingExecutionRelation org.apache.spark.sql.eventhubs.EventHubsSource@462a8889, [body#133, partition#134, offset#135, sequenceNumber#136L, enqueuedTime#137, publisher#138, partitionKey#139, properties#140, systemProperties#141] +- EventTimeWatermark pickupTime#235: timestamp, interval 3 minutes +- Project [fare#227.medallion AS medallion#241L, fare#227.hackLicense AS hackLicense#242L, fare#227.vendorId AS vendorId#243, fare#227.pickupTimeString AS pickupTimeString#244, fare#227.paymentType AS paymentType#245, fare#227.fareAmount AS fareAmount#246, fare#227.surcharge AS surcharge#247, fare#227.mtaTax AS mtaTax#248, fare#227.tipAmount AS tipAmount#249, fare#227.tollsAmount AS tollsAmount#250, fare#227.totalAmount AS totalAmount#251, pickupTime#235] +- TypedFilter , interface org.apache.spark.sql.Row, [StructField(messageData,StringType,true), StructField(fare,StructType(StructField(medallion,LongType,true), StructField(hackLicense,LongType,true), StructField(vendorId,StringType,true), StructField(pickupTimeString,StringType,true), StructField(paymentType,StringType,true), StructField(fareAmount,DoubleType,true), StructField(surcharge,DoubleType,true), 
StructField(mtaTax,DoubleType,true), StructField(tipAmount,DoubleType,true), StructField(tollsAmount,DoubleType,true), StructField(totalAmount,DoubleType,true)),true), StructField(errorMessage,StringType,true), StructField(pickupTime,TimestampType,true)], createexternalrow(messageData#226.toString, if (isnull(fare#227)) null else createexternalrow(if (fare#227.isNullAt) null else fare#227.medallion, if (fare#227.isNullAt) null else fare#227.hackLicense, if (fare#227.isNullAt) null else fare#227.vendorId.toString, if (fare#227.isNullAt) null else fare#227.pickupTimeString.toString, if (fare#227.isNullAt) null else fare#227.paymentType.toString, if (fare#227.isNullAt) null else fare#227.fareAmount, if (fare#227.isNullAt) null else fare#227.surcharge, if (fare#227.isNullAt) null else fare#227.mtaTax, if (fare#227.isNullAt) null else fare#227.tipAmount, if (fare#227.isNullAt) null else fare#227.tollsAmount, if (fare#227.isNullAt) null else fare#227.totalAmount, StructField(medallion,LongType,true), StructField(hackLicense,LongType,true), StructField(vendorId,StringType,true), StructField(pickupTimeString,StringType,true), StructField(paymentType,StringType,true), StructField(fareAmount,DoubleType,true), StructField(surcharge,DoubleType,true), StructField(mtaTax,DoubleType,true), StructField(tipAmount,DoubleType,true), StructField(tollsAmount,DoubleType,true), StructField(totalAmount,DoubleType,true)), errorMessage#230.toString, staticinvoke(class org.apache.spark.sql.catalyst.util.DateTimeUtils$, ObjectType(class java.sql.Timestamp), toJavaTimestamp, pickupTime#235, true, false), StructField(messageData,StringType,true), StructField(fare,StructType(StructField(medallion,LongType,true), StructField(hackLicense,LongType,true), StructField(vendorId,StringType,true), StructField(pickupTimeString,StringType,true), StructField(paymentType,StringType,true), StructField(fareAmount,DoubleType,true), StructField(surcharge,DoubleType,true), StructField(mtaTax,DoubleType,true), StructField(tipAmount,DoubleType,true), StructField(tollsAmount,DoubleType,true), StructField(totalAmount,DoubleType,true)),true), StructField(errorMessage,StringType,true), StructField(pickupTime,TimestampType,true)) +- Project [messageData#226, fare#227, errorMessage#230, CASE WHEN isnull(fare#227) THEN cast(null as timestamp) ELSE to_timestamp('fare.pickupTimeString, Some(yyyy-MM-dd HH:mm:ss)) END AS pickupTime#235] +- Project [messageData#226, fare#227, CASE WHEN isnull(fare#227) THEN Error decoding CSV WHEN isnull(to_timestamp('fare.pickupTimeString, Some(yyyy-MM-dd HH:mm:ss))) THEN Error parsing pickupTime ELSE cast(null as string) END AS errorMessage#230] +- Project [cast(body#160 as string) AS messageData#226, from_csv(StructField(medallion,LongType,true), StructField(hackLicense,LongType,true), StructField(vendorId,StringType,true), StructField(pickupTimeString,StringType,true), StructField(paymentType,StringType,true), StructField(fareAmount,DoubleType,true), StructField(surcharge,DoubleType,true), StructField(mtaTax,DoubleType,true), StructField(tipAmount,DoubleType,true), StructField(tollsAmount,DoubleType,true), StructField(totalAmount,DoubleType,true), (header,true), (multiLine,true), cast(body#160 as string), Some(Etc/UTC)) AS fare#227] +- StreamingExecutionRelation org.apache.spark.sql.eventhubs.EventHubsSource@59a6f691, [body#160, partition#161, offset#162, sequenceNumber#163L, enqueuedTime#164, publisher#165, partitionKey#166, properties#167, systemProperties#168] at 
org.apache.spark.sql.execution.streaming.StreamExecution.org$apache$spark$sql$execution$streaming$StreamExecution$$runStream(StreamExecution.scala:312) at org.apache.spark.sql.execution.streaming.StreamExecution$$anon$1.run(StreamExecution.scala:208) Caused by: org.apache.spark.SparkException: Writing job aborted. at org.apache.spark.sql.execution.datasources.v2.WriteToDataSourceV2Exec.doExecute(WriteToDataSourceV2Exec.scala:92) at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:147) at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:135) at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$5.apply(SparkPlan.scala:188) at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151) at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:184) at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:135) at org.apache.spark.sql.execution.collect.Collector$.collect(Collector.scala:77) at org.apache.spark.sql.execution.collect.Collector$.collect(Collector.scala:86) at org.apache.spark.sql.execution.ResultCacheManager.getOrComputeResult(ResultCacheManager.scala:508) at org.apache.spark.sql.execution.ResultCacheManager.getOrComputeResult(ResultCacheManager.scala:480) at org.apache.spark.sql.execution.SparkPlan.executeCollectResult(SparkPlan.scala:325) at org.apache.spark.sql.Dataset.org$apache$spark$sql$Dataset$$collectResult(Dataset.scala:2890) at org.apache.spark.sql.Dataset.org$apache$spark$sql$Dataset$$collectFromPlan(Dataset.scala:3508) at org.apache.spark.sql.Dataset$$anonfun$collect$1.apply(Dataset.scala:2857) at org.apache.spark.sql.Dataset$$anonfun$collect$1.apply(Dataset.scala:2857) at org.apache.spark.sql.Dataset$$anonfun$54.apply(Dataset.scala:3492) at org.apache.spark.sql.Dataset$$anonfun$54.apply(Dataset.scala:3487) at org.apache.spark.sql.execution.SQLExecution$$anonfun$withCustomExecutionEnv$1.apply(SQLExecution.scala:113) at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:242) at org.apache.spark.sql.execution.SQLExecution$.withCustomExecutionEnv(SQLExecution.scala:99) at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:172) at org.apache.spark.sql.Dataset.org$apache$spark$sql$Dataset$$withAction(Dataset.scala:3487) at org.apache.spark.sql.Dataset.collect(Dataset.scala:2857) at org.apache.spark.sql.execution.streaming.MicroBatchExecution$$anonfun$org$apache$spark$sql$execution$streaming$MicroBatchExecution$$runBatch$5$$anonfun$apply$17.apply(MicroBatchExecution.scala:572) at org.apache.spark.sql.execution.SQLExecution$$anonfun$withCustomExecutionEnv$1.apply(SQLExecution.scala:113) at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:242) at org.apache.spark.sql.execution.SQLExecution$.withCustomExecutionEnv(SQLExecution.scala:99) at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:172) at org.apache.spark.sql.execution.streaming.MicroBatchExecution$$anonfun$org$apache$spark$sql$execution$streaming$MicroBatchExecution$$runBatch$5.apply(MicroBatchExecution.scala:567) at org.apache.spark.sql.execution.streaming.ProgressReporter$class.reportTimeTaken(ProgressReporter.scala:263) at org.apache.spark.sql.execution.streaming.StreamExecution.reportTimeTaken(StreamExecution.scala:61) at 
org.apache.spark.sql.execution.streaming.MicroBatchExecution.org$apache$spark$sql$execution$streaming$MicroBatchExecution$$runBatch(MicroBatchExecution.scala:566) at org.apache.spark.sql.execution.streaming.MicroBatchExecution$$anonfun$runActivatedStream$1$$anonfun$apply$mcZ$sp$1.apply$mcV$sp(MicroBatchExecution.scala:208) at org.apache.spark.sql.execution.streaming.MicroBatchExecution$$anonfun$runActivatedStream$1$$anonfun$apply$mcZ$sp$1.apply(MicroBatchExecution.scala:176) at org.apache.spark.sql.execution.streaming.MicroBatchExecution$$anonfun$runActivatedStream$1$$anonfun$apply$mcZ$sp$1.apply(MicroBatchExecution.scala:176) at org.apache.spark.sql.execution.streaming.ProgressReporter$class.reportTimeTaken(ProgressReporter.scala:263) at org.apache.spark.sql.execution.streaming.StreamExecution.reportTimeTaken(StreamExecution.scala:61) at org.apache.spark.sql.execution.streaming.MicroBatchExecution$$anonfun$runActivatedStream$1.apply$mcZ$sp(MicroBatchExecution.scala:176) at org.apache.spark.sql.execution.streaming.ProcessingTimeExecutor.execute(TriggerExecutor.scala:56) at org.apache.spark.sql.execution.streaming.MicroBatchExecution.runActivatedStream(MicroBatchExecution.scala:170) at org.apache.spark.sql.execution.streaming.StreamExecution.org$apache$spark$sql$execution$streaming$StreamExecution$$runStream(StreamExecution.scala:296) ... 1 more Caused by: org.apache.spark.SparkException: Job aborted due to stage failure: Task 6 in stage 0.0 failed 4 times, most recent failure: Lost task 6.3 in stage 0.0 (TID 29, 10.139.64.4, executor 0): org.apache.spark.SparkException: Failed to execute user defined function(anonfun$1: (double, double) => string) at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIteratorForCodegenStage2.processNext(Unknown Source) at org.apache.spark.sql.execution.BufferedRowIterator.hasNext(BufferedRowIterator.java:43) at org.apache.spark.sql.execution.WholeStageCodegenExec$$anonfun$13$$anon$1.hasNext(WholeStageCodegenExec.scala:640) at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:409) at org.apache.spark.shuffle.sort.BypassMergeSortShuffleWriter.write(BypassMergeSortShuffleWriter.java:125) at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:99) at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:55) at org.apache.spark.scheduler.Task.doRunTask(Task.scala:140) at org.apache.spark.scheduler.Task.run(Task.scala:113) at org.apache.spark.executor.Executor$TaskRunner$$anonfun$13.apply(Executor.scala:537) at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1541) at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:543) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748) Caused by: java.lang.NullPointerException at com.microsoft.pnp.GeoFinder.getNeighborhood(GeoFinder.java:52) at com.microsoft.pnp.TaxiCabReader$$anonfun$1.apply(TaxiCabReader.scala:76) at com.microsoft.pnp.TaxiCabReader$$anonfun$1.apply(TaxiCabReader.scala:75) ... 
15 more Driver stacktrace: at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:2362) at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:2350) at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:2349) at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59) at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48) at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:2349) at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:1102) at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:1102) at scala.Option.foreach(Option.scala:257) at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:1102) at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:2582) at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2529) at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2517) at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:49) at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:897) at org.apache.spark.SparkContext.runJob(SparkContext.scala:2280) at org.apache.spark.sql.execution.datasources.v2.WriteToDataSourceV2Exec.doExecute(WriteToDataSourceV2Exec.scala:64) ... 42 more Caused by: org.apache.spark.SparkException: Failed to execute user defined function(anonfun$1: (double, double) => string) at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIteratorForCodegenStage2.processNext(Unknown Source) at org.apache.spark.sql.execution.BufferedRowIterator.hasNext(BufferedRowIterator.java:43) at org.apache.spark.sql.execution.WholeStageCodegenExec$$anonfun$13$$anon$1.hasNext(WholeStageCodegenExec.scala:640) at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:409) at org.apache.spark.shuffle.sort.BypassMergeSortShuffleWriter.write(BypassMergeSortShuffleWriter.java:125) at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:99) at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:55) at org.apache.spark.scheduler.Task.doRunTask(Task.scala:140) at org.apache.spark.scheduler.Task.run(Task.scala:113) at org.apache.spark.executor.Executor$TaskRunner$$anonfun$13.apply(Executor.scala:537) at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1541) at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:543) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748) Caused by: java.lang.NullPointerException at com.microsoft.pnp.GeoFinder.getNeighborhood(GeoFinder.java:52) at com.microsoft.pnp.TaxiCabReader$$anonfun$1.apply(TaxiCabReader.scala:76) at com.microsoft.pnp.TaxiCabReader$$anonfun$1.apply(TaxiCabReader.scala:75) ... 
15 more 20/03/20 17:18:11 INFO ProgressReporter$: Removed result fetcher for 8809067319516496537_6674896074123844973_job-1-run-6-action-1 20/03/20 17:18:12 INFO SQLAppStatusListener: Execution ID: 62 Total Executor Run Time: 40274 20/03/20 17:18:12 INFO SQLAppStatusListener: Execution ID: 61 Total Executor Run Time: 0 20/03/20 17:18:12 INFO DriverCorral$: Cleaning the wrapper ReplId-7a401-80dfe-48829-9 (currently in status Idle(ReplId-7a401-80dfe-48829-9)) 20/03/20 17:18:12 INFO DriverCorral$: sending shutdown signal for REPL ReplId-7a401-80dfe-48829-9 20/03/20 17:18:12 INFO DriverCorral$: sending the interrupt signal for REPL ReplId-7a401-80dfe-48829-9 20/03/20 17:18:12 INFO DriverCorral$: waiting for localThread to stop for REPL ReplId-7a401-80dfe-48829-9 20/03/20 17:18:12 INFO DriverCorral$: ReplId-7a401-80dfe-48829-9 successfully discarded 20/03/20 17:20:52 INFO AzureNativeFileSystemStore: WASB request HEAD [https://dbstorageixbipxu7r3yby.blob.core.windows.net/root/1207841053205226-RenamePending.json?sig=REDACTED_AZURE_SAS_SIGNATURE&api-version=2017-07-29&st=2020-03-20T16%3A35%3A58Z&se=2020-03-21T12%3A35%3A58Z&sv=2017-07-29&spr=https&sp=racwdl&sr=c] failed; status: 404, msg: The specified blob does not exist., request date: Fri, 20 Mar 2020 17:20:51 GMT, start date: Fri Mar 20 17:20:52 UTC 2020, stop date: Fri Mar 20 17:20:52 UTC 2020, service request id: 01a7d121-d01e-0011-0adb-fe67d5000000, etag: null, md5: null, target: PRIMARY, headers: {Transfer-Encoding=[chunked], null=[HTTP/1.1 404 The specified blob does not exist.], x-ms-version=[2017-07-29], Server=[Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0], x-ms-error-code=[BlobNotFound], x-ms-request-id=[01a7d121-d01e-0011-0adb-fe67d5000000], Date=[Fri, 20 Mar 2020 17:20:51 GMT]} 20/03/20 17:20:52 INFO DriverCorral: DBFS health check ok 20/03/20 17:20:53 INFO HikariDataSource: metastore-monitor - Starting... 20/03/20 17:20:53 INFO HikariDataSource: metastore-monitor - Start completed. 20/03/20 17:20:53 INFO HikariDataSource: metastore-monitor - Shutdown initiated... 20/03/20 17:20:53 INFO HikariDataSource: metastore-monitor - Shutdown completed. 20/03/20 17:20:53 INFO MetastoreMonitor: Metastore healthcheck successful (connection duration = 128 milliseconds) 20/03/20 17:21:04 INFO HiveMetaStore: 0: get_database: default 20/03/20 17:21:04 INFO audit: ugi=root ip=unknown-ip-addr cmd=get_database: default 20/03/20 17:21:04 INFO DriverCorral: Metastore health check ok 20/03/20 17:23:01 INFO BaseLinkHandler: linkName[mgmt:sender], ErrorCondition[amqp:connection:forced, The connection was inactive for more than the allowed 300000 milliseconds and is closed by container 'LinkTracker'. TrackingId:88dae1749a524f1b8491214d84b849be_G4, SystemTracker:gateway5, Timestamp:2020-03-20T17:23:01] 20/03/20 17:23:01 INFO BaseLinkHandler: linkName[mgmt:receiver], ErrorCondition[amqp:connection:forced, The connection was inactive for more than the allowed 300000 milliseconds and is closed by container 'LinkTracker'. TrackingId:88dae1749a524f1b8491214d84b849be_G4, SystemTracker:gateway5, Timestamp:2020-03-20T17:23:01] 20/03/20 17:23:01 INFO SessionHandler: entityName[mgmt-session], condition[Error{condition=null, description='null', info=null}] 20/03/20 17:23:01 INFO ConnectionHandler: onConnectionRemoteClose: hostname[taxieventhubs3.servicebus.windows.net:5671], errorCondition[amqp:connection:forced, The connection was inactive for more than the allowed 300000 milliseconds and is closed by container 'LinkTracker'. 
TrackingId:88dae1749a524f1b8491214d84b849be_G4, SystemTracker:gateway5, Timestamp:2020-03-20T17:23:01] 20/03/20 17:23:01 INFO BaseLinkHandler: linkName[mgmt:sender] 20/03/20 17:23:01 INFO SessionHandler: entityName[mgmt-session], condition[Error{condition=null, description='null', info=null}] 20/03/20 17:23:01 INFO BaseLinkHandler: linkName[mgmt:receiver] 20/03/20 17:23:01 INFO ConnectionHandler: onConnectionLocalClose: hostname[taxieventhubs3.servicebus.windows.net:5671], errorCondition[null, null] 20/03/20 17:23:01 INFO ConnectionHandler: onConnectionUnbound: hostname[taxieventhubs3.servicebus.windows.net:5671], state[CLOSED], remoteState[CLOSED] 20/03/20 17:23:01 INFO SessionHandler: entityName[mgmt-session] 20/03/20 17:23:01 INFO BaseLinkHandler: linkName[mgmt:sender], ErrorCondition[amqp:connection:forced, The connection was inactive for more than the allowed 300000 milliseconds and is closed by container 'LinkTracker'. TrackingId:d3774ea1a5fa482a98d62b17ff9df38a_G25, SystemTracker:gateway5, Timestamp:2020-03-20T17:23:01] 20/03/20 17:23:01 INFO BaseLinkHandler: linkName[mgmt:receiver], ErrorCondition[amqp:connection:forced, The connection was inactive for more than the allowed 300000 milliseconds and is closed by container 'LinkTracker'. TrackingId:d3774ea1a5fa482a98d62b17ff9df38a_G25, SystemTracker:gateway5, Timestamp:2020-03-20T17:23:01] 20/03/20 17:23:01 INFO BaseLinkHandler: linkName[mgmt:sender] 20/03/20 17:23:01 INFO SessionHandler: entityName[mgmt-session], condition[Error{condition=null, description='null', info=null}] 20/03/20 17:23:01 INFO BaseLinkHandler: linkName[mgmt:receiver] 20/03/20 17:23:01 INFO SessionHandler: entityName[mgmt-session], condition[Error{condition=null, description='null', info=null}] 20/03/20 17:23:01 INFO ConnectionHandler: onConnectionRemoteClose: hostname[taxieventhubs3.servicebus.windows.net:5671], errorCondition[amqp:connection:forced, The connection was inactive for more than the allowed 300000 milliseconds and is closed by container 'LinkTracker'. TrackingId:d3774ea1a5fa482a98d62b17ff9df38a_G25, SystemTracker:gateway5, Timestamp:2020-03-20T17:23:01] 20/03/20 17:23:01 INFO ConnectionHandler: onTransportClosed: hostname[taxieventhubs3.servicebus.windows.net:5671], error[n/a] 20/03/20 17:23:01 INFO ConnectionHandler: onConnectionLocalClose: hostname[taxieventhubs3.servicebus.windows.net:5671], errorCondition[null, null] 20/03/20 17:23:01 INFO ConnectionHandler: onConnectionUnbound: hostname[taxieventhubs3.servicebus.windows.net:5671], state[CLOSED], remoteState[CLOSED] 20/03/20 17:23:01 INFO SessionHandler: entityName[mgmt-session]
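For context on what the aborted `maxAvgFarePerNeighborhood_cassandra_insert` query was computing, the Logical Plan section earlier in this log can be read back into DataFrame code: the ride and fare streams (both with 3-minute watermarks on `pickupTime`) are inner-joined on medallion, hackLicense, vendorId and pickupTime, then aggregated per `pickupNeighborhood` over a one-minute event-time window. The sketch below is a reconstruction from that plan, not the actual TaxiCabReader.scala source; the `rides` and `fares` DataFrame names and the helper function are assumptions.

```scala
import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.functions._

// Reconstruction (from the logical plan in this log) of the windowed aggregation
// that feeds the Cassandra sink. `rides` and `fares` are assumed to be the
// already-parsed streaming DataFrames from taxi-ride-eh and taxi-fare-eh.
def maxAvgFarePerNeighborhood(rides: DataFrame, fares: DataFrame): DataFrame = {
  val joined = rides
    .withWatermark("pickupTime", "3 minutes")        // EventTimeWatermark, interval 3 minutes
    .join(
      fares.withWatermark("pickupTime", "3 minutes"),
      Seq("medallion", "hackLicense", "vendorId", "pickupTime"))

  joined
    .groupBy(window(col("pickupTime"), "1 minute"),  // 60000000 microseconds = 1 minute
             col("pickupNeighborhood"))
    .agg(
      count(lit(1)).as("rideCount"),                 // count(1) AS rideCount in the plan
      sum("fareAmount").as("totalFareAmount"),
      sum("tipAmount").as("totalTipAmount"))
    .select(
      col("window.start").as("start"),
      col("window.end").as("end"),
      col("pickupNeighborhood"),
      col("rideCount"),
      col("totalFareAmount"),
      col("totalTipAmount"))
}
```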