From a3cc43e1d8a5aa082902bf2792df1e1829c93540 Mon Sep 17 00:00:00 2001
From: Manuel Kiessling
Date: Fri, 3 Mar 2017 07:36:37 +0100
Subject: [PATCH] #50: More robust journey duration calculation.

By accepting both 'yyyy-MM-dd'T'HH:mm:ss.SSSxxx' and 'yyyy-MM-dd'T'HH:mm:ssxxx'
as possible startedDateTime formats in HARs.
---
 .../journeymonitor/analyze/spark/SparkApp.scala | 17 ++++++++++++++---
 .../analyze/spark/SparkAppSpec.scala            |  5 +++--
 2 files changed, 17 insertions(+), 5 deletions(-)

diff --git a/spark/src/main/scala/com/journeymonitor/analyze/spark/SparkApp.scala b/spark/src/main/scala/com/journeymonitor/analyze/spark/SparkApp.scala
index f817d29..5370fe3 100644
--- a/spark/src/main/scala/com/journeymonitor/analyze/spark/SparkApp.scala
+++ b/spark/src/main/scala/com/journeymonitor/analyze/spark/SparkApp.scala
@@ -72,17 +72,28 @@ object HarAnalyzer {
   private def calculateTotalRequestTime(entries: List[JsonAST.JValue]): Int = {
     implicit val formats = org.json4s.DefaultFormats
 
-    val formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSSxxx")
+    val formatterWithMilliseconds = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSSxxx")
+    val formatterWithoutMilliseconds = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ssxxx")
 
     val starttimesEpochMilli = for { entry <- entries } yield {
       val startedDateTime = (entry \ "startedDateTime").extract[String]
-      java.time.ZonedDateTime.parse(startedDateTime, formatter).toInstant.toEpochMilli
+      try {
+        java.time.ZonedDateTime.parse(startedDateTime, formatterWithMilliseconds).toInstant.toEpochMilli
+      } catch {
+        case e: java.time.format.DateTimeParseException =>
+          java.time.ZonedDateTime.parse(startedDateTime, formatterWithoutMilliseconds).toInstant.toEpochMilli
+      }
     }
 
     val endtimesEpochMilli = for { entry <- entries } yield {
       val startedDateTime = (entry \ "startedDateTime").extract[String]
       val time = (entry \ "time").extract[Int]
-      java.time.ZonedDateTime.parse(startedDateTime, formatter).toInstant.toEpochMilli + time
+      try {
+        java.time.ZonedDateTime.parse(startedDateTime, formatterWithMilliseconds).toInstant.toEpochMilli + time
+      } catch {
+        case e: java.time.format.DateTimeParseException =>
+          java.time.ZonedDateTime.parse(startedDateTime, formatterWithoutMilliseconds).toInstant.toEpochMilli + time
+      }
     }
 
     (endtimesEpochMilli.max - starttimesEpochMilli.min).toInt
diff --git a/spark/src/test/scala/com/journeymonitor/analyze/spark/SparkAppSpec.scala b/spark/src/test/scala/com/journeymonitor/analyze/spark/SparkAppSpec.scala
index 06dbc56..0bc6d63 100644
--- a/spark/src/test/scala/com/journeymonitor/analyze/spark/SparkAppSpec.scala
+++ b/spark/src/test/scala/com/journeymonitor/analyze/spark/SparkAppSpec.scala
@@ -17,6 +17,7 @@ object FixtureGenerator {
       testcaseId = "testcaseId1",
       testresultId = "testresultId1",
       datetimeRun = datetimeRun1,
+      // Note how we expect startedDateTime to sometimes lack milliseconds
       har = parse(
         """
           |{
@@ -27,7 +28,7 @@
           |      "response": {
           |        "status": 200
           |      },
           |      "time": 10,
-          |      "startedDateTime": "2017-03-02T06:10:43.436+01:00"
+          |      "startedDateTime": "2017-03-02T06:10:43+01:00"
           |    },
           |    {
           |      "response": {
@@ -175,7 +176,7 @@ class SparkExampleSpec extends FunSpec with BeforeAndAfter with Matchers {
       statistics(0).testresultDatetimeRun.toString.substring(24) should be("2015")
       statistics(0).numberOfRequestsWithStatus200 should be(1)
       statistics(0).numberOfRequestsWithStatus400 should be(1)
-      statistics(0).totalRequestTime should be(2015)
+      statistics(0).totalRequestTime should be(2451)
       statistics(1).testcaseId should be("testcaseId1")
       statistics(1).dayBucket should be("2015-11-18")