#50: Corrected calculation of journey duration.
Instead of incorrectly summing up all request durations, we now calculate the difference between the start of the earliest starting request and the end of the latest ending request.
manuelkiessling committed Mar 2, 2017
1 parent a7fb8ca commit d1ee702
Showing 2 changed files with 37 additions and 17 deletions.
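
As an illustration of the corrected calculation described above, here is a minimal, self-contained Scala sketch; the object name and the inline fixture values are invented for this example, and the authoritative change is the diff below:

    import java.time.ZonedDateTime
    import java.time.format.DateTimeFormatter

    object JourneyDurationSketch extends App {
      // Hypothetical HAR-like entries: (startedDateTime, duration in milliseconds)
      val entries = List(
        ("2017-03-02T06:10:43.436+01:00", 2000), // ends at 06:10:45.436
        ("2017-03-02T06:10:44.436+01:00", 4)     // starts later, but ends earlier, at 06:10:44.440
      )

      val formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSSxxx")

      def toEpochMilli(started: String): Long =
        ZonedDateTime.parse(started, formatter).toInstant.toEpochMilli

      val starts = entries.map { case (started, _)    => toEpochMilli(started) }
      val ends   = entries.map { case (started, time) => toEpochMilli(started) + time }

      // Journey duration is the latest end minus the earliest start, not the sum of durations:
      println(ends.max - starts.min) // prints 2000; naive summing would report 2004
    }

Summing per-request durations double-counts overlap: in this sketch the second request runs entirely within the first, so it must not add anything to the journey duration.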
@@ -1,6 +1,7 @@
 package com.journeymonitor.analyze.spark
 
 import java.text.SimpleDateFormat
+import java.time.format.DateTimeFormatter
 import java.util.Calendar
 
 import com.datastax.spark.connector._
@@ -70,11 +71,21 @@ object HarAnalyzer {
 
   private def calculateTotalRequestTime(entries: List[JsonAST.JValue]): Int = {
     implicit val formats = org.json4s.DefaultFormats
-    val times = for { entry <- entries } yield (entry \ "time").extract[Int]
-    if (times.isEmpty) 0 else times.reduce(_ + _)
-    // This is a "normal" Scala reduce, not an RDD reduce.
-    // Because this method is called from within testresultsRDD.map, the reduce does not happen in the driver,
-    // but in the executors
+
+    val formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSSxxx")
+
+    val starttimesEpochMilli = for { entry <- entries } yield {
+      val startedDateTime = (entry \ "startedDateTime").extract[String]
+      java.time.ZonedDateTime.parse(startedDateTime, formatter).toInstant.toEpochMilli
+    }
+
+    val endtimesEpochMilli = for { entry <- entries } yield {
+      val startedDateTime = (entry \ "startedDateTime").extract[String]
+      val time = (entry \ "time").extract[Int]
+      java.time.ZonedDateTime.parse(startedDateTime, formatter).toInstant.toEpochMilli + time
+    }
+
+    (endtimesEpochMilli.max - starttimesEpochMilli.min).toInt
   }
 
   def calculateRequestStatistics(testresultsRDD: RDD[Testresult]): RDD[Statistics] = {
@@ -26,13 +26,15 @@ object FixtureGenerator {
| "response": {
| "status": 200
| },
| "time": 10
| "time": 10,
| "startedDateTime": "2017-03-02T06:10:43.436+01:00"
| },
| {
| "response": {
| "status": 400
| },
| "time": 15
| "time": 15,
| "startedDateTime": "2017-03-02T06:10:45.436+01:00"
| }
| ]
| }
@@ -52,13 +54,15 @@ object FixtureGenerator {
| "response": {
| "status": 400
| },
| "time": 16
| "time": 2000,
| "startedDateTime": "2017-03-02T06:10:43.436+01:00"
| },
| {
| "response": {
| "status": 400
| },
| "time": 4
| "time": 4,
| "startedDateTime": "2017-03-02T06:10:44.436+01:00"
| }
| ]
| }
@@ -87,13 +91,15 @@ object FixtureGenerator {
| "response": {
| "status": "foo"
| },
| "time": 10
| "time": 10,
| "startedDateTime": "2017-03-02T06:10:43.436+01:00"
| },
| {
| "response": {
| "status": 400
| },
| "time": 15
| "time": 15,
| "startedDateTime": "2017-03-02T06:10:45.436+01:00"
| }
| ]
| }
@@ -104,6 +110,7 @@ object FixtureGenerator {
testcaseId = "testcaseId1",
testresultId = "testresultId2",
datetimeRun = datetimeRun2,
// Here the second request starts one second after the first, but also the first ends after the second ends
har = parse(
"""
{
@@ -113,13 +120,15 @@
| "response": {
| "status": 400
| },
| "time": 16
| "time": 2000,
| "startedDateTime": "2017-03-02T06:10:43.436+01:00"
| },
| {
| "response": {
| "status": 400
| },
| "time": 4
| "time": 4,
| "startedDateTime": "2017-03-02T06:10:44.436+01:00"
| }
| ]
| }
@@ -166,7 +175,7 @@ class SparkExampleSpec extends FunSpec with BeforeAndAfter with Matchers {
       statistics(0).testresultDatetimeRun.toString.substring(24) should be("2015")
       statistics(0).numberOfRequestsWithStatus200 should be(1)
       statistics(0).numberOfRequestsWithStatus400 should be(1)
-      statistics(0).totalRequestTime should be(25)
+      statistics(0).totalRequestTime should be(2015)
 
       statistics(1).testcaseId should be("testcaseId1")
       statistics(1).dayBucket should be("2015-11-18")
@@ -175,7 +184,7 @@ class SparkExampleSpec extends FunSpec with BeforeAndAfter with Matchers {
       statistics(1).testresultDatetimeRun.toString.substring(24) should be("2015")
       statistics(1).numberOfRequestsWithStatus200 should be(0)
       statistics(1).numberOfRequestsWithStatus400 should be(2)
-      statistics(1).totalRequestTime should be(20)
+      statistics(1).totalRequestTime should be(2000)
     }
 
     it("should gracefully handle invalid HARs") {
@@ -190,7 +199,7 @@
       statistics(0).testresultDatetimeRun.toString.substring(24) should be("2015")
       statistics(0).numberOfRequestsWithStatus200 should be(0)
       statistics(0).numberOfRequestsWithStatus400 should be(1)
-      statistics(0).totalRequestTime should be(25)
+      statistics(0).totalRequestTime should be(2015)
 
       statistics(1).testcaseId should be("testcaseId1")
       statistics(1).dayBucket should be("2015-11-18")
@@ -199,7 +208,7 @@
       statistics(1).testresultDatetimeRun.toString.substring(24) should be("2015")
       statistics(1).numberOfRequestsWithStatus200 should be(0)
       statistics(1).numberOfRequestsWithStatus400 should be(2)
-      statistics(1).totalRequestTime should be(20)
+      statistics(1).totalRequestTime should be(2000)
     }
 
   }