Combination of build fix and WIP :-(

Commit fc1c51dfe54a9c42bf0cf54543cb830a47bcaa20 (1 parent: 9d1bb7c), committed by @bos on Aug 30, 2012
@@ -4,11 +4,13 @@
module Main (main) where
import Control.Applicative ((<$>))
+import Control.Concurrent.MVar
import Control.DeepSeq (rnf)
import Control.Exception (bracket, catch, evaluate, finally)
import Control.Monad (forM_, unless)
import Data.Aeson ((.=), encode, object)
import Data.Char (toLower)
+import Data.Conduit (ResourceT)
import Data.Maybe (catMaybes)
import Data.Text (Text, pack)
import Data.Text.Encoding (encodeUtf8)
@@ -93,7 +95,7 @@ defaultArgs = Args {
&= summary ("Pronk " ++ pronk_version ++
" - a modern HTTP load tester")
-fromArgs :: Args -> E.Request IO -> LoadTest.Config
+fromArgs :: Args -> E.Request (ResourceT IO) -> LoadTest.Config
fromArgs Args{..} req =
LoadTest.Config {
LoadTest.concurrency = concurrency
@@ -108,7 +110,7 @@ main = withSocketsDo $ do
as@Args{..} <- cmdArgs $ defaultArgs &= program "pronk"
validateArgs as
cfg <- fromArgs as <$> createRequest as
- run <- timed "tested" $ LoadTest.run cfg
+ (run,time) <- timed "tested" $ LoadTest.run cfg
case run of
Left [NetworkError err] -> fatal (show err)
Left errs -> do
@@ -117,21 +119,21 @@ main = withSocketsDo $ do
exitWith (ExitFailure 1)
Right results -> do
whenNormal $ T.print "analysing results\n" ()
- analysis <- timed "analysed" $ do
- r <- if bootstrap
- then Right <$> analyseFull results
- else return . Left . analyseBasic $ results
- evaluate $ rnf r
- return r
+ (analysis,_) <- timed "analysed" $ do
+ r <- if bootstrap
+ then Right <$> analyseFull results time
+ else return . Left $ analyseBasic results time
+ evaluate $ rnf r
+ return r
env <- environment
let dump = object [ "config" .= cfg
, "environment" .= env
, "analysis" .= G.toJSON analysis ]
maybeWriteFile json $ \h -> BL.hPut h (BL.append (encode dump) "\n")
maybeWriteFile dump_events $ \h ->
TL.hPutStr h . toLazyText . csvEvents $ results
- maybeWriteFile output $ \h -> either (writeReport template h)
- (writeReport template h) analysis
+ maybeWriteFile output $ \h -> either (writeReport template h time)
+ (writeReport template h time) analysis
whenNormal $ do
reportEvents stdout results
either (reportBasic stdout) (reportFull whenLoud stdout)
@@ -156,7 +158,7 @@ validateArgs Args{..} = do
forM_ problems $ hPutStrLn stderr . ("Error: " ++)
unless (null problems) $ exitWith (ExitFailure 1)
-createRequest :: Args -> IO (E.Request IO)
+createRequest :: Args -> IO (E.Request (ResourceT IO))
createRequest Args{..} = do
req0 <- E.parseUrl url `catch` \(e::E.HttpException) ->
fatal $ "could not parse URL - " ++
@@ -183,11 +185,12 @@ createRequest Args{..} = do
hPutStrLn stderr "Error: --literal and --from-file are mutually exclusive"
exitWith (ExitFailure 1)
-timed :: Text -> IO a -> IO a
+timed :: Text -> IO a -> IO (a,Double)
timed desc act = do
+ t <- newEmptyMVar
startCPU <- getCPUTime
startWall <- getPOSIXTime
- act `finally` do
+ ret <- act `finally` do
endCPU <- getCPUTime
endWall <- getPOSIXTime
let elapsedCPU = fromIntegral (endCPU - startCPU) / 1e12
@@ -202,6 +205,8 @@ timed desc act = do
T.fixed 1 $ 100 * elapsedCPU / elapsedWall)
else T.print "{} in {}\n"
(desc, buildTime 4 elapsedWall)
+ putMVar t elapsedWall
+ ((,) ret) <$> takeMVar t
fatal :: String -> IO a
fatal e = do
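
For reference, a minimal standalone sketch of the shape timed now has, assuming only getPOSIXTime from the time package. The commit's version also reports CPU and wall-clock figures inside a finally handler and hands the elapsed wall time back through an MVar; timedSketch below is a hypothetical simplification, not the commit's code.

import Data.Time.Clock.POSIX (getPOSIXTime)

-- Hypothetical simplification: run the action and return its result together
-- with the elapsed wall-clock time in seconds, matching the new
-- IO a -> IO (a, Double) shape that callers such as
--   (run,time) <- timed "tested" $ LoadTest.run cfg
-- now destructure.
timedSketch :: IO a -> IO (a, Double)
timedSketch act = do
  startWall <- getPOSIXTime
  ret       <- act
  endWall   <- getPOSIXTime
  return (ret, realToFrac (endWall - startWall))
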
@@ -49,24 +49,27 @@ run cfg@Config{..} = do
client :: Config -> Manager -> POSIXTime
-> ResourceT IO (V.Vector Summary)
-client Config{..} mgr interval = loop 0 [] =<< liftIO getPOSIXTime
+client Config{..} mgr interval = loop 0 []
where
- loop !n acc now
+ loop !n acc
| n == numRequests = return (V.fromList acc)
| otherwise = do
+ now <- liftIO getPOSIXTime
!evt <- timedRequest
now' <- liftIO getPOSIXTime
let elapsed = now' - now
!s = Summary {
summEvent = evt
, summElapsed = realToFrac elapsed
- , summStart = realToFrac now'
+ , summStart = realToFrac now
}
when (elapsed < interval) $
liftIO . threadDelay . truncate $ (interval - elapsed) * 1000000
- loop (n+1) (s:acc) =<< liftIO getPOSIXTime
+ loop (n+1) (s:acc)
+ issueRequest :: ResourceT IO (Response L.ByteString)
issueRequest = httpLbs (fromReq request) mgr
`catch` (throwIO . NetworkError)
+ timedRequest :: ResourceT IO Event
timedRequest
| timeout == 0 = respEvent <$> issueRequest
| otherwise = do
@@ -78,6 +81,6 @@ client Config{..} mgr interval = loop 0 [] =<< liftIO getPOSIXTime
respEvent :: Response L.ByteString -> Event
respEvent resp =
HttpResponse {
- respCode = H.statusCode $ statusCode resp
+ respCode = H.statusCode $ responseStatus resp
, respContentLength = fromIntegral . L.length . responseBody $ resp
}
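
A small sketch, with a hypothetical helper name, of the per-request pattern the revised client loop follows: take a fresh timestamp before each request, record that timestamp as the summary's start (previously the timestamp carried over from the end of the preceding iteration was used), and sleep away whatever remains of the pacing interval. The statusCode-to-responseStatus change tracks the accessor rename in newer http-conduit.

import Control.Concurrent (threadDelay)
import Control.Monad (when)
import Data.Time.Clock.POSIX (POSIXTime, getPOSIXTime)

-- oneRequest is illustrative only; the commit's loop does this inline in
-- ResourceT IO and accumulates Summary values.
oneRequest :: POSIXTime -> IO evt -> IO (evt, Double, Double)
oneRequest interval act = do
  now  <- getPOSIXTime            -- fresh start time for this request
  evt  <- act                     -- issue the (timed) request
  now' <- getPOSIXTime
  let elapsed = now' - now
  when (elapsed < interval) $     -- pace requests to the configured interval
    threadDelay (truncate ((interval - elapsed) * 1000000))
  return (evt, realToFrac elapsed, realToFrac now)
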
@@ -9,61 +9,35 @@ module Network.HTTP.LoadTest.Analysis
, analyseFull
) where
-import Criterion.Analysis (SampleAnalysis, analyseSample, scale)
-import Network.HTTP.LoadTest.Types (Analysis(..), Basic(..), Summary(..),
- summEnd)
+import Criterion.Analysis (SampleAnalysis, analyseSample)
+import Network.HTTP.LoadTest.Types (Analysis(..), Basic(..), Summary(..))
import Prelude hiding (catch)
import Statistics.Quantile (weightedAvg)
import qualified Data.Vector as V
import qualified Data.Vector.Generic as G
-import qualified Data.Vector.Unboxed as U
import qualified Statistics.Sample as S
-analyseFull :: V.Vector Summary -> IO (Analysis SampleAnalysis)
-analyseFull sumv = do
- let start = summStart . G.head $ sumv
- end = summEnd . G.last $ sumv
- elapsed = end - start
- timeSlice = min elapsed 1 / 200
- slices = U.unfoldrN (round (elapsed / timeSlice)) go (sumv,1)
- where go (v,i) = let (a,b) = G.span (\s -> summStart s <= t) v
- t = start + (i * timeSlice)
- in Just (fromIntegral $ G.length a,(b,i+1))
- ci = 0.95
+analyseFull :: V.Vector Summary -> Double -> IO (Analysis SampleAnalysis)
+analyseFull sumv elapsed = do
+ let ci = 0.95
resamples = 10 * 1000
l <- analyseSample ci (G.convert . G.map summElapsed $ sumv) resamples
- t <- analyseSample ci slices resamples
return Analysis {
latency = l
, latency99 = weightedAvg 99 100 . G.map summElapsed $ sumv
, latency999 = weightedAvg 999 1000 . G.map summElapsed $ sumv
, latValues = sumv
- , throughput = scale (recip timeSlice) t
- , throughput10 = (/ timeSlice) . weightedAvg 10 100 $ slices
- , thrValues = slices
+ , throughput = fromIntegral (G.length sumv) / elapsed
}
-analyseBasic :: V.Vector Summary -> Analysis Basic
-analyseBasic sumv = Analysis {
+analyseBasic :: V.Vector Summary -> Double -> Analysis Basic
+analyseBasic sumv elapsed = Analysis {
latency = Basic {
mean = S.mean . G.map summElapsed $ sumv
, stdDev = S.stdDev . G.map summElapsed $ sumv
}
, latency99 = weightedAvg 99 100 . G.map summElapsed $ sumv
, latency999 = weightedAvg 999 1000 . G.map summElapsed $ sumv
, latValues = sumv
- , throughput = Basic {
- mean = S.mean slices / timeSlice
- , stdDev = S.stdDev slices / timeSlice
- }
- , throughput10 = (/ timeSlice) . weightedAvg 10 100 $ slices
- , thrValues = slices
+ , throughput = fromIntegral (G.length sumv) / elapsed
}
- where start = summStart . G.head $ sumv
- end = summEnd . G.last $ sumv
- elapsed = end - start
- timeSlice = min elapsed 1 / 200
- slices = U.unfoldrN (round (elapsed / timeSlice)) go (sumv,1)
- where go (v,i) = let (a,b) = G.span (\s -> summStart s <= t) v
- t = start + (i * timeSlice)
- in Just (fromIntegral $ G.length a,(b,i+1))
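
The time-sliced, bootstrap-analysed throughput is gone: both analysers now take the elapsed wall-clock time measured by timed and report a single figure, completed requests divided by elapsed seconds. A tiny illustration of the formula, with invented numbers:

-- Illustrative only: the new throughput is request count over elapsed seconds.
throughputOf :: Int -> Double -> Double
throughputOf completed elapsed = fromIntegral completed / elapsed

-- e.g. 1200 completed requests over 6 seconds of wall time:
--   throughputOf 1200 6.0 == 200.0   -- requests per second
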
@@ -1,4 +1,4 @@
-{-# LANGUAGE OverloadedStrings, RecordWildCards, RelaxedPolyRec #-}
+{-# LANGUAGE BangPatterns, OverloadedStrings, RecordWildCards, RelaxedPolyRec, ViewPatterns #-}
module Network.HTTP.LoadTest.Report
(
@@ -16,17 +16,19 @@ import Control.Monad (forM_)
import Criterion.Analysis (SampleAnalysis(..), OutlierEffect(..),
OutlierVariance(..))
import Data.Data (Data)
-import Data.List (sort)
+import Data.Function (on)
+import Data.Maybe (fromMaybe)
import Data.Monoid (mappend, mconcat, mempty)
import Data.Text (Text)
import Data.Text.Buildable (build)
import Data.Text.Format (prec, shortest)
import Data.Text.Lazy.Builder (Builder)
import Data.Vector (Vector)
import Network.HTTP.LoadTest.Types (Analysis(..), Basic(..), Event(..),
- Summary(..))
+ Summary(..), summEnd)
import Paths_pronk (getDataFileName)
import Prelude hiding (print)
+import Statistics.Function (sort)
import Statistics.Resampling.Bootstrap (Estimate(..))
import Statistics.Sample.KernelDensity (kde)
import System.IO (Handle)
@@ -36,8 +38,12 @@ import Text.Hastache.Context (mkGenericContext)
import qualified Criterion.Report as R
import qualified Data.ByteString.Lazy as L
import qualified Data.HashMap.Strict as H
+import qualified Data.List as List
+import qualified Data.MeldableHeap as Q
import qualified Data.Text.Format as T
import qualified Data.Vector.Generic as G
+import qualified Data.Vector.Unboxed as U
+import qualified Data.Vector as V
import qualified Text.Hastache as H
reportBasic :: Handle -> Analysis Basic -> IO ()
@@ -47,10 +53,7 @@ reportBasic h Analysis{..} = do
print " mean: {}\n" [time (mean latency)]
print " std dev: {}\n" [time (stdDev latency)]
print " 99%: {}\n 99.9%: {}\n" (time latency99, time latency999)
- print "\nthroughput:\n" ()
- print " mean: {} req/sec\n" [mean throughput]
- print " std dev: {} req/sec\n" [stdDev throughput]
- print " 10%: {} req/sec\n" [throughput10]
+ print "\nthroughput: {}\n" [rate throughput]
reportFull :: (IO () -> IO ()) -> Handle -> Analysis SampleAnalysis -> IO ()
reportFull whenLoud h Analysis{..} = do
@@ -66,17 +69,7 @@ reportFull whenLoud h Analysis{..} = do
print " upper: {}\n" [time (estUpperBound (anStdDev latency))]
effect h (anOutlierVar latency)
print " 99%: {}\n 99.9%: {}\n" (time latency99, time latency999)
- print "\nthroughput:\n" ()
- print " mean: {} req/sec\n" [estPoint (anMean throughput)]
- whenLoud $ do
- print " lower: {}\n" [rate (estLowerBound (anMean throughput))]
- print " upper: {}\n" [rate (estUpperBound (anMean throughput))]
- print " std dev: {}\n" [rate (estPoint (anStdDev throughput))]
- whenLoud $ do
- print " lower: {}\n" [rate (estLowerBound (anStdDev throughput))]
- print " upper: {}\n" [rate (estUpperBound (anStdDev throughput))]
- effect h (anOutlierVar throughput)
- print " 10%: {}\n" [rate throughput10]
+ print "\nthroughput: {}\n" [rate throughput]
time :: Double -> Builder
time = buildTime 4
@@ -109,7 +102,7 @@ reportEvents h sumv = do
classify Timeout = 0
classify HttpResponse{..} = respCode
T.hprint h "responses:\n" ()
- forM_ (sort . H.toList $ evtMap) $ \(e,n) -> do
+ forM_ (List.sort . H.toList $ evtMap) $ \(e,n) -> do
let nameOf 0 = "timeout "
nameOf k = "HTTP " `mappend` build k
T.hprint h " {} {}\n" (nameOf e, T.left 7 ' ' n)
@@ -136,19 +129,63 @@ templateDir :: FilePath
templateDir = unsafePerformIO $ getDataFileName "templates"
{-# NOINLINE templateDir #-}
-writeReport :: (Data a) => FilePath -> Handle -> Analysis a -> IO ()
-writeReport template h a@Analysis{..} = do
+writeReport :: (Data a) => FilePath -> Handle -> Double -> Analysis a -> IO ()
+writeReport template h elapsed a@Analysis{..} = do
let context "include" = MuLambdaM $
R.includeFile [templateDir, R.templateDir]
+ context "elapsed" = MuVariable elapsed
context "latKdeTimes" = R.vector "x" latKdeTimes
context "latKdePDF" = R.vector "x" latKdePDF
context "latKde" = R.vector2 "time" "pdf" latKdeTimes latKdePDF
context "latValues" = MuList . map mkGenericContext . G.toList $ lats
+ context "thrTimes" = R.vector "x" thrTimes
context "thrValues" = R.vector "x" thrValues
+ context "concTimes" = R.vector "x" . U.fromList $ map fstS conc
+ context "concValues" = R.vector "x" . U.fromList $ map sndS conc
context n = mkGenericContext a n
(latKdeTimes,latKdePDF) = kde 128 . G.convert . G.map summElapsed $ latValues
lats = G.map (\s -> s { summStart = summStart s - t }) latValues
- where t = G.minimum . G.map summStart $ latValues
+ where t = summStart . G.head $ latValues
+ (thrTimes,thrValues) = graphThroughput (min (G.length latValues) 50) elapsed latValues
+ conc = graphConcurrency lats
tpl <- R.loadTemplate [".",templateDir] template
bs <- H.hastacheStr H.defaultConfig tpl context
L.hPutStr h bs
+
+data T = T (U.Vector Double) {-# UNPACK #-} !Double
+
+-- | Compute a graph of throughput, requests completed per time
+-- interval.
+graphThroughput :: Int -- ^ Number of time slices.
+ -> Double -- ^ Amount of time elapsed.
+ -> V.Vector Summary -> (U.Vector Double, U.Vector Double)
+graphThroughput slices elapsed sumv =
+ (G.generate slices $ \i -> fromIntegral i * timeSlice,
+ G.unfoldrN slices go (T endv 0))
+ where go (T v i) = Just (fromIntegral (G.length a), T b j)
+ where (a,b) = G.span (<=t) v
+ t = start + (j * timeSlice)
+ j = i+1
+ timeSlice = elapsed / fromIntegral slices
+ start = summStart . G.head $ sumv
+ endv = G.convert . sort . G.map summEnd $ sumv
+
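
A plain-list sketch of the bucketing graphThroughput performs, assuming the completion times are already sorted; the commit's version works over unboxed vectors with a strict accumulator (the T type above). bucketCompletions is a hypothetical helper written only to illustrate the idea.

-- Count how many requests complete in each of a fixed number of time slices.
bucketCompletions :: Int -> Double -> Double -> [Double] -> [Int]
bucketCompletions slices elapsed start ends = go ends 1
  where
    timeSlice = elapsed / fromIntegral slices
    go _  i | i > slices = []
    go es i = count : go rest (i + 1)
      where
        cutoff       = start + fromIntegral i * timeSlice
        (done, rest) = span (<= cutoff) es
        count        = length done

-- e.g. four 2-second slices over 8 seconds of completions:
--   bucketCompletions 4 8.0 0.0 [0.5,1.5,1.7,3.0,7.9] == [3,1,0,1]
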
+data S = S {
+ fstS :: {-# UNPACK #-} !Double
+ , sndS :: {-# UNPACK #-} !Int
+ }
+
+-- | Compute a graph of concurrency.
+graphConcurrency :: V.Vector Summary -> [S]
+graphConcurrency = scanl1 f . filter ((/=0) . sndS) . map (foldl1 (flip f)) .
+ List.groupBy ((==) `on` fstS) . go Q.empty . G.toList
+ where
+ f (S _ i) (S t j) = S t (i+j)
+ go q es@(Summary{..}:xs)
+ | summStart < t = S summStart 1 : go insQ xs
+ | otherwise = S t (-1) : go delQ es
+ where (t,delQ) = fromMaybe (1e300,q) $ Q.extractMin q
+ insQ = Q.insert (summStart+summElapsed) q
+ go q _ = drain q
+ where drain (Q.extractMin -> Just (t,q')) = S t (-1) : drain q'
+ drain _ = []
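
And a plain-list illustration of what graphConcurrency computes: every request contributes a +1 event at its start and a -1 event at its end, and a running sum over the time-sorted events gives the number of requests in flight at each moment. The commit streams the end events lazily out of a meldable min-heap and coalesces events that share a timestamp; the hypothetical concurrencySketch below skips both refinements.

import Data.List (sortOn)

-- Illustration only: concurrency over time from (start, elapsed) pairs,
-- via a +1/-1 event sweep and a running sum.
concurrencySketch :: [(Double, Double)]   -- (start time, elapsed seconds)
                  -> [(Double, Int)]      -- (event time, requests in flight)
concurrencySketch reqs = scanl1 add (sortOn fst events)
  where
    events = concat [ [(s, 1), (s + d, -1)] | (s, d) <- reqs ]
    add (_, n) (t, delta) = (t, n + delta)

-- e.g. two overlapping 2-second requests starting at t=0 and t=1:
--   concurrencySketch [(0,2),(1,2)] == [(0.0,1),(1.0,2),(2.0,1),(3.0,0)]
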