stress: handle duplicate unix seconds in data points

Gyu-Ho Lee 2017-02-06 11:57:30 -08:00
parent 797edc0818
commit 08ff29fb2a
1 changed file with 26 additions and 0 deletions


@@ -135,6 +135,32 @@ func (cfg *Config) Stress(databaseID string) error {
}
}
// handle duplicate unix seconds around boundaries
sec2dp := make(map[int64]report.DataPoint)
for _, tss := range combined.TimeSeries {
v, ok := sec2dp[tss.Timestamp]
if !ok {
sec2dp[tss.Timestamp] = tss
continue
}
// two data points share the same unix second; merge them
if v.MinLatency > tss.MinLatency {
v.MinLatency = tss.MinLatency
}
if v.MaxLatency < tss.MaxLatency {
v.MaxLatency = tss.MaxLatency
}
v.AvgLatency = (v.AvgLatency + tss.AvgLatency) / time.Duration(2)
v.ThroughPut += tss.ThroughPut
sec2dp[tss.Timestamp] = v
}
var fts report.TimeSeries
for _, dp := range sec2dp {
fts = append(fts, dp)
}
sort.Sort(report.TimeSeries(fts))
combined.TimeSeries = fts
combined.Average = combined.AvgTotal / float64(len(combined.Lats))
combined.RPS = float64(len(combined.Lats)) / combined.Total.Seconds()
plog.Printf("got total %d data points and total %f seconds (RPS %f)", len(combined.Lats), combined.Total.Seconds(), combined.RPS)
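Below is a minimal, standalone sketch of the merge step introduced by this commit, under the assumption that a simplified `dataPoint` struct stands in for the real `report.DataPoint` type: points that land on the same unix second have their min/max latencies widened, their average latencies averaged, and their throughput summed, then the result is re-sorted by timestamp.

```go
package main

import (
	"fmt"
	"sort"
	"time"
)

// dataPoint is a hypothetical, trimmed-down stand-in for report.DataPoint.
type dataPoint struct {
	Timestamp  int64
	MinLatency time.Duration
	AvgLatency time.Duration
	MaxLatency time.Duration
	ThroughPut int64
}

// mergeBySecond collapses data points that share the same unix second,
// mirroring the logic in the diff above.
func mergeBySecond(points []dataPoint) []dataPoint {
	sec2dp := make(map[int64]dataPoint)
	for _, p := range points {
		v, ok := sec2dp[p.Timestamp]
		if !ok {
			sec2dp[p.Timestamp] = p
			continue
		}
		if v.MinLatency > p.MinLatency {
			v.MinLatency = p.MinLatency
		}
		if v.MaxLatency < p.MaxLatency {
			v.MaxLatency = p.MaxLatency
		}
		v.AvgLatency = (v.AvgLatency + p.AvgLatency) / 2
		v.ThroughPut += p.ThroughPut
		sec2dp[p.Timestamp] = v
	}
	merged := make([]dataPoint, 0, len(sec2dp))
	for _, dp := range sec2dp {
		merged = append(merged, dp)
	}
	sort.Slice(merged, func(i, j int) bool { return merged[i].Timestamp < merged[j].Timestamp })
	return merged
}

func main() {
	points := []dataPoint{
		{Timestamp: 100, MinLatency: 2 * time.Millisecond, AvgLatency: 5 * time.Millisecond, MaxLatency: 9 * time.Millisecond, ThroughPut: 300},
		{Timestamp: 100, MinLatency: 1 * time.Millisecond, AvgLatency: 3 * time.Millisecond, MaxLatency: 12 * time.Millisecond, ThroughPut: 200},
		{Timestamp: 101, MinLatency: 2 * time.Millisecond, AvgLatency: 4 * time.Millisecond, MaxLatency: 8 * time.Millisecond, ThroughPut: 250},
	}
	for _, dp := range mergeBySecond(points) {
		fmt.Printf("%d min=%v avg=%v max=%v throughput=%d\n",
			dp.Timestamp, dp.MinLatency, dp.AvgLatency, dp.MaxLatency, dp.ThroughPut)
	}
}
```

Note that the `continue` after the first insertion matters: without it, a freshly seen timestamp would immediately be merged with a zero-valued entry, zeroing its minimum latency and halving its average.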