diff --git a/analyze/analyze.go b/analyze/analyze.go
index a1747a59..820e1983 100644
--- a/analyze/analyze.go
+++ b/analyze/analyze.go
@@ -1,4 +1,4 @@
-// Copyright 2016 CoreOS, Inc.
+// Copyright 2017 CoreOS, Inc.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -30,36 +30,6 @@ import (
 	"github.com/spf13/cobra"
 )
 
-// Command implements 'analyze' command.
-var Command = &cobra.Command{
-	Use:   "analyze",
-	Short: "Analyzes test dbtester test results.",
-	RunE:  commandFunc,
-}
-
-var configPath string
-
-func init() {
-	Command.PersistentFlags().StringVarP(&configPath, "config", "c", "", "YAML configuration file path.")
-}
-
-var columnsToAggregate = []string{
-	"UNIX-TS", "CPU-NUM", "VMRSS-NUM",
-	"READS-COMPLETED",
-	"READS-COMPLETED-DIFF",
-	"SECTORS-READ",
-	"SECTORS-READ-DIFF",
-	"WRITES-COMPLETED",
-	"WRITES-COMPLETED-DIFF",
-	"SECTORS-WRITTEN",
-	"SECTORS-WRITTEN-DIFF",
-	"RECEIVE-BYTES-NUM",
-	"RECEIVE-BYTES-NUM-DIFF",
-	"TRANSMIT-BYTES-NUM",
-	"TRANSMIT-BYTES-NUM-DIFF",
-	"EXTRA",
-}
-
 func commandFunc(cmd *cobra.Command, args []string) error {
 	cfg, err := ReadConfig(configPath)
 	if err != nil {
@@ -67,7 +37,7 @@ func commandFunc(cmd *cobra.Command, args []string) error {
 	}
 
 	println()
-	plog.Println("Step 1: aggregating each database...")
+	plog.Println("Step 1: aggregating all system metrics CSV files from each database")
 	for step1Idx, elem := range cfg.Step1 {
 		var (
 			frames = []dataframe.Frame{}
diff --git a/analyze/analyze_1_read_test_data.go b/analyze/analyze_1_read_test_data.go
new file mode 100644
index 00000000..49cc4e51
--- /dev/null
+++ b/analyze/analyze_1_read_test_data.go
@@ -0,0 +1,101 @@
+// Copyright 2017 CoreOS, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package analyze
+
+import (
+	"fmt"
+	"strconv"
+
+	"github.com/gyuho/dataframe"
+)
+
+var sysMetricsColumnsToRead = []string{
+	"UNIX-TS", "CPU-NUM", "VMRSS-NUM",
+	"READS-COMPLETED",
+	"READS-COMPLETED-DIFF",
+	"SECTORS-READ",
+	"SECTORS-READ-DIFF",
+	"WRITES-COMPLETED",
+	"WRITES-COMPLETED-DIFF",
+	"SECTORS-WRITTEN",
+	"SECTORS-WRITTEN-DIFF",
+	"RECEIVE-BYTES-NUM",
+	"RECEIVE-BYTES-NUM-DIFF",
+	"TRANSMIT-BYTES-NUM",
+	"TRANSMIT-BYTES-NUM-DIFF",
+	"EXTRA",
+}
+
+type testData struct {
+	filePath    string
+	frontUnixTS int64
+	lastUnixTS  int64
+	frame       dataframe.Frame
+}
+
+// readSystemMetrics extracts only the columns that we need for analyze.
+func readSystemMetrics(fpath string) (data testData, err error) {
+	originalFrame, err := dataframe.NewFromCSV(nil, fpath)
+	if err != nil {
+		return testData{}, err
+	}
+
+	data.filePath = fpath
+	data.frame = dataframe.New()
+	var unixTSColumn dataframe.Column
+	for _, name := range sysMetricsColumnsToRead {
+		var column dataframe.Column
+		column, err = originalFrame.GetColumn(name)
+		if err != nil {
+			return testData{}, err
+		}
+		if err = data.frame.AddColumn(column); err != nil {
+			return testData{}, err
+		}
+		if name == "UNIX-TS" {
+			unixTSColumn = column
+		}
+	}
+
+	// get first(minimum) unix second
+	fv, ok := unixTSColumn.FrontNonNil()
+	if !ok {
+		return testData{}, fmt.Errorf("FrontNonNil %s has empty Unix time %v", fpath, fv)
+	}
+	fs, ok := fv.ToString()
+	if !ok {
+		return testData{}, fmt.Errorf("cannot ToString %v", fv)
+	}
+	data.frontUnixTS, err = strconv.ParseInt(fs, 10, 64)
+	if err != nil {
+		return testData{}, err
+	}
+
+	// get last(maximum) unix second
+	bv, ok := unixTSColumn.BackNonNil()
+	if !ok {
+		return testData{}, fmt.Errorf("BackNonNil %s has empty Unix time %v", fpath, fv)
+	}
+	bs, ok := bv.ToString()
+	if !ok {
+		return testData{}, fmt.Errorf("cannot ToString %v", bv)
+	}
+	data.lastUnixTS, err = strconv.ParseInt(bs, 10, 64)
+	if err != nil {
+		return testData{}, err
+	}
+
+	return
+}
diff --git a/analyze/analyze_2_aggregate_analyze_data.go b/analyze/analyze_2_aggregate_analyze_data.go
new file mode 100644
index 00000000..dc2def85
--- /dev/null
+++ b/analyze/analyze_2_aggregate_analyze_data.go
@@ -0,0 +1,145 @@
+// Copyright 2017 CoreOS, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package analyze
+
+import (
+	"fmt"
+
+	"github.com/gyuho/dataframe"
+)
+
+type analyzeData struct {
+	minUnixTS int64
+	maxUnixTS int64
+	sys       []testData
+
+	// aggregated frame within [min,maxUnixTS] from sys
+	sysAgg dataframe.Frame
+
+	benchMetricsFilePath string
+	benchMetrics         testData
+
+	// aggregated from sysAgg and benchMetrics
+	sysBenchAgg dataframe.Frame
+}
+
+// readSystemMetricsAll reads all system metric files
+// (e.g. if cluster is 3-node, read all 3 files).
+// It returns minimum and maximum common unix second and a list of frames.
+func readSystemMetricsAll(fpaths ...string) (data *analyzeData, err error) {
+	data = &analyzeData{}
+	for i, fpath := range fpaths {
+		plog.Printf("STEP #1-%d: creating dataframe from %s", i, fpath)
+		sm, err := readSystemMetrics(fpath)
+		if err != nil {
+			return nil, err
+		}
+		if i == 0 {
+			data.minUnixTS = sm.frontUnixTS
+			data.maxUnixTS = sm.lastUnixTS
+		}
+		if data.minUnixTS < sm.frontUnixTS {
+			data.minUnixTS = sm.frontUnixTS
+		}
+		if data.maxUnixTS > sm.lastUnixTS {
+			data.maxUnixTS = sm.lastUnixTS
+		}
+		data.sys = append(data.sys, sm)
+	}
+	return
+}
+
+// aggSystemMetrics aggregates all system metrics from 3+ nodes.
+func (data *analyzeData) aggSystemMetrics() error {
+	// monitor CSVs from multiple servers, and want them to have equal number of rows
+	// Truncate all rows before data.minUnixTS and after data.maxUnixTS
+	minTS := fmt.Sprintf("%d", data.minUnixTS)
+	maxTS := fmt.Sprintf("%d", data.maxUnixTS)
+	data.sysAgg = dataframe.New()
+	for i := range data.sys {
+		uc, err := data.sys[i].frame.GetColumn("UNIX-TS")
+		if err != nil {
+			return err
+		}
+		minTSIdx, ok := uc.FindValue(dataframe.NewStringValue(minTS))
+		if !ok {
+			return fmt.Errorf("%v does not exist in %s", minTS, data.sys[i].filePath)
+		}
+		maxTSIdx, ok := uc.FindValue(dataframe.NewStringValue(maxTS))
+		if !ok {
+			return fmt.Errorf("%v does not exist in %s", maxTS, data.sys[i].filePath)
+		}
+
+		for _, header := range data.sys[i].frame.GetHeader() {
+			if i > 0 && header == "UNIX-TS" {
+				// skip for other databases; we want to keep just one UNIX-TS column
+				continue
+			}
+
+			var col dataframe.Column
+			col, err = data.sys[i].frame.GetColumn(header)
+			if err != nil {
+				return err
+			}
+			// just keep rows from [min,maxUnixTS]
+			if err = col.KeepRows(minTSIdx, maxTSIdx+1); err != nil {
+				return err
+			}
+
+			if header == "UNIX-TS" {
+				if err = data.sysAgg.AddColumn(col); err != nil {
+					return err
+				}
+				continue
+			}
+
+			switch header {
+			case "CPU-NUM":
+				header = "CPU"
+
+			case "VMRSS-NUM":
+				header = "VMRSS-MB"
+
+				// convert bytes to mb
+				colN := col.RowNumber()
+				for rowIdx := 0; rowIdx < colN; rowIdx++ {
+					var rowV dataframe.Value
+					rowV, err = col.GetValue(rowIdx)
+					if err != nil {
+						return err
+					}
+					fv, _ := rowV.ToNumber()
+					frv := float64(fv) * 0.000001
+					if err = col.SetValue(rowIdx, dataframe.NewStringValue(fmt.Sprintf("%.2f", frv))); err != nil {
+						return err
+					}
+				}
+
+			case "EXTRA":
+				// dbtester uses psn 'EXTRA' column as 'CLIENT-NUM'
+				header = "CLIENT-NUM"
+			}
+
+			// since we are aggregating multiple system-metrics CSV files
+			// suffix header with the index
+			col.UpdateHeader(fmt.Sprintf("%s-%d", header, i+1))
+			if err = data.sysAgg.AddColumn(col); err != nil {
+				return err
+			}
+		}
+	}
+
+	return nil
+}
diff --git a/analyze/analyze_3_add_benchmark_metrics.go b/analyze/analyze_3_add_benchmark_metrics.go
new file mode 100644
index 00000000..e180e16f
--- /dev/null
+++ b/analyze/analyze_3_add_benchmark_metrics.go
@@ -0,0 +1,70 @@
+// Copyright 2017 CoreOS, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package analyze
+
+import (
+	"fmt"
+	"strconv"
+
+	"github.com/gyuho/dataframe"
+)
+
+// importBenchMetrics adds benchmark metrics from client-side
+// and aggregates this to system metrics by unix timestamps.
+func (data *analyzeData) importBenchMetrics(fpath string) (err error) {
+	plog.Printf("STEP #2: importing benchmark dataframe from %s", fpath)
+
+	data.benchMetricsFilePath = fpath
+	data.benchMetrics.frame, err = dataframe.NewFromCSV(nil, fpath)
+	if err != nil {
+		return
+	}
+
+	var unixTSColumn dataframe.Column
+	unixTSColumn, err = data.benchMetrics.frame.GetColumn("UNIX-TS")
+	if err != nil {
+		return err
+	}
+
+	// get first(minimum) unix second
+	fv, ok := unixTSColumn.FrontNonNil()
+	if !ok {
+		return fmt.Errorf("FrontNonNil %s has empty Unix time %v", fpath, fv)
+	}
+	fs, ok := fv.ToString()
+	if !ok {
+		return fmt.Errorf("cannot ToString %v", fv)
+	}
+	data.benchMetrics.frontUnixTS, err = strconv.ParseInt(fs, 10, 64)
+	if err != nil {
+		return err
+	}
+
+	// get last(maximum) unix second
+	bv, ok := unixTSColumn.BackNonNil()
+	if !ok {
+		return fmt.Errorf("BackNonNil %s has empty Unix time %v", fpath, fv)
+	}
+	bs, ok := bv.ToString()
+	if !ok {
+		return fmt.Errorf("cannot ToString %v", bv)
+	}
+	data.benchMetrics.lastUnixTS, err = strconv.ParseInt(bs, 10, 64)
+	if err != nil {
+		return err
+	}
+
+	return
+}
diff --git a/analyze/analyze_4_aggregate_sys_bench.go b/analyze/analyze_4_aggregate_sys_bench.go
new file mode 100644
index 00000000..cef1f8ce
--- /dev/null
+++ b/analyze/analyze_4_aggregate_sys_bench.go
@@ -0,0 +1,132 @@
+package analyze
+
+import (
+	"fmt"
+
+	"strings"
+
+	"github.com/gyuho/dataframe"
+)
+
+// aggSystemBenchMetrics aggregates all system metrics from 3+ nodes.
+func (data *analyzeData) aggSystemBenchMetrics() error {
+	plog.Println("STEP #3: aggregating system metrics and benchmark metrics")
+	colSys, err := data.sysAgg.GetColumn("UNIX-TS")
+	if err != nil {
+		return err
+	}
+
+	colBench, err := data.benchMetrics.frame.GetColumn("UNIX-TS")
+	if err != nil {
+		return err
+	}
+	fv, ok := colBench.FrontNonNil()
+	if !ok {
+		return fmt.Errorf("FrontNonNil %s has empty Unix time %v", data.benchMetrics.filePath, fv)
+	}
+	bv, ok := colBench.BackNonNil()
+	if !ok {
+		return fmt.Errorf("BackNonNil %s has empty Unix time %v", data.benchMetrics.filePath, fv)
+	}
+
+	sysStartIdx, ok := colSys.FindValue(fv)
+	if !ok {
+		return fmt.Errorf("%v is not found in system metrics results", fv)
+	}
+	sysEndIdx, ok := colSys.FindValue(bv)
+	if !ok {
+		return fmt.Errorf("%v is not found in system metrics results", fv)
+	}
+	sysRowN := sysEndIdx - sysStartIdx + 1
+
+	var minBenchEndIdx int
+	for _, col := range data.benchMetrics.frame.GetColumns() {
+		if minBenchEndIdx == 0 {
+			minBenchEndIdx = col.RowNumber()
+		}
+		if minBenchEndIdx > col.RowNumber() {
+			minBenchEndIdx = col.RowNumber()
+		}
+	}
+	minBenchEndIdx--
+
+	// sysStartIdx 3, sysEndIdx 9, sysRowN 7, minBenchEndIdx 5 (5+1 < 7)
+	// THEN sysEndIdx = 3 + 5 = 8
+	//
+	// sysStartIdx 3, sysEndIdx 7, sysRowN 5, minBenchEndIdx 5 (5+1 > 5)
+	// THEN minBenchEndIdx = 7 - 3 = 4
+	if minBenchEndIdx+1 < sysRowN {
+		// benchmark is short of rows
+		// adjust system-metrics rows to benchmark-metrics
+		sysEndIdx = sysStartIdx + minBenchEndIdx
+	} else {
+		// system-metrics is short of rows
+		// adjust benchmark-metrics to system-metrics
+		minBenchEndIdx = sysEndIdx - sysStartIdx
+	}
+
+	// aggregate all system-metrics and benchmark-metrics
+	data.sysBenchAgg = dataframe.New()
+
+	// first, add bench metrics data
+	// UNIX-TS, AVG-LATENCY-MS, AVG-THROUGHPUT
+	for _, col := range data.benchMetrics.frame.GetColumns() {
+		// ALWAYS KEEP FROM FIRST ROW OF BENCHMARKS
+		if err = col.KeepRows(0, minBenchEndIdx); err != nil {
+			return err
+		}
+		if err = data.sysBenchAgg.AddColumn(col); err != nil {
+			return err
+		}
+	}
+
+	for _, col := range data.sysAgg.GetColumns() {
+		if col.GetHeader() == "UNIX-TS" {
+			continue
+		}
+		if err = col.KeepRows(sysStartIdx, sysEndIdx); err != nil {
+			return err
+		}
+		if err = data.sysBenchAgg.AddColumn(col); err != nil {
+			return err
+		}
+	}
+
+	plog.Println("STEP #4: computing average,cumulative values in system metrics and benchmark")
+	var (
+		requestSum              int
+		cumulativeThroughputCol = dataframe.NewColumn("CUMULATIVE-THROUGHPUT")
+
+		systemMetricsSize = float64(len(data.sys))
+		avgCPUCol         = dataframe.NewColumn("AVG-CPU")
+		avgVMRSSMBCol     = dataframe.NewColumn("AVG-VMRSS-MB")
+	)
+	// iterate horizontally across all the columns
+	for rowIdx := 0; rowIdx < minBenchEndIdx; rowIdx++ {
+		var (
+			cpuSum     float64
+			vmrssMBSum float64
+		)
+		for _, col := range data.sysBenchAgg.GetColumns() {
+			rv, err := col.GetValue(rowIdx)
+			if err != nil {
+				return err
+			}
+			vv, _ := rv.ToNumber()
+
+			switch {
+			case col.GetHeader() == "AVG-THROUGHPUT":
+				requestSum += int(vv)
+				cumulativeThroughputCol.PushBack(dataframe.NewStringValue(requestSum))
+
+			case strings.HasPrefix(col.GetHeader(), "CPU-"):
+				cpuSum += vv
+
+			case strings.HasPrefix(col.GetHeader(), "VMRSS-MB-"):
+				vmrssMBSum += vv
+			}
+		}
+	}
+
+	return nil
+}
diff --git a/analyze/command.go b/analyze/command.go
new file mode 100644
index 00000000..27ab2ac0
--- /dev/null
+++ b/analyze/command.go
@@ -0,0 +1,30 @@
+// Copyright 2017 CoreOS, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package analyze
+
+import "github.com/spf13/cobra"
+
+// Command implements 'analyze' command.
+var Command = &cobra.Command{
+	Use:   "analyze",
+	Short: "Analyzes test dbtester test results.",
+	RunE:  commandFunc,
+}
+
+var configPath string
+
+func init() {
+	Command.PersistentFlags().StringVarP(&configPath, "config", "c", "", "YAML configuration file path.")
+}
diff --git a/analyze/config.go b/analyze/config.go
index 630afe94..e57fce5f 100644
--- a/analyze/config.go
+++ b/analyze/config.go
@@ -1,4 +1,4 @@
-// Copyright 2016 CoreOS, Inc.
+// Copyright 2017 CoreOS, Inc.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/analyze/config_test.go b/analyze/config_test.go
index e13aa4f5..66b6adb1 100644
--- a/analyze/config_test.go
+++ b/analyze/config_test.go
@@ -1,4 +1,4 @@
-// Copyright 2016 CoreOS, Inc.
+// Copyright 2017 CoreOS, Inc.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/analyze/doc.go b/analyze/doc.go
index 7d10750f..4f3e9e65 100644
--- a/analyze/doc.go
+++ b/analyze/doc.go
@@ -1,4 +1,4 @@
-// Copyright 2016 CoreOS, Inc.
+// Copyright 2017 CoreOS, Inc.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/analyze/logger.go b/analyze/logger.go
index 540b670d..846b980b 100644
--- a/analyze/logger.go
+++ b/analyze/logger.go
@@ -1,4 +1,4 @@
-// Copyright 2016 CoreOS, Inc.
+// Copyright 2017 CoreOS, Inc.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/analyze/util.go b/analyze/util.go
index 14cffea5..a56b7516 100644
--- a/analyze/util.go
+++ b/analyze/util.go
@@ -1,4 +1,4 @@
-// Copyright 2016 CoreOS, Inc.
+// Copyright 2017 CoreOS, Inc.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/bench-configuration/01-write-2M-keys/analyze.yaml b/bench-configuration/01-write-2M-keys/analyze.yaml
index e81f8ded..7b7b4e51 100644
--- a/bench-configuration/01-write-2M-keys/analyze.yaml
+++ b/bench-configuration/01-write-2M-keys/analyze.yaml
@@ -5,37 +5,37 @@ titles:
 
 step1:
 - data_path_list:
-  - 2017Q1-01-etcd-zookeeper-consul/01-write-2M-keys/etcd-v3.1-go1.7.4-1-monitor.csv
-  - 2017Q1-01-etcd-zookeeper-consul/01-write-2M-keys/etcd-v3.1-go1.7.4-2-monitor.csv
-  - 2017Q1-01-etcd-zookeeper-consul/01-write-2M-keys/etcd-v3.1-go1.7.4-3-monitor.csv
+  - 2017Q1-01-etcd-zookeeper-consul/01-write-2M-keys/etcd-v3.1-go1.7.4-1-system-metrics.csv
+  - 2017Q1-01-etcd-zookeeper-consul/01-write-2M-keys/etcd-v3.1-go1.7.4-2-system-metrics.csv
+  - 2017Q1-01-etcd-zookeeper-consul/01-write-2M-keys/etcd-v3.1-go1.7.4-3-system-metrics.csv
   data_benchmark_path: 2017Q1-01-etcd-zookeeper-consul/01-write-2M-keys/etcd-v3.1-go1.7.4-bench-result-timeseries.csv
   output_path: 2017Q1-01-etcd-zookeeper-consul/01-write-2M-keys/etcd-v3.1-go1.7.4-aggregated.csv
 
 - data_path_list:
-  - 2017Q1-01-etcd-zookeeper-consul/01-write-2M-keys/zookeeper-r3.4.9-java8-1-monitor.csv
-  - 2017Q1-01-etcd-zookeeper-consul/01-write-2M-keys/zookeeper-r3.4.9-java8-2-monitor.csv
-  - 2017Q1-01-etcd-zookeeper-consul/01-write-2M-keys/zookeeper-r3.4.9-java8-3-monitor.csv
+  - 2017Q1-01-etcd-zookeeper-consul/01-write-2M-keys/zookeeper-r3.4.9-java8-1-system-metrics.csv
+  - 2017Q1-01-etcd-zookeeper-consul/01-write-2M-keys/zookeeper-r3.4.9-java8-2-system-metrics.csv
+  - 2017Q1-01-etcd-zookeeper-consul/01-write-2M-keys/zookeeper-r3.4.9-java8-3-system-metrics.csv
   data_benchmark_path: 2017Q1-01-etcd-zookeeper-consul/01-write-2M-keys/zookeeper-r3.4.9-java8-bench-result-timeseries.csv
   output_path: 2017Q1-01-etcd-zookeeper-consul/01-write-2M-keys/zookeeper-r3.4.9-java8-aggregated.csv
 
 - data_path_list:
-  - 2017Q1-01-etcd-zookeeper-consul/01-write-2M-keys/consul-v0.7.2-go1.7.4-1-monitor.csv
-  - 2017Q1-01-etcd-zookeeper-consul/01-write-2M-keys/consul-v0.7.2-go1.7.4-2-monitor.csv
-  - 2017Q1-01-etcd-zookeeper-consul/01-write-2M-keys/consul-v0.7.2-go1.7.4-3-monitor.csv
+  - 2017Q1-01-etcd-zookeeper-consul/01-write-2M-keys/consul-v0.7.2-go1.7.4-1-system-metrics.csv
+  - 2017Q1-01-etcd-zookeeper-consul/01-write-2M-keys/consul-v0.7.2-go1.7.4-2-system-metrics.csv
+  - 2017Q1-01-etcd-zookeeper-consul/01-write-2M-keys/consul-v0.7.2-go1.7.4-3-system-metrics.csv
   data_benchmark_path: 2017Q1-01-etcd-zookeeper-consul/01-write-2M-keys/consul-v0.7.2-go1.7.4-bench-result-timeseries.csv
   output_path: 2017Q1-01-etcd-zookeeper-consul/01-write-2M-keys/consul-v0.7.2-go1.7.4-aggregated.csv
 
 - data_path_list:
-  - 2017Q1-01-etcd-zookeeper-consul/01-write-2M-keys/zetcd-v3.1-go1.7.4-1-monitor.csv
-  - 2017Q1-01-etcd-zookeeper-consul/01-write-2M-keys/zetcd-v3.1-go1.7.4-2-monitor.csv
-  - 2017Q1-01-etcd-zookeeper-consul/01-write-2M-keys/zetcd-v3.1-go1.7.4-3-monitor.csv
+  - 2017Q1-01-etcd-zookeeper-consul/01-write-2M-keys/zetcd-v3.1-go1.7.4-1-system-metrics.csv
+  - 2017Q1-01-etcd-zookeeper-consul/01-write-2M-keys/zetcd-v3.1-go1.7.4-2-system-metrics.csv
+  - 2017Q1-01-etcd-zookeeper-consul/01-write-2M-keys/zetcd-v3.1-go1.7.4-3-system-metrics.csv
   data_benchmark_path: 2017Q1-01-etcd-zookeeper-consul/01-write-2M-keys/zetcd-v3.1-go1.7.4-bench-result-timeseries.csv
   output_path: 2017Q1-01-etcd-zookeeper-consul/01-write-2M-keys/zetcd-v3.1-go1.7.4-aggregated.csv
 
 - data_path_list:
-  - 2017Q1-01-etcd-zookeeper-consul/01-write-2M-keys/cetcd-v3.1-go1.7.4-1-monitor.csv
-  - 2017Q1-01-etcd-zookeeper-consul/01-write-2M-keys/cetcd-v3.1-go1.7.4-2-monitor.csv
-  - 2017Q1-01-etcd-zookeeper-consul/01-write-2M-keys/cetcd-v3.1-go1.7.4-3-monitor.csv
+  - 2017Q1-01-etcd-zookeeper-consul/01-write-2M-keys/cetcd-v3.1-go1.7.4-1-system-metrics.csv
+  - 2017Q1-01-etcd-zookeeper-consul/01-write-2M-keys/cetcd-v3.1-go1.7.4-2-system-metrics.csv
+  - 2017Q1-01-etcd-zookeeper-consul/01-write-2M-keys/cetcd-v3.1-go1.7.4-3-system-metrics.csv
   data_benchmark_path: 2017Q1-01-etcd-zookeeper-consul/01-write-2M-keys/cetcd-v3.1-go1.7.4-bench-result-timeseries.csv
   output_path: 2017Q1-01-etcd-zookeeper-consul/01-write-2M-keys/cetcd-v3.1-go1.7.4-aggregated.csv
 
diff --git a/bench-configuration/04-read-2M-keys-linearizable/analyze.yaml b/bench-configuration/04-read-2M-keys-linearizable/analyze.yaml
index 9d74e320..1a78b7d4 100644
--- a/bench-configuration/04-read-2M-keys-linearizable/analyze.yaml
+++ b/bench-configuration/04-read-2M-keys-linearizable/analyze.yaml
@@ -5,37 +5,37 @@ titles:
 
 step1:
 - data_path_list:
-  - 2017Q1-01-etcd-zookeeper-consul/04-read-2M-keys-linearizable/etcd-v3.1-go1.7.4-1-monitor.csv
-  - 2017Q1-01-etcd-zookeeper-consul/04-read-2M-keys-linearizable/etcd-v3.1-go1.7.4-2-monitor.csv
-  - 2017Q1-01-etcd-zookeeper-consul/04-read-2M-keys-linearizable/etcd-v3.1-go1.7.4-3-monitor.csv
+  - 2017Q1-01-etcd-zookeeper-consul/04-read-2M-keys-linearizable/etcd-v3.1-go1.7.4-1-system-metrics.csv
+  - 2017Q1-01-etcd-zookeeper-consul/04-read-2M-keys-linearizable/etcd-v3.1-go1.7.4-2-system-metrics.csv
+  - 2017Q1-01-etcd-zookeeper-consul/04-read-2M-keys-linearizable/etcd-v3.1-go1.7.4-3-system-metrics.csv
   data_benchmark_path: 2017Q1-01-etcd-zookeeper-consul/04-read-2M-keys-linearizable/etcd-v3.1-go1.7.4-bench-result-timeseries.csv
   output_path: 2017Q1-01-etcd-zookeeper-consul/04-read-2M-keys-linearizable/etcd-v3.1-go1.7.4-aggregated.csv
 
 - data_path_list:
-  - 2017Q1-01-etcd-zookeeper-consul/04-read-2M-keys-linearizable/zookeeper-r3.4.9-java8-1-monitor.csv
-  - 2017Q1-01-etcd-zookeeper-consul/04-read-2M-keys-linearizable/zookeeper-r3.4.9-java8-2-monitor.csv
-  - 2017Q1-01-etcd-zookeeper-consul/04-read-2M-keys-linearizable/zookeeper-r3.4.9-java8-3-monitor.csv
+  - 2017Q1-01-etcd-zookeeper-consul/04-read-2M-keys-linearizable/zookeeper-r3.4.9-java8-1-system-metrics.csv
+  - 2017Q1-01-etcd-zookeeper-consul/04-read-2M-keys-linearizable/zookeeper-r3.4.9-java8-2-system-metrics.csv
+  - 2017Q1-01-etcd-zookeeper-consul/04-read-2M-keys-linearizable/zookeeper-r3.4.9-java8-3-system-metrics.csv
   data_benchmark_path: 2017Q1-01-etcd-zookeeper-consul/04-read-2M-keys-linearizable/zookeeper-r3.4.9-java8-bench-result-timeseries.csv
   output_path: 2017Q1-01-etcd-zookeeper-consul/04-read-2M-keys-linearizable/zookeeper-r3.4.9-java8-aggregated.csv
 
 - data_path_list:
-  - 2017Q1-01-etcd-zookeeper-consul/04-read-2M-keys-linearizable/consul-v0.7.2-go1.7.4-1-monitor.csv
-  - 2017Q1-01-etcd-zookeeper-consul/04-read-2M-keys-linearizable/consul-v0.7.2-go1.7.4-2-monitor.csv
-  - 2017Q1-01-etcd-zookeeper-consul/04-read-2M-keys-linearizable/consul-v0.7.2-go1.7.4-3-monitor.csv
+  - 2017Q1-01-etcd-zookeeper-consul/04-read-2M-keys-linearizable/consul-v0.7.2-go1.7.4-1-system-metrics.csv
+  - 2017Q1-01-etcd-zookeeper-consul/04-read-2M-keys-linearizable/consul-v0.7.2-go1.7.4-2-system-metrics.csv
+  - 2017Q1-01-etcd-zookeeper-consul/04-read-2M-keys-linearizable/consul-v0.7.2-go1.7.4-3-system-metrics.csv
   data_benchmark_path: 2017Q1-01-etcd-zookeeper-consul/04-read-2M-keys-linearizable/consul-v0.7.2-go1.7.4-bench-result-timeseries.csv
   output_path: 2017Q1-01-etcd-zookeeper-consul/04-read-2M-keys-linearizable/consul-v0.7.2-go1.7.4-aggregated.csv
 
 - data_path_list:
-  - 2017Q1-01-etcd-zookeeper-consul/04-read-2M-keys-linearizable/zetcd-v3.1-go1.7.4-1-monitor.csv
-  - 2017Q1-01-etcd-zookeeper-consul/04-read-2M-keys-linearizable/zetcd-v3.1-go1.7.4-2-monitor.csv
-  - 2017Q1-01-etcd-zookeeper-consul/04-read-2M-keys-linearizable/zetcd-v3.1-go1.7.4-3-monitor.csv
+  - 2017Q1-01-etcd-zookeeper-consul/04-read-2M-keys-linearizable/zetcd-v3.1-go1.7.4-1-system-metrics.csv
+  - 2017Q1-01-etcd-zookeeper-consul/04-read-2M-keys-linearizable/zetcd-v3.1-go1.7.4-2-system-metrics.csv
+  - 2017Q1-01-etcd-zookeeper-consul/04-read-2M-keys-linearizable/zetcd-v3.1-go1.7.4-3-system-metrics.csv
   data_benchmark_path: 2017Q1-01-etcd-zookeeper-consul/04-read-2M-keys-linearizable/zetcd-v3.1-go1.7.4-bench-result-timeseries.csv
   output_path: 2017Q1-01-etcd-zookeeper-consul/04-read-2M-keys-linearizable/zetcd-v3.1-go1.7.4-aggregated.csv
 
 - data_path_list:
-  - 2017Q1-01-etcd-zookeeper-consul/04-read-2M-keys-linearizable/cetcd-v3.1-go1.7.4-1-monitor.csv
-  - 2017Q1-01-etcd-zookeeper-consul/04-read-2M-keys-linearizable/cetcd-v3.1-go1.7.4-2-monitor.csv
-  - 2017Q1-01-etcd-zookeeper-consul/04-read-2M-keys-linearizable/cetcd-v3.1-go1.7.4-3-monitor.csv
+  - 2017Q1-01-etcd-zookeeper-consul/04-read-2M-keys-linearizable/cetcd-v3.1-go1.7.4-1-system-metrics.csv
+  - 2017Q1-01-etcd-zookeeper-consul/04-read-2M-keys-linearizable/cetcd-v3.1-go1.7.4-2-system-metrics.csv
+  - 2017Q1-01-etcd-zookeeper-consul/04-read-2M-keys-linearizable/cetcd-v3.1-go1.7.4-3-system-metrics.csv
   data_benchmark_path: 2017Q1-01-etcd-zookeeper-consul/04-read-2M-keys-linearizable/cetcd-v3.1-go1.7.4-bench-result-timeseries.csv
   output_path: 2017Q1-01-etcd-zookeeper-consul/04-read-2M-keys-linearizable/cetcd-v3.1-go1.7.4-aggregated.csv