Benchmark overhead: Accumulate and persist results (#3832)
* persist summary and csv back into the results directory
* run nightly and don't trigger CI
* leverage AutoCloseable
* sort once
* use StringBuilder
* factor out writeHeader method
* add comments for clarity
This commit is contained in:
parent
6a3037df73
commit
b7577bb31e
|
|
@ -4,6 +4,8 @@ on:
|
|||
push:
|
||||
branches:
|
||||
- main
|
||||
paths-ignore:
|
||||
- 'benchmark-overhead/**'
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
|
|
|
|||
|
|
@ -0,0 +1,22 @@
|
|||
name: benchmark-overhead tests
on:
  schedule:
    # 5am GMT, nightly
    # NOTE: "cron:" requires the colon -- "- cron "0 5 * * *"" is invalid YAML
    # and the scheduled trigger would never fire.
    - cron: "0 5 * * *"
  workflow_dispatch:

jobs:
  run-overhead-tests:
    runs-on: ubuntu-latest
    steps:
      - name: checkout
        uses: actions/checkout@v2.3.4
      - name: run tests
        run: ./gradlew test
        working-directory: benchmark-overhead
      - name: commit updated results
        uses: stefanzweifel/git-auto-commit-action@v4
        with:
          branch: main
          commit_message: update test result data
          file_pattern: benchmark-overhead/results
|
||||
|
|
@ -0,0 +1 @@
|
|||
This directory contains the results data.
|
||||
|
|
@ -15,7 +15,7 @@ import io.opentelemetry.containers.K6Container;
|
|||
import io.opentelemetry.containers.PetClinicRestContainer;
|
||||
import io.opentelemetry.containers.PostgresContainer;
|
||||
import io.opentelemetry.results.AppPerfResults;
|
||||
import io.opentelemetry.results.ConsoleResultsPersister;
|
||||
import io.opentelemetry.results.MainResultsPersister;
|
||||
import io.opentelemetry.results.ResultsCollector;
|
||||
import io.opentelemetry.util.NamingConventions;
|
||||
import java.io.IOException;
|
||||
|
|
@ -67,7 +67,7 @@ public class OverheadTests {
|
|||
}
|
||||
});
|
||||
List<AppPerfResults> results = new ResultsCollector(namingConventions.local).collect(config);
|
||||
new ConsoleResultsPersister().write(results);
|
||||
new MainResultsPersister(config).write(results);
|
||||
}
|
||||
|
||||
void runAppOnce(TestConfig config, Agent agent) throws Exception {
|
||||
|
|
|
|||
|
|
@ -4,49 +4,13 @@
|
|||
*/
|
||||
package io.opentelemetry.results;
|
||||
|
||||
import io.opentelemetry.config.TestConfig;
|
||||
import java.util.Comparator;
|
||||
import java.util.List;
|
||||
import java.util.function.Function;
|
||||
|
||||
public class ConsoleResultsPersister implements ResultsPersister {
|
||||
class ConsoleResultsPersister implements ResultsPersister {
|
||||
|
||||
@Override
|
||||
public void write(List<AppPerfResults> results) {
|
||||
TestConfig config = results.stream().findFirst().get().config;
|
||||
System.out.println("----------------------------------------------------------");
|
||||
System.out.printf(" %s : %s\n", config.getName(), config.getDescription());
|
||||
System.out.printf(" %d users, %d iterations\n", config.getConcurrentConnections(), config.getTotalIterations());
|
||||
System.out.println("----------------------------------------------------------");
|
||||
|
||||
display(results, "Agent", appPerfResults -> appPerfResults.agent.getName());
|
||||
display(results, "Startup time (ms)", res -> String.valueOf(res.startupDurationMs));
|
||||
display(results, "Total allocated MB", res -> format(res.getTotalAllocatedMB()));
|
||||
display(results, "Heap (min)", res -> String.valueOf(res.heapUsed.min));
|
||||
display(results, "Heap (max)", res -> String.valueOf(res.heapUsed.max));
|
||||
display(results, "Thread switch rate",
|
||||
res -> String.valueOf(res.maxThreadContextSwitchRate));
|
||||
display(results, "GC time", res -> String.valueOf(res.totalGCTime));
|
||||
display(results, "Req. mean", res -> format(res.requestAvg));
|
||||
display(results, "Req. p95", res -> format(res.requestP95));
|
||||
display(results, "Iter. mean", res -> format(res.iterationAvg));
|
||||
display(results, "Iter. p95", res -> format(res.iterationP95));
|
||||
display(results, "Peak threads", res -> String.valueOf(res.peakThreadCount));
|
||||
PrintStreamPersister delegate = new PrintStreamPersister(System.out);
|
||||
delegate.write(results);
|
||||
}
|
||||
|
||||
private void display(List<AppPerfResults> results, String pref,
|
||||
Function<AppPerfResults, String> vs) {
|
||||
System.out.printf("%-20s: ", pref);
|
||||
results.stream()
|
||||
.sorted(Comparator.comparing(AppPerfResults::getAgentName))
|
||||
.forEach(result -> {
|
||||
System.out.printf("%17s", vs.apply(result));
|
||||
});
|
||||
System.out.println();
|
||||
}
|
||||
|
||||
private String format(double d) {
|
||||
return String.format("%.2f", d);
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
|||
|
|
@ -0,0 +1,93 @@
|
|||
/*
|
||||
* Copyright The OpenTelemetry Authors
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
package io.opentelemetry.results;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.StandardOpenOption;
|
||||
import java.util.Comparator;
|
||||
import java.util.List;
|
||||
import java.util.function.Consumer;
|
||||
|
||||
class CsvPersister implements ResultsPersister {
|
||||
|
||||
private final Path resultsFile;
|
||||
|
||||
public CsvPersister(Path resultsFile) {this.resultsFile = resultsFile;}
|
||||
|
||||
@Override
|
||||
public void write(List<AppPerfResults> results) {
|
||||
|
||||
ensureFileCreated(results);
|
||||
|
||||
StringBuilder sb = new StringBuilder().append(System.currentTimeMillis() / 1000);
|
||||
// Don't be confused by the loop -- This generates a single long csv line.
|
||||
// Each result is for a given agent run, and we want all the fields for all agents on the same
|
||||
// line so that we can create a columnar structure that allows us to more easily compare agent
|
||||
// to agent for a given run.
|
||||
doSorted(results, result -> {
|
||||
sb.append(",").append(result.startupDurationMs);
|
||||
sb.append(",").append(result.heapUsed.min);
|
||||
sb.append(",").append(result.heapUsed.max);
|
||||
sb.append(",").append(result.getTotalAllocatedMB());
|
||||
sb.append(",").append(result.totalGCTime);
|
||||
sb.append(",").append(result.maxThreadContextSwitchRate);
|
||||
sb.append(",").append(result.iterationAvg);
|
||||
sb.append(",").append(result.iterationP95);
|
||||
sb.append(",").append(result.requestAvg);
|
||||
sb.append(",").append(result.requestP95);
|
||||
sb.append(",").append(result.peakThreadCount);
|
||||
});
|
||||
sb.append("\n");
|
||||
try {
|
||||
Files.writeString(resultsFile, sb.toString(), StandardOpenOption.APPEND);
|
||||
} catch (IOException e) {
|
||||
throw new RuntimeException("Error writing csv content", e);
|
||||
}
|
||||
}
|
||||
|
||||
private void ensureFileCreated(List<AppPerfResults> results) {
|
||||
if (Files.exists(resultsFile)) {
|
||||
return;
|
||||
}
|
||||
try {
|
||||
String headerLine = createHeaderLine(results);
|
||||
Files.writeString(resultsFile, headerLine);
|
||||
} catch (IOException e) {
|
||||
throw new RuntimeException("Error creating csv output stub", e);
|
||||
}
|
||||
}
|
||||
|
||||
private String createHeaderLine(List<AppPerfResults> results) {
|
||||
StringBuilder sb = new StringBuilder("timestamp");
|
||||
// Don't be confused by the loop -- This generates a single long csv line.
|
||||
// Each result is for a given agent run, and we want all the fields for all agents on the same
|
||||
// line so that we can create a columnar structure that allows us to more easily compare agent
|
||||
// to agent for a given run.
|
||||
doSorted(results, result -> {
|
||||
String agent = result.getAgentName();
|
||||
sb.append(",").append(agent).append(":startupTimeMs");
|
||||
sb.append(",").append(agent).append(":minHeapUsed");
|
||||
sb.append(",").append(agent).append(":maxHeapUsed");
|
||||
sb.append(",").append(agent).append(":totalAllocatedMB");
|
||||
sb.append(",").append(agent).append(":totalGCTime");
|
||||
sb.append(",").append(agent).append(":maxThreadContextSwitchRate");
|
||||
sb.append(",").append(agent).append(":iterationAvg");
|
||||
sb.append(",").append(agent).append(":iterationP95");
|
||||
sb.append(",").append(agent).append(":requestAvg");
|
||||
sb.append(",").append(agent).append(":requestP95");
|
||||
sb.append(",").append(agent).append(":peakThreadCount");
|
||||
});
|
||||
sb.append("\n");
|
||||
return sb.toString();
|
||||
}
|
||||
|
||||
private void doSorted(List<AppPerfResults> results, Consumer<AppPerfResults> consumer) {
|
||||
results.stream()
|
||||
.sorted(Comparator.comparing(AppPerfResults::getAgentName))
|
||||
.forEach(consumer);
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,28 @@
|
|||
/*
|
||||
* Copyright The OpenTelemetry Authors
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
package io.opentelemetry.results;
|
||||
|
||||
import java.io.FileNotFoundException;
|
||||
import java.io.PrintStream;
|
||||
import java.nio.file.Path;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* Writes the summary file of the last run into the results dir.
|
||||
*/
|
||||
class FileSummaryPersister implements ResultsPersister {
|
||||
|
||||
private final Path file;
|
||||
public FileSummaryPersister(Path file) {this.file = file;}
|
||||
|
||||
@Override
|
||||
public void write(List<AppPerfResults> results) {
|
||||
try (PrintStream out = new PrintStream(file.toFile())){
|
||||
new PrintStreamPersister(out).write(results);
|
||||
} catch (FileNotFoundException e) {
|
||||
throw new RuntimeException("Error opening output file for results", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,38 @@
|
|||
/*
|
||||
* Copyright The OpenTelemetry Authors
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
package io.opentelemetry.results;
|
||||
|
||||
import io.opentelemetry.config.TestConfig;
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.util.List;
|
||||
|
||||
public class MainResultsPersister implements ResultsPersister {
|
||||
|
||||
private final TestConfig config;
|
||||
|
||||
public MainResultsPersister(TestConfig config) {
|
||||
this.config = config;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void write(List<AppPerfResults> results) {
|
||||
Path outputDir = Paths.get("results", config.getName());
|
||||
ensureCreated(outputDir);
|
||||
new ConsoleResultsPersister().write(results);
|
||||
new FileSummaryPersister(outputDir.resolve("summary.txt")).write(results);
|
||||
new CsvPersister(outputDir.resolve("results.csv")).write(results);
|
||||
}
|
||||
|
||||
private void ensureCreated(Path outputDir) {
|
||||
try {
|
||||
Files.createDirectories(outputDir);
|
||||
} catch (IOException e) {
|
||||
throw new RuntimeException("Error creating output directory", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,60 @@
|
|||
/*
|
||||
* Copyright The OpenTelemetry Authors
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
package io.opentelemetry.results;
|
||||
|
||||
import io.opentelemetry.config.TestConfig;
|
||||
import java.io.PrintStream;
|
||||
import java.util.Comparator;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
import java.util.function.Function;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
class PrintStreamPersister implements ResultsPersister {
|
||||
|
||||
private final PrintStream out;
|
||||
|
||||
public PrintStreamPersister(PrintStream out) {this.out = out;}
|
||||
|
||||
@Override
|
||||
public void write(List<AppPerfResults> results) {
|
||||
List<AppPerfResults> sorted = results.stream()
|
||||
.sorted(Comparator.comparing(AppPerfResults::getAgentName)).collect(Collectors.toList());
|
||||
TestConfig config = sorted.stream().findFirst().get().config;
|
||||
out.println("----------------------------------------------------------");
|
||||
out.println(" Run at " + new Date());
|
||||
out.printf(" %s : %s\n", config.getName(), config.getDescription());
|
||||
out.printf(" %d users, %d iterations\n", config.getConcurrentConnections(), config.getTotalIterations());
|
||||
out.println("----------------------------------------------------------");
|
||||
|
||||
display(sorted, "Agent", appPerfResults -> appPerfResults.agent.getName());
|
||||
display(sorted, "Startup time (ms)", res -> String.valueOf(res.startupDurationMs));
|
||||
display(sorted, "Total allocated MB", res -> format(res.getTotalAllocatedMB()));
|
||||
display(sorted, "Heap (min)", res -> String.valueOf(res.heapUsed.min));
|
||||
display(sorted, "Heap (max)", res -> String.valueOf(res.heapUsed.max));
|
||||
display(sorted, "Thread switch rate",
|
||||
res -> String.valueOf(res.maxThreadContextSwitchRate));
|
||||
display(sorted, "GC time", res -> String.valueOf(res.totalGCTime));
|
||||
display(sorted, "Req. mean", res -> format(res.requestAvg));
|
||||
display(sorted, "Req. p95", res -> format(res.requestP95));
|
||||
display(sorted, "Iter. mean", res -> format(res.iterationAvg));
|
||||
display(sorted, "Iter. p95", res -> format(res.iterationP95));
|
||||
display(sorted, "Peak threads", res -> String.valueOf(res.peakThreadCount));
|
||||
}
|
||||
|
||||
private void display(List<AppPerfResults> results, String pref,
|
||||
Function<AppPerfResults, String> vs) {
|
||||
out.printf("%-20s: ", pref);
|
||||
results.forEach(result -> {
|
||||
out.printf("%17s", vs.apply(result));
|
||||
});
|
||||
out.println();
|
||||
}
|
||||
|
||||
private String format(double d) {
|
||||
return String.format("%.2f", d);
|
||||
}
|
||||
|
||||
}
|
||||
Loading…
Reference in New Issue