Diffstat (limited to 'framework/org/linaro/bench/RunBench.java')
-rw-r--r-- framework/org/linaro/bench/RunBench.java | 124
1 file changed, 56 insertions(+), 68 deletions(-)
diff --git a/framework/org/linaro/bench/RunBench.java b/framework/org/linaro/bench/RunBench.java
index 2dae5da..3da4013 100644
--- a/framework/org/linaro/bench/RunBench.java
+++ b/framework/org/linaro/bench/RunBench.java
@@ -22,36 +22,18 @@ import java.util.List;
import java.util.concurrent.TimeUnit;
public class RunBench {
- // Minimum valid calibration time.
- public static final long DEFAULT_CALIBRATION_MIN_TIME_NS =
- TimeUnit.NANOSECONDS.convert(50, TimeUnit.MILLISECONDS);
- // The target benchmark running time.
- public static final long DEFAULT_BENCH_TARGET_RUN_TIME_NS =
- TimeUnit.NANOSECONDS.convert(400, TimeUnit.MILLISECONDS);
-
private SimpleLogger log;
- private long calibrationMinTimeNs;
- private long benchmarkTargetRunTimeNs;
public RunBench() {
this.log = SimpleLogger.getInstance();
- calibrationMinTimeNs = DEFAULT_CALIBRATION_MIN_TIME_NS;
- benchmarkTargetRunTimeNs = DEFAULT_BENCH_TARGET_RUN_TIME_NS;
}
public void setLogLevel(SimpleLogger.LogLevel level) {
- this.log = SimpleLogger.getInstance();
- log.setLogLevel(level);
+ this.log.setLogLevel(level);
}
- public int runBenchSet(String target, boolean verify) {
- if (target == null) {
- return 1;
- }
-
+ public int runBenchSet(Benchmark benchmark, boolean verify) {
try {
- Benchmark benchmark = new Benchmark(target, calibrationMinTimeNs,
- benchmarkTargetRunTimeNs);
Benchmark.Result[] results = benchmark.run();
int verifyFailures = 0;
if (verify) {
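
The hunk above drops the per-instance timing fields and changes runBenchSet() to take an already-constructed Benchmark. Below is a minimal caller sketch, assuming the classes live in the org.linaro.bench package as in this tree; the class name RunOneBenchmark and the specification string are invented for illustration (the string only follows the <benchmark_class_name>[:<benchmark_method>:<iterations>]+ format described in the help text further down), and the precise meaning of the returned int is not shown in this diff, so it is treated here simply as non-zero on failure.

    import org.linaro.bench.Benchmark;
    import org.linaro.bench.RunBench;
    import org.linaro.bench.SimpleLogger;

    public class RunOneBenchmark {
      public static void main(String[] args) {
        // Hypothetical specification string; only its format comes from
        // the help text introduced by this patch.
        Benchmark benchmark =
            Benchmark.fromString("benchmarks/micro/Example:timeRun:1000");

        RunBench bench = new RunBench();
        bench.setLogLevel(SimpleLogger.LogLevel.INFO);

        // runBenchSet() now takes the Benchmark directly instead of a
        // target string; verification of results stays enabled here.
        int status = bench.runBenchSet(benchmark, /* verify */ true);
        System.exit(status == 0 ? 0 : 1);
      }
    }
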
@@ -72,78 +54,84 @@ public class RunBench {
}
public static final String helpMessage =
- "Usage: java org.linaro.bench.RunBench [OPTIONS] [Benchmark...]\n"
+ "Usage: java org.linaro.bench.RunBench [OPTIONS] [Benchmark_name|Benchmark_specification]*\n"
+ + "\n"
+ + "Benchmark_specification: <benchmark_class_name>[:<benchmark_method>:<iterations>]+\n"
+ "OPTIONS:\n"
+ "\t--help Print this help message.\n"
+ "\t--verbose Be verbose.\n"
+ "\t--debug Be more verbose than the verbose mode.\n"
+ "\t--list_benchmarks List available benchmarks and exit.\n"
/* TODO: Add a `--list_sub_benchmarks` option. */
- + "\t--benchmark_run_time <time in ms>\n"
+ + "\t--target_running_time <time in ms>\n"
+ "\t Set the target running time for benchmarks.\n"
+ "\t (default: "
- + TimeUnit.MILLISECONDS.convert(DEFAULT_BENCH_TARGET_RUN_TIME_NS, TimeUnit.NANOSECONDS)
+ + Benchmark.DEFAULT_TARGET_RUNNING_TIME_MS
+ ")\n"
- + "\t--calibration_min_time <time in ms>\n"
- + "\t Set the minimum running time for benchmark calibration.\n"
- + "\t (default: "
- + TimeUnit.MILLISECONDS.convert(DEFAULT_CALIBRATION_MIN_TIME_NS, TimeUnit.NANOSECONDS)
- + ")\n";
+ + "\t--calibration_time <time in ms>\n"
+ + "\t Set the time for benchmark calibration.\n"
+ + "\t If it is not provided, the calibration time is derived from the\n"
+ + "\t target running time. With a target running time of\n"
+ + "\t "
+ + TimeUnit.MILLISECONDS.convert(Benchmark.getTargetRunningTimeNs(), TimeUnit.NANOSECONDS)
+ + " ms\n"
+ + "\t the calibration time is\n"
+ + "\t "
+ + TimeUnit.MILLISECONDS.convert(Benchmark.calculateCalibrationTimeNs(), TimeUnit.NANOSECONDS)
+ + " ms.\n";
public int parseCmdlineAndRun(String[] args) {
int errors = 0;
- String subtest = null;
boolean verify = true; // Verify all benchmark results by default.
- List<String> benchmarks = new ArrayList<String>();
+ List<Benchmark> benchmarks = new ArrayList<Benchmark>();
- for (int argIndex = 0; argIndex < args.length; argIndex++) {
- if (args[argIndex].startsWith("--")) {
- String option = args[argIndex].substring(2);
- if (option.equals("help")) {
- System.out.println(helpMessage);
- System.exit(0);
- } else if (option.equals("verbose")) {
- setLogLevel(SimpleLogger.LogLevel.INFO);
- } else if (option.equals("debug")) {
- setLogLevel(SimpleLogger.LogLevel.DEBUG);
- } else if (option.equals("list_benchmarks")) {
- for (int i = 0; i < BenchmarkList.benchmarkList.length; i++) {
- System.out.println(BenchmarkList.benchmarkList[i]);
- }
- System.exit(0);
- } else if (option.equals("benchmark_run_time")) {
- argIndex++;
- if (argIndex < args.length) {
- this.benchmarkTargetRunTimeNs =
- TimeUnit.NANOSECONDS.convert(Long.valueOf(args[argIndex]), TimeUnit.MILLISECONDS);
- } else {
- log.fatal("Require time.");
- }
- } else if (option.equals("calibration_min_time")) {
- argIndex++;
- if (argIndex < args.length) {
- this.calibrationMinTimeNs =
- TimeUnit.NANOSECONDS.convert(Long.valueOf(args[argIndex]), TimeUnit.MILLISECONDS);
- } else {
- log.fatal("Require time.");
- }
- } else if (option.equals("noverify")) {
- verify = false;
+ int argIndex = 0;
+ for (argIndex = 0; argIndex < args.length && args[argIndex].startsWith("--"); ++argIndex) {
+ String option = args[argIndex].substring(2);
+ if (option.equals("help")) {
+ System.out.println(helpMessage);
+ System.exit(0);
+ } else if (option.equals("verbose")) {
+ setLogLevel(SimpleLogger.LogLevel.INFO);
+ } else if (option.equals("debug")) {
+ setLogLevel(SimpleLogger.LogLevel.DEBUG);
+ } else if (option.equals("list_benchmarks")) {
+ for (int i = 0; i < BenchmarkList.benchmarkList.length; i++) {
+ System.out.println(BenchmarkList.benchmarkList[i]);
+ }
+ System.exit(0);
+ } else if (option.equals("target_running_time")) {
+ argIndex++;
+ if (argIndex < args.length) {
+ Benchmark.setTargetRunningTime(Long.valueOf(args[argIndex]));
+ } else {
+ log.fatal("Require time.");
+ }
+ } else if (option.equals("calibration_time")) {
+ argIndex++;
+ if (argIndex < args.length) {
+ Benchmark.setCalibrationTime(Long.valueOf(args[argIndex]));
} else {
- log.error("Unknown option `--" + option + "`.");
- System.out.println(helpMessage);
- System.exit(1);
+ log.fatal("Require time.");
}
+ } else if (option.equals("noverify")) {
+ verify = false;
} else {
- benchmarks.add(args[argIndex]);
+ log.error("Unknown option `--" + option + "`.");
+ System.out.println(helpMessage);
+ System.exit(1);
}
}
+ for (; argIndex < args.length; ++argIndex) {
+ benchmarks.add(Benchmark.fromString(args[argIndex]));
+ }
+
if (benchmarks.size() == 0) {
// No benchmarks were specified on the command line. Run all
// benchmarks available.
for (int i = 0; i < BenchmarkList.benchmarkList.length; i++) {
- benchmarks.add(BenchmarkList.benchmarkList[i]);
+ benchmarks.add(Benchmark.fromString(BenchmarkList.benchmarkList[i]));
}
}
// Run the benchmarks.
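
Benchmark.java itself is not part of this diff, so the static configuration API used above (DEFAULT_TARGET_RUNNING_TIME_MS, setTargetRunningTime(), setCalibrationTime(), getTargetRunningTimeNs(), calculateCalibrationTimeNs(), fromString()) is only visible through its call sites. The following is a rough sketch of what that surface could look like; the default value, the millisecond arguments, and the rule that the calibration time falls back to a fraction of the target running time are assumptions made for illustration, not the actual implementation.

    import java.util.concurrent.TimeUnit;

    // Illustrative stub only; the real Benchmark class also provides
    // fromString(), run() and Result, which are omitted here.
    public class Benchmark {
      // Assumed default, matching the 400 ms target the old RunBench used.
      public static final long DEFAULT_TARGET_RUNNING_TIME_MS = 400;

      private static long targetRunningTimeNs =
          TimeUnit.NANOSECONDS.convert(DEFAULT_TARGET_RUNNING_TIME_MS, TimeUnit.MILLISECONDS);
      // Negative means "not set explicitly": derive it from the target time.
      private static long calibrationTimeNs = -1;

      public static void setTargetRunningTime(long timeMs) {
        targetRunningTimeNs = TimeUnit.NANOSECONDS.convert(timeMs, TimeUnit.MILLISECONDS);
      }

      public static void setCalibrationTime(long timeMs) {
        calibrationTimeNs = TimeUnit.NANOSECONDS.convert(timeMs, TimeUnit.MILLISECONDS);
      }

      public static long getTargetRunningTimeNs() {
        return targetRunningTimeNs;
      }

      // Assumed rule: unless --calibration_time was given, calibrate for a
      // fixed fraction of the target running time (an eighth here, which
      // reproduces the old 50 ms minimum for the 400 ms default).
      public static long calculateCalibrationTimeNs() {
        return (calibrationTimeNs >= 0) ? calibrationTimeNs : targetRunningTimeNs / 8;
      }
    }

With these assumptions the help message above would report 400 and 50 for the two embedded values.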