return (endRunBenchmark - startRunBenchmark);
}
public static void main(String[] args) throws IOException {
// Logger.getRootLogger().removeAllAppenders();
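// Declare all command-line options understood by the benchmark tool.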
OptionParser parser = new OptionParser();
parser.accepts(READS, "percentage of --ops-count to be reads; valid values [0-100]")
.withRequiredArg()
.describedAs("read-percent")
.ofType(Integer.class);
parser.accepts(WRITES, "percentage of --ops-count to be writes; valid values [0-100]")
.withRequiredArg()
.describedAs("write-percent")
.ofType(Integer.class);
parser.accepts(DELETES, "percentage of --ops-count to be deletes; valid values [0-100]")
.withRequiredArg()
.describedAs("delete-percent")
.ofType(Integer.class);
parser.accepts(MIXED, "percentage of --ops-count to be read-modify-write updates; valid values [0-100]")
.withRequiredArg()
.describedAs("update-percent")
.ofType(Integer.class);
parser.accepts(SAMPLE_SIZE,
"number of value samples to be obtained from the store for replay based on keys from request-file; 0 means no sample value replay. Default = 0")
.withRequiredArg()
.describedAs("sample-size")
.ofType(Integer.class);
parser.accepts(VERBOSE, "verbose");
parser.accepts(THREADS, "max number of concurrent worker threads; Default = " + MAX_WORKERS)
.withRequiredArg()
.describedAs("num-threads")
.ofType(Integer.class);
parser.accepts(NUM_CONNECTIONS_PER_NODE,
"max number of connections to any node; Default = "
+ MAX_CONNECTIONS_PER_NODE)
.withRequiredArg()
.describedAs("num-connections-per-node")
.ofType(Integer.class);
parser.accepts(ITERATIONS, "number of times to repeat benchmark phase; Default = 1")
.withRequiredArg()
.describedAs("num-iter")
.ofType(Integer.class);
parser.accepts(VERIFY, "verify values read; runs only if warm-up phase is included");
parser.accepts(PERCENT_CACHED,
"percentage of requests to come from previously requested keys; valid values are in range [0..100]; 0 means caching disabled. Default = 0")
.withRequiredArg()
.describedAs("percent")
.ofType(Integer.class);
parser.accepts(START_KEY_INDEX, "key index to start warm-up phase from; Default = 0")
.withRequiredArg()
.describedAs("index")
.ofType(Integer.class);
parser.accepts(INTERVAL, "interval in seconds at which to print status; Default = 0")
.withRequiredArg()
.describedAs("sec")
.ofType(Integer.class);
parser.accepts(IGNORE_NULLS, "ignore null values in results");
parser.accepts(PROP_FILE,
"file containing all the properties in key=value format; will override all other command line options specified")
.withRequiredArg()
.describedAs("prop-file");
parser.accepts(STORAGE_CONFIGURATION_CLASS,
"class of the storage engine configuration to use [e.g. voldemort.store.bdb.BdbStorageConfiguration]")
.withRequiredArg()
.describedAs("class-name");
parser.accepts(KEY_TYPE,
"for local tests; key type to support; [ " + IDENTITY_KEY_TYPE + " | "
+ JSONINT_KEY_TYPE + " | " + JSONSTRING_KEY_TYPE + " | "
+ STRING_KEY_TYPE + " <default> ]")
.withRequiredArg()
.describedAs("type");
parser.accepts(REQUEST_FILE,
"file with limited list of keys to be used during benchmark phase; Overrides "
+ RECORD_SELECTION).withRequiredArg();
parser.accepts(VALUE_SIZE,
"size in bytes for random value; used during warm-up phase and write operation of benchmark phase; Default = 1024")
.withRequiredArg()
.describedAs("bytes")
.ofType(Integer.class);
parser.accepts(RECORD_SELECTION,
"record selection distribution [ " + ZIPFIAN_RECORD_SELECTION + " | "
+ LATEST_RECORD_SELECTION + " | " + UNIFORM_RECORD_SELECTION
+ " <default> ]").withRequiredArg();
parser.accepts(TARGET_THROUGHPUT, "fixed throughput target to maintain")
.withRequiredArg()
.describedAs("ops/sec")
.ofType(Integer.class);
parser.accepts(RECORD_COUNT, "number of records inserted during warm-up phase")
.withRequiredArg()
.describedAs("count")
.ofType(Integer.class);
parser.accepts(OPS_COUNT, "number of operations to perform during the benchmark phase")
.withRequiredArg()
.describedAs("count")
.ofType(Integer.class);
parser.accepts(URL, "for remote tests; url of remote server").withRequiredArg();
parser.accepts(STORE_NAME, "for remote tests; store name on the remote " + URL)
.withRequiredArg()
.describedAs("name");
parser.accepts(METRIC_TYPE,
"type of result metric [ " + HISTOGRAM_METRIC_TYPE + " | "
+ SUMMARY_METRIC_TYPE + " <default> ]").withRequiredArg();
parser.accepts(PLUGIN_CLASS,
"classname of implementation of WorkloadPlugin; used to run customized operations ")
.withRequiredArg()
.describedAs("class-name");
parser.accepts(CLIENT_ZONE_ID, "zone id for client; enables zone routing")
.withRequiredArg()
.describedAs("zone-id")
.ofType(Integer.class);
parser.accepts(HELP);
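// Parse the command line and handle the help option up front.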
OptionSet options = parser.parse(args);
if(options.has(HELP)) {
parser.printHelpOn(System.out);
System.exit(0);
}
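// Assemble the benchmark properties, either from a property file or from the individual options below.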
Props mainProps = null;
if(options.has(PROP_FILE)) {
String propFileDestination = (String) options.valueOf(PROP_FILE);
File propertyFile = new File(propFileDestination);
if(!propertyFile.exists()) {
printUsage(parser, "Property file does not exist");
}
try {
mainProps = new Props(propertyFile);
} catch(Exception e) {
printUsage(parser, "Unable to parse the property file");
}
} else {
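// No property file given; build the properties from the individual command-line options.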
mainProps = new Props();
if(options.has(REQUEST_FILE)) {
mainProps.put(REQUEST_FILE, (String) options.valueOf(REQUEST_FILE));
mainProps.put(RECORD_SELECTION, FILE_RECORD_SELECTION);
} else {
mainProps.put(RECORD_SELECTION,
CmdUtils.valueOf(options, RECORD_SELECTION, UNIFORM_RECORD_SELECTION));
}
if(options.has(RECORD_COUNT)) {
mainProps.put(RECORD_COUNT, (Integer) options.valueOf(RECORD_COUNT));
} else {
mainProps.put(RECORD_COUNT, 0);
}
if(!options.has(OPS_COUNT)) {
printUsage(parser, "Missing " + OPS_COUNT);
}
mainProps.put(OPS_COUNT, (Integer) options.valueOf(OPS_COUNT));
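// A URL selects remote mode against a running server; otherwise run locally against a storage engine configuration.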
if(options.has(URL)) {
mainProps.put(URL, (String) options.valueOf(URL));
if(options.has(STORE_NAME)) {
mainProps.put(STORE_NAME, (String) options.valueOf(STORE_NAME));
} else {
printUsage(parser, "Missing store name");
}
} else {
mainProps.put(KEY_TYPE, CmdUtils.valueOf(options, KEY_TYPE, STRING_KEY_TYPE));
mainProps.put(STORAGE_CONFIGURATION_CLASS,
CmdUtils.valueOf(options,
STORAGE_CONFIGURATION_CLASS,
BdbStorageConfiguration.class.getName()));
}
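// Remaining options fall back to the defaults documented in the help text when not supplied.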
mainProps.put(VERBOSE, getCmdBoolean(options, VERBOSE));
mainProps.put(VERIFY, getCmdBoolean(options, VERIFY));
mainProps.put(IGNORE_NULLS, getCmdBoolean(options, IGNORE_NULLS));
mainProps.put(CLIENT_ZONE_ID, CmdUtils.valueOf(options, CLIENT_ZONE_ID, -1));
mainProps.put(START_KEY_INDEX, CmdUtils.valueOf(options, START_KEY_INDEX, 0));
mainProps.put(VALUE_SIZE, CmdUtils.valueOf(options, VALUE_SIZE, 1024));
mainProps.put(ITERATIONS, CmdUtils.valueOf(options, ITERATIONS, 1));
mainProps.put(THREADS, CmdUtils.valueOf(options, THREADS, MAX_WORKERS));
mainProps.put(NUM_CONNECTIONS_PER_NODE, CmdUtils.valueOf(options,
NUM_CONNECTIONS_PER_NODE,
MAX_CONNECTIONS_PER_NODE));
mainProps.put(PERCENT_CACHED, CmdUtils.valueOf(options, PERCENT_CACHED, 0));
mainProps.put(INTERVAL, CmdUtils.valueOf(options, INTERVAL, 0));
mainProps.put(TARGET_THROUGHPUT, CmdUtils.valueOf(options, TARGET_THROUGHPUT, -1));
mainProps.put(METRIC_TYPE, CmdUtils.valueOf(options, METRIC_TYPE, SUMMARY_METRIC_TYPE));
mainProps.put(READS, CmdUtils.valueOf(options, READS, 0));
mainProps.put(WRITES, CmdUtils.valueOf(options, WRITES, 0));
mainProps.put(DELETES, CmdUtils.valueOf(options, DELETES, 0));
mainProps.put(MIXED, CmdUtils.valueOf(options, MIXED, 0));
mainProps.put(PLUGIN_CLASS, CmdUtils.valueOf(options, PLUGIN_CLASS, ""));
mainProps.put(SAMPLE_SIZE, CmdUtils.valueOf(options, SAMPLE_SIZE, 0));
}
// Start the benchmark
Benchmark benchmark = null;
try {
benchmark = new Benchmark();
benchmark.initialize(mainProps);
benchmark.warmUpAndRun();
benchmark.close();
} catch(Exception e) {
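// Show the stack trace only in verbose mode, then print usage and exit with a non-zero status.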
if(options.has(VERBOSE)) {
e.printStackTrace();
}
parser.printHelpOn(System.err);
System.exit(-1);
}
}