// Tail of the CLI2 Group builder: registers the remaining options and builds the group.
outputOpt).withOption(maxHeapSizeOpt).withOption(numGroupsOpt).withOption(methodOpt).withOption(
encodingOpt).withOption(helpOpt).withOption(treeCacheOpt).withOption(recordSplitterOpt).create();
// Parse the command line against the assembled option group.
// NOTE(review): the matching catch/finally for this try lies outside this view.
try {
Parser parser = new Parser();
parser.setGroup(group);
CommandLine cmdLine = parser.parse(args);
// --help short-circuits everything else: print usage and exit.
if (cmdLine.hasOption(helpOpt)) {
CommandLineUtil.printHelp(group);
return;
}
// Collect optional tuning knobs into a Parameters bag. Options not supplied
// are simply never set, so downstream consumers fall back to their own defaults.
Parameters params = new Parameters();
if (cmdLine.hasOption(minSupportOpt)) {
// Minimum support threshold for frequent-pattern mining (kept as a String;
// parsing/validation presumably happens downstream — TODO confirm).
String minSupportString = (String) cmdLine.getValue(minSupportOpt);
params.set("minSupport", minSupportString);
}
if (cmdLine.hasOption(maxHeapSizeOpt)) {
String maxHeapSizeString = (String) cmdLine.getValue(maxHeapSizeOpt);
params.set("maxHeapSize", maxHeapSizeString);
}
if (cmdLine.hasOption(numGroupsOpt)) {
String numGroupsString = (String) cmdLine.getValue(numGroupsOpt);
params.set("numGroups", numGroupsString);
}
if (cmdLine.hasOption(treeCacheOpt)) {
String numTreeCacheString = (String) cmdLine.getValue(treeCacheOpt);
params.set("treeCacheSize", numTreeCacheString);
}
if (cmdLine.hasOption(recordSplitterOpt)) {
// Regex used to split input records into items.
String patternString = (String) cmdLine.getValue(recordSplitterOpt);
params.set("splitPattern", patternString);
}
// Input character encoding; defaults to UTF-8 when not specified.
String encoding = "UTF-8";
if (cmdLine.hasOption(encodingOpt)) {
encoding = (String) cmdLine.getValue(encodingOpt);
}
params.set("encoding", encoding);
// Required input/output locations. getValue returns null if absent —
// NOTE(review): no explicit null check here; verify the options are marked required.
String inputDir = (String) cmdLine.getValue(inputDirOpt);
String outputDir = (String) cmdLine.getValue(outputOpt);
params.set("input", inputDir);
params.set("output", outputDir);
// Dispatch on execution mode: in-process sequential FPGrowth vs. Hadoop PFPGrowth.
String classificationMethod = (String) cmdLine.getValue(methodOpt);
if (classificationMethod.equalsIgnoreCase("sequential")) {
runFPGrowth(params);
} else if (classificationMethod.equalsIgnoreCase("mapreduce")) {
// Clear any stale output directory before launching the MapReduce job.
HadoopUtil.overwriteOutput(outputDir);
PFPGrowth.runPFPGrowth(params);