Package org.apache.mahout.common

Examples of org.apache.mahout.common.Parameters
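
These excerpts show the typical life cycle of a Parameters object in Mahout jobs: a driver copies command-line options into a Parameters instance, the serialized form is stored in the Hadoop job Configuration (for example under PFPGrowth.PFP_PARAMETERS or "bayes.parameters"), and each mapper or reducer rebuilds the object during task setup, reading individual values with defaults.

Excerpt from a command-line driver (commons-cli2 style) that copies the parsed options into a Parameters object and then dispatches to the sequential or MapReduce FP-Growth implementation: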


      if (cmdLine.hasOption(helpOpt)) {
        CommandLineUtil.printHelp(group);
        return;
      }
     
      Parameters params = new Parameters();
     
      if (cmdLine.hasOption(minSupportOpt)) {
        String minSupportString = (String) cmdLine.getValue(minSupportOpt);
        params.set("minSupport", minSupportString);
      }
      if (cmdLine.hasOption(maxHeapSizeOpt)) {
        String maxHeapSizeString = (String) cmdLine.getValue(maxHeapSizeOpt);
        params.set("maxHeapSize", maxHeapSizeString);
      }
      if (cmdLine.hasOption(numGroupsOpt)) {
        String numGroupsString = (String) cmdLine.getValue(numGroupsOpt);
        params.set("numGroups", numGroupsString);
      }
     
      if (cmdLine.hasOption(treeCacheOpt)) {
        String numTreeCacheString = (String) cmdLine.getValue(treeCacheOpt);
        params.set("treeCacheSize", numTreeCacheString);
      }
     
      if (cmdLine.hasOption(recordSplitterOpt)) {
        String patternString = (String) cmdLine.getValue(recordSplitterOpt);
        params.set("splitPattern", patternString);
      }
     
      String encoding = "UTF-8";
      if (cmdLine.hasOption(encodingOpt)) {
        encoding = (String) cmdLine.getValue(encodingOpt);
      }
      params.set("encoding", encoding);
      Path inputDir = new Path(cmdLine.getValue(inputDirOpt).toString());
      Path outputDir = new Path(cmdLine.getValue(outputOpt).toString());
     
      params.set("input", inputDir.toString());
      params.set("output", outputDir.toString());
     
      String classificationMethod = (String) cmdLine.getValue(methodOpt);
      if ("sequential".equalsIgnoreCase(classificationMethod)) {
        runFPGrowth(params);
      } else if ("mapreduce".equalsIgnoreCase(classificationMethod)) {
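
Excerpt from a mapper/reducer setup() that rebuilds the Parameters from the Configuration, restores the serialized frequent-feature list (F_LIST) and group list (G_LIST) into lookup structures, and reads maxHeapSize and minSupport with defaults: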
 
  @Override
  protected void setup(Context context) throws IOException, InterruptedException {
   
    super.setup(context);
    Parameters params = new Parameters(context.getConfiguration().get(PFPGrowth.PFP_PARAMETERS, ""));
   
    // Rebuild the ordered feature list and the feature -> index map from the serialized F_LIST.
    int i = 0;
    for (Pair<String,Long> e : PFPGrowth.deserializeList(params, PFPGrowth.F_LIST, context.getConfiguration())) {
      featureReverseMap.add(e.getFirst());
      fMap.put(e.getFirst(), i++);
    }
    
    // Collect the index of every feature under its group id from the serialized G_LIST.
    Map<String,Long> gList = PFPGrowth.deserializeMap(params, PFPGrowth.G_LIST, context.getConfiguration());
    
    for (Entry<String,Long> entry : gList.entrySet()) {
      IntArrayList groupList = groupFeatures.get(entry.getValue());
      Integer itemInteger = fMap.get(entry.getKey());
      if (groupList != null) {
        groupList.add(itemInteger);
      } else {
        groupList = new IntArrayList();
        groupList.add(itemInteger);
        groupFeatures.put(entry.getValue(), groupList);
      }
    }
    maxHeapSize = Integer.valueOf(params.get(PFPGrowth.MAX_HEAPSIZE, "50"));
    minSupport = Integer.valueOf(params.get(PFPGrowth.MIN_SUPPORT, "3"));
  }
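
Another setup() that rebuilds the Parameters and only restores the feature-to-index map from the serialized F_LIST (excerpt):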
 
  @Override
  protected void setup(Context context) throws IOException, InterruptedException {
    super.setup(context);
    Parameters params = new Parameters(context.getConfiguration().get(PFPGrowth.PFP_PARAMETERS, ""));
   
    OpenObjectIntHashMap<String> fMap = new OpenObjectIntHashMap<String>();
    int i = 0;
    for (Pair<String,Long> e : PFPGrowth.deserializeList(params, PFPGrowth.F_LIST, context.getConfiguration())) {
      fMap.put(e.getFirst(), i++);
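
A setup() that reads a single value, maxHeapSize, from the parameters stored under the literal key "pfp.parameters":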
 
  @Override
  protected void setup(Context context) throws IOException, InterruptedException {
    super.setup(context);
    Parameters params = new Parameters(context.getConfiguration().get("pfp.parameters", ""));
    maxHeapSize = Integer.valueOf(params.get("maxHeapSize", "50"));
   
  }
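
Excerpt from a Bayes classifier's configuration code: after restoring other values with a stringifier, it rebuilds the Parameters from "bayes.parameters" to read the alpha_i smoothing value: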

      sigmaJSigmaK = stringifier.fromString(sigmaJSigmaKString);
     
      String vocabCountString = job.get("cnaivebayes.vocabCount", stringifier.toString(vocabCount));
      vocabCount = stringifier.fromString(vocabCountString);
     
      Parameters params = new Parameters(job.get("bayes.parameters", ""));
      alphaI = Double.valueOf(params.get("alpha_i", "1.0"));
     
    } catch (IOException ex) {
      log.warn(ex.toString(), ex);
    }
  }
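
A setup() that restores the feature-to-index map and compiles the record-splitting pattern from the splitPattern parameter: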
 
  @Override
  protected void setup(Context context) throws IOException, InterruptedException {
    super.setup(context);
    Parameters params = new Parameters(context.getConfiguration().get(PFPGrowth.PFP_PARAMETERS, ""));
   
    int i = 0;
    for (Pair<String,Long> e : PFPGrowth.deserializeList(params, PFPGrowth.F_LIST, context.getConfiguration())) {
      fMap.put(e.getFirst(), i++);
    }
   
    splitter = Pattern.compile(params.get(PFPGrowth.SPLIT_PATTERN, PFPGrowth.SPLITTER.toString()));
   
  }
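
A setup() that only compiles the splitting pattern: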
 
  @Override
  protected void setup(Context context) throws IOException, InterruptedException {
    super.setup(context);
    Parameters params = new Parameters(context.getConfiguration().get(PFPGrowth.PFP_PARAMETERS, ""));
    splitter = Pattern.compile(params.get(PFPGrowth.SPLIT_PATTERN, PFPGrowth.SPLITTER.toString()));
  }
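
A setup() that loads the frequency list with PFPGrowth.readFList() and reads maxHeapSize, minSupport, maxPerGroup and the useFPG2 flag from the Parameters: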

 
  @Override
  protected void setup(Context context) throws IOException, InterruptedException {
   
    super.setup(context);
    Parameters params = new Parameters(context.getConfiguration().get(PFPGrowth.PFP_PARAMETERS, ""));
   
    for (Pair<String,Long> e : PFPGrowth.readFList(context.getConfiguration())) {
      featureReverseMap.add(e.getFirst());
      freqList.add(e.getSecond());
    }
   
    maxHeapSize = Integer.valueOf(params.get(PFPGrowth.MAX_HEAP_SIZE, "50"));
    minSupport = Integer.valueOf(params.get(PFPGrowth.MIN_SUPPORT, "3"));

    maxPerGroup = params.getInt(PFPGrowth.MAX_PER_GROUP, 0);
    numFeatures = featureReverseMap.size();
    useFP2 = "true".equals(params.get(PFPGrowth.USE_FPG2));
  }
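
Excerpt from a setup() that builds the feature-to-index map from readFList() and then reads the split pattern and maxPerGroup from the Parameters: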

    int i = 0;
    for (Pair<String,Long> e : PFPGrowth.readFList(context.getConfiguration())) {
      fMap.put(e.getFirst(), i++);
    }
   
    Parameters params =
      new Parameters(context.getConfiguration().get(PFPGrowth.PFP_PARAMETERS, ""));

    splitter = Pattern.compile(params.get(PFPGrowth.SPLIT_PATTERN,
                                          PFPGrowth.SPLITTER.toString()));
   
    maxPerGroup = params.getInt(PFPGrowth.MAX_PER_GROUP, 0);
  }
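
Excerpt from a newer AbstractJob-style driver in which the same options are read with hasOption()/getOption() and copied into Parameters: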

    if (parseArguments(args) == null) {
      return -1;
    }

    Parameters params = new Parameters();

    if (hasOption("minSupport")) {
      String minSupportString = getOption("minSupport");
      params.set("minSupport", minSupportString);
    }
    if (hasOption("maxHeapSize")) {
      String maxHeapSizeString = getOption("maxHeapSize");
      params.set("maxHeapSize", maxHeapSizeString);
    }
    if (hasOption("numGroups")) {
      String numGroupsString = getOption("numGroups");
      params.set("numGroups", numGroupsString);
    }

    if (hasOption("numTreeCacheEntries")) {
      String numTreeCacheString = getOption("numTreeCacheEntries");
      params.set("treeCacheSize", numTreeCacheString);
    }

    if (hasOption("splitterPattern")) {
      String patternString = getOption("splitterPattern");
      params.set("splitPattern", patternString);
    }

    String encoding = "UTF-8";
    if (hasOption("encoding")) {
      encoding = getOption("encoding");
    }
    params.set("encoding", encoding);

    if (hasOption("useFPG2")) {
      params.set(PFPGrowth.USE_FPG2, "true");
    }

    Path inputDir = getInputPath();
    Path outputDir = getOutputPath();

    params.set("input", inputDir.toString());
    params.set("output", outputDir.toString());

    String classificationMethod = getOption("method");
    if ("sequential".equalsIgnoreCase(classificationMethod)) {
      runFPGrowth(params);
    } else if ("mapreduce".equalsIgnoreCase(classificationMethod)) {
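
Putting the two halves together, the following is a minimal sketch (not taken from the excerpts above) of the round trip the snippets rely on: the driver serializes its Parameters into the Configuration, and the task side parses them back. It assumes that Parameters.toString() emits the serialized form accepted by the Parameters(String) constructor (the form the setup() methods above read back from the Configuration); the class name, key literal and hard-coded values here are illustrative only.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.mahout.common.Parameters;

// Illustrative sketch; ParametersRoundTrip is not a Mahout class.
public class ParametersRoundTrip {

  // Same literal key as one of the excerpts above; other excerpts use PFPGrowth.PFP_PARAMETERS.
  private static final String KEY = "pfp.parameters";

  public static void main(String[] args) throws IOException {
    // Driver side: collect settings and stash the serialized Parameters in the Configuration.
    Parameters params = new Parameters();
    params.set("minSupport", "3");
    params.set("maxHeapSize", "50");
    params.set("encoding", "UTF-8");

    Configuration conf = new Configuration();
    conf.set(KEY, params.toString()); // assumed to produce the parseable serialized form

    // Task side (what the setup() methods above do): rebuild and read values with defaults.
    Parameters restored = new Parameters(conf.get(KEY, ""));
    int minSupport = Integer.valueOf(restored.get("minSupport", "3"));
    int maxHeapSize = Integer.valueOf(restored.get("maxHeapSize", "50"));
    System.out.println("minSupport=" + minSupport + ", maxHeapSize=" + maxHeapSize);
  }
}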
