Package: org.apache.mahout.common.iterator

Examples of org.apache.mahout.common.iterator.FileLineIterable


  private final FastByIDMap<TrackData> trackData;

  TrackItemSimilarity(File dataFileDirectory) throws IOException {
    trackData = new FastByIDMap<TrackData>();
    for (String line : new FileLineIterable(KDDCupDataModel.getTrackFile(dataFileDirectory))) {
      TrackData trackDatum = new TrackData(line);
      trackData.put(trackDatum.getTrackID(), trackDatum);
    }
  }
View Full Code Here


    File outputDir2 = getTestTempDir("frequentpatterns2");
    paramsImpl2.set(PFPGrowth.OUTPUT, outputDir2.getAbsolutePath());

    Writer writer = Files.newWriter(input, Charsets.UTF_8);
    try {
      StringRecordIterator it = new StringRecordIterator(new FileLineIterable(Resources.getResource(
        "retail.dat").openStream()), "\\s+");
      Collection<List<String>> transactions = Lists.newArrayList();
     
      while (it.hasNext()) {
        Pair<List<String>,Long> next = it.next();
View Full Code Here

  @Test
  public void testSpecificCaseFromRetailDataMinSup500() throws IOException {
    FPGrowthObj<String> fp = new FPGrowthObj<String>();
   
    StringRecordIterator it = new StringRecordIterator(new FileLineIterable(Resources.getResource(
      "retail.dat").openStream()), "\\s+");
    int pattern_41_36_39 = 0;
    while (it.hasNext()) {
      Pair<List<String>,Long> next = it.next();
      List<String> items = next.getFirst();
      if (items.contains("41") && items.contains("36") && items.contains("39")) {
        pattern_41_36_39++;
      }
    }
   
    final Map<Set<String>,Long> results = Maps.newHashMap();
   
    Set<String> returnableFeatures = Sets.newHashSet();
    returnableFeatures.add("41");
    returnableFeatures.add("36");
    returnableFeatures.add("39");
   
    fp.generateTopKFrequentPatterns(
      new StringRecordIterator(new FileLineIterable(Resources.getResource("retail.dat").openStream()), "\\s+"),

      fp.generateFList(new StringRecordIterator(new FileLineIterable(Resources.getResource("retail.dat")
          .openStream()), "\\s+"), 500), 500, 1000, returnableFeatures,
      new OutputCollector<String,List<Pair<List<String>,Long>>>() {
       
        @Override
        public void collect(String key, List<Pair<List<String>,Long>> value) {
View Full Code Here

    File input = new File(inputDir, "synth_test.txt");
    params.set(PFPGrowth.INPUT, input.getAbsolutePath());
    params.set(PFPGrowth.OUTPUT, outputDir.getAbsolutePath());
    Writer writer = Files.newWriter(input, Charsets.UTF_8);
    try {
      StringRecordIterator it = new StringRecordIterator(new FileLineIterable(Resources.getResource(
        "FPGsynth.dat").openStream()), "\\s+");
      Collection<List<String>> transactions = Lists.newArrayList();
     
      while (it.hasNext()) {
        Pair<List<String>,Long> next = it.next();
View Full Code Here

    final Map<Set<String>,Long> seqResult = Maps.newHashMap();
   
    FPGrowthObj<String> fpSeq = new FPGrowthObj<String>();
    fpSeq.generateTopKFrequentPatterns(
      new StringRecordIterator(new FileLineIterable(Resources.getResource(inputFilename).openStream()), "\\s+"),

      fpSeq.generateFList(new StringRecordIterator(new FileLineIterable(Resources.getResource(inputFilename)
           .openStream()), "\\s+"), minSupport), minSupport, 1000000,
      null,
      new OutputCollector<String,List<Pair<List<String>,Long>>>() {
       
        @Override
View Full Code Here

             + "org.apache.hadoop.io.serializer.WritableSerialization");
    // Dont ever forget this. People should keep track of how hadoop conf
    // parameters can make or break a piece of code
   
    Set<String> categories = Sets.newHashSet();
    for (String line : new FileLineIterable(new File(catFile))) {
      categories.add(line.trim().toLowerCase(Locale.ENGLISH));
    }
   
    Stringifier<Set<String>> setStringifier =
        new DefaultStringifier<Set<String>>(conf, GenericsUtil.getClass(categories));
View Full Code Here

    FPGrowthObj<String> fp = new FPGrowthObj<String>();
   
    String inputFilename = "FPGsynth.dat";

    StringRecordIterator it =
        new StringRecordIterator(new FileLineIterable(Resources.getResource(inputFilename).openStream()), "\\s+");
    int patternCnt_10_13_1669 = 0;
    int patternCnt_10_13 = 0;
    while (it.hasNext()) {
      Pair<List<String>,Long> next = it.next();
      List<String> items = next.getFirst();
      if (items.contains("10") && items.contains("13")) {
        patternCnt_10_13++;
        if (items.contains("1669")) {
          patternCnt_10_13_1669++;
        }
      }
    }

    int minSupport = 50;
    if (patternCnt_10_13_1669 < minSupport) {
      throw new IllegalStateException("the test is broken or data is missing ("
                                          + patternCnt_10_13_1669 + ", "
                                          + patternCnt_10_13 + ')');
    }

    final Map<Set<String>,Long> results = Maps.newHashMap();
   
    Set<String> features_10_13 = Sets.newHashSet();
    features_10_13.add("10");
    features_10_13.add("13");

    Set<String> returnableFeatures = Sets.newHashSet();
    returnableFeatures.add("10");
    returnableFeatures.add("13");
    returnableFeatures.add("1669");
   
    fp.generateTopKFrequentPatterns(new StringRecordIterator(new FileLineIterable(Resources.getResource(inputFilename).openStream()), "\\s+"),

                                    fp.generateFList(new StringRecordIterator(new FileLineIterable(Resources.getResource(inputFilename)
                                                                                                   .openStream()), "\\s+"), minSupport), minSupport, 100000,
                                    returnableFeatures,
                                    new OutputCollector<String,List<Pair<List<String>,Long>>>() {
       
                                      @Override
View Full Code Here

    final Map<Set<String>,Long> results1 = Maps.newHashMap();

    String inputFilename = "FPGsynth.dat";
    int minSupport = 100;
    fp1.generateTopKFrequentPatterns(new StringRecordIterator(new FileLineIterable(Resources.getResource(inputFilename).openStream()), "\\s+"),

                                     fp1.generateFList(new StringRecordIterator(new FileLineIterable(Resources.getResource(inputFilename)
                                                                                                     .openStream()), "\\s+"), minSupport), minSupport, 1000000,
                                     returnableFeatures,
                                     new OutputCollector<String,List<Pair<List<String>,Long>>>() {
       
                                       @Override
                                         public void collect(String key, List<Pair<List<String>,Long>> value) {
         
                                         for (Pair<List<String>,Long> v : value) {
                                           List<String> l = v.getFirst();
                                           results1.put(Sets.newHashSet(l), v.getSecond());
                                           System.out.println("found pat ["+v.getSecond()+"]: "+ v.getFirst());
                                         }
                                       }
       
                                     }, new StatusUpdater() {
       
                                         @Override
                                           public void update(String status) {}
                                       });

    FPGrowthObj<String> fp2 = new FPGrowthObj<String>();
    final Map<Set<String>,Long> initialResults2 = Maps.newHashMap();
    fp2.generateTopKFrequentPatterns(new StringRecordIterator(new FileLineIterable(Resources.getResource(inputFilename).openStream()), "\\s+"),

                                     fp2.generateFList(new StringRecordIterator(new FileLineIterable(Resources.getResource(inputFilename)
                                                                                                     .openStream()), "\\s+"), minSupport), minSupport, 1000000,
                                     Sets.<String>newHashSet(),
                                     new OutputCollector<String,List<Pair<List<String>,Long>>>() {
       
                                       @Override
View Full Code Here

     assertEquals(3.5, recommendedItem.getValue(), 0.05);
   }

  static Map<Pair<Long,Long>, Double> readSimilarities(File file) throws IOException {
    Map<Pair<Long,Long>, Double> similarities = Maps.newHashMap();
    for (String line : new FileLineIterable(file)) {
      String[] parts = line.split("\t");
      similarities.put(new Pair<Long,Long>(Long.parseLong(parts[0]), Long.parseLong(parts[1])),
          Double.parseDouble(parts[2]));
    }
    return similarities;
View Full Code Here

    return similarities;
  }

  static Map<Long,List<RecommendedItem>> readRecommendations(File file) throws IOException {
    Map<Long,List<RecommendedItem>> recommendations = Maps.newHashMap();
    for (String line : new FileLineIterable(file)) {

      String[] keyValue = line.split("\t");
      long userID = Long.parseLong(keyValue[0]);
      String[] tokens = keyValue[1].replaceAll("\\[", "")
          .replaceAll("\\]", "").split(",");
View Full Code Here

TOP

Related Classes of org.apache.mahout.common.iterator.FileLineIterable

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc., which is owned by Oracle, Inc. Contact: coftware@gmail.com.