Package org.apache.hadoop.hdfs

Examples of org.apache.hadoop.hdfs.CorruptFileBlockIterator


  /**
   * Returns a RemoteIterator over the paths of files that have corrupt
   * blocks under the given path. The iterator is backed by a
   * CorruptFileBlockIterator, which fetches the listing from the NameNode
   * through the DFSClient in batches as the caller advances it.
   */
  @Override
  public RemoteIterator<Path> listCorruptFileBlocks(Path path)
    throws IOException {
    return new CorruptFileBlockIterator(dfs, path);
  }
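
This override is the entry point most callers reach through the FileSystem API. A minimal usage sketch follows; the CorruptFileLister class name, the printCorruptFiles method, and the choice to print to stdout are illustrative assumptions, not part of the snippet above.

import java.io.IOException;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;

public class CorruptFileLister {
  // Print every file the NameNode currently reports as having corrupt
  // blocks under the given directory. Advancing the iterator may trigger
  // further calls to the NameNode as it pages through the listing.
  public static void printCorruptFiles(FileSystem fs, Path dir)
      throws IOException {
    RemoteIterator<Path> corrupt = fs.listCorruptFileBlocks(dir);
    while (corrupt.hasNext()) {
      System.out.println("File with corrupt block(s): " + corrupt.next());
    }
  }
}

The test excerpt below exercises the same API end to end against a test cluster.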


      LOG.info("Namenode has bad files. " + badFiles.size());
      assertTrue("Namenode has " + badFiles.size() + " bad files. Expecting " +
          maxCorruptFileBlocks + ".",
          badFiles.size() == maxCorruptFileBlocks);

      CorruptFileBlockIterator iter = (CorruptFileBlockIterator)
        fs.listCorruptFileBlocks(new Path("/srcdat2"));
      int corruptPaths = countPaths(iter);
      assertTrue("Expected more than " + maxCorruptFileBlocks +
                 " corrupt file blocks but got " + corruptPaths,
                 corruptPaths > maxCorruptFileBlocks);
      assertTrue("Iterator should have made more than 1 call but made " +
                 iter.getCallsMade(),
                 iter.getCallsMade() > 1);

      util.cleanup(fs, "/srcdat2");
    } finally {
      if (cluster != null) { cluster.shutdown(); }
    }
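
The test fragment calls a countPaths helper that is not included in the excerpt. A minimal sketch of what such a helper might look like, assuming it simply drains the iterator and counts the paths it yields:

  // Hypothetical helper (not shown in the excerpt above): drain the iterator
  // and count how many corrupt-file paths it produces. Each hasNext()/next()
  // pair may cause the iterator to fetch another batch from the NameNode,
  // which is what drives getCallsMade() above 1 in the test.
  private int countPaths(CorruptFileBlockIterator iter) throws IOException {
    int count = 0;
    while (iter.hasNext()) {
      iter.next();
      count++;
    }
    return count;
  }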

