Examples of checkFiles()


Examples of org.apache.hadoop.hdfs.DFSTestUtil.checkFiles()

      util.createFiles(fs, TopDir, (short)3);
      util.waitReplication(fs, TopDir, (short)3);
      util.checkFiles(fs, TopDir);
      cluster.restartDataNodes();
      cluster.waitActive();
      util.checkFiles(fs, TopDir);
    } finally {
      cluster.shutdown();
    }
  }
 
View Full Code Here

Examples of org.apache.hadoop.hdfs.DFSTestUtil.checkFiles()

      // Here we restart the MiniDFScluster without formatting namenode
      cluster = new MiniDFSCluster.Builder(conf).format(false).numDataNodes(4).build();
      fsn = cluster.getNamesystem();
      FileSystem fs = cluster.getFileSystem();
      assertTrue("Filesystem corrupted after restart.",
                 files.checkFiles(fs, dir));

      final FileStatus newrootstatus = fs.getFileStatus(rootpath);
      assertEquals(rootmtime, newrootstatus.getModificationTime());
      assertEquals(rootstatus.getOwner() + "_XXX", newrootstatus.getOwner());
      assertEquals(rootstatus.getGroup(), newrootstatus.getGroup());
View Full Code Here

Examples of org.apache.hadoop.hdfs.DFSTestUtil.checkFiles()

    nsInfo = cluster.getNameNode().versionRequest();

    for (DataNode dn : cluster.getDataNodes()) {
      assertEquals(nsInfo.getCTime(), dn.getCTime(nsInfo.getNamespaceID()));
    }
    assertTrue(util.checkFiles(cluster.getFileSystem(), "/"));
  }

  // Create a file with RBWs
  private void createFile(FileSystem fs, FSDataOutputStream out,
      String fileName, int fileLen) throws IOException {
View Full Code Here

Examples of org.apache.hadoop.hdfs.DFSTestUtil.checkFiles()

      // test finalized replicas
      final String TopDir = "/test";
      DFSTestUtil util = new DFSTestUtil("TestCrcCorruption", 2, 3, 8 * 1024);
      util.createFiles(fs, TopDir, (short) 3);
      util.waitReplication(fs, TopDir, (short) 3);
      util.checkFiles(fs, TopDir);
      cluster.restartDataNodes();
      cluster.waitActive();
      util.checkFiles(fs, TopDir);
    } finally {
      cluster.shutdown();
View Full Code Here

Examples of org.apache.hadoop.hdfs.DFSTestUtil.checkFiles()

      util.createFiles(fs, TopDir, (short) 3);
      util.waitReplication(fs, TopDir, (short) 3);
      util.checkFiles(fs, TopDir);
      cluster.restartDataNodes();
      cluster.waitActive();
      util.checkFiles(fs, TopDir);
    } finally {
      cluster.shutdown();
    }
  }
View Full Code Here

Examples of org.apache.hadoop.hdfs.DFSTestUtil.checkFiles()

          LOG.info("Deliberately corrupting file " + blocks[idx].getName() +
              " at offset " + position + " length " + length);

          // read all files to trigger detection of corrupted replica
          try {
            util.checkFiles(fs, "/srcdat10");
            // we should get a ChecksumException or a BlockMissingException
          } catch (BlockMissingException e) {
            System.out.println("Received BlockMissingException as expected.");
          } catch (ChecksumException e) {
            System.out.println("Received ChecksumException as expected.");
View Full Code Here

Examples of org.apache.hadoop.hdfs.DFSTestUtil.checkFiles()

          LOG.info("Deliberately corrupting file " + blocks[idx].getName() +
              " at offset " + position + " length " + length);

          // read all files to trigger detection of corrupted replica
          try {
            util.checkFiles(fs, "/srcdat10");
          } catch (BlockMissingException e) {
            System.out.println("Received BlockMissingException as expected.");
          } catch (ChecksumException e) {
            System.out.println("Received ChecksumException as expected.");
          } catch (IOException e) {
View Full Code Here

Examples of org.apache.hadoop.hdfs.DFSTestUtil.checkFiles()

      }

     
      // read all files to trigger detection of corrupted replica
      try {
        util.checkFiles(fs, "/srcdat10");
      } catch (BlockMissingException e) {
        System.out.println("Received BlockMissingException as expected.");
      } catch (ChecksumException e) {
        System.out.println("Received ChecksumException as expected.");
      } catch (IOException e) {
View Full Code Here

Examples of org.apache.hadoop.hdfs.DFSTestUtil.checkFiles()

          LOG.info("Deliberately corrupting file " + blocks[idx].getName() +
              " at offset " + position + " length " + length);

          // read all files to trigger detection of corrupted replica
          try {
            util.checkFiles(fs, "/srcdat10");
          } catch (BlockMissingException e) {
            System.out.println("Received BlockMissingException as expected.");
          } catch (ChecksumException e) {
            System.out.println("Received ChecksumException as expected.");
          } catch (IOException e) {
View Full Code Here

Examples of org.apache.hadoop.hdfs.DFSTestUtil.checkFiles()

      } catch (InterruptedException ignore) {
      }

      // read all files to trigger detection of corrupted replica
      try {
        util.checkFiles(fs, "/srcdat10");
      } catch (BlockMissingException e) {
        System.out.println("Received BlockMissingException as expected.");
      } catch (ChecksumException e) {
        System.out.println("Received ChecksumException as expected.");
      } catch (IOException e) {
View Full Code Here
TOP
Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and is owned by ORACLE Inc. Contact coftware#gmail.com.