Package org.apache.hadoop.tools

Examples of org.apache.hadoop.tools.DistCp$DuplicationException
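The excerpts below are test code exercising the legacy org.apache.hadoop.tools.DistCp tool: each one drives a copy through ToolRunner.run(new DistCp(conf), args) with some combination of the -log, -p, -update, -overwrite and -skipcrccheck options, then verifies the result with assertions. DistCp.DuplicationException is the failure this DistCp reports when two entries in the source listing would be written to the same destination path; in that case ToolRunner.run returns DistCp.DuplicationException.ERROR_CODE instead of 0, as the last example asserts.

A minimal sketch of that failure mode (not taken from the excerpts; the class name, the file: URIs and the precondition that both source trees already exist and contain identically named files are illustrative assumptions):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.tools.DistCp;
    import org.apache.hadoop.util.ToolRunner;

    public class DistCpDuplicationDemo {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Illustrative paths: both source trees are assumed to already exist
        // and to hold files with the same names.
        String src1 = "file:///tmp/demo/srcdat";
        String src2 = "file:///tmp/demo/src2/srcdat";   // copy of the same files
        String dest = "file:///tmp/demo/destdat";

        // Both sources end in "srcdat", so their files map onto the same
        // destination paths; the legacy DistCp rejects the copy and returns
        // DuplicationException.ERROR_CODE instead of 0.
        int rc = ToolRunner.run(new DistCp(conf), new String[] { src1, src2, dest });
        if (rc == DistCp.DuplicationException.ERROR_CODE) {
          System.err.println("DistCp detected duplicated source files");
        }
      }
    }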


      cluster = new MiniDFSCluster(conf, 1, true, null);
      final FileSystem hdfs = cluster.getFileSystem();
      final String namenode = FileSystem.getDefaultUri(conf).toString();
      if (namenode.startsWith("hdfs://")) {
        MyFile[] files = createFiles(URI.create(namenode), "/srcdat");
        ToolRunner.run(new DistCp(conf), new String[] {
                                         "-log",
                                         "/logs",
                                         namenode+"/srcdat",
                                         "file:///"+TEST_ROOT_DIR+"/destdat"});
        assertTrue("Source and destination directories do not match.",
                   checkFiles(FileSystem.get(LOCAL_FS, conf), TEST_ROOT_DIR+"/destdat", files));


      cluster = new MiniDFSCluster(conf, 2, true, null);
      final FileSystem hdfs = cluster.getFileSystem();
      final String namenode = hdfs.getUri().toString();
      if (namenode.startsWith("hdfs://")) {
        MyFile[] files = createFiles(URI.create(namenode), "/srcdat");
        ToolRunner.run(new DistCp(conf), new String[] {
                                         "-p",
                                         "-log",
                                         namenode+"/logs",
                                         namenode+"/srcdat",
                                         namenode+"/destdat"});
        assertTrue("Source and destination directories do not match.",
                   checkFiles(hdfs, "/destdat", files));
        FileSystem fs = FileSystem.get(URI.create(namenode+"/logs"), conf);
        assertTrue("Log directory does not exist.",
                    fs.exists(new Path(namenode+"/logs")));

        FileStatus[] dchkpoint = getFileStatus(hdfs, "/destdat", files);
        final int nupdate = NFILES>>2;
        updateFiles(cluster.getFileSystem(), "/srcdat", files, nupdate);
        deldir(hdfs, "/logs");

        ToolRunner.run(new DistCp(conf), new String[] {
                                         "-p",
                                         "-update",
                                         "-log",
                                         namenode+"/logs",
                                         namenode+"/srcdat",
                                         namenode+"/destdat"});
        assertTrue("Source and destination directories do not match.",
                   checkFiles(hdfs, "/destdat", files));
        assertTrue("Update failed to replicate all changes in src",
                 checkUpdate(hdfs, dchkpoint, "/destdat", files, nupdate));

        deldir(hdfs, "/logs");
        ToolRunner.run(new DistCp(conf), new String[] {
                                         "-p",
                                         "-overwrite",
                                         "-log",
                                         namenode+"/logs",
                                         namenode+"/srcdat",
                                         namenode+"/destdat"});
        assertTrue("Source and destination directories do not match.",
                   checkFiles(hdfs, "/destdat", files));

  /** copy files from local file system to local file system */
  public void testCopyFromLocalToLocal() throws Exception {
    Configuration conf = new Configuration();
    FileSystem localfs = FileSystem.get(LOCAL_FS, conf);
    MyFile[] files = createFiles(LOCAL_FS, TEST_ROOT_DIR+"/srcdat");
    ToolRunner.run(new DistCp(new Configuration()),
                           new String[] {"file:///"+TEST_ROOT_DIR+"/srcdat",
                                         "file:///"+TEST_ROOT_DIR+"/destdat"});
    assertTrue("Source and destination directories do not match.",
               checkFiles(localfs, TEST_ROOT_DIR+"/destdat", files));
    deldir(localfs, TEST_ROOT_DIR+"/destdat");
    deldir(localfs, TEST_ROOT_DIR+"/srcdat");
  }

      cluster = new MiniDFSCluster(conf, 2, true, null);
      final FileSystem hdfs = cluster.getFileSystem();
      namenode = FileSystem.getDefaultUri(conf).toString();
      if (namenode.startsWith("hdfs://")) {
        MyFile[] files = createFiles(URI.create(namenode), "/srcdat");
        ToolRunner.run(new DistCp(conf), new String[] {
                                         "-log",
                                         namenode+"/logs",
                                         namenode+"/srcdat",
                                         namenode+"/destdat"});
        assertTrue("Source and destination directories do not match.",
                   checkFiles(hdfs, "/destdat", files));

      if (namenode.startsWith("hdfs://")) {
       
        FileSystem fs = FileSystem.get(URI.create(namenode), new Configuration());
        fs.mkdirs(new Path("/empty"));

        ToolRunner.run(new DistCp(conf), new String[] {
                                         "-log",
                                         namenode+"/logs",
                                         namenode+"/empty",
                                         namenode+"/dest"});
        fs = FileSystem.get(URI.create(namenode+"/destdat"), conf);

      cluster = new MiniDFSCluster(conf, 1, true, null);
      final FileSystem hdfs = cluster.getFileSystem();
      final String namenode = hdfs.getUri().toString();
      if (namenode.startsWith("hdfs://")) {
        MyFile[] files = createFiles(LOCAL_FS, TEST_ROOT_DIR+"/srcdat");
        ToolRunner.run(new DistCp(conf), new String[] {
                                         "-log",
                                         namenode+"/logs",
                                         "file:///"+TEST_ROOT_DIR+"/srcdat",
                                         namenode+"/destdat"});
        assertTrue("Source and destination directories do not match.",
                   checkFiles(hdfs, "/destdat", files));

      cluster = new MiniDFSCluster(conf, 2, true, null);
      final FileSystem hdfs = cluster.getFileSystem();
      final String namenode = hdfs.getUri().toString();
      if (namenode.startsWith("hdfs://")) {
        MyFile[] files = createFiles(URI.create(namenode), "/srcdat");
        ToolRunner.run(new DistCp(conf), new String[] {
                                         "-p",
                                         "-log",
                                         namenode+"/logs",
                                         namenode+"/srcdat",
                                         namenode+"/destdat"});
        assertTrue("Source and destination directories do not match.",
                   checkFiles(hdfs, "/destdat", files));
        FileSystem fs = FileSystem.get(URI.create(namenode+"/logs"), conf);
        assertTrue("Log directory does not exist.",
                    fs.exists(new Path(namenode+"/logs")));

        FileStatus[] dchkpoint = getFileStatus(hdfs, "/destdat", files);
        final int nupdate = NFILES>>2;
        updateFiles(cluster.getFileSystem(), "/srcdat", files, nupdate);
        deldir(hdfs, "/logs");

        ToolRunner.run(new DistCp(conf), new String[] {
                                         "-prbugp", // no t to avoid preserving mod. times
                                         "-update",
                                         "-log",
                                         namenode+"/logs",
                                         namenode+"/srcdat",
                                         namenode+"/destdat"});
        assertTrue("Source and destination directories do not match.",
                   checkFiles(hdfs, "/destdat", files));
        assertTrue("Update failed to replicate all changes in src",
                 checkUpdate(hdfs, dchkpoint, "/destdat", files, nupdate));

        deldir(hdfs, "/logs");
        ToolRunner.run(new DistCp(conf), new String[] {
                                         "-prbugp", // no t to avoid preserving mod. times
                                         "-overwrite",
                                         "-log",
                                         namenode+"/logs",
                                         namenode+"/srcdat",
                                         namenode+"/destdat"});
        assertTrue("Source and destination directories do not match.",
                   checkFiles(hdfs, "/destdat", files));

        out = fs.create(destPath, true);
        out.writeUTF(destData);
        out.close();
       
        // Run with -skipcrccheck option
        ToolRunner.run(new DistCp(conf), new String[] {
          "-p",
          "-update",
          "-skipcrccheck",
          "-log",
          namenode+"/logs",
          namenode+"/srcdat",
          namenode+"/destdat"});
       
        // File should not be overwritten
        FSDataInputStream in = hdfs.open(destPath);
        String s = in.readUTF();
        System.out.println("Dest had: " + s);
        assertTrue("Dest got over written even with skip crc",
            s.equalsIgnoreCase(destData));
        in.close();
       
        deldir(hdfs, "/logs");

        // Run without the option       
        ToolRunner.run(new DistCp(conf), new String[] {
          "-p",
          "-update",
          "-log",
          namenode+"/logs",
          namenode+"/srcdat",
          namenode+"/destdat"});
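In the -skipcrccheck example above, destData is written directly into the destination before the copy; with -p -update -skipcrccheck this DistCp skips the checksum comparison when deciding whether a destination file is up to date, so the hand-written file is left untouched, which is exactly what the assertion checks. The second run, without -skipcrccheck, serves as the contrast case.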

   
  public void testCopyDuplication() throws Exception {
    final FileSystem localfs = FileSystem.get(LOCAL_FS, new Configuration());
    try {   
      MyFile[] files = createFiles(localfs, TEST_ROOT_DIR+"/srcdat");
      ToolRunner.run(new DistCp(new Configuration()),
          new String[] {"file:///"+TEST_ROOT_DIR+"/srcdat",
                        "file:///"+TEST_ROOT_DIR+"/src2/srcdat"});
      assertTrue("Source and destination directories do not match.",
                 checkFiles(localfs, TEST_ROOT_DIR+"/src2/srcdat", files));
 
      assertEquals(DistCp.DuplicationException.ERROR_CODE,
          ToolRunner.run(new DistCp(new Configuration()),
          new String[] {"file:///"+TEST_ROOT_DIR+"/srcdat",
                        "file:///"+TEST_ROOT_DIR+"/src2/srcdat",
                        "file:///"+TEST_ROOT_DIR+"/destdat",}));
    }
    finally {
      // clean up the directories created for this test
      deldir(localfs, TEST_ROOT_DIR+"/destdat");
      deldir(localfs, TEST_ROOT_DIR+"/src2");
      deldir(localfs, TEST_ROOT_DIR+"/srcdat");
    }
  }
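Here TEST_ROOT_DIR+"/src2/srcdat" is first populated as an exact copy of TEST_ROOT_DIR+"/srcdat" (verified by checkFiles). When both directories are then given as sources of a single copy to "/destdat", their files resolve to the same destination names, so DistCp refuses the copy and ToolRunner.run returns DistCp.DuplicationException.ERROR_CODE rather than 0, as the assertEquals verifies.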
