Package org.apache.hadoop.util

Examples of org.apache.hadoop.util.Progressable
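
Progressable is a small callback interface in org.apache.hadoop.util: it declares a single method, progress(), which long-running operations call to signal that they are still alive and making progress. Many filesystem and record-writer APIs accept a Progressable parameter, and callers often pass a no-op implementation, as most of the snippets below do.

A minimal sketch of the interface and a typical call site; the path used here is made up for illustration and does not come from the examples below:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.util.Progressable;

    public class ProgressableSketch {
      // The interface itself is tiny:
      //   public interface Progressable {
      //     void progress();
      //   }
      public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new Configuration());
        // create(Path, Progressable): the filesystem invokes the callback while the
        // file is written, so the caller can report liveness to its framework.
        FSDataOutputStream out = fs.create(new Path("/tmp/progressable-demo"),
            new Progressable() {
              @Override
              public void progress() {
                // no-op here; a real caller might forward this to a task reporter
              }
            });
        out.writeUTF("hello");
        out.close();
      }
    }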


  // HBase region server: instantiateRegion builds an HRegion and initializes it,
  // using a Progressable callback to signal that the region is still being opened.
  protected HRegion instantiateRegion(final HRegionInfo regionInfo)
      throws IOException {
    HRegion r = HRegion.newHRegion(HTableDescriptor.getTableDir(rootDir, regionInfo
        .getTableDesc().getName()), this.hlog, this.fs, conf, regionInfo,
        this.cacheFlusher);
    r.initialize(null,  new Progressable() {
      public void progress() {
        addProcessingMessage(regionInfo);
      }
    });
    return r;
  }
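
Unlike most examples on this page, the Progressable above is not a no-op: each time the callback fires during initialization, addProcessingMessage(regionInfo) records that the region is still being processed.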


        // A test that walks through the FileSystem.create overloads, printing which
        // variant it is about to exercise; several of them accept a Progressable.
        assertEquals(1, files.size());
        deleteFs.delete(file.getParent(), true);
        janitor.clearPath(testPath);
       
        System.out.println("create(Path, Progressable)");
        fout = deleteFs.create(file, new Progressable(){
            @Override
            public void progress() {
            }
        });
        assertNotNull(fout);
        fout.close();
        files = meta.list(Collections.singletonList(file.getParent()));
        assertEquals(1, files.size());
        deleteFs.delete(file.getParent(), true);
        janitor.clearPath(testPath);
       
        System.out.println("create(Path, boolean)");
        fout = deleteFs.create(file, true);
        assertNotNull(fout);
        fout.close();
        files = meta.list(Collections.singletonList(file.getParent()));
        assertEquals(1, files.size());
        deleteFs.delete(file.getParent(), true);
        janitor.clearPath(testPath);
       
        System.out.println("create(Path, short)");
        fout = deleteFs.create(file, (short) 1);
        assertNotNull(fout);
        fout.close();
        files = meta.list(Collections.singletonList(file.getParent()));
        assertEquals(1, files.size());
        deleteFs.delete(file.getParent(), true);
        janitor.clearPath(testPath);
       
        System.out.println("create(Path, boolean, int)");
        fout = deleteFs.create(file, true, 4096);
        assertNotNull(fout);
        fout.close();
        files = meta.list(Collections.singletonList(file.getParent()));
        assertEquals(1, files.size());
        deleteFs.delete(file.getParent(), true);
        janitor.clearPath(testPath);
       
        System.out.println("create(FileSystem, Path, FsPermission)");
        fout = deleteFs.create(deleteFs, file, FsPermission.getDefault());
        assertNotNull(fout);
        fout.close();
        files = meta.list(Collections.singletonList(file.getParent()));
        assertEquals(1, files.size());
        deleteFs.delete(file.getParent(), true);
        janitor.clearPath(testPath);
       
        System.out.println("create(Path, short, Progressable)");
        fout = deleteFs.create(file, (short)1, new Progressable(){
            @Override
            public void progress() {
            }
        });
        assertNotNull(fout);
        fout.close();
        files = meta.list(Collections.singletonList(file.getParent()));
        assertEquals(1, files.size());
        deleteFs.delete(file.getParent(), true);
        janitor.clearPath(testPath);
       
        System.out.println("create(Path, boolean, int, Progressable)");
        fout = deleteFs.create(file, true, 4096, new Progressable(){
            @Override
            public void progress() {
            }
        });
        assertNotNull(fout);
        fout.close();
        files = meta.list(Collections.singletonList(file.getParent()));
        assertEquals(1, files.size());
        deleteFs.delete(file.getParent(), true);
        janitor.clearPath(testPath);
       
        System.out.println("create(Path, boolean, int, short, long)");
        fout = deleteFs.create(file, true, 4096, (short)1, 100000000);
        assertNotNull(fout);
        fout.close();
        files = meta.list(Collections.singletonList(file.getParent()));
        assertEquals(1, files.size());
        deleteFs.delete(file.getParent(), true);
        janitor.clearPath(testPath);
       
        System.out.println("create(Path, boolean, int, short, long, Progressable)");
        fout = deleteFs.create(file, true, 4096, (short)1, 100000000, new Progressable(){
            @Override
            public void progress() {
            }
        });
        assertNotNull(fout);

                // Trailing arguments of a Hive record-writer factory call (the parameter
                // list matches getHiveRecordWriter); the final argument is a no-op Progressable.
                jobConf,
                new Path(filePath),
                Text.class,
                compressionCodec != null,
                tableProperties,
                new Progressable()
                {
                    @Override
                    public void progress()
                    {
                    }

      // Creating a file directly through the DFSClient, passing permissions, replication,
      // and block size explicitly along with a no-op Progressable.
      int initialRefCount = ClientAdapter.getRefCount(client);
      String filename = "/file1";
      DFSClient.DFSOutputStream out = (DFSClient.DFSOutputStream)
        ((DistributedFileSystem) fileSystem).getClient().create(
          filename, FsPermission.getDefault(), true, (short) 5, 1024,
          new Progressable() {
            @Override
            public void progress() {
            }
          },
          64 * 1024
 

    // A test creating a file with forceSync enabled; the DFSClient.create call again
    // takes a no-op Progressable.
    String filename = "/testFileForceSync";
    boolean forceSync = true;
    DFSClient dfsClient = ((DistributedFileSystem) fileSystem).getClient();
    DFSClient.DFSOutputStream out = (DFSClient.DFSOutputStream)dfsClient.create(
        filename, FsPermission.getDefault(), true, true, REPLICATION_NUM, BLOCK_SIZE,
        new Progressable() {
          @Override
          public void progress() {
          }
        },
        BUFFER_SIZE,

    // The same pattern in a parallel-write test: DFSClient.create with a no-op Progressable.
    String filename = "/testFileParallelWrite";
    boolean doParallelWrites = true;
    DFSClient dfsClient = ((DistributedFileSystem) fileSystem).getClient();
    DFSClient.DFSOutputStream out = (DFSClient.DFSOutputStream)dfsClient.create(
        filename, FsPermission.getDefault(), true, true, REPLICATION_NUM, BLOCK_SIZE,
        new Progressable() {
          @Override
          public void progress() {
          }
        },
        BUFFER_SIZE,

        // Tail of a merge test: once merging finishes, getProgress() should read 1.0;
        // the helper below supplies the no-op Progressable used as the reporter.
        readsCounter, writesCounter, mergePhase);
    Assert.assertEquals(1.0f, mergeQueue.getProgress().get());
  }

  private Progressable getReporter() {
    Progressable reporter = new Progressable() {
      @Override
      public void progress() {
      }
    };
    return reporter;
  }
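
getReporter() above builds a no-op Progressable with an anonymous class. Since the interface declares a single abstract method, the same helper can be written as a lambda on Java 8 or later; a minimal sketch, not taken from the code above:

    import org.apache.hadoop.util.Progressable;

    public class NoOpProgress {
      // Equivalent to the anonymous-class reporter returned by getReporter().
      static Progressable reporter() {
        return () -> { };
      }
    }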
