Examples of DFSDataInputStream


Examples of org.apache.hadoop.dfs.DFSClient.DFSDataInputStream

    FSDataInputStream in = fs.open(path);
    in.readFully(arr);
  }
 
  Block getFirstBlock(FileSystem fs, Path path) throws IOException {
    DFSDataInputStream in =
      (DFSDataInputStream) ((DistributedFileSystem)fs).open(path);
    in.readByte();
    return in.getCurrentBlock();
  }
View Full Code Here

Examples of org.apache.hadoop.dfs.DFSClient.DFSDataInputStream

    FSDataInputStream in = fs.open(path);
    in.readFully(arr);
  }
 
  Block getFirstBlock(FileSystem fs, Path path) throws IOException {
    DFSDataInputStream in =
      (DFSDataInputStream) ((DistributedFileSystem)fs).open(path);
    in.readByte();
    return in.getCurrentBlock();
  }
View Full Code Here

Examples of org.apache.hadoop.dfs.DFSClient.DFSDataInputStream

    fs.delete(root);
    files = null;
  }
 
  static Block getFirstBlock(FileSystem fs, Path path) throws IOException {
    DFSDataInputStream in =
      (DFSDataInputStream) ((DistributedFileSystem)fs).open(path);
    in.readByte();
    return in.getCurrentBlock();
 
View Full Code Here

Examples of org.apache.hadoop.dfs.DFSClient.DFSDataInputStream

    fs.delete(root, true);
    files = null;
  }
 
  static Block getFirstBlock(FileSystem fs, Path path) throws IOException {
    DFSDataInputStream in =
      (DFSDataInputStream) ((DistributedFileSystem)fs).open(path);
    in.readByte();
    return in.getCurrentBlock();
 
View Full Code Here

Examples of org.apache.hadoop.dfs.DFSClient.DFSDataInputStream

    fs.delete(root, true);
    files = null;
  }
 
  static Block getFirstBlock(FileSystem fs, Path path) throws IOException {
    DFSDataInputStream in =
      (DFSDataInputStream) ((DistributedFileSystem)fs).open(path);
    in.readByte();
    return in.getCurrentBlock();
 
View Full Code Here

Examples of org.apache.hadoop.hdfs.DFSClient.DFSDataInputStream

      assertTrue(status.getLen() == fileSize);
      assertEquals(fs.makeQualified(file1).toString(),
          status.getPath().toString());

      // test getVisibleLength
      DFSDataInputStream fin = (DFSDataInputStream)fs.open(file1);
      assertEquals(status.getLen(), fin.getVisibleLength());
     
      // test listStatus on a file
      FileStatus[] stats = fs.listStatus(file1);
      assertEquals(1, stats.length);
      status = stats[0];
View Full Code Here

Examples of org.apache.hadoop.hdfs.DFSClient.DFSDataInputStream

    fs.delete(root, true);
    files = null;
  }
 
  public static Block getFirstBlock(FileSystem fs, Path path) throws IOException {
    DFSDataInputStream in =
      (DFSDataInputStream) ((DistributedFileSystem)fs).open(path);
    in.readByte();
    return in.getCurrentBlock();
 
View Full Code Here

Examples of org.apache.hadoop.hdfs.DFSClient.DFSDataInputStream

    this.streams = new InputStream[streams.length];
    this.endOffsets = new long[streams.length];
    for (int i = 0; i < streams.length; i++) {
      this.streams[i] = streams[i];
      if (this.streams[i] instanceof DFSDataInputStream) {
        DFSDataInputStream stream = (DFSDataInputStream)this.streams[i];
        // in directory raiding, the block size for each input stream
        // might be different, so we need to determine the endOffset of
        // each stream by their own block size.
        List<LocatedBlock> blocks = stream.getAllBlocks();
        if (blocks.size() == 0) {
          this.endOffsets[i] = Long.MAX_VALUE;
        } else {
          this.endOffsets[i] = stream.getPos() + blocks.get(0).getBlockSize();
        }
      } else {
        this.endOffsets[i] = Long.MAX_VALUE;
      }
      streams[i] = null; // Take over ownership of streams.
View Full Code Here

Examples of org.apache.hadoop.hdfs.DFSClient.DFSDataInputStream

  public void testRead() throws Exception{
    for(int i = 0; i < TEST_FILE_NUM; ++i) {
      String file = "/tmp" + i +".txt";
      DFSTestUtil.createFile(fs, new Path(file), FILE_LEN, (short)5, 1L);
     
      DFSDataInputStream in = (DFSDataInputStream)fs.open(new Path(file));
      int numOfRead = 0;
      while(in.read() > 0){
        numOfRead ++;
      }
      assertEquals(FILE_LEN * (i+1),
          metrics.readSize.getCurrentIntervalValue());
      assertEquals(numOfRead * (i+1),
View Full Code Here

Examples of org.apache.hadoop.hdfs.DFSClient.DFSDataInputStream

    switch(op.getValue()) {
    case OPEN:
    {
      final int b = bufferSize.getValue(conf);
      final DFSClient dfsclient = new DFSClient(nnRpcAddr, conf);
      DFSDataInputStream in = null;
      try {
        in = new DFSClient.DFSDataInputStream(
        dfsclient.open(fullpath, b, true, null));
        in.seek(offset.getValue());
      } catch(IOException ioe) {
        IOUtils.cleanup(LOG, in);
        IOUtils.cleanup(LOG, dfsclient);
        throw ioe;
      }
      final DFSDataInputStream dis = in;
      final StreamingOutput streaming = new StreamingOutput() {
        @Override
        public void write(final OutputStream out) throws IOException {
          final Long n = length.getValue();
          DFSDataInputStream dfsin = dis;
          DFSClient client = dfsclient;
          try {
            if (n == null) {
              IOUtils.copyBytes(dfsin, out, b);
            } else {
              IOUtils.copyBytes(dfsin, out, n, b, false);
            }
            dfsin.close();
            dfsin = null;
            client.close();
            client = null;
          } finally {
            IOUtils.cleanup(LOG, dfsin);
View Full Code Here
TOP
Copyright © 2018 www.massapi.com. All rights reserved.
All source code are property of their respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.