Package org.apache.hadoop.fs

Examples of org.apache.hadoop.fs.FSDataInputStream
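The snippets below are drawn from the Hadoop code base and its tests. As a baseline, a minimal sketch of the usual pattern, assuming a default Configuration and a hypothetical file at /tmp/example.txt, looks like this: resolve the FileSystem for a Path, call open() to get an FSDataInputStream, read, and close. FSDataInputStream is a Closeable DataInputStream that also implements Seekable and PositionedReadable, so it supports both sequential and positioned reads.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class FSDataInputStreamExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    Path path = new Path("/tmp/example.txt");     // hypothetical path, for illustration only
    FileSystem fs = path.getFileSystem(conf);     // resolve the FileSystem for the path's scheme

    try (FSDataInputStream in = fs.open(path)) {  // FSDataInputStream is Closeable
      byte[] buf = new byte[(int) fs.getFileStatus(path).getLen()];
      in.readFully(0, buf);                       // positioned read: fill buf starting at offset 0
    }
  }
}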


  // From the legacy S3 block-based FileSystem: wrap the store-backed stream in FSDataInputStream.
  @Override
  public FSDataInputStream open(Path path, int bufferSize) throws IOException {
    INode inode = checkFile(path);
    return new FSDataInputStream(new S3InputStream(getConf(), store, inode,
                                                   statistics));
  }


  /**
   * Convenience method for reading a token storage file and returning the
   * credentials it contains.
   * @throws IOException
   */
  public static Credentials readTokenStorageFile(Path filename,
                                                 Configuration conf
                                                 ) throws IOException {
    FSDataInputStream in = null;
    Credentials credentials = new Credentials();
    try {
      in = filename.getFileSystem(conf).open(filename);
      credentials.readTokenStorageStream(in);
      in.close();
      return credentials;
    } catch(IOException ioe) {
      IOUtils.cleanup(LOG, in);
      throw new IOException("Exception reading " + filename, ioe);
    }
  }
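
The same read can be written with try-with-resources instead of the explicit close()/IOUtils.cleanup() bookkeeping. A sketch of such a variant (not the actual Hadoop method, but using the same Credentials API shown above):

  public static Credentials readTokenStorageFile(Path filename, Configuration conf)
      throws IOException {
    Credentials credentials = new Credentials();
    // The stream is closed automatically, whether or not reading succeeds.
    try (FSDataInputStream in = filename.getFileSystem(conf).open(filename)) {
      credentials.readTokenStorageStream(in);
    }
    return credentials;
  }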

    out.close();

    assertTrue("Exists", fs.exists(path));
    assertEquals("Length", len, fs.getFileStatus(path).getLen());

    FSDataInputStream in = fs.open(path);
    byte[] buf = new byte[len];
    in.readFully(0, buf);
    in.close();

    assertEquals(len, buf.length);
    for (int i = 0; i < buf.length; i++) {
      assertEquals("Position " + i, data[i], buf[i]);
    }

  public void testInputStreamClosedTwice() throws IOException {
    // HADOOP-4760: according to Closeable#close(), closing an already-closed
    // stream should have no effect.
    Path src = path("/test/hadoop/file");
    createFile(src);
    FSDataInputStream in = fs.open(src);
    in.close();
    in.close();
  }

    //Configuration conf = new Configuration();
    Path root = new Path(topdir);
   
    for (int idx = 0; idx < nFiles; idx++) {
      Path fPath = new Path(root, files[idx].getName());
      FSDataInputStream in = fs.open(fPath);
      byte[] toRead = new byte[files[idx].getSize()];
      byte[] toCompare = new byte[files[idx].getSize()];
      Random rb = new Random(files[idx].getSeed());
      rb.nextBytes(toCompare);
      in.readFully(0, toRead);
      in.close();
      for (int i = 0; i < toRead.length; i++) {
        if (toRead[i] != toCompare[i]) {
          return false;
        }
      }


  private void checkFile(FileSystem fs, Path name, int len) throws IOException {
    FSDataInputStream stm = fs.open(name);
    byte[] actual = new byte[len];
    stm.readFully(0, actual);
    checkData(actual, 0, fileContents, "Read 2");
    stm.close();
  }

    // From NativeS3FileSystem.open(): stream an S3 object through a buffered wrapper.
    if (!exists(f)) {
      throw new FileNotFoundException(f.toString());
    }
    Path absolutePath = makeAbsolute(f);
    String key = pathToKey(absolutePath);
    return new FSDataInputStream(new BufferedFSInputStream(
        new NativeS3FsInputStream(store, statistics, store.retrieve(key), key), bufferSize));
  }

    for (int idx = 0; idx < files.length; idx++) {
      Path fPath = new Path(root, files[idx].getName());
      try {
        fs.getFileStatus(fPath);
        FSDataInputStream in = fs.open(fPath);
        byte[] toRead = new byte[files[idx].getSize()];
        byte[] toCompare = new byte[files[idx].getSize()];
        Random rb = new Random(files[idx].getSeed());
        rb.nextBytes(toCompare);
        assertEquals("Cannnot read file.", toRead.length, in.read(toRead));
        in.close();
        for (int i = 0; i < toRead.length; i++) {
          if (toRead[i] != toCompare[i]) {
            return false;
          }
        }
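
Note that, unlike the readFully() calls in the other examples, InputStream.read(byte[]) is not guaranteed to fill the buffer in a single call, so the assertion above relies on the whole file arriving in one read; readFully(0, toRead) would make the check independent of how the bytes happen to be delivered.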

            }
        } finally {
            output.close();
        }
        FileStatus status = fs.getFileStatus(new Path(temp.toURI()));
        FSDataInputStream src = fs.open(status.getPath());
        try {
            OutputStream dst = writer.openNext(status);
            byte[] buf = new byte[256];
            while (true) {
                int read = src.read(buf);
                if (read < 0) {
                    break;
                }
                dst.write(buf, 0, read);
            }
            dst.close();
        } finally {
            src.close();
        }
    }

            return open(f, 4096);
        }

        @Override
        public FSDataInputStream open(Path f, int bufferSize) throws IOException {
            return new FSDataInputStream(
                    new WrappedInputStream(
                            new BufferedInputStream(input, bufferSize)));
        }
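
Both custom open() implementations above depend on the wrapped stream satisfying FSDataInputStream's contract: its constructor rejects an InputStream that does not also implement Seekable and PositionedReadable. A minimal, hypothetical in-memory example of such a stream, built by subclassing org.apache.hadoop.fs.FSInputStream (which provides default positioned-read implementations on top of seek() and read()), might look like this:

import java.io.IOException;
import org.apache.hadoop.fs.FSInputStream;

// Sketch only: an in-memory stream that can legally be wrapped in FSDataInputStream.
class ByteArraySeekableStream extends FSInputStream {
  private final byte[] data;
  private int pos;

  ByteArraySeekableStream(byte[] data) {
    this.data = data;
  }

  @Override
  public int read() {
    return pos < data.length ? (data[pos++] & 0xff) : -1;
  }

  @Override
  public void seek(long newPos) throws IOException {
    if (newPos < 0 || newPos > data.length) {
      throw new IOException("Seek out of range: " + newPos);
    }
    pos = (int) newPos;
  }

  @Override
  public long getPos() {
    return pos;
  }

  @Override
  public boolean seekToNewSource(long targetPos) {
    return false;  // there is only one copy of the data
  }
}

Wrapping it is then simply new FSDataInputStream(new ByteArraySeekableStream(bytes)), after which seek(), getPos() and the positioned readFully() overloads all operate on the byte array.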
