Class org.apache.hadoop.hdfs.server.datanode.FSDatasetInterface.MetaDataInputStream

Examples of org.apache.hadoop.hdfs.server.datanode.FSDatasetInterface.MetaDataInputStream, the input stream returned by FSDatasetInterface.getMetaDataInputStream over a block's on-disk metadata (checksum) file; its getLength() reports that file's size. The excerpts below come from DataNode request-handling code and are truncated mid-method.


          IOUtils.closeStream(out);
        }
      }
    }

    final MetaDataInputStream metadataIn = datanode.data.getMetaDataInputStream(block);
    final DataInputStream checksumIn = new DataInputStream(new BufferedInputStream(
        metadataIn, BUFFER_SIZE));

    updateThreadName("getting checksum for block " + block);
    try {
      //read metadata file
      final BlockMetadataHeader header = BlockMetadataHeader.readHeader(checksumIn);
      final DataChecksum checksum = header.getChecksum();
      final int bytesPerCRC = checksum.getBytesPerChecksum();
      final long crcPerBlock = (metadataIn.getLength()
          - BlockMetadataHeader.getHeaderSize())/checksum.getChecksumSize();
     
      //compute block checksum
      final MD5Hash md5 = MD5Hash.digest(checksumIn);
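The pattern above recurs throughout these examples: the block's metadata stream is wrapped in a buffered DataInputStream, BlockMetadataHeader.readHeader yields the DataChecksum, crcPerBlock is derived from the stream length, and the remaining CRC bytes are fed to MD5. Below is a minimal, plain-JDK sketch of the same arithmetic; the 7-byte header, 512-byte chunk size, and 4-byte CRC width are illustrative assumptions rather than values read from a real .meta file, and java.security.MessageDigest stands in for Hadoop's MD5Hash.

import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

// Plain-JDK sketch of the checksum arithmetic shown above; the constants are
// assumptions for illustration, not values read from an actual metadata file.
public class BlockChecksumSketch {
  static final int HEADER_SIZE = 7;     // assumed BlockMetadataHeader size
  static final int BYTES_PER_CRC = 512; // assumed bytes per checksum chunk
  static final int CRC_SIZE = 4;        // CRC-32 occupies 4 bytes per chunk

  public static void main(String[] args) throws IOException, NoSuchAlgorithmException {
    // Stand-in for the on-disk metadata file: header followed by 8 CRC entries.
    byte[] metaFile = new byte[HEADER_SIZE + 8 * CRC_SIZE];
    long metaLength = metaFile.length;

    // Same arithmetic as crcPerBlock in the excerpt above.
    long crcPerBlock = (metaLength - HEADER_SIZE) / CRC_SIZE;

    // Wrap the metadata stream, skip past the header, and digest the CRC bytes,
    // mirroring MD5Hash.digest(checksumIn) after BlockMetadataHeader.readHeader.
    DataInputStream checksumIn =
        new DataInputStream(new BufferedInputStream(new ByteArrayInputStream(metaFile)));
    checksumIn.skipBytes(HEADER_SIZE);

    MessageDigest md5 = MessageDigest.getInstance("MD5");
    byte[] buf = new byte[4096];
    for (int n; (n = checksumIn.read(buf)) != -1; ) {
      md5.update(buf, 0, n);
    }
    System.out.println("bytesPerCRC = " + BYTES_PER_CRC + ", crcPerBlock = " + crcPerBlock);
    System.out.println("block checksum is " + md5.digest().length + " bytes of MD5");
  }
}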


        new ReadMetadataHeader(versionAndOpcode);
    readMetadataHeader.readFields(in);
    final int namespaceId = readMetadataHeader.getNamespaceId();
    Block block = new Block(readMetadataHeader.getBlockId(), 0,
        readMetadataHeader.getGenStamp());
    MetaDataInputStream checksumIn = null;
    DataOutputStream out = null;
    updateCurrentThreadName("reading metadata for block " + block);
    try {
      checksumIn = datanode.data.getMetaDataInputStream(namespaceId, block);
     
      long fileSize = checksumIn.getLength();

      if (fileSize >= 1L<<31 || fileSize <= 0) {
          throw new IOException("Unexpected size for checksumFile of block" +
                  block);
      }
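The guard above exists because the next step reads the whole checksum file into memory: a Java array length is a signed int, so any metadata file of 2^31 bytes or more (or with a non-positive reported length) is rejected with an IOException instead of being read. Below is a hedged sketch of that follow-up step with plain java.io; the readAll method and surrounding class are hypothetical, and the reply framing is omitted because it is not part of the excerpt.

import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;

// Sketch: once fileSize is known to fit in an int, the metadata stream can be
// drained into a single byte[]; the guard is exactly what makes the cast safe.
public class ReadWholeMetadataSketch {
  static byte[] readAll(DataInputStream checksumIn, long fileSize) throws IOException {
    if (fileSize >= 1L << 31 || fileSize <= 0) {
      throw new IOException("Unexpected size for checksum file: " + fileSize);
    }
    byte[] buf = new byte[(int) fileSize]; // safe: fileSize < 2^31
    checksumIn.readFully(buf);             // pull in the entire metadata file
    return buf;
  }

  public static void main(String[] args) throws IOException {
    byte[] fake = {1, 2, 3, 4, 5, 6, 7};   // stand-in metadata contents
    DataInputStream in = new DataInputStream(new ByteArrayInputStream(fake));
    System.out.println(readAll(in, fake.length).length + " bytes read");
  }
}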

    final int namespaceId = blockChecksumHeader.getNamespaceId();
    final Block block = new Block(blockChecksumHeader.getBlockId(), 0,
            blockChecksumHeader.getGenStamp());

    DataOutputStream out = null;
    final MetaDataInputStream metadataIn = datanode.data.getMetaDataInputStream(namespaceId, block);
    final DataInputStream checksumIn = new DataInputStream(new BufferedInputStream(
        metadataIn, BUFFER_SIZE));

    updateCurrentThreadName("getting checksum for block " + block);
    try {
      //read metadata file
      final BlockMetadataHeader header = BlockMetadataHeader.readHeader(checksumIn);
      final DataChecksum checksum = header.getChecksum();
      final int bytesPerCRC = checksum.getBytesPerChecksum();
      final long crcPerBlock = (metadataIn.getLength()
          - BlockMetadataHeader.getHeaderSize())/checksum.getChecksumSize();
     
      //compute block checksum
      final MD5Hash md5 = MD5Hash.digest(checksumIn);
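As a worked example of the crcPerBlock arithmetic above: for a 128 MB block with 512 bytes per checksum chunk (the common default, though the excerpt itself does not state it) and 4-byte CRC-32 values, the metadata file stores 134217728 / 512 = 262144 CRCs, so metadataIn.getLength() is the header size plus 262144 * 4 = 1048576 bytes, and the division by checksum.getChecksumSize() recovers 262144.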

   * Reads the metadata and sends the data in one 'DATA_CHUNK'.
   * @param in
   */
  void readMetadata(DataInputStream in) throws IOException {
    Block block = new Block( in.readLong(), 0 , in.readLong());
    MetaDataInputStream checksumIn = null;
    DataOutputStream out = null;
   
    try {

      checksumIn = datanode.data.getMetaDataInputStream(block);
     
      long fileSize = checksumIn.getLength();

      if (fileSize >= 1L<<31 || fileSize <= 0) {
          throw new IOException("Unexpected size for checksumFile of block" +
                  block);
      }
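This is the same routine as the namespace-aware excerpt earlier on the page, minus the namespace: the block id and generation stamp are read straight off the wire and the single-argument getMetaDataInputStream(block) overload is used, whereas the earlier variant resolves the metadata through getMetaDataInputStream(namespaceId, block). The length check that follows is identical in both.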

   */
  void getBlockChecksum(DataInputStream in) throws IOException {
    final Block block = new Block(in.readLong(), 0 , in.readLong());

    DataOutputStream out = null;
    final MetaDataInputStream metadataIn = datanode.data.getMetaDataInputStream(block);
    final DataInputStream checksumIn = new DataInputStream(new BufferedInputStream(
        metadataIn, BUFFER_SIZE));

    try {
      //read metadata file
      final BlockMetadataHeader header = BlockMetadataHeader.readHeader(checksumIn);
      final DataChecksum checksum = header.getChecksum();
      final int bytesPerCRC = checksum.getBytesPerChecksum();
      final long crcPerBlock = (metadataIn.getLength()
          - BlockMetadataHeader.getHeaderSize())/checksum.getChecksumSize();
     
      //compute block checksum
      final MD5Hash md5 = MD5Hash.digest(checksumIn);

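The excerpt again stops at the MD5 computation; the out stream declared above is presumably where bytesPerCRC, crcPerBlock, and the digest are written back to the requesting client in the untruncated method. Below is a sketch of packing those three values with plain java.io, under an assumed int/long/16-byte layout; it is an illustration only and is not claimed to match the actual DataTransferProtocol encoding.

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

// Sketch: bundling the values computed above into one reply buffer. The layout
// (int, long, then the raw 16-byte MD5) is an assumption for illustration.
public class ChecksumReplySketch {
  static byte[] encode(int bytesPerCRC, long crcPerBlock, byte[] md5) throws IOException {
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(bytes);
    out.writeInt(bytesPerCRC);   // chunk size the CRCs were computed over
    out.writeLong(crcPerBlock);  // how many CRCs the metadata file holds
    out.write(md5);              // MD5 over the concatenated CRC bytes
    out.flush();
    return bytes.toByteArray();
  }

  public static void main(String[] args) throws IOException {
    byte[] fakeMd5 = new byte[16]; // placeholder 16-byte digest
    System.out.println(encode(512, 262144, fakeMd5).length + " bytes in reply");
  }
}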

          IOUtils.closeStream(out);
        }
      }
    }

    final MetaDataInputStream metadataIn = datanode.data.getMetaDataInputStream(block);
    final DataInputStream checksumIn = new DataInputStream(new BufferedInputStream(
        metadataIn, BUFFER_SIZE));

    try {
      //read metadata file
      final BlockMetadataHeader header = BlockMetadataHeader.readHeader(checksumIn);
      final DataChecksum checksum = header.getChecksum();
      final int bytesPerCRC = checksum.getBytesPerChecksum();
      final long crcPerBlock = (metadataIn.getLength()
          - BlockMetadataHeader.getHeaderSize())/checksum.getChecksumSize();
     
      //compute block checksum
      final MD5Hash md5 = MD5Hash.digest(checksumIn);
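The stray IOUtils.closeStream(out) lines at the top of this excerpt and of the first one are the tail of the enclosing method's cleanup, typically a finally block that closes the reply stream and the metadata stream no matter how the request ends. Below is a plain-JDK sketch of that null-safe, non-throwing close; org.apache.hadoop.io.IOUtils.closeStream behaves along these lines.

import java.io.Closeable;
import java.io.IOException;

// Sketch of the null-safe close used in the excerpts' finally blocks: a null
// stream is ignored and failures during close are swallowed rather than rethrown.
public final class CloseQuietly {
  static void closeStream(Closeable c) {
    if (c == null) {
      return;
    }
    try {
      c.close();
    } catch (IOException ignored) {
      // cleanup failure is deliberately ignored here
    }
  }

  public static void main(String[] args) {
    closeStream(null); // safe no-op, mirroring IOUtils.closeStream(out) with out == null
  }
}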

