Package org.apache.hadoop.io

Examples of org.apache.hadoop.io.MD5Hash$Comparator
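org.apache.hadoop.io.MD5Hash is a Writable wrapping a 16-byte MD5 digest. Its nested Comparator extends WritableComparator and is registered in a static initializer, so serialized MD5Hash keys sort on their raw digest bytes without being deserialized. None of the snippets below use the Comparator directly, so here is a minimal, self-contained sketch of how it is obtained and applied (the class name Md5ComparatorDemo is hypothetical, not taken from any snippet):

import org.apache.hadoop.io.MD5Hash;
import org.apache.hadoop.io.WritableComparator;

public class Md5ComparatorDemo {
  public static void main(String[] args) {
    // digest(String) hashes the string's UTF-8 bytes.
    MD5Hash a = MD5Hash.digest("first");
    MD5Hash b = MD5Hash.digest("second");

    // WritableComparator.get(...) returns the comparator that MD5Hash
    // registered for itself; it orders hashes by their digest bytes.
    WritableComparator cmp = WritableComparator.get(MD5Hash.class);
    System.out.println("compare = " + cmp.compare(a, b));
    System.out.println("a = " + a + ", b = " + b);
  }
}

The same byte-level comparator is what Hadoop's sort phase picks up whenever MD5Hash appears as a map output key.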

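The HDFS NameNode verifies its on-disk namespace image against a stored digest. This snippet (apparently from FSImage in the NameNode code) refuses to load an image file whose sidecar .md5 file is missing: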

  /**
   * Load the image namespace from the given image file, verifying
   * it against the MD5 sum stored in its associated .md5 file.
   */
  private void loadFSImage(File imageFile, FSNamesystem target,
      MetaRecoveryContext recovery) throws IOException {
    MD5Hash expectedMD5 = MD5FileUtils.readStoredMd5ForFile(imageFile);
    if (expectedMD5 == null) {
      throw new IOException("No MD5 file found corresponding to image file "
          + imageFile);
    }
    loadFSImage(imageFile, expectedMD5, target, recovery);
  }

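After the image is loaded, the digest computed over its bytes is compared with the expected value; a mismatch means the file is corrupt: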

    loader.load(curFile);
    target.setBlockPoolId(this.getBlockPoolID());

    // Check that the image digest we loaded matches up with what
    // we expected
    MD5Hash readImageMd5 = loader.getLoadedImageMd5();
    if (expectedMd5 != null &&
        !expectedMd5.equals(readImageMd5)) {
      throw new IOException("Image file " + curFile +
          " is corrupt with MD5 checksum of " + readImageMd5 +
          " but expecting " + expectedMd5);
    }

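On the DataNode side (this looks like the block-checksum handler in DataXceiver), an MD5 is computed over a block's stored CRC metadata and written back to the client along with the CRC layout parameters; this per-block MD5-over-CRCs is what feeds HDFS's MD5-of-CRC file checksum: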
      final int bytesPerCRC = checksum.getBytesPerChecksum();
      final long crcPerBlock = (metadataIn.getLength()
          - BlockMetadataHeader.getHeaderSize())/checksum.getChecksumSize();
     
      //compute block checksum
      final MD5Hash md5 = MD5Hash.digest(checksumIn);

      if (LOG.isDebugEnabled()) {
        LOG.debug("block=" + block + ", bytesPerCRC=" + bytesPerCRC
            + ", crcPerBlock=" + crcPerBlock + ", md5=" + md5);
      }

      //write reply
      out = new DataOutputStream(
          NetUtils.getOutputStream(s, datanode.socketWriteTimeout));
      out.writeShort(DataTransferProtocol.OP_STATUS_SUCCESS);
      out.writeInt(bytesPerCRC);
      out.writeLong(crcPerBlock);
      md5.write(out);
      out.flush();
    } finally {
      IOUtils.closeStream(out);
      IOUtils.closeStream(checksumIn);
      IOUtils.closeStream(metadataIn);
    }

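While an image file is read, a MessageDigest can be threaded through the input stream; once loading completes, the finished digest is wrapped in an MD5Hash: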
      } finally {
        in.close();
      }

      if (digester != null)
        imgDigest = new MD5Hash(digester.digest());
      loaded = true;
     
      LOG.info("Image file of size " + curFile.length() + " loaded in "
          + (now() - startTime)/1000 + " seconds.");
    }

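Saving is symmetrical: after the output stream is closed, the digest accumulated during the write is recorded as the checksum of the new image file: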
        out.close();
      }

      saved = true;
      // set md5 of the saved image
      savedDigest = new MD5Hash(digester.digest());

      LOG.info("Image file: " + newFile + " of size " + newFile.length()
          + " saved in " + (now() - startTime)/1000 + " seconds.");
    }

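MD5Hash also serves as a document fingerprint. This Nutch test (apparently a DeleteDuplicates test) runs deduplication and then checks that the surviving Lucene document carries the expected digest: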
    DeleteDuplicates dedup = new DeleteDuplicates(conf);
    dedup.dedup(new Path[]{index2});
    FsDirectory dir = new FsDirectory(fs, new Path(index2, "part-0000"), false, conf);
    IndexReader reader = IndexReader.open(dir);
    assertEquals("only one doc left", reader.numDocs(), 1);
    MD5Hash hash = MD5Hash.digest("2");
    for (int i = 0; i < reader.maxDoc(); i++) {
      if (reader.isDeleted(i)) {
        System.out.println("-doc " + i + " deleted");
        continue;
      }
      Document doc = reader.document(i);
      // make sure we got the right one
      assertEquals("check hash", hash.toString(), doc.get("digest"));
      System.out.println(doc);
    }
    reader.close();
  }

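A later portion of what appears to be the same test reopens the deduplicated index and repeats the check: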
    }
    reader.close();
    dir = new FsDirectory(fs, new Path(index2, "part-0000"), false, conf);
    reader = IndexReader.open(dir);
    assertEquals("only one doc left", reader.numDocs(), 1);
    MD5Hash hash = MD5Hash.digest("2");
    for (int i = 0; i < reader.maxDoc(); i++) {
      if (reader.isDeleted(i)) {
        System.out.println("-doc " + i + " deleted");
        continue;
      }
      Document doc = reader.document(i);
      // make sure we got the right one
      assertEquals("check hash", hash.toString(), doc.get("digest"));
      System.out.println(doc);
    }
    reader.close();
  }

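When reading the storage VERSION file, the NameNode enforces that an image digest is present exactly when the layout version requires one (layout versions are negative and decrease as the format evolves, so layoutVersion <= -26 selects version -26 and newer):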
    if (layoutVersion <= -26) {
      if (sMd5 == null) {
        throw new InconsistentFSStateException(sd.getRoot(),
            "file " + STORAGE_FILE_VERSION + " does not have MD5 image digest.");
      }
      this.setImageDigest(new MD5Hash(sMd5));
    } else if (sMd5 != null) {
      throw new InconsistentFSStateException(sd.getRoot(),
          "file " + STORAGE_FILE_VERSION +
          " has image MD5 digest when version is " + layoutVersion);
    }

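An older load path: if no digest was recorded yet, the freshly computed one is adopted as this image's checksum; otherwise a mismatch marks the image as corrupt: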
    assert curFile != null : "curFile is null";

    FSImageFormat.Loader loader = new FSImageFormat.Loader(namesystem.getConf(), namesystem, this);
    loader.load(curFile, null);
    editLog.setStartTransactionId(loader.getLoadedImageTxId() + 1);
    MD5Hash readImageMd5 = loader.getLoadedImageMd5();
   
    if (this.newImageDigest) {
      this.setImageDigest(readImageMd5); // set this fsimage's checksum
    } else if (!this.getImageDigest().equals(readImageMd5)) {
      throw new IOException("Image file " + curFile + " is corrupt!");
    }

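Finally, a checkpoint handoff: the digest carried in the checkpoint signature must match the digest of the checkpoint image before the image roll proceeds: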
  /**
   * Reopens the new edits file.
   *
   * @param newImageSignature the signature of the new image
   */
  void rollFSImage(CheckpointSignature newImageSignature) throws IOException {
    MD5Hash newImageDigest = newImageSignature.getImageDigest();
    if (!newImageDigest.equals(checkpointImageDigest)) {
      throw new IOException(
          "Checkpoint image is corrupt: expecting an MD5 checksum of" +
          newImageDigest + " but is " + checkpointImageDigest);
    }
    rollFSImage(newImageSignature.getImageDigest());
  }
