Examples of MD5MD5CRC32FileChecksum


Examples of org.apache.hadoop.fs.MD5MD5CRC32FileChecksum

      return Response.ok(streaming).type(
          MediaType.APPLICATION_OCTET_STREAM).build();
    }
    case GETFILECHECKSUM:
    {
      MD5MD5CRC32FileChecksum checksum = null;
      DFSClient dfsclient = new DFSClient(nnRpcAddr, conf);
      try {
        checksum = dfsclient.getFileChecksum(fullpath);
        dfsclient.close();
        dfsclient = null;
View Full Code Here

Examples of org.apache.hadoop.fs.MD5MD5CRC32FileChecksum

      return Response.ok(new OpenEntity(in, n, dfsclient)).type(
          MediaType.APPLICATION_OCTET_STREAM).build();
    }
    case GETFILECHECKSUM:
    {
      MD5MD5CRC32FileChecksum checksum = null;
      DFSClient dfsclient = new DFSClient(nnRpcAddr, conf);
      try {
        checksum = dfsclient.getFileChecksum(fullpath);
        dfsclient.close();
        dfsclient = null;
View Full Code Here

Examples of org.apache.hadoop.fs.MD5MD5CRC32FileChecksum

     
      try {
        final DFSClient dfs = DatanodeJspHelper.getDFSClient(request,
            datanode, conf, getUGI(request, conf));
        final ClientProtocol nnproxy = dfs.getNamenode();
        final MD5MD5CRC32FileChecksum checksum = DFSClient.getFileChecksum(
            path, nnproxy, socketFactory, socketTimeout);
        MD5MD5CRC32FileChecksum.write(xml, checksum);
      } catch(IOException ioe) {
        writeXml(ioe, path, xml);
      } catch (InterruptedException e) {
View Full Code Here

Examples of org.apache.hadoop.fs.MD5MD5CRC32FileChecksum

      }
      out1.close();
      out2.close();

      // the two checksums must be different.
      MD5MD5CRC32FileChecksum sum1 =
          (MD5MD5CRC32FileChecksum)dfs.getFileChecksum(path1);
      MD5MD5CRC32FileChecksum sum2 =
          (MD5MD5CRC32FileChecksum)dfs.getFileChecksum(path2);
      assertFalse(sum1.equals(sum2));

      // check the individual params
      assertEquals(DataChecksum.Type.CRC32C, sum1.getCrcType());
      assertEquals(DataChecksum.Type.CRC32,  sum2.getCrcType());

    } finally {
      if (cluster != null) {
        cluster.getFileSystem().delete(testBasePath, true);
        cluster.shutdown();
View Full Code Here

Examples of org.apache.hadoop.fs.MD5MD5CRC32FileChecksum

      }
    }

    //compute file MD5
    final MD5Hash fileMD5 = MD5Hash.digest(md5out.getData());
    return new MD5MD5CRC32FileChecksum(bytesPerCRC, crcPerBlock, fileMD5);
  }
View Full Code Here

Examples of org.apache.hadoop.fs.MD5MD5CRC32FileChecksum

        long lFileSize = 0;
        int iBlockCount = 0;
        DataOutputBuffer md5outDataBuffer = new DataOutputBuffer();
        DataChecksum chksm = DataChecksum.newDataChecksum(DataChecksum.CHECKSUM_CRC32, 512);
        InputStream in = null;
        MD5MD5CRC32FileChecksum returnChecksum = null;
        long crc_per_block = lBlockSize / bytesPerCRC;

        java.io.File file = new java.io.File(strPath);

        // FileStatus f_stats = srcFs.getFileStatus( srcPath );
        lFileSize = file.length();

        iBlockCount = (int) Math.ceil((double) lFileSize / (double) lBlockSize);

        // System.out.println( "Debug > getLen == " + f_stats.getLen() +
        // " bytes" );
        // System.out.println( "Debug > iBlockCount == " + iBlockCount );

        if (file.isDirectory())
        {
            throw new IOException("Cannot compute local hdfs hash, " + strPath
                                  + " is a directory! ");
        }

        try
        {
            in = new FileInputStream(file);
            long lTotalBytesRead = 0;

            for (int x = 0; x < iBlockCount; x++) {

                ByteArrayOutputStream ar_CRC_Bytes = new ByteArrayOutputStream();

                byte crc[] = new byte[4];
                byte buf[] = new byte[512];

                try {

                    int bytesRead = 0;

                    while ((bytesRead = in.read(buf)) > 0) {

                        lTotalBytesRead += bytesRead;

                        chksm.reset();
                        chksm.update(buf, 0, bytesRead);
                        chksm.writeValue(crc, 0, true);
                        ar_CRC_Bytes.write(crc);

                        if (lTotalBytesRead >= (x + 1) * lBlockSize) {

                            break;
                        }

                    } // while

                    DataInputStream inputStream = new DataInputStream(
                                                                      new ByteArrayInputStream(ar_CRC_Bytes.toByteArray()));

                    // this actually computes one ---- run on the server
                    // (DataXceiver) side
                    final MD5Hash md5_dataxceiver = MD5Hash.digest(inputStream);
                    md5_dataxceiver.write(md5outDataBuffer);

                } catch (IOException e) {

                    e.printStackTrace();

                } catch (Exception e) {

                    e.printStackTrace();

                }

            } // for

            // this is in 0.19.0 style with the extra padding bug
            final MD5Hash md5_of_md5 = MD5Hash.digest(md5outDataBuffer
                                                      .getData());
            returnChecksum = new MD5MD5CRC32FileChecksum(bytesPerCRC,
                                                         crc_per_block, md5_of_md5);

        } catch (IOException e) {
            e.printStackTrace();
        } catch (Exception e) {
View Full Code Here

Examples of org.apache.hadoop.fs.MD5MD5CRC32FileChecksum

    final DataInputStream in = new DataInputStream(new ByteArrayInputStream(bytes));
    final int bytesPerCRC = in.readInt();
    final long crcPerBlock = in.readLong();
    final MD5Hash md5 = MD5Hash.read(in);
    final MD5MD5CRC32FileChecksum checksum = new MD5MD5CRC32FileChecksum(
        bytesPerCRC, crcPerBlock, md5);

    //check algorithm name
    final String alg = "MD5-of-" + crcPerBlock + "MD5-of-" + bytesPerCRC + "CRC32";
    if (!alg.equals(algorithm)) {
      throw new IOException("Algorithm not matched: algorithm=" + algorithm
          + ", crcPerBlock=" + crcPerBlock
          + ", bytesPerCRC=" + bytesPerCRC);
    }
    //check length
    if (length != checksum.getLength()) {
      throw new IOException("Length not matched: length=" + length
          + ", checksum.getLength()=" + checksum.getLength());
    }

    return checksum;
  }
View Full Code Here

Examples of org.apache.hadoop.fs.MD5MD5CRC32FileChecksum

          public ClientProtocol run() throws IOException {
            return DFSClient.createNamenode(conf);
          }
        });
       
        final MD5MD5CRC32FileChecksum checksum = DFSClient.getFileChecksum(
            filename, nnproxy, socketFactory, socketTimeout);
        MD5MD5CRC32FileChecksum.write(xml, checksum);
      } catch(IOException ioe) {
        writeXml(ioe, filename, xml);
      } catch (InterruptedException e) {
View Full Code Here

Examples of org.apache.hadoop.fs.MD5MD5CRC32FileChecksum

    final byte[] bytes = StringUtils.hexStringToByte((String)m.get("bytes"));

    final DataInputStream in = new DataInputStream(new ByteArrayInputStream(bytes));
    final DataChecksum.Type crcType =
        MD5MD5CRC32FileChecksum.getCrcTypeFromAlgorithmName(algorithm);
    final MD5MD5CRC32FileChecksum checksum;

    // Recreate what DFSClient would have returned.
    switch(crcType) {
      case CRC32:
        checksum = new MD5MD5CRC32GzipFileChecksum();
        break;
      case CRC32C:
        checksum = new MD5MD5CRC32CastagnoliFileChecksum();
        break;
      default:
        throw new IOException("Unknown algorithm: " + algorithm);
    }
    checksum.readFields(in);

    //check algorithm name
    if (!checksum.getAlgorithmName().equals(algorithm)) {
      throw new IOException("Algorithm not matched. Expected " + algorithm
          + ", Received " + checksum.getAlgorithmName());
    }
    //check length
    if (length != checksum.getLength()) {
      throw new IOException("Length not matched: length=" + length
          + ", checksum.getLength()=" + checksum.getLength());
    }

    return checksum;
  }
View Full Code Here

Examples of org.apache.hadoop.fs.MD5MD5CRC32FileChecksum

    final DataInputStream in = new DataInputStream(new ByteArrayInputStream(bytes));
    final int bytesPerCRC = in.readInt();
    final long crcPerBlock = in.readLong();
    final MD5Hash md5 = MD5Hash.read(in);
    final MD5MD5CRC32FileChecksum checksum = new MD5MD5CRC32FileChecksum(
        bytesPerCRC, crcPerBlock, md5);

    //check algorithm name
    final String alg = "MD5-of-" + crcPerBlock + "MD5-of-" + bytesPerCRC + "CRC32";
    if (!alg.equals(algorithm)) {
      throw new IOException("Algorithm not matched: algorithm=" + algorithm
          + ", crcPerBlock=" + crcPerBlock
          + ", bytesPerCRC=" + bytesPerCRC);
    }
    //check length
    if (length != checksum.getLength()) {
      throw new IOException("Length not matched: length=" + length
          + ", checksum.getLength()=" + checksum.getLength());
    }

    return checksum;
  }
View Full Code Here
TOP
Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc., which is owned by Oracle Inc. Contact coftware#gmail.com.