Package java.util.zip

Examples of java.util.zip.Adler32


        long nextTxId = recoveryFile.readLong();
        long expectedChecksum = recoveryFile.readLong();
        int pageCounter = recoveryFile.readInt();

        recoveryFile.seek(RECOVERY_FILE_HEADER_SIZE);
        Checksum checksum = new Adler32();
        LinkedHashMap<Long, byte[]> batch = new LinkedHashMap<Long, byte[]>();
        try {
            for (int i = 0; i < pageCounter; i++) {
                long offset = recoveryFile.readLong();
                byte[] data = new byte[pageSize];
                if (recoveryFile.read(data, 0, pageSize) != pageSize) {
                    // Invalid recovery record: could not fully read the data. Probably due to a partial write to the recovery buffer.
                    return nextTxId;
                }
                checksum.update(data, 0, pageSize);
                batch.put(offset, data);
            }
        } catch (Exception e) {
            // If an error occurred it was because the redo buffer was not fully written out correctly, so don't redo it,
            // as the pages should still be consistent.
            LOG.debug("Redo buffer was not fully intact: ", e);
            return nextTxId;
        }

        recoveryPageCount = pageCounter;

        // If the checksum is not valid then the recovery buffer was partially written to disk.
        if (checksum.getValue() != expectedChecksum) {
            return nextTxId;
        }

        // Re-apply all the writes in the recovery buffer.
        for (Map.Entry<Long, byte[]> e : batch.entrySet()) {
View Full Code Here


                // Now we can fill in the batch control record properly.
                buff.reset();
                buff.skip(5+Journal.BATCH_CONTROL_RECORD_MAGIC.length);
                buff.writeInt(sequence.getLength()-Journal.BATCH_CONTROL_RECORD_SIZE);
                if( journal.isChecksum() ) {
                    Checksum checksum = new Adler32();
                    checksum.update(sequence.getData(), sequence.getOffset()+Journal.BATCH_CONTROL_RECORD_SIZE, sequence.getLength()-Journal.BATCH_CONTROL_RECORD_SIZE);
                    buff.writeLong(checksum.getValue());
                }

                // Now do the 1 big write.
                file.seek(wb.offset);
                if (maxStat > 0) {
View Full Code Here

        dob.writeInt(0);                                    // all flags 0
        dob.writeInt(0x81A4);                               // mode
        dob.writeInt((int)(System.currentTimeMillis() / 1000)); // mtime
        dob.writeInt(0);                                    // gmtdiff ignored
        dob.writeByte(0);                                   // no filename
        Adler32 headerChecksum = new Adler32();
        headerChecksum.update(dob.getData(), 0, dob.getLength());
        int hc = (int)headerChecksum.getValue();
        dob.writeInt(hc);
        out.write(LZO_MAGIC);
        out.write(dob.getData(), 0, dob.getLength());
      } finally {
        dob.close();
View Full Code Here

      }
      if (!Arrays.equals(buf, LZO_MAGIC)) {
        throw new IOException("Invalid LZO header");
      }
      Arrays.fill(buf, (byte)0);
      Adler32 adler = new Adler32();
      CRC32 crc32 = new CRC32();
      int hitem = readHeaderItem(in, buf, 2, adler, crc32); // lzop version
      if (hitem > LZOP_VERSION) {
        LOG.debug("Compressed with later version of lzop: " +
            Integer.toHexString(hitem) + " (expected 0x" +
            Integer.toHexString(LZOP_VERSION) + ")");
      }
      hitem = readHeaderItem(in, buf, 2, adler, crc32); // lzo library version
      if (hitem > LzoDecompressor.LZO_LIBRARY_VERSION) {
        throw new IOException("Compressed with incompatible lzo version: 0x" +
            Integer.toHexString(hitem) + " (expected 0x" +
            Integer.toHexString(LzoDecompressor.LZO_LIBRARY_VERSION) + ")");
      }
      hitem = readHeaderItem(in, buf, 2, adler, crc32); // lzop extract version
      if (hitem > LZOP_VERSION) {
        throw new IOException("Compressed with incompatible lzop version: 0x" +
            Integer.toHexString(hitem) + " (expected 0x" +
            Integer.toHexString(LZOP_VERSION) + ")");
      }
      hitem = readHeaderItem(in, buf, 1, adler, crc32); // method
      if (hitem < 1 || hitem > 3) {
          throw new IOException("Invalid strategy: " +
              Integer.toHexString(hitem));
      }
      readHeaderItem(in, buf, 1, adler, crc32); // ignore level

      // flags
      hitem = readHeaderItem(in, buf, 4, adler, crc32);
      try {
        for (DChecksum f : dflags) {
          if (0 == (f.getHeaderMask() & hitem)) {
            dflags.remove(f);
          } else {
            dcheck.put(f, (int)f.getChecksumClass().newInstance().getValue());
          }
        }
        for (CChecksum f : cflags) {
          if (0 == (f.getHeaderMask() & hitem)) {
            cflags.remove(f);
          } else {
            ccheck.put(f, (int)f.getChecksumClass().newInstance().getValue());
          }
        }
      } catch (InstantiationException e) {
        throw new RuntimeException("Internal error", e);
      } catch (IllegalAccessException e) {
        throw new RuntimeException("Internal error", e);
      }
      ((LzopDecompressor)decompressor).initHeaderFlags(dflags, cflags);
      boolean useCRC32 = 0 != (hitem & 0x00001000);   // F_H_CRC32
      boolean extraField = 0 != (hitem & 0x00000040); // F_H_EXTRA_FIELD
      if (0 != (hitem & 0x400)) {                     // F_MULTIPART
        throw new IOException("Multipart lzop not supported");
      }
      if (0 != (hitem & 0x800)) {                     // F_H_FILTER
        throw new IOException("lzop filter not supported");
      }
      if (0 != (hitem & 0x000FC000)) {                // F_RESERVED
        throw new IOException("Unknown flags in header");
      }
      // known !F_H_FILTER, so no optional block

      readHeaderItem(in, buf, 4, adler, crc32); // ignore mode
      readHeaderItem(in, buf, 4, adler, crc32); // ignore mtime
      readHeaderItem(in, buf, 4, adler, crc32); // ignore gmtdiff
      hitem = readHeaderItem(in, buf, 1, adler, crc32); // fn len
      if (hitem > 0) {
        // skip filename
        readHeaderItem(in, new byte[hitem], hitem, adler, crc32);
      }
      int checksum = (int)(useCRC32 ? crc32.getValue() : adler.getValue());
      hitem = readHeaderItem(in, buf, 4, adler, crc32); // read checksum
      if (hitem != checksum) {
        throw new IOException("Invalid header checksum: " +
            Long.toHexString(checksum) + " (expected 0x" +
            Integer.toHexString(hitem) + ")");
      }
      if (extraField) { // lzop 1.08 ultimately ignores this
        LOG.debug("Extra header field not processed");
        adler.reset();
        crc32.reset();
        hitem = readHeaderItem(in, buf, 4, adler, crc32);
        readHeaderItem(in, new byte[hitem], hitem, adler, crc32);
        checksum = (int)(useCRC32 ? crc32.getValue() : adler.getValue());
        if (checksum != readHeaderItem(in, buf, 4, adler, crc32)) {
          throw new IOException("Invalid checksum for extra header field");
        }
      }
    }
View Full Code Here

                // Since we preallocate, we define EOF to be an
                // empty transaction
                System.out.println("EOF reached after " + count + " txns.");
                return;
            }
            Checksum crc = new Adler32();
            crc.update(bytes, 0, bytes.length);
            if (crcValue != crc.getValue()) {
                throw new IOException("CRC doesn't match " + crcValue +
                        " vs " + crc.getValue());
            }
            InputArchive iab = BinaryInputArchive
                                .getArchive(new ByteArrayInputStream(bytes));
            TxnHeader hdr = new TxnHeader();
            SerializeUtils.deserializeTxn(iab, hdr);
View Full Code Here

    private boolean getCheckSum(FileSnap snap, File snapFile) throws IOException {
        DataTree dt = new DataTree();
        Map<Long, Integer> sessions = new ConcurrentHashMap<Long, Integer>();
        InputStream snapIS = new BufferedInputStream(new FileInputStream(
                snapFile));
        CheckedInputStream crcIn = new CheckedInputStream(snapIS, new Adler32());
        InputArchive ia = BinaryInputArchive.getArchive(crcIn);
        try {
            snap.deserialize(dt, sessions, ia);
        } catch (IOException ie) {
            // we failed on the most recent snapshot
View Full Code Here

            InputStream snapIS = null;
            CheckedInputStream crcIn = null;
            try {
                LOG.info("Reading snapshot " + snap);
                snapIS = new BufferedInputStream(new FileInputStream(snap));
                crcIn = new CheckedInputStream(snapIS, new Adler32());
                InputArchive ia = BinaryInputArchive.getArchive(crcIn);
                deserialize(dt,sessions, ia);
                long checkSum = crcIn.getChecksum().getValue();
                long val = ia.readLong("val");
                if (val != checkSum) {
View Full Code Here

     */
    public synchronized void serialize(DataTree dt, Map<Long, Integer> sessions, File snapShot)
            throws IOException {
        if (!close) {
            OutputStream sessOS = new BufferedOutputStream(new FileOutputStream(snapShot));
            CheckedOutputStream crcOut = new CheckedOutputStream(sessOS, new Adler32());
            //CheckedOutputStream cout = new CheckedOutputStream()
            OutputArchive oa = BinaryOutputArchive.getArchive(crcOut);
            FileHeader header = new FileHeader(SNAP_MAGIC, VERSION, dbId);
            serialize(dt,sessions,oa, header);
            long val = crcOut.getChecksum().getValue();
View Full Code Here

    /**
     * creates a checksum alogrithm to be used
     * @return the checksum used for this txnlog
     */
    protected Checksum makeChecksumAlgorithm(){
        return new Adler32();
    }
View Full Code Here

        /**
         * create a checksum algorithm
         * @return the checksum algorithm
         */
        protected Checksum makeChecksumAlgorithm(){
            return new Adler32();
        }
View Full Code Here

TOP

Related Classes of java.util.zip.Adler32

Copyright © 2018 www.massapicom. All rights reserved.
All source code are property of their respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.