Examples of FileSummary


Examples of com.sleepycat.je.cleaner.FileSummary
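FileSummaryLN is the log entry JE uses to persist per-file utilization data. Its no-argument constructor allocates an empty FileSummary and an empty PackedOffsets, both filled in later when the entry is materialized from the log: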

    /**
     * Creates an empty LN to be filled in from the log.
     */
    public FileSummaryLN() {
        baseSummary = new FileSummary();
        obsoleteOffsets = new PackedOffsets();
    }
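
FileSummary itself is essentially a bag of per-log-file counters. A minimal sketch of populating one, assuming the public counter fields (totalCount, totalSize, obsoleteLNCount) that the JE source exposes; verify the names against your JE version:

    FileSummary fs = new FileSummary();
    // Assumed public fields, not a verified API.
    fs.totalCount = 10;       // log entries written to this .jdb file
    fs.totalSize = 4096;      // bytes occupied by those entries
    fs.obsoleteLNCount = 3;   // LNs already superseded or deleted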

Examples of com.sleepycat.je.cleaner.FileSummary
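A pre-generics traversal of a map from file number to FileSummary: each entry is cast out of the raw map and wrapped in a Summary helper: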

    Iterator iter = map.entrySet().iterator();
    while (iter.hasNext()) {
        Map.Entry entry = (Map.Entry) iter.next();
        Long fileNum = (Long) entry.getKey();
        FileSummary fs = (FileSummary) entry.getValue();
        Summary summary = new Summary(fileNum, fs);
        if (summaries != null) {
            summaries[fileIndex] = summary;
        }
        if (details) {

Examples of com.sleepycat.je.cleaner.FileSummary
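The same constructor as it appeared in an older JE release, where it still declared DatabaseException: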

    /**
     * Creates an empty LN to be filled in from the log.
     */
    public FileSummaryLN()
        throws DatabaseException {
        baseSummary = new FileSummary();
        obsoleteOffsets = new PackedOffsets();
    }

Examples of com.sleepycat.je.cleaner.FileSummary
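A later, generified version of the traversal. Here each file's summary can be paired with a recalculated summary taken from an optional recalcMap, presumably for cross-checking the stored counts: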

        Iterator<Map.Entry<Long,FileSummary>> iter = map.entrySet().iterator();
        while (iter.hasNext()) {
            Map.Entry<Long,FileSummary> entry = iter.next();
            Long fileNum = entry.getKey();
            FileSummary fs = entry.getValue();
            FileSummary recalcFs = null;
            if (recalcMap != null) {
                recalcFs = recalcMap.get(fileNum);
            }
            Summary summary = new Summary(fileNum, fs, recalcFs);
            if (summaries != null) {

Examples of org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary
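Switching to HDFS: here FileSummary is the protobuf-generated index stored at the tail of an fsimage file. The loader reads the summary, sorts the section descriptors into loading order, then positions the channel at each section's offset and wraps the stream for optional decompression: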

    private void loadInternal(RandomAccessFile raFile, FileInputStream fin)
        throws IOException {
      if (!FSImageUtil.checkFileFormat(raFile)) {
        throw new IOException("Unrecognized file format");
      }
      FileSummary summary = FSImageUtil.loadSummary(raFile);

      FileChannel channel = fin.getChannel();

      FSImageFormatPBINode.Loader inodeLoader = new FSImageFormatPBINode.Loader(
          fsn, this);
      FSImageFormatPBSnapshot.Loader snapshotLoader = new FSImageFormatPBSnapshot.Loader(
          fsn, this);

      ArrayList<FileSummary.Section> sections = Lists.newArrayList(summary
          .getSectionsList());
      Collections.sort(sections, new Comparator<FileSummary.Section>() {
        @Override
        public int compare(FileSummary.Section s1, FileSummary.Section s2) {
          SectionName n1 = SectionName.fromString(s1.getName());
          SectionName n2 = SectionName.fromString(s2.getName());
          if (n1 == null) {
            return n2 == null ? 0 : -1;
          } else if (n2 == null) {
            return 1;  // n1 is non-null here; unknown (null) names sort first, keeping compare() antisymmetric
          } else {
            return n1.ordinal() - n2.ordinal();
          }
        }
      });

      StartupProgress prog = NameNode.getStartupProgress();
      /**
       * The beginStep() and endStep() calls do not match the section
       * boundaries, because the current implementation only allows a
       * particular step to be started once.
       */
      Step currentStep = null;

      for (FileSummary.Section s : sections) {
        channel.position(s.getOffset());
        InputStream in = new BufferedInputStream(new LimitInputStream(fin,
            s.getLength()));

        in = FSImageUtil.wrapInputStreamForCompression(conf,
            summary.getCodec(), in);

        String n = s.getName();

        switch (SectionName.fromString(n)) {
        case NS_INFO:

Examples of org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary
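The writer side of the same format: once every section has been flushed, the FileSummary assembled in the builder b is written through the uncompressed underlying stream and the image digest is captured: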

      // We use the underlyingOutputStream to write the header. Therefore flush
      // the buffered stream (which is potentially compressed) first.
      flushSectionOutputStream();

      FileSummary summary = b.build();
      saveFileSummary(underlyingOutputStream, summary);
      underlyingOutputStream.close();
      savedDigest = new MD5Hash(digester.digest());
    }
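
A minimal sketch of building such a summary with the generated protobuf builder; the version constants below are placeholders, and writeDelimitedTo is the standard protobuf framing that the loader in the next example undoes with parseDelimitedFrom:

    // Placeholder values; the real constants live in the HDFS sources.
    FileSummary summary = FileSummary.newBuilder()
        .setOndiskVersion(1)
        .setLayoutVersion(-60)
        .build();
    summary.writeDelimitedTo(out);  // 'out' is an assumed OutputStream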

Examples of org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary
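Reading the summary back: seek to the fixed-size trailer at the end of the file, parse the delimited protobuf, and validate both the on-disk version and the layout version: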

    file.seek(fileLength - FILE_LENGTH_FIELD_SIZE - summaryLength);

    byte[] summaryBytes = new byte[summaryLength];
    file.readFully(summaryBytes);

    FileSummary summary = FileSummary
        .parseDelimitedFrom(new ByteArrayInputStream(summaryBytes));
    if (summary.getOndiskVersion() != FILE_VERSION) {
      throw new IOException("Unsupported file version "
          + summary.getOndiskVersion());
    }

    if (!NameNodeLayoutVersion.supports(Feature.PROTOBUF_FORMAT,
        summary.getLayoutVersion())) {
      throw new IOException("Unsupported layout version "
          + summary.getLayoutVersion());
    }
    return summary;
  }
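
A hedged usage sketch built only on the calls shown above, listing the sections of an image file (the path is a placeholder):

    try (RandomAccessFile file = new RandomAccessFile("/path/to/fsimage", "r")) {
        FileSummary summary = FSImageUtil.loadSummary(file);
        for (FileSummary.Section s : summary.getSectionsList()) {
            System.out.printf("%s offset=%d length=%d%n",
                s.getName(), s.getOffset(), s.getLength());
        }
    }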

Examples of org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary
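An XML dump built on the same summary/section machinery: the visitor validates the file, sorts the sections as above, then dispatches each section by name while streaming out <fsimage> elements: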

  public void visit(RandomAccessFile file) throws IOException {
    if (!FSImageUtil.checkFileFormat(file)) {
      throw new IOException("Unrecognized FSImage");
    }

    FileSummary summary = FSImageUtil.loadSummary(file);
    FileInputStream fin = null;
    try {
      fin = new FileInputStream(file.getFD());
      out.print("<?xml version=\"1.0\"?>\n<fsimage>");

      ArrayList<FileSummary.Section> sections = Lists.newArrayList(summary
          .getSectionsList());
      Collections.sort(sections, new Comparator<FileSummary.Section>() {
        @Override
        public int compare(FileSummary.Section s1, FileSummary.Section s2) {
          SectionName n1 = SectionName.fromString(s1.getName());
          SectionName n2 = SectionName.fromString(s2.getName());
          if (n1 == null) {
            return n2 == null ? 0 : -1;
          } else if (n2 == null) {
            return 1;  // n1 is non-null here; unknown (null) names sort first, keeping compare() antisymmetric
          } else {
            return n1.ordinal() - n2.ordinal();
          }
        }
      });

      for (FileSummary.Section s : sections) {
        fin.getChannel().position(s.getOffset());
        InputStream is = FSImageUtil.wrapInputStreamForCompression(conf,
            summary.getCodec(), new BufferedInputStream(new LimitInputStream(
                fin, s.getLength())));

        switch (SectionName.fromString(s.getName())) {
        case NS_INFO:
          dumpNameSection(is);