Examples of SegmentWriter


Examples of org.apache.jackrabbit.oak.plugins.segment.SegmentWriter

    public MemoryStore(NodeState root) {
        // Wrap the given state under a "root" child of an empty node
        NodeBuilder builder = EMPTY_NODE.builder();
        builder.setChildNode("root", root);

        // Write the wrapped state and flush the pending segment
        SegmentWriter writer = tracker.getWriter();
        this.head = writer.writeNode(builder.getNodeState());
        writer.flush();
    }
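
All of the Oak examples on this page follow the same pattern: serialize a NodeState with writeNode(), then flush() so the written segment reaches the store. A minimal round-trip sketch, using only the calls that appear in these excerpts; the helper names persistAndFlush and readBack are hypothetical:

    // Hypothetical helpers, not part of Oak: persist a NodeState and read it back.
    RecordId persistAndFlush(SegmentStore store, NodeState state) {
        SegmentWriter writer = new SegmentWriter(store);
        RecordId id = writer.writeNode(state).getRecordId(); // serialize into segment records
        writer.flush();                                      // push the pending segment to the store
        return id;
    }

    NodeState readBack(SegmentStore store, RecordId id) {
        return new SegmentNodeState(store, id);              // reads the written record lazily on access
    }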

Examples of org.apache.jackrabbit.oak.plugins.segment.SegmentWriter

    public MemoryJournal(SegmentStore store, NodeState root) {
        this.store = checkNotNull(store);
        this.parent = null; // a root journal has no parent to merge into

        // Persist the initial state and remember where it was written
        SegmentWriter writer = new SegmentWriter(store);
        RecordId id = writer.writeNode(root).getRecordId();
        writer.flush();

        // Both base and head start out at the initial record
        this.base = id;
        this.head = id;
    }

Examples of org.apache.jackrabbit.oak.plugins.segment.SegmentWriter

    public synchronized void merge() {
        if (parent != null) {
            NodeState before = new SegmentNodeState(store, base);
            NodeState after = new SegmentNodeState(store, head);

            SegmentWriter writer = new SegmentWriter(store);
            // Keep retrying until the parent accepts [base -> head] as its new head
            while (!parent.setHead(base, head)) {
                // The parent has moved on: rebase our changes onto its latest head
                RecordId newBase = parent.getHead();
                NodeBuilder builder =
                        new SegmentNodeState(store, newBase).builder();
                after.compareAgainstBaseState(before, new MergeDiff(builder));
                NodeState state = builder.getNodeState();
                RecordId newHead = writer.writeNode(state).getRecordId();
                writer.flush();

                base = newBase;
                head = newHead;
            }
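
Both merge() excerpts on this page (this one and the MongoDB-backed variant further down) share the same optimistic rebase-and-retry loop. A condensed sketch of just that loop, assuming the same store, base and head fields as the MemoryJournal above; the method name publish is hypothetical:

    // Condensed sketch: publish [base -> head] on the parent journal,
    // rebasing onto the parent's newer head until setHead() succeeds.
    private void publish(Journal parent, SegmentWriter writer) {
        NodeState before = new SegmentNodeState(store, base);
        NodeState after = new SegmentNodeState(store, head);
        while (!parent.setHead(base, head)) {
            RecordId newBase = parent.getHead();              // the parent moved on
            NodeBuilder builder = new SegmentNodeState(store, newBase).builder();
            after.compareAgainstBaseState(before, new MergeDiff(builder)); // replay our changes
            head = writer.writeNode(builder.getNodeState()).getRecordId();
            writer.flush();
            base = newBase;                                   // next attempt starts here
        }
    }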

Examples of org.apache.jackrabbit.oak.plugins.segment.SegmentWriter

    public void compact() {
        long start = System.nanoTime();
        log.info("TarMK compaction started");

        SegmentWriter writer = new SegmentWriter(this, tracker);
        Compactor compactor = new Compactor(writer);

        SegmentNodeState before = getHead();
        SegmentNodeState after = compactor.compact(EMPTY_NODE, before);
        writer.flush();
        while (!setHead(before, after)) {
            // Some other concurrent changes have been made.
            // Rebase (and compact) those changes on top of the
            // compacted state before retrying to set the head.
            SegmentNodeState head = getHead();
            after = compactor.compact(before, head);
            before = head;
            writer.flush();
        }
        tracker.setCompactionMap(compactor.getCompactionMap());

        // Drop the SegmentWriter caches and flush any existing state
        // in an attempt to prevent new references to old pre-compacted

Examples of org.apache.jackrabbit.oak.plugins.segment.SegmentWriter

        this.name = "root";

        DBObject id = new BasicDBObject("_id", "root");
        state = journals.findOne(id, null, primaryPreferred());
        if (state == null) {
            // No journal document yet: write the initial head and create one
            SegmentWriter writer = new SegmentWriter(store);
            RecordId headId = writer.writeNode(head).getRecordId();
            writer.flush();
            state = new BasicDBObject(of(
                    "_id", "root",
                    "head", headId.toString()));
            try {
                journals.insert(state, concern);

Examples of org.apache.jackrabbit.oak.plugins.segment.SegmentWriter

            NodeState before = new SegmentNodeState(store, base);
            NodeState after = new SegmentNodeState(store, head);

            // Resolve the parent journal named in the stored MongoDB state,
            // then run the same rebase-and-retry loop as MemoryJournal.merge()
            Journal parent = store.getJournal(state.get("parent").toString());
            SegmentWriter writer = new SegmentWriter(store);
            while (!parent.setHead(base, head)) {
                RecordId newBase = parent.getHead();
                NodeBuilder builder =
                        new SegmentNodeState(store, newBase).builder();
                after.compareAgainstBaseState(before, new MergeDiff(builder));
                RecordId newHead =
                        writer.writeNode(builder.getNodeState()).getRecordId();
                writer.flush();

                base = newBase;
                head = newHead;
            }

Examples of org.apache.nutch.segment.SegmentWriter

      outDirs.add(outDir);
      LOG.info("* Merging all segments into " + output.getName());
      s1 = System.currentTimeMillis();
      delta = s1;
      nfs.mkdirs(outDir);
      SegmentWriter sw = new SegmentWriter(nfs, outDir, true);
      LOG.fine(" - opening first output segment in " + outDir.getName());
      FetcherOutput fo = new FetcherOutput();
      Content co = new Content();
      ParseText pt = new ParseText();
      ParseData pd = new ParseData();
      int outputCnt = 0;
      for (int n = 0; n < ir.maxDoc(); n++) {
        if (ir.isDeleted(n)) {
          //System.out.println("-del");
          continue;
        }
        Document doc = ir.document(n);
        String segDoc = doc.get("sd");
        int idx = segDoc.indexOf('|');
        String segName = segDoc.substring(0, idx);
        String docName = segDoc.substring(idx + 1);
        SegmentReader sr = (SegmentReader) readers.get(segName);
        long docid;
        try {
          docid = Long.parseLong(docName);
        } catch (Exception e) {
          continue;
        }
        try {
          // get data from the reader
          sr.get(docid, fo, co, pt, pd);
        } catch (Throwable thr) {
          // don't break the loop, because only one of the segments
          // may be corrupted...
          LOG.fine(" - corrupt record no. " + docid + " in segment " + sr.segmentDir.getName() + " - skipping.");
          continue;
        }
        sw.append(fo, co, pt, pd);
        outputCnt++;
        processedRecords++;
        if (processedRecords > 0 && (processedRecords % LOG_STEP == 0)) {
          LOG.info(" Processed " + processedRecords + " records (" +
                  (float)(LOG_STEP * 1000)/(float)(System.currentTimeMillis() - delta) + " rec/s)");
          delta = System.currentTimeMillis();
        }
        if (processedRecords % maxCount == 0) {
          sw.close();
          outDir = new File(output, SegmentWriter.getNewSegmentName());
          LOG.fine(" - starting next output segment in " + outDir.getName());
          nfs.mkdirs(outDir);
          sw = new SegmentWriter(nfs, outDir, true);
          outDirs.add(outDir);
        }
      }
      LOG.info("* Merging took " + (System.currentTimeMillis() - s1) + " ms");
      ir.close();
      sw.close();
      FileUtil.fullyDelete(fsmtIndexDir);
      for (Iterator iter = readers.keySet().iterator(); iter.hasNext();) {
        SegmentReader sr = (SegmentReader) readers.get(iter.next());
        sr.close();
      }
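
The Nutch SegmentWriter above is driven record by record. A minimal sketch of opening a fresh output segment and appending a single record, using only the calls shown in the merge loop; fo, co, pt and pd are assumed to be a populated FetcherOutput, Content, ParseText and ParseData:

      // Minimal sketch: open a new segment, append one record, close it.
      File segDir = new File(output, SegmentWriter.getNewSegmentName());
      nfs.mkdirs(segDir);
      SegmentWriter writer = new SegmentWriter(nfs, segDir, true); // same flags as above
      writer.append(fo, co, pt, pd);  // fetch status, raw content, extracted text, parse data
      writer.close();                 // finalizes the segment files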