Package org.apache.jackrabbit.oak.plugins.segment.file

Examples of org.apache.jackrabbit.oak.plugins.segment.file.FileStore$SegmentReference


        // A configured directory selects the file-based (TarMK) segment store;
        // otherwise the MongoDB connection settings are read below.
        if (properties.get(DIRECTORY) != null) {
            String directory = properties.get(DIRECTORY).toString();

            mongo = null;
            store = new FileStore(directory);
        } else {
            String host = String.valueOf(properties.get(HOST));
            int port = Integer.parseInt(String.valueOf(properties.get(PORT)));
            String db = String.valueOf(properties.get(DB));
            int cache = Integer.parseInt(String.valueOf(properties.get(CACHE)));
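The snippet above is cut off before the MongoDB branch completes. A minimal sketch (a hypothetical helper, not part of the original component) of how the directory branch typically continues, using only the constructors shown in the examples on this page; the import locations correspond to the Oak line these snippets come from:

import java.io.IOException;

import org.apache.jackrabbit.oak.plugins.segment.SegmentNodeStore;
import org.apache.jackrabbit.oak.plugins.segment.file.FileStore;
import org.apache.jackrabbit.oak.spi.state.NodeStore;

// Hypothetical helper mirroring the directory branch above: the FileStore
// is wrapped in a SegmentNodeStore to obtain a NodeStore for Oak.
public final class TarStoreFactory {

    public static NodeStore createNodeStore(String directory) throws IOException {
        // Same constructor as in the snippet above: the segment tar files
        // live under the given directory.
        FileStore store = new FileStore(directory);
        // SegmentNodeStore exposes the segment store through the NodeStore API.
        return new SegmentNodeStore(store);
    }
}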


    }

    private static void backup(String[] args) throws IOException {
        if (args.length == 2) {
            // TODO: enable backup for other node store implementations
            FileStore store = new FileStore(new File(args[0]), 256, false);
            FileStoreBackup.backup(new SegmentNodeStore(store), new File(args[1]));
            store.close();
        } else {
            System.err.println("usage: backup <repository> <backup>");
            System.exit(1);
        }
    }
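The store above is only closed on the success path. A slightly more defensive variant of the same backup call, using the same classes as above but wrapped in try/finally so the store is closed even if the backup fails:

            FileStore store = new FileStore(new File(args[0]), 256, false);
            try {
                // Copy the current head state into the backup directory.
                FileStoreBackup.backup(new SegmentNodeStore(store), new File(args[1]));
            } finally {
                store.close();
            }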

            System.exit(1);
        } else {
            // TODO: enable debug information for other node store implementations
            System.out.println("Debug " + args[0]);
            File file = new File(args[0]);
            FileStore store = new FileStore(file, 256, false);
            try {
                if (args.length == 1) {
                    Map<SegmentId, List<SegmentId>> idmap = Maps.newHashMap();

                    int dataCount = 0;
                    long dataSize = 0;
                    int bulkCount = 0;
                    long bulkSize = 0;
                    for (SegmentId id : store.getSegmentIds()) {
                        if (id.isDataSegmentId()) {
                            Segment segment = id.getSegment();
                            dataCount++;
                            dataSize += segment.size();
                            idmap.put(id, segment.getReferencedIds());
                            System.out.println(id + " -> " + idmap.get(id));
                        } else if (id.isBulkSegmentId()) {
                            bulkCount++;
                            bulkSize += id.getSegment().size();
                            idmap.put(id, Collections.<SegmentId>emptyList());
                        }
                    }
                    System.out.println("Total size:");
                    System.out.format(
                            "%6dMB in %6d data segments%n",
                            dataSize / (1024 * 1024), dataCount);
                    System.out.format(
                            "%6dMB in %6d bulk segments%n",
                            bulkSize / (1024 * 1024), bulkCount);

                    Set<SegmentId> garbage = newHashSet(idmap.keySet());
                    Queue<SegmentId> queue = Queues.newArrayDeque();
                    queue.add(store.getHead().getRecordId().getSegmentId());
                    while (!queue.isEmpty()) {
                        SegmentId id = queue.remove();
                        if (garbage.remove(id)) {
                            queue.addAll(idmap.get(id));
                        }
                    }
                    dataCount = 0;
                    dataSize = 0;
                    bulkCount = 0;
                    bulkSize = 0;
                    for (SegmentId id : garbage) {
                        if (id.isDataSegmentId()) {
                            dataCount++;
                            dataSize += id.getSegment().size();
                        } else if (id.isBulkSegmentId()) {
                            bulkCount++;
                            bulkSize += id.getSegment().size();
                        }
                    }
                    System.out.println("Available for garbage collection:");
                    System.out.format(
                            "%6dMB in %6d data segments%n",
                            dataSize / (1024 * 1024), dataCount);
                    System.out.format(
                            "%6dMB in %6d bulk segments%n",
                            bulkSize / (1024 * 1024), bulkCount);
                } else {
                    Pattern pattern = Pattern.compile(
                            "([0-9a-f-]+)|([0-9a-f-]+:[0-9a-f]+)?(/.*)?");
                    for (int i = 1; i < args.length; i++) {
                        Matcher matcher = pattern.matcher(args[i]);
                        if (!matcher.matches()) {
                            System.err.println("Unknown argument: " + args[i]);
                        } else if (matcher.group(1) != null) {
                            UUID uuid = UUID.fromString(matcher.group(1));
                            SegmentId id = store.getTracker().getSegmentId(
                                    uuid.getMostSignificantBits(),
                                    uuid.getLeastSignificantBits());
                            System.out.println(id.getSegment());
                        } else {
                            RecordId id = store.getHead().getRecordId();
                            if (matcher.group(2) != null) {
                                id = RecordId.fromString(
                                        store.getTracker(), matcher.group(2));
                            }
                            String path = "/";
                            if (matcher.group(3) != null) {
                                path = matcher.group(3);
                            }
                            NodeState node = new SegmentNodeState(id);
                            System.out.println("/ -> " + node);
                            for (String name : PathUtils.elements(path)) {
                                node = node.getChildNode(name);
                                System.out.println(" " + name  + " -> " + node);
                            }
                        }
                    }
                }
            } finally {
                store.close();
            }
        }
    }
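The "Available for garbage collection" figure above comes from a breadth-first reachability check over the segment graph: any segment that cannot be reached from the segment containing the head record is a collection candidate. A minimal sketch of that traversal, pulled out into a helper for the same class (it assumes the Guava helpers newHashSet and Queues already imported above):

    private static Set<SegmentId> findUnreachable(
            Map<SegmentId, List<SegmentId>> idmap, SegmentId head) {
        // Start by treating every known segment as garbage.
        Set<SegmentId> garbage = newHashSet(idmap.keySet());
        Queue<SegmentId> queue = Queues.newArrayDeque();
        queue.add(head);
        while (!queue.isEmpty()) {
            SegmentId id = queue.remove();
            // remove() returns true only the first time a segment is seen,
            // so each segment's references are expanded at most once.
            if (garbage.remove(id)) {
                queue.addAll(idmap.get(id));
            }
        }
        // Whatever was never reached from the head remains in the set.
        return garbage;
    }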

                        }
                    } finally {
                        client.close();
                    }
                } else {
                    FileStore store = new FileStore(new File(dst), 256);
                    try {
                        NodeStore target = new SegmentNodeStore(store);
                        RepositoryUpgrade upgrade =
                                new RepositoryUpgrade(source, target);
                        upgrade.setCopyBinariesByReference(
                                options.has("datastore"));
                        upgrade.copy(null);
                    } finally {
                        store.close();
                    }
                }
            } finally {
                source.getRepository().shutdown();
            }
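Once the upgrade has finished, the new TarMK store can be opened as a JCR repository. A minimal sketch reusing the Oak/Jcr wiring shown in the cluster example at the end of this page; dst is the target directory from the snippet above, and how the repository is used afterwards is left out:

                    FileStore upgraded = new FileStore(new File(dst), 256);
                    try {
                        // Same wiring as the cluster example below: NodeStore -> Oak -> JCR.
                        Repository repository =
                                new Jcr(new Oak(new SegmentNodeStore(upgraded))).createRepository();
                        // ... use the repository ...
                    } finally {
                        upgraded.close();
                    }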

        FileUtils.cleanDirectory(getWorkDir());
    }

    protected SegmentNodeStore getNodeStore(BlobStore blobStore) throws IOException {
        if (nodeStore == null) {
            store = new FileStore(blobStore, getWorkDir(), 256, false);
            nodeStore = new SegmentNodeStore(store);
        }
        return nodeStore;
    }
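The constructor used here takes an external BlobStore, so binary values are kept outside the segment tar files and the segments only reference them. A minimal usage sketch from a subclass or test, assuming Oak's FileBlobStore (org.apache.jackrabbit.oak.spi.blob) as the external store; the path is a placeholder:

        // Hypothetical caller: binaries go to the blob directory, segments
        // to the work directory configured above.
        BlobStore blobStore = new FileBlobStore("/path/to/blobs");
        SegmentNodeStore nodeStore = getNodeStore(blobStore);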

            @Override
            protected Repository[] internalSetUpCluster(int n) throws Exception {
                Repository[] cluster = new Repository[n];
                stores = new FileStore[cluster.length];
                for (int i = 0; i < cluster.length; i++) {
                    stores[i] = new FileStore(
                            new File(base, unique),
                            maxFileSizeMB, cacheSizeMB, memoryMapping);
                    Oak oak = new Oak(new SegmentNodeStore(stores[i]));
                    cluster[i] = new Jcr(oak).createRepository();
                }
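Each simulated cluster node above opens its own FileStore on the same directory, so a matching teardown should close every store once the repositories are no longer needed. A minimal sketch; the method name is illustrative and it assumes the stores field populated above:

            protected void tearDownCluster() throws IOException {
                // Close every store opened in internalSetUpCluster; entries
                // can be null if setup failed partway through.
                for (FileStore store : stores) {
                    if (store != null) {
                        store.close();
                    }
                }
            }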
