Package org.exist.storage.io

Examples of org.exist.storage.io.VariableByteInput
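
The snippets below all follow the same pattern: obtain a VariableByteInput (from an EasyMock mock, from a BFile entry via getAsStream, or by wrapping a FileInputStream in a VariableByteInputStream) and then decode fields in exactly the order they were written. As a minimal, self-contained sketch of that read loop, the following reads a hypothetical entry layout of doc id, type byte, fixed-width length and payload from a file; the layout is illustrative only and is not the on-disk format of any particular eXist index.

import java.io.FileInputStream;
import java.io.IOException;

import org.exist.storage.io.VariableByteInput;
import org.exist.storage.io.VariableByteInputStream;

public class VariableByteInputSketch {

    //Reads hypothetical entries of the form [docId][type][length][payload]
    //until the stream is exhausted, skipping each payload.
    public static void dumpEntries(final String path) throws IOException {
        try (final FileInputStream fis = new FileInputStream(path)) {
            final VariableByteInput is = new VariableByteInputStream(fis);
            while (is.available() > 0) {
                final int docId = is.readInt();       //variable-byte encoded int
                final byte type = is.readByte();      //single byte
                final int length = is.readFixedInt(); //fixed-width length prefix
                is.skipBytes(length);                 //payload not decoded here
                System.out.println("doc " + docId + ", type " + type + ", skipped " + length + " bytes");
            }
        }
    }
}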


    @Test
    public void readLegacyFormat() throws EXistException, IOException {

        SymbolTable symbolTable = new SymbolTable(null, tmpDir);
        VariableByteInput mockIs = EasyMock.createMock(VariableByteInput.class);

        /* readLegacy expectations */

        //max and nsMax
        expect(mockIs.readShort()).andReturn((short)1);
        expect(mockIs.readShort()).andReturn((short)1);

        //localnames
        expect(mockIs.readInt()).andReturn(1);
        expect(mockIs.readUTF()).andReturn("local-name");
        expect(mockIs.readShort()).andReturn((short)67);

        //namespaces
        expect(mockIs.readInt()).andReturn(1);
        expect(mockIs.readUTF()).andReturn("http://some/or/other");
        expect(mockIs.readShort()).andReturn((short)77);

        //default mappings
        expect(mockIs.readInt()).andReturn(1);
        expect(mockIs.readUTF()).andReturn("mapping");
        expect(mockIs.readShort()).andReturn((short)87);

        //mimetypes
        expect(mockIs.readInt()).andReturn(1);
        expect(mockIs.readUTF()).andReturn("some/other");
        expect(mockIs.readInt()).andReturn(97);

        //replay
        replay(mockIs);

        //action
View Full Code Here
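
The test above scripts a VariableByteInput with EasyMock instead of building a real stream. A minimal sketch of that record/replay/verify flow is shown below, with a hypothetical readEntryCount consumer standing in for the code under test; it is not the elided remainder of the test above.

import static org.easymock.EasyMock.createMock;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.verify;

import java.io.IOException;

import org.exist.storage.io.VariableByteInput;

public class MockedVariableByteInputSketch {

    //Hypothetical consumer of a VariableByteInput, used only for illustration.
    static int readEntryCount(final VariableByteInput is) throws IOException {
        return is.readInt();
    }

    public void readsScriptedValue() throws IOException {
        final VariableByteInput mockIs = createMock(VariableByteInput.class);

        //record the value the consumer is expected to read
        expect(mockIs.readInt()).andReturn(42);

        //replay, exercise the consumer, then verify that every
        //recorded expectation was consumed
        replay(mockIs);
        final int count = readEntryCount(mockIs);
        verify(mockIs);

        if (count != 42) {
            throw new AssertionError("expected 42 but read " + count);
        }
    }
}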


                NGramQNameKey value = new NGramQNameKey(currentDoc.getCollection().getId(), key.qname,
                        index.getBrokerPool().getSymbols(), key.term);
                boolean changed = false;
                os.clear();
                VariableByteInput is = index.db.getAsStream(value);
                if (is == null)
                    continue;
                while (is.available() > 0) {
                    int storedDocId = is.readInt();
                    byte nameType = is.readByte();
                    int occurrences = is.readInt();
                    //Read the byte length of the node IDs + frequency + offsets block
                    int length = is.readFixedInt();
                    if (storedDocId != currentDoc.getDocId()) {
                        // this entry belongs to another document:
                        // copy it through to the output unchanged
                        os.writeInt(storedDocId);
                        os.writeByte(nameType);
                        os.writeInt(occurrences);
                        os.writeFixedInt(length);
                        is.copyRaw(os, length);
                    } else {
                        // this entry belongs to our document:
                        if (mode == StreamListener.REMOVE_ALL_NODES) {
                            // skip them
                            is.skipBytes(length);
                        } else {
                            // removing nodes: need to filter out the node ids to be removed
                            // feed the new list with the GIDs

                            NodeId previous = null;
                            OccurrenceList newOccurrences = new OccurrenceList();
                            for (int m = 0; m < occurrences; m++) {
                                NodeId nodeId = index.getBrokerPool().getNodeFactory().createFromStream(previous, is);
                                previous = nodeId;
                                int freq = is.readInt();
                                // add the node to the new list if it is not
                                // in the list of removed nodes
                                if (!occurencesList.contains(nodeId)) {
                                    for (int n = 0; n < freq; n++) {
                                        newOccurrences.add(nodeId, is.readInt());
                                    }
                                } else {
                                    is.skip(freq);
                                }
                            }
                            // append the data from the new list
                            if(newOccurrences.getSize() > 0) {
                                //Don't forget this one
View Full Code Here

        @Override
        public boolean indexInfo(Value key, long pointer) throws TerminatedException {
            String ngram = new String(key.getData(), NGramQNameKey.NGRAM_OFFSET, key.getLength() - NGramQNameKey.NGRAM_OFFSET, UTF_8);

            VariableByteInput is;
            try {
                is = index.db.getAsStream(pointer);
                //Does the token already have data in the index?
                if (is == null)
                    return true;
                while (is.available() > 0) {
                    int storedDocId = is.readInt();
                    is.readByte();
                    int occurrences = is.readInt();
                    //Read the byte length of the node IDs + frequency + offsets block
                    int length = is.readFixedInt();
                    DocumentImpl storedDocument = docs.getDoc(storedDocId);
                    //Skip this entry if the document is not in the document set
                    if (storedDocument == null) {
                        is.skipBytes(length);
                        continue;
                    }
                    NodeId previous = null;
                    for (int m = 0; m < occurrences; m++) {
                        NodeId nodeId = index.getBrokerPool().getNodeFactory().createFromStream(previous, is);
                        previous = nodeId;
                        int freq = is.readInt();
                        NodeProxy storedNode = new NodeProxy(storedDocument, nodeId);
                        // if a context set is specified, we can directly check if the
                        // matching node is a descendant of one of the nodes
                        // in the context set.
                        if (contextSet != null) {
                            int sizeHint = contextSet.getSizeHint(storedDocument);
                            if (returnAncestor) {
                                NodeProxy parentNode = contextSet.parentWithChild(storedNode, false, true, NodeProxy.UNKNOWN_NODE_LEVEL);
                                if (parentNode != null) {
                                    readMatches(ngram, is, nodeId, freq, parentNode);
                                    resultSet.add(parentNode, sizeHint);
                                } else
                                    is.skip(freq);
                            } else {
                                readMatches(ngram, is, nodeId, freq, storedNode);
                                resultSet.add(storedNode, sizeHint);
                            }
                            // otherwise, we add all text nodes without further checks
View Full Code Here

         */
        @Override
        public boolean indexInfo(Value key, long pointer) throws TerminatedException {
            String term = new String(key.getData(), NGramQNameKey.NGRAM_OFFSET, key.getLength() - NGramQNameKey.NGRAM_OFFSET, UTF_8);

            VariableByteInput is;
            try {
                is = index.db.getAsStream(pointer);
            } catch (IOException e) {
                LOG.error(e.getMessage(), e);
                return true;
            }
            try {
                while (is.available() > 0) {
                    boolean docAdded = false;
                    int storedDocId = is.readInt();
                    byte nameType = is.readByte();
                    int occurrences = is.readInt();
                    //Read the byte length of the node IDs + frequency + offsets block
                    int length = is.readFixedInt();
                    DocumentImpl storedDocument = docs.getDoc(storedDocId);
                    //Skip this entry if the document is not in the document set
                    if (storedDocument == null) {
                        is.skipBytes(length);
                        continue;
                    }
                    NodeId previous = null;
                    for (int m = 0; m < occurrences; m++) {
                        NodeId nodeId = index.getBrokerPool().getNodeFactory().createFromStream(previous, is);
                        previous = nodeId;
                        int freq = is.readInt();
                        is.skip(freq);
                        boolean include = true;
                        //TODO : revisit
                        if (contextSet != null) {
                            NodeProxy parentNode = contextSet.parentWithChild(storedDocument, nodeId, false, true);
                            include = (parentNode != null);
View Full Code Here

     * @throws EXistException
     */
    private synchronized void loadSymbols() throws EXistException {
        try {
            final FileInputStream fis = new FileInputStream(getFile());
            final VariableByteInput is = new VariableByteInputStream(fis);
            final int magic = is.readFixedInt();
            if(magic == LEGACY_FILE_FORMAT_VERSION_ID) {
                LOG.info("Converting legacy symbols.dbx to new format...");
                readLegacy(is);
                saveSymbols();
            } else if(magic != FILE_FORMAT_VERSION_ID) {
View Full Code Here

                final Lock lock = collectionsDb.getLock();
                try {
                    lock.acquire(Lock.READ_LOCK);

                    final Value key = new CollectionStore.CollectionKey(uri.toString());
                    final VariableByteInput is = collectionsDb.getAsStream(key);
                    if(is == null) {
                        LOG.warn("Could not read collection entry for: " + uri);
                        return;
                    }
View Full Code Here

            collection = collectionsCache.get(uri);
            if(collection == null) {
                final Lock lock = collectionsDb.getLock();
                try {
                    lock.acquire(Lock.READ_LOCK);
                    VariableByteInput is;
                    if(address == BFile.UNKNOWN_ADDRESS) {
                        final Value key = new CollectionStore.CollectionKey(uri.toString());
                        is = collectionsDb.getAsStream(key);
                    } else {
                        is = collectionsDb.getAsStream(address);
View Full Code Here

            } else {
                for(final Value collectionDbKey : collectionsDb.getKeys()) {
                    if(collectionDbKey.data()[0] == CollectionStore.KEY_TYPE_COLLECTION) {
                        //Value collectionDbValue = collectionsDb.get(collectionDbKey);

                        final VariableByteInput vbi = collectionsDb.getAsStream(collectionDbKey);
                        final int id = vbi.readInt();
                        //check if the collection id matches (first 4 bytes)
                        if(collectionId == id) {
                            collectionUri = new String(Arrays.copyOfRange(collectionDbKey.data(), 1, collectionDbKey.data().length));
                            break;
                        }
                    }
                }
            }

            //get the resource uri
            final Value key = new CollectionStore.DocumentKey(collectionId, resourceType, documentId);
            final VariableByteInput vbi = collectionsDb.getAsStream(key);
            vbi.readInt(); //skip doc id
            final String resourceUri = vbi.readUTF();

            //get the resource
            uri = XmldbURI.createInternal(collectionUri + "/" + resourceUri);

        } catch(final TerminatedException te) {
View Full Code Here

    public void getResourceMetadata(final DocumentImpl document) {
        final Lock lock = collectionsDb.getLock();
        try {
            lock.acquire(Lock.READ_LOCK);
            final Value key = new CollectionStore.DocumentKey(document.getCollection().getId(), document.getResourceType(), document.getDocId());
            final VariableByteInput is = collectionsDb.getAsStream(key);
            if(is != null) {
                document.readDocumentMeta(is);
            }
        } catch(final LockException e) {
            LOG.warn("Failed to acquire lock on " + collectionsDb.getFile().getName());
View Full Code Here

        /* (non-Javadoc)
         * @see org.dbxml.core.filer.BTreeCallback#indexInfo(org.dbxml.core.data.Value, long)
         */
        public boolean indexInfo( Value value, long pointer ) throws TerminatedException
        {
            VariableByteInput is;

            try {
                is = dbValues.getAsStream( pointer );
            }
            catch( final IOException e ) {
                LOG.error( e.getMessage(), e );
                return( true );
            }

            try {

                while( is.available() > 0 ) {
                    final int          storedDocId    = is.readInt();
                    final int          gidsCount      = is.readInt();
                    final int          size           = is.readFixedInt();
                    final DocumentImpl storedDocument = docs.getDoc( storedDocId );

                    //Skip this entry if the document is not in the document set
                    if( storedDocument == null ) {
                        is.skipBytes( size );
                        continue;
                    }

                    //Process the nodes
                    NodeId    previous   = null;
View Full Code Here
