Package org.exist.storage.io

Examples of org.exist.storage.io.VariableByteInput


        final int ownerId = new Random().nextInt();
        final int mode = 0700;
        final int ownerGroupId = new Random().nextInt();

        final VariableByteOutputStream mockOstream = EasyMock.createMock(VariableByteOutputStream.class);
        final VariableByteInput mockIstream = EasyMock.createMock(VariableByteInput.class);

        final TestableUnixStylePermission permission = new TestableUnixStylePermission(mockSecurityManager, ownerId, ownerGroupId, mode);
       
        final long permissionVector = permission.getVector_testable();
       
        //expectations
        mockOstream.writeLong(permissionVector);
        expect(mockIstream.readLong()).andReturn(permissionVector);

        replay(mockSecurityManager, mockOstream, mockIstream);

        permission.write(mockOstream);
        permission.read(mockIstream);
View Full Code Here


            int             collectionId = CollectionStore.DocumentKey.getCollectionId( key );
            final int             docId        = CollectionStore.DocumentKey.getDocumentId( key );

            try {
                final byte              type    = key.data()[key.start() + Collection.LENGTH_COLLECTION_ID + DocumentImpl.LENGTH_DOCUMENT_TYPE];
                final VariableByteInput istream = store.getAsStream( pointer );
                DocumentImpl      doc     = null;

                if( type == DocumentImpl.BINARY_FILE ) {
                    doc = new BinaryDocument( broker.getBrokerPool() );
                } else {
View Full Code Here

        @Override
        public boolean indexInfo(final Value key, final long pointer) throws TerminatedException {

            try {
                final byte type = key.data()[key.start() + Collection.LENGTH_COLLECTION_ID + DocumentImpl.LENGTH_DOCUMENT_TYPE];
                final VariableByteInput is = collectionsDb.getAsStream(pointer);

                final DocumentImpl doc;
                if(type == DocumentImpl.BINARY_FILE) {
                    doc = new BinaryDocument(pool);
                } else {
View Full Code Here

            this.qname = qname;
            this.axis = axis;
        }

        public boolean indexInfo(Value key, long pointer) throws TerminatedException {
            VariableByteInput is;
            try {
                is = dbTokens.getAsStream(pointer);
            } catch (final IOException e) {
                LOG.error(e.getMessage(), e);
                return true;
            }
            word.reuse();
            if (qname == null)
                {WordRef.decode(key, word);}
            else
                {QNameWordRef.decode(key, word);}
            if (matcher.matches(word)) {
                try {
                    while (is.available() > 0) {
                        if(context != null)
                            {context.proceed();}
                        final int storedDocId = is.readInt();
                        final byte storedSection = is.readByte();
                        final int termCount = is.readInt();
                        //Read (variable) length of node IDs + frequency + offsets
                        final int length = is.readFixedInt();
                        final DocumentImpl storedDocument = docs.getDoc(storedDocId);
                        //Exit if the document is not concerned
                        if (storedDocument == null) {
                            is.skipBytes(length);
                            continue;
                        }
                        NodeId previous = null;
                        for (int m = 0; m < termCount; m++) {
                            NodeId nodeId = broker.getBrokerPool().getNodeFactory().createFromStream(previous, is);
                            previous = nodeId;
                            final int freq = is.readInt();
                            NodeProxy storedNode;
                            switch (storedSection) {
                            case TEXT_SECTION :
                                storedNode = new NodeProxy(storedDocument, nodeId, Node.TEXT_NODE);
                                break;
                            case ATTRIBUTE_SECTION :
                                storedNode = new NodeProxy(storedDocument, nodeId, Node.ATTRIBUTE_NODE);
                                break;
                            case QNAME_SECTION :
                                storedNode = new NodeProxy(storedDocument, nodeId,
                                    qname.getNameType() == ElementValue.ATTRIBUTE ?
                                    Node.ATTRIBUTE_NODE : Node.ELEMENT_NODE);
                                    break;
                            default :
                                throw new IllegalArgumentException("Invalid section type in '" + dbTokens.getFile().getName() + "'");
                            }
                            if (contextSet != null) {
                                NodeProxy parentNode;
                                switch (storedSection) {
                                case TEXT_SECTION :
                                case QNAME_SECTION:
                                    parentNode = contextSet.parentWithChild(storedNode,
                                        false, true, NodeProxy.UNKNOWN_NODE_LEVEL);
                                    break;
                                case ATTRIBUTE_SECTION :
                                    if (contextSet instanceof VirtualNodeSet) {
                                        parentNode = contextSet.parentWithChild(storedNode,
                                            false, true, NodeProxy.UNKNOWN_NODE_LEVEL);
                                        if (parentNode != null && parentNode.getNodeId().equals(nodeId))
                                            {parentNode = null;}
                                    } else {
                                        parentNode = contextSet.get(storedNode);
                                    }
                                    break;
                                default :
                                    throw new IllegalArgumentException("Invalid section type in '" +
                                        dbTokens.getFile().getName() + "'");
                                }
                                if (parentNode != null) {
                                    final Match match = new FTMatch(-1, nodeId, word.toString(), freq);
                                    readOccurrences(freq, is, match, word.length());
                                    final int sizeHint = contextSet.getSizeHint(storedDocument);
                                    if (axis == NodeSet.ANCESTOR) {
                                        parentNode.addMatch(match);
                                        result.add(parentNode, sizeHint);
                                    } else {
                                        storedNode.addMatch(match);
                                        result.add(storedNode, sizeHint);
                                    }
                                } else
                                    {is.skip(freq);}
                            } else {
                                final Match match = new FTMatch(-1, nodeId, word.toString(), freq);
                                readOccurrences(freq, is, match, word.length());
                                storedNode.addMatch(match);
                                result.add(storedNode, Constants.NO_SIZE_HINT);
View Full Code Here

            if (byQName)
                {QNameWordRef.decode(key, word);}
            else
                {WordRef.decode(key, word);}
            final String term = word.toString();
            VariableByteInput is;
            try {
                is = dbTokens.getAsStream(pointer);
            } catch (final IOException e) {
                LOG.error(e.getMessage(), e);
                //TODO : throw exception ? -pb
                return true;
            }
            try {
                while (is.available() > 0) {
                    boolean docAdded = false;
                    final int storedDocId = is.readInt();
                    final byte storedSection = is.readByte();
                    final int termCount = is.readInt();
                    //Read (variable) length of node IDs + frequency + offsets
                    final int length = is.readFixedInt();
                    final DocumentImpl storedDocument = docs.getDoc(storedDocId);
                    //Exit if the document is not concerned
                    if (storedDocument == null) {
                        is.skipBytes(length);
                        continue;
                    }
                    NodeId previous = null;
                    for (int m = 0; m < termCount; m++) {
                        NodeId nodeId = broker.getBrokerPool().getNodeFactory()
                            .createFromStream(previous, is);
                        previous = nodeId;
                        final int freq = is.readInt();
                        is.skip(freq);
                        if (contextSet != null) {
                            boolean include = false;
                            final NodeProxy parentNode = contextSet.parentWithChild(storedDocument,
                                nodeId, false, true);
                            switch (storedSection) {
View Full Code Here

                key = new QNameWordRef(collectionId, qname, token, broker.getBrokerPool().getSymbols());
            }
            final Lock lock = dbTokens.getLock();
            try {
                lock.acquire(Lock.READ_LOCK);
                final VariableByteInput is = dbTokens.getAsStream(key);
                //Does the token already have data in the index?
                if (is == null)
                    {continue;}
                while (is.available() > 0) {
                    final int storedDocId = is.readInt();
                    final int storedSection = is.readByte();
                    final int gidsCount = is.readInt();
                    //Read (variable) length of node IDs + frequency + offsets      
                    final int length = is.readFixedInt();
                    final DocumentImpl storedDocument = docs.getDoc(storedDocId);
                    //Exit if the document is not concerned
                    if (storedDocument == null) {
                        is.skipBytes(length);
                        continue;
                    }
                    //Process the nodes
                    NodeId previous = null;
                    for (int m = 0; m < gidsCount; m++) {
                        NodeId nodeId = broker.getBrokerPool().getNodeFactory().createFromStream(previous, is);
                        previous = nodeId;
                        final int freq = is.readInt();
                        NodeProxy storedNode;
                        switch (storedSection) {
                        case ATTRIBUTE_SECTION :
                            storedNode = new NodeProxy(storedDocument, nodeId, Node.ATTRIBUTE_NODE);
                            break;
                        case TEXT_SECTION :
                            storedNode = new NodeProxy(storedDocument, nodeId, Node.TEXT_NODE);
                            break;
                        case QNAME_SECTION :
                            storedNode = new NodeProxy(storedDocument, nodeId,
                                qname.getNameType() == ElementValue.ATTRIBUTE ?
                                    Node.ATTRIBUTE_NODE : Node.ELEMENT_NODE);
                            break;
                        default :
                            throw new IllegalArgumentException("Invalid section type in '" +
                                dbTokens.getFile().getName() + "'");
                        }
                        // if a context set is specified, we can directly check if the
                        // matching text node is a descendant of one of the nodes
                        // in the context set.
                        if (contextSet != null) {
                            NodeProxy parent;
                            switch(storedSection) {
                            case ATTRIBUTE_SECTION :
                                if (contextSet instanceof VirtualNodeSet) {
                                    parent = contextSet.parentWithChild(storedNode,
                                        false, true, NodeProxy.UNKNOWN_NODE_LEVEL);
                                    if (parent != null && !parent.getNodeId().equals(storedNode.getNodeId()))
                                        {parent = null;}
                                } else
                                    {parent = contextSet.get(storedNode);}
                                break;
                            case QNAME_SECTION:
                            case TEXT_SECTION :
                                parent = contextSet.parentWithChild(storedNode,
                                    false, true, NodeProxy.UNKNOWN_NODE_LEVEL);
                                break;
                            default :
                                throw new IllegalArgumentException("Invalid section type in '" + dbTokens.getFile().getName() + "'");
                            }
                            if (parent != null) {
                                final Match match = new FTMatch(-1, nodeId, token, freq);
                                readOccurrences(freq, is, match, token.length());
                                if (axis == NodeSet.ANCESTOR) {
                                    parent.addMatch(match);
                                    final int sizeHint = contextSet.getSizeHint(storedDocument);
                                    result.add(parent, sizeHint);
                                } else {
                                    storedNode.addMatch(match);
                                    final int sizeHint = contextSet.getSizeHint(storedDocument);
                                    result.add(storedNode, sizeHint);
                                }
                            } else {
                                is.skip(freq);
                            }
                        //Otherwise, we add all text nodes without check
                        } else {
                            final Match match = new FTMatch(-1, nodeId, token, freq);
                            readOccurrences(freq, is, match, token.length());
View Full Code Here

                    os.clear();
                    try {
                        lock.acquire(Lock.WRITE_LOCK);
                        boolean changed = false;
                        os.clear();
                        final VariableByteInput is = dbTokens.getAsStream(key);
                        //Does the token already have data in the index?
                        if (is == null)
                            {continue;}
                        //try {
                        while (is.available() > 0) {
                            final int storedDocId = is.readInt();
                            final byte section = is.readByte();
                            final int gidsCount = is.readInt();
                            //Read (variable) length of node IDs + frequency + offsets
                            final int length = is.readFixedInt();
                            if (storedDocId != document.getDocId()) {
                                // data are related to another document:
                                // copy them to any existing data
                                os.writeInt(storedDocId);
                                os.writeByte(section);
                                os.writeInt(gidsCount);
                                os.writeFixedInt(length);
                                is.copyRaw(os, length);
                            } else {
                                // data are related to our document:
                                // skip them
                                changed = true;
                                is.skipBytes(length);
                            }
                        }
                        //Store new data, if relevant
                        if (changed) {
                            //Well, nothing to store : remove the existing data
View Full Code Here

                    if( callback != null ) {
                        callback.startCollection( uri );
                    }
                    final Collection        collection = new Collection(broker, XmldbURI.createInternal( uri ) );
                    final VariableByteInput istream    = store.getAsStream( pointer );
                    collection.read( broker, istream );
                    BackupDescriptor bd = null;

                    if( prevBackup != null ) {
                        bd = prevBackup.getBackupDescriptor( uri );
View Full Code Here

            if( !exportedDocs.contains( docId ) ) {

                try {
                    final byte              type    = key.data()[key.start() + Collection.LENGTH_COLLECTION_ID + DocumentImpl.LENGTH_DOCUMENT_TYPE];
                    final VariableByteInput istream = store.getAsStream( pointer );
                    DocumentImpl      doc     = null;

                    if( type == DocumentImpl.BINARY_FILE ) {
                        doc = new BinaryDocument( broker.getBrokerPool() );
                    } else {
View Full Code Here

        SymbolTable symbolTable = new SymbolTable(null, tmpDir);
        symbolTable.getSymbol("some-name");

        VariableByteOutputStream mockOs = EasyMock.createMock(VariableByteOutputStream.class);
        VariableByteInput mockIs = EasyMock.createMock(VariableByteInput.class);

        final Capture<Byte> byteCapture = new Capture<Byte>();
        final Capture<Integer> intCapture = new Capture<Integer>();
        final Capture<String> strCapture = new Capture<String>();

        //write expectations
        mockOs.writeByte(captureByte(byteCapture));
        mockOs.writeInt(captureInt(intCapture));
        mockOs.writeUTF(capture(strCapture));

        replay(mockOs);

        symbolTable.localNameSymbols.write(mockOs);

        verify(mockOs);

        //read expectations
        expect(mockIs.available()).andReturn(1);
        expect(mockIs.readByte()).andReturn(byteCapture.getValue());
        expect(mockIs.readInt()).andReturn(intCapture.getValue());
        expect(mockIs.readUTF()).andReturn(strCapture.getValue());
        expect(mockIs.available()).andReturn(0);

        replay(mockIs);

        symbolTable.read(mockIs);
View Full Code Here

TOP

Related Classes of org.exist.storage.io.VariableByteInput

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and is owned by Oracle Inc. Contact coftware#gmail.com.