Package org.apache.activemq.util

Examples of org.apache.activemq.util.DataByteArrayOutputStream
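
All of the snippets below use DataByteArrayOutputStream as a growable in-memory buffer that implements DataOutput and exposes its contents as a ByteSequence without copying, usually paired with DataByteArrayInputStream for reading the bytes back. As a quick orientation, here is a minimal round-trip sketch; it is not taken from the examples below and only assumes the sized constructor, the DataOutput/DataInput-style read and write methods, and toByteSequence() that the snippets themselves use:

    // Minimal round trip: write a few primitives into the growable buffer, wrap the
    // resulting ByteSequence in a DataByteArrayInputStream, and read them back.
    import java.io.IOException;

    import org.apache.activemq.util.ByteSequence;
    import org.apache.activemq.util.DataByteArrayInputStream;
    import org.apache.activemq.util.DataByteArrayOutputStream;

    public class DataByteArrayOutputStreamExample {
        public static void main(String[] args) throws IOException {
            DataByteArrayOutputStream out = new DataByteArrayOutputStream(64); // initial size; the buffer grows as needed
            out.writeInt(3);                          // e.g. a record length
            out.writeByte(1);                         // e.g. a record type
            out.write(new byte[] {10, 20, 30});       // the record body

            ByteSequence sequence = out.toByteSequence(); // wraps the internal array, no copy

            DataByteArrayInputStream in = new DataByteArrayInputStream(sequence);
            int length = in.readInt();                // 3
            byte type = in.readByte();                // 1
            byte[] body = new byte[length];
            in.readFully(body);                       // {10, 20, 30}
            System.out.println(length + " " + type + " " + body.length);
        }
    }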


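The first fragment is the tail of a read path: a ByteSequence returned by the data manager is wrapped in a DataByteArrayInputStream so the marshaller can decode the payload. storeDataItem below it does the reverse, marshalling the payload into a fresh DataByteArrayOutputStream and handing the resulting ByteSequence to the data manager: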
        DataByteArrayInputStream dataIn = new DataByteArrayInputStream(sequence);
        return marshaller.readPayload(dataIn);
    }

    public StoreLocation storeDataItem(Marshaller marshaller, Object payload) throws IOException {
        final DataByteArrayOutputStream buffer = new DataByteArrayOutputStream();
        marshaller.writePayload(payload, buffer);
        ByteSequence data = buffer.toByteSequence();
        return convertToStoreLocation(dataManager.write(data, (byte)1, false));
    }


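Two more operations on the same store: force() appends a FORCE_COMMAND record with the sync flag set, and updateItem() marshals the new payload into a DataByteArrayOutputStream before updating the record at its existing location: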
    public void force() throws IOException {
        dataManager.write(FORCE_COMMAND, (byte)2, true);
    }

    public void updateItem(StoreLocation location, Marshaller marshaller, Object payload) throws IOException {
        final DataByteArrayOutputStream buffer = new DataByteArrayOutputStream();
        marshaller.writePayload(payload, buffer);
        ByteSequence data = buffer.toByteSequence();
        dataManager.update(convertFromStoreLocation(location), data, false);
    }

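A hash index constructor fragment: the bin capacity is grown by repeated doubling, and a page-sized DataByteArrayInputStream, DataByteArrayOutputStream and read buffer are allocated before the index file is scanned one page at a time: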
                capacity <<= 1;
            }
            this.bins = new HashBin[capacity];
            keysPerPage = pageSize / keySize;
            dataIn = new DataByteArrayInputStream();
            dataOut = new DataByteArrayOutputStream(pageSize);
            readBuffer = new byte[pageSize];
            try {
                openIndexFile();
                long offset = 0;
                while ((offset + pageSize) <= indexFile.length()) {
                    // ... (snippet truncated here)

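A load() method guarded by a compare-and-set so it runs only once; it sets up the same page-sized buffers and then walks the index file page by page: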
    public void load() {
        if (loaded.compareAndSet(false, true)) {
            keysPerPage = pageSize / keySize;
            dataIn = new DataByteArrayInputStream();
            dataOut = new DataByteArrayOutputStream(pageSize);
            readBuffer = new byte[pageSize];
            try {
                openIndexFile();
                long offset = 0;
                while ((offset + pageSize) <= indexFile.length()) {
                    // ... (snippet truncated here)

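The journal's asynchronous writer thread. It blocks until a WriteBatch is queued, seeks once to the batch's first location, and then either writes a single record directly or coalesces the whole batch into one DataByteArrayOutputStream so the file sees a single large write before the sync: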
    protected void processQueue() {
        DataFile dataFile = null;
        RandomAccessFile file = null;
        try {

            DataByteArrayOutputStream buff = new DataByteArrayOutputStream(maxWriteBatchSize);
            while (true) {

                Object o = null;

                // Block till we get a command.
                synchronized (enqueueMutex) {
                    while (true) {
                        if (shutdown) {
                            o = SHUTDOWN_COMMAND;
                            break;
                        }
                        if (nextWriteBatch != null) {
                            o = nextWriteBatch;
                            nextWriteBatch = null;
                            break;
                        }
                        enqueueMutex.wait();
                    }
                    enqueueMutex.notify();
                }

                if (o == SHUTDOWN_COMMAND) {
                    break;
                }

                WriteBatch wb = (WriteBatch)o;
                if (dataFile != wb.dataFile) {
                    if (file != null) {
                        dataFile.closeRandomAccessFile(file);
                    }
                    dataFile = wb.dataFile;
                    file = dataFile.openRandomAccessFile(true);
                }

                WriteCommand write = wb.first;

                // Write all the data.
                // Only need to seek to first location.. all others
                // are in sequence.
                file.seek(write.location.getOffset());

                //
                // is it just 1 big write?
                if (wb.size == write.location.getSize()) {

                    // Just write it directly..
                    file.writeInt(write.location.getSize());
                    file.writeByte(write.location.getType());
                    file.write(RESERVED_SPACE);
                    file.write(AsyncDataManager.ITEM_HEAD_SOR);
                    file.write(write.data.getData(), write.data.getOffset(), write.data.getLength());
                    file.write(AsyncDataManager.ITEM_HEAD_EOR);

                } else {

                    // Combine the smaller writes into 1 big buffer
                    while (write != null) {

                        buff.writeInt(write.location.getSize());
                        buff.writeByte(write.location.getType());
                        buff.write(RESERVED_SPACE);
                        buff.write(AsyncDataManager.ITEM_HEAD_SOR);
                        buff.write(write.data.getData(), write.data.getOffset(), write.data.getLength());
                        buff.write(AsyncDataManager.ITEM_HEAD_EOR);

                        write = (WriteCommand)write.getNext();
                    }

                    // Now do the 1 big write.
                    ByteSequence sequence = buff.toByteSequence();
                    file.write(sequence.getData(), sequence.getOffset(), sequence.getLength());
                    buff.reset();
                }

                file.getFD().sync();

                WriteCommand lastWrite = (WriteCommand)wb.first.getTailNode();
                dataManager.setLastAppendLocation(lastWrite.location);

                // Signal any waiting threads that the write is on disk.
                wb.latch.countDown();

                // Now that the data is on disk, remove the writes from the in
                // flight
                // cache.
                write = wb.first;
                while (write != null) {
                    if (!write.sync) {
                        inflightWrites.remove(new WriteKey(write.location));
                    }
                    write = (WriteCommand)write.getNext();
                }
            }
            buff.close();
        } catch (IOException e) {
            synchronized (enqueueMutex) {
                firstAsyncException = e;
            }
        } catch (InterruptedException e) {
            // ... (snippet truncated here)

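A later variant of the same writer loop. The notable difference is the forceToDisk flag, accumulated from each write's sync flag and onComplete callback, so the file descriptor is only synced when some write in the batch actually asked for it: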
    protected void processQueue() {
        DataFile dataFile = null;
        RandomAccessFile file = null;
        try {

            DataByteArrayOutputStream buff = new DataByteArrayOutputStream(maxWriteBatchSize);
            while (true) {

                Object o = null;

                // Block till we get a command.
                synchronized (enqueueMutex) {
                    while (true) {
                        if (nextWriteBatch != null) {
                            o = nextWriteBatch;
                            nextWriteBatch = null;
                            break;
                        }
                        if (shutdown) {
                            return;
                        }
                        enqueueMutex.wait();
                    }
                    enqueueMutex.notify();
                }

                WriteBatch wb = (WriteBatch)o;
                if (dataFile != wb.dataFile) {
                    if (file != null) {
                        dataFile.closeRandomAccessFile(file);
                    }
                    dataFile = wb.dataFile;
                    file = dataFile.openRandomAccessFile(true);
                }

                WriteCommand write = wb.first;

                // Write all the data.
                // Only need to seek to first location.. all others
                // are in sequence.
                file.seek(write.location.getOffset());

               
                boolean forceToDisk=false;
               
                //
                // is it just 1 big write?
                if (wb.size == write.location.getSize()) {
                    forceToDisk = write.sync | write.onComplete!=null;
                   
                    // Just write it directly..
                    file.writeInt(write.location.getSize());
                    file.writeByte(write.location.getType());
                    file.write(RESERVED_SPACE);
                    file.write(AsyncDataManager.ITEM_HEAD_SOR);
                    file.write(write.data.getData(), write.data.getOffset(), write.data.getLength());
                    file.write(AsyncDataManager.ITEM_HEAD_EOR);

                } else {

                    // Combine the smaller writes into 1 big buffer
                    while (write != null) {
                        forceToDisk |= write.sync | write.onComplete!=null;

                        buff.writeInt(write.location.getSize());
                        buff.writeByte(write.location.getType());
                        buff.write(RESERVED_SPACE);
                        buff.write(AsyncDataManager.ITEM_HEAD_SOR);
                        buff.write(write.data.getData(), write.data.getOffset(), write.data.getLength());
                        buff.write(AsyncDataManager.ITEM_HEAD_EOR);

                        write = (WriteCommand)write.getNext();
                    }

                    // Now do the 1 big write.
                    ByteSequence sequence = buff.toByteSequence();
                    file.write(sequence.getData(), sequence.getOffset(), sequence.getLength());
                    buff.reset();
                }

                if( forceToDisk ) {
                    file.getFD().sync();
                }
                // ... (snippet truncated here)

    /**
     * Creates a writer that keeps a single reusable DataByteArrayOutputStream for
     * marshalling records before they are handed to the data manager.
     *
     * @param fileManager the DataManager the marshalled records are written to
     */
    StoreDataWriter(DataManager fileManager){
        this.dataManager=fileManager;
        this.buffer=new DataByteArrayOutputStream();
    }
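
The constructor above captures the idiom behind most of these snippets: the writer keeps one DataByteArrayOutputStream for its lifetime rather than allocating a buffer per record. A minimal sketch of that reuse pattern, not taken from the page (the class and method names are illustrative):

    import java.io.IOException;

    import org.apache.activemq.util.ByteSequence;
    import org.apache.activemq.util.DataByteArrayOutputStream;

    public class ReusableBufferSketch {
        private final DataByteArrayOutputStream buffer = new DataByteArrayOutputStream();

        // Frames one record as <length><bytes>. The returned ByteSequence wraps the shared
        // backing array, so it is only valid until the next call; a data manager would
        // typically copy it to disk immediately, as storeDataItem does above.
        ByteSequence frame(byte[] record) throws IOException {
            buffer.reset();                  // rewind the write position, keep the backing array
            buffer.writeInt(record.length);
            buffer.write(record);
            return buffer.toByteSequence();  // no copy made here
        }
    }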

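A round trip through the KahaDB journal command encoding: the command's type byte and framed body are written into a DataByteArrayOutputStream sized from serializedSizeFramed(), and the command is then decoded again from the resulting ByteSequence: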
       expected.setDestination(new KahaDestination().setName("Foo").setType(DestinationType.QUEUE));
       expected.setMessage(new Buffer(new byte[] {1,2,3,4,5,6} ));
       expected.setMessageId("Hello World");

       int size = expected.serializedSizeFramed();
       DataByteArrayOutputStream os = new DataByteArrayOutputStream(size + 1);
       os.writeByte(expected.type().getNumber());
       expected.writeFramed(os);
       ByteSequence seq = os.toByteSequence();

       DataByteArrayInputStream is = new DataByteArrayInputStream(seq);
       KahaEntryType type = KahaEntryType.valueOf(is.readByte());
       JournalCommand message = (JournalCommand)type.createMessage();
       message.mergeFramed(is);
       // ... (snippet truncated here)

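Another hash index initialization fragment; besides the page-sized buffers it records the number of bins, computes the resize threshold, and compresses the index file on open when it is not empty: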
            this.bins = new HashBin[capacity];
            this.numberOfBins=capacity;
            threshold = calculateThreashold();
            keysPerPage = pageSize / keySize;
            dataIn = new DataByteArrayInputStream();
            dataOut = new DataByteArrayOutputStream(pageSize);
            readBuffer = new byte[pageSize];
            try {
                openIndexFile();
                if (indexFile.length() > 0) {
                    doCompress();
                    // ... (snippet truncated here)



