InputStream input = inputStreamParameter;
// We won't know the real size of the message, since we are compressing while reading the stream.
// This counter will be passed to the deflater and updated for every byte read
AtomicLong messageSize = new AtomicLong();
DeflaterReader deflaterReader = null;
if (session.isCompressLargeMessages())
{
   msgI.putBooleanProperty(Message.HDR_LARGE_COMPRESSED, true);
   deflaterReader = new DeflaterReader(inputStreamParameter, messageSize);
   input = deflaterReader;
}
long totalSize = 0;
boolean headerSent = false;
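// Stream the body in chunks of up to minLargeMessageSize bytes: each full buffer is sent
// as a continuation packet, and the loop ends once the input stream is exhausted.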
while (!lastPacket)
{
   byte[] buff = new byte[minLargeMessageSize];
   int pos = 0;
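   // Keep reading until the buffer is full or the stream ends; a single read() call
   // may return fewer bytes than requested.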
   do
   {
      int numberOfBytesRead;
      int wanted = minLargeMessageSize - pos;
      try
      {
         numberOfBytesRead = input.read(buff, pos, wanted);
      }
      catch (IOException e)
      {
         throw HornetQClientMessageBundle.BUNDLE.errorReadingBody(e);
      }
      if (numberOfBytesRead == -1)
      {
         lastPacket = true;
         break;
      }
      pos += numberOfBytesRead;
   }
   while (pos < minLargeMessageSize);
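   // pos now holds the number of bytes in this chunk: a full buffer, or less if the
   // end of the stream was reached.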
   totalSize += pos;
   final SessionSendContinuationMessage chunk;
   if (lastPacket)
   {
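      // When not compressing, the real body size is simply the total number of bytes read;
      // when compressing, the DeflaterReader has been keeping messageSize up to date.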
      if (!session.isCompressLargeMessages())
      {
         messageSize.set(totalSize);
      }
      // Replace the last packet's buffer with a smaller one sized to the bytes actually read
      byte[] buff2 = new byte[pos];
      System.arraycopy(buff, 0, buff2, 0, pos);
      buff = buff2;
      // This is the case where the message ends up being converted into a regular message:
      // the whole compressed body fit in a single buffer and no chunk has been sent yet
      if (!headerSent && session.isCompressLargeMessages() && buff2.length < minLargeMessageSize)
      {
         msgI.getBodyBuffer().resetReaderIndex();
         msgI.getBodyBuffer().resetWriterIndex();
         msgI.putLongProperty(Message.HDR_LARGE_BODY_SIZE, deflaterReader.getTotalSize());
         msgI.getBodyBuffer().writeBytes(buff, 0, pos);
         sendRegularMessage(msgI, sendBlocking, credits, handler);
         return;
      }
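      // Last chunk of the large message: it carries the accumulated body size, the
      // sendBlocking flag and the acknowledgement handler.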
      chunk = new SessionSendContinuationMessage(msgI, buff, false, sendBlocking, messageSize.get(), handler);
   }
   else
   {
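      // Intermediate chunk; more data follows in subsequent continuation packets.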
      chunk = new SessionSendContinuationMessage(msgI, buff, true, false, null);
   }