bug 10737: Fix java.nio.BufferOverflowException in decompression

Also add a unit test for compression/decompression

status 10737: resolved fixed
This commit is contained in:
Sheng Yang 2011-07-15 18:01:45 -07:00
parent 4a30f38dd4
commit 1dac8f3815
2 changed files with 42 additions and 11 deletions
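
For context on the fix: the old doDecompress() inflated into a fixed ByteBuffer.allocate(65535), so any payload whose uncompressed size exceeded 65535 bytes threw java.nio.BufferOverflowException. The fix below threads the real uncompressed size (carried in the message header) through to the allocation. The following standalone sketch is not CloudStack code (the class name and payload size are illustrative), but it reproduces the same failure mode:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.nio.BufferOverflowException;
import java.nio.ByteBuffer;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;

public class OverflowRepro {
    public static void main(String[] args) throws Exception {
        // 800,000 bytes -- far larger than the old fixed 65535-byte buffer.
        byte[] payload = new byte[800000];
        ByteArrayOutputStream compressed = new ByteArrayOutputStream();
        try (GZIPOutputStream gz = new GZIPOutputStream(compressed)) {
            gz.write(payload);
        }

        // Mimic the pre-fix doDecompress(): inflate into a fixed-size buffer.
        ByteBuffer retBuff = ByteBuffer.allocate(65535);
        byte[] chunk = new byte[1024];
        try (GZIPInputStream in = new GZIPInputStream(
                new ByteArrayInputStream(compressed.toByteArray()))) {
            int len;
            while ((len = in.read(chunk)) != -1) {
                retBuff.put(chunk, 0, len); // overflows once 65535 bytes are written
            }
        } catch (BufferOverflowException e) {
            System.out.println("Reproduced: " + e);
        }
    }
}

Sizing the destination buffer from the header's size field, as the patch does, removes the overflow without guessing at a maximum payload.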

View File

@@ -247,6 +247,7 @@ public class Request {
         buffer.put((byte) 0);
         buffer.putShort(getFlags());
         buffer.putLong(_seq);
+        // The size here is uncompressed size, if the data is compressed.
         buffer.putInt(contentSize);
         buffer.putLong(_mgmtId);
         buffer.putLong(_agentId);
@@ -256,7 +257,7 @@ public class Request {
         return buffer;
     }
 
-    public static ByteBuffer doDecompress(ByteBuffer buffer) {
+    public static ByteBuffer doDecompress(ByteBuffer buffer, int length) {
         byte[] byteArrayIn = new byte[1024];
         ByteArrayInputStream byteIn;
         if (buffer.hasArray()) {
@@ -268,7 +269,7 @@ public class Request {
             buffer.get(array);
             byteIn = new ByteArrayInputStream(array);
         }
-        ByteBuffer retBuff = ByteBuffer.allocate(65535);
+        ByteBuffer retBuff = ByteBuffer.allocate(length);
         int len = 0;
         try {
             GZIPInputStream in = new GZIPInputStream(byteIn);
@@ -283,12 +284,17 @@ public class Request {
         return retBuff;
     }
 
-    public static ByteBuffer doCompress(ByteBuffer buffer) {
-        ByteArrayOutputStream byteOut = new ByteArrayOutputStream(65535);
-        byte[] array = new byte[buffer.capacity()];
-        buffer.get(array);
+    public static ByteBuffer doCompress(ByteBuffer buffer, int length) {
+        ByteArrayOutputStream byteOut = new ByteArrayOutputStream(length);
+        byte[] array;
+        if (buffer.hasArray()) {
+            array = buffer.array();
+        } else {
+            array = new byte[buffer.capacity()];
+            buffer.get(array);
+        }
         try {
-            GZIPOutputStream out = new GZIPOutputStream(byteOut, 65535);
+            GZIPOutputStream out = new GZIPOutputStream(byteOut, length);
             out.write(array);
             out.finish();
             out.close();
@@ -306,13 +312,14 @@ public class Request {
             _content = s_gson.toJson(_cmds, _cmds.getClass());
         }
         tmp = ByteBuffer.wrap(_content.getBytes());
+        int capacity = tmp.capacity();
         /* Check if we need to compress the data */
-        if (tmp.capacity() >= 8192) {
-            tmp = doCompress(tmp);
+        if (capacity >= 8192) {
+            tmp = doCompress(tmp, capacity);
             _flags |= FLAG_COMPRESSED;
         }
         buffers[1] = tmp;
-        buffers[0] = serializeHeader(buffers[1].capacity());
+        buffers[0] = serializeHeader(capacity);
         return buffers;
     }
 
@@ -441,6 +448,7 @@ public class Request {
         final boolean isRequest = (flags & FLAG_REQUEST) > 0;
         final long seq = buff.getLong();
+        // The size here is uncompressed size, if the data is compressed.
         final int size = buff.getInt();
         final long mgmtId = buff.getLong();
         final long agentId = buff.getLong();
@@ -453,7 +461,7 @@ public class Request {
         }
 
         if ((flags & FLAG_COMPRESSED) != 0) {
-            buff = doDecompress(buff);
+            buff = doDecompress(buff, size);
         }
 
         byte[] command = null;
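
Taken together, the Request.java changes thread the uncompressed size through the wire format: getBytes() captures tmp.capacity() before compressing, serializeHeader() writes that value into the header's size field, and the receive path hands the size it read back to doDecompress(). A minimal sketch of that round trip, assuming the signatures introduced in this diff (jsonContent is a hypothetical stand-in for the serialized command string):

// Sender (mirrors getBytes() above). jsonContent is a hypothetical placeholder.
ByteBuffer tmp = ByteBuffer.wrap(jsonContent.getBytes());
int capacity = tmp.capacity();           // uncompressed size, recorded for the header
if (capacity >= 8192) {                  // same threshold as getBytes() above
    tmp = Request.doCompress(tmp, capacity);
    // ... getBytes() also sets FLAG_COMPRESSED here ...
}
// serializeHeader(capacity) then puts the uncompressed size on the wire.

// Receiver (mirrors the parse path above): the size field read from the header
// tells doDecompress() exactly how large the output buffer must be.
ByteBuffer restored = Request.doDecompress(tmp, capacity);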

View File

@@ -1,9 +1,12 @@
 package com.cloud.agent.transport;
 
+import java.nio.ByteBuffer;
+
 import junit.framework.TestCase;
 
 import org.apache.log4j.Level;
 import org.apache.log4j.Logger;
+import org.junit.Assert;
 
 import com.cloud.agent.api.Answer;
 import com.cloud.agent.api.Command;
@@ -134,6 +137,26 @@ public class RequestTest extends TestCase {
     }
 
+    public void testCompress() {
+        s_logger.info("testCompress");
+        int len = 800000;
+        ByteBuffer inputBuffer = ByteBuffer.allocate(len);
+        for (int i = 0; i < len; i ++) {
+            inputBuffer.array()[i] = 1;
+        }
+        inputBuffer.limit(len);
+        ByteBuffer compressedBuffer = ByteBuffer.allocate(len);
+        compressedBuffer = Request.doCompress(inputBuffer, len);
+        s_logger.info("compressed length: " + compressedBuffer.limit());
+        ByteBuffer decompressedBuffer = ByteBuffer.allocate(len);
+        decompressedBuffer = Request.doDecompress(compressedBuffer, len);
+        for (int i = 0; i < len; i ++) {
+            if (inputBuffer.array()[i] != decompressedBuffer.array()[i]) {
+                Assert.fail("Fail at " + i);
+            }
+        }
+    }
+
     public void testLogging() {
         s_logger.info("Testing Logging");
         GetHostStatsCommand cmd3 = new GetHostStatsCommand("hostguid", "hostname", 101);