<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-core-api</artifactId>
</dependency>
- <dependency>
- <groupId>org.opendaylight.controller</groupId>
- <artifactId>sal-core-api</artifactId>
- </dependency>
<dependency>
<groupId>org.opendaylight.controller</groupId>
<artifactId>sal-core-spi</artifactId>
</dependency>
<dependency>
<groupId>org.opendaylight.yangtools</groupId>
<artifactId>yang-common</artifactId>
</dependency>
- <dependency>
- <groupId>org.opendaylight.yangtools</groupId>
- <artifactId>yang-data-api</artifactId>
- </dependency>
-
<dependency>
<groupId>org.osgi</groupId>
<artifactId>org.osgi.core</artifactId>
package org.opendaylight.controller.netconf.nettyutil.handler;
+import com.google.common.base.Charsets;
import com.google.common.base.Preconditions;
import io.netty.buffer.ByteBuf;
-import io.netty.buffer.Unpooled;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.MessageToByteEncoder;
import org.opendaylight.controller.netconf.util.messages.NetconfMessageConstants;
-import org.opendaylight.controller.netconf.util.messages.NetconfMessageHeader;
public class ChunkedFramingMechanismEncoder extends MessageToByteEncoder<ByteBuf> {
public static final int DEFAULT_CHUNK_SIZE = 8192;
this(DEFAULT_CHUNK_SIZE);
}
/**
 * Creates an encoder producing chunks of at most {@code chunkSize} bytes.
 *
 * @param chunkSize maximum chunk body size; must lie within
 *        [MIN_CHUNK_SIZE, MAX_CHUNK_SIZE] (declared elsewhere in this class)
 * @throws IllegalArgumentException if {@code chunkSize} is out of range
 */
public ChunkedFramingMechanismEncoder(final int chunkSize) {
    // Inclusive bounds: the original strict </> comparisons rejected the
    // boundary values themselves; the message reports the offending size.
    Preconditions.checkArgument(chunkSize >= MIN_CHUNK_SIZE && chunkSize <= MAX_CHUNK_SIZE, "Unsupported chunk size %s", chunkSize);
    this.chunkSize = chunkSize;
}
}
@Override
- protected void encode(ChannelHandlerContext ctx, ByteBuf msg, ByteBuf out) {
- while (msg.readableBytes() > chunkSize) {
- ByteBuf chunk = Unpooled.buffer(chunkSize);
- chunk.writeBytes(createChunkHeader(chunkSize));
- chunk.writeBytes(msg.readBytes(chunkSize));
- ctx.write(chunk);
- }
- out.writeBytes(createChunkHeader(msg.readableBytes()));
- out.writeBytes(msg.readBytes(msg.readableBytes()));
- out.writeBytes(NetconfMessageConstants.END_OF_CHUNK);
- }
+ protected void encode(final ChannelHandlerContext ctx, final ByteBuf msg, final ByteBuf out) {
+ do {
+ final int xfer = Math.min(chunkSize, msg.readableBytes());
+
+ out.writeBytes(NetconfMessageConstants.START_OF_CHUNK);
+ out.writeBytes(String.valueOf(xfer).getBytes(Charsets.US_ASCII));
+ out.writeByte('\n');
- private ByteBuf createChunkHeader(int chunkSize) {
- return Unpooled.wrappedBuffer(NetconfMessageHeader.toBytes(chunkSize));
+ out.writeBytes(msg, xfer);
+ } while (msg.isReadable());
+
+ out.writeBytes(NetconfMessageConstants.END_OF_CHUNK);
}
}
package org.opendaylight.controller.netconf.nettyutil.handler;
import static org.junit.Assert.assertEquals;
-import static org.mockito.Matchers.anyObject;
-import static org.mockito.Mockito.doAnswer;
-import com.google.common.collect.Lists;
+import static org.junit.Assert.assertTrue;
+import com.google.common.base.Charsets;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.channel.ChannelHandlerContext;
-import java.util.List;
+import java.nio.ByteBuffer;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
-import org.mockito.invocation.InvocationOnMock;
-import org.mockito.stubbing.Answer;
-import org.opendaylight.controller.netconf.util.messages.NetconfMessageConstants;
public class ChunkedFramingMechanismEncoderTest {
@Test
public void testEncode() throws Exception {
- final List<ByteBuf> chunks = Lists.newArrayList();
- doAnswer(new Answer<Object>() {
- @Override
- public Object answer(final InvocationOnMock invocation) throws Throwable {
- chunks.add((ByteBuf) invocation.getArguments()[0]);
- return null;
- }
- }).when(ctx).write(anyObject());
-
final ChunkedFramingMechanismEncoder encoder = new ChunkedFramingMechanismEncoder(chunkSize);
final int lastChunkSize = 20;
final ByteBuf src = Unpooled.wrappedBuffer(getByteArray(chunkSize * 4 + lastChunkSize));
final ByteBuf destination = Unpooled.buffer();
encoder.encode(ctx, src, destination);
- assertEquals(4, chunks.size());
- final int framingSize = "#256\n".getBytes().length + 1/* new line at end */;
+ assertEquals(1077, destination.readableBytes());
- for (final ByteBuf chunk : chunks) {
- assertEquals(chunkSize + framingSize, chunk.readableBytes());
- }
+ byte[] buf = new byte[destination.readableBytes()];
+ destination.readBytes(buf);
+ String s = Charsets.US_ASCII.decode(ByteBuffer.wrap(buf)).toString();
- final int lastFramingSize = "#20\n".length() + NetconfMessageConstants.END_OF_CHUNK.length + 1/* new line at end */;
- assertEquals(lastChunkSize + lastFramingSize, destination.readableBytes());
+ assertTrue(s.startsWith("\n#256\na"));
+ assertTrue(s.endsWith("\n#20\naaaaaaaaaaaaaaaaaaaa\n##\n"));
}
private byte[] getByteArray(final int size) {
public static final int MAX_HEADER_LENGTH = 13;

// Chunked-framing markers: each chunk header begins with "\n#" and the
// frame ends with "\n##\n" (see the chunked framing mechanism these bytes
// are consumed with in ChunkedFramingMechanismEncoder).
public static final byte[] START_OF_CHUNK = "\n#".getBytes(Charsets.UTF_8);
public static final byte[] END_OF_CHUNK = "\n##\n".getBytes(Charsets.UTF_8);
}