Use of java.nio.channels.WritableByteChannel in project databus by linkedin.
The class ReadEventsTestParams, method streamWriterReader.
protected boolean streamWriterReader(DbusEventBuffer prodBuffer, int batchSize, Checkpoint cp, String filename, DbusEventBuffer readBuffer, Vector<Long> seenScn, DbusEventsStatisticsCollector stats) {
    try {
        WritableByteChannel writeChannel = null;
        File directory = new File(".");
        File writeFile = File.createTempFile(filename, ".dbus", directory);
        int numStreamedEvents = 0;
        try {
            writeChannel = Utils.openChannel(writeFile, true);
            StreamEventsArgs args = new StreamEventsArgs(batchSize);
            numStreamedEvents = prodBuffer.streamEvents(cp, writeChannel, args).getNumEventsStreamed();
        } catch (ScnNotFoundException e) {
            e.printStackTrace();
            return false;
        }
        writeChannel.close();
        ReadableByteChannel readChannel = Utils.openChannel(writeFile, false);
        int readEvents = readBuffer.readEvents(readChannel, null, stats);
        writeFile.delete();
        //System.out.printf("Wrote %d events, read %d events\n", numStreamedEvents, readEvents);
        for (DbusEvent e : readBuffer) {
            if (e.isEndOfPeriodMarker()) {
                seenScn.add(e.sequence());
            }
        }
        return (readEvents == numStreamedEvents);
    } catch (Exception e) {
        e.printStackTrace();
        return false;
    }
}
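The databus-specific Utils.openChannel call above simply wraps a temp file in an NIO channel so the buffer can be streamed out and read back. A minimal standalone sketch of the same write-then-read round trip using only java.nio (the file prefix and payload string are illustrative, not from the databus code):

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.nio.ByteBuffer;
import java.nio.channels.ReadableByteChannel;
import java.nio.channels.WritableByteChannel;
import java.nio.charset.StandardCharsets;

public class FileChannelRoundTrip {
    public static void main(String[] args) throws Exception {
        File tmp = File.createTempFile("roundtrip", ".bin");
        // Write a payload through a WritableByteChannel backed by the temp file.
        WritableByteChannel out = new FileOutputStream(tmp).getChannel();
        ByteBuffer payload = ByteBuffer.wrap("hello channel".getBytes(StandardCharsets.UTF_8));
        while (payload.hasRemaining()) {
            out.write(payload);
        }
        out.close();
        // Read the same bytes back through a ReadableByteChannel.
        ReadableByteChannel in = new FileInputStream(tmp).getChannel();
        ByteBuffer readBack = ByteBuffer.allocate((int) tmp.length());
        while (readBack.hasRemaining() && in.read(readBack) >= 0) {
            // keep reading until the buffer is full or EOF
        }
        in.close();
        tmp.delete();
        System.out.println(new String(readBack.array(), StandardCharsets.UTF_8));
    }
}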
Use of java.nio.channels.WritableByteChannel in project databus by linkedin.
The class ReadEventsTestParams, method testGetStreamedEvents.
@Test
public void testGetStreamedEvents() throws Exception {
    DbusEventBuffer dbuf = new DbusEventBuffer(getConfig(10000000, DbusEventBuffer.Config.DEFAULT_INDIVIDUAL_BUFFER_SIZE, 100000, 1000000, AllocationPolicy.HEAP_MEMORY, QueuePolicy.OVERWRITE_ON_WRITE, AssertLevel.ALL));
    int numEntries = 50000;
    int eventWindowSize = 20;
    HashMap<Long, KeyValue> testDataMap = new HashMap<Long, KeyValue>(20000);
    dbuf.start(0);
    for (long i = 1; i < numEntries; ++i) {
        //LOG.info("Iteration:" + i);
        DbusEventKey key = new DbusEventKey(RngUtils.randomLong());
        String value = RngUtils.randomString(20);
        dbuf.startEvents();
        // ms offset for nanosecond base
        long baseTsForWindow = timeStamp + ((RngUtils.randomPositiveInt() % numEntries) + 1) * 1000 * 1000;
        for (int j = 0; j < eventWindowSize; ++j) {
            // ms offset for nanosecond base
            long ts = baseTsForWindow + ((RngUtils.randomPositiveInt() % eventWindowSize) + 1) * 1000 * 1000;
            assertTrue(dbuf.appendEvent(key, pPartitionId, lPartitionId, ts, srcId, schemaId, value.getBytes(Charset.defaultCharset()), false));
            testDataMap.put(i, new KeyValue(key, value));
            ++i;
        }
        dbuf.endEvents(i);
    }
    for (int i = 0; i < 2; ++i) {
        //TODO (medium) try out corner cases, more batches, etc.
        int batchFetchSize = 5000;
        Checkpoint cp = new Checkpoint();
        cp.setFlexible();
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        WritableByteChannel writeChannel = Channels.newChannel(baos);
        //File directory = new File(".");
        //File writeFile = File.createTempFile("test", ".dbus", directory);
        int streamedEvents = 0;
        final DbusEventsStatisticsCollector streamStats = new DbusEventsStatisticsCollector(1, "stream", true, false, null);
        //writeChannel = Utils.openChannel(writeFile, true);
        StreamEventsArgs args = new StreamEventsArgs(batchFetchSize).setStatsCollector(streamStats);
        streamedEvents = dbuf.streamEvents(cp, writeChannel, args).getNumEventsStreamed();
        writeChannel.close();
        final byte[] eventBytes = baos.toByteArray();
        Assert.assertTrue(eventBytes.length > 0);
        Assert.assertTrue(streamedEvents > 0);
        final DbusEventsStatisticsCollector inputStats = new DbusEventsStatisticsCollector(1, "input", true, false, null);
        ByteArrayInputStream bais = new ByteArrayInputStream(eventBytes);
        ReadableByteChannel readChannel = Channels.newChannel(bais);
        DbusEventBuffer checkDbusEventBuffer = new DbusEventBuffer(getConfig(5000000, DbusEventBuffer.Config.DEFAULT_INDIVIDUAL_BUFFER_SIZE, 100000, 4000, AllocationPolicy.HEAP_MEMORY, QueuePolicy.OVERWRITE_ON_WRITE, AssertLevel.ALL));
        int messageSize = 0;
        int numEvents = 0;
        checkDbusEventBuffer.clear();
        numEvents = checkDbusEventBuffer.readEvents(readChannel, inputStats);
        long ts = 0;
        for (DbusEventInternalWritable e : checkDbusEventBuffer) {
            ts = Math.max(e.timestampInNanos(), ts);
            messageSize += e.size();
            if (e.isEndOfPeriodMarker()) {
                // check that the EOP marker carries the timestamp of the most recent data event in the window
                assertEquals(ts, e.timestampInNanos());
                LOG.debug("EOP:" + e.sequence() + " ts=" + e.timestampInNanos());
                ts = 0;
            } else {
                LOG.debug("DAT:" + e.sequence() + " ts=" + e.timestampInNanos());
            }
        }
        assertEquals("Events Count Check", streamedEvents, numEvents);
        assertTrue(messageSize <= batchFetchSize);
        assertEquals(streamStats.getTotalStats().getNumDataEvents(), inputStats.getTotalStats().getNumDataEvents());
        assertEquals(streamStats.getTotalStats().getNumSysEvents(), inputStats.getTotalStats().getNumSysEvents());
        LOG.debug("BatchFetchSize = " + batchFetchSize + " messageSize = " + messageSize + " numEvents = " + numEvents);
    }
}
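The test keeps the round trip entirely in memory by wrapping a ByteArrayOutputStream and ByteArrayInputStream with Channels.newChannel, so no temp files are involved. A minimal sketch of that in-memory channel pairing on its own (the payload string is illustrative):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.nio.ByteBuffer;
import java.nio.channels.Channels;
import java.nio.channels.ReadableByteChannel;
import java.nio.channels.WritableByteChannel;
import java.nio.charset.StandardCharsets;

public class InMemoryChannelRoundTrip {
    public static void main(String[] args) throws Exception {
        // Serialize through a WritableByteChannel backed by an in-memory stream.
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        WritableByteChannel writeChannel = Channels.newChannel(baos);
        ByteBuffer src = ByteBuffer.wrap("event bytes".getBytes(StandardCharsets.UTF_8));
        while (src.hasRemaining()) {
            writeChannel.write(src);
        }
        writeChannel.close();

        // Deserialize through a ReadableByteChannel over the captured bytes.
        ReadableByteChannel readChannel = Channels.newChannel(new ByteArrayInputStream(baos.toByteArray()));
        ByteBuffer dst = ByteBuffer.allocate(baos.size());
        while (dst.hasRemaining() && readChannel.read(dst) >= 0) {
            // keep reading until the buffer is full or EOF
        }
        readChannel.close();
        System.out.println(new String(dst.array(), StandardCharsets.UTF_8));
    }
}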
Use of java.nio.channels.WritableByteChannel in project druid by druid-io.
The class SerializerUtilsTest, method testChannelWritefloat.
@Test
public void testChannelWritefloat() throws IOException {
    final int index = 0;
    WritableByteChannel channelOutput = Channels.newChannel(outStream);
    serializerUtils.writeFloat(channelOutput, floats[index]);
    ByteArrayInputStream inputstream = new ByteArrayInputStream(outStream.toByteArray());
    if (channelOutput != null) {
        channelOutput.close();
    }
    float expected = serializerUtils.readFloat(inputstream);
    float actuals = floats[index];
    Assert.assertEquals(expected, actuals, delta);
}
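serializerUtils is Druid's helper; writing a float to a WritableByteChannel ultimately comes down to encoding it into a four-byte ByteBuffer and draining that buffer into the channel. A minimal sketch of the same round trip without the helper (the sample value is illustrative):

import java.io.ByteArrayOutputStream;
import java.nio.ByteBuffer;
import java.nio.channels.Channels;
import java.nio.channels.WritableByteChannel;

public class WriteFloatToChannel {
    public static void main(String[] args) throws Exception {
        float value = 3.14f;
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        WritableByteChannel channel = Channels.newChannel(out);
        // Encode the float into a 4-byte buffer, then flip it so it can be written.
        ByteBuffer buf = ByteBuffer.allocate(Float.BYTES);
        buf.putFloat(value);
        buf.flip();
        while (buf.hasRemaining()) {
            channel.write(buf);
        }
        channel.close();
        // Decode it back and verify the round trip.
        float readBack = ByteBuffer.wrap(out.toByteArray()).getFloat();
        System.out.println(value == readBack);  // true
    }
}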
Use of java.nio.channels.WritableByteChannel in project guava by google.
The class ByteStreamsTest, method testCopyFileChannel.
public void testCopyFileChannel() throws IOException {
    // Random prime, unlikely to match any internal chunk size
    final int chunkSize = 14407;
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    WritableByteChannel outChannel = Channels.newChannel(out);
    File testFile = createTempFile();
    FileOutputStream fos = new FileOutputStream(testFile);
    byte[] dummyData = newPreFilledByteArray(chunkSize);
    try {
        for (int i = 0; i < 500; i++) {
            fos.write(dummyData);
        }
    } finally {
        fos.close();
    }
    ReadableByteChannel inChannel = new RandomAccessFile(testFile, "r").getChannel();
    try {
        ByteStreams.copy(inChannel, outChannel);
    } finally {
        inChannel.close();
    }
    byte[] actual = out.toByteArray();
    for (int i = 0; i < 500 * chunkSize; i += chunkSize) {
        assertEquals(dummyData, Arrays.copyOfRange(actual, i, i + chunkSize));
    }
}
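ByteStreams.copy performs the channel-to-channel transfer for this test. A hand-rolled equivalent is a small read/write loop over a reusable ByteBuffer; a rough sketch follows (the buffer size is an arbitrary choice for illustration, not Guava's internal chunk size):

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.ReadableByteChannel;
import java.nio.channels.WritableByteChannel;

public final class ChannelCopy {
    // Copies everything from 'from' to 'to' and returns the number of bytes moved.
    public static long copy(ReadableByteChannel from, WritableByteChannel to) throws IOException {
        ByteBuffer buf = ByteBuffer.allocateDirect(32 * 1024);
        long total = 0;
        while (from.read(buf) != -1) {
            buf.flip();
            while (buf.hasRemaining()) {
                total += to.write(buf);
            }
            buf.clear();
        }
        return total;
    }
}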
Use of java.nio.channels.WritableByteChannel in project j2objc by google.
The class ChannelsTest, method testNewWriterWritableByteChannelString_InputNull.
/*
 * This test cannot pass when the buffer size is set to 0.
 */
public void testNewWriterWritableByteChannelString_InputNull() throws IOException {
    this.fouts = new FileOutputStream(tmpFile);
    WritableByteChannel wbChannel = Channels.newChannel(this.fouts);
    Writer testWriter = Channels.newWriter(wbChannel, Charset.forName(CODE_SET).newEncoder(), 1);
    //$NON-NLS-1$
    String writebuf = "";
    for (int val = 0; val < this.writebufSize / 2; val++) {
        writebuf = writebuf + ((char) (val + 64));
    }
    // can write to buffer
    testWriter.write(writebuf);
    testWriter.flush();
    testWriter.close();
}