Use of org.apache.flink.runtime.plugable.SerializationDelegate in project flink by apache.
Class OutputEmitterTest, method testPartitionHash.
@Test
public void testPartitionHash() {
    // Test for IntValue
    @SuppressWarnings({ "unchecked", "rawtypes" })
    final TypeComparator<Record> intComp = new RecordComparatorFactory(new int[] { 0 }, new Class[] { IntValue.class }).createComparator();
    final ChannelSelector<SerializationDelegate<Record>> oe1 = new OutputEmitter<Record>(ShipStrategyType.PARTITION_HASH, intComp);
    final SerializationDelegate<Record> delegate = new SerializationDelegate<Record>(new RecordSerializerFactory().getSerializer());
    int numChans = 100;
    int numRecs = 50000;
    int[] hit = new int[numChans];
    for (int i = 0; i < numRecs; i++) {
        IntValue k = new IntValue(i);
        Record rec = new Record(k);
        delegate.setInstance(rec);
        int[] chans = oe1.selectChannels(delegate, hit.length);
        for (int chan : chans) {
            hit[chan]++;
        }
    }
    int cnt = 0;
    for (int aHit : hit) {
        assertTrue(aHit > 0);
        cnt += aHit;
    }
    assertTrue(cnt == numRecs);
    // Test for StringValue
    @SuppressWarnings({ "unchecked", "rawtypes" })
    final TypeComparator<Record> stringComp = new RecordComparatorFactory(new int[] { 0 }, new Class[] { StringValue.class }).createComparator();
    final ChannelSelector<SerializationDelegate<Record>> oe2 = new OutputEmitter<Record>(ShipStrategyType.PARTITION_HASH, stringComp);
    numChans = 100;
    numRecs = 10000;
    hit = new int[numChans];
    for (int i = 0; i < numRecs; i++) {
        StringValue k = new StringValue(i + "");
        Record rec = new Record(k);
        delegate.setInstance(rec);
        int[] chans = oe2.selectChannels(delegate, hit.length);
        for (int chan : chans) {
            hit[chan]++;
        }
    }
    cnt = 0;
    for (int aHit : hit) {
        assertTrue(aHit > 0);
        cnt += aHit;
    }
    assertTrue(cnt == numRecs);
    // test hash corner cases
    final TestIntComparator testIntComp = new TestIntComparator();
    final ChannelSelector<SerializationDelegate<Integer>> oe3 = new OutputEmitter<Integer>(ShipStrategyType.PARTITION_HASH, testIntComp);
    final SerializationDelegate<Integer> intDel = new SerializationDelegate<Integer>(new IntSerializer());
    numChans = 100;
    // MinVal hash
    intDel.setInstance(Integer.MIN_VALUE);
    int[] chans = oe3.selectChannels(intDel, numChans);
    assertTrue(chans.length == 1);
    assertTrue(chans[0] >= 0 && chans[0] <= numChans - 1);
    // -1 hash
    intDel.setInstance(-1);
    chans = oe3.selectChannels(intDel, hit.length);
    assertTrue(chans.length == 1);
    assertTrue(chans[0] >= 0 && chans[0] <= numChans - 1);
    // 0 hash
    intDel.setInstance(0);
    chans = oe3.selectChannels(intDel, hit.length);
    assertTrue(chans.length == 1);
    assertTrue(chans[0] >= 0 && chans[0] <= numChans - 1);
    // 1 hash
    intDel.setInstance(1);
    chans = oe3.selectChannels(intDel, hit.length);
    assertTrue(chans.length == 1);
    assertTrue(chans[0] >= 0 && chans[0] <= numChans - 1);
    // MaxVal hash
    intDel.setInstance(Integer.MAX_VALUE);
    chans = oe3.selectChannels(intDel, hit.length);
    assertTrue(chans.length == 1);
    assertTrue(chans[0] >= 0 && chans[0] <= numChans - 1);
}
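The corner-case checks at the end exist because a naive hash % numChannels can produce a negative index for inputs such as Integer.MIN_VALUE (and Math.abs(Integer.MIN_VALUE) is still negative). Purely as an illustration of the property those asserts verify, and not Flink's actual OutputEmitter implementation, a safe mapping could look like this:

// Illustrative only: map an arbitrary int hash to a valid channel index.
// Flink's OutputEmitter applies its own hashing internally; this sketch just
// shows why MIN_VALUE, -1, 0, 1 and MAX_VALUE must all land in [0, numChannels).
static int channelFor(int hash, int numChannels) {
    return (hash % numChannels + numChannels) % numChannels; // never negative
}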
Use of org.apache.flink.runtime.plugable.SerializationDelegate in project flink by apache.
Class OutputEmitterTest, method testWrongKeyClass.
@Test
public void testWrongKeyClass() {
    // Comparator expects a DoubleValue key, but the record carries an IntValue
    @SuppressWarnings({ "unchecked", "rawtypes" })
    final TypeComparator<Record> doubleComp = new RecordComparatorFactory(new int[] { 0 }, new Class[] { DoubleValue.class }).createComparator();
    final ChannelSelector<SerializationDelegate<Record>> oe1 = new OutputEmitter<Record>(ShipStrategyType.PARTITION_HASH, doubleComp);
    final SerializationDelegate<Record> delegate = new SerializationDelegate<Record>(new RecordSerializerFactory().getSerializer());
    Record rec = null;
    try {
        PipedInputStream pipedInput = new PipedInputStream(1024 * 1024);
        DataInputView in = new DataInputViewStreamWrapper(pipedInput);
        DataOutputView out = new DataOutputViewStreamWrapper(new PipedOutputStream(pipedInput));
        rec = new Record(1);
        rec.setField(0, new IntValue());
        rec.write(out);
        rec = new Record();
        rec.read(in);
    } catch (IOException e) {
        fail("Test erroneous");
    }
    try {
        delegate.setInstance(rec);
        oe1.selectChannels(delegate, 100);
    } catch (DeserializationException re) {
        return;
    }
    Assert.fail("Expected a DeserializationException.");
}
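For context on why selectChannels is expected to fail here: the serialized record holds an IntValue in field 0, but the comparator is configured for DoubleValue, so the lazy field deserialization breaks when the key is accessed for hashing. A rough standalone reproduction of that mismatch (a hypothetical snippet, not part of the test, to be placed in a method that declares throws IOException):

// Hypothetical illustration of the type mismatch: field 0 is written as an
// IntValue and later read back as a DoubleValue, which is expected to fail
// during lazy deserialization of the field.
ByteArrayOutputStream bytes = new ByteArrayOutputStream();
Record original = new Record(new IntValue(42));
original.write(new DataOutputViewStreamWrapper(bytes));

Record copy = new Record();
copy.read(new DataInputViewStreamWrapper(new ByteArrayInputStream(bytes.toByteArray())));
DoubleValue wrong = copy.getField(0, DoubleValue.class); // typically surfaces as a DeserializationException, mirroring selectChannels above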
Use of org.apache.flink.runtime.plugable.SerializationDelegate in project flink by apache.
Class OperatorChain, method createStreamOutput.
private <T> RecordWriterOutput<T> createStreamOutput(StreamEdge edge, StreamConfig upStreamConfig, int outputIndex, Environment taskEnvironment, String taskName) {
    // the OutputTag is null if this edge is not a side output
    OutputTag sideOutputTag = edge.getOutputTag();
    TypeSerializer outSerializer = null;
    if (edge.getOutputTag() != null) {
        // side output
        outSerializer = upStreamConfig.getTypeSerializerSideOut(edge.getOutputTag(), taskEnvironment.getUserClassLoader());
    } else {
        // main output
        outSerializer = upStreamConfig.getTypeSerializerOut(taskEnvironment.getUserClassLoader());
    }
    @SuppressWarnings("unchecked")
    StreamPartitioner<T> outputPartitioner = (StreamPartitioner<T>) edge.getPartitioner();
    LOG.debug("Using partitioner {} for output {} of task {}", outputPartitioner, outputIndex, taskName);
    ResultPartitionWriter bufferWriter = taskEnvironment.getWriter(outputIndex);
    // we initialize the partitioner here with the number of key groups (aka max. parallelism)
    if (outputPartitioner instanceof ConfigurableStreamPartitioner) {
        int numKeyGroups = bufferWriter.getNumTargetKeyGroups();
        if (0 < numKeyGroups) {
            ((ConfigurableStreamPartitioner) outputPartitioner).configure(numKeyGroups);
        }
    }
    StreamRecordWriter<SerializationDelegate<StreamRecord<T>>> output = new StreamRecordWriter<>(bufferWriter, outputPartitioner, upStreamConfig.getBufferTimeout());
    output.setMetricGroup(taskEnvironment.getMetricGroup().getIOMetricGroup());
    return new RecordWriterOutput<>(output, outSerializer, sideOutputTag, this);
}
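The StreamRecordWriter created here is parameterized on SerializationDelegate<StreamRecord<T>>: each element is wrapped in a delegate that pairs the value with the TypeSerializer that knows how to write it, so the writer only deals with an IOReadableWritable while the StreamPartitioner picks the target channels. A minimal sketch of that delegate pattern, shown with a plain StringSerializer for illustration (the actual wrapping happens inside RecordWriterOutput and may differ between Flink versions):

// Minimal sketch of the delegate pattern, not taken from RecordWriterOutput:
// the delegate carries both the value and its serializer.
SerializationDelegate<String> delegate = new SerializationDelegate<>(StringSerializer.INSTANCE);
delegate.setInstance("hello");
// output.emit(delegate);  // hypothetical call; channel selection is done by the configured StreamPartitioner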
Use of org.apache.flink.runtime.plugable.SerializationDelegate in project flink by apache.
Class OutputEmitterTest, method testNullKey.
@Test
public void testNullKey() {
    // The comparator keys on field 0, which is never set, so the key is null
    @SuppressWarnings({ "unchecked", "rawtypes" })
    final TypeComparator<Record> intComp = new RecordComparatorFactory(new int[] { 0 }, new Class[] { IntValue.class }).createComparator();
    final ChannelSelector<SerializationDelegate<Record>> oe1 = new OutputEmitter<Record>(ShipStrategyType.PARTITION_HASH, intComp);
    final SerializationDelegate<Record> delegate = new SerializationDelegate<Record>(new RecordSerializerFactory().getSerializer());
    Record rec = new Record(2);
    rec.setField(1, new IntValue(1));
    delegate.setInstance(rec);
    try {
        oe1.selectChannels(delegate, 100);
    } catch (NullKeyFieldException re) {
        Assert.assertEquals(0, re.getFieldNumber());
        return;
    }
    Assert.fail("Expected a NullKeyFieldException.");
}
Use of org.apache.flink.runtime.plugable.SerializationDelegate in project flink by apache.
Class OutputEmitterTest, method testMultiKeys.
@Test
public void testMultiKeys() {
    @SuppressWarnings({ "unchecked", "rawtypes" })
    final TypeComparator<Record> multiComp = new RecordComparatorFactory(new int[] { 0, 1, 3 }, new Class[] { IntValue.class, StringValue.class, DoubleValue.class }).createComparator();
    final ChannelSelector<SerializationDelegate<Record>> oe1 = new OutputEmitter<Record>(ShipStrategyType.PARTITION_HASH, multiComp);
    final SerializationDelegate<Record> delegate = new SerializationDelegate<Record>(new RecordSerializerFactory().getSerializer());
    int numChannels = 100;
    int numRecords = 5000;
    int[] hit = new int[numChannels];
    for (int i = 0; i < numRecords; i++) {
        Record rec = new Record(4);
        rec.setField(0, new IntValue(i));
        rec.setField(1, new StringValue("AB" + i + "CD" + i));
        rec.setField(3, new DoubleValue(i * 3.141d));
        delegate.setInstance(rec);
        int[] chans = oe1.selectChannels(delegate, hit.length);
        for (int chan : chans) {
            hit[chan]++;
        }
    }
    int cnt = 0;
    for (int aHit : hit) {
        assertTrue(aHit > 0);
        cnt += aHit;
    }
    assertTrue(cnt == numRecords);
}
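The multi-key test only asserts that every channel is hit and that the counts add up to the number of records; it does not pin down how the three field hashes are combined. Purely for exposition (an assumed combiner, not the RecordComparator implementation), a composite key hash is often built like this:

// Illustrative composite hash over the three key fields; the real
// RecordComparator may combine the per-field hashes differently.
static int compositeHash(IntValue f0, StringValue f1, DoubleValue f3) {
    int h = 17;
    h = 31 * h + f0.hashCode();
    h = 31 * h + f1.hashCode();
    h = 31 * h + f3.hashCode();
    return h;
}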