Use of org.apache.flink.types.DoubleValue in project flink by apache.
From the class InstantiationUtilTest, method testSerializationToByteArray:
@Test
public void testSerializationToByteArray() throws IOException {
    // Round-trip a random DoubleValue through the byte-array helpers and
    // verify the deserialized copy is equal to the original record.
    final DoubleValueSerializer serializer = new DoubleValueSerializer();
    final DoubleValue original = new DoubleValue(Math.random());

    final byte[] bytes = InstantiationUtil.serializeToByteArray(serializer, original);
    final DoubleValue restored = InstantiationUtil.deserializeFromByteArray(serializer, bytes);

    assertEquals("Serialized record is not equal after serialization.", original, restored);
}
Use of org.apache.flink.types.DoubleValue in project flink by apache.
From the class DoubleValueComparator, method compareToReference:
@Override
public int compareToReference(TypeComparator<DoubleValue> referencedComparator) {
    // Pull the reference value held by the other comparator. Note the
    // operand order: the other comparator's reference is compared to ours.
    final DoubleValue other = ((DoubleValueComparator) referencedComparator).reference;
    final int cmp = other.compareTo(reference);
    if (ascendingComparison) {
        return cmp;
    }
    // Descending order flips the sign of the comparison result.
    return -cmp;
}
Use of org.apache.flink.types.DoubleValue in project flink by apache.
From the class HITS, method runInternal:
@Override
public DataSet<Result<K>> runInternal(Graph<K, VV, EV> input) throws Exception {
// Iteratively computes HITS hub and authority scores for each vertex.
// Each iteration: hub scores flow along reversed edges, authority scores
// along forward edges; both are normalized by their sum of squares.
// Strip edge values down to (source, target) ID pairs.
DataSet<Tuple2<K, K>> edges = input.getEdges().map(new ExtractEdgeIDs<>()).setParallelism(parallelism).name("Extract edge IDs");
// ID, hub, authority
DataSet<Tuple3<K, DoubleValue, DoubleValue>> initialScores = edges.map(new InitializeScores<>()).setParallelism(parallelism).name("Initial scores").groupBy(0).reduce(new SumScores<>()).setCombineHint(CombineHint.HASH).setParallelism(parallelism).name("Sum");
// Bulk iteration over the (ID, hub, authority) triples, bounded by maxIterations.
IterativeDataSet<Tuple3<K, DoubleValue, DoubleValue>> iterative = initialScores.iterate(maxIterations).setParallelism(parallelism);
// ID, hubbiness
// New hub score: join current scores with edges on the edge target (field 1),
// then sum the contributions per source vertex.
DataSet<Tuple2<K, DoubleValue>> hubbiness = iterative.coGroup(edges).where(0).equalTo(1).with(new Hubbiness<>()).setParallelism(parallelism).name("Hub").groupBy(0).reduce(new SumScore<>()).setCombineHint(CombineHint.HASH).setParallelism(parallelism).name("Sum");
// sum-of-hubbiness-squared
// Global normalization factor for hub scores (broadcast below).
DataSet<DoubleValue> hubbinessSumSquared = hubbiness.map(new Square<>()).setParallelism(parallelism).name("Square").reduce(new Sum()).setParallelism(parallelism).name("Sum");
// ID, new authority
// New authority score: join hub scores with edges on the edge source (field 0),
// then sum the contributions per target vertex.
DataSet<Tuple2<K, DoubleValue>> authority = hubbiness.coGroup(edges).where(0).equalTo(0).with(new Authority<>()).setParallelism(parallelism).name("Authority").groupBy(0).reduce(new SumScore<>()).setCombineHint(CombineHint.HASH).setParallelism(parallelism).name("Sum");
// sum-of-authority-squared
// Global normalization factor for authority scores (broadcast below).
DataSet<DoubleValue> authoritySumSquared = authority.map(new Square<>()).setParallelism(parallelism).name("Square").reduce(new Sum()).setParallelism(parallelism).name("Sum");
// ID, normalized hubbiness, normalized authority
// Full outer join: a vertex may have only a hub score or only an authority
// score; both normalization sums are delivered via broadcast sets.
DataSet<Tuple3<K, DoubleValue, DoubleValue>> scores = hubbiness.fullOuterJoin(authority, JoinHint.REPARTITION_SORT_MERGE).where(0).equalTo(0).with(new JoinAndNormalizeHubAndAuthority<>()).withBroadcastSet(hubbinessSumSquared, HUBBINESS_SUM_SQUARED).withBroadcastSet(authoritySumSquared, AUTHORITY_SUM_SQUARED).setParallelism(parallelism).name("Join scores");
DataSet<Tuple3<K, DoubleValue, DoubleValue>> passThrough;
if (convergenceThreshold < Double.MAX_VALUE) {
// A finite threshold was configured: compare new scores to the previous
// iteration's scores and register an early-termination criterion on the
// aggregated change in scores.
passThrough = iterative.fullOuterJoin(scores, JoinHint.REPARTITION_SORT_MERGE).where(0).equalTo(0).with(new ChangeInScores<>()).setParallelism(parallelism).name("Change in scores");
iterative.registerAggregationConvergenceCriterion(CHANGE_IN_SCORES, new DoubleSumAggregator(), new ScoreConvergence(convergenceThreshold));
} else {
// No convergence check requested; iterate for the full maxIterations.
passThrough = scores;
}
// Close the iteration and translate the final triples into Result records.
return iterative.closeWith(passThrough).map(new TranslateResult<>()).setParallelism(parallelism).name("Map result");
}
Use of org.apache.flink.types.DoubleValue in project flink by apache.
From the class DoubleValueArrayTest, method testBoundedArray:
@Test
public void testBoundedArray() {
    // Number of elements that fit into an array with the default capacity.
    final int capacity = DoubleValueArray.DEFAULT_CAPACITY_IN_BYTES / DoubleValueArray.ELEMENT_LENGTH_IN_BYTES;

    final ValueArray<DoubleValue> array = new DoubleValueArray(DoubleValueArray.DEFAULT_CAPACITY_IN_BYTES);

    // fill the array up to capacity, checking size and fullness at each step
    for (int i = 0; i < capacity; i++) {
        assertFalse(array.isFull());
        assertEquals(i, array.size());
        assertTrue(array.add(new DoubleValue(i)));
        assertEquals(i + 1, array.size());
    }

    // the array must now report full with exactly 'capacity' elements
    assertTrue(array.isFull());
    assertEquals(capacity, array.size());

    // iterate and verify each stored value matches its insertion index
    int expected = 0;
    for (DoubleValue value : array) {
        assertEquals(expected++, value.getValue(), 0.000001);
    }

    // further additions past capacity must be rejected
    assertFalse(array.add(new DoubleValue(capacity)));
    assertFalse(array.addAll(array));

    // copy produces an equal array
    assertEquals(array, array.copy());

    // copyTo produces an equal array
    final DoubleValueArray target = new DoubleValueArray();
    array.copyTo(target);
    assertEquals(array, target);

    // clear empties the array
    array.clear();
    assertEquals(0, array.size());
}
Use of org.apache.flink.types.DoubleValue in project flink by apache.
From the class EitherSerializerTest, method testSerializeIndividually:
@Test
public void testSerializeIndividually() throws IOException {
    // Verify the serializer's instance-reuse behavior when deserializing
    // alternating Left/Right values into provided reuse objects.
    EitherTypeInfo<LongValue, DoubleValue> typeInfo = new EitherTypeInfo<>(ValueTypeInfo.LONG_VALUE_TYPE_INFO, ValueTypeInfo.DOUBLE_VALUE_TYPE_INFO);
    EitherSerializer<LongValue, DoubleValue> serializer = (EitherSerializer<LongValue, DoubleValue>) typeInfo.createSerializer(new ExecutionConfig());

    LongValue longValue = new LongValue();
    DoubleValue doubleValue = new DoubleValue();
    Either<LongValue, DoubleValue> left = Left(longValue);
    Either<LongValue, DoubleValue> right = Right(doubleValue);

    // serialize Left, Right, Left in sequence
    TestOutputView out = new TestOutputView();
    serializer.serialize(left, out);
    serializer.serialize(right, out);
    serializer.serialize(left, out);

    TestInputView in = out.getInputView();

    // deserializing a Left into a Right reuse object yields a new Left instance
    Either<LongValue, DoubleValue> copy0 = serializer.deserialize(right, in);
    // subsequent deserializations reuse the cross-referenced instances
    Either<LongValue, DoubleValue> copy1 = serializer.deserialize(copy0, in);
    Either<LongValue, DoubleValue> copy2 = serializer.deserialize(copy1, in);

    // the original Right and the new Left are reused on alternating reads
    assertSame(right, copy1);
    assertSame(copy0, copy2);

    // the wrapped value objects are reused as well
    assertSame(right.right(), copy1.right());
    assertSame(copy0.left(), copy2.left());
}
Aggregations