Example 11 with StringData

Use of org.apache.flink.table.data.StringData in project flink by apache.

The class SortUtilTest, method testNormalizedKey.

@Test
public void testNormalizedKey() {
    int len = 10;
    Random random = new Random();
    MemorySegment[] segments = new MemorySegment[len];
    MemorySegment[] compareSegs = new MemorySegment[len];
    for (int i = 0; i < len; i++) {
        segments[i] = MemorySegmentFactory.allocateUnpooledSegment(20);
        compareSegs[i] = MemorySegmentFactory.allocateUnpooledSegment(20);
    }
    {
        SortUtil.minNormalizedKey(segments[0], 0, 20);
        SortUtil.maxNormalizedKey(segments[1], 0, 20);
        for (int i = 0; i < len; i++) {
            byte[] rndBytes = new byte[20];
            random.nextBytes(rndBytes);
            segments[2].put(0, rndBytes);
            Assert.assertTrue(segments[0].compare(segments[2], 0, 0, 20) <= 0);
            Assert.assertTrue(segments[1].compare(segments[2], 0, 0, 20) >= 0);
        }
    }
    {
        DecimalData[] arr = new DecimalData[len];
        for (int i = 0; i < len; i++) {
            arr[i] = DecimalData.fromBigDecimal(new BigDecimal(random.nextInt()), 18, 0);
            SortUtil.putDecimalNormalizedKey(arr[i], segments[i], 0, 8);
        }
        Arrays.sort(arr, DecimalData::compareTo);
        for (int i = 0; i < len; i++) {
            SortUtil.putDecimalNormalizedKey(arr[i], compareSegs[i], 0, 8);
        }
        Arrays.sort(segments, (o1, o2) -> o1.compare(o2, 0, 0, 8));
        for (int i = 0; i < len; i++) {
            Assert.assertTrue(compareSegs[i].equalTo(segments[i], 0, 0, 8));
        }
    }
    {
        Float[] arr = new Float[len];
        for (int i = 0; i < len; i++) {
            arr[i] = random.nextFloat();
            SortUtil.putFloatNormalizedKey(arr[i], segments[i], 0, 4);
        }
        Arrays.sort(arr, Float::compareTo);
        for (int i = 0; i < len; i++) {
            SortUtil.putFloatNormalizedKey(arr[i], compareSegs[i], 0, 4);
        }
        Arrays.sort(segments, (o1, o2) -> o1.compare(o2, 0, 0, 4));
        for (int i = 0; i < len; i++) {
            Assert.assertTrue(compareSegs[i].equalTo(segments[i], 0, 0, 4));
        }
    }
    {
        Double[] arr = new Double[len];
        for (int i = 0; i < len; i++) {
            arr[i] = random.nextDouble();
            SortUtil.putDoubleNormalizedKey(arr[i], segments[i], 0, 8);
        }
        Arrays.sort(arr, Double::compareTo);
        for (int i = 0; i < len; i++) {
            SortUtil.putDoubleNormalizedKey(arr[i], compareSegs[i], 0, 8);
        }
        Arrays.sort(segments, (o1, o2) -> o1.compare(o2, 0, 0, 8));
        for (int i = 0; i < len; i++) {
            Assert.assertTrue(compareSegs[i].equalTo(segments[i], 0, 0, 8));
        }
    }
    {
        BinaryStringData[] arr = new BinaryStringData[len];
        for (int i = 0; i < len; i++) {
            arr[i] = BinaryStringData.fromString(String.valueOf(random.nextLong()));
            SortUtil.putStringNormalizedKey(arr[i], segments[i], 0, 8);
        }
        Arrays.sort(arr, StringData::compareTo);
        for (int i = 0; i < len; i++) {
            SortUtil.putStringNormalizedKey(arr[i], compareSegs[i], 0, 8);
        }
        Arrays.sort(segments, (o1, o2) -> o1.compare(o2, 0, 0, 8));
        for (int i = 0; i < len; i++) {
            Assert.assertTrue(compareSegs[i].equalTo(segments[i], 0, 0, 8));
        }
    }
}
Also used : MemorySegment(org.apache.flink.core.memory.MemorySegment) BinaryStringData(org.apache.flink.table.data.binary.BinaryStringData) MemorySegmentFactory(org.apache.flink.core.memory.MemorySegmentFactory) StringData(org.apache.flink.table.data.StringData) BigDecimal(java.math.BigDecimal) Arrays(java.util.Arrays) DecimalData(org.apache.flink.table.data.DecimalData) Test(org.junit.Test) Random(java.util.Random) Assert(org.junit.Assert)
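
The test above exercises the normalized-key contract for several types at once. As a minimal sketch of the same contract for strings only (not part of the Flink sources; the method name, the two sample strings, and the 8-byte prefix length are made up), the key written by SortUtil.putStringNormalizedKey is a fixed-length, zero-padded byte prefix whose unsigned byte-wise order agrees with StringData.compareTo:

@Test
public void testStringNormalizedKeySketch() {
    // Hypothetical test method, assuming the same classes used in the example above
    // (SortUtil, MemorySegmentFactory, BinaryStringData, MemorySegment, Assert).
    MemorySegment a = MemorySegmentFactory.allocateUnpooledSegment(8);
    MemorySegment b = MemorySegmentFactory.allocateUnpooledSegment(8);
    BinaryStringData s1 = BinaryStringData.fromString("apple");
    BinaryStringData s2 = BinaryStringData.fromString("banana");
    // Write the fixed-length normalized keys into the two segments.
    SortUtil.putStringNormalizedKey(s1, a, 0, 8);
    SortUtil.putStringNormalizedKey(s2, b, 0, 8);
    // The byte-wise order of the prefixes matches the logical order of the strings;
    // equal prefixes would only mean "undecided" and require a full comparison.
    Assert.assertEquals(
            Integer.signum(s1.compareTo(s2)),
            Integer.signum(a.compare(b, 0, 0, 8)));
}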

Example 12 with StringData

Use of org.apache.flink.table.data.StringData in project flink by apache.

The class EqualiserCodeGeneratorTest, method testManyFields.

@Test
public void testManyFields() {
    final LogicalType[] fieldTypes =
            IntStream.range(0, 499).mapToObj(i -> new VarCharType()).toArray(LogicalType[]::new);
    final RecordEqualiser equaliser =
            new EqualiserCodeGenerator(fieldTypes)
                    .generateRecordEqualiser("ManyFields")
                    .newInstance(Thread.currentThread().getContextClassLoader());
    final StringData[] fields =
            IntStream.range(0, 499)
                    .mapToObj(i -> StringData.fromString("Entry " + i))
                    .toArray(StringData[]::new);
    assertTrue(
            equaliser.equals(
                    GenericRowData.of((Object[]) fields), GenericRowData.of((Object[]) fields)));
}
Also used : Types(org.apache.flink.api.common.typeinfo.Types) TypeInformationRawType(org.apache.flink.table.types.logical.TypeInformationRawType) IntStream(java.util.stream.IntStream) RecordEqualiser(org.apache.flink.table.runtime.generated.RecordEqualiser) TimestampData(org.apache.flink.table.data.TimestampData) Assert.assertTrue(org.junit.Assert.assertTrue) VarCharType(org.apache.flink.table.types.logical.VarCharType) Test(org.junit.Test) BinaryRowData(org.apache.flink.table.data.binary.BinaryRowData) Function(java.util.function.Function) TimestampData.fromEpochMillis(org.apache.flink.table.data.TimestampData.fromEpochMillis) IntSerializer(org.apache.flink.api.common.typeutils.base.IntSerializer) StringData(org.apache.flink.table.data.StringData) BinaryRowWriter(org.apache.flink.table.data.writer.BinaryRowWriter) TimestampType(org.apache.flink.table.types.logical.TimestampType) RawValueDataSerializer(org.apache.flink.table.runtime.typeutils.RawValueDataSerializer) GenericRowData(org.apache.flink.table.data.GenericRowData) LogicalType(org.apache.flink.table.types.logical.LogicalType) RawValueData(org.apache.flink.table.data.RawValueData) Assert(org.junit.Assert)
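
For contrast, a minimal sketch of the negative case (not part of the Flink sources; the method name, the field index, and the replacement value are made up): with the same generator setup, changing a single StringData field should make the generated equaliser report the two rows as unequal.

@Test
public void testManyFieldsOneDifferent() {
    final LogicalType[] fieldTypes =
            IntStream.range(0, 499).mapToObj(i -> new VarCharType()).toArray(LogicalType[]::new);
    final RecordEqualiser equaliser =
            new EqualiserCodeGenerator(fieldTypes)
                    .generateRecordEqualiser("ManyFieldsOneDifferent")
                    .newInstance(Thread.currentThread().getContextClassLoader());
    final StringData[] left =
            IntStream.range(0, 499)
                    .mapToObj(i -> StringData.fromString("Entry " + i))
                    .toArray(StringData[]::new);
    final StringData[] right = left.clone();
    // Change exactly one field; the generated equaliser should now see the rows as unequal.
    right[42] = StringData.fromString("changed");
    Assert.assertFalse(
            equaliser.equals(GenericRowData.of((Object[]) left), GenericRowData.of((Object[]) right)));
}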

Example 13 with StringData

Use of org.apache.flink.table.data.StringData in project flink by apache.

The class ListAggWsWithRetractAggFunctionTest, method retractValues.

@Override
protected void retractValues(ListAggWsWithRetractAccumulator accumulator, List<StringData> values)
        throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
    AggregateFunction<StringData, ListAggWsWithRetractAccumulator> aggregator = getAggregator();
    Method retractFunc = getRetractFunc();
    Preconditions.checkArgument(values.size() % 2 == 0, "number of values must be an integer multiple of 2.");
    for (int i = 0; i < values.size(); i += 2) {
        StringData value = values.get(i + 1);
        StringData delimiter = values.get(i);
        retractFunc.invoke(aggregator, accumulator, delimiter, value);
    }
}
Also used : ListAggWsWithRetractAccumulator(org.apache.flink.table.runtime.functions.aggregate.ListAggWsWithRetractAggFunction.ListAggWsWithRetractAccumulator) Method(java.lang.reflect.Method) StringData(org.apache.flink.table.data.StringData)

Example 14 with StringData

Use of org.apache.flink.table.data.StringData in project flink by apache.

The class ListAggWsWithRetractAggFunctionTest, method accumulateValues.

@Override
protected ListAggWsWithRetractAccumulator accumulateValues(List<StringData> values)
        throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
    AggregateFunction<StringData, ListAggWsWithRetractAccumulator> aggregator = getAggregator();
    ListAggWsWithRetractAccumulator accumulator = getAggregator().createAccumulator();
    Method accumulateFunc = getAccumulateFunc();
    Preconditions.checkArgument(values.size() % 2 == 0, "number of values must be an integer multiple of 2.");
    for (int i = 0; i < values.size(); i += 2) {
        StringData value = values.get(i + 1);
        StringData delimiter = values.get(i);
        accumulateFunc.invoke(aggregator, accumulator, delimiter, value);
    }
    return accumulator;
}
Also used : ListAggWsWithRetractAccumulator(org.apache.flink.table.runtime.functions.aggregate.ListAggWsWithRetractAggFunction.ListAggWsWithRetractAccumulator) Method(java.lang.reflect.Method) StringData(org.apache.flink.table.data.StringData)
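
Both helpers above consume the values list as (delimiter, value) pairs: even indices hold the delimiter and odd indices hold the value, which is why the length must be a multiple of 2. A hedged sketch of how such a list could be built and driven from inside the same test class (the sample strings are illustrative, and the snippet would need to sit in a method that declares the same checked exceptions):

// Interleaved (delimiter, value) pairs; the list length must be even.
List<StringData> values =
        Arrays.asList(
                StringData.fromString(","), StringData.fromString("a"),
                StringData.fromString(","), StringData.fromString("b"),
                StringData.fromString(","), StringData.fromString("c"));
ListAggWsWithRetractAccumulator acc = accumulateValues(values);
// Retracting the exact same pairs reverses the accumulation performed above.
retractValues(acc, values);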

Aggregations

StringData (org.apache.flink.table.data.StringData): 14
GenericRowData (org.apache.flink.table.data.GenericRowData): 5
BinaryStringData (org.apache.flink.table.data.binary.BinaryStringData): 5
GenericArrayData (org.apache.flink.table.data.GenericArrayData): 3
Test (org.junit.Test): 3
Method (java.lang.reflect.Method): 2
ArrayList (java.util.ArrayList): 2
JsonNode (org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.JsonNode): 2
RawValue (org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.util.RawValue): 2
TableException (org.apache.flink.table.api.TableException): 2
GenericMapData (org.apache.flink.table.data.GenericMapData): 2
RowData (org.apache.flink.table.data.RowData): 2
TimestampData (org.apache.flink.table.data.TimestampData): 2
BinaryRowData (org.apache.flink.table.data.binary.BinaryRowData): 2
ListAggWsWithRetractAccumulator (org.apache.flink.table.runtime.functions.aggregate.ListAggWsWithRetractAggFunction.ListAggWsWithRetractAccumulator): 2
VarCharType (org.apache.flink.table.types.logical.VarCharType): 2
BytesColumnVector (org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector): 2
StructColumnVector (org.apache.hadoop.hive.ql.exec.vector.StructColumnVector): 2
Assert (org.junit.Assert): 2
BigDecimal (java.math.BigDecimal): 1