Example 11 with VarCharType

Use of org.apache.flink.table.types.logical.VarCharType in project flink by apache.

From class PythonTableFunctionOperatorTestBase, method getTestHarness.

private OneInputStreamOperatorTestHarness<IN, OUT> getTestHarness(
        Configuration config, JoinRelType joinRelType) throws Exception {
    RowType inputType = new RowType(Arrays.asList(
            new RowType.RowField("f1", new VarCharType()),
            new RowType.RowField("f2", new VarCharType()),
            new RowType.RowField("f3", new BigIntType())));
    RowType outputType = new RowType(Arrays.asList(
            new RowType.RowField("f1", new VarCharType()),
            new RowType.RowField("f2", new VarCharType()),
            new RowType.RowField("f3", new BigIntType()),
            new RowType.RowField("f4", new BigIntType())));
    PythonTableFunctionOperator operator = getTestOperator(
            config,
            new PythonFunctionInfo(
                    PythonScalarFunctionOperatorTestBase.DummyPythonFunction.INSTANCE,
                    new Integer[] {0}),
            inputType, outputType, new int[] {2}, joinRelType);
    OneInputStreamOperatorTestHarness<IN, OUT> testHarness =
            new OneInputStreamOperatorTestHarness<>(operator);
    testHarness.getStreamConfig()
            .setManagedMemoryFractionOperatorOfUseCase(ManagedMemoryUseCase.PYTHON, 0.5);
    return testHarness;
}
Also used : PythonFunctionInfo(org.apache.flink.table.functions.python.PythonFunctionInfo) RowType(org.apache.flink.table.types.logical.RowType) BigIntType(org.apache.flink.table.types.logical.BigIntType) VarCharType(org.apache.flink.table.types.logical.VarCharType) OneInputStreamOperatorTestHarness(org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness)
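A harness produced this way is normally opened, fed elements, and closed in the test body. The following is a minimal usage sketch; the INNER join type, the timestamp, and the commented-out element are illustrative and not part of the source:

// Hypothetical driver for the harness built above (IN/OUT as declared by the test base).
OneInputStreamOperatorTestHarness<IN, OUT> harness =
        getTestHarness(new Configuration(), JoinRelType.INNER);
harness.open();
// An input element must match the f1/f2/f3 (VARCHAR, VARCHAR, BIGINT) schema, e.g.:
// harness.processElement(new StreamRecord<>(row, 1000L));
// Emitted records accumulate in the harness output queue:
// harness.getOutput();
harness.close();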

Example 12 with VarCharType

Use of org.apache.flink.table.types.logical.VarCharType in project flink by apache.

From class HiveInspectors, method getConversion.

/**
 * Get the conversion for converting a Flink object to a Hive object, given an ObjectInspector
 * and the corresponding Flink LogicalType.
 */
public static HiveObjectConversion getConversion(ObjectInspector inspector, LogicalType dataType, HiveShim hiveShim) {
    if (inspector instanceof PrimitiveObjectInspector) {
        HiveObjectConversion conversion;
        if (inspector instanceof BooleanObjectInspector
                || inspector instanceof StringObjectInspector
                || inspector instanceof ByteObjectInspector
                || inspector instanceof ShortObjectInspector
                || inspector instanceof IntObjectInspector
                || inspector instanceof LongObjectInspector
                || inspector instanceof FloatObjectInspector
                || inspector instanceof DoubleObjectInspector
                || inspector instanceof BinaryObjectInspector
                || inspector instanceof VoidObjectInspector) {
            conversion = IdentityConversion.INSTANCE;
        } else if (inspector instanceof DateObjectInspector) {
            conversion = hiveShim::toHiveDate;
        } else if (inspector instanceof TimestampObjectInspector) {
            conversion = hiveShim::toHiveTimestamp;
        } else if (inspector instanceof HiveCharObjectInspector) {
            conversion = o -> o == null ? null : new HiveChar((String) o, ((CharType) dataType).getLength());
        } else if (inspector instanceof HiveVarcharObjectInspector) {
            conversion = o -> o == null ? null : new HiveVarchar((String) o, ((VarCharType) dataType).getLength());
        } else if (inspector instanceof HiveDecimalObjectInspector) {
            conversion = o -> o == null ? null : HiveDecimal.create((BigDecimal) o);
        } else {
            throw new FlinkHiveUDFException("Unsupported primitive object inspector " + inspector.getClass().getName());
        }
        // currently this happens for constant arguments for UDFs
        if (((PrimitiveObjectInspector) inspector).preferWritable()) {
            conversion = new WritableHiveObjectConversion(conversion, hiveShim);
        }
        return conversion;
    }
    if (inspector instanceof ListObjectInspector) {
        HiveObjectConversion eleConvert = getConversion(
                ((ListObjectInspector) inspector).getListElementObjectInspector(),
                ((ArrayType) dataType).getElementType(),
                hiveShim);
        return o -> {
            if (o == null) {
                return null;
            }
            Object[] array = (Object[]) o;
            List<Object> result = new ArrayList<>();
            for (Object ele : array) {
                result.add(eleConvert.toHiveObject(ele));
            }
            return result;
        };
    }
    if (inspector instanceof MapObjectInspector) {
        MapObjectInspector mapInspector = (MapObjectInspector) inspector;
        MapType kvType = (MapType) dataType;
        HiveObjectConversion keyConversion = getConversion(mapInspector.getMapKeyObjectInspector(), kvType.getKeyType(), hiveShim);
        HiveObjectConversion valueConversion = getConversion(mapInspector.getMapValueObjectInspector(), kvType.getValueType(), hiveShim);
        return o -> {
            if (o == null) {
                return null;
            }
            Map<Object, Object> map = (Map) o;
            Map<Object, Object> result = new HashMap<>(map.size());
            for (Map.Entry<Object, Object> entry : map.entrySet()) {
                result.put(keyConversion.toHiveObject(entry.getKey()), valueConversion.toHiveObject(entry.getValue()));
            }
            return result;
        };
    }
    if (inspector instanceof StructObjectInspector) {
        StructObjectInspector structInspector = (StructObjectInspector) inspector;
        List<? extends StructField> structFields = structInspector.getAllStructFieldRefs();
        List<RowType.RowField> rowFields = ((RowType) dataType).getFields();
        HiveObjectConversion[] conversions = new HiveObjectConversion[structFields.size()];
        for (int i = 0; i < structFields.size(); i++) {
            conversions[i] = getConversion(structFields.get(i).getFieldObjectInspector(), rowFields.get(i).getType(), hiveShim);
        }
        return o -> {
            if (o == null) {
                return null;
            }
            Row row = (Row) o;
            List<Object> result = new ArrayList<>(row.getArity());
            for (int i = 0; i < row.getArity(); i++) {
                result.add(conversions[i].toHiveObject(row.getField(i)));
            }
            return result;
        };
    }
    throw new FlinkHiveUDFException(String.format("Flink doesn't support object conversion for %s yet", inspector));
}
Also used : DataType(org.apache.flink.table.types.DataType) VoidObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.VoidObjectInspector) Array(java.lang.reflect.Array) WritableConstantHiveVarcharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantHiveVarcharObjectInspector) StandardStructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector) WritableConstantTimestampObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantTimestampObjectInspector) MapType(org.apache.flink.table.types.logical.MapType) WritableConstantDateObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantDateObjectInspector) HiveDecimalObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector) WritableConstantByteObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantByteObjectInspector) CharType(org.apache.flink.table.types.logical.CharType) HiveChar(org.apache.hadoop.hive.common.type.HiveChar) BooleanObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector) MapTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo) BigDecimal(java.math.BigDecimal) ShortObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector) Map(java.util.Map) WritableConstantLongObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantLongObjectInspector) StructTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) HiveTypeUtil(org.apache.flink.table.catalog.hive.util.HiveTypeUtil) MapObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector) WritableConstantHiveCharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantHiveCharObjectInspector) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) FloatObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector) StringObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector) WritableConstantBinaryObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantBinaryObjectInspector) List(java.util.List) HiveReflectionUtils(org.apache.flink.table.catalog.hive.util.HiveReflectionUtils) LogicalType(org.apache.flink.table.types.logical.LogicalType) VarcharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo) WritableConstantDoubleObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantDoubleObjectInspector) WritableConstantFloatObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantFloatObjectInspector) WritableConstantBooleanObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantBooleanObjectInspector) Row(org.apache.flink.types.Row) DateObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector) StructField(org.apache.hadoop.hive.serde2.objectinspector.StructField) TypeInfoUtils(org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils) ListObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector) HiveVarcharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveVarcharObjectInspector) HiveCharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveCharObjectInspector) HashMap(java.util.HashMap) IntObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector) RowType(org.apache.flink.table.types.logical.RowType) WritableConstantHiveDecimalObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantHiveDecimalObjectInspector) Constructor(java.lang.reflect.Constructor) HiveShim(org.apache.flink.table.catalog.hive.client.HiveShim) ArrayList(java.util.ArrayList) HiveVarchar(org.apache.hadoop.hive.common.type.HiveVarchar) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) ListTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo) LongObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector) Nonnull(javax.annotation.Nonnull) WritableConstantShortObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantShortObjectInspector) BinaryObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspector) WritableConstantStringObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantStringObjectInspector) VarCharType(org.apache.flink.table.types.logical.VarCharType) ArrayType(org.apache.flink.table.types.logical.ArrayType) DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) FlinkHiveUDFException(org.apache.flink.table.functions.hive.FlinkHiveUDFException) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) WritableConstantIntObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantIntObjectInspector) ObjectInspectorFactory(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory) PrimitiveObjectInspectorFactory(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory) HiveDecimal(org.apache.hadoop.hive.common.type.HiveDecimal) ByteObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.ByteObjectInspector) DoubleObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector) Internal(org.apache.flink.annotation.Internal) TimestampObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector) CatalogException(org.apache.flink.table.catalog.exceptions.CatalogException) CharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo)
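The primitive branches above all follow the same null-safe lambda shape. A minimal sketch of that pattern in isolation, assuming Flink's HiveObjectConversion functional interface (a single toHiveObject(Object) method) and an illustrative VARCHAR length of 10:

import org.apache.flink.table.functions.hive.conversion.HiveObjectConversion;
import org.apache.hadoop.hive.common.type.HiveVarchar;

public class VarcharConversionSketch {
    public static void main(String[] args) {
        // Mirrors the HiveVarcharObjectInspector branch above; the length 10 is illustrative.
        HiveObjectConversion conversion =
                o -> o == null ? null : new HiveVarchar((String) o, 10);
        System.out.println(conversion.toHiveObject("hello")); // prints "hello" (a HiveVarchar)
        System.out.println(conversion.toHiveObject(null));    // prints "null"
    }
}

Keeping the null check inside the lambda lets the composite conversions (array, map, struct) apply element conversions without special-casing nulls at every level.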

Example 13 with VarCharType

Use of org.apache.flink.table.types.logical.VarCharType in project flink by apache.

From class OrcBulkRowDataWriterTest, method initInput.

@Before
public void initInput() {
    input = new ArrayList<>();
    fieldTypes = new LogicalType[4];
    fieldTypes[0] = new VarCharType();
    fieldTypes[1] = new IntType();
    List<RowType.RowField> arrayRowFieldList =
            Collections.singletonList(new RowType.RowField("_col2_col0", new VarCharType()));
    fieldTypes[2] = new ArrayType(new RowType(arrayRowFieldList));
    List<RowType.RowField> mapRowFieldList = Arrays.asList(
            new RowType.RowField("_col3_col0", new VarCharType()),
            new RowType.RowField("_col3_col1", new TimestampType()));
    fieldTypes[3] = new MapType(new VarCharType(), new RowType(mapRowFieldList));
    {
        GenericRowData rowData = new GenericRowData(4);
        rowData.setField(0, new BinaryStringData("_col_0_string_1"));
        rowData.setField(1, 1);
        GenericRowData arrayValue1 = new GenericRowData(1);
        arrayValue1.setField(0, new BinaryStringData("_col_2_row_0_string_1"));
        GenericRowData arrayValue2 = new GenericRowData(1);
        arrayValue2.setField(0, new BinaryStringData("_col_2_row_1_string_1"));
        GenericArrayData arrayData = new GenericArrayData(new Object[] { arrayValue1, arrayValue2 });
        rowData.setField(2, arrayData);
        GenericRowData mapValue1 = new GenericRowData(2);
        mapValue1.setField(0, new BinaryStringData("_col_3_map_value_string_1"));
        mapValue1.setField(1, TimestampData.fromTimestamp(new Timestamp(3600000)));
        Map<StringData, RowData> mapDataMap = new HashMap<>();
        mapDataMap.put(new BinaryStringData("_col_3_map_key_1"), mapValue1);
        GenericMapData mapData = new GenericMapData(mapDataMap);
        rowData.setField(3, mapData);
        input.add(rowData);
    }
    {
        GenericRowData rowData = new GenericRowData(4);
        rowData.setField(0, new BinaryStringData("_col_0_string_2"));
        rowData.setField(1, 2);
        GenericRowData arrayValue1 = new GenericRowData(1);
        arrayValue1.setField(0, new BinaryStringData("_col_2_row_0_string_2"));
        GenericRowData arrayValue2 = new GenericRowData(1);
        arrayValue2.setField(0, new BinaryStringData("_col_2_row_1_string_2"));
        GenericArrayData arrayData = new GenericArrayData(new Object[] { arrayValue1, arrayValue2 });
        rowData.setField(2, arrayData);
        GenericRowData mapValue1 = new GenericRowData(2);
        mapValue1.setField(0, new BinaryStringData("_col_3_map_value_string_2"));
        mapValue1.setField(1, TimestampData.fromTimestamp(new Timestamp(3600000)));
        Map<StringData, RowData> mapDataMap = new HashMap<>();
        mapDataMap.put(new BinaryStringData("_col_3_map_key_2"), mapValue1);
        GenericMapData mapData = new GenericMapData(mapDataMap);
        rowData.setField(3, mapData);
        input.add(rowData);
    }
}
Also used : GenericMapData(org.apache.flink.table.data.GenericMapData) GenericArrayData(org.apache.flink.table.data.GenericArrayData) RowType(org.apache.flink.table.types.logical.RowType) Timestamp(java.sql.Timestamp) MapType(org.apache.flink.table.types.logical.MapType) IntType(org.apache.flink.table.types.logical.IntType) ArrayType(org.apache.flink.table.types.logical.ArrayType) TimestampType(org.apache.flink.table.types.logical.TimestampType) GenericRowData(org.apache.flink.table.data.GenericRowData) VarCharType(org.apache.flink.table.types.logical.VarCharType) BinaryStringData(org.apache.flink.table.data.binary.BinaryStringData) Map(java.util.Map) HashMap(java.util.HashMap) Before(org.junit.Before)
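The two initializer blocks differ only in the numeric suffix of their values, so the duplication could be collapsed into a helper. A sketch of that refactoring (the createRow helper is hypothetical, not part of the test):

// Hypothetical helper collapsing the two duplicated blocks above.
private static GenericRowData createRow(int id) {
    GenericRowData rowData = new GenericRowData(4);
    rowData.setField(0, new BinaryStringData("_col_0_string_" + id));
    rowData.setField(1, id);
    GenericRowData arrayValue1 = new GenericRowData(1);
    arrayValue1.setField(0, new BinaryStringData("_col_2_row_0_string_" + id));
    GenericRowData arrayValue2 = new GenericRowData(1);
    arrayValue2.setField(0, new BinaryStringData("_col_2_row_1_string_" + id));
    rowData.setField(2, new GenericArrayData(new Object[] {arrayValue1, arrayValue2}));
    GenericRowData mapValue = new GenericRowData(2);
    mapValue.setField(0, new BinaryStringData("_col_3_map_value_string_" + id));
    mapValue.setField(1, TimestampData.fromTimestamp(new Timestamp(3600000)));
    Map<StringData, RowData> mapDataMap = new HashMap<>();
    mapDataMap.put(new BinaryStringData("_col_3_map_key_" + id), mapValue);
    rowData.setField(3, new GenericMapData(mapDataMap));
    return rowData;
}
// initInput() would then reduce to: input.add(createRow(1)); input.add(createRow(2));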

Example 14 with VarCharType

Use of org.apache.flink.table.types.logical.VarCharType in project flink by apache.

From class OrcFileSystemITCase, method initNestedTypesFile.

private String initNestedTypesFile(List<RowData> data) throws Exception {
    LogicalType[] fieldTypes = new LogicalType[4];
    fieldTypes[0] = new VarCharType();
    fieldTypes[1] = new IntType();
    List<RowType.RowField> arrayRowFieldList =
            Collections.singletonList(new RowType.RowField("_col2_col0", new VarCharType()));
    fieldTypes[2] = new ArrayType(new RowType(arrayRowFieldList));
    List<RowType.RowField> mapRowFieldList = Arrays.asList(
            new RowType.RowField("_col3_col0", new VarCharType()),
            new RowType.RowField("_col3_col1", new TimestampType()));
    fieldTypes[3] = new MapType(new VarCharType(), new RowType(mapRowFieldList));
    String schema = "struct<_col0:string,_col1:int,_col2:array<struct<_col2_col0:string>>," + "_col3:map<string,struct<_col3_col0:string,_col3_col1:timestamp>>>";
    File outDir = TEMPORARY_FOLDER.newFolder();
    Properties writerProps = new Properties();
    writerProps.setProperty("orc.compress", "LZ4");
    final OrcBulkWriterFactory<RowData> writer = new OrcBulkWriterFactory<>(
            new RowDataVectorizer(schema, fieldTypes), writerProps, new Configuration());
    StreamingFileSink<RowData> sink = StreamingFileSink
            .forBulkFormat(new org.apache.flink.core.fs.Path(outDir.toURI()), writer)
            .withBucketCheckInterval(10000)
            .build();
    try (OneInputStreamOperatorTestHarness<RowData, Object> testHarness = new OneInputStreamOperatorTestHarness<>(new StreamSink<>(sink), 1, 1, 0)) {
        testHarness.setup();
        testHarness.open();
        int time = 0;
        for (final RowData record : data) {
            testHarness.processElement(record, ++time);
        }
        testHarness.snapshot(1, ++time);
        testHarness.notifyOfCompletedCheckpoint(1);
    }
    return outDir.getAbsolutePath();
}
Also used : Configuration(org.apache.hadoop.conf.Configuration) LogicalType(org.apache.flink.table.types.logical.LogicalType) RowType(org.apache.flink.table.types.logical.RowType) Properties(java.util.Properties) MapType(org.apache.flink.table.types.logical.MapType) IntType(org.apache.flink.table.types.logical.IntType) ArrayType(org.apache.flink.table.types.logical.ArrayType) OrcBulkWriterFactory(org.apache.flink.orc.writer.OrcBulkWriterFactory) GenericRowData(org.apache.flink.table.data.GenericRowData) RowData(org.apache.flink.table.data.RowData) RowDataVectorizer(org.apache.flink.orc.vector.RowDataVectorizer) TimestampType(org.apache.flink.table.types.logical.TimestampType) VarCharType(org.apache.flink.table.types.logical.VarCharType) OneInputStreamOperatorTestHarness(org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness) OrcFile(org.apache.orc.OrcFile) File(java.io.File)
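In the ITCase, the returned directory typically backs an ORC filesystem table for subsequent SQL queries. A hedged sketch of such a registration; tEnv (a TableEnvironment), the table name, and the TIMESTAMP precision are assumptions, not from the source:

// Hypothetical follow-up: expose the written ORC files to SQL via a filesystem table.
String path = initNestedTypesFile(data);
tEnv.executeSql(
        "create table orc_nested ("
                + " _col0 string, _col1 int,"
                + " _col2 array<row<_col2_col0 string>>,"
                + " _col3 map<string, row<_col3_col0 string, _col3_col1 timestamp(9)>>"
                + ") with ("
                + " 'connector' = 'filesystem',"
                + " 'path' = '" + path + "',"
                + " 'format' = 'orc')");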

Example 15 with VarCharType

Use of org.apache.flink.table.types.logical.VarCharType in project flink by apache.

From class ArrowReaderWriterTest, method init.

@BeforeClass
public static void init() {
    fieldTypes.add(new TinyIntType());
    fieldTypes.add(new SmallIntType());
    fieldTypes.add(new IntType());
    fieldTypes.add(new BigIntType());
    fieldTypes.add(new BooleanType());
    fieldTypes.add(new FloatType());
    fieldTypes.add(new DoubleType());
    fieldTypes.add(new VarCharType());
    fieldTypes.add(new VarBinaryType());
    fieldTypes.add(new DecimalType(10, 3));
    fieldTypes.add(new DateType());
    fieldTypes.add(new TimeType(0));
    fieldTypes.add(new TimeType(2));
    fieldTypes.add(new TimeType(4));
    fieldTypes.add(new TimeType(8));
    fieldTypes.add(new LocalZonedTimestampType(0));
    fieldTypes.add(new LocalZonedTimestampType(2));
    fieldTypes.add(new LocalZonedTimestampType(4));
    fieldTypes.add(new LocalZonedTimestampType(8));
    fieldTypes.add(new TimestampType(0));
    fieldTypes.add(new TimestampType(2));
    fieldTypes.add(new TimestampType(4));
    fieldTypes.add(new TimestampType(8));
    fieldTypes.add(new ArrayType(new VarCharType()));
    rowFieldType = new RowType(Arrays.asList(
            new RowType.RowField("a", new IntType()),
            new RowType.RowField("b", new VarCharType()),
            new RowType.RowField("c", new ArrayType(new VarCharType())),
            new RowType.RowField("d", new TimestampType(2)),
            new RowType.RowField("e", new RowType(Arrays.asList(
                    new RowType.RowField("e1", new IntType()),
                    new RowType.RowField("e2", new VarCharType()))))));
    fieldTypes.add(rowFieldType);
    List<RowType.RowField> rowFields = new ArrayList<>();
    for (int i = 0; i < fieldTypes.size(); i++) {
        rowFields.add(new RowType.RowField("f" + i, fieldTypes.get(i)));
    }
    rowType = new RowType(rowFields);
    allocator = ArrowUtils.getRootAllocator().newChildAllocator("stdout", 0, Long.MAX_VALUE);
}
Also used : VarBinaryType(org.apache.flink.table.types.logical.VarBinaryType) BooleanType(org.apache.flink.table.types.logical.BooleanType) LocalZonedTimestampType(org.apache.flink.table.types.logical.LocalZonedTimestampType) ArrayList(java.util.ArrayList) BigIntType(org.apache.flink.table.types.logical.BigIntType) RowType(org.apache.flink.table.types.logical.RowType) TinyIntType(org.apache.flink.table.types.logical.TinyIntType) TinyIntType(org.apache.flink.table.types.logical.TinyIntType) IntType(org.apache.flink.table.types.logical.IntType) BigIntType(org.apache.flink.table.types.logical.BigIntType) SmallIntType(org.apache.flink.table.types.logical.SmallIntType) FloatType(org.apache.flink.table.types.logical.FloatType) TimeType(org.apache.flink.table.types.logical.TimeType) ArrayType(org.apache.flink.table.types.logical.ArrayType) SmallIntType(org.apache.flink.table.types.logical.SmallIntType) DoubleType(org.apache.flink.table.types.logical.DoubleType) DecimalType(org.apache.flink.table.types.logical.DecimalType) TimestampType(org.apache.flink.table.types.logical.TimestampType) LocalZonedTimestampType(org.apache.flink.table.types.logical.LocalZonedTimestampType) VarCharType(org.apache.flink.table.types.logical.VarCharType) DateType(org.apache.flink.table.types.logical.DateType) BeforeClass(org.junit.BeforeClass)
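Two details worth noting: the no-argument VarCharType() used throughout these examples defaults to VARCHAR(1), and for flat rows with generated field names Flink also offers the RowType.of shortcut, which assigns f0, f1, ... automatically. A small sketch:

// RowType.of generates field names f0, f1, ... for the given types.
RowType compact = RowType.of(new IntType(), new VarCharType(), new ArrayType(new VarCharType()));
// Expected summary (approximate): ROW<`f0` INT, `f1` VARCHAR(1), `f2` ARRAY<VARCHAR(1)>>
System.out.println(compact.asSummaryString());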

Aggregations

VarCharType (org.apache.flink.table.types.logical.VarCharType): 20 usages
RowType (org.apache.flink.table.types.logical.RowType): 11 usages
BigIntType (org.apache.flink.table.types.logical.BigIntType): 10 usages
IntType (org.apache.flink.table.types.logical.IntType): 10 usages
LogicalType (org.apache.flink.table.types.logical.LogicalType): 10 usages
TimestampType (org.apache.flink.table.types.logical.TimestampType): 10 usages
ArrayList (java.util.ArrayList): 7 usages
ArrayType (org.apache.flink.table.types.logical.ArrayType): 7 usages
DecimalType (org.apache.flink.table.types.logical.DecimalType): 7 usages
DoubleType (org.apache.flink.table.types.logical.DoubleType): 7 usages
SmallIntType (org.apache.flink.table.types.logical.SmallIntType): 7 usages
TinyIntType (org.apache.flink.table.types.logical.TinyIntType): 7 usages
BooleanType (org.apache.flink.table.types.logical.BooleanType): 6 usages
FloatType (org.apache.flink.table.types.logical.FloatType): 6 usages
DateType (org.apache.flink.table.types.logical.DateType): 5 usages
MapType (org.apache.flink.table.types.logical.MapType): 5 usages
GenericRowData (org.apache.flink.table.data.GenericRowData): 4 usages
LocalZonedTimestampType (org.apache.flink.table.types.logical.LocalZonedTimestampType): 4 usages
VarBinaryType (org.apache.flink.table.types.logical.VarBinaryType): 4 usages
Configuration (org.apache.hadoop.conf.Configuration): 4 usages