Use of org.apache.flink.table.types.logical.IntType in project flink by apache.
Class ParquetColumnarRowInputFormatTest, method testProjection.
@Test
public void testProjection() throws IOException {
    int number = 1000;
    List<Row> records = new ArrayList<>(number);
    for (int i = 0; i < number; i++) {
        Integer v = i;
        records.add(newRow(v));
    }
    Path testPath =
            createTempParquetFile(TEMPORARY_FOLDER.newFolder(), PARQUET_SCHEMA, records, rowGroupSize);

    // Test the reader with a three-column projection.
    LogicalType[] fieldTypes =
            new LogicalType[] {new DoubleType(), new TinyIntType(), new IntType()};
    ParquetColumnarRowInputFormat<FileSourceSplit> format =
            new ParquetColumnarRowInputFormat<>(
                    new Configuration(),
                    RowType.of(fieldTypes, new String[] {"f7", "f2", "f4"}),
                    null,
                    500,
                    false,
                    true);
    AtomicInteger cnt = new AtomicInteger(0);
    forEachRemaining(
            format.createReader(
                    EMPTY_CONF,
                    new FileSourceSplit("id", testPath, 0, Long.MAX_VALUE, 0, Long.MAX_VALUE)),
            row -> {
                int i = cnt.get();
                assertEquals(i, row.getDouble(0), 0);
                assertEquals((byte) i, row.getByte(1));
                assertEquals(i, row.getInt(2));
                cnt.incrementAndGet();
            });
}
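As a quick aside to the projected types above, a minimal sketch (not part of the test) of how IntType behaves as a logical type; nullability is the only constructor knob:

import org.apache.flink.table.types.logical.IntType;

// Minimal sketch: IntType is nullable by default; pass false to get INT NOT NULL.
IntType nullable = new IntType();
IntType notNull = new IntType(false);
System.out.println(nullable.asSerializableString()); // prints "INT"
System.out.println(notNull.asSerializableString()); // prints "INT NOT NULL"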
Use of org.apache.flink.table.types.logical.IntType in project flink by apache.
Class ParquetColumnarRowInputFormatTest, method testProjectionReadUnknownField.
@Test
public void testProjectionReadUnknownField() throws IOException {
    int number = 1000;
    List<Row> records = new ArrayList<>(number);
    for (int i = 0; i < number; i++) {
        Integer v = i;
        records.add(newRow(v));
    }
    Path testPath =
            createTempParquetFile(TEMPORARY_FOLDER.newFolder(), PARQUET_SCHEMA, records, rowGroupSize);

    // Test the reader with a projection that includes a field missing from the file.
    LogicalType[] fieldTypes =
            new LogicalType[] {
                new DoubleType(), new TinyIntType(), new IntType(), new VarCharType()
            };
    ParquetColumnarRowInputFormat<FileSourceSplit> format =
            new ParquetColumnarRowInputFormat<>(
                    new Configuration(),
                    // f99 does not exist in the parquet file.
                    RowType.of(fieldTypes, new String[] {"f7", "f2", "f4", "f99"}),
                    null,
                    500,
                    false,
                    true);
    AtomicInteger cnt = new AtomicInteger(0);
    forEachRemaining(
            format.createReader(
                    EMPTY_CONF,
                    new FileSourceSplit("id", testPath, 0, Long.MAX_VALUE, 0, Long.MAX_VALUE)),
            row -> {
                int i = cnt.get();
                assertEquals(i, row.getDouble(0), 0);
                assertEquals((byte) i, row.getByte(1));
                assertEquals(i, row.getInt(2));
                assertTrue(row.isNullAt(3));
                cnt.incrementAndGet();
            });
}
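A column projected under a name the file does not contain comes back as all nulls rather than failing the read. A hedged sketch (reusing the row variable from a reader loop like the one above) of guarding such a column through a field getter instead of isNullAt:

// Sketch: a nullable field getter returns null for the missing "f99" column.
RowData.FieldGetter f99Getter = RowData.createFieldGetter(new VarCharType(), 3);
Object f99 = f99Getter.getFieldOrNull(row); // always null in this test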
Use of org.apache.flink.table.types.logical.IntType in project flink by apache.
Class PythonTypeUtilsTest, method testLogicalTypeToDataConverter.
@Test
public void testLogicalTypeToDataConverter() {
    PythonTypeUtils.DataConverter converter = PythonTypeUtils.toDataConverter(new IntType());
    GenericRowData data = new GenericRowData(1);
    data.setField(0, 10);
    Object externalData = converter.toExternal(data, 0);
    assertTrue(externalData instanceof Long);
    assertEquals(10L, externalData); // expected value first, per the JUnit convention
}
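Note that the converter widens the internal int to a Long for the external (Python) side; the internal representation of an INT field stays a plain int, as this small sketch of direct RowData access shows:

// Sketch: reading the same INT field without the converter yields a primitive int.
GenericRowData data = new GenericRowData(1);
data.setField(0, 10); // internal representation of INT is a Java Integer/int
int internal = data.getInt(0); // reads back 10 as a primitive int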
Use of org.apache.flink.table.types.logical.IntType in project flink by apache.
Class JdbcRowDataInputFormatTest, method verifySplit.
private void verifySplit(InputSplit split, int expectedIDSum) throws IOException {
    int sum = 0;
    RowData row = new GenericRowData(5);
    inputFormat.open(split);
    RowData.FieldGetter idFieldGetter = RowData.createFieldGetter(new IntType(), 0);
    while (!inputFormat.reachedEnd()) {
        row = inputFormat.nextRecord(row);
        int id = (int) idFieldGetter.getFieldOrNull(row);
        int testDataIndex = id - 1001;
        assertEquals(TEST_DATA[testDataIndex], row);
        sum += id;
    }
    assertEquals(expectedIDSum, sum);
}
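createFieldGetter keys its null handling off the type's nullability: for the nullable new IntType() above, the returned getter wraps the access in an isNullAt check, while a NOT NULL variant skips it. A hedged sketch of the NOT NULL form (someRow is a placeholder for any RowData):

// Sketch: with INT NOT NULL the getter omits the per-row null check.
RowData.FieldGetter idGetter = RowData.createFieldGetter(new IntType(false), 0);
int id = (int) idGetter.getFieldOrNull(someRow); // never null for a NOT NULL field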
Use of org.apache.flink.table.types.logical.IntType in project flink by apache.
Class InternalDataUtils, method toGenericMap.
static GenericMapData toGenericMap(MapData mapData, LogicalType logicalType) {
    // For a MULTISET the key is the element type and the value is a non-null count.
    final LogicalType keyType =
            logicalType.is(LogicalTypeRoot.MULTISET)
                    ? ((MultisetType) logicalType).getElementType()
                    : ((MapType) logicalType).getKeyType();
    final LogicalType valueType =
            logicalType.is(LogicalTypeRoot.MULTISET)
                    ? new IntType(false)
                    : ((MapType) logicalType).getValueType();
    final ArrayData.ElementGetter keyGetter = ArrayData.createElementGetter(keyType);
    final ArrayData.ElementGetter valueGetter = ArrayData.createElementGetter(valueType);
    final ArrayData keys = mapData.keyArray();
    final ArrayData values = mapData.valueArray();
    final LinkedHashMap<Object, Object> newMap = new LinkedHashMap<>();
    for (int i = 0; i < mapData.size(); i++) {
        Object key = null;
        Object value = null;
        if (!keys.isNullAt(i)) {
            key = toGenericInternalData(keyGetter.getElementOrNull(keys, i), keyType);
        }
        if (!values.isNullAt(i)) {
            value = toGenericInternalData(valueGetter.getElementOrNull(values, i), valueType);
        }
        newMap.put(key, value);
    }
    return new GenericMapData(newMap);
}
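Since a MULTISET is represented internally as a map from element to its occurrence count, the hard-coded new IntType(false) value type matches data shaped like this hedged sketch (the element type and values are assumptions for illustration):

// Sketch: MULTISET<VARCHAR> as internal data; values are non-null INT counts.
Map<Object, Object> counts = new LinkedHashMap<>();
counts.put(StringData.fromString("a"), 2);
counts.put(StringData.fromString("b"), 1);
GenericMapData multiset = new GenericMapData(counts);
GenericMapData copy =
        toGenericMap(multiset, new MultisetType(new VarCharType(VarCharType.MAX_LENGTH)));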