Example 11 with RowData

Use of org.apache.flink.table.data.RowData in project flink by apache.

From the class HBaseSerdeTest, method convertToReusedRowTest:

@Test
public void convertToReusedRowTest() {
    HBaseSerde serde = createHBaseSerde();
    List<List<Cell>> cellsList = prepareCells();
    List<RowData> resultRowDatas = new ArrayList<>();
    List<String> resultRowDataStr = new ArrayList<>();
    for (List<Cell> cells : cellsList) {
        RowData row = serde.convertToReusedRow(Result.create(cells));
        resultRowDatas.add(row);
        resultRowDataStr.add(row.toString());
    }
    // this verifies RowData is reused
    assertTrue(resultRowDatas.get(0) == resultRowDatas.get(1));
    List<String> expected = new ArrayList<>();
    expected.add("+I(1,+I(10),+I(Hello-1,100),+I(1.01,false,Welt-1))");
    expected.add("+I(2,+I(20),+I(Hello-2,200),+I(2.02,true,Welt-2))");
    assertEquals(expected, resultRowDataStr);
}
Also used : RowData(org.apache.flink.table.data.RowData) ArrayList(java.util.ArrayList) List(java.util.List) Cell(org.apache.hadoop.hbase.Cell) Test(org.junit.Test)
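
A hedged companion sketch, not taken from the Flink test: when rows have to be buffered or cached (as the lookup function in Example 13 does), convertToNewRow is the safer call, because each invocation returns a fresh RowData instead of reusing one instance. The helpers createHBaseSerde() and prepareCells() are the same test fixtures used above.

HBaseSerde serde = createHBaseSerde();
List<List<Cell>> cellsList = prepareCells();
// convertToNewRow allocates a new RowData per call, so the two results stay independent
RowData first = serde.convertToNewRow(Result.create(cellsList.get(0)));
RowData second = serde.convertToNewRow(Result.create(cellsList.get(1)));
assertTrue(first != second);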

Example 12 with RowData

Use of org.apache.flink.table.data.RowData in project flink by apache.

From the class HBaseSerde, method createPutMutation:

/**
 * Returns an instance of Put that writes the given record to the HBase table.
 *
 * @return The Put for this record, or null if the row key serializes to zero length.
 */
@Nullable
public Put createPutMutation(RowData row) {
    checkArgument(keyEncoder != null, "row key is not set.");
    byte[] rowkey = keyEncoder.encode(row, rowkeyIndex);
    if (rowkey.length == 0) {
        // drop dirty records, rowkey shouldn't be zero length
        return null;
    }
    // upsert
    Put put = new Put(rowkey);
    for (int i = 0; i < fieldLength; i++) {
        if (i != rowkeyIndex) {
            int f = i > rowkeyIndex ? i - 1 : i;
            // get family key
            byte[] familyKey = families[f];
            RowData familyRow = row.getRow(i, qualifiers[f].length);
            for (int q = 0; q < this.qualifiers[f].length; q++) {
                // get qualifier key
                byte[] qualifier = qualifiers[f][q];
                // serialize value
                byte[] value = qualifierEncoders[f][q].encode(familyRow, q);
                put.addColumn(familyKey, qualifier, value);
            }
        }
    }
    return put;
}
Also used : RowData(org.apache.flink.table.data.RowData) GenericRowData(org.apache.flink.table.data.GenericRowData) Put(org.apache.hadoop.hbase.client.Put) Nullable(javax.annotation.Nullable)
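
A minimal usage sketch rather than the actual Flink HBase sink code: because createPutMutation returns null for records whose row key serializes to zero length, callers should skip those before handing the mutation to HBase. The mutator (an org.apache.hadoop.hbase.client.BufferedMutator) and the record variable are assumed to exist in the surrounding sink.

// drop dirty records signalled by a null Put, write everything else
Put put = serde.createPutMutation(record);
if (put != null) {
    mutator.mutate(put);
}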

Example 13 with RowData

Use of org.apache.flink.table.data.RowData in project flink by apache.

From the class HBaseRowDataLookupFunction, method eval:

/**
 * The invoke entry point of the lookup function.
 *
 * @param rowKey the lookup key. Currently only a single rowkey is supported.
 */
public void eval(Object rowKey) throws IOException {
    if (cache != null) {
        RowData cacheRowData = cache.getIfPresent(rowKey);
        if (cacheRowData != null) {
            collect(cacheRowData);
            return;
        }
    }
    for (int retry = 0; retry <= maxRetryTimes; retry++) {
        try {
            // fetch result
            Get get = serde.createGet(rowKey);
            if (get != null) {
                Result result = table.get(get);
                if (!result.isEmpty()) {
                    if (cache != null) {
                        // parse and collect
                        RowData rowData = serde.convertToNewRow(result);
                        collect(rowData);
                        cache.put(rowKey, rowData);
                    } else {
                        collect(serde.convertToReusedRow(result));
                    }
                }
            }
            break;
        } catch (IOException e) {
            LOG.error(String.format("HBase lookup error, retry times = %d", retry), e);
            if (retry >= maxRetryTimes) {
                throw new RuntimeException("Execution of HBase lookup failed.", e);
            }
            try {
                Thread.sleep(1000 * retry);
            } catch (InterruptedException e1) {
                throw new RuntimeException(e1);
            }
        }
    }
}
Also used : RowData(org.apache.flink.table.data.RowData) Get(org.apache.hadoop.hbase.client.Get) IOException(java.io.IOException) Result(org.apache.hadoop.hbase.client.Result)
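
A hedged sketch of how the cache consulted above might be built; the real HBaseRowDataLookupFunction configures it in open() from the lookup cache options and uses Flink's shaded Guava classes, whereas this illustration assumes the plain Guava imports com.google.common.cache.Cache, com.google.common.cache.CacheBuilder, and java.util.concurrent.TimeUnit. The cacheMaxSize and cacheExpireMs values are assumed configuration fields.

// evict by size and by write age so stale HBase rows eventually fall out of the cache
Cache<Object, RowData> cache =
        CacheBuilder.newBuilder()
                .maximumSize(cacheMaxSize)
                .expireAfterWrite(cacheExpireMs, TimeUnit.MILLISECONDS)
                .build();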

Example 14 with RowData

Use of org.apache.flink.table.data.RowData in project flink by apache.

From the class RegistryAvroRowDataSeDeSchemaTest, method testRowDataWriteReadWithSchema:

private void testRowDataWriteReadWithSchema(Schema schema) throws Exception {
    DataType dataType = AvroSchemaConverter.convertToDataType(schema.toString());
    RowType rowType = (RowType) dataType.getLogicalType();
    AvroRowDataSerializationSchema serializer = getSerializationSchema(rowType, schema);
    Schema writeSchema = AvroSchemaConverter.convertToSchema(dataType.getLogicalType());
    AvroRowDataDeserializationSchema deserializer = getDeserializationSchema(rowType, writeSchema);
    serializer.open(null);
    deserializer.open(null);
    assertNull(deserializer.deserialize(null));
    RowData oriData = address2RowData(address);
    byte[] serialized = serializer.serialize(oriData);
    RowData rowData = deserializer.deserialize(serialized);
    assertThat(rowData.getArity(), equalTo(schema.getFields().size()));
    assertEquals(address.getNum(), rowData.getInt(0));
    assertEquals(address.getStreet(), rowData.getString(1).toString());
    if (schema != ADDRESS_SCHEMA_COMPATIBLE) {
        assertEquals(address.getCity(), rowData.getString(2).toString());
        assertEquals(address.getState(), rowData.getString(3).toString());
        assertEquals(address.getZip(), rowData.getString(4).toString());
    }
}
Also used : AvroRowDataSerializationSchema(org.apache.flink.formats.avro.AvroRowDataSerializationSchema) GenericRowData(org.apache.flink.table.data.GenericRowData) RowData(org.apache.flink.table.data.RowData) RegistryAvroDeserializationSchema(org.apache.flink.formats.avro.RegistryAvroDeserializationSchema) AvroRowDataDeserializationSchema(org.apache.flink.formats.avro.AvroRowDataDeserializationSchema) Schema(org.apache.avro.Schema) RegistryAvroSerializationSchema(org.apache.flink.formats.avro.RegistryAvroSerializationSchema) DataType(org.apache.flink.table.types.DataType) RowType(org.apache.flink.table.types.logical.RowType)
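
A hedged sketch of what the address2RowData(...) helper used above could look like; the real test helper is not shown here, so the field order and getters are assumptions inferred from the assertions in the test. GenericRowData and StringData are public Flink table APIs.

// builds a five-field row matching num, street, city, state, zip
private static RowData address2RowData(Address address) {
    GenericRowData row = new GenericRowData(5);
    row.setField(0, address.getNum());
    row.setField(1, StringData.fromString(String.valueOf(address.getStreet())));
    row.setField(2, StringData.fromString(String.valueOf(address.getCity())));
    row.setField(3, StringData.fromString(String.valueOf(address.getState())));
    row.setField(4, StringData.fromString(String.valueOf(address.getZip())));
    return row;
}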

Example 15 with RowData

Use of org.apache.flink.table.data.RowData in project flink by apache.

From the class RegistryAvroFormatFactoryTest, method testDeserializationSchema:

@Test
public void testDeserializationSchema() {
    final AvroRowDataDeserializationSchema expectedDeser =
            new AvroRowDataDeserializationSchema(
                    ConfluentRegistryAvroDeserializationSchema.forGeneric(
                            AvroSchemaConverter.convertToSchema(ROW_TYPE), REGISTRY_URL),
                    AvroToRowDataConverters.createRowConverter(ROW_TYPE),
                    InternalTypeInfo.of(ROW_TYPE));
    final DynamicTableSource actualSource = createTableSource(SCHEMA, getDefaultOptions());
    assertThat(actualSource, instanceOf(TestDynamicTableFactory.DynamicTableSourceMock.class));
    TestDynamicTableFactory.DynamicTableSourceMock scanSourceMock = (TestDynamicTableFactory.DynamicTableSourceMock) actualSource;
    DeserializationSchema<RowData> actualDeser =
            scanSourceMock.valueFormat.createRuntimeDecoder(
                    ScanRuntimeProviderContext.INSTANCE, SCHEMA.toPhysicalRowDataType());
    assertEquals(expectedDeser, actualDeser);
}
Also used : RowData(org.apache.flink.table.data.RowData) AvroRowDataDeserializationSchema(org.apache.flink.formats.avro.AvroRowDataDeserializationSchema) TestDynamicTableFactory(org.apache.flink.table.factories.TestDynamicTableFactory) DynamicTableSource(org.apache.flink.table.connector.source.DynamicTableSource) Test(org.junit.Test)
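
A hedged sketch of the serialization-side counterpart to the assertion above, added for symmetry; it follows the same construction pattern the deserialization test shows rather than being a verified copy of the factory test, and the SUBJECT constant is an assumption.

final AvroRowDataSerializationSchema expectedSer =
        new AvroRowDataSerializationSchema(
                ROW_TYPE,
                ConfluentRegistryAvroSerializationSchema.forGeneric(
                        SUBJECT, AvroSchemaConverter.convertToSchema(ROW_TYPE), REGISTRY_URL),
                RowDataToAvroConverters.createConverter(ROW_TYPE));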

Aggregations

RowData (org.apache.flink.table.data.RowData): 602
Test (org.junit.Test): 201
GenericRowData (org.apache.flink.table.data.GenericRowData): 178
ArrayList (java.util.ArrayList): 109
RowType (org.apache.flink.table.types.logical.RowType): 105
JoinedRowData (org.apache.flink.table.data.utils.JoinedRowData): 90
Watermark (org.apache.flink.streaming.api.watermark.Watermark): 84
ConcurrentLinkedQueue (java.util.concurrent.ConcurrentLinkedQueue): 72
Transformation (org.apache.flink.api.dag.Transformation): 70
Configuration (org.apache.flink.configuration.Configuration): 68
BinaryRowData (org.apache.flink.table.data.binary.BinaryRowData): 67
List (java.util.List): 65
ExecEdge (org.apache.flink.table.planner.plan.nodes.exec.ExecEdge): 54
DataType (org.apache.flink.table.types.DataType): 52
Map (java.util.Map): 42
LogicalType (org.apache.flink.table.types.logical.LogicalType): 41
TableException (org.apache.flink.table.api.TableException): 34
OneInputTransformation (org.apache.flink.streaming.api.transformations.OneInputTransformation): 33
RowDataKeySelector (org.apache.flink.table.runtime.keyselector.RowDataKeySelector): 32
OperatorSubtaskState (org.apache.flink.runtime.checkpoint.OperatorSubtaskState): 31