
Example 1 with RowDataToStringConverterImpl

Use of org.apache.flink.table.planner.functions.casting.RowDataToStringConverterImpl in project flink by apache.

From the class CliTableauResultViewTest, method setUp:

@Before
public void setUp() {
    terminalOutput = new ByteArrayOutputStream();
    terminal = TerminalUtils.createDumbTerminal(terminalOutput);
    schema =
            ResolvedSchema.of(
                    Column.physical("boolean", DataTypes.BOOLEAN()),
                    Column.physical("int", DataTypes.INT()),
                    Column.physical("bigint", DataTypes.BIGINT()),
                    Column.physical("varchar", DataTypes.STRING()),
                    Column.physical("decimal(10, 5)", DataTypes.DECIMAL(10, 5)),
                    Column.physical("timestamp", DataTypes.TIMESTAMP(6).bridgedTo(Timestamp.class)));
    rowDataToStringConverter = new RowDataToStringConverterImpl(schema.toPhysicalRowDataType());
    List<Row> rows =
            Arrays.asList(
                    Row.ofKind(RowKind.INSERT, null, 1, 2L, "abc", BigDecimal.valueOf(1.23), Timestamp.valueOf("2020-03-01 18:39:14")),
                    Row.ofKind(RowKind.UPDATE_BEFORE, false, null, 0L, "", BigDecimal.valueOf(1), Timestamp.valueOf("2020-03-01 18:39:14.1")),
                    Row.ofKind(RowKind.UPDATE_AFTER, true, Integer.MAX_VALUE, null, "abcdefg", BigDecimal.valueOf(12345), Timestamp.valueOf("2020-03-01 18:39:14.12")),
                    Row.ofKind(RowKind.DELETE, false, Integer.MIN_VALUE, Long.MAX_VALUE, null, BigDecimal.valueOf(12345.06789), Timestamp.valueOf("2020-03-01 18:39:14.123")),
                    Row.ofKind(RowKind.INSERT, true, 100, Long.MIN_VALUE, "abcdefg111", null, Timestamp.valueOf("2020-03-01 18:39:14.123456")),
                    Row.ofKind(RowKind.DELETE, null, -1, -1L, "abcdefghijklmnopqrstuvwxyz", BigDecimal.valueOf(-12345.06789), null),
                    Row.ofKind(RowKind.INSERT, null, -1, -1L, "这是一段中文", BigDecimal.valueOf(-12345.06789), Timestamp.valueOf("2020-03-04 18:39:14")),
                    Row.ofKind(RowKind.DELETE, null, -1, -1L, "これは日本語をテストするための文です", BigDecimal.valueOf(-12345.06789), Timestamp.valueOf("2020-03-04 18:39:14")));
    final DataStructureConverter<Object, Object> dataStructureConverter = DataStructureConverters.getConverter(schema.toPhysicalRowDataType());
    data = rows.stream().map(r -> (RowData) (dataStructureConverter.toInternal(r))).collect(Collectors.toList());
    streamingData = rows.stream().map(r -> (RowData) (dataStructureConverter.toInternal(r))).collect(Collectors.toList());
}
Also used: ByteArrayOutputStream(java.io.ByteArrayOutputStream) Row(org.apache.flink.types.Row) Timestamp(java.sql.Timestamp) RowDataToStringConverterImpl(org.apache.flink.table.planner.functions.casting.RowDataToStringConverterImpl) Before(org.junit.Before)
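
The converter built in setUp() is parameterized with the schema's physical row data type, so it can render the internal RowData values produced above for display. A minimal usage sketch follows; it assumes the RowDataToStringConverter interface exposes a convert(RowData) method returning one formatted String per column, which is an assumption made for illustration rather than something shown in the snippet above.

// Sketch only: assumes RowDataToStringConverter#convert(RowData) returns String[],
// one formatted value per column, the way a tableau-style printer would consume it.
for (RowData rowData : data) {
    String[] columns = rowDataToStringConverter.convert(rowData);
    System.out.println(String.join(" | ", columns));
}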

Example 2 with RowDataToStringConverterImpl

Use of org.apache.flink.table.planner.functions.casting.RowDataToStringConverterImpl in project flink by apache.

From the class CliResultViewTest, method testResultViewClearResult:

private void testResultViewClearResult(TypedResult<?> typedResult, boolean isTableMode, int expectedCancellationCount) throws Exception {
    final CountDownLatch cancellationCounterLatch = new CountDownLatch(expectedCancellationCount);
    final MockExecutor executor = new MockExecutor(typedResult, cancellationCounterLatch);
    final Configuration testConfig = new Configuration();
    testConfig.set(EXECUTION_RESULT_MODE, ResultMode.TABLE);
    testConfig.set(RUNTIME_MODE, RuntimeExecutionMode.STREAMING);
    String sessionId = executor.openSession("test-session");
    ResolvedSchema schema = ResolvedSchema.of(Column.physical("Null Field", DataTypes.STRING()));
    final ResultDescriptor descriptor =
            new ResultDescriptor(
                    "result-id",
                    schema,
                    false,
                    testConfig,
                    new RowDataToStringConverterImpl(schema.toPhysicalRowDataType()));
    try (CliClient cli = new TestingCliClient(TerminalUtils.createDumbTerminal(), sessionId, executor, File.createTempFile("history", "tmp").toPath(), null)) {
        Thread resultViewRunner = new Thread(new TestingCliResultView(cli, descriptor, isTableMode));
        resultViewRunner.start();
        if (!resultViewRunner.isInterrupted()) {
            resultViewRunner.interrupt();
        }
        // wait until the result view thread exits before closing the client
        while (resultViewRunner.isAlive()) {
            Thread.sleep(100);
        }
    }
    assertTrue("Invalid number of cancellations.", cancellationCounterLatch.await(10, TimeUnit.SECONDS));
}
Also used: Configuration(org.apache.flink.configuration.Configuration) ResultDescriptor(org.apache.flink.table.client.gateway.ResultDescriptor) AttributedString(org.jline.utils.AttributedString) CountDownLatch(java.util.concurrent.CountDownLatch) ResolvedSchema(org.apache.flink.table.catalog.ResolvedSchema) RowDataToStringConverterImpl(org.apache.flink.table.planner.functions.casting.RowDataToStringConverterImpl)
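
The cancellation bookkeeping in this test hinges on the CountDownLatch handed to MockExecutor: each cancellation is expected to count the latch down once, and the final assertTrue checks that the expected number of cancellations arrived within ten seconds. Below is a self-contained sketch of that latch pattern, with a Runnable standing in for the mock executor's cancellation hook; the hook itself is an assumption, since MockExecutor's internals are not shown here.

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

public class CancellationLatchSketch {
    public static void main(String[] args) throws InterruptedException {
        int expectedCancellationCount = 1;
        CountDownLatch cancellationCounterLatch = new CountDownLatch(expectedCancellationCount);

        // Stand-in for the MockExecutor: each cancellation counts the latch down once.
        Runnable onCancel = cancellationCounterLatch::countDown;

        // Simulate the interrupted result view triggering a cancellation on another thread.
        new Thread(onCancel).start();

        // Mirrors the assertion in the test: all expected cancellations within the timeout.
        boolean allCancellationsSeen = cancellationCounterLatch.await(10, TimeUnit.SECONDS);
        System.out.println("all cancellations observed: " + allCancellationsSeen);
    }
}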

Aggregations

RowDataToStringConverterImpl (org.apache.flink.table.planner.functions.casting.RowDataToStringConverterImpl): 2 uses
ByteArrayOutputStream (java.io.ByteArrayOutputStream): 1 use
Timestamp (java.sql.Timestamp): 1 use
CountDownLatch (java.util.concurrent.CountDownLatch): 1 use
Configuration (org.apache.flink.configuration.Configuration): 1 use
ResolvedSchema (org.apache.flink.table.catalog.ResolvedSchema): 1 use
ResultDescriptor (org.apache.flink.table.client.gateway.ResultDescriptor): 1 use
Row (org.apache.flink.types.Row): 1 use
AttributedString (org.jline.utils.AttributedString): 1 use
Before (org.junit.Before): 1 use