
Example 11 with TableConfig

Use of org.apache.flink.table.api.TableConfig in project flink by apache.

In class StreamTableEnvironmentImplTest, method getStreamTableEnvironment:

private StreamTableEnvironmentImpl getStreamTableEnvironment(StreamExecutionEnvironment env, DataStreamSource<Integer> elements) {
    TableConfig tableConfig = new TableConfig();
    CatalogManager catalogManager = CatalogManagerMocks.createEmptyCatalogManager();
    ModuleManager moduleManager = new ModuleManager();
    return new StreamTableEnvironmentImpl(catalogManager, moduleManager, new FunctionCatalog(tableConfig, catalogManager, moduleManager), tableConfig, env, new TestPlanner(elements.getTransformation()), new ExecutorMock(), true, this.getClass().getClassLoader());
}
Also used : FunctionCatalog(org.apache.flink.table.catalog.FunctionCatalog) TableConfig(org.apache.flink.table.api.TableConfig) ExecutorMock(org.apache.flink.table.utils.ExecutorMock) ModuleManager(org.apache.flink.table.module.ModuleManager) CatalogManager(org.apache.flink.table.catalog.CatalogManager)
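A minimal usage sketch of the helper above (not taken from the Flink sources; the test name, the UTC time-zone setting, and the Assert import are illustrative assumptions):

@Test
public void exampleHelperUsage() {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    DataStreamSource<Integer> elements = env.fromElements(1, 2, 3);
    StreamTableEnvironmentImpl tEnv = getStreamTableEnvironment(env, elements);
    // The TableConfig created inside the helper is the one returned by getConfig(),
    // so tests can tune it after construction.
    tEnv.getConfig().setLocalTimeZone(java.time.ZoneId.of("UTC"));
    Table table = tEnv.fromDataStream(elements);
    Assert.assertNotNull(table);
}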

Example 12 with TableConfig

Use of org.apache.flink.table.api.TableConfig in project flink by apache.

In class SortCodeGeneratorTest, method getSortBaseWithNulls:

public static Tuple2<NormalizedKeyComputer, RecordComparator> getSortBaseWithNulls(String namePrefix, RowType inputType, SortSpec sortSpec) {
    SortCodeGenerator generator = new SortCodeGenerator(new TableConfig(), inputType, sortSpec);
    GeneratedNormalizedKeyComputer computer = generator.generateNormalizedKeyComputer(namePrefix + "Computer");
    GeneratedRecordComparator comparator = generator.generateRecordComparator(namePrefix + "Comparator");
    ClassLoader cl = Thread.currentThread().getContextClassLoader();
    return new Tuple2<>(computer.newInstance(cl), comparator.newInstance(cl));
}
Also used : GeneratedNormalizedKeyComputer(org.apache.flink.table.runtime.generated.GeneratedNormalizedKeyComputer) Tuple2(org.apache.flink.api.java.tuple.Tuple2) TableConfig(org.apache.flink.table.api.TableConfig) SortCodeGenerator(org.apache.flink.table.planner.codegen.sort.SortCodeGenerator) GeneratedRecordComparator(org.apache.flink.table.runtime.generated.GeneratedRecordComparator)
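A hedged usage sketch for the helper above, assuming SortSpec.builder().addField(fieldIndex, isAscending, nullIsLast) and RecordComparator.compare(RowData, RowData) as in recent Flink versions; IntType, GenericRowData, and Assert are assumed imports, and the single-field row type and values are illustrative:

RowType inputType = RowType.of(new IntType());
SortSpec sortSpec = SortSpec.builder()
    // sort ascending on field 0, nulls last
    .addField(0, true, true)
    .build();
Tuple2<NormalizedKeyComputer, RecordComparator> sortBase =
    getSortBaseWithNulls("Example", inputType, sortSpec);
GenericRowData smaller = GenericRowData.of(1);
GenericRowData larger = GenericRowData.of(2);
// The generated comparator orders rows according to the SortSpec above.
Assert.assertTrue(sortBase.f1.compare(smaller, larger) < 0);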

Example 13 with TableConfig

Use of org.apache.flink.table.api.TableConfig in project flink by apache.

In class CodeSplitTest, method testJoinCondition:

@Test
public void testJoinCondition() {
    int numFields = 200;
    FlinkTypeFactory typeFactory = FlinkTypeFactory.INSTANCE();
    RexBuilder builder = new RexBuilder(typeFactory);
    RelDataType intType = typeFactory.createFieldTypeFromLogicalType(new IntType());
    RexNode[] conditions = new RexNode[numFields];
    for (int i = 0; i < numFields; i++) {
        conditions[i] = builder.makeCall(SqlStdOperatorTable.LESS_THAN, new RexInputRef(i, intType), new RexInputRef(numFields + i, intType));
    }
    RexNode joinCondition = builder.makeCall(SqlStdOperatorTable.AND, conditions);
    RowType rowType = getIntRowType(numFields);
    GenericRowData rowData1 = new GenericRowData(numFields);
    GenericRowData rowData2 = new GenericRowData(numFields);
    Random random = new Random();
    for (int i = 0; i < numFields; i++) {
        rowData1.setField(i, 0);
        rowData2.setField(i, 1);
    }
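    // Randomly decide whether every conjunct should hold. If not, break one conjunct by
    // setting a random field of the left row to 1, so that rowData1[i] < rowData2[i]
    // fails for that field and the AND evaluates to false.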
    boolean result = random.nextBoolean();
    if (!result) {
        rowData1.setField(random.nextInt(numFields), 1);
    }
    Consumer<TableConfig> consumer = tableConfig -> {
        JoinCondition instance = JoinUtil.generateConditionFunction(tableConfig, joinCondition, rowType, rowType).newInstance(classLoader);
        for (int i = 0; i < 100; i++) {
            Assert.assertEquals(result, instance.apply(rowData1, rowData2));
        }
    };
    runTest(consumer);
}
Also used : Arrays(java.util.Arrays) FlinkMatchers(org.apache.flink.core.testutils.FlinkMatchers) IntType(org.apache.flink.table.types.logical.IntType) Random(java.util.Random) FlinkTypeFactory(org.apache.flink.table.planner.calcite.FlinkTypeFactory) RowType(org.apache.flink.table.types.logical.RowType) ArrayList(java.util.ArrayList) HashFunction(org.apache.flink.table.runtime.generated.HashFunction) TableConfigOptions(org.apache.flink.table.api.config.TableConfigOptions) BinaryRowWriter(org.apache.flink.table.data.writer.BinaryRowWriter) GenericRowData(org.apache.flink.table.data.GenericRowData) RexNode(org.apache.calcite.rex.RexNode) OutputStream(java.io.OutputStream) PrintStream(java.io.PrintStream) RelDataType(org.apache.calcite.rel.type.RelDataType) TableConfig(org.apache.flink.table.api.TableConfig) RecordComparator(org.apache.flink.table.runtime.generated.RecordComparator) RexBuilder(org.apache.calcite.rex.RexBuilder) Test(org.junit.Test) IOException(java.io.IOException) BinaryRowData(org.apache.flink.table.data.binary.BinaryRowData) ComparatorCodeGenerator(org.apache.flink.table.planner.codegen.sort.ComparatorCodeGenerator) RexInputRef(org.apache.calcite.rex.RexInputRef) Consumer(java.util.function.Consumer) JoinUtil(org.apache.flink.table.planner.plan.utils.JoinUtil) JoinCondition(org.apache.flink.table.runtime.generated.JoinCondition) List(java.util.List) MatcherAssert(org.hamcrest.MatcherAssert) LogicalType(org.apache.flink.table.types.logical.LogicalType) SqlStdOperatorTable(org.apache.calcite.sql.fun.SqlStdOperatorTable) Assert(org.junit.Assert) Collections(java.util.Collections) SortSpec(org.apache.flink.table.planner.plan.nodes.exec.spec.SortSpec) Projection(org.apache.flink.table.runtime.generated.Projection)
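A smaller-scale sketch of the same API (illustrative only, reusing the calls from the snippet above; the GenericRowData values and the use of the context class loader instead of the test's classLoader field are assumptions): a single $0 < $1 condition over one-field left and right rows, evaluated with a default TableConfig.

FlinkTypeFactory typeFactory = FlinkTypeFactory.INSTANCE();
RexBuilder builder = new RexBuilder(typeFactory);
RelDataType intType = typeFactory.createFieldTypeFromLogicalType(new IntType());
RexNode condition = builder.makeCall(SqlStdOperatorTable.LESS_THAN, new RexInputRef(0, intType), new RexInputRef(1, intType));
RowType oneIntRow = RowType.of(new IntType());
JoinCondition instance = JoinUtil.generateConditionFunction(new TableConfig(), condition, oneIntRow, oneIntRow).newInstance(Thread.currentThread().getContextClassLoader());
// 0 < 1 holds, 1 < 0 does not.
Assert.assertTrue(instance.apply(GenericRowData.of(0), GenericRowData.of(1)));
Assert.assertFalse(instance.apply(GenericRowData.of(1), GenericRowData.of(0)));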

Example 14 with TableConfig

Use of org.apache.flink.table.api.TableConfig in project flink by apache.

In class CodeSplitTest, method testHashFunction:

@Test
public void testHashFunction() {
    int numFields = 1000;
    RowType rowType = getIntRowType(numFields);
    int[] hashFields = new int[numFields];
    for (int i = 0; i < numFields; i++) {
        hashFields[i] = i;
    }
    GenericRowData rowData = new GenericRowData(numFields);
    for (int i = 0; i < numFields; i++) {
        rowData.setField(i, i);
    }
    Consumer<TableConfig> consumer = tableConfig -> {
        HashFunction instance = HashCodeGenerator.generateRowHash(new CodeGeneratorContext(tableConfig), rowType, "", hashFields).newInstance(classLoader);
        for (int i = 0; i < 100; i++) {
            Assert.assertEquals(-1433414860, instance.hashCode(rowData));
        }
    };
    runTest(consumer);
}
Also used : Arrays(java.util.Arrays) FlinkMatchers(org.apache.flink.core.testutils.FlinkMatchers) IntType(org.apache.flink.table.types.logical.IntType) Random(java.util.Random) FlinkTypeFactory(org.apache.flink.table.planner.calcite.FlinkTypeFactory) RowType(org.apache.flink.table.types.logical.RowType) ArrayList(java.util.ArrayList) HashFunction(org.apache.flink.table.runtime.generated.HashFunction) TableConfigOptions(org.apache.flink.table.api.config.TableConfigOptions) BinaryRowWriter(org.apache.flink.table.data.writer.BinaryRowWriter) GenericRowData(org.apache.flink.table.data.GenericRowData) RexNode(org.apache.calcite.rex.RexNode) OutputStream(java.io.OutputStream) PrintStream(java.io.PrintStream) RelDataType(org.apache.calcite.rel.type.RelDataType) TableConfig(org.apache.flink.table.api.TableConfig) RecordComparator(org.apache.flink.table.runtime.generated.RecordComparator) RexBuilder(org.apache.calcite.rex.RexBuilder) Test(org.junit.Test) IOException(java.io.IOException) BinaryRowData(org.apache.flink.table.data.binary.BinaryRowData) ComparatorCodeGenerator(org.apache.flink.table.planner.codegen.sort.ComparatorCodeGenerator) RexInputRef(org.apache.calcite.rex.RexInputRef) Consumer(java.util.function.Consumer) JoinUtil(org.apache.flink.table.planner.plan.utils.JoinUtil) JoinCondition(org.apache.flink.table.runtime.generated.JoinCondition) List(java.util.List) MatcherAssert(org.hamcrest.MatcherAssert) LogicalType(org.apache.flink.table.types.logical.LogicalType) SqlStdOperatorTable(org.apache.calcite.sql.fun.SqlStdOperatorTable) Assert(org.junit.Assert) Collections(java.util.Collections) SortSpec(org.apache.flink.table.planner.plan.nodes.exec.spec.SortSpec) Projection(org.apache.flink.table.runtime.generated.Projection)
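A standalone sketch mirroring the generateRowHash call above (illustrative only; the two-field row type, the "SmallRowHash" name, and the use of the context class loader are assumptions):

RowType smallRowType = RowType.of(new IntType(), new IntType());
HashFunction hash = HashCodeGenerator.generateRowHash(new CodeGeneratorContext(new TableConfig()), smallRowType, "SmallRowHash", new int[] { 0, 1 }).newInstance(Thread.currentThread().getContextClassLoader());
GenericRowData row = GenericRowData.of(42, 7);
// The generated hash is deterministic, which is what the fixed -1433414860 expectation above relies on.
Assert.assertEquals(hash.hashCode(row), hash.hashCode(GenericRowData.of(42, 7)));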

Example 15 with TableConfig

Use of org.apache.flink.table.api.TableConfig in project flink by apache.

In class KeySelectorUtil, method getRowDataSelector:

/**
 * Creates a RowDataKeySelector to extract keys from a DataStream whose type is {@link
 * InternalTypeInfo} of {@link RowData}.
 *
 * @param keyFields the key fields
 * @param rowType the type of the DataStream to extract keys from
 * @return a RowDataKeySelector that extracts keys from a DataStream whose type is {@link
 *     InternalTypeInfo} of {@link RowData}
 */
public static RowDataKeySelector getRowDataSelector(int[] keyFields, InternalTypeInfo<RowData> rowType) {
    if (keyFields.length > 0) {
        LogicalType[] inputFieldTypes = rowType.toRowFieldTypes();
        LogicalType[] keyFieldTypes = new LogicalType[keyFields.length];
        for (int i = 0; i < keyFields.length; ++i) {
            keyFieldTypes[i] = inputFieldTypes[keyFields[i]];
        }
        // do not provide field names for the result key type,
        // because we may have duplicate key fields and the field names may conflict
        RowType returnType = RowType.of(keyFieldTypes);
        RowType inputType = rowType.toRowType();
        GeneratedProjection generatedProjection = ProjectionCodeGenerator.generateProjection(CodeGeneratorContext.apply(new TableConfig()), "KeyProjection", inputType, returnType, keyFields);
        InternalTypeInfo<RowData> keyRowType = InternalTypeInfo.of(returnType);
        return new BinaryRowDataKeySelector(keyRowType, generatedProjection);
    } else {
        return EmptyRowDataKeySelector.INSTANCE;
    }
}
Also used : RowData(org.apache.flink.table.data.RowData) GeneratedProjection(org.apache.flink.table.runtime.generated.GeneratedProjection) LogicalType(org.apache.flink.table.types.logical.LogicalType) RowType(org.apache.flink.table.types.logical.RowType) TableConfig(org.apache.flink.table.api.TableConfig) BinaryRowDataKeySelector(org.apache.flink.table.runtime.keyselector.BinaryRowDataKeySelector)
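A hedged usage sketch for getRowDataSelector (illustrative only; IntType, GenericRowData, and Assert are assumed imports, and getKey should be called from a method that can propagate Exception per Flink's KeySelector contract):

InternalTypeInfo<RowData> rowType = InternalTypeInfo.of(RowType.of(new IntType(), new IntType(), new IntType()));
RowDataKeySelector selector = KeySelectorUtil.getRowDataSelector(new int[] { 2, 0 }, rowType);
GenericRowData row = GenericRowData.of(10, 20, 30);
RowData key = selector.getKey(row);
// The key row holds the selected fields in the requested order: (30, 10).
Assert.assertEquals(30, key.getInt(0));
Assert.assertEquals(10, key.getInt(1));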

Aggregations (usage counts)

TableConfig (org.apache.flink.table.api.TableConfig): 41
RowType (org.apache.flink.table.types.logical.RowType): 19
Test (org.junit.Test): 10
ArrayList (java.util.ArrayList): 6
RexNode (org.apache.calcite.rex.RexNode): 6
CatalogManager (org.apache.flink.table.catalog.CatalogManager): 6
IOException (java.io.IOException): 5
OutputStream (java.io.OutputStream): 5
PrintStream (java.io.PrintStream): 5
Arrays (java.util.Arrays): 5
Collections (java.util.Collections): 5
List (java.util.List): 5
RexBuilder (org.apache.calcite.rex.RexBuilder): 5
RexInputRef (org.apache.calcite.rex.RexInputRef): 5
Table (org.apache.flink.table.api.Table): 5
FlinkTypeFactory (org.apache.flink.table.planner.calcite.FlinkTypeFactory): 5
IntType (org.apache.flink.table.types.logical.IntType): 5
Row (org.apache.flink.types.Row): 5
Random (java.util.Random): 4
Consumer (java.util.function.Consumer): 4