Use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.schema.Table in project beam by apache.
In the class DataStoreReadWriteIT, the method testDataStoreV1SqlWriteRead:
@Test
public void testDataStoreV1SqlWriteRead() {
BeamSqlEnv sqlEnv = BeamSqlEnv.inMemory(new DataStoreV1TableProvider());
String projectId = options.getProject();
String createTableStatement =
    "CREATE EXTERNAL TABLE TEST( \n"
        + " `__key__` VARBINARY, \n"
        + " `content` VARCHAR \n"
        + ") \n"
        + "TYPE 'datastoreV1' \n"
        + "LOCATION '" + projectId + "/" + KIND + "'";
sqlEnv.executeDdl(createTableStatement);
Key ancestor = makeKey(KIND, UUID.randomUUID().toString()).build();
Key itemKey = makeKey(ancestor, KIND, UUID.randomUUID().toString()).build();
String insertStatement = "INSERT INTO TEST VALUES ( \n" + keyToSqlByteString(itemKey) + ", \n" + "'2000' \n" + ")";
BeamSqlRelUtils.toPCollection(writePipeline, sqlEnv.parseQuery(insertStatement));
writePipeline.run().waitUntilFinish();
String selectTableStatement = "SELECT * FROM TEST";
PCollection<Row> output = BeamSqlRelUtils.toPCollection(readPipeline, sqlEnv.parseQuery(selectTableStatement));
assertThat(output.getSchema(), equalTo(SOURCE_SCHEMA));
PipelineResult.State state = readPipeline.run().waitUntilFinish(Duration.standardMinutes(5));
assertThat(state, equalTo(State.DONE));
}
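The INSERT statement above relies on a keyToSqlByteString helper that is not shown in this snippet. A minimal sketch of what such a helper might look like, assuming the Datastore Key is serialized to its protobuf bytes and rendered as a hexadecimal SQL binary literal (using Guava's BaseEncoding) for the VARBINARY `__key__` column:
private static String keyToSqlByteString(Key key) {
  // Serialize the key and render it as X'<hex>' so it can be inlined into the VALUES clause.
  return "X'" + BaseEncoding.base16().encode(key.toByteArray()) + "'";
}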
Use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.schema.Table in project beam by apache.
In the class BeamTableFunctionScanRule, the method convert:
@Override
public RelNode convert(RelNode relNode) {
TableFunctionScan tableFunctionScan = (TableFunctionScan) relNode;
// only support one input for table function scan.
List<RelNode> inputs = new ArrayList<>();
checkArgument(
    relNode.getInputs().size() == 1,
    "Wrong number of inputs for %s, expected 1 input but received: %s",
    BeamTableFunctionScanRel.class.getSimpleName(),
    relNode.getInputs().size());
inputs.add(
    convert(
        relNode.getInput(0),
        relNode.getInput(0).getTraitSet().replace(BeamLogicalConvention.INSTANCE)));
return new BeamTableFunctionScanRel(
    tableFunctionScan.getCluster(),
    tableFunctionScan.getTraitSet().replace(BeamLogicalConvention.INSTANCE),
    inputs,
    tableFunctionScan.getCall(),
    null,
    tableFunctionScan.getCall().getType(),
    Collections.EMPTY_SET);
}
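The rule only takes effect once it is registered with the Calcite planner that Beam SQL uses for logical planning. A minimal sketch of that registration, assuming the rule exposes a singleton INSTANCE field the way Beam's other converter rules do:
// Hypothetical registration sketch; BeamTableFunctionScanRule.INSTANCE and the RelOptCluster
// named "cluster" are assumptions for illustration.
RelOptPlanner planner = cluster.getPlanner();
planner.addRule(BeamTableFunctionScanRule.INSTANCE);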
Use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.schema.Table in project beam by apache.
In the class ExpressionConverter, the method convertTableValuedFunction:
/**
* Convert a TableValuedFunction in ZetaSQL to a RexCall in Calcite.
*/
public RexCall convertTableValuedFunction(
    RelNode input,
    TableValuedFunction tvf,
    List<ResolvedNodes.ResolvedFunctionArgument> argumentList,
    List<ResolvedColumn> inputTableColumns) {
ResolvedColumn wmCol;
// Handle builtin windowing TVF.
switch(tvf.getName()) {
case TVFStreamingUtils.FIXED_WINDOW_TVF:
// TUMBLE tvf's second argument is descriptor.
wmCol = extractWatermarkColumnFromDescriptor(argumentList.get(1).getDescriptorArg());
return (RexCall)
    rexBuilder()
        .makeCall(
            new SqlWindowTableFunction(SqlKind.TUMBLE.name()),
            convertRelNodeToRexRangeRef(input),
            convertResolvedColumnToRexInputRef(wmCol, inputTableColumns),
            convertIntervalToRexIntervalLiteral((ResolvedLiteral) argumentList.get(2).getExpr()));
case TVFStreamingUtils.SLIDING_WINDOW_TVF:
// HOP tvf's second argument is descriptor.
wmCol = extractWatermarkColumnFromDescriptor(argumentList.get(1).getDescriptorArg());
return (RexCall)
    rexBuilder()
        .makeCall(
            new SqlWindowTableFunction(SqlKind.HOP.name()),
            convertRelNodeToRexRangeRef(input),
            convertResolvedColumnToRexInputRef(wmCol, inputTableColumns),
            convertIntervalToRexIntervalLiteral((ResolvedLiteral) argumentList.get(2).getExpr()),
            convertIntervalToRexIntervalLiteral((ResolvedLiteral) argumentList.get(3).getExpr()));
case TVFStreamingUtils.SESSION_WINDOW_TVF:
// SESSION tvf's second argument is descriptor.
wmCol = extractWatermarkColumnFromDescriptor(argumentList.get(1).getDescriptorArg());
// SESSION tvf's third argument is descriptor.
List<ResolvedColumn> keyCol = extractSessionKeyColumnFromDescriptor(argumentList.get(2).getDescriptorArg());
List<RexNode> operands = new ArrayList<>();
operands.add(convertRelNodeToRexRangeRef(input));
operands.add(convertResolvedColumnToRexInputRef(wmCol, inputTableColumns));
operands.add(convertIntervalToRexIntervalLiteral((ResolvedLiteral) argumentList.get(3).getExpr()));
operands.addAll(convertResolvedColumnsToRexInputRef(keyCol, inputTableColumns));
return (RexCall) rexBuilder().makeCall(new SqlWindowTableFunction(SqlKind.SESSION.name()), operands);
}
if (tvf instanceof FixedOutputSchemaTVF) {
FixedOutputSchemaTVF fixedOutputSchemaTVF = (FixedOutputSchemaTVF) tvf;
return (RexCall)
    rexBuilder()
        .makeCall(
            new ZetaSqlUserDefinedSQLNativeTableValuedFunction(
                new SqlIdentifier(tvf.getName(), SqlParserPos.ZERO),
                opBinding -> {
                  List<RelDataTypeField> relDataTypeFields =
                      convertTVFRelationColumnsToRelDataTypeFields(
                          fixedOutputSchemaTVF.getOutputSchema().getColumns());
                  return new RelRecordType(relDataTypeFields);
                },
                null, null, null, null));
}
throw new UnsupportedOperationException("Does not support table-valued function: " + tvf.getName());
}
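For context, the built-in windowing TVFs handled above correspond to queries of roughly the following shape in Beam's ZetaSQL dialect; the table name orders and its timestamp column event_ts are made up for illustration:
// Hypothetical queries exercising the TUMBLE and HOP branches above; the second
// argument is the column descriptor and the remaining arguments are interval literals.
String tumbleQuery =
    "SELECT * FROM TUMBLE(TABLE orders, DESCRIPTOR(event_ts), \"INTERVAL 1 MINUTE\")";
String hopQuery =
    "SELECT * FROM HOP(TABLE orders, DESCRIPTOR(event_ts), \"INTERVAL 1 MINUTE\", \"INTERVAL 5 MINUTE\")";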
Use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.schema.Table in project beam by apache.
In the class ZetaSQLPlannerImpl, the method rel:
public RelRoot rel(String sql, QueryParameters params) {
RelOptCluster cluster = RelOptCluster.create(planner, new RexBuilder(typeFactory));
AnalyzerOptions options = SqlAnalyzer.getAnalyzerOptions(params, defaultTimezone);
BeamZetaSqlCatalog catalog =
    BeamZetaSqlCatalog.create(
        defaultSchemaPlus, (JavaTypeFactory) cluster.getTypeFactory(), options);
// Set up table providers that need to be pre-registered
SqlAnalyzer analyzer = new SqlAnalyzer();
List<List<String>> tables = analyzer.extractTableNames(sql, options);
TableResolution.registerTables(this.defaultSchemaPlus, tables);
QueryTrait trait = new QueryTrait();
catalog.addTables(tables, trait);
ResolvedQueryStmt statement = analyzer.analyzeQuery(sql, options, catalog);
ExpressionConverter expressionConverter = new ExpressionConverter(cluster, params, catalog.getUserFunctionDefinitions());
ConversionContext context = ConversionContext.of(config, expressionConverter, cluster, trait);
RelNode convertedNode = QueryStatementConverter.convertRootQuery(context, statement);
return RelRoot.of(convertedNode, SqlKind.ALL);
}
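A minimal sketch of how this planner is normally reached from user code, assuming a schema-aware PCollection named input; the ZetaSQL planner is selected on the SqlTransform rather than invoked directly:
// Sketch: route a Beam SQL query through the ZetaSQL planner ("input" is an assumed PCollection<Row>).
PCollection<Row> result =
    input.apply(
        SqlTransform.query("SELECT * FROM PCOLLECTION")
            .withQueryPlannerClass(ZetaSQLQueryPlanner.class));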
Use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.schema.Table in project beam by apache.
In the class PubsubTableProviderIT, the method testSQLSelectsPayloadContent:
@Test
public void testSQLSelectsPayloadContent() throws Exception {
String createTableString =
    String.format(
        "CREATE EXTERNAL TABLE message (\n"
            + "event_timestamp TIMESTAMP, \n"
            + "attributes MAP<VARCHAR, VARCHAR>, \n"
            + "payload ROW< \n"
            + " id INTEGER, \n"
            + " name VARCHAR \n"
            + " > \n"
            + ") \n"
            + "TYPE '%s' \n"
            + "LOCATION '%s' \n"
            + "TBLPROPERTIES '{ " + "%s" + "\"protoClass\" : \"%s\", " + "\"timestampAttributeKey\" : \"ts\" }'",
        tableProvider.getTableType(),
        eventsTopic.topicPath(),
        payloadFormatParam(),
        PayloadMessages.SimpleMessage.class.getName());
String queryString = "SELECT message.payload.id, message.payload.name from message";
// Initialize SQL environment and create the pubsub table
BeamSqlEnv sqlEnv = BeamSqlEnv.inMemory(new PubsubTableProvider());
sqlEnv.executeDdl(createTableString);
// Apply the PTransform to query the pubsub topic
PCollection<Row> queryOutput = query(sqlEnv, pipeline, queryString);
// Observe the query results and send success signal after seeing the expected messages
queryOutput.apply(
    "waitForSuccess",
    resultSignal.signalSuccessWhen(
        SchemaCoder.of(PAYLOAD_SCHEMA),
        observedRows ->
            observedRows.equals(
                ImmutableSet.of(
                    row(PAYLOAD_SCHEMA, 3, "foo"),
                    row(PAYLOAD_SCHEMA, 5, "bar"),
                    row(PAYLOAD_SCHEMA, 7, "baz")))));
// Start the pipeline
pipeline.run();
// Block until a subscription for this topic exists
eventsTopic.assertSubscriptionEventuallyCreated(
    pipeline.getOptions().as(GcpOptions.class).getProject(), Duration.standardMinutes(5));
// Start publishing the messages when main pipeline is started and signaling topic is ready
eventsTopic.publish(
    ImmutableList.of(
        objectsProvider.messageIdName(ts(1), 3, "foo"),
        objectsProvider.messageIdName(ts(2), 5, "bar"),
        objectsProvider.messageIdName(ts(3), 7, "baz")));
// Poll the signaling topic for success message
resultSignal.waitForSuccess(timeout);
}
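The query(...) call above is a test helper that is not shown in this snippet. A minimal sketch of what such a helper might do, assuming it simply parses the query with the given BeamSqlEnv and expands it into the pipeline, mirroring the DataStore example earlier on this page:
// Hypothetical helper; the real test utility may do more (e.g. handle query parameters).
private static PCollection<Row> query(BeamSqlEnv sqlEnv, Pipeline pipeline, String queryString) {
  return BeamSqlRelUtils.toPCollection(pipeline, sqlEnv.parseQuery(queryString));
}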