Use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.schema.Table in project drill by axbaretto.
The class DropTableHandler, method getPlan.
/**
 * Resolves the schema and invokes the drop method. When the IF EXISTS clause is used,
 * the drop is attempted only if the table exists. Raises an exception if the schema is immutable.
 *
 * @param sqlNode SqlDropTable (SQL parse tree of a DROP TABLE [IF EXISTS] query)
 * @return a single-row plan indicating that the drop succeeded, or that the table was not found
 *         when IF EXISTS was used; a validation exception is raised otherwise
 */
@Override
public PhysicalPlan getPlan(SqlNode sqlNode) throws ValidationException, RelConversionException, IOException {
  SqlDropTable dropTableNode = (SqlDropTable) sqlNode;
  String originalTableName = dropTableNode.getName();
  SchemaPlus defaultSchema = config.getConverter().getDefaultSchema();
  List<String> tableSchema = dropTableNode.getSchema();
  DrillConfig drillConfig = context.getConfig();
  UserSession session = context.getSession();
  AbstractSchema temporarySchema = resolveToTemporarySchema(tableSchema, defaultSchema, drillConfig);
  boolean isTemporaryTable = session.isTemporaryTable(temporarySchema, drillConfig, originalTableName);
  if (isTemporaryTable) {
    session.removeTemporaryTable(temporarySchema, originalTableName, drillConfig);
  } else {
    AbstractSchema drillSchema = SchemaUtilites.resolveToMutableDrillSchema(defaultSchema, tableSchema);
    Table tableToDrop = SqlHandlerUtil.getTableFromSchema(drillSchema, originalTableName);
    if (tableToDrop == null || tableToDrop.getJdbcTableType() != Schema.TableType.TABLE) {
      if (dropTableNode.checkTableExistence()) {
        return DirectPlan.createDirectPlan(context, false, String.format("Table [%s] not found", originalTableName));
      } else {
        throw UserException.validationError().message("Table [%s] not found", originalTableName).build(logger);
      }
    }
    SqlHandlerUtil.dropTableFromSchema(drillSchema, originalTableName);
  }
  String message = String.format("%s [%s] dropped", isTemporaryTable ? "Temporary table" : "Table", originalTableName);
  logger.info(message);
  return DirectPlan.createDirectPlan(context, true, message);
}
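For orientation, a hedged sketch (not part of the Drill source) of the three outcomes the handler above produces; the table name dfs.tmp.cars is hypothetical.

String dropExisting = "DROP TABLE dfs.tmp.cars";            // table exists -> "Table [cars] dropped"
String dropIfExists = "DROP TABLE IF EXISTS dfs.tmp.cars";  // table missing -> direct plan reports "Table [cars] not found", no error
String dropMissing  = "DROP TABLE dfs.tmp.cars";            // table missing, no IF EXISTS -> UserException.validationError()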
Use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.schema.Table in project drill by axbaretto.
The class InfoSchemaRecordGenerator, method visitTables.
/**
 * Visit the tables in the given schema.
 *
 * @param schemaPath the path to the given schema
 * @param schema the given schema
 */
public void visitTables(String schemaPath, SchemaPlus schema) {
  final AbstractSchema drillSchema = schema.unwrap(AbstractSchema.class);
  final List<String> tableNames = Lists.newArrayList(schema.getTableNames());
  for (Pair<String, ? extends Table> tableNameToTable : drillSchema.getTablesByNames(tableNames)) {
    final String tableName = tableNameToTable.getKey();
    final Table table = tableNameToTable.getValue();
    final TableType tableType = table.getJdbcTableType();
    // Visit the table, and if requested ...
    if (shouldVisitTable(schemaPath, tableName, tableType) && visitTable(schemaPath, tableName, table)) {
      // ... do for each of the table's fields.
      final RelDataType tableRow = table.getRowType(new JavaTypeFactoryImpl(DRILL_REL_DATATYPE_SYSTEM));
      for (RelDataTypeField field : tableRow.getFieldList()) {
        if (shouldVisitColumn(schemaPath, tableName, field.getName())) {
          visitField(schemaPath, tableName, field);
        }
      }
    }
  }
}
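As a standalone illustration of the same traversal, a minimal sketch using only plain Calcite APIs (no Drill types); the method name printColumns and the typeFactory parameter are assumptions, and imports from org.apache.calcite.schema and org.apache.calcite.rel.type are implied.

// Minimal sketch, plain Calcite only: list every column of every TABLE in a schema.
static void printColumns(String schemaPath, SchemaPlus schema, RelDataTypeFactory typeFactory) {
  for (String tableName : schema.getTableNames()) {
    Table table = schema.getTable(tableName);
    if (table == null || table.getJdbcTableType() != Schema.TableType.TABLE) {
      continue; // skip views, system tables, etc.
    }
    RelDataType rowType = table.getRowType(typeFactory);
    for (RelDataTypeField field : rowType.getFieldList()) {
      System.out.println(schemaPath + "." + tableName + "." + field.getName()
          + " : " + field.getType().getSqlTypeName());
    }
  }
}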
Use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.schema.Table in project streamline by hortonworks.
The class TestCompilerUtils, method sqlOverDummyTable.
public static CalciteState sqlOverDummyTable(String sql)
    throws RelConversionException, ValidationException, SqlParseException {
  SchemaPlus schema = Frameworks.createRootSchema(true);
  JavaTypeFactory typeFactory = new JavaTypeFactoryImpl(RelDataTypeSystem.DEFAULT);
  StreamableTable streamableTable = new CompilerUtil.TableBuilderInfo(typeFactory)
      .field("ID", SqlTypeName.INTEGER)
      .field("NAME", typeFactory.createType(String.class))
      .field("ADDR", typeFactory.createType(String.class))
      .build();
  Table table = streamableTable.stream();
  schema.add("FOO", table);
  schema.add("BAR", table);
  schema.add("MYPLUS", ScalarFunctionImpl.create(MyPlus.class, "eval"));
  List<SqlOperatorTable> sqlOperatorTables = new ArrayList<>();
  sqlOperatorTables.add(SqlStdOperatorTable.instance());
  sqlOperatorTables.add(new CalciteCatalogReader(CalciteSchema.from(schema), false, Collections.<String>emptyList(), typeFactory));
  SqlOperatorTable chainedSqlOperatorTable = new ChainedSqlOperatorTable(sqlOperatorTables);
  FrameworkConfig config = Frameworks.newConfigBuilder()
      .defaultSchema(schema)
      .operatorTable(chainedSqlOperatorTable)
      .build();
  Planner planner = Frameworks.getPlanner(config);
  SqlNode parse = planner.parse(sql);
  SqlNode validate = planner.validate(parse);
  RelNode tree = planner.convert(validate);
  System.out.println(RelOptUtil.toString(tree, SqlExplainLevel.ALL_ATTRIBUTES));
  return new CalciteState(schema, tree);
}
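A hedged usage sketch (not part of the original test class); the query text is illustrative, and FOO and MYPLUS are the names registered above.

// Plan a query against the dummy FOO table; MYPLUS is the scalar function registered above.
CalciteState state = sqlOverDummyTable("SELECT ID, NAME, MYPLUS(ID, 1) FROM FOO WHERE ID > 2");
// CalciteState wraps the root schema and the converted RelNode for later compilation steps.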
Use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.schema.Table in project streamline by hortonworks.
The class StreamlineSqlImpl, method updateSchema.
private List<FieldInfo> updateSchema(SqlCreateTable n) {
  CompilerUtil.TableBuilderInfo builder = new CompilerUtil.TableBuilderInfo(typeFactory);
  List<FieldInfo> fields = new ArrayList<>();
  for (ColumnDefinition col : n.fieldList()) {
    builder.field(col.name(), col.type(), col.constraint());
    RelDataType dataType = col.type().deriveType(typeFactory);
    Class<?> javaType = (Class<?>) typeFactory.getJavaClass(dataType);
    ColumnConstraint constraint = col.constraint();
    boolean isPrimary = constraint instanceof ColumnConstraint.PrimaryKey;
    fields.add(new FieldInfo(col.name(), javaType, isPrimary));
  }
  if (n.parallelism() != null) {
    builder.parallelismHint(n.parallelism());
  }
  Table table = builder.build();
  schema.add(n.tableName(), table);
  return fields;
}
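The per-column type derivation can be exercised in isolation with plain Calcite; a minimal sketch under that assumption (variable names are illustrative).

// Map a SQL column type to its Java representation, as updateSchema does for each column.
JavaTypeFactory typeFactory = new JavaTypeFactoryImpl(RelDataTypeSystem.DEFAULT);
RelDataType varcharType = typeFactory.createSqlType(SqlTypeName.VARCHAR);
Class<?> javaClass = (Class<?>) typeFactory.getJavaClass(varcharType); // java.lang.String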
Use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.schema.Table in project beam by apache.
The class DataStoreReadWriteIT, method testReadAllSupportedTypes.
@Test
public void testReadAllSupportedTypes() {
  BeamSqlEnv sqlEnv = BeamSqlEnv.inMemory(new DataStoreV1TableProvider());
  String projectId = options.getProject();
  final Schema expectedSchema =
      Schema.builder()
          .addNullableField("__key__", VARBINARY)
          .addNullableField("boolean", BOOLEAN)
          .addNullableField("datetime", DATETIME)
          .addNullableField("floatingnumber", DOUBLE)
          .addNullableField("integer", INT64)
          .addNullableField("primitivearray", FieldType.array(STRING))
          .addNullableField("string", STRING)
          .addNullableField("text", STRING)
          .build();
  String createTableStatement =
      "CREATE EXTERNAL TABLE TEST( \n"
          + " `__key__` VARBINARY, \n"
          + " `boolean` BOOLEAN, \n"
          + " `datetime` TIMESTAMP, \n"
          // + " `embeddedentity` ROW(`property1` VARCHAR, `property2` BIGINT), \n"
          + " `floatingnumber` DOUBLE, \n"
          + " `integer` BIGINT, \n"
          + " `primitivearray` ARRAY<VARCHAR>, \n"
          + " `string` VARCHAR, \n"
          + " `text` VARCHAR"
          + ") \n"
          + "TYPE 'datastoreV1' \n"
          + "LOCATION '" + projectId + "/" + KIND_ALL_TYPES + "'";
  sqlEnv.executeDdl(createTableStatement);
  String selectTableStatement = "SELECT * FROM TEST";
  PCollection<Row> output =
      BeamSqlRelUtils.toPCollection(readPipeline, sqlEnv.parseQuery(selectTableStatement));
  assertThat(output.getSchema(), equalTo(expectedSchema));
  PipelineResult.State state = readPipeline.run().waitUntilFinish(Duration.standardMinutes(5));
  assertThat(state, equalTo(State.DONE));
}
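A hedged follow-up sketch (not in the source test): once the DDL above has registered TEST, the same table can serve filtered queries; the column names come from that DDL.

// Query the same TEST table with a projection and a filter.
String filteredQuery = "SELECT `__key__`, `string` FROM TEST WHERE `boolean` = TRUE";
PCollection<Row> filtered =
    BeamSqlRelUtils.toPCollection(readPipeline, sqlEnv.parseQuery(filteredQuery));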