Example usage of org.apache.flink.table.delegation.Parser in the Apache Flink project: class HiveDialectITCase, method testParseCommand.
@Test
public void testParseCommand() {
    TableEnvironmentInternal tableEnvInternal = (TableEnvironmentInternal) tableEnv;
    Parser parser = tableEnvInternal.getParser();
    // The hive dialect must be served by HiveParser. Use the same
    // assertThat/instanceOf idiom as the remaining assertions instead of
    // assertTrue(x instanceof Y), which loses the actual type on failure.
    assertThat(parser, instanceOf(HiveParser.class));
    // Client-level commands are recognized case-insensitively and each one
    // parses to its dedicated operation type.
    assertThat(parser.parse("HELP").get(0), instanceOf(HelpOperation.class));
    assertThat(parser.parse("clear").get(0), instanceOf(ClearOperation.class));
    assertThat(parser.parse("SET").get(0), instanceOf(SetOperation.class));
    assertThat(parser.parse("ResET").get(0), instanceOf(ResetOperation.class));
    assertThat(parser.parse("Exit").get(0), instanceOf(QuitOperation.class));
}
Example usage of org.apache.flink.table.delegation.Parser in the Apache Flink project: class HiveDialectITCase, method testPluggableParser.
@Test
public void testPluggableParser() {
    TableEnvironmentInternal tableEnvInternal = (TableEnvironmentInternal) tableEnv;
    Parser initialParser = tableEnvInternal.getParser();
    // The hive dialect is expected to be handled by HiveParser.
    assertTrue(initialParser instanceof HiveParser);
    // Executing a statement must not re-create the parser instance.
    tableEnvInternal.executeSql("show databases");
    assertSame(initialParser, tableEnvInternal.getParser());
    // After switching the dialect, a different parser implementation is used.
    tableEnvInternal.getConfig().setSqlDialect(SqlDialect.DEFAULT);
    String parserClassAfterSwitch = tableEnvInternal.getParser().getClass().getName();
    assertNotEquals(initialParser.getClass().getName(), parserClassAfterSwitch);
}
Example usage of org.apache.flink.table.delegation.Parser in the Apache Flink project: class HiveDialectITCase, method testCreateTable.
@Test
public void testCreateTable() throws Exception {
// tbl1: external, partitioned table with an explicit location and tblproperties.
String location = warehouse + "/external_location";
tableEnv.executeSql(String.format("create external table tbl1 (d decimal(10,0),ts timestamp) partitioned by (p string) location '%s' tblproperties('k1'='v1')", location));
Table hiveTable = hiveCatalog.getHiveTable(new ObjectPath("default", "tbl1"));
assertEquals(TableType.EXTERNAL_TABLE.toString(), hiveTable.getTableType());
assertEquals(1, hiveTable.getPartitionKeysSize());
assertEquals(location, locationPath(hiveTable.getSd().getLocation()));
assertEquals("v1", hiveTable.getParameters().get("k1"));
// The location must live in the storage descriptor, not leak into table parameters.
assertFalse(hiveTable.getParameters().containsKey(SqlCreateHiveTable.TABLE_LOCATION_URI));
// tbl2: "stored as orc" must wire up the ORC serde and input/output formats.
tableEnv.executeSql("create table tbl2 (s struct<ts:timestamp,bin:binary>) stored as orc");
hiveTable = hiveCatalog.getHiveTable(new ObjectPath("default", "tbl2"));
assertEquals(TableType.MANAGED_TABLE.toString(), hiveTable.getTableType());
assertEquals(OrcSerde.class.getName(), hiveTable.getSd().getSerdeInfo().getSerializationLib());
assertEquals(OrcInputFormat.class.getName(), hiveTable.getSd().getInputFormat());
assertEquals(OrcOutputFormat.class.getName(), hiveTable.getSd().getOutputFormat());
// tbl3: explicit "row format serde" with multiple partition columns.
tableEnv.executeSql("create table tbl3 (m map<timestamp,binary>) partitioned by (p1 bigint,p2 tinyint) " + "row format serde 'org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe'");
hiveTable = hiveCatalog.getHiveTable(new ObjectPath("default", "tbl3"));
assertEquals(2, hiveTable.getPartitionKeysSize());
assertEquals(LazyBinarySerDe.class.getName(), hiveTable.getSd().getSerdeInfo().getSerializationLib());
// tbl4: delimited row format; the field delimiter also becomes the serialization format.
tableEnv.executeSql("create table tbl4 (x int,y smallint) row format delimited fields terminated by '|' lines terminated by '\n'");
hiveTable = hiveCatalog.getHiveTable(new ObjectPath("default", "tbl4"));
assertEquals("|", hiveTable.getSd().getSerdeInfo().getParameters().get(serdeConstants.FIELD_DELIM));
assertEquals("|", hiveTable.getSd().getSerdeInfo().getParameters().get(serdeConstants.SERIALIZATION_FORMAT));
assertEquals("\n", hiveTable.getSd().getSerdeInfo().getParameters().get(serdeConstants.LINE_DELIM));
// tbl5: collection-item and map-key delimiters for complex types.
tableEnv.executeSql("create table tbl5 (m map<bigint,string>) row format delimited collection items terminated by ';' " + "map keys terminated by ':'");
hiveTable = hiveCatalog.getHiveTable(new ObjectPath("default", "tbl5"));
assertEquals(";", hiveTable.getSd().getSerdeInfo().getParameters().get(serdeConstants.COLLECTION_DELIM));
assertEquals(":", hiveTable.getSd().getSerdeInfo().getParameters().get(serdeConstants.MAPKEY_DELIM));
// "create table if not exists" on an existing table must be a no-op: the
// original create time is preserved, proving the table was not replaced.
int createdTimeForTableExists = hiveTable.getCreateTime();
tableEnv.executeSql("create table if not exists tbl5 (m map<bigint,string>)");
hiveTable = hiveCatalog.getHiveTable(new ObjectPath("default", "tbl5"));
assertEquals(createdTimeForTableExists, hiveTable.getCreateTime());
// test describe table
Parser parser = ((TableEnvironmentInternal) tableEnv).getParser();
DescribeTableOperation operation = (DescribeTableOperation) parser.parse("desc tbl1").get(0);
assertFalse(operation.isExtended());
assertEquals(ObjectIdentifier.of(hiveCatalog.getName(), "default", "tbl1"), operation.getSqlIdentifier());
// a database-qualified name resolves against the current catalog
operation = (DescribeTableOperation) parser.parse("describe default.tbl2").get(0);
assertFalse(operation.isExtended());
assertEquals(ObjectIdentifier.of(hiveCatalog.getName(), "default", "tbl2"), operation.getSqlIdentifier());
// "describe extended" must flip the extended flag
operation = (DescribeTableOperation) parser.parse("describe extended tbl3").get(0);
assertTrue(operation.isExtended());
assertEquals(ObjectIdentifier.of(hiveCatalog.getName(), "default", "tbl3"), operation.getSqlIdentifier());
}
Example usage of org.apache.flink.table.delegation.Parser in the Apache Zeppelin project: class Flink114Shims, method parseSql.
/**
 * Parses a statement, first via regular expression matching (which recognizes
 * client-only commands) and then falling back to Flink's SQL parser.
 *
 * @param tableEnv the {@link TableEnvironmentInternal} whose parser handles the fallback
 * @param stmt the raw SQL statement to parse
 * @return the parsed command call, or {@link Optional#empty()} if the statement
 *     is not recognized by either strategy
 */
@Override
public Optional<SqlCommandParser.SqlCommandCall> parseSql(Object tableEnv, String stmt) {
    Parser sqlParser = ((TableEnvironmentInternal) tableEnv).getParser();
    SqlCommandCall sqlCommandCall;
    try {
        // Regex matching runs first: it handles client commands the SQL parser rejects.
        Optional<SqlCommandCall> callOpt = parseByRegexMatching(stmt);
        sqlCommandCall = callOpt.isPresent() ? callOpt.get() : parseBySqlParser(sqlParser, stmt);
    } catch (Exception e) {
        // Best-effort parsing: any failure means "statement not recognized".
        return Optional.empty();
    }
    // ofNullable guards against a null result from parseBySqlParser; the
    // previous Optional.of would have thrown an uncaught NullPointerException.
    return Optional.ofNullable(sqlCommandCall);
}
Example usage of org.apache.flink.table.delegation.Parser in the Apache Zeppelin project: class Flink113Shims, method parseSql.
/**
 * Parses a statement, first via regular expression matching (which recognizes
 * client-only commands) and then falling back to Flink's SQL parser.
 *
 * @param tableEnv the {@link TableEnvironmentInternal} whose parser handles the fallback
 * @param stmt the raw SQL statement to parse
 * @return the parsed command call, or {@link Optional#empty()} if the statement
 *     is not recognized by either strategy
 */
@Override
public Optional<SqlCommandParser.SqlCommandCall> parseSql(Object tableEnv, String stmt) {
    Parser sqlParser = ((TableEnvironmentInternal) tableEnv).getParser();
    SqlCommandCall sqlCommandCall;
    try {
        // Regex matching runs first: it handles client commands the SQL parser rejects.
        Optional<SqlCommandCall> callOpt = parseByRegexMatching(stmt);
        sqlCommandCall = callOpt.isPresent() ? callOpt.get() : parseBySqlParser(sqlParser, stmt);
    } catch (Exception e) {
        // Best-effort parsing: any failure means "statement not recognized".
        return Optional.empty();
    }
    // ofNullable guards against a null result from parseBySqlParser; the
    // previous Optional.of would have thrown an uncaught NullPointerException.
    return Optional.ofNullable(sqlCommandCall);
}
Aggregations