Use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode in project flink by apache.
From the class HiveParserDDLSemanticAnalyzer, the method convertCreateView:
private Operation convertCreateView(HiveParserASTNode ast) throws SemanticException {
    String[] qualTabName =
            HiveParserBaseSemanticAnalyzer.getQualifiedTableName(
                    (HiveParserASTNode) ast.getChild(0));
    String dbDotTable = HiveParserBaseSemanticAnalyzer.getDotName(qualTabName);
    List<FieldSchema> cols = null;
    boolean ifNotExists = false;
    boolean isAlterViewAs = false;
    String comment = null;
    HiveParserASTNode selectStmt = null;
    Map<String, String> tblProps = null;
    boolean isMaterialized =
            ast.getToken().getType() == HiveASTParser.TOK_CREATE_MATERIALIZED_VIEW;
    if (isMaterialized) {
        handleUnsupportedOperation("MATERIALIZED VIEW is not supported");
    }
    HiveParserStorageFormat storageFormat = new HiveParserStorageFormat(conf);
    LOG.info("Creating view " + dbDotTable + " position=" + ast.getCharPositionInLine());
    int numCh = ast.getChildCount();
    // Child 0 is the qualified view name; the remaining children are optional clauses.
    for (int num = 1; num < numCh; num++) {
        HiveParserASTNode child = (HiveParserASTNode) ast.getChild(num);
        if (storageFormat.fillStorageFormat(child)) {
            handleUnsupportedOperation("FILE FORMAT for view is not supported");
        }
        switch (child.getToken().getType()) {
            case HiveASTParser.TOK_IFNOTEXISTS:
                ifNotExists = true;
                break;
            case HiveASTParser.TOK_REWRITE_ENABLED:
                handleUnsupportedOperation("MATERIALIZED VIEW REWRITE is not supported");
                break;
            case HiveASTParser.TOK_ORREPLACE:
                handleUnsupportedOperation("CREATE OR REPLACE VIEW is not supported");
                break;
            case HiveASTParser.TOK_QUERY:
                selectStmt = child;
                break;
            case HiveASTParser.TOK_TABCOLNAME:
                cols = HiveParserBaseSemanticAnalyzer.getColumns(child);
                break;
            case HiveASTParser.TOK_TABLECOMMENT:
                comment =
                        HiveParserBaseSemanticAnalyzer.unescapeSQLString(
                                child.getChild(0).getText());
                break;
            case HiveASTParser.TOK_TABLEPROPERTIES:
                tblProps = getProps((HiveParserASTNode) child.getChild(0));
                break;
            case HiveASTParser.TOK_TABLEROWFORMAT:
                handleUnsupportedOperation("ROW FORMAT for view is not supported");
                break;
            case HiveASTParser.TOK_TABLESERIALIZER:
                handleUnsupportedOperation("SERDE for view is not supported");
                break;
            case HiveASTParser.TOK_TABLELOCATION:
                handleUnsupportedOperation("LOCATION for view is not supported");
                break;
            case HiveASTParser.TOK_VIEWPARTCOLS:
                handleUnsupportedOperation("PARTITION COLUMN for view is not supported");
                break;
            default:
                throw new ValidationException(
                        "Unknown AST node for CREATE/ALTER VIEW: " + child);
        }
    }
    // ALTER VIEW ... AS shares this code path; detect it by the query child.
    if (ast.getToken().getType() == HiveASTParser.TOK_ALTERVIEW
            && ast.getChild(1).getType() == HiveASTParser.TOK_QUERY) {
        isAlterViewAs = true;
    }
    queryState.setCommandType(HiveOperation.CREATEVIEW);
    HiveParserCreateViewInfo createViewInfo =
            new HiveParserCreateViewInfo(dbDotTable, cols, selectStmt);
    hiveParser.analyzeCreateView(createViewInfo, context, queryState, hiveShim);
    ObjectIdentifier viewIdentifier = parseObjectIdentifier(createViewInfo.getCompoundName());
    TableSchema schema =
            HiveTableUtil.createTableSchema(
                    createViewInfo.getSchema(),
                    Collections.emptyList(),
                    Collections.emptySet(),
                    null);
    Map<String, String> props = new HashMap<>();
    if (isAlterViewAs) {
        // ALTER VIEW AS keeps the existing options and comment from the catalog.
        CatalogBaseTable baseTable = getCatalogBaseTable(viewIdentifier);
        props.putAll(baseTable.getOptions());
        comment = baseTable.getComment();
    } else {
        if (tblProps != null) {
            props.putAll(tblProps);
        }
    }
    CatalogView catalogView =
            new CatalogViewImpl(
                    createViewInfo.getOriginalText(),
                    createViewInfo.getExpandedText(),
                    schema,
                    props,
                    comment);
    if (isAlterViewAs) {
        return new AlterViewAsOperation(viewIdentifier, catalogView);
    } else {
        return new CreateViewOperation(viewIdentifier, catalogView, ifNotExists, false);
    }
}
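The heart of convertCreateView is the loop that dispatches on each child's token type, collecting supported clauses and rejecting unsupported ones before any catalog objects are built. Below is a minimal, self-contained sketch of that dispatch pattern; the Node class and the token constants are simplified stand-ins invented for illustration, not the real HiveParserASTNode or HiveASTParser API.

import java.util.ArrayList;
import java.util.List;

public class ViewClauseDispatch {

    // Hypothetical token constants; the real values live in HiveASTParser.
    static final int TOK_VIEWNAME = 0;
    static final int TOK_IFNOTEXISTS = 1;
    static final int TOK_QUERY = 2;
    static final int TOK_TABLELOCATION = 3;

    // Simplified stand-in for HiveParserASTNode.
    static class Node {
        final int tokenType;
        final List<Node> children = new ArrayList<>();
        Node(int tokenType) { this.tokenType = tokenType; }
    }

    public static void main(String[] args) {
        // Models CREATE VIEW IF NOT EXISTS v AS SELECT ...:
        // child 0 is the view name, the rest are optional clauses.
        Node ast = new Node(-1);
        ast.children.add(new Node(TOK_VIEWNAME));
        ast.children.add(new Node(TOK_IFNOTEXISTS));
        ast.children.add(new Node(TOK_QUERY));

        boolean ifNotExists = false;
        Node selectStmt = null;
        // Clause dispatch starts at index 1, as in convertCreateView.
        for (int i = 1; i < ast.children.size(); i++) {
            Node child = ast.children.get(i);
            switch (child.tokenType) {
                case TOK_IFNOTEXISTS:
                    ifNotExists = true;
                    break;
                case TOK_QUERY:
                    selectStmt = child;
                    break;
                case TOK_TABLELOCATION: // accepted by the grammar but rejected here
                    throw new UnsupportedOperationException(
                            "LOCATION for view is not supported");
                default:
                    throw new IllegalArgumentException(
                            "Unknown AST node: " + child.tokenType);
            }
        }
        System.out.println("ifNotExists=" + ifNotExists + ", hasQuery=" + (selectStmt != null));
    }
}

Rejecting unsupported clauses inside the same loop keeps the set of accepted syntax explicit, which is what the Flink method achieves with handleUnsupportedOperation.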
Use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode in project flink by apache.
From the class HiveParserDDLSemanticAnalyzer, the method getPartSpec:
public static HashMap<String, String> getPartSpec(HiveParserASTNode partspec) {
    if (partspec == null) {
        return null;
    }
    HashMap<String, String> partSpec = new LinkedHashMap<>();
    for (int i = 0; i < partspec.getChildCount(); ++i) {
        HiveParserASTNode partVal = (HiveParserASTNode) partspec.getChild(i);
        String key = partVal.getChild(0).getText();
        String val = null;
        // The value may sit at index 1 or 2 depending on the TOK_PARTVAL shape;
        // with only one child (e.g. a dynamic partition column) val stays null.
        if (partVal.getChildCount() == 3) {
            val = HiveParserBaseSemanticAnalyzer.stripQuotes(partVal.getChild(2).getText());
        } else if (partVal.getChildCount() == 2) {
            val = HiveParserBaseSemanticAnalyzer.stripQuotes(partVal.getChild(1).getText());
        }
        partSpec.put(key.toLowerCase(), val);
    }
    return partSpec;
}
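getPartSpec reduces a PARTITION (...) clause to an ordered map, lower-casing column names and allowing null values for dynamic partition columns. Here is a minimal sketch of that reduction, with a simplified stripQuotes standing in for HiveParserBaseSemanticAnalyzer.stripQuotes and plain arrays standing in for the TOK_PARTVAL children:

import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class PartSpecSketch {

    // Simplified stand-in for HiveParserBaseSemanticAnalyzer.stripQuotes.
    static String stripQuotes(String s) {
        if (s.length() > 1
                && (s.startsWith("'") && s.endsWith("'")
                        || s.startsWith("\"") && s.endsWith("\""))) {
            return s.substring(1, s.length() - 1);
        }
        return s;
    }

    public static void main(String[] args) {
        // Each entry models one TOK_PARTVAL child: a column name and an
        // optional quoted literal (null for a dynamic partition column).
        List<String[]> partVals = List.of(
                new String[] {"DS", "'2020-01-01'"},
                new String[] {"hr", null});

        Map<String, String> partSpec = new LinkedHashMap<>();
        for (String[] pv : partVals) {
            String val = pv[1] == null ? null : stripQuotes(pv[1]);
            // Keys are lower-cased, as in getPartSpec.
            partSpec.put(pv[0].toLowerCase(), val);
        }
        System.out.println(partSpec); // {ds=2020-01-01, hr=null}
    }
}

The LinkedHashMap preserves the declared column order, mirroring the original method.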
Use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode in project flink by apache.
From the class HiveParserErrorMsg, the method renderOrigin:
private static void renderOrigin(StringBuilder sb, HiveParserASTNodeOrigin origin) {
    while (origin != null) {
        sb.append(" in definition of ");
        sb.append(origin.getObjectType());
        sb.append(" ");
        sb.append(origin.getObjectName());
        sb.append(" [");
        sb.append(LINE_SEP);
        sb.append(origin.getObjectDefinition());
        sb.append(LINE_SEP);
        sb.append("] used as ");
        sb.append(origin.getUsageAlias());
        sb.append(" at ");
        HiveParserASTNode usageNode = origin.getUsageNode();
        renderPosition(sb, usageNode);
        origin = usageNode.getOrigin();
    }
}
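renderOrigin walks the chain of origins so that an error raised deep inside an expanded view definition reports every definition it passed through, innermost first. Below is a self-contained sketch of that walk; Origin is a simplified stand-in for HiveParserASTNodeOrigin, its parent field plays the role of the usage node's origin, and the position rendering done by renderPosition is omitted.

class Origin {
    final String objectType, objectName, definition, usageAlias;
    final Origin parent; // origin of the place where this object is used

    Origin(String type, String name, String def, String alias, Origin parent) {
        this.objectType = type;
        this.objectName = name;
        this.definition = def;
        this.usageAlias = alias;
        this.parent = parent;
    }
}

public class OriginChainSketch {
    static final String LINE_SEP = System.lineSeparator();

    public static void main(String[] args) {
        // v1 is referenced inside v2's definition, so an error inside v1's
        // expanded text carries a two-level origin chain: v1, then v2.
        Origin v2 = new Origin("VIEW", "v2", "SELECT x FROM v1", "v2", null);
        Origin v1 = new Origin("VIEW", "v1", "SELECT x FROM t", "v1", v2);

        StringBuilder sb = new StringBuilder("Invalid column reference");
        // Walk outward from the innermost definition, as renderOrigin does
        // by following usageNode.getOrigin().
        for (Origin o = v1; o != null; o = o.parent) {
            sb.append(" in definition of ").append(o.objectType)
              .append(" ").append(o.objectName)
              .append(" [").append(LINE_SEP).append(o.definition)
              .append(LINE_SEP).append("] used as ").append(o.usageAlias);
        }
        System.out.println(sb);
    }
}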