Use of org.apache.flink.table.api.ValidationException in project flink by apache.
The class KafkaConnectorOptionsUtil, method parseSpecificOffsets.
/**
 * Parses a specific-offsets string into a map.
 *
 * <p>The specific-offsets string uses the following format:
 *
 * <pre>
 * scan.startup.specific-offsets = partition:0,offset:42;partition:1,offset:300
 * </pre>
 *
 * @return the specific offsets as a map, where the key is the partition and the value is the offset
 */
public static Map<Integer, Long> parseSpecificOffsets(String specificOffsetsStr, String optionKey) {
    final Map<Integer, Long> offsetMap = new HashMap<>();
    final String[] pairs = specificOffsetsStr.split(";");
    final String validationExceptionMessage =
            String.format(
                    "Invalid properties '%s' should follow the format "
                            + "'partition:0,offset:42;partition:1,offset:300', but is '%s'.",
                    optionKey, specificOffsetsStr);
    if (pairs.length == 0) {
        throw new ValidationException(validationExceptionMessage);
    }
    for (String pair : pairs) {
        if (null == pair || pair.length() == 0 || !pair.contains(",")) {
            throw new ValidationException(validationExceptionMessage);
        }
        final String[] kv = pair.split(",");
        if (kv.length != 2 || !kv[0].startsWith(PARTITION + ':') || !kv[1].startsWith(OFFSET + ':')) {
            throw new ValidationException(validationExceptionMessage);
        }
        String partitionValue = kv[0].substring(kv[0].indexOf(":") + 1);
        String offsetValue = kv[1].substring(kv[1].indexOf(":") + 1);
        try {
            final Integer partition = Integer.valueOf(partitionValue);
            final Long offset = Long.valueOf(offsetValue);
            offsetMap.put(partition, offset);
        } catch (NumberFormatException e) {
            throw new ValidationException(validationExceptionMessage, e);
        }
    }
    return offsetMap;
}
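For illustration, a minimal usage sketch. This is hypothetical: it assumes KafkaConnectorOptionsUtil and parseSpecificOffsets are accessible from the calling package, and that the PARTITION and OFFSET constants resolve to the literals "partition" and "offset" shown in the documented format.

import java.util.Map;

public class SpecificOffsetsDemo {
    public static void main(String[] args) {
        // Valid input: two partition/offset pairs separated by ';'.
        Map<Integer, Long> offsets =
                KafkaConnectorOptionsUtil.parseSpecificOffsets(
                        "partition:0,offset:42;partition:1,offset:300",
                        "scan.startup.specific-offsets");
        System.out.println(offsets); // prints {0=42, 1=300}

        // Malformed input such as "partition:0" (no offset part) fails the
        // ',' check above and throws ValidationException.
    }
}

Note that the option key is only used to build the error message, so any descriptive key works for ad-hoc testing.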
Use of org.apache.flink.table.api.ValidationException in project flink by apache.
The class HiveParserDDLSemanticAnalyzer, method convertAlterTable.
private Operation convertAlterTable(HiveParserASTNode input) throws SemanticException {
    Operation operation = null;
    HiveParserASTNode ast = (HiveParserASTNode) input.getChild(1);
    String[] qualified =
            HiveParserBaseSemanticAnalyzer.getQualifiedTableName(
                    (HiveParserASTNode) input.getChild(0));
    String tableName = HiveParserBaseSemanticAnalyzer.getDotName(qualified);
    HashMap<String, String> partSpec = null;
    HiveParserASTNode partSpecNode = (HiveParserASTNode) input.getChild(2);
    if (partSpecNode != null) {
        partSpec = getPartSpec(partSpecNode);
    }
    CatalogBaseTable alteredTable = getAlteredTable(tableName, false);
    switch (ast.getType()) {
        case HiveASTParser.TOK_ALTERTABLE_RENAME:
            operation = convertAlterTableRename(tableName, ast, false);
            break;
        case HiveASTParser.TOK_ALTERTABLE_ADDCOLS:
            operation = convertAlterTableModifyCols(alteredTable, tableName, ast, false);
            break;
        case HiveASTParser.TOK_ALTERTABLE_REPLACECOLS:
            operation = convertAlterTableModifyCols(alteredTable, tableName, ast, true);
            break;
        case HiveASTParser.TOK_ALTERTABLE_RENAMECOL:
            operation = convertAlterTableChangeCol(alteredTable, qualified, ast);
            break;
        case HiveASTParser.TOK_ALTERTABLE_ADDPARTS:
            operation = convertAlterTableAddParts(qualified, ast);
            break;
        case HiveASTParser.TOK_ALTERTABLE_DROPPARTS:
            operation = convertAlterTableDropParts(qualified, ast);
            break;
        case HiveASTParser.TOK_ALTERTABLE_PROPERTIES:
            operation = convertAlterTableProps(alteredTable, tableName, null, ast, false, false);
            break;
        case HiveASTParser.TOK_ALTERTABLE_DROPPROPERTIES:
            operation = convertAlterTableProps(alteredTable, tableName, null, ast, false, true);
            break;
        case HiveASTParser.TOK_ALTERTABLE_UPDATESTATS:
            operation = convertAlterTableProps(alteredTable, tableName, partSpec, ast, false, false);
            break;
        case HiveASTParser.TOK_ALTERTABLE_FILEFORMAT:
            operation = convertAlterTableFileFormat(alteredTable, ast, tableName, partSpec);
            break;
        case HiveASTParser.TOK_ALTERTABLE_LOCATION:
            operation = convertAlterTableLocation(alteredTable, ast, tableName, partSpec);
            break;
        case HiveASTParser.TOK_ALTERTABLE_SERIALIZER:
            operation = convertAlterTableSerde(alteredTable, ast, tableName, partSpec);
            break;
        case HiveASTParser.TOK_ALTERTABLE_SERDEPROPERTIES:
            operation = convertAlterTableSerdeProps(alteredTable, ast, tableName, partSpec);
            break;
        case HiveASTParser.TOK_ALTERTABLE_TOUCH:
        case HiveASTParser.TOK_ALTERTABLE_ARCHIVE:
        case HiveASTParser.TOK_ALTERTABLE_UNARCHIVE:
        case HiveASTParser.TOK_ALTERTABLE_PARTCOLTYPE:
        case HiveASTParser.TOK_ALTERTABLE_SKEWED:
        case HiveASTParser.TOK_ALTERTABLE_EXCHANGEPARTITION:
        case HiveASTParser.TOK_ALTERTABLE_MERGEFILES:
        case HiveASTParser.TOK_ALTERTABLE_RENAMEPART:
        case HiveASTParser.TOK_ALTERTABLE_SKEWED_LOCATION:
        case HiveASTParser.TOK_ALTERTABLE_BUCKETS:
        case HiveASTParser.TOK_ALTERTABLE_CLUSTER_SORT:
        case HiveASTParser.TOK_ALTERTABLE_COMPACT:
        case HiveASTParser.TOK_ALTERTABLE_UPDATECOLSTATS:
        case HiveASTParser.TOK_ALTERTABLE_DROPCONSTRAINT:
        case HiveASTParser.TOK_ALTERTABLE_ADDCONSTRAINT:
            handleUnsupportedOperation(ast);
            break;
        default:
            throw new ValidationException("Unknown AST node for ALTER TABLE: " + ast);
    }
    return operation;
}
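To see where these branches come from, a hedged sketch of exercising the analyzer through Flink's public SQL API. It assumes a HiveCatalog named "hive" was registered beforehand and set as the current catalog, so that the Hive dialect parser (and hence convertAlterTable) handles the statements; the table name t is a placeholder.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.SqlDialect;
import org.apache.flink.table.api.TableEnvironment;

public class AlterTableDialectDemo {
    public static void main(String[] args) {
        TableEnvironment tableEnv = TableEnvironment.create(EnvironmentSettings.inBatchMode());
        // Assumption: a HiveCatalog named "hive" is already registered.
        tableEnv.useCatalog("hive");
        tableEnv.getConfig().setSqlDialect(SqlDialect.HIVE);

        // TOK_ALTERTABLE_PROPERTIES -> convertAlterTableProps(...)
        tableEnv.executeSql("ALTER TABLE t SET TBLPROPERTIES ('note' = 'demo')");

        // TOK_ALTERTABLE_RENAME -> convertAlterTableRename(...)
        tableEnv.executeSql("ALTER TABLE t RENAME TO t2");

        // TOK_ALTERTABLE_TOUCH is in the unsupported group and would hit
        // handleUnsupportedOperation(ast):
        // tableEnv.executeSql("ALTER TABLE t2 TOUCH");
    }
}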
Use of org.apache.flink.table.api.ValidationException in project flink by apache.
The class HiveParserDDLSemanticAnalyzer, method convertAlterView.
private Operation convertAlterView(HiveParserASTNode ast) throws SemanticException {
    Operation operation = null;
    String[] qualified =
            HiveParserBaseSemanticAnalyzer.getQualifiedTableName(
                    (HiveParserASTNode) ast.getChild(0));
    String tableName = HiveParserBaseSemanticAnalyzer.getDotName(qualified);
    CatalogBaseTable alteredTable = getAlteredTable(tableName, true);
    if (ast.getChild(1).getType() == HiveASTParser.TOK_QUERY) {
        // alter view as
        operation = convertCreateView(ast);
    } else {
        ast = (HiveParserASTNode) ast.getChild(1);
        switch (ast.getType()) {
            case HiveASTParser.TOK_ALTERVIEW_PROPERTIES:
                operation = convertAlterTableProps(alteredTable, tableName, null, ast, true, false);
                break;
            case HiveASTParser.TOK_ALTERVIEW_DROPPROPERTIES:
                operation = convertAlterTableProps(alteredTable, tableName, null, ast, true, true);
                break;
            case HiveASTParser.TOK_ALTERVIEW_RENAME:
                operation = convertAlterTableRename(tableName, ast, true);
                break;
            case HiveASTParser.TOK_ALTERVIEW_ADDPARTS:
            case HiveASTParser.TOK_ALTERVIEW_DROPPARTS:
                handleUnsupportedOperation("ADD/DROP PARTITION for view is not supported");
                break;
            default:
                throw new ValidationException("Unknown AST node for ALTER VIEW: " + ast);
        }
    }
    return operation;
}
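The view path can be driven the same way; a hedged continuation of the previous sketch, under the same assumptions about the Hive catalog and dialect, with v and t as placeholder names.

// ALTER VIEW ... AS <query>: child 1 is TOK_QUERY, so this re-enters convertCreateView.
tableEnv.executeSql("ALTER VIEW v AS SELECT id FROM t");

// TOK_ALTERVIEW_RENAME -> convertAlterTableRename(..., /* isView */ true)
tableEnv.executeSql("ALTER VIEW v RENAME TO v2");

// TOK_ALTERVIEW_ADDPARTS / TOK_ALTERVIEW_DROPPARTS are rejected with
// "ADD/DROP PARTITION for view is not supported".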
Use of org.apache.flink.table.api.ValidationException in project flink by apache.
The class HiveParserDDLSemanticAnalyzer, method getTypeName.
public static String getTypeName(HiveParserASTNode node) throws SemanticException {
    int token = node.getType();
    String typeName;
    // datetime type isn't currently supported
    if (token == HiveASTParser.TOK_DATETIME) {
        throw new ValidationException(ErrorMsg.UNSUPPORTED_TYPE.getMsg());
    }
    switch (token) {
        case HiveASTParser.TOK_CHAR:
            CharTypeInfo charTypeInfo = HiveASTParseUtils.getCharTypeInfo(node);
            typeName = charTypeInfo.getQualifiedName();
            break;
        case HiveASTParser.TOK_VARCHAR:
            VarcharTypeInfo varcharTypeInfo = HiveASTParseUtils.getVarcharTypeInfo(node);
            typeName = varcharTypeInfo.getQualifiedName();
            break;
        case HiveASTParser.TOK_DECIMAL:
            DecimalTypeInfo decTypeInfo = HiveASTParseUtils.getDecimalTypeTypeInfo(node);
            typeName = decTypeInfo.getQualifiedName();
            break;
        default:
            typeName = TokenToTypeName.get(token);
    }
    return typeName;
}
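For intuition about the qualified names returned for parameterized types, a small hedged sketch against Hive's TypeInfo factories, which are assumed to produce the same qualified-name rendering as the HiveASTParseUtils helpers used above.

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class TypeNameDemo {
    public static void main(String[] args) {
        System.out.println(TypeInfoFactory.getCharTypeInfo(10).getQualifiedName());       // char(10)
        System.out.println(TypeInfoFactory.getVarcharTypeInfo(20).getQualifiedName());    // varchar(20)
        System.out.println(TypeInfoFactory.getDecimalTypeInfo(10, 2).getQualifiedName()); // decimal(10,2)
    }
}

Non-parameterized tokens (TOK_INT, TOK_STRING, and so on) fall through to the TokenToTypeName lookup table instead.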
Use of org.apache.flink.table.api.ValidationException in project flink by apache.
The class HiveParserDDLSemanticAnalyzer, method convertCreateFunction.
private Operation convertCreateFunction(HiveParserASTNode ast) {
    // ^(TOK_CREATEFUNCTION identifier StringLiteral ({isTempFunction}? => TOK_TEMPORARY))
    String functionName = ast.getChild(0).getText().toLowerCase();
    boolean isTemporaryFunction = (ast.getFirstChildWithType(HiveASTParser.TOK_TEMPORARY) != null);
    String className = HiveParserBaseSemanticAnalyzer.unescapeSQLString(ast.getChild(1).getText());
    // Temp functions are not allowed to have qualified names.
    if (isTemporaryFunction && FunctionUtils.isQualifiedFunctionName(functionName)) {
        // the name belongs to a catalog/db
        throw new ValidationException("Temporary function cannot be created with a qualified name.");
    }
    if (isTemporaryFunction) {
        FunctionDefinition funcDefinition =
                funcDefFactory.createFunctionDefinition(
                        functionName, new CatalogFunctionImpl(className, FunctionLanguage.JAVA));
        return new CreateTempSystemFunctionOperation(functionName, false, funcDefinition);
    } else {
        ObjectIdentifier identifier = parseObjectIdentifier(functionName);
        CatalogFunction catalogFunction = new CatalogFunctionImpl(className, FunctionLanguage.JAVA);
        return new CreateCatalogFunctionOperation(identifier, catalogFunction, false, false);
    }
}
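A hedged sketch of both branches through SQL, assuming the Hive-dialect TableEnvironment from the earlier sketches and a UDF class on the classpath (com.example.MyLowerUdf and mydb are placeholder names).

// Temporary function: the name must be unqualified; this branch produces
// a CreateTempSystemFunctionOperation.
tableEnv.executeSql("CREATE TEMPORARY FUNCTION my_lower AS 'com.example.MyLowerUdf'");

// Catalog function: the name may be qualified; this branch produces
// a CreateCatalogFunctionOperation.
tableEnv.executeSql("CREATE FUNCTION mydb.my_lower AS 'com.example.MyLowerUdf'");

// Temporary + qualified name trips the check above and throws
// ValidationException("Temporary function cannot be created with a qualified name."):
// tableEnv.executeSql("CREATE TEMPORARY FUNCTION mydb.my_lower AS 'com.example.MyLowerUdf'");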