Use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.SqlCharStringLiteral in project flink by apache.
The class SqlAddHivePartitions, method toProps.
private static List<SqlNodeList> toProps(List<SqlCharStringLiteral> partLocations) {
  List<SqlNodeList> res = new ArrayList<>(partLocations.size());
  for (SqlCharStringLiteral partLocation : partLocations) {
    SqlNodeList prop = null;
    if (partLocation != null) {
      prop = new SqlNodeList(partLocation.getParserPosition());
      prop.add(HiveDDLUtils.toTableOption(SqlCreateHiveTable.TABLE_LOCATION_URI, partLocation, partLocation.getParserPosition()));
    }
    res.add(prop);
  }
  return res;
}
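For context, here is a minimal, hedged sketch of how the partition-location literals consumed by toProps could be constructed. The class PartLocationsSketch and its helper are hypothetical; SqlLiteral.createCharString is the standard Calcite factory (import paths may carry a vendored prefix such as the one shown above).

import java.util.ArrayList;
import java.util.List;
import org.apache.calcite.sql.SqlCharStringLiteral;
import org.apache.calcite.sql.SqlLiteral;
import org.apache.calcite.sql.parser.SqlParserPos;

class PartLocationsSketch {
  // Wraps plain path strings as SqlCharStringLiteral nodes; a null path models a
  // partition without a LOCATION clause, which toProps maps to a null SqlNodeList.
  static List<SqlCharStringLiteral> makeLocations(String... paths) {
    List<SqlCharStringLiteral> literals = new ArrayList<>(paths.length);
    for (String path : paths) {
      literals.add(path == null
          ? null
          : SqlLiteral.createCharString(path, SqlParserPos.ZERO));
    }
    return literals;
  }
}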
Use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.SqlCharStringLiteral in project flink by apache.
The class SqlCreateHiveTable, method unparseRowFormat.
private void unparseRowFormat(SqlWriter writer, int leftPrec, int rightPrec) {
  if (rowFormat == null) {
    return;
  }
  writer.newlineAndIndent();
  writer.keyword("ROW FORMAT");
  if (rowFormat.serdeClass != null) {
    writer.keyword("SERDE");
    rowFormat.serdeClass.unparse(writer, leftPrec, rightPrec);
    if (rowFormat.serdeProps != null) {
      writer.keyword("WITH SERDEPROPERTIES");
      unparsePropList(rowFormat.serdeProps, writer, leftPrec, rightPrec);
    }
  } else {
    writer.keyword("DELIMITED");
    SqlCharStringLiteral fieldDelim = rowFormat.delimitPropToValue.get(HiveTableRowFormat.FIELD_DELIM);
    SqlCharStringLiteral escape = rowFormat.delimitPropToValue.get(HiveTableRowFormat.ESCAPE_CHAR);
    if (fieldDelim != null) {
      writer.newlineAndIndent();
      writer.print(" ");
      writer.keyword("FIELDS TERMINATED BY");
      fieldDelim.unparse(writer, leftPrec, rightPrec);
      if (escape != null) {
        writer.keyword("ESCAPED BY");
        escape.unparse(writer, leftPrec, rightPrec);
      }
    }
    SqlCharStringLiteral collectionDelim = rowFormat.delimitPropToValue.get(HiveTableRowFormat.COLLECTION_DELIM);
    if (collectionDelim != null) {
      writer.newlineAndIndent();
      writer.print(" ");
      writer.keyword("COLLECTION ITEMS TERMINATED BY");
      collectionDelim.unparse(writer, leftPrec, rightPrec);
    }
    SqlCharStringLiteral mapKeyDelim = rowFormat.delimitPropToValue.get(HiveTableRowFormat.MAPKEY_DELIM);
    if (mapKeyDelim != null) {
      writer.newlineAndIndent();
      writer.print(" ");
      writer.keyword("MAP KEYS TERMINATED BY");
      mapKeyDelim.unparse(writer, leftPrec, rightPrec);
    }
    SqlCharStringLiteral lineDelim = rowFormat.delimitPropToValue.get(HiveTableRowFormat.LINE_DELIM);
    if (lineDelim != null) {
      writer.newlineAndIndent();
      writer.print(" ");
      writer.keyword("LINES TERMINATED BY");
      lineDelim.unparse(writer, leftPrec, rightPrec);
    }
    SqlCharStringLiteral nullAs = rowFormat.delimitPropToValue.get(HiveTableRowFormat.SERIALIZATION_NULL_FORMAT);
    if (nullAs != null) {
      writer.newlineAndIndent();
      writer.print(" ");
      writer.keyword("NULL DEFINED AS");
      nullAs.unparse(writer, leftPrec, rightPrec);
    }
  }
}
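As a hedged usage sketch, the clause emitted by unparseRowFormat can be observed by rendering the whole CREATE TABLE node back to text. The helper class below is hypothetical and the dialect choice is illustrative (Flink uses its own Hive dialect internally); SqlNode#toSqlString is the standard Calcite entry point that ends up invoking unparse.

import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.dialect.AnsiSqlDialect;

class UnparseSketch {
  // Renders a parsed statement (e.g. a SqlCreateHiveTable with a ROW FORMAT clause)
  // back to SQL text, exercising unparseRowFormat via SqlNode#unparse.
  static String toDdlString(SqlNode createTableNode) {
    return createTableNode.toSqlString(AnsiSqlDialect.DEFAULT).getSql();
  }
}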
Use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.SqlCharStringLiteral in project calcite by apache.
The class SqlLiteralChainOperator, method unparse.
public void unparse(SqlWriter writer, SqlCall call, int leftPrec, int rightPrec) {
  final SqlWriter.Frame frame = writer.startList("", "");
  SqlCollation collation = null;
  for (Ord<SqlNode> operand : Ord.zip(call.getOperandList())) {
    SqlLiteral rand = (SqlLiteral) operand.e;
    if (operand.i > 0) {
      // SQL:2003 says there must be a newline between string fragments.
      writer.newlineAndIndent();
    }
    if (rand instanceof SqlCharStringLiteral) {
      NlsString nls = ((SqlCharStringLiteral) rand).getNlsString();
      if (operand.i == 0) {
        collation = nls.getCollation();
        // print with prefix
        writer.literal(nls.asSql(true, false));
      } else {
        // print without prefix
        writer.literal(nls.asSql(false, false));
      }
    } else if (operand.i == 0) {
      // print with prefix
      rand.unparse(writer, leftPrec, rightPrec);
    } else {
      // print without prefix
      if (rand.getTypeName() == SqlTypeName.BINARY) {
        BitString bs = (BitString) rand.getValue();
        writer.literal("'" + bs.toHexString() + "'");
      } else {
        writer.literal("'" + rand.toValue() + "'");
      }
    }
  }
  if (collation != null) {
    collation.unparse(writer, 0, 0);
  }
  writer.endList(frame);
}
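A small, hedged demo of the NlsString.asSql behavior the loop above relies on: the first fragment of a chain is printed with its charset introducer (if any), later fragments without it. The class name is hypothetical and the printed forms are illustrative.

import org.apache.calcite.util.NlsString;

class NlsStringAsSqlDemo {
  public static void main(String[] args) {
    NlsString fragment = new NlsString("abc", null, null);
    // First fragment: keep the charset introducer, e.g. _UTF-16LE'abc' (or just 'abc' when no charset is set).
    System.out.println(fragment.asSql(true, false));
    // Subsequent fragments: no introducer, so the chain unparses as 'abc' followed by 'def' on a new line.
    System.out.println(fragment.asSql(false, false));
  }
}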
Use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.SqlCharStringLiteral in project drill by apache.
The class ShowSchemasHandler, method rewrite.
/**
 * Rewrite the parse tree as SELECT ... FROM INFORMATION_SCHEMA.SCHEMATA ...
 */
@Override
public SqlNode rewrite(SqlNode sqlNode) throws ForemanSetupException {
  SqlShowSchemas node = unwrap(sqlNode, SqlShowSchemas.class);
  List<SqlNode> selectList = Collections.singletonList(new SqlIdentifier(SCHS_COL_SCHEMA_NAME, SqlParserPos.ZERO));
  SqlNode fromClause = new SqlIdentifier(Arrays.asList(IS_SCHEMA_NAME, InfoSchemaTableType.SCHEMATA.name()), SqlParserPos.ZERO);
  SqlNode where = null;
  SqlNode likePattern = node.getLikePattern();
  if (likePattern != null) {
    SqlNode column = new SqlIdentifier(SCHS_COL_SCHEMA_NAME, SqlParserPos.ZERO);
    // Schema names are case-insensitive, so wrap the column in LOWER() and convert the pattern to lower case.
    if (likePattern instanceof SqlCharStringLiteral) {
      NlsString conditionString = ((SqlCharStringLiteral) likePattern).getNlsString();
      likePattern = SqlCharStringLiteral.createCharString(conditionString.getValue().toLowerCase(), conditionString.getCharsetName(), likePattern.getParserPosition());
      column = SqlStdOperatorTable.LOWER.createCall(SqlParserPos.ZERO, column);
    }
    where = DrillParserUtil.createCondition(column, SqlStdOperatorTable.LIKE, likePattern);
  } else if (node.getWhereClause() != null) {
    where = node.getWhereClause();
  }
  return new SqlSelect(SqlParserPos.ZERO, null, new SqlNodeList(selectList, SqlParserPos.ZERO), fromClause, where, null, null, null, null, null, null);
}
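The pattern-lowering step in isolation, as a hedged sketch; the class and method names are hypothetical, while the Calcite calls (getNlsString, createCharString) mirror the rewrite above.

import org.apache.calcite.sql.SqlCharStringLiteral;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.util.NlsString;

class LikePatternSketch {
  // If the LIKE pattern is a character literal, return a lower-cased copy so it can be
  // matched against LOWER(SCHEMA_NAME); otherwise leave the node untouched.
  static SqlNode lowerCasePattern(SqlNode likePattern) {
    if (likePattern instanceof SqlCharStringLiteral) {
      NlsString nls = ((SqlCharStringLiteral) likePattern).getNlsString();
      return SqlCharStringLiteral.createCharString(
          nls.getValue().toLowerCase(), nls.getCharsetName(), likePattern.getParserPosition());
    }
    return likePattern;
  }
}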
Use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.SqlCharStringLiteral in project drill by apache.
The class DropFunctionHandler, method getPlan.
/**
 * Unregisters UDFs dynamically. The process consists of several steps:
 * <ol>
 * <li>Registers the jar in the jar registry to ensure that several jars with the same name are not unregistered concurrently.</li>
 * <li>Starts the remote unregistration process: gets the list of all registered jars and excludes the jar to be deleted.</li>
 * <li>Signals drillbits to start the local unregistration process.</li>
 * <li>Removes the source and binary jars from the registry area.</li>
 * </ol>
 *
 * UDF unregistration is allowed only if dynamic UDF support is enabled.
 * Only jars registered dynamically can be unregistered;
 * built-in functions loaded at startup cannot be unregistered.
 *
 * Limitation: before unregistering a jar, make sure no one is using functions from it.
 * There is no guarantee that running queries will finish successfully or return correct results.
 *
 * @return a single row listing the unregistered UDFs; otherwise an exception is raised
 */
@Override
public PhysicalPlan getPlan(SqlNode sqlNode) throws ForemanSetupException, IOException {
  if (!context.getOption(ExecConstants.DYNAMIC_UDF_SUPPORT_ENABLED).bool_val) {
    throw UserException.validationError().message("Dynamic UDFs support is disabled.").build(logger);
  }
  SqlDropFunction node = unwrap(sqlNode, SqlDropFunction.class);
  String jarName = ((SqlCharStringLiteral) node.getJar()).toValue();
  RemoteFunctionRegistry remoteFunctionRegistry = context.getRemoteFunctionRegistry();
  boolean inProgress = false;
  try {
    final String action = remoteFunctionRegistry.addToJars(jarName, RemoteFunctionRegistry.Action.UNREGISTRATION);
    if (!(inProgress = action == null)) {
      return DirectPlan.createDirectPlan(context, false, String.format("Jar with %s name is used. Action: %s", jarName, action));
    }
    Jar deletedJar = unregister(jarName, remoteFunctionRegistry);
    if (deletedJar == null) {
      return DirectPlan.createDirectPlan(context, false, String.format("Jar %s is not registered in remote registry", jarName));
    }
    remoteFunctionRegistry.submitForUnregistration(jarName);
    removeJarFromArea(jarName, remoteFunctionRegistry.getFs(), remoteFunctionRegistry.getRegistryArea());
    removeJarFromArea(JarUtil.getSourceName(jarName), remoteFunctionRegistry.getFs(), remoteFunctionRegistry.getRegistryArea());
    return DirectPlan.createDirectPlan(context, true, String.format("The following UDFs in jar %s have been unregistered:\n%s", jarName, deletedJar.getFunctionSignatureList()));
  } catch (Exception e) {
    logger.error("Error during UDF unregistration", e);
    return DirectPlan.createDirectPlan(context, false, e.getMessage());
  } finally {
    if (inProgress) {
      remoteFunctionRegistry.finishUnregistration(jarName);
      remoteFunctionRegistry.removeFromJars(jarName);
    }
  }
}
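The jar-name extraction on its own, as a defensive, hedged sketch; the handler above relies on an unchecked cast because the parser only produces a character literal in that position, whereas the hypothetical helper below fails fast on anything else.

import org.apache.calcite.sql.SqlCharStringLiteral;
import org.apache.calcite.sql.SqlNode;

class JarNameSketch {
  // Extracts the jar name from the DROP FUNCTION USING JAR argument,
  // rejecting any node that is not a character string literal.
  static String jarNameFrom(SqlNode jarNode) {
    if (jarNode instanceof SqlCharStringLiteral) {
      return ((SqlCharStringLiteral) jarNode).toValue();
    }
    throw new IllegalArgumentException("Expected a character string literal but got: " + jarNode);
  }
}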