Usage of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.SqlWriter in the Apache Flink project: the unparse method of the SqlCreateDatabase class.
@Override
public void unparse(SqlWriter writer, int leftPrec, int rightPrec) {
    // Emits: CREATE DATABASE [IF NOT EXISTS] name [COMMENT <literal>] [WITH (k=v, ...)]
    writer.keyword("CREATE DATABASE");
    if (isIfNotExists()) {
        writer.keyword("IF NOT EXISTS");
    }
    databaseName.unparse(writer, leftPrec, rightPrec);

    // Optional COMMENT clause, on its own line.
    if (comment != null) {
        writer.newlineAndIndent();
        writer.keyword("COMMENT");
        comment.unparse(writer, leftPrec, rightPrec);
    }

    // Optional WITH (...) properties; each entry gets its own indented line,
    // and the closing paren is pushed to a fresh line.
    if (this.propertyList.size() > 0) {
        writer.keyword("WITH");
        final SqlWriter.Frame propsFrame = writer.startList("(", ")");
        for (SqlNode entry : propertyList) {
            printIndent(writer);
            entry.unparse(writer, leftPrec, rightPrec);
        }
        writer.newlineAndIndent();
        writer.endList(propsFrame);
    }
}
Usage of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.SqlWriter in the Apache Flink project: the unparse method of the SqlCreateTable class.
@Override
public void unparse(SqlWriter writer, int leftPrec, int rightPrec) {
    // Emits:
    //   CREATE [TEMPORARY] TABLE [IF NOT EXISTS] name
    //     (columns..., constraints..., watermark)
    //     [COMMENT <literal>]
    //     [PARTITIONED BY (...)]
    //     [WITH (k=v, ...)]
    //     [LIKE ...]
    writer.keyword("CREATE");
    if (isTemporary()) {
        writer.keyword("TEMPORARY");
    }
    writer.keyword("TABLE");
    if (isIfNotExists()) {
        writer.keyword("IF NOT EXISTS");
    }
    tableName.unparse(writer, leftPrec, rightPrec);

    // Table body is only emitted when there is at least one column,
    // constraint, or watermark to print.
    if (columnList.size() > 0 || tableConstraints.size() > 0 || watermark != null) {
        SqlWriter.Frame frame = writer.startList(SqlWriter.FrameTypeEnum.create("sds"), "(", ")");
        for (SqlNode column : columnList) {
            printIndent(writer);
            column.unparse(writer, leftPrec, rightPrec);
        }
        // Fix: the redundant `if (tableConstraints.size() > 0)` guard around this
        // loop was removed — iterating an empty list emits nothing anyway.
        for (SqlTableConstraint constraint : tableConstraints) {
            printIndent(writer);
            constraint.unparse(writer, leftPrec, rightPrec);
        }
        if (watermark != null) {
            printIndent(writer);
            watermark.unparse(writer, leftPrec, rightPrec);
        }
        // Closing paren of the body goes on its own line.
        writer.newlineAndIndent();
        writer.endList(frame);
    }

    // Optional COMMENT clause.
    if (comment != null) {
        writer.newlineAndIndent();
        writer.keyword("COMMENT");
        comment.unparse(writer, leftPrec, rightPrec);
    }

    // Optional PARTITIONED BY (key, ...) clause; the key list unparses itself.
    if (this.partitionKeyList.size() > 0) {
        writer.newlineAndIndent();
        writer.keyword("PARTITIONED BY");
        SqlWriter.Frame partitionedByFrame = writer.startList("(", ")");
        this.partitionKeyList.unparse(writer, leftPrec, rightPrec);
        writer.endList(partitionedByFrame);
        writer.newlineAndIndent();
    }

    // Optional WITH (...) properties; one entry per indented line.
    if (this.propertyList.size() > 0) {
        writer.keyword("WITH");
        SqlWriter.Frame withFrame = writer.startList("(", ")");
        for (SqlNode property : propertyList) {
            printIndent(writer);
            property.unparse(writer, leftPrec, rightPrec);
        }
        writer.newlineAndIndent();
        writer.endList(withFrame);
    }

    // Optional LIKE clause; delegates fully to the LIKE node.
    if (this.tableLike != null) {
        writer.newlineAndIndent();
        this.tableLike.unparse(writer, leftPrec, rightPrec);
    }
}
Usage of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.SqlWriter in the Apache Beam project: the unparseCall method of the BeamBigQuerySqlDialect class.
@Override
// Custom unparsing for BigQuery: ROW constructors become STRUCT(...) calls, and a
// set of self-designed wrapper functions (installed earlier in the pipeline) are
// expanded into BigQuery-compatible SQL. Everything else defers to the base dialect.
public void unparseCall(final SqlWriter writer, final SqlCall call, final int leftPrec, final int rightPrec) {
switch(call.getKind()) {
case ROW:
// BigQuery spells row construction as STRUCT(a, b, ...).
final SqlWriter.Frame structFrame = writer.startFunCall("STRUCT");
for (SqlNode operand : call.getOperandList()) {
// sep() prints nothing before the first operand of a function-call frame.
writer.sep(",");
operand.unparse(writer, leftPrec, rightPrec);
}
writer.endFunCall(structFrame);
break;
case OTHER_FUNCTION:
// Dispatch on the function name: each branch below handles one family of
// self-designed wrapper functions; an unrecognized name falls through to
// the default (base-dialect) handling.
String funName = call.getOperator().getName();
if (DOUBLE_LITERAL_WRAPPERS.containsKey(funName)) {
// self-designed function dealing with the unparsing of ZetaSQL DOUBLE positive
// infinity, negative infinity and NaN
unparseDoubleLiteralWrapperFunction(writer, funName);
break;
} else if (NUMERIC_LITERAL_WRAPPER.equals(funName)) {
// self-designed function dealing with the unparsing of ZetaSQL NUMERIC literal
unparseNumericLiteralWrapperFunction(writer, call, leftPrec, rightPrec);
break;
} else if (FUNCTIONS_USING_INTERVAL.contains(funName)) {
// Functions taking an INTERVAL argument need special rendering.
unparseFunctionsUsingInterval(writer, call, leftPrec, rightPrec);
break;
} else if (EXTRACT_FUNCTIONS.containsKey(funName)) {
// EXTRACT-family functions get BigQuery-specific spelling.
unparseExtractFunctions(writer, call, leftPrec, rightPrec);
break;
} else if (IN_ARRAY_OPERATOR.equals(funName)) {
// `x IN UNNEST(array)` style membership test.
unparseInArrayOperator(writer, call, leftPrec, rightPrec);
break;
}
// fall through
default:
// Anything not handled above uses the standard BigQuery dialect unparsing.
super.unparseCall(writer, call, leftPrec, rightPrec);
}
}
Usage of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.SqlWriter in the Apache Druid project: the unparse method of the DruidSqlInsert class.
@Override
public void unparse(SqlWriter writer, int leftPrec, int rightPrec) {
    // Standard INSERT unparsing first, then the Druid-specific clauses.
    super.unparse(writer, leftPrec, rightPrec);

    // PARTITIONED BY keeps the original source text (a pre-rendered string).
    writer.keyword("PARTITIONED BY");
    writer.keyword(partitionedByStringForUnparse);

    // Optional CLUSTERED BY expr[, expr ...] clause.
    if (getClusteredBy() != null) {
        writer.keyword("CLUSTERED BY");
        SqlWriter.Frame frame = writer.startList("", "");
        for (SqlNode clusterByOpts : getClusteredBy().getList()) {
            // Fix: emit "," between successive expressions. The original unparsed
            // the nodes back-to-back with no separator, so a multi-column
            // "CLUSTERED BY a, b" lost its commas on round-trip. sep() prints
            // nothing before the first element of a list frame.
            writer.sep(",");
            clusterByOpts.unparse(writer, leftPrec, rightPrec);
        }
        writer.endList(frame);
    }
}
Usage of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.SqlWriter in the Hazelcast project: the unparse method of the SqlCreateIndex class.
@Override
public void unparse(SqlWriter writer, int leftPrec, int rightPrec) {
    // Emits: CREATE INDEX [IF NOT EXISTS] name ON map [(col, ...)] TYPE t [OPTIONS (...)]
    writer.keyword("CREATE INDEX");
    if (ifNotExists) {
        writer.keyword("IF NOT EXISTS");
    }
    name.unparse(writer, leftPrec, rightPrec);
    writer.keyword("ON");
    mapName.unparse(writer, leftPrec, rightPrec);

    // Optional indexed-column list, one column per indented line.
    if (columns.size() > 0) {
        final SqlWriter.Frame columnsFrame = writer.startList("(", ")");
        for (SqlNode column : columns) {
            printIndent(writer);
            // NOTE(review): columns deliberately(?) use precedence 0/0 rather than
            // the caller's leftPrec/rightPrec — kept as-is; confirm intent upstream.
            column.unparse(writer, 0, 0);
        }
        writer.newlineAndIndent();
        writer.endList(columnsFrame);
    }

    // Mandatory TYPE clause on its own line.
    writer.newlineAndIndent();
    writer.keyword("TYPE");
    type.unparse(writer, leftPrec, rightPrec);

    // Optional OPTIONS (...) clause; one property per indented line.
    if (options.size() > 0) {
        writer.newlineAndIndent();
        writer.keyword("OPTIONS");
        final SqlWriter.Frame optionsFrame = writer.startList("(", ")");
        for (SqlNode option : options) {
            printIndent(writer);
            option.unparse(writer, leftPrec, rightPrec);
        }
        writer.newlineAndIndent();
        writer.endList(optionsFrame);
    }
}
Aggregations