use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.SqlOperator in project hive by apache.
the class HiveCalciteUtil method createUDTFForSetOp.
public static HiveTableFunctionScan createUDTFForSetOp(RelOptCluster cluster, RelNode input) throws SemanticException {
    RelTraitSet traitSet = TraitsUtil.getDefaultTraitSet(cluster);
    // Build one input reference per field of the input relation; these become the UDTF arguments.
    List<RexNode> originalInputRefs = Lists.transform(input.getRowType().getFieldList(),
            new Function<RelDataTypeField, RexNode>() {
                @Override
                public RexNode apply(RelDataTypeField input) {
                    return new RexInputRef(input.getIndex(), input.getType());
                }
            });
    ImmutableList.Builder<RelDataType> argTypeBldr = ImmutableList.<RelDataType>builder();
    for (int i = 0; i < originalInputRefs.size(); i++) {
        argTypeBldr.add(originalInputRefs.get(i).getType());
    }
    RelDataType retType = input.getRowType();
    // Resolve Hive's replicate_rows UDTF and wrap it as a Calcite SqlOperator.
    String funcName = "replicate_rows";
    FunctionInfo fi = FunctionRegistry.getFunctionInfo(funcName);
    SqlOperator calciteOp = SqlFunctionConverter.getCalciteOperator(funcName, fi.getGenericUDTF(), argTypeBldr.build(), retType);
    // Hive UDTF only has a single input
    List<RelNode> list = new ArrayList<>();
    list.add(input);
    RexNode rexNode = cluster.getRexBuilder().makeCall(calciteOp, originalInputRefs);
    return HiveTableFunctionScan.create(cluster, traitSet, list, rexNode, null, retType, null);
}
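For context, the Guava-based transform above simply projects every field of the input as a RexInputRef. A minimal sketch of the same projection written directly against Calcite's RexBuilder (the class and method names below are illustrative, not Hive code):

import java.util.ArrayList;
import java.util.List;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.type.RelDataTypeField;
import org.apache.calcite.rex.RexBuilder;
import org.apache.calcite.rex.RexNode;

final class InputRefSketch {
    // Sketch only: build one input reference per field of `input`, the same list that
    // createUDTFForSetOp passes as the arguments of the replicate_rows call.
    static List<RexNode> allInputRefs(RelNode input, RexBuilder rexBuilder) {
        List<RexNode> refs = new ArrayList<>();
        for (RelDataTypeField field : input.getRowType().getFieldList()) {
            refs.add(rexBuilder.makeInputRef(field.getType(), field.getIndex()));
        }
        return refs;
    }
}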
use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.SqlOperator in project hive by apache.
the class DruidSqlOperatorConverter method getDefaultMap.
public static final Map<SqlOperator, org.apache.calcite.adapter.druid.DruidSqlOperatorConverter> getDefaultMap() {
    if (druidOperatorMap == null) {
        druidOperatorMap = new HashMap<SqlOperator, org.apache.calcite.adapter.druid.DruidSqlOperatorConverter>();
        // Start from the conversions that ship with Calcite's Druid adapter.
        DruidQuery.DEFAULT_OPERATORS_LIST.stream().forEach(op -> druidOperatorMap.put(op.calciteOperator(), op));
        // Override Hive-specific operators
        druidOperatorMap.putAll(Maps.asMap(HiveFloorDate.ALL_FUNCTIONS,
                (Function<SqlFunction, org.apache.calcite.adapter.druid.DruidSqlOperatorConverter>) input -> new FloorOperatorConversion()));
        druidOperatorMap.putAll(Maps.asMap(HiveExtractDate.ALL_FUNCTIONS,
                (Function<SqlFunction, org.apache.calcite.adapter.druid.DruidSqlOperatorConverter>) input -> new ExtractOperatorConversion()));
        druidOperatorMap.put(HiveConcat.INSTANCE, new DirectOperatorConversion(HiveConcat.INSTANCE, "concat"));
        druidOperatorMap.put(SqlStdOperatorTable.SUBSTRING, new DruidSqlOperatorConverter.DruidSubstringOperatorConversion());
        druidOperatorMap.put(SqlStdOperatorTable.IS_NULL, new UnaryFunctionOperatorConversion(SqlStdOperatorTable.IS_NULL, "isnull"));
        druidOperatorMap.put(SqlStdOperatorTable.IS_NOT_NULL, new UnaryFunctionOperatorConversion(SqlStdOperatorTable.IS_NOT_NULL, "notnull"));
        druidOperatorMap.put(HiveTruncSqlOperator.INSTANCE, new DruidDateTruncOperatorConversion());
        druidOperatorMap.put(HiveToDateSqlOperator.INSTANCE, new DruidToDateOperatorConversion());
        druidOperatorMap.put(HiveFromUnixTimeSqlOperator.INSTANCE, new DruidFormUnixTimeOperatorConversion());
        druidOperatorMap.put(HiveToUnixTimestampSqlOperator.INSTANCE, new DruidUnixTimestampOperatorConversion());
        druidOperatorMap.put(HiveDateAddSqlOperator.INSTANCE, new DruidDateArithmeticOperatorConversion(1, HiveDateAddSqlOperator.INSTANCE));
        druidOperatorMap.put(HiveDateSubSqlOperator.INSTANCE, new DruidDateArithmeticOperatorConversion(-1, HiveDateSubSqlOperator.INSTANCE));
    }
    return druidOperatorMap;
}
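The snippet only builds the operator-to-converter map; how it is consumed is not shown. A hedged sketch of a lookup against it (the wrapper class and method below are illustrative; only the Calcite types are real): given a RexCall from a Hive plan, the registered converter, if any, is the one responsible for producing the corresponding Druid expression.

import java.util.Map;
import org.apache.calcite.adapter.druid.DruidSqlOperatorConverter;
import org.apache.calcite.rex.RexCall;
import org.apache.calcite.sql.SqlOperator;

final class DruidConversionLookupSketch {
    // Sketch only: look up the converter registered for the call's operator.
    // A null result means this expression has no Druid translation registered.
    static DruidSqlOperatorConverter converterFor(RexCall call,
            Map<SqlOperator, DruidSqlOperatorConverter> operatorMap) {
        return operatorMap.get(call.getOperator());
    }
}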
use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.SqlOperator in project beam by apache.
the class CEPUtils method getRegexFromPattern.
/**
 * Recursively construct a regular expression from a {@code RexNode}.
 */
public static String getRegexFromPattern(RexNode call) {
    if (call.getClass() == RexLiteral.class) {
        // A leaf of the pattern tree: a single pattern variable, e.g. "A".
        return ((RexLiteral) call).getValueAs(String.class);
    } else {
        RexCall opr = (RexCall) call;
        SqlOperator operator = opr.getOperator();
        List<RexNode> operands = opr.getOperands();
        if (operator.getKind() == SqlKind.PATTERN_QUANTIFIER) {
            // Operands: the quantified pattern variable, min count, max count, reluctant flag.
            String p = ((RexLiteral) operands.get(0)).getValueAs(String.class);
            int start = ((RexLiteral) operands.get(1)).getValueAs(Integer.class);
            int end = ((RexLiteral) operands.get(2)).getValueAs(Integer.class);
            boolean isReluctant = ((RexLiteral) operands.get(3)).getValueAs(Boolean.class);
            Quantifier quantifier = getQuantifier(start, end, isReluctant);
            return p + quantifier.toString();
        }
        // Any other operator (e.g. pattern concatenation): combine the regexes of its two sub-patterns.
        return getRegexFromPattern(opr.getOperands().get(0)) + getRegexFromPattern(opr.getOperands().get(1));
    }
}
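The quantifier suffix comes from getQuantifier, which is not shown above. A hypothetical sketch of the kind of mapping it performs (Beam's actual Quantifier handling may differ), assuming Calcite's convention that a maximum of -1 means unbounded:

final class QuantifierSketch {
    // Hypothetical: map PATTERN_QUANTIFIER bounds to a regex-style quantifier suffix.
    static String quantifierSuffix(int start, int end, boolean isReluctant) {
        final String base;
        if (start == 1 && end == -1) {
            base = "+"; // one or more
        } else if (start == 0 && end == -1) {
            base = "*"; // zero or more
        } else if (start == 0 && end == 1) {
            base = "?"; // zero or one
        } else {
            base = "{" + start + "," + (end < 0 ? "" : end) + "}";
        }
        // A trailing '?' makes the quantifier reluctant (non-greedy).
        return isReluctant ? base + "?" : base;
    }
}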
use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.SqlOperator in project hazelcast by hazelcast.
the class HazelcastSqlOperatorTableTest method testOperandTypeChecker.
/**
 * Make sure that all our operators either define a top-level operand checker that overrides the call binding,
 * or confirm explicitly that they override the binding manually.
 */
@Test
public void testOperandTypeChecker() {
    for (SqlOperator operator : HazelcastSqlOperatorTable.instance().getOperatorList()) {
        boolean valid = operator instanceof HazelcastOperandTypeCheckerAware
                || operator instanceof HazelcastTableFunction
                || operator instanceof HazelcastWindowTableFunction
                || operator instanceof HazelcastCaseOperator
                || operator == HazelcastSqlOperatorTable.ARGUMENT_ASSIGNMENT;
        assertTrue("Operator must implement one of classes from " + HazelcastFunction.class.getPackage().toString() + ": " + operator.getClass().getSimpleName(), valid);
    }
}
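getOperatorList() is part of Calcite's SqlOperatorTable contract, so the same iteration works against any operator table. A minimal, runnable illustration against Calcite's standard table (not Hazelcast-specific; the class name is made up):

import org.apache.calcite.sql.SqlOperator;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;

public class OperatorListingSketch {
    public static void main(String[] args) {
        // Print the name and syntax of every operator in Calcite's standard table,
        // mirroring the loop in the test above.
        for (SqlOperator op : SqlStdOperatorTable.instance().getOperatorList()) {
            System.out.println(op.getName() + " : " + op.getSyntax());
        }
    }
}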
use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.SqlOperator in project hazelcast by hazelcast.
the class HazelcastSqlOperatorTableTest method testNoOverride.
/**
 * Make sure no operator in the operator table overrides another, i.e. that no two operators
 * share the same name and syntax.
 */
@Test
public void testNoOverride() {
    Map<BiTuple<String, SqlSyntax>, SqlOperator> map = new HashMap<>();
    for (SqlOperator operator : HazelcastSqlOperatorTable.instance().getOperatorList()) {
        BiTuple<String, SqlSyntax> key = BiTuple.of(operator.getName(), operator.getSyntax());
        SqlOperator oldOperator = map.put(key, operator);
        assertNull("Duplicate operator \"" + operator.getName() + "\"", oldOperator);
    }
}