Example usage of org.apache.flink.connectors.hive.FlinkHiveException in the Apache Flink project:
class HiveShimV310, method toHiveTimestamp.
@Override
public Object toHiveTimestamp(Object flinkTimestamp) {
    // A null timestamp maps straight through to null.
    if (flinkTimestamp == null) {
        return null;
    }
    ensureSupportedFlinkTimestamp(flinkTimestamp);
    initDateTimeClasses();
    // Unwrap java.sql.Timestamp to LocalDateTime before invoking the reflective
    // Hive constructor (presumably Hive 3.1's Timestamp takes a LocalDateTime — the
    // constructor itself is resolved elsewhere, so this cannot be confirmed here).
    Object constructorArg =
            flinkTimestamp instanceof Timestamp
                    ? ((Timestamp) flinkTimestamp).toLocalDateTime()
                    : flinkTimestamp;
    try {
        return hiveTimestampConstructor.newInstance(constructorArg);
    } catch (InstantiationException | IllegalAccessException | InvocationTargetException e) {
        throw new FlinkHiveException("Failed to convert to Hive timestamp", e);
    }
}
Example usage of org.apache.flink.connectors.hive.FlinkHiveException in the Apache Flink project:
class HiveShimV310, method toHiveDate.
@Override
public Object toHiveDate(Object flinkDate) {
    // A null date maps straight through to null.
    if (flinkDate == null) {
        return null;
    }
    ensureSupportedFlinkDate(flinkDate);
    initDateTimeClasses();
    // Unwrap java.sql.Date to LocalDate before invoking the reflective Hive
    // constructor (the constructor is resolved elsewhere; its exact parameter
    // type cannot be confirmed from this method alone).
    Object constructorArg =
            flinkDate instanceof Date ? ((Date) flinkDate).toLocalDate() : flinkDate;
    try {
        return hiveDateConstructor.newInstance(constructorArg);
    } catch (InstantiationException | IllegalAccessException | InvocationTargetException e) {
        throw new FlinkHiveException("Failed to convert to Hive date", e);
    }
}
Example usage of org.apache.flink.connectors.hive.FlinkHiveException in the Apache Flink project:
class HiveMapredSplitReader, method nextRecord.
@Override
@SuppressWarnings("unchecked")
public RowData nextRecord(RowData reuse) throws IOException {
    if (reachedEnd()) {
        return null;
    }
    try {
        // Use HiveDeserializer to deserialize an object out of a Writable blob.
        Object hiveRowStruct = deserializer.deserialize(value);
        for (int i = 0; i < selectedFields.length; i++) {
            // Set non-partition columns only; indices past structFields.size()
            // refer to partition columns handled elsewhere.
            if (selectedFields[i] < structFields.size()) {
                StructField structField = structFields.get(selectedFields[i]);
                Object object =
                        HiveInspectors.toFlinkObject(
                                structField.getFieldObjectInspector(),
                                structObjectInspector.getStructFieldData(hiveRowStruct, structField),
                                hiveShim);
                row.setField(i, converters[i].toInternal(object));
            }
        }
    } catch (Exception e) {
        // Pass the exception to the logger so the stack trace is preserved in
        // the log output (previously only the message was logged).
        LOG.error("Error happens when converting hive data type to flink data type.", e);
        throw new FlinkHiveException(e);
    }
    this.fetched = false;
    return row;
}
Example usage of org.apache.flink.connectors.hive.FlinkHiveException in the Apache Flink project:
class HiveParserUtils, method genValuesRelNode.
/**
 * Creates a {@code LogicalValues} node for the given row type and literal rows.
 *
 * <p>{@code LogicalValues.create} is invoked reflectively because its third parameter is an
 * {@code ImmutableList} that may be either the shaded or the unshaded Guava class, depending on
 * the classpath; {@code useShadedImmutableList} selects the matching {@code Class} token.
 *
 * @param cluster the planner cluster the node belongs to
 * @param rowType the row type of the values
 * @param rows the literal rows, one inner list per row
 * @throws FlinkHiveException if the reflective invocation fails
 */
public static RelNode genValuesRelNode(RelOptCluster cluster, RelDataType rowType, List<List<RexLiteral>> rows) {
    // Each row must itself be an ImmutableList of the matching flavor.
    List<Object> immutableRows = rows.stream().map(HiveParserUtils::toImmutableList).collect(Collectors.toList());
    // Use Class<?>[] instead of a raw Class[] array (raw-type idiom fix; no behavior change).
    Class<?>[] argTypes = new Class<?>[] {RelOptCluster.class, RelDataType.class, null};
    if (useShadedImmutableList) {
        argTypes[2] = HiveParserUtils.shadedImmutableListClz;
    } else {
        argTypes[2] = HiveParserUtils.immutableListClz;
    }
    Method method = HiveReflectionUtils.tryGetMethod(LogicalValues.class, "create", argTypes);
    Preconditions.checkState(method != null, "Cannot get the method to create LogicalValues");
    try {
        // Static method: the first invoke argument (the receiver) is null.
        return (RelNode) method.invoke(null, cluster, rowType, HiveParserUtils.toImmutableList(immutableRows));
    } catch (IllegalAccessException | InvocationTargetException e) {
        throw new FlinkHiveException("Failed to create LogicalValues", e);
    }
}
Example usage of org.apache.flink.connectors.hive.FlinkHiveException in the Apache Flink project:
class SqlFunctionConverter, method visitCall.
@Override
public RexNode visitCall(RexCall call) {
    // Map the Hive operator to its Flink/Calcite counterpart before rebuilding the call.
    SqlOperator convertedOp = convertOperator(call.getOperator());
    List<RexNode> operands = call.getOperands();
    final boolean[] update = null;

    // Casts must keep the original call's result type explicitly.
    if (convertedOp instanceof SqlCastFunction) {
        return builder.makeCall(call.getType(), convertedOp, visitList(operands, update));
    }

    // Flink's current_timestamp has a different type from Hive's, so fold it
    // into a constant taken from the session's fixed query timestamp.
    if (convertedOp instanceof FlinkSqlTimestampFunction) {
        Timestamp currentTS =
                ((HiveParser.HiveParserSessionState) SessionState.get()).getHiveParserCurrentTS();
        HiveShim hiveShim = HiveParserUtils.getSessionHiveShim();
        try {
            return HiveParserRexNodeConverter.convertConstant(
                    new ExprNodeConstantDesc(hiveShim.toHiveTimestamp(currentTS)), cluster);
        } catch (SemanticException e) {
            throw new FlinkHiveException(e);
        }
    }

    // Ordinary operator: rebuild the call with converted operands.
    return builder.makeCall(convertedOp, visitList(operands, update));
}
Aggregations