Use of org.apache.flink.connectors.hive.FlinkHiveException in project flink by apache.
Class HiveShimV100, method getHiveOutputFormatClass.
@Override
public Class getHiveOutputFormatClass(Class outputFormatClz) {
    try {
        // Hive 1.0.x exposes getOutputFormatSubstitute(Class, boolean), so look it up reflectively.
        Class utilClass = HiveFileFormatUtils.class;
        Method utilMethod = utilClass.getDeclaredMethod("getOutputFormatSubstitute", Class.class, boolean.class);
        Class res = (Class) utilMethod.invoke(null, outputFormatClz, false);
        Preconditions.checkState(res != null, "No Hive substitute output format for " + outputFormatClz);
        return res;
    } catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException e) {
        // Any reflection failure surfaces as FlinkHiveException.
        throw new FlinkHiveException("Failed to get HiveOutputFormat for " + outputFormatClz, e);
    }
}
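For orientation, here is a minimal caller sketch; it is not part of the Flink sources. It assumes HiveShimV100 can be instantiated directly and uses Hive's HiveIgnoreKeyTextOutputFormat purely as an example argument; in Flink the shim is normally chosen for a specific Hive version rather than constructed by hand.

import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat;

public class OutputFormatSubstituteSketch {
    public static void main(String[] args) {
        // Hypothetical direct instantiation for the example; a failure in the shim
        // surfaces as an unchecked FlinkHiveException.
        HiveShimV100 shim = new HiveShimV100();
        Class substitute = shim.getHiveOutputFormatClass(HiveIgnoreKeyTextOutputFormat.class);
        System.out.println("Substitute output format: " + substitute.getName());
    }
}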
Use of org.apache.flink.connectors.hive.FlinkHiveException in project flink by apache.
Class HiveShimV120, method init.
private static void init() {
    // Lazily resolve Hive's interval type infos and the registerTemporaryUDF handle via
    // reflection, using double-checked locking so the lookup happens only once.
    if (!inited) {
        synchronized (HiveShimV120.class) {
            if (!inited) {
                try {
                    Field field = TypeInfoFactory.class.getDeclaredField("intervalYearMonthTypeInfo");
                    intervalYearMonthTypeInfo = (PrimitiveTypeInfo) field.get(null);
                    field = TypeInfoFactory.class.getDeclaredField("intervalDayTimeTypeInfo");
                    intervalDayTimeTypeInfo = (PrimitiveTypeInfo) field.get(null);
                    funcResourceClz = Thread.currentThread().getContextClassLoader().loadClass("org.apache.hadoop.hive.ql.exec.FunctionInfo$FunctionResource");
                    // registerTemporaryUDF(String, Class, FunctionResource...) takes a varargs array of FunctionResource.
                    registerTemporaryUDF = FunctionRegistry.class.getDeclaredMethod("registerTemporaryUDF", String.class, Class.class, Array.newInstance(funcResourceClz, 0).getClass());
                    inited = true;
                } catch (Exception e) {
                    // Any reflection failure is wrapped in FlinkHiveException.
                    throw new FlinkHiveException(e);
                }
            }
        }
    }
}
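The method above is a double-checked locking wrapper around one-time reflective lookups, with any failure rethrown as FlinkHiveException. Below is a self-contained sketch of the same pattern with illustrative names only (LazyReflectiveInit and the String.valueOf lookup are not from Flink); marking the flag volatile keeps the double-checked locking safe under the Java memory model.

import java.lang.reflect.Method;

public class LazyReflectiveInit {
    private static volatile boolean inited = false;
    private static Method target;

    private static void init() {
        if (!inited) {
            synchronized (LazyReflectiveInit.class) {
                if (!inited) {
                    try {
                        // One-time reflective lookup, cached for later invocations.
                        target = String.class.getDeclaredMethod("valueOf", int.class);
                        inited = true;
                    } catch (NoSuchMethodException e) {
                        // Mirrors how the shim wraps reflection failures in an unchecked exception.
                        throw new RuntimeException(e);
                    }
                }
            }
        }
    }

    public static void main(String[] args) throws Exception {
        init();
        System.out.println(target.invoke(null, 42)); // prints "42"
    }
}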
Use of org.apache.flink.connectors.hive.FlinkHiveException in project flink by apache.
Class HiveShimV310, method initDateTimeClasses.
private static void initDateTimeClasses() {
    // Hive 3.1 ships its own Timestamp/Date types backed by LocalDateTime/LocalDate;
    // cache their constructors and backing fields via reflection, once, under a lock.
    if (!hiveClassesInited) {
        synchronized (HiveShimV310.class) {
            if (!hiveClassesInited) {
                try {
                    hiveTimestampClz = Class.forName("org.apache.hadoop.hive.common.type.Timestamp");
                    hiveTimestampConstructor = hiveTimestampClz.getDeclaredConstructor(LocalDateTime.class);
                    hiveTimestampConstructor.setAccessible(true);
                    hiveTimestampLocalDateTime = hiveTimestampClz.getDeclaredField("localDateTime");
                    hiveTimestampLocalDateTime.setAccessible(true);
                    timestampWritableConstructor = Class.forName("org.apache.hadoop.hive.serde2.io.TimestampWritableV2").getDeclaredConstructor(hiveTimestampClz);
                    hiveDateClz = Class.forName("org.apache.hadoop.hive.common.type.Date");
                    hiveDateConstructor = hiveDateClz.getDeclaredConstructor(LocalDate.class);
                    hiveDateConstructor.setAccessible(true);
                    hiveDateLocalDate = hiveDateClz.getDeclaredField("localDate");
                    hiveDateLocalDate.setAccessible(true);
                    dateWritableConstructor = Class.forName("org.apache.hadoop.hive.serde2.io.DateWritableV2").getDeclaredConstructor(hiveDateClz);
                } catch (ClassNotFoundException | NoSuchMethodException | NoSuchFieldException e) {
                    throw new FlinkHiveException("Failed to get Hive timestamp class and constructor", e);
                }
                hiveClassesInited = true;
            }
        }
    }
}
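The cached constructor can then be used to build Hive's Timestamp from a java.time.LocalDateTime. The following sketch is illustrative rather than Flink code; it assumes a Hive 3.1+ jar on the classpath and repeats the reflective lookup inline instead of caching it as the shim does.

import java.lang.reflect.Constructor;
import java.time.LocalDateTime;

public class HiveTimestampSketch {
    public static void main(String[] args) throws Exception {
        // Class and constructor names are taken from the snippet above.
        Class<?> hiveTimestampClz = Class.forName("org.apache.hadoop.hive.common.type.Timestamp");
        Constructor<?> ctor = hiveTimestampClz.getDeclaredConstructor(LocalDateTime.class);
        ctor.setAccessible(true); // the constructor is not public, hence setAccessible, as in the shim
        Object hiveTimestamp = ctor.newInstance(LocalDateTime.now());
        System.out.println(hiveTimestamp);
    }
}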
Use of org.apache.flink.connectors.hive.FlinkHiveException in project flink by apache.
Class HiveShimV310, method toFlinkDate.
@Override
public LocalDate toFlinkDate(Object hiveDate) {
    initDateTimeClasses();
    Preconditions.checkArgument(hiveDateClz.isAssignableFrom(hiveDate.getClass()), "Expecting Hive date to be an instance of %s, but actually got %s", hiveDateClz.getName(), hiveDate.getClass().getName());
    try {
        // Read the LocalDate that backs Hive's Date type via the cached reflective field.
        return (LocalDate) hiveDateLocalDate.get(hiveDate);
    } catch (IllegalAccessException e) {
        throw new FlinkHiveException("Failed to convert to Flink date", e);
    }
}
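A hypothetical end-to-end usage sketch, not taken from Flink: it builds a Hive 3.1 Date reflectively (mirroring the cached handles above) and converts it back with the shim. Direct instantiation of HiveShimV310 and the hard-coded date are assumptions made for the example.

import java.lang.reflect.Constructor;
import java.time.LocalDate;

public class ToFlinkDateSketch {
    public static void main(String[] args) throws Exception {
        // Assumes Hive 3.1+ on the classpath; class and constructor taken from the init snippet above.
        Class<?> hiveDateClz = Class.forName("org.apache.hadoop.hive.common.type.Date");
        Constructor<?> ctor = hiveDateClz.getDeclaredConstructor(LocalDate.class);
        ctor.setAccessible(true);
        Object hiveDate = ctor.newInstance(LocalDate.of(2021, 1, 1));
        LocalDate flinkDate = new HiveShimV310().toFlinkDate(hiveDate);
        System.out.println(flinkDate); // 2021-01-01
    }
}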
Use of org.apache.flink.connectors.hive.FlinkHiveException in project flink by apache.
Class HiveShimV110, method getHiveOutputFormatClass.
@Override
public Class getHiveOutputFormatClass(Class outputFormatClz) {
    try {
        // Hive 1.1.0+ drops the boolean parameter: getOutputFormatSubstitute(Class).
        Class utilClass = HiveFileFormatUtils.class;
        Method utilMethod = utilClass.getDeclaredMethod("getOutputFormatSubstitute", Class.class);
        Class res = (Class) utilMethod.invoke(null, outputFormatClz);
        Preconditions.checkState(res != null, "No Hive substitute output format for " + outputFormatClz);
        return res;
    } catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException e) {
        throw new FlinkHiveException("Failed to get HiveOutputFormat for " + outputFormatClz, e);
    }
}
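Compared with the HiveShimV100 variant earlier on this page, only the reflective signature changes: the boolean parameter of getOutputFormatSubstitute is gone. The sketch below illustrates handling both signatures in a single helper; Flink itself keeps one shim class per Hive version, and the helper name and fallback structure here are hypothetical.

import java.lang.reflect.Method;
import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;

public class OutputFormatSubstituteResolver {
    public static Class resolve(Class outputFormatClz) {
        try {
            Method m;
            Object res;
            try {
                // Hive 1.0.x signature: (Class, boolean)
                m = HiveFileFormatUtils.class.getDeclaredMethod("getOutputFormatSubstitute", Class.class, boolean.class);
                res = m.invoke(null, outputFormatClz, false);
            } catch (NoSuchMethodException e) {
                // Hive 1.1.0+ signature: (Class)
                m = HiveFileFormatUtils.class.getDeclaredMethod("getOutputFormatSubstitute", Class.class);
                res = m.invoke(null, outputFormatClz);
            }
            if (res == null) {
                throw new IllegalStateException("No Hive substitute output format for " + outputFormatClz);
            }
            return (Class) res;
        } catch (ReflectiveOperationException e) {
            // Stand-in for FlinkHiveException in this self-contained sketch.
            throw new RuntimeException("Failed to get HiveOutputFormat for " + outputFormatClz, e);
        }
    }
}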