Use of org.apache.flink.table.catalog.exceptions.CatalogException in project flink by apache.
The class HiveShimV100, method getHiveRecordWriter.
@Override
public FileSinkOperator.RecordWriter getHiveRecordWriter(JobConf jobConf, Class outputFormatClz, Class<? extends Writable> outValClz, boolean isCompressed, Properties tableProps, Path outPath) {
    try {
        Class utilClass = HiveFileFormatUtils.class;
        HiveOutputFormat outputFormat = (HiveOutputFormat) outputFormatClz.newInstance();
        // Resolve HiveFileFormatUtils.getRecordWriter reflectively, since its signature differs across Hive versions.
        Method utilMethod = utilClass.getDeclaredMethod("getRecordWriter", JobConf.class, HiveOutputFormat.class, Class.class, boolean.class, Properties.class, Path.class, Reporter.class);
        return (FileSinkOperator.RecordWriter) utilMethod.invoke(null, jobConf, outputFormat, outValClz, isCompressed, tableProps, outPath, Reporter.NULL);
    } catch (Exception e) {
        throw new CatalogException("Failed to create Hive RecordWriter", e);
    }
}
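Because every argument is a plain Hadoop/Hive type, the shim can be exercised directly. The following is only a minimal usage sketch: the output-format class, value class, empty table properties, and output path are illustrative assumptions, not taken from the snippet above.

import java.util.Properties;
import org.apache.flink.table.catalog.hive.client.HiveShimV100;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobConf;

public class RecordWriterSketch {
    public static void main(String[] args) throws Exception {
        HiveShimV100 shim = new HiveShimV100();
        // Illustrative choices: plain-text output format, Text values, no compression.
        FileSinkOperator.RecordWriter writer = shim.getHiveRecordWriter(
                new JobConf(), HiveIgnoreKeyTextOutputFormat.class, Text.class,
                false, new Properties(), new Path("/tmp/hive-out/part-0"));
        writer.write(new Text("hello"));
        writer.close(false);  // false = normal close, not an abort
    }
}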
Use of org.apache.flink.table.catalog.exceptions.CatalogException in project flink by apache.
The class AbstractJdbcCatalog, method getTable.
// ------ tables and views ------
@Override
public CatalogBaseTable getTable(ObjectPath tablePath) throws TableNotExistException, CatalogException {
    if (!tableExists(tablePath)) {
        throw new TableNotExistException(getName(), tablePath);
    }
    String dbUrl = baseUrl + tablePath.getDatabaseName();
    try (Connection conn = DriverManager.getConnection(dbUrl, username, pwd)) {
        DatabaseMetaData metaData = conn.getMetaData();
        Optional<UniqueConstraint> primaryKey = getPrimaryKey(metaData, getSchemaName(tablePath), getTableName(tablePath));
        PreparedStatement ps = conn.prepareStatement(String.format("SELECT * FROM %s;", getSchemaTableName(tablePath)));
        ResultSetMetaData resultSetMetaData = ps.getMetaData();
        String[] columnNames = new String[resultSetMetaData.getColumnCount()];
        DataType[] types = new DataType[resultSetMetaData.getColumnCount()];
        for (int i = 1; i <= resultSetMetaData.getColumnCount(); i++) {
            columnNames[i - 1] = resultSetMetaData.getColumnName(i);
            types[i - 1] = fromJDBCType(tablePath, resultSetMetaData, i);
            if (resultSetMetaData.isNullable(i) == ResultSetMetaData.columnNoNulls) {
                types[i - 1] = types[i - 1].notNull();
            }
        }
        Schema.Builder schemaBuilder = Schema.newBuilder().fromFields(columnNames, types);
        primaryKey.ifPresent(pk -> schemaBuilder.primaryKeyNamed(pk.getName(), pk.getColumns()));
        Schema tableSchema = schemaBuilder.build();
        Map<String, String> props = new HashMap<>();
        props.put(CONNECTOR.key(), IDENTIFIER);
        props.put(URL.key(), dbUrl);
        props.put(USERNAME.key(), username);
        props.put(PASSWORD.key(), pwd);
        props.put(TABLE_NAME.key(), getSchemaTableName(tablePath));
        return CatalogTable.of(tableSchema, null, Lists.newArrayList(), props);
    } catch (Exception e) {
        throw new CatalogException(String.format("Failed getting table %s", tablePath.getFullName()), e);
    }
}
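Note that the SELECT statement is never executed: preparing it is enough for most JDBC drivers to expose ResultSetMetaData, from which the catalog derives column names, types, and nullability. The same metadata probe in isolation; the URL, credentials, and table name below are placeholders.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSetMetaData;

public class JdbcSchemaProbe {
    public static void main(String[] args) throws Exception {
        // Placeholder connection details; point these at a real database.
        try (Connection conn = DriverManager.getConnection("jdbc:postgresql://localhost:5432/mydb", "user", "secret");
             PreparedStatement ps = conn.prepareStatement("SELECT * FROM mytable")) {
            ResultSetMetaData md = ps.getMetaData();  // available without running the query on most drivers
            for (int i = 1; i <= md.getColumnCount(); i++) {
                boolean notNull = md.isNullable(i) == ResultSetMetaData.columnNoNulls;
                System.out.printf("%s %s%s%n", md.getColumnName(i), md.getColumnTypeName(i), notNull ? " NOT NULL" : "");
            }
        }
    }
}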
Use of org.apache.flink.table.catalog.exceptions.CatalogException in project flink by apache.
The class MySqlCatalog, method getDriverVersion.
private String getDriverVersion() {
    try (Connection conn = DriverManager.getConnection(defaultUrl, username, pwd)) {
        String driverVersion = conn.getMetaData().getDriverVersion();
        Pattern regexp = Pattern.compile("\\d+?\\.\\d+?\\.\\d+");
        Matcher matcher = regexp.matcher(driverVersion);
        return matcher.find() ? matcher.group(0) : null;
    } catch (Exception e) {
        throw new CatalogException(String.format("Failed in getting MySQL driver version by %s.", defaultUrl), e);
    }
}
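The pattern \d+?\.\d+?\.\d+ extracts the first x.y.z version token from the free-form string the driver reports. A standalone check of that regex; the sample driver string is illustrative, in the shape MySQL Connector/J typically reports.

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class DriverVersionRegexDemo {
    public static void main(String[] args) {
        // Illustrative driver-version string; real output varies by driver release.
        String driverVersion = "mysql-connector-java-8.0.28 (Revision: abcdef)";
        Matcher matcher = Pattern.compile("\\d+?\\.\\d+?\\.\\d+").matcher(driverVersion);
        System.out.println(matcher.find() ? matcher.group(0) : "no version found");  // prints 8.0.28
    }
}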
Use of org.apache.flink.table.catalog.exceptions.CatalogException in project flink by apache.
The class HiveInspectors, method getObjectInspector.
public static ObjectInspector getObjectInspector(TypeInfo type) {
    switch (type.getCategory()) {
        case PRIMITIVE:
            PrimitiveTypeInfo primitiveType = (PrimitiveTypeInfo) type;
            return PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(primitiveType);
        case LIST:
            ListTypeInfo listType = (ListTypeInfo) type;
            return ObjectInspectorFactory.getStandardListObjectInspector(getObjectInspector(listType.getListElementTypeInfo()));
        case MAP:
            MapTypeInfo mapType = (MapTypeInfo) type;
            return ObjectInspectorFactory.getStandardMapObjectInspector(getObjectInspector(mapType.getMapKeyTypeInfo()), getObjectInspector(mapType.getMapValueTypeInfo()));
        case STRUCT:
            StructTypeInfo structType = (StructTypeInfo) type;
            List<TypeInfo> fieldTypes = structType.getAllStructFieldTypeInfos();
            List<ObjectInspector> fieldInspectors = new ArrayList<ObjectInspector>();
            for (TypeInfo fieldType : fieldTypes) {
                fieldInspectors.add(getObjectInspector(fieldType));
            }
            return ObjectInspectorFactory.getStandardStructObjectInspector(structType.getAllStructFieldNames(), fieldInspectors);
        default:
            throw new CatalogException("Unsupported Hive type category " + type.getCategory());
    }
}
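The recursion builds container inspectors from the inspectors of their element, key/value, or field types, so arbitrarily nested Hive types resolve bottom-up. A small usage sketch, assuming Hive's serde2 TypeInfoUtils for parsing a type string and that HiveInspectors is on the classpath (import paths are assumptions):

import org.apache.flink.table.functions.hive.conversion.HiveInspectors;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class InspectorDemo {
    public static void main(String[] args) {
        // Parse a nested Hive type string into a TypeInfo tree.
        TypeInfo type = TypeInfoUtils.getTypeInfoFromTypeString("map<string,array<int>>");
        ObjectInspector oi = HiveInspectors.getObjectInspector(type);
        // The top-level inspector mirrors the outermost type category.
        System.out.println(oi.getCategory());  // MAP
        System.out.println(oi.getTypeName());  // map<string,array<int>>
    }
}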
Use of org.apache.flink.table.catalog.exceptions.CatalogException in project flink by apache.
The class HiveShimV100, method getFieldsFromDeserializer.
@Override
public List<FieldSchema> getFieldsFromDeserializer(Configuration conf, Table table, boolean skipConfError) {
    try {
        // Fetch the table's deserializer, then ask it for the field schemas. Both utility methods are
        // resolved reflectively via getHiveMetaStoreUtilsClass(), which abstracts over the utility class
        // used by the target Hive version.
        Method utilMethod = getHiveMetaStoreUtilsClass().getMethod("getDeserializer", Configuration.class, Table.class);
        Deserializer deserializer = (Deserializer) utilMethod.invoke(null, conf, table);
        utilMethod = getHiveMetaStoreUtilsClass().getMethod("getFieldsFromDeserializer", String.class, Deserializer.class);
        return (List<FieldSchema>) utilMethod.invoke(null, table.getTableName(), deserializer);
    } catch (Exception e) {
        throw new CatalogException("Failed to get table schema from deserializer", e);
    }
}
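Both shim methods above follow the same pattern: look up a static utility method by name and invoke it with a null receiver, so the code compiles and runs even when the utility's owning class or signature changed across Hive releases. A distilled sketch of that pattern, with Integer.parseInt standing in for the Hive utility:

import java.lang.reflect.Method;

public class StaticReflectionDemo {
    public static void main(String[] args) throws Exception {
        // Resolve a static method by name and parameter types at runtime...
        Method parseInt = Integer.class.getMethod("parseInt", String.class);
        // ...then invoke it with a null receiver, as the shims do for Hive utilities.
        int value = (Integer) parseInt.invoke(null, "42");
        System.out.println(value);  // 42
    }
}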