Usage of com.netflix.metacat.common.server.connectors.model.FieldInfo in the Netflix metacat project.
Example: class HiveConnectorTableService, method updateTable.
/**
 * Updates the given Hive thrift {@code Table} in place from the Metacat {@code TableInfo}:
 * table parameters, storage descriptor (location, serde, input/output formats), columns and
 * partition keys. The table is marked EXTERNAL unless that parameter was already set.
 *
 * @param requestContext the request context, used to look up the parent database URI when
 *                       neither the incoming info nor the table carries a location
 * @param table          the Hive table to mutate in place
 * @param tableInfo      the Metacat table description to copy from
 * @throws MetaException if the Hive metastore interaction fails
 */
void updateTable(@Nonnull @NonNull final ConnectorContext requestContext, @Nonnull @NonNull final Table table, @Nonnull @NonNull final TableInfo tableInfo) throws MetaException {
// Replace a null or empty parameter map with a fresh mutable map before mutating it.
if (table.getParameters() == null || table.getParameters().isEmpty()) {
table.setParameters(Maps.newHashMap());
}
// Default to an EXTERNAL table; a value already set by the caller wins.
table.getParameters().putIfAbsent(PARAMETER_EXTERNAL, "TRUE");
// Merge in Metacat metadata, overwriting any colliding keys.
if (tableInfo.getMetadata() != null) {
table.getParameters().putAll(tableInfo.getMetadata());
}
//storage
final StorageDescriptor sd = table.getSd() != null ? table.getSd() : new StorageDescriptor();
String inputFormat = null;
String outputFormat = null;
Map<String, String> sdParameters = Maps.newHashMap();
// Location precedence: incoming serde URI, then the table's existing location,
// then <parent database location>/<table name>.
final String location = tableInfo.getSerde() == null ? null : tableInfo.getSerde().getUri();
if (location != null) {
sd.setLocation(location);
} else if (sd.getLocation() == null) {
final String locationStr = hiveConnectorDatabaseService.get(requestContext, QualifiedName.ofDatabase(tableInfo.getName().getCatalogName(), tableInfo.getName().getDatabaseName())).getUri();
final Path databasePath = new Path(locationStr);
final Path targetPath = new Path(databasePath, tableInfo.getName().getTableName());
sd.setLocation(targetPath.toString());
}
// Ensure a serde record exists, then name it after the table.
if (sd.getSerdeInfo() == null) {
sd.setSerdeInfo(new SerDeInfo());
}
final SerDeInfo serdeInfo = sd.getSerdeInfo();
serdeInfo.setName(tableInfo.getName().getTableName());
final StorageInfo storageInfo = tableInfo.getSerde();
if (storageInfo != null) {
// Copy serde/format settings from the incoming storage info; blank values leave
// the existing settings untouched.
if (!Strings.isNullOrEmpty(storageInfo.getSerializationLib())) {
serdeInfo.setSerializationLib(storageInfo.getSerializationLib());
}
if (storageInfo.getSerdeInfoParameters() != null && !storageInfo.getSerdeInfoParameters().isEmpty()) {
serdeInfo.setParameters(storageInfo.getSerdeInfoParameters());
}
inputFormat = storageInfo.getInputFormat();
outputFormat = storageInfo.getOutputFormat();
if (storageInfo.getParameters() != null && !storageInfo.getParameters().isEmpty()) {
sdParameters = storageInfo.getParameters();
}
} else if (table.getSd() != null) {
// No storage info supplied: fall back to the format already recorded on the table.
final HiveStorageFormat hiveStorageFormat = extractHiveStorageFormat(table);
serdeInfo.setSerializationLib(hiveStorageFormat.getSerde());
serdeInfo.setParameters(ImmutableMap.<String, String>of());
inputFormat = hiveStorageFormat.getInputFormat();
outputFormat = hiveStorageFormat.getOutputFormat();
}
// Split the Metacat fields into data columns and partition keys.
final ImmutableList.Builder<FieldSchema> columnsBuilder = ImmutableList.builder();
final ImmutableList.Builder<FieldSchema> partitionKeysBuilder = ImmutableList.builder();
if (tableInfo.getFields() != null) {
for (FieldInfo column : tableInfo.getFields()) {
final FieldSchema field = hiveMetacatConverters.metacatToHiveField(column);
if (column.isPartitionKey()) {
partitionKeysBuilder.add(field);
} else {
columnsBuilder.add(field);
}
}
}
final ImmutableList<FieldSchema> columns = columnsBuilder.build();
// Only overwrite columns/formats/parameters/keys when new values were actually provided.
if (!columns.isEmpty()) {
sd.setCols(columns);
}
if (!Strings.isNullOrEmpty(inputFormat)) {
sd.setInputFormat(inputFormat);
}
if (!Strings.isNullOrEmpty(outputFormat)) {
sd.setOutputFormat(outputFormat);
}
if (sd.getParameters() == null) {
sd.setParameters(sdParameters);
}
//partition keys
final ImmutableList<FieldSchema> partitionKeys = partitionKeysBuilder.build();
if (!partitionKeys.isEmpty()) {
table.setPartitionKeys(partitionKeys);
}
table.setSd(sd);
}
Usage of com.netflix.metacat.common.server.connectors.model.FieldInfo in the Netflix metacat project.
Example: class CassandraConnectorTableService, method getTableInfo.
/**
 * Builds a Metacat {@code TableInfo} from the Cassandra driver's table metadata, converting
 * every column's Cassandra type to its Metacat equivalent.
 *
 * @param name          the qualified name whose catalog and database scope the result
 * @param tableMetadata the Cassandra table metadata to convert
 * @return the converted table info with one field per Cassandra column
 */
private TableInfo getTableInfo(@Nonnull @NonNull final QualifiedName name, @Nonnull @NonNull final TableMetadata tableMetadata) {
    // TODO: Ignores clustering, primary key, index, etc columns. We need to rework TableInfo to support
    final ImmutableList.Builder<FieldInfo> columns = ImmutableList.builder();
    for (final ColumnMetadata columnMetadata : tableMetadata.getColumns()) {
        final String cassandraType = columnMetadata.getType().toString();
        final FieldInfo field = FieldInfo.builder()
            .name(columnMetadata.getName())
            .sourceType(cassandraType)
            .type(this.typeConverter.toMetacatType(cassandraType))
            .build();
        columns.add(field);
    }
    final QualifiedName tableName
        = QualifiedName.ofTable(name.getCatalogName(), name.getDatabaseName(), tableMetadata.getName());
    return TableInfo.builder().name(tableName).fields(columns.build()).build();
}
Usage of com.netflix.metacat.common.server.connectors.model.FieldInfo in the Netflix metacat project.
Example: class HiveConnectorTableService, method updateTable (variant with virtual-view support).
/**
 * Updates the given Hive thrift {@code Table} in place from the Metacat {@code TableInfo}:
 * table parameters, storage descriptor (location, serde, input/output formats), columns and
 * partition keys. Non-view tables are marked EXTERNAL; virtual views are validated instead.
 *
 * @param requestContext the request context, used to look up the parent database URI when
 *                       neither the incoming info nor the table carries a location
 * @param table          the Hive table to mutate in place
 * @param tableInfo      the Metacat table description to copy from
 * @throws MetaException if the Hive metastore interaction fails
 */
void updateTable(final ConnectorRequestContext requestContext, final Table table, final TableInfo tableInfo) throws MetaException {
// Replace a null or empty parameter map with a fresh mutable map before mutating it.
if (table.getParameters() == null || table.getParameters().isEmpty()) {
table.setParameters(Maps.newHashMap());
}
// otherwise leaves it as such as VIRTUAL_VIEW
if (!isVirtualView(table)) {
// Default to an EXTERNAL table; a value already set by the caller wins.
table.getParameters().putIfAbsent(PARAMETER_EXTERNAL, "TRUE");
} else {
validAndUpdateVirtualView(table);
}
// Merge in Metacat metadata, overwriting any colliding keys.
if (tableInfo.getMetadata() != null) {
table.getParameters().putAll(tableInfo.getMetadata());
}
// storage
final StorageDescriptor sd = table.getSd() != null ? table.getSd() : new StorageDescriptor();
String inputFormat = null;
String outputFormat = null;
Map<String, String> sdParameters = Maps.newHashMap();
// Location precedence: incoming serde URI, then the table's existing location,
// then <parent database location>/<table name>.
final String location = tableInfo.getSerde() == null ? null : tableInfo.getSerde().getUri();
if (location != null) {
sd.setLocation(location);
} else if (sd.getLocation() == null) {
final String locationStr = hiveConnectorDatabaseService.get(requestContext, QualifiedName.ofDatabase(tableInfo.getName().getCatalogName(), tableInfo.getName().getDatabaseName())).getUri();
final Path databasePath = new Path(locationStr);
final Path targetPath = new Path(databasePath, tableInfo.getName().getTableName());
sd.setLocation(targetPath.toString());
}
// Ensure a serde record exists, then name it after the table.
if (sd.getSerdeInfo() == null) {
sd.setSerdeInfo(new SerDeInfo());
}
final SerDeInfo serdeInfo = sd.getSerdeInfo();
serdeInfo.setName(tableInfo.getName().getTableName());
final StorageInfo storageInfo = tableInfo.getSerde();
if (storageInfo != null) {
// Copy serde/format settings from the incoming storage info; blank values leave
// the existing settings untouched.
if (!Strings.isNullOrEmpty(storageInfo.getSerializationLib())) {
serdeInfo.setSerializationLib(storageInfo.getSerializationLib());
}
if (storageInfo.getSerdeInfoParameters() != null && !storageInfo.getSerdeInfoParameters().isEmpty()) {
serdeInfo.setParameters(storageInfo.getSerdeInfoParameters());
}
inputFormat = storageInfo.getInputFormat();
outputFormat = storageInfo.getOutputFormat();
if (storageInfo.getParameters() != null && !storageInfo.getParameters().isEmpty()) {
sdParameters = storageInfo.getParameters();
}
} else if (table.getSd() != null) {
// No storage info supplied: fall back to the format already recorded on the table.
final HiveStorageFormat hiveStorageFormat = this.extractHiveStorageFormat(table);
serdeInfo.setSerializationLib(hiveStorageFormat.getSerde());
serdeInfo.setParameters(ImmutableMap.of());
inputFormat = hiveStorageFormat.getInputFormat();
outputFormat = hiveStorageFormat.getOutputFormat();
}
// Split the Metacat fields into data columns and partition keys.
final ImmutableList.Builder<FieldSchema> columnsBuilder = ImmutableList.builder();
final ImmutableList.Builder<FieldSchema> partitionKeysBuilder = ImmutableList.builder();
if (tableInfo.getFields() != null) {
for (FieldInfo column : tableInfo.getFields()) {
final FieldSchema field = hiveMetacatConverters.metacatToHiveField(column);
if (column.isPartitionKey()) {
partitionKeysBuilder.add(field);
} else {
columnsBuilder.add(field);
}
}
}
final ImmutableList<FieldSchema> columns = columnsBuilder.build();
// Only overwrite columns/formats/parameters/keys when new values were actually provided.
if (!columns.isEmpty()) {
sd.setCols(columns);
}
if (!Strings.isNullOrEmpty(inputFormat)) {
sd.setInputFormat(inputFormat);
}
if (!Strings.isNullOrEmpty(outputFormat)) {
sd.setOutputFormat(outputFormat);
}
if (sd.getParameters() == null) {
sd.setParameters(sdParameters);
}
// partition keys
final ImmutableList<FieldSchema> partitionKeys = partitionKeysBuilder.build();
if (!partitionKeys.isEmpty()) {
table.setPartitionKeys(partitionKeys);
}
table.setSd(sd);
}
Usage of com.netflix.metacat.common.server.connectors.model.FieldInfo in the Netflix metacat project.
Example: class HiveConnectorInfoConverter, method fromTableInfo.
/**
* Converts from TableDto to the connector table.
*
* @param tableInfo Metacat table Info
* @return connector table
*/
/**
 * Converts a Metacat {@code TableInfo} into a Hive thrift {@code Table}. A table whose view
 * info carries original text is emitted as a VIRTUAL_VIEW; everything else becomes an
 * EXTERNAL_TABLE. Missing name/owner/audit data falls back to empty strings and zero.
 *
 * @param tableInfo Metacat table Info
 * @return connector table
 */
@Override
public Table fromTableInfo(final TableInfo tableInfo) {
    final QualifiedName name = tableInfo.getName();
    final String tableName = name == null ? "" : name.getTableName();
    final String databaseName = name == null ? "" : name.getDatabaseName();
    final StorageInfo storageInfo = tableInfo.getSerde();
    final String owner = storageInfo == null || storageInfo.getOwner() == null ? "" : storageInfo.getOwner();
    final AuditInfo auditInfo = tableInfo.getAudit();
    int createTime = 0;
    if (auditInfo != null && auditInfo.getCreatedDate() != null) {
        createTime = dateToEpochSeconds(auditInfo.getCreatedDate());
    }
    final Map<String, String> params = tableInfo.getMetadata() == null ? new HashMap<>() : tableInfo.getMetadata();
    // Partition the fields by partition-key flag; keep immutable empty lists when absent.
    List<FieldSchema> partitionFields = Collections.emptyList();
    List<FieldSchema> nonPartitionFields = Collections.emptyList();
    final List<FieldInfo> fields = tableInfo.getFields();
    if (fields != null) {
        partitionFields = Lists.newArrayListWithCapacity(fields.size());
        nonPartitionFields = Lists.newArrayListWithCapacity(fields.size());
        for (final FieldInfo fieldInfo : fields) {
            final List<FieldSchema> target = fieldInfo.isPartitionKey() ? partitionFields : nonPartitionFields;
            target.add(metacatToHiveField(fieldInfo));
        }
    }
    final StorageDescriptor sd = fromStorageInfo(storageInfo, nonPartitionFields);
    final ViewInfo viewInfo = tableInfo.getView();
    // A view is only "virtual" when it actually has original text to serve as its definition.
    final boolean virtualView = viewInfo != null && !Strings.isNullOrEmpty(viewInfo.getViewOriginalText());
    final String tableType = virtualView ? TableType.VIRTUAL_VIEW.name() : TableType.EXTERNAL_TABLE.name();
    return new Table(
        tableName,
        databaseName,
        owner,
        createTime,
        0,
        0,
        sd,
        partitionFields,
        params,
        virtualView ? viewInfo.getViewOriginalText() : null,
        virtualView ? viewInfo.getViewExpandedText() : null,
        tableType);
}
Usage of com.netflix.metacat.common.server.connectors.model.FieldInfo in the Netflix metacat project.
Example: class JdbcConnectorTableService, method get.
/**
* {@inheritDoc}
*/
/**
 * {@inheritDoc}
 *
 * Reads column metadata for the named table via JDBC and converts each column into a
 * {@code FieldInfo}. Throws {@code TableNotFoundException} when no columns are returned and
 * the table does not exist; wraps any {@code SQLException} in a {@code ConnectorException}.
 */
@Override
public TableInfo get(@Nonnull final ConnectorRequestContext context, @Nonnull final QualifiedName name) {
    log.debug("Beginning to get table metadata for qualified name {} for request {}", name, context);
    try (Connection connection = this.dataSource.getConnection()) {
        final String database = name.getDatabaseName();
        connection.setSchema(database);
        final ImmutableList.Builder<FieldInfo> fields = ImmutableList.builder();
        try (ResultSet columns = this.getColumns(connection, name)) {
            while (columns.next()) {
                final String type = columns.getString("TYPE_NAME");
                final String size = columns.getString("COLUMN_SIZE");
                final String precision = columns.getString("DECIMAL_DIGITS");
                final String sourceType = this.buildSourceType(type, size, precision);
                final FieldInfo.FieldInfoBuilder fieldInfo = FieldInfo.builder()
                    .name(columns.getString("COLUMN_NAME"))
                    .sourceType(sourceType)
                    .type(this.typeConverter.toMetacatType(sourceType))
                    .comment(columns.getString("REMARKS"))
                    // Constant-first equals: drivers may return null for IS_NULLABLE
                    // ("nullability unknown" per the JDBC spec), which previously NPE'd.
                    .isNullable("YES".equals(columns.getString("IS_NULLABLE")))
                    .defaultValue(columns.getString("COLUMN_DEF"));
                if (size != null) {
                    fieldInfo.size(Integer.parseInt(size));
                }
                fields.add(fieldInfo.build());
            }
        }
        final List<FieldInfo> fieldInfos = fields.build();
        // If table does not exist, throw TableNotFoundException.
        if (fieldInfos.isEmpty() && !exists(context, name)) {
            throw new TableNotFoundException(name);
        }
        // Set table details. Reuse the already-built list instead of calling build() again,
        // which previously allocated a second identical ImmutableList.
        final TableInfo result = TableInfo.builder().name(name).fields(fieldInfos).build();
        setTableInfoDetails(connection, result);
        log.debug("Finished getting table metadata for qualified name {} for request {}", name, context);
        return result;
    } catch (final SQLException se) {
        throw new ConnectorException(se.getMessage(), se);
    }
}
Aggregations