Usage of com.thinkbiganalytics.discovery.schema.TableSchema in the Kylo project (by Teradata).
Example 1: the ensureDatasource method of the DerivedDatasourceFactory class.
/**
 * Ensures a {@link DerivedDatasource} exists for the given template datasource definition and feed,
 * creating or updating it inside a metadata transaction.
 *
 * @param definition    the processor/datasource definition declared by the template
 * @param feedMetadata  the feed whose properties drive variable resolution
 * @param allProperties every NiFi property available for expression resolution
 * @return the id of the ensured derived datasource, or {@code null} when no definition is
 *         registered for the processor type or the datasource should not be created for this feed
 */
public Datasource.ID ensureDatasource(TemplateProcessorDatasourceDefinition definition, FeedMetadata feedMetadata, List<NifiProperty> allProperties) {
    return metadataAccess.commit(() -> {
        // Look up the registered definition for this processor type; nothing to do without one.
        DatasourceDefinition datasourceDefinition = datasourceDefinitionProvider.findByProcessorType(definition.getProcessorType());
        if (datasourceDefinition == null) {
            return null;
        }
        // Saved feed properties that both match the definition and are declared datasource keys.
        List<NifiProperty> feedProperties = feedMetadata.getProperties().stream()
            .filter(property -> matchesDefinition(definition, property)
                                && datasourceDefinition.getDatasourcePropertyKeys().contains(property.getKey()))
            .collect(Collectors.toList());
        // Resolve any ${metadata.} expressions against the feed.
        List<NifiProperty> resolvedFeedProperties = propertyExpressionResolver.resolvePropertyExpressions(feedProperties, feedMetadata);
        List<NifiProperty> resolvedAllProperties = propertyExpressionResolver.resolvePropertyExpressions(allProperties, feedMetadata);
        // Pool of properties used to resolve the identity string, title, and description.
        List<NifiProperty> propertiesToEvaluate = new ArrayList<>();
        propertiesToEvaluate.addAll(feedProperties);
        propertiesToEvaluate.addAll(allProperties);
        propertyExpressionResolver.resolveStaticProperties(propertiesToEvaluate);

        String identityString = datasourceDefinition.getIdentityString();
        String desc = datasourceDefinition.getDescription();
        String title = datasourceDefinition.getTitle();

        PropertyExpressionResolver.ResolvedVariables identityStringPropertyResolution =
            propertyExpressionResolver.resolveVariables(identityString, propertiesToEvaluate);
        identityString = identityStringPropertyResolution.getResolvedString();

        PropertyExpressionResolver.ResolvedVariables titlePropertyResolution =
            propertyExpressionResolver.resolveVariables(title, propertiesToEvaluate);
        title = titlePropertyResolution.getResolvedString();

        if (desc != null) {
            PropertyExpressionResolver.ResolvedVariables descriptionPropertyResolution =
                propertyExpressionResolver.resolveVariables(desc, propertiesToEvaluate);
            desc = descriptionPropertyResolution.getResolvedString();
        }

        // Variables that only resolve at runtime cannot be part of a stable identity; fall back to
        // a readable title placeholder and the feed id for the identity string.
        if (propertyExpressionResolver.containsVariablesPatterns(identityString)) {
            title = propertyExpressionResolver.replaceAll(title, " {runtime variable} ");
            identityString = propertyExpressionResolver.replaceAll(identityString, feedMetadata.getId());
        }

        // Only create/update the datasource when this feed is the one that should own it.
        if (isCreateDatasource(datasourceDefinition, feedMetadata)) {
            Map<String, String> controllerServiceProperties = parseControllerServiceProperties(datasourceDefinition, feedProperties);
            Map<String, Object> properties = new HashMap<String, Object>(identityStringPropertyResolution.getResolvedVariables());
            properties.putAll(controllerServiceProperties);
            DerivedDatasource derivedDatasource =
                datasourceProvider.ensureDerivedDatasource(datasourceDefinition.getDatasourceType(), identityString, title, desc, properties);
            if (derivedDatasource != null) {
                // For Hive datasources, attach the table's column metadata when it is available.
                if ("HiveDatasource".equals(derivedDatasource.getDatasourceType())
                    && Optional.ofNullable(feedMetadata.getTable()).map(TableSetup::getTableSchema).map(TableSchema::getFields).isPresent()) {
                    derivedDatasource.setGenericProperties(
                        Collections.singletonMap("columns", (Serializable) feedMetadata.getTable().getTableSchema().getFields()));
                }
                return derivedDatasource.getId();
            }
        }
        return null;
    }, MetadataAccess.SERVICE);
}
Usage of com.thinkbiganalytics.discovery.schema.TableSchema in the Kylo project (by Teradata).
Example 2: the getTable method of the HiveMetastoreService class.
/**
 * Returns the schema (column names and types) of the given Hive table, read either via
 * JDBC metadata (when user impersonation is enabled, so the query runs as the end user)
 * or directly from the Hive metastore database.
 *
 * @param schema the Hive database/schema name
 * @param table  the Hive table name
 * @return the table schema; the returned object has no fields when the table is not found
 * @throws DataAccessException if the metastore query fails
 */
public TableSchema getTable(String schema, String table) throws DataAccessException {
    // JDBC metadata honors the impersonated user's permissions; the direct metastore query would not.
    if (userImpersonationEnabled) {
        return hiveService.getTableSchema(schema, table);
    }
    // Query the metastore tables directly. Bind variables (?) are used instead of string
    // concatenation so that schema/table names cannot inject SQL.
    String query = "SELECT d.NAME as \"DATABASE_NAME\", t.TBL_NAME, c.COLUMN_NAME, c.TYPE_NAME "
                   + "FROM COLUMNS_V2 c "
                   + "JOIN SDS s on s.CD_ID = c.CD_ID "
                   + "JOIN TBLS t ON s.SD_ID = t.SD_ID "
                   + "JOIN DBS d on d.DB_ID = t.DB_ID "
                   + "WHERE t.TBL_NAME = ? and d.NAME = ? "
                   + "ORDER BY d.NAME, t.TBL_NAME, c.INTEGER_IDX";
    if (DatabaseType.POSTGRES.equals(getMetastoreDatabaseType())) {
        // Postgres folds unquoted identifiers to lower case, so every identifier must be quoted.
        query = "SELECT d.\"NAME\" as \"DATABASE_NAME\", t.\"TBL_NAME\", c.\"COLUMN_NAME\",c.\"TYPE_NAME\" "
                + "FROM \"COLUMNS_V2\" c "
                + "JOIN \"SDS\" s on s.\"CD_ID\" = c.\"CD_ID\" "
                + "JOIN \"TBLS\" t ON s.\"SD_ID\" = t.\"SD_ID\" "
                + "JOIN \"DBS\" d on d.\"DB_ID\" = t.\"DB_ID\" "
                + "WHERE t.\"TBL_NAME\" = ? and d.\"NAME\" = ? "
                + "ORDER BY d.\"NAME\", t.\"TBL_NAME\", c.\"INTEGER_IDX\"";
    }
    final DefaultTableSchema metadata = new DefaultTableSchema();
    // One row per column; the mapper accumulates fields into the shared metadata object.
    hiveMetatoreJdbcTemplate.query(query, new Object[]{table, schema}, new RowMapper<Object>() {
        @Override
        public TableSchema mapRow(ResultSet rs, int i) throws SQLException {
            String dbName = rs.getString("DATABASE_NAME");
            String columnName = rs.getString("COLUMN_NAME");
            String tableName = rs.getString("TBL_NAME");
            String columnType = rs.getString("TYPE_NAME");
            metadata.setName(tableName);
            metadata.setSchemaName(dbName);
            DefaultField field = new DefaultField();
            field.setName(columnName);
            field.setNativeDataType(columnType);
            // The metastore stores a single type string; use it for both native and derived types.
            field.setDerivedDataType(columnType);
            metadata.getFields().add(field);
            return metadata;
        }
    });
    return metadata;
}
Aggregations