Example usage of com.thinkbiganalytics.discovery.model.DefaultTableSchema in the Kylo project by Teradata, from the class HiveMetastoreService, method getTableSchemas.
/**
 * Loads the schema (database name, table name, and columns) for every table registered in the
 * Hive metastore by querying the metastore's backing database directly.
 *
 * @return one {@link TableSchema} per table found in the metastore
 * @throws DataAccessException if the metastore query fails
 */
public List<TableSchema> getTableSchemas() throws DataAccessException {
    // Default (MySQL-style) metastore query.
    // BUG FIX: the select list previously read "c.COLUMN_NAME c.TYPE_NAME" (missing comma),
    // which is invalid SQL.
    String query = "SELECT d.NAME as \"DATABASE_NAME\", t.TBL_NAME, c.COLUMN_NAME, c.TYPE_NAME "
                   + "FROM COLUMNS_V2 c "
                   + "JOIN SDS s on s.CD_ID = c.CD_ID "
                   + "JOIN TBLS t ON s.SD_ID = t.SD_ID "
                   + "JOIN DBS d on d.DB_ID = t.DB_ID "
                   + "ORDER BY d.NAME, t.TBL_NAME";
    if (DatabaseType.POSTGRES.equals(getMetastoreDatabaseType())) {
        // Postgres folds unquoted identifiers to lower case, so every identifier is quoted.
        // Added the ORDER BY clause for consistency with the default query above.
        query = "SELECT d.\"NAME\" as \"DATABASE_NAME\", t.\"TBL_NAME\", c.\"COLUMN_NAME\", c.\"TYPE_NAME\" "
                + "FROM \"COLUMNS_V2\" c "
                + "JOIN \"SDS\" s on s.\"CD_ID\" = c.\"CD_ID\" "
                + "JOIN \"TBLS\" t ON s.\"SD_ID\" = t.\"SD_ID\" "
                + "JOIN \"DBS\" d on d.\"DB_ID\" = t.\"DB_ID\" "
                + "ORDER BY d.\"NAME\", t.\"TBL_NAME\"";
    }
    final List<TableSchema> metadata = new ArrayList<>();
    // database name -> (table name -> schema); groups the flat column-level result set by table.
    final Map<String, Map<String, TableSchema>> databaseTables = new HashMap<>();
    hiveMetatoreJdbcTemplate.query(query, new RowMapper<Object>() {
        @Override
        public TableSchema mapRow(ResultSet rs, int i) throws SQLException {
            String dbName = rs.getString("DATABASE_NAME");
            String columnName = rs.getString("COLUMN_NAME");
            String tableName = rs.getString("TBL_NAME");
            String columnType = rs.getString("TYPE_NAME");
            // First row seen for this database: create its table map.
            if (!databaseTables.containsKey(dbName)) {
                databaseTables.put(dbName, new HashMap<String, TableSchema>());
            }
            Map<String, TableSchema> tables = databaseTables.get(dbName);
            // First row seen for this table: create the schema and record it in the result list.
            if (!tables.containsKey(tableName)) {
                DefaultTableSchema schema = new DefaultTableSchema();
                schema.setName(tableName);
                schema.setSchemaName(dbName);
                schema.setFields(new ArrayList<Field>());
                tables.put(tableName, schema);
                metadata.add(schema);
            }
            // Append the current column as a field of its table's schema.
            TableSchema schema = tables.get(tableName);
            DefaultField field = new DefaultField();
            field.setName(columnName);
            field.setNativeDataType(columnType);
            field.setDerivedDataType(columnType);
            schema.getFields().add(field);
            return schema;
        }
    });
    return metadata;
}
Example usage of com.thinkbiganalytics.discovery.model.DefaultTableSchema in the Kylo project by Teradata, from the class PropertyExpressionResolverTest, method testFeedMetadataProperties.
/**
 * Verifies that a NiFi property expression referencing the source table schema name
 * resolves to the value configured on the feed metadata.
 */
@Test
public void testFeedMetadataProperties() {
    final FeedMetadata feed = new FeedMetadata();
    feed.setSystemFeedName("feedSystemName");
    feed.setCategory(new FeedCategory());

    // Build the table setup with both source and target schemas before attaching it.
    final TableSetup tableSetup = new TableSetup();
    tableSetup.setSourceTableSchema(new DefaultTableSchema());
    tableSetup.setTableSchema(new DefaultTableSchema());
    tableSetup.getSourceTableSchema().setName("sourceTableName");
    tableSetup.getTableSchema().setName("tableSchemaName");
    feed.setTable(tableSetup);

    // The expression should resolve against the source table schema's name.
    final NifiProperty property = createProperty("${metadata.table.sourceTableSchema.name}");
    Assert.assertTrue(resolver.resolveExpression(feed, property));
    Assert.assertEquals("sourceTableName", property.getValue());
}
Example usage of com.thinkbiganalytics.discovery.model.DefaultTableSchema in the Kylo project by Teradata, from the class TableSetupTest, method test.
/**
 * Round-trips a {@code FeedMetadata} instance through Jackson serialization and verifies
 * that the table schema name survives the JSON round trip.
 */
@Test
public void test() throws Exception {
    final FeedMetadata original = new FeedMetadata();
    original.setCategory(new FeedCategory());

    // Assemble the table schema with a single field, then attach it to the feed.
    final DefaultTableSchema schema = new DefaultTableSchema();
    schema.setName("test");
    final DefaultField field = new DefaultField();
    field.setName("field1");
    schema.getFields().add(field);
    final TableSetup tableSetup = new TableSetup();
    tableSetup.setTableSchema(schema);
    original.setTable(tableSetup);

    // Serialize to JSON and back, then compare the schema names.
    final ObjectMapper mapper = new ObjectMapper();
    final String json = mapper.writeValueAsString(original);
    final FeedMetadata restored = mapper.readValue(json, FeedMetadata.class);
    assertEquals(restored.getTable().getTableSchema().getName(), original.getTable().getTableSchema().getName());
}
Example usage of com.thinkbiganalytics.discovery.model.DefaultTableSchema in the Kylo project by Teradata, from the class DBSchemaParser, method describeTable.
/**
 * Gets the schema for the specified table.
 *
 * <p>Depending on whether the JDBC driver reports catalog or schema support, the supplied
 * {@code schema} argument is passed to the metadata lookup as the catalog, the schema, or both.
 *
 * @param schema the schema name
 * @param table the table name
 * @return the table schema, or {@code null} if no matching table is found
 * @throws IllegalArgumentException if the table name is empty
 * @throws RuntimeException if a database access error occurs
 */
@Nullable
public TableSchema describeTable(@Nullable final String schema, @Nonnull final String table) {
Validate.isTrue(!StringUtils.isEmpty(table), "Table expected");
// Connection may be Kerberos-authenticated depending on the ticket configuration.
try (final Connection conn = KerberosUtil.getConnectionWithOrWithoutKerberos(ds, kerberosTicketConfiguration)) {
// Start by treating the caller's "schema" as both catalog and schema, then narrow
// based on what the driver actually supports.
String queryCatalog = schema;
String querySchema = schema;
if (conn.getMetaData().supportsCatalogsInIndexDefinitions()) {
// Driver is catalog-oriented: wildcard the schema so the catalog filter drives the match.
querySchema = "%";
} else if (conn.getMetaData().supportsSchemasInIndexDefinitions()) {
// Driver is schema-oriented: drop the catalog filter entirely.
queryCatalog = null;
}
try (final ResultSet result = getTables(conn, queryCatalog, querySchema, table)) {
// Result columns follow DatabaseMetaData.getTables: 1 = catalog, 2 = schema, 3 = table name.
while (result != null && result.next()) {
final String cat = result.getString(1);
final String schem = result.getString(2);
final String tableName = result.getString(3);
// Accept the row when the table name matches and either we filtered by catalog already,
// no schema was requested, or the reported schema matches the requested one.
if (table.equalsIgnoreCase(tableName) && (queryCatalog != null || schema == null || schem == null || schema.equalsIgnoreCase(schem))) {
final DefaultTableSchema tableSchema = new DefaultTableSchema();
tableSchema.setFields(listColumns(conn, schema, tableName));
tableSchema.setName(tableName);
// Fall back to the catalog name when the driver reports no schema (e.g. MySQL).
tableSchema.setSchemaName(StringUtils.isBlank(schem) ? cat : schem);
return tableSchema;
}
}
}
} catch (final SQLException e) {
throw new SchemaParserException("Unable to describe schema [" + schema + "] table [" + table + "]", e);
}
// No matching table found.
return null;
}
Example usage of com.thinkbiganalytics.discovery.model.DefaultTableSchema in the Kylo project by Teradata, from the class CSVFileSchemaParser, method convertToTarget.
/**
 * Converts the raw file schema to the target schema type with correctly derived field types.
 *
 * @param target the target schema type
 * @param sourceSchema the source schema parsed from the file
 * @return the converted schema
 * @throws IllegalArgumentException if the target type is not supported by this parser
 */
protected Schema convertToTarget(TableSchemaType target, Schema sourceSchema) {
    // RAW needs no conversion; the source schema is returned untouched.
    if (target == TableSchemaType.RAW) {
        return sourceSchema;
    }
    if (target == TableSchemaType.HIVE) {
        // Copy the source properties onto a Hive schema, attach the record format,
        // and derive Hive-compatible field types.
        DefaultHiveSchema hiveSchema = new DefaultHiveSchema();
        BeanUtils.copyProperties(sourceSchema, hiveSchema);
        hiveSchema.setHiveFormat(deriveHiveRecordFormat());
        ParserHelper.deriveDataTypes(target, hiveSchema.getFields());
        return hiveSchema;
    }
    if (target == TableSchemaType.RDBMS) {
        // Copy the source properties onto a plain table schema and derive RDBMS field types.
        DefaultTableSchema rdbmsSchema = new DefaultTableSchema();
        BeanUtils.copyProperties(sourceSchema, rdbmsSchema);
        ParserHelper.deriveDataTypes(target, rdbmsSchema.getFields());
        return rdbmsSchema;
    }
    throw new IllegalArgumentException(target.name() + " is not supported by this parser");
}
Aggregations