Use of com.thinkbiganalytics.discovery.schema.TableSchema in project kylo by Teradata: the class HiveColumnsUpgradeAction, method upgradeTo.
@Override
public void upgradeTo(final KyloVersion startingVersion) {
    log.info("Upgrading hive columns from version: {}", startingVersion);

    feedService.getFeeds().stream().forEach(feed -> {
        // Guard: skip any feed whose table / schema / field list is missing.
        final boolean hasFields = Optional.ofNullable(feed.getTable())
                .map(TableSetup::getTableSchema)
                .map(TableSchema::getFields)
                .isPresent();
        if (!hasFields) {
            return;
        }

        final TableSchema schema = feed.getTable().getTableSchema();

        // The derived Hive datasource is keyed by "<category>.<feed>" system names.
        final DerivedDatasource datasource =
                datasourceProvider.findDerivedDatasource("HiveDatasource", feed.getSystemCategoryName() + "." + feed.getSystemFeedName());
        if (datasource != null) {
            log.info("Upgrading schema: {}/{}", schema.getDatabaseName(), schema.getSchemaName());
            // Store the field list on the datasource so downstream consumers see the columns.
            datasource.setGenericProperties(Collections.singletonMap("columns", (Serializable) schema.getFields()));
        }
    });
}
Use of com.thinkbiganalytics.discovery.schema.TableSchema in project kylo by Teradata: the class DBSchemaParserTest, method describeTableSqlServer.
/**
 * Verify describing a SQL Server table.
 *
 * <p>SQL Server is exercised by making the MySQL-style catalog lookup
 * ({@code getTables("mydb", "%", ...)}) throw, forcing the parser to fall back to the
 * schema-based lookup ({@code getTables(null, "mydb", ...)}).</p>
 */
@Test
@SuppressWarnings("unchecked")
public void describeTableSqlServer() throws Exception {
    // Mock data source
    final DataSource dataSource = Mockito.mock(DataSource.class);
    final Connection connection = Mockito.mock(Connection.class);
    Mockito.when(dataSource.getConnection()).thenReturn(connection);

    final DatabaseMetaData dbMetaData = Mockito.mock(DatabaseMetaData.class);
    // Simulate SQL Server rejecting the catalog-style lookup so the parser retries by schema.
    Mockito.when(dbMetaData.getTables(Mockito.eq("mydb"), Mockito.eq("%"), Mockito.eq("mytable"), Mockito.any(String[].class))).thenThrow(SQLException.class);
    Mockito.when(dbMetaData.supportsCatalogsInIndexDefinitions()).thenReturn(true);
    Mockito.when(connection.getMetaData()).thenReturn(dbMetaData);

    // One matching table: catalog "mycat", schema "mydb", name "mytable".
    final ResultSet tablesResultSet = Mockito.mock(ResultSet.class);
    Mockito.when(tablesResultSet.next()).thenReturn(true);
    Mockito.when(tablesResultSet.getString(1)).thenReturn("mycat");
    Mockito.when(tablesResultSet.getString(2)).thenReturn("mydb");
    Mockito.when(tablesResultSet.getString(3)).thenReturn("mytable");
    Mockito.when(dbMetaData.getTables(Mockito.isNull(String.class), Mockito.eq("mydb"), Mockito.eq("mytable"), Mockito.any(String[].class))).thenReturn(tablesResultSet);

    // No primary keys.
    final ResultSet keysResultSet = Mockito.mock(ResultSet.class);
    Mockito.when(keysResultSet.next()).thenReturn(false);
    Mockito.when(dbMetaData.getPrimaryKeys(null, "mydb", "mytable")).thenReturn(keysResultSet);

    // One column: "mycol", JDBC type 12 (VARCHAR), nullable.
    final ResultSet columnsResultSet = Mockito.mock(ResultSet.class);
    Mockito.when(columnsResultSet.next()).thenReturn(true, false);
    Mockito.when(columnsResultSet.getString("TABLE_CAT")).thenReturn("mydb");
    Mockito.when(columnsResultSet.getString("COLUMN_NAME")).thenReturn("mycol");
    Mockito.when(columnsResultSet.getInt("DATA_TYPE")).thenReturn(12);
    Mockito.when(columnsResultSet.getString("REMARKS")).thenReturn("string column");
    Mockito.when(columnsResultSet.getString("IS_NULLABLE")).thenReturn("YES");
    Mockito.when(dbMetaData.getColumns("mycat", "mydb", "mytable", null)).thenReturn(columnsResultSet);

    // Test describing table
    final DBSchemaParser parser = new DBSchemaParser(dataSource, new KerberosTicketConfiguration());
    final TableSchema schema = parser.describeTable("mydb", "mytable");
    Assert.assertNotNull("Expected schema but table was not found", schema);
    Assert.assertEquals("mytable", schema.getName());
    Assert.assertEquals("mydb", schema.getSchemaName());

    final List<Field> fields = schema.getFields();
    Assert.assertNotNull("Expected schema to have fields", fields);
    Assert.assertEquals(1, fields.size());
    Assert.assertEquals("mycol", fields.get(0).getName());
    Assert.assertEquals("VARCHAR", fields.get(0).getNativeDataType());
    Assert.assertEquals("string", fields.get(0).getDerivedDataType());
    Assert.assertEquals("string column", fields.get(0).getDescription());
    // Fixed: messages previously referred to an "id field" copied from the MySQL test.
    Assert.assertTrue("Expected mycol field to be nullable", fields.get(0).isNullable());
    Assert.assertFalse("Expected mycol field to be regular", fields.get(0).isPrimaryKey());
}
Use of com.thinkbiganalytics.discovery.schema.TableSchema in project kylo by Teradata: the class DBSchemaParserTest, method describeTableMySql.
/**
 * Verify describing a MySQL table.
 *
 * <p>Two columns are mocked: a non-nullable BIGINT primary key "id" and a nullable
 * VARCHAR "mycol".</p>
 */
@Test
public void describeTableMySql() throws Exception {
    // Mock data source
    final DataSource dataSource = Mockito.mock(DataSource.class);
    final Connection connection = Mockito.mock(Connection.class);
    Mockito.when(dataSource.getConnection()).thenReturn(connection);

    final DatabaseMetaData dbMetaData = Mockito.mock(DatabaseMetaData.class);
    Mockito.when(dbMetaData.supportsCatalogsInIndexDefinitions()).thenReturn(true);
    Mockito.when(connection.getMetaData()).thenReturn(dbMetaData);

    // One matching table in catalog "mydb".
    final ResultSet tablesResultSet = Mockito.mock(ResultSet.class);
    Mockito.when(tablesResultSet.next()).thenReturn(true);
    Mockito.when(tablesResultSet.getString(1)).thenReturn("mydb");
    Mockito.when(tablesResultSet.getString(3)).thenReturn("mytable");
    Mockito.when(dbMetaData.getTables(Mockito.eq("mydb"), Mockito.eq("%"), Mockito.eq("mytable"), Mockito.any(String[].class))).thenReturn(tablesResultSet);

    // Single primary key column: "id".
    final ResultSet keysResultSet = Mockito.mock(ResultSet.class);
    Mockito.when(keysResultSet.next()).thenReturn(true, false);
    Mockito.when(keysResultSet.getString("COLUMN_NAME")).thenReturn("id");
    Mockito.when(keysResultSet.getString("TABLE_CAT")).thenReturn("mydb");
    Mockito.when(dbMetaData.getPrimaryKeys(null, "mydb", "mytable")).thenReturn(keysResultSet);

    // Two columns: "id" (JDBC -5 = BIGINT, NOT NULL) then "mycol" (JDBC 12 = VARCHAR, nullable).
    final ResultSet columnsResultSet = Mockito.mock(ResultSet.class);
    Mockito.when(columnsResultSet.next()).thenReturn(true, true, false);
    Mockito.when(columnsResultSet.getString("TABLE_CAT")).thenReturn("mydb");
    Mockito.when(columnsResultSet.getString("COLUMN_NAME")).thenReturn("id", "mycol");
    Mockito.when(columnsResultSet.getInt("DATA_TYPE")).thenReturn(-5, 12);
    Mockito.when(columnsResultSet.getString("REMARKS")).thenReturn("primary key", "string column");
    Mockito.when(columnsResultSet.getString("IS_NULLABLE")).thenReturn("NO", "YES");
    Mockito.when(dbMetaData.getColumns("mydb", null, "mytable", null)).thenReturn(columnsResultSet);

    // Test describing table
    final DBSchemaParser parser = new DBSchemaParser(dataSource, new KerberosTicketConfiguration());
    final TableSchema schema = parser.describeTable("mydb", "mytable");
    Assert.assertNotNull("Expected schema but table was not found", schema);
    Assert.assertEquals("mytable", schema.getName());
    Assert.assertEquals("mydb", schema.getSchemaName());

    final List<Field> fields = schema.getFields();
    Assert.assertNotNull("Expected schema to have fields", fields);
    // Fixed: assert the exact field count once, replacing the former weak ">= 1"/">= 2"
    // checks (whose messages contradicted their conditions) and the duplicate trailing assert.
    Assert.assertEquals("Expected exactly 2 fields", 2, fields.size());

    Assert.assertEquals("id", fields.get(0).getName());
    Assert.assertEquals("BIGINT", fields.get(0).getNativeDataType());
    Assert.assertEquals("bigint", fields.get(0).getDerivedDataType());
    Assert.assertEquals("primary key", fields.get(0).getDescription());
    Assert.assertFalse("Expected id field to be non-nullable", fields.get(0).isNullable());
    Assert.assertTrue("Expected id field to be primary key", fields.get(0).isPrimaryKey());

    Assert.assertEquals("mycol", fields.get(1).getName());
    Assert.assertEquals("VARCHAR", fields.get(1).getNativeDataType());
    Assert.assertEquals("string", fields.get(1).getDerivedDataType());
    Assert.assertEquals("string column", fields.get(1).getDescription());
    // Fixed: messages previously said "id field" while asserting on the mycol field.
    Assert.assertTrue("Expected mycol field to be nullable", fields.get(1).isNullable());
    Assert.assertFalse("Expected mycol field to be regular", fields.get(1).isPrimaryKey());
}
Use of com.thinkbiganalytics.discovery.schema.TableSchema in project kylo by Teradata: the class DatasourceController, method describeTable.
/**
 * Gets the schema of the specified table using the specified data source.
 *
 * @param idStr     the data source id
 * @param tableName the table name
 * @param schema    the schema name, or {@code null} to search all schemas
 * @return the table and field details
 */
@GET
@Path("{id}/tables/{tableName}")
@Produces(MediaType.APPLICATION_JSON)
@ApiOperation(value = "Gets the schema of the specified table.", notes = "Connects to the database specified by the data source.")
@ApiResponses({ @ApiResponse(code = 200, message = "Returns the table schema.", response = TableSchema.class), @ApiResponse(code = 403, message = "Access denied.", response = RestResponseStatus.class), @ApiResponse(code = 404, message = "A JDBC data source with that id does not exist.", response = RestResponseStatus.class), @ApiResponse(code = 500, message = "NiFi or the database are unavailable.", response = RestResponseStatus.class) })
public Response describeTable(@PathParam("id") final String idStr, @PathParam("tableName") final String tableName, @QueryParam("schema") final String schema) {
    // Verify user has access to data source
    final Optional<com.thinkbiganalytics.metadata.api.datasource.Datasource.ID> id = metadata.read(() -> {
        accessController.checkPermission(AccessController.SERVICES, FeedServicesAccessControl.ACCESS_DATASOURCES);
        final com.thinkbiganalytics.metadata.api.datasource.Datasource datasource = datasourceProvider.getDatasource(datasourceProvider.resolve(idStr));
        return Optional.ofNullable(datasource).map(com.thinkbiganalytics.metadata.api.datasource.Datasource::getId);
    });

    // Retrieve table description using system user
    return metadata.read(() -> {
        // Resolve the id to a JDBC datasource (ADMIN level so connection details are present).
        final Optional<JdbcDatasource> jdbcDatasource = id
                .map(datasourceProvider::getDatasource)
                .map(ds -> datasourceTransform.toDatasource(ds, DatasourceModelTransform.Level.ADMIN))
                .filter(JdbcDatasource.class::isInstance)
                .map(JdbcDatasource.class::cast);

        // Describe the table, or 404 when the id does not resolve to a JDBC datasource.
        final TableSchema tableSchema = jdbcDatasource
                .map(datasource -> dbcpConnectionPoolTableInfo.describeTableForDatasource(datasource, schema, tableName))
                .orElseThrow(() -> new NotFoundException("No JDBC datasource exists with the given ID: " + idStr));

        return Response.ok(tableSchema).build();
    }, MetadataAccess.SERVICE);
}
Use of com.thinkbiganalytics.discovery.schema.TableSchema in project kylo by Teradata: the class DatasourceController, method getTablesAndColumns.
/**
 * Gets the tables and their columns from the specified data source for given schema
 *
 * @param idStr  the data source id
 * @param schema the schema name, or {@code null} for all schemas
 * @return the list of tables and their columns
 */
@GET
@Path("{id}/table-columns")
@Produces(MediaType.APPLICATION_JSON)
@ApiOperation(value = "Gets the tables and their columns from the data source for given schema", notes = "Connects to the database specified by the data source.")
@ApiResponses({ @ApiResponse(code = 200, message = "Returns tables and columns", response = String.class, responseContainer = "List"), @ApiResponse(code = 403, message = "Access denied.", response = RestResponseStatus.class), @ApiResponse(code = 404, message = "A JDBC data source with that id does not exist.", response = RestResponseStatus.class), @ApiResponse(code = 500, message = "NiFi or the database are unavailable.", response = RestResponseStatus.class) })
public Response getTablesAndColumns(@PathParam("id") final String idStr, @QueryParam("schema") final String schema) {
    // Verify user has access to data source
    final Optional<com.thinkbiganalytics.metadata.api.datasource.Datasource.ID> id = metadata.read(() -> {
        accessController.checkPermission(AccessController.SERVICES, FeedServicesAccessControl.ACCESS_DATASOURCES);
        final com.thinkbiganalytics.metadata.api.datasource.Datasource datasource = datasourceProvider.getDatasource(datasourceProvider.resolve(idStr));
        return Optional.ofNullable(datasource).map(com.thinkbiganalytics.metadata.api.datasource.Datasource::getId);
    });

    // Retrieve table names using system user
    return metadata.read(() -> {
        JdbcDatasource datasource = id.map(datasourceProvider::getDatasource).map(ds -> datasourceTransform.toDatasource(ds, DatasourceModelTransform.Level.ADMIN)).filter(JdbcDatasource.class::isInstance).map(JdbcDatasource.class::cast).orElseThrow(() -> new NotFoundException("No JDBC datasource exists with the given ID: " + idStr));
        List<String> tableNamesForDatasource = dbcpConnectionPoolTableInfo.getTableNamesForDatasource(datasource, schema, null);
        List<DatabaseMetadata> tables = new ArrayList<>();
        if (tableNamesForDatasource != null) {
            tables = tableNamesForDatasource.stream().flatMap(schemaNameDotTableName -> {
                // Names come back qualified as "<schema>.<table>". Fixed: the previous code
                // called schema.length() unconditionally, throwing an NPE when schema is null
                // (the documented "all schemas" case). Recover the schema portion from the
                // qualified name instead; behavior is unchanged when schema is provided.
                final int separatorIdx = (schema != null) ? schema.length() : schemaNameDotTableName.indexOf('.');
                final String schemaName = (schema != null) ? schema
                        : (separatorIdx >= 0 ? schemaNameDotTableName.substring(0, separatorIdx) : "");
                final String tableName = schemaNameDotTableName.substring(separatorIdx + 1);
                TableSchema tableSchema = dbcpConnectionPoolTableInfo.describeTableForDatasource(datasource, schemaName, tableName);
                // One DatabaseMetadata entry per column of the table.
                return tableSchema.getFields().stream().map(field -> {
                    DefaultDatabaseMetadata meta = new DefaultDatabaseMetadata();
                    meta.setDatabaseName(schemaName);
                    meta.setColumnName(field.getName());
                    meta.setTableName(tableName);
                    return meta;
                });
            }).collect(Collectors.toList());
        }
        return Response.ok(tables).build();
    }, MetadataAccess.SERVICE);
}
Aggregations