Use of com.thinkbiganalytics.metadata.api.datasource.DatasourceProvider in the Kylo project by Teradata.
Class: DatasourceController; method: previewQuery.
/**
 * Executes a preview query against the specified table of a JDBC data source.
 *
 * @param idStr     the datasource id
 * @param schema    the schema containing the table
 * @param tableName the table to preview
 * @param limit     the maximum number of rows to return (defaults to 10)
 * @return the SQL result
 * @throws NotFoundException if no JDBC data source exists with the given id
 */
@POST
@Path("{id}/preview/{schema}/{table}")
@Produces(MediaType.APPLICATION_JSON)
@ApiOperation("Executes a preview query appropriate for the type of datasource and returns the result.")
@ApiResponses({ @ApiResponse(code = 200, message = "Returns the result.", response = QueryResult.class), @ApiResponse(code = 403, message = "Access denied.", response = RestResponseStatus.class), @ApiResponse(code = 404, message = "A JDBC data source with that id does not exist.", response = RestResponseStatus.class), @ApiResponse(code = 500, message = "NiFi or the database are unavailable.", response = RestResponseStatus.class) })
public Response previewQuery(@PathParam("id") final String idStr, @PathParam("schema") final String schema, @PathParam("table") final String tableName, @QueryParam("limit") @DefaultValue("10") final int limit) {
    // First read runs as the requesting user: enforce datasource access and resolve the id.
    final Optional<com.thinkbiganalytics.metadata.api.datasource.Datasource.ID> id = metadata.read(() -> {
        accessController.checkPermission(AccessController.SERVICES, FeedServicesAccessControl.ACCESS_DATASOURCES);
        final com.thinkbiganalytics.metadata.api.datasource.Datasource datasource = datasourceProvider.getDatasource(datasourceProvider.resolve(idStr));
        return Optional.ofNullable(datasource).map(com.thinkbiganalytics.metadata.api.datasource.Datasource::getId);
    });
    // Second read runs as the system user so connection credentials (ADMIN level) are available.
    return metadata.read(() -> {
        final QueryResult result = id.map(datasourceProvider::getDatasource)
            .map(ds -> datasourceTransform.toDatasource(ds, DatasourceModelTransform.Level.ADMIN))
            .filter(JdbcDatasource.class::isInstance)
            .map(JdbcDatasource.class::cast)
            .map(datasource -> dbcpConnectionPoolTableInfo.executePreviewQueryForDatasource(datasource, schema, tableName, limit))
            .orElseThrow(() -> new NotFoundException("No JDBC datasource exists with the given ID: " + idStr));
        return Response.ok(result).build();
    }, MetadataAccess.SERVICE);
}
Use of com.thinkbiganalytics.metadata.api.datasource.DatasourceProvider in the Kylo project by Teradata.
Class: DatasourceController; method: describeTable.
/**
 * Gets the schema of the specified table using the specified data source.
 *
 * @param idStr     the data source id
 * @param tableName the table name
 * @param schema    the schema name, or {@code null} to search all schemas
 * @return the table and field details
 * @throws NotFoundException if no JDBC data source exists with the given id
 */
@GET
@Path("{id}/tables/{tableName}")
@Produces(MediaType.APPLICATION_JSON)
@ApiOperation(value = "Gets the schema of the specified table.", notes = "Connects to the database specified by the data source.")
@ApiResponses({ @ApiResponse(code = 200, message = "Returns the table schema.", response = TableSchema.class), @ApiResponse(code = 403, message = "Access denied.", response = RestResponseStatus.class), @ApiResponse(code = 404, message = "A JDBC data source with that id does not exist.", response = RestResponseStatus.class), @ApiResponse(code = 500, message = "NiFi or the database are unavailable.", response = RestResponseStatus.class) })
public Response describeTable(@PathParam("id") final String idStr, @PathParam("tableName") final String tableName, @QueryParam("schema") final String schema) {
    // Pass 1 (as the caller): enforce the datasource-access permission and resolve the id.
    final Optional<com.thinkbiganalytics.metadata.api.datasource.Datasource.ID> datasourceId = metadata.read(() -> {
        accessController.checkPermission(AccessController.SERVICES, FeedServicesAccessControl.ACCESS_DATASOURCES);
        final com.thinkbiganalytics.metadata.api.datasource.Datasource found = datasourceProvider.getDatasource(datasourceProvider.resolve(idStr));
        return Optional.ofNullable(found).map(com.thinkbiganalytics.metadata.api.datasource.Datasource::getId);
    });
    // Pass 2 (as the system user): load the JDBC datasource at ADMIN level and describe the table.
    return metadata.read(() -> {
        final TableSchema tableSchema = datasourceId
            .map(datasourceProvider::getDatasource)
            .map(ds -> datasourceTransform.toDatasource(ds, DatasourceModelTransform.Level.ADMIN))
            .filter(JdbcDatasource.class::isInstance)
            .map(JdbcDatasource.class::cast)
            .map(jdbcDatasource -> dbcpConnectionPoolTableInfo.describeTableForDatasource(jdbcDatasource, schema, tableName))
            .orElseThrow(() -> new NotFoundException("No JDBC datasource exists with the given ID: " + idStr));
        return Response.ok(tableSchema).build();
    }, MetadataAccess.SERVICE);
}
Use of com.thinkbiganalytics.metadata.api.datasource.DatasourceProvider in the Kylo project by Teradata.
Class: DatasourceController; method: getTablesAndColumns.
/**
 * Gets the tables and their columns from the specified data source for the given schema.
 *
 * @param idStr  the data source id
 * @param schema the schema name, or {@code null} for all schemas
 * @return the list of tables and their columns
 * @throws NotFoundException if no JDBC data source exists with the given id
 */
@GET
@Path("{id}/table-columns")
@Produces(MediaType.APPLICATION_JSON)
@ApiOperation(value = "Gets the tables and their columns from the data source for given schema", notes = "Connects to the database specified by the data source.")
@ApiResponses({ @ApiResponse(code = 200, message = "Returns tables and columns", response = String.class, responseContainer = "List"), @ApiResponse(code = 403, message = "Access denied.", response = RestResponseStatus.class), @ApiResponse(code = 404, message = "A JDBC data source with that id does not exist.", response = RestResponseStatus.class), @ApiResponse(code = 500, message = "NiFi or the database are unavailable.", response = RestResponseStatus.class) })
public Response getTablesAndColumns(@PathParam("id") final String idStr, @QueryParam("schema") final String schema) {
    // First read runs as the requesting user: enforce datasource access and resolve the id.
    final Optional<com.thinkbiganalytics.metadata.api.datasource.Datasource.ID> id = metadata.read(() -> {
        accessController.checkPermission(AccessController.SERVICES, FeedServicesAccessControl.ACCESS_DATASOURCES);
        final com.thinkbiganalytics.metadata.api.datasource.Datasource datasource = datasourceProvider.getDatasource(datasourceProvider.resolve(idStr));
        return Optional.ofNullable(datasource).map(com.thinkbiganalytics.metadata.api.datasource.Datasource::getId);
    });
    // Second read runs as the system user so connection credentials (ADMIN level) are available.
    return metadata.read(() -> {
        JdbcDatasource datasource = id.map(datasourceProvider::getDatasource)
            .map(ds -> datasourceTransform.toDatasource(ds, DatasourceModelTransform.Level.ADMIN))
            .filter(JdbcDatasource.class::isInstance)
            .map(JdbcDatasource.class::cast)
            .orElseThrow(() -> new NotFoundException("No JDBC datasource exists with the given ID: " + idStr));
        List<String> tableNamesForDatasource = dbcpConnectionPoolTableInfo.getTableNamesForDatasource(datasource, schema, null);
        List<DatabaseMetadata> tables = new ArrayList<>();
        if (tableNamesForDatasource != null) {
            // Each entry is "<schemaName>.<tableName>"; expand each table into one row per column.
            tables = tableNamesForDatasource.stream().flatMap(schemaNameDotTableName -> {
                // Split the dotted name. When a schema was supplied, strip exactly that prefix
                // (robust even if the schema name itself contains a dot). When schema is null
                // (all schemas), fall back to splitting at the first dot — the original code
                // would have thrown a NullPointerException here despite the documented contract.
                final String schemaName;
                final String tableName;
                if (schema != null) {
                    schemaName = schema;
                    tableName = schemaNameDotTableName.substring(schema.length() + 1);
                } else {
                    final int dotIndex = schemaNameDotTableName.indexOf('.');
                    schemaName = (dotIndex > 0) ? schemaNameDotTableName.substring(0, dotIndex) : null;
                    tableName = schemaNameDotTableName.substring(dotIndex + 1);
                }
                TableSchema tableSchema = dbcpConnectionPoolTableInfo.describeTableForDatasource(datasource, schemaName, tableName);
                return tableSchema.getFields().stream().map(field -> {
                    DefaultDatabaseMetadata meta = new DefaultDatabaseMetadata();
                    meta.setDatabaseName(schemaName);
                    meta.setColumnName(field.getName());
                    meta.setTableName(tableName);
                    return meta;
                });
            }).collect(Collectors.toList());
        }
        return Response.ok(tables).build();
    }, MetadataAccess.SERVICE);
}
Use of com.thinkbiganalytics.metadata.api.datasource.DatasourceProvider in the Kylo project by Teradata.
Class: DerivedDatasourceFactory; method: ensureDataTransformationSourceDatasources.
/**
 * Builds the list of data sources for the specified data transformation feed.
 *
 * @param feed the feed
 * @return the list of data sources
 * @throws NullPointerException if the feed has no data transformation
 */
@Nonnull
private Set<Datasource.ID> ensureDataTransformationSourceDatasources(@Nonnull final FeedMetadata feed) {
    // Ids of catalog data sets referenced by the feed; used later to exclude them
    // from the legacy datasource-id list.
    final Set<String> dataSetIds = new HashSet<>();
    final Set<Datasource.ID> datasources = new HashSet<>();
    if (feed.getSourceDataSets() != null) {
        List<String> datasetIds = feed.getSourceDataSets().stream().map(ds -> ds.getId()).collect(Collectors.toList());
        dataSetIds.addAll(datasetIds);
    }
    // NOTE: getDataTransformation() is dereferenced without a null check here — this is
    // the documented NullPointerException for feeds with no data transformation.
    final List<String> catalogSources = feed.getDataTransformation().getCatalogDataSourceIds() != null ? feed.getDataTransformation().getCatalogDataSourceIds() : new ArrayList<>();
    // Extract nodes in chart view model
    @SuppressWarnings("unchecked") final Stream<Map<String, Object>> nodes = Optional.ofNullable(feed.getDataTransformation().getChartViewModel()).map(model -> (List<Map<String, Object>>) model.get("nodes")).map(Collection::stream).orElse(Stream.empty());
    // Create a data source for each node
    final DatasourceDefinition hiveDefinition = datasourceDefinitionProvider.findByProcessorType(DATA_TRANSFORMATION_HIVE_DEFINITION);
    final DatasourceDefinition jdbcDefinition = datasourceDefinitionProvider.findByProcessorType(DATA_TRANSFORMATION_JDBC_DEFINITION);
    nodes.forEach(node -> {
        // Filter data sets: a node whose datasourceId is a real catalog data set (not null,
        // not the literal "HIVE", and not matching a user data source) is recorded in
        // dataSetIds and produces no derived datasource.
        if (!StringUtils.equalsAnyIgnoreCase((String) node.get("datasourceId"), null, "HIVE") && node.get("dataset") != null && !Objects.equals(node.get("datasetMatchesUserDataSource"), Boolean.TRUE)) {
            dataSetIds.add((String) node.get("datasourceId"));
            return;
        }
        // Extract properties from node
        DatasourceDefinition datasourceDefinition = null;
        final Map<String, String> properties = new HashMap<>();
        String userDatasourceId = (String) node.get("datasourceId");
        if ((userDatasourceId == null && node.get("dataset") == null) || (userDatasourceId != null && userDatasourceId.equalsIgnoreCase("HIVE"))) {
            // Hive node: the node name is "<schema>.<table>"; split it into the
            // schema/table properties the identity-string template expects.
            final String name = (String) node.get("name");
            datasourceDefinition = hiveDefinition;
            properties.put(HIVE_SCHEMA_KEY, StringUtils.trim(StringUtils.substringBefore(name, ".")));
            properties.put(HIVE_TABLE_KEY, StringUtils.trim(StringUtils.substringAfterLast(name, ".")));
        } else if (userDatasourceId != null) {
            // JDBC node backed by a user-defined datasource; silently skipped if the
            // referenced datasource no longer exists (datasourceDefinition stays null).
            final Datasource datasource = datasourceProvider.getDatasource(datasourceProvider.resolve(userDatasourceId));
            if (datasource != null) {
                datasourceDefinition = jdbcDefinition;
                properties.put(JDBC_CONNECTION_KEY, datasource.getName());
                properties.put(JDBC_TABLE_KEY, (String) node.get("name"));
                properties.putAll(parseDataTransformControllerServiceProperties(datasourceDefinition, datasource.getName()));
            }
        }
        if (datasourceDefinition != null) {
            // Create the derived data source: resolve the templated identity string,
            // title, and description against the node properties gathered above.
            final String identityString = propertyExpressionResolver.resolveVariables(datasourceDefinition.getIdentityString(), properties);
            final String title = datasourceDefinition.getTitle() != null ? propertyExpressionResolver.resolveVariables(datasourceDefinition.getTitle(), properties) : identityString;
            final String desc = propertyExpressionResolver.resolveVariables(datasourceDefinition.getDescription(), properties);
            // ensureDerivedDatasource creates the derived datasource if absent, else returns the existing one.
            final DerivedDatasource datasource = datasourceProvider.ensureDerivedDatasource(datasourceDefinition.getDatasourceType(), identityString, title, desc, new HashMap<>(properties));
            datasources.add(datasource.getId());
        }
    });
    // Build the data sources from the data source ids — but only when no catalog data
    // sets were found at all; legacy datasource ids are ignored otherwise.
    if (dataSetIds.isEmpty()) {
        final List<String> datasourceIds = Optional.ofNullable(feed.getDataTransformation()).map(FeedDataTransformation::getDatasourceIds).orElse(Collections.emptyList());
        // NOTE(review): the dataSetIds filter is vacuously true inside this isEmpty() branch;
        // kept as-is to preserve behavior.
        datasourceIds.stream().filter(id -> !dataSetIds.contains(id) && !catalogSources.contains(id)).map(datasourceProvider::resolve).forEach(datasources::add);
    }
    return datasources;
}
Use of com.thinkbiganalytics.metadata.api.datasource.DatasourceProvider in the Kylo project by Teradata.
Class: DatasourceController; method: getTableNames.
/**
 * Gets the table names from the specified data source.
 *
 * @param idStr     the data source id
 * @param schema    the schema name, or {@code null} for all schemas
 * @param tableName a table-name filter
 * @return the list of table names
 * @throws NotFoundException if no JDBC data source exists with the given id
 */
@GET
@Path("{id}/tables")
@Produces(MediaType.APPLICATION_JSON)
@ApiOperation(value = "Gets the table names from the data source.", notes = "Connects to the database specified by the data source.")
@ApiResponses({ @ApiResponse(code = 200, message = "Returns the table names.", response = String.class, responseContainer = "List"), @ApiResponse(code = 403, message = "Access denied.", response = RestResponseStatus.class), @ApiResponse(code = 404, message = "A JDBC data source with that id does not exist.", response = RestResponseStatus.class), @ApiResponse(code = 500, message = "NiFi or the database are unavailable.", response = RestResponseStatus.class) })
public Response getTableNames(@PathParam("id") final String idStr, @QueryParam("schema") final String schema, @QueryParam("tableName") final String tableName) {
    // Pass 1 (as the caller): enforce the datasource-access permission and resolve the id.
    final Optional<com.thinkbiganalytics.metadata.api.datasource.Datasource.ID> datasourceId = metadata.read(() -> {
        accessController.checkPermission(AccessController.SERVICES, FeedServicesAccessControl.ACCESS_DATASOURCES);
        final com.thinkbiganalytics.metadata.api.datasource.Datasource found = datasourceProvider.getDatasource(datasourceProvider.resolve(idStr));
        return Optional.ofNullable(found).map(com.thinkbiganalytics.metadata.api.datasource.Datasource::getId);
    });
    // Pass 2 (as the system user): load the JDBC datasource at ADMIN level and list its tables.
    return metadata.read(() -> {
        final List<String> tableNames = datasourceId
            .map(datasourceProvider::getDatasource)
            .map(ds -> datasourceTransform.toDatasource(ds, DatasourceModelTransform.Level.ADMIN))
            .filter(JdbcDatasource.class::isInstance)
            .map(JdbcDatasource.class::cast)
            .map(jdbcDatasource -> dbcpConnectionPoolTableInfo.getTableNamesForDatasource(jdbcDatasource, schema, tableName))
            .orElseThrow(() -> new NotFoundException("No JDBC datasource exists with the given ID: " + idStr));
        return Response.ok(tableNames).build();
    }, MetadataAccess.SERVICE);
}
Aggregations