Use of com.thinkbiganalytics.hive.exceptions.ThriftConnectionException in project kylo by Teradata.
The class DataSourceController, method createJdbcTableDataSet.
/**
* Gets the schema of the specified table using the specified data source.
*
* @param dataSourceId the data source id
* @param tableName the table name
* @param schema the schema name, or {@code null} to search all schemas
* @return the table and field details
*/
@POST
@Path("{id}/tables/{tableName}/dataset")
@Produces(MediaType.APPLICATION_JSON)
@ApiOperation(value = "Gets the schema of the specified table.", notes = "Connects to the database specified by the data source.")
@ApiResponses({ @ApiResponse(code = 200, message = "Returns the table schema.", response = DataSetWithTableSchema.class), @ApiResponse(code = 403, message = "Access denied.", response = RestResponseStatus.class), @ApiResponse(code = 404, message = "A JDBC data source with that id does not exist.", response = RestResponseStatus.class), @ApiResponse(code = 500, message = "NiFi or the database are unavailable.", response = RestResponseStatus.class) })
public Response createJdbcTableDataSet(@PathParam("id") final String dataSourceId, @PathParam("tableName") final String tableName, @QueryParam("schema") final String schema) {
    // TODO Verify user has access to data source
    // Require admin permission if the results should include unencrypted credentials.
    final boolean encryptCredentials = true;
    accessController.checkPermission(AccessController.SERVICES, encryptCredentials ? FeedServicesAccessControl.ACCESS_DATASOURCES : FeedServicesAccessControl.ADMIN_DATASOURCES);
    DataSetWithTableSchema dataSetWithTableSchema = null;
    boolean hasAccess = false;
    try {
        // ensure the user can read the data source
        DataSource encryptedSource = metadataService.read(() -> {
            return findDataSource(dataSourceId, true);
        });
        hasAccess = encryptedSource != null;
        if (hasAccess) {
            // fetch the data source as a service account to get the credentials
            DataSource dataSource = metadataService.read(() -> {
                return findDataSource(dataSourceId, false);
            }, MetadataAccess.SERVICE);
            // describe the data source and table
            CatalogTableSchema tableSchema = tableManager.describeTable(dataSource, schema, tableName);
            if (tableSchema != null) {
                DataSet dataSet = new DataSet();
                // assign the data source to this data set with encrypted credentials
                dataSet.setDataSource(encryptedSource);
                String fullTableName = (tableSchema.getTable() != null) ? tableSchema.getTable().getQualifiedIdentifier() : HiveUtils.quoteIdentifier(tableSchema.getSchemaName(), tableSchema.getName());
                dataSet.setTitle(tableSchema.getSchemaName() + "." + tableSchema.getName());
                DefaultDataSetTemplate defaultDataSetTemplate = DataSetUtil.mergeTemplates(dataSet);
                List<String> paths = defaultDataSetTemplate.getPaths();
                String format = defaultDataSetTemplate.getFormat();
                Map<String, String> options = defaultDataSetTemplate.getOptions();
                if (options == null) {
                    options = new HashMap<>();
                }
                if ("hive".equalsIgnoreCase(format)) {
                    if (paths == null) {
                        paths = new ArrayList<>();
                    }
                    paths.add(fullTableName);
                }
                options.put("dbtable", fullTableName);
                dataSet.setFormat(format);
                dataSet.setPaths(paths);
                dataSet.setOptions(options);
                DataSet dataSet1 = dataSetService.findOrCreateDataSet(dataSet, encryptCredentials);
                dataSetWithTableSchema = new DataSetWithTableSchema(dataSet1, tableSchema);
            } else {
                if (log.isErrorEnabled()) {
                    log.error("Failed to describe table for schema [" + schema + "], table [" + tableName + "], dataSource [" + dataSourceId + "]");
                }
                final RestResponseStatus status = new RestResponseStatus.ResponseStatusBuilder().message(getMessage("catalog.datasource.describeTable.error", tableName, schema)).url(request.getRequestURI()).buildError();
                throw new InternalServerErrorException(Response.serverError().entity(status).build());
            }
        } else {
            // no access
            final RestResponseStatus status = new RestResponseStatus.ResponseStatusBuilder().message(getMessage("catalog.datasource.forbidden")).url(request.getRequestURI()).buildError();
            throw new InternalServerErrorException(Response.serverError().entity(status).build());
        }
    } catch (Exception e) {
        if (exceptionTransformer.causesInChain(e)) {
            throw new ThriftConnectionException(e);
        }
        final RestResponseStatus status = new RestResponseStatus.ResponseStatusBuilder().message(getMessage("catalog.datasource.describeTable.error", tableName, schema)).url(request.getRequestURI()).buildError();
        throw new InternalServerErrorException(Response.serverError().entity(status).build());
    }
    return Response.ok(dataSetWithTableSchema).build();
}
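The options assembled above line up with what a Spark reader expects for a JDBC source: a "dbtable" entry naming the qualified table, plus the template's format and paths. A minimal sketch of how such a data set would typically be consumed, assuming a standard Spark JDBC read; the URL, driver class, and method name here are illustrative placeholders, not taken from kylo:

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

public class JdbcDataSetReadSketch {

    public static Dataset<Row> readJdbcTable(SparkSession spark, String jdbcUrl, String fullTableName) {
        // "dbtable" is the standard Spark JDBC option and matches the option stored on the DataSet above.
        return spark.read()
            .format("jdbc")
            .option("url", jdbcUrl)                     // e.g. jdbc:postgresql://host:5432/db (placeholder)
            .option("dbtable", fullTableName)           // qualified table name, as built by createJdbcTableDataSet
            .option("driver", "org.postgresql.Driver")  // illustrative driver class
            .load();
    }
}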
Use of com.thinkbiganalytics.hive.exceptions.ThriftConnectionException in project kylo by Teradata.
The class DataSourceController, method listCredentials.
@GET
@Path("{id}/credentials")
@ApiOperation("List credentials for a data source")
@ApiResponses({ @ApiResponse(code = 200, message = "List of credentials", response = DataSetTable.class, responseContainer = "List"), @ApiResponse(code = 404, message = "Data source does not exist", response = RestResponseStatus.class), @ApiResponse(code = 500, message = "Failed to list credentials", response = RestResponseStatus.class) })
public Response listCredentials(@PathParam("id") final String dataSourceId) {
    final boolean encrypted = true;
    log.entry(dataSourceId, encrypted);
    log.debug("List credentials for datasource:{} encrypted:{}", dataSourceId, encrypted);
    try {
        final DataSource dataSource = findDataSource(dataSourceId, true);
        final Set<Principal> principals = SecurityContextUtil.getCurrentPrincipals();
        final Map<String, String> credProps = this.credentialManager.getCredentials(dataSource, encrypted, principals);
        DataSourceCredentials credentials = new DataSourceCredentials(credProps, encrypted);
        return Response.ok(log.exit(credentials)).build();
    } catch (final Exception e) {
        if (exceptionTransformer.causesInChain(e)) {
            throw new ThriftConnectionException(e);
        }
        log.error("Failed to retrieve credentials for datasource [{}] encrypted={}", dataSourceId, encrypted, e);
        final RestResponseStatus status = new RestResponseStatus.ResponseStatusBuilder().message(getMessage("catalog.datasource.credentials.error", dataSourceId)).url(request.getRequestURI()).setDeveloperMessage(e).buildError();
        throw new InternalServerErrorException(Response.serverError().entity(status).build());
    }
}
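Note that the controller rethrows ThriftConnectionException instead of wrapping it in an InternalServerErrorException, which suggests the exception is translated to an HTTP response further down the JAX-RS pipeline. A minimal sketch of how such a provider could look, assuming a standard ExceptionMapper; this is illustrative and not kylo's actual mapper:

import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.ext.ExceptionMapper;
import javax.ws.rs.ext.Provider;

import com.thinkbiganalytics.hive.exceptions.ThriftConnectionException;

@Provider
public class ThriftConnectionExceptionMapper implements ExceptionMapper<ThriftConnectionException> {

    @Override
    public Response toResponse(ThriftConnectionException exception) {
        // 503 signals that the Hive Thrift endpoint is unreachable rather than a bad request.
        return Response.status(Response.Status.SERVICE_UNAVAILABLE)
            .type(MediaType.APPLICATION_JSON)
            .entity("{\"message\":\"Hive Thrift connection unavailable\"}")
            .build();
    }
}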
Use of com.thinkbiganalytics.hive.exceptions.ThriftConnectionException in project kylo by Teradata.
The class DataSourceController, method doListFiles.
private List<DataSetFile> doListFiles(@QueryParam("path") String path, DataSource dataSource) {
    final List<DataSetFile> files;
    try {
        log.debug("Listing files at path: {}", path);
        files = fileManager.listFiles(path, dataSource);
    } catch (final AccessDeniedException e) {
        log.debug("Access denied accessing path: {}: {}", path, e, e);
        throw new ForbiddenException(getMessage("catalog.datasource.listFiles.forbidden", path));
    } catch (final CatalogException e) {
        log.debug("Catalog exception when accessing path: {}: {}", path, e, e);
        throw new BadRequestException(getMessage(e));
    } catch (final Exception e) {
        if (exceptionTransformer.causesInChain(e)) {
            throw new ThriftConnectionException(e);
        }
        if (log.isErrorEnabled()) {
            log.error("Failed to list data source files at path " + path + ": " + e, e);
        }
        final RestResponseStatus status = new RestResponseStatus.ResponseStatusBuilder().message(getMessage("catalog.datasource.listFiles.error", path)).url(request.getRequestURI()).setDeveloperMessage(e).buildError();
        throw new InternalServerErrorException(Response.serverError().entity(status).build());
    }
    return files;
}
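Every catch-all block in this controller first asks exceptionTransformer.causesInChain(e) before falling back to a generic 500 response, so a Thrift connection failure buried several causes deep is still surfaced as a ThriftConnectionException. A minimal sketch of that kind of cause-chain test, assuming the transformer simply looks for a configured exception type anywhere in the chain; this is illustrative only, and kylo's ExceptionTransformer may work differently:

import org.apache.thrift.transport.TTransportException;

public final class CauseChainSketch {

    private CauseChainSketch() {
    }

    /**
     * Returns true if any throwable in the cause chain is (or extends) the given type.
     */
    public static boolean causesInChain(Throwable error, Class<? extends Throwable> type) {
        for (Throwable cause = error; cause != null; cause = cause.getCause()) {
            if (type.isInstance(cause)) {
                return true;
            }
            // guard against cycles where a throwable is its own cause
            if (cause.getCause() == cause) {
                break;
            }
        }
        return false;
    }

    public static boolean isThriftConnectionFailure(Throwable error) {
        // Treating TTransportException as the trigger is an assumption for this sketch.
        return causesInChain(error, TTransportException.class);
    }
}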
Use of com.thinkbiganalytics.hive.exceptions.ThriftConnectionException in project kylo by Teradata.
The class DataSourceController, method doListTables.
private List<DataSetTable> doListTables(@QueryParam("catalog") String catalogName, @QueryParam("schema") String schemaName, DataSource dataSource) {
    final List<DataSetTable> tables;
    try {
        log.debug("List tables for catalog:{} schema:{}", catalogName, schemaName);
        tables = tableManager.listCatalogsOrTables(dataSource, catalogName, schemaName);
    } catch (final Exception e) {
        if (exceptionTransformer.causesInChain(e)) {
            throw new ThriftConnectionException(e);
        }
        if (log.isErrorEnabled()) {
            log.error("Failed to list tables for catalog [" + catalogName + "] schema [" + schemaName + "]: " + e, e);
        }
        final RestResponseStatus status = new RestResponseStatus.ResponseStatusBuilder().message(getMessage("catalog.datasource.listTables.error", catalogName, schemaName)).url(request.getRequestURI()).setDeveloperMessage(e).buildError();
        throw new InternalServerErrorException(Response.serverError().entity(status).build());
    }
    return tables;
}
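tableManager.listCatalogsOrTables hides the actual lookup, but for a JDBC data source this kind of listing is typically built from DatabaseMetaData. A minimal sketch under that assumption; the connection handling, class name, and result mapping are illustrative and not kylo's TableManager implementation:

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;

public final class JdbcTableListingSketch {

    private JdbcTableListingSketch() {
    }

    /**
     * Lists "catalog.schema.table" names visible through the connection's metadata.
     */
    public static List<String> listTables(Connection connection, String catalog, String schemaPattern) throws SQLException {
        final List<String> tables = new ArrayList<>();
        final DatabaseMetaData metaData = connection.getMetaData();
        try (ResultSet rs = metaData.getTables(catalog, schemaPattern, "%", new String[]{"TABLE", "VIEW"})) {
            while (rs.next()) {
                // column names follow the JDBC DatabaseMetaData.getTables contract
                tables.add(rs.getString("TABLE_CAT") + "." + rs.getString("TABLE_SCHEM") + "." + rs.getString("TABLE_NAME"));
            }
        }
        return tables;
    }
}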