Use of com.thinkbiganalytics.kylo.catalog.rest.model.DataSource in project kylo by Teradata.
The class DataSourceController, method listCredentials.
/**
 * Lists the credentials for the specified data source, as visible to the current user.
 *
 * @param dataSourceId id of the data source whose credentials are requested
 * @return 200 with the (encrypted) credential properties, 404 if the data source does not
 *         exist, 500 if credential retrieval fails
 */
@GET
@Path("{id}/credentials")
@ApiOperation("List credentials for a data source")
@ApiResponses({ @ApiResponse(code = 200, message = "List of credentials", response = DataSetTable.class, responseContainer = "List"), @ApiResponse(code = 404, message = "Data source does not exist", response = RestResponseStatus.class), @ApiResponse(code = 500, message = "Failed to list credentials", response = RestResponseStatus.class) })
public Response listCredentials(@PathParam("id") final String dataSourceId) {
    // Credentials are always returned in encrypted form by this endpoint.
    final boolean encrypted = true;
    log.entry(dataSourceId, encrypted);
    // BUG FIX: message previously read "List tables for catalog:{} encrypted:{}" (wrong
    // endpoint text) and supplied only one argument for two placeholders; log both values.
    log.debug("Listing credentials for data source:{} encrypted:{}", dataSourceId, encrypted);
    try {
        final DataSource dataSource = findDataSource(dataSourceId, true);
        final Set<Principal> principals = SecurityContextUtil.getCurrentPrincipals();
        final Map<String, String> credProps = this.credentialManager.getCredentials(dataSource, encrypted, principals);
        final DataSourceCredentials credentials = new DataSourceCredentials(credProps, encrypted);
        return Response.ok(log.exit(credentials)).build();
    } catch (final Exception e) {
        // Thrift connectivity problems are surfaced as a dedicated exception type.
        if (exceptionTransformer.causesInChain(e)) {
            throw new ThriftConnectionException(e);
        }
        // BUG FIX: do not concatenate the exception into the format string; passing the
        // throwable as the last argument already logs its message and stack trace.
        log.error("Failed to retrieve credentials for datasource [{}] encrypted={}", dataSourceId, encrypted, e);
        final RestResponseStatus status = new RestResponseStatus.ResponseStatusBuilder().message(getMessage("catalog.datasource.credentials.error", dataSourceId)).url(request.getRequestURI()).setDeveloperMessage(e).buildError();
        throw new InternalServerErrorException(Response.serverError().entity(status).build());
    }
}
Use of com.thinkbiganalytics.kylo.catalog.rest.model.DataSource in project kylo by Teradata.
The class DataSourceController, method createDataSource.
// Creates a new catalog data source from the request body, or — if the entity already
// carries an id — delegates to updateDataSource() as a legacy convenience.
// Returns 200 with the created DataSource, 400 for an invalid connector/duplicate name,
// 409 when a conflicting NiFi controller service is detected, 500 on internal error.
@POST
@ApiOperation("Create a new data source")
@ApiResponses({ @ApiResponse(code = 200, message = "Data source created", response = DataSource.class), @ApiResponse(code = 400, message = "Invalid connector", response = RestResponseStatus.class), @ApiResponse(code = 500, message = "Internal server error", response = RestResponseStatus.class) })
@Consumes(MediaType.APPLICATION_JSON)
public Response createDataSource(@Nonnull final CreateDataSourceEntity source) {
log.entry(source);
// 'final' local: assigned exactly once on the create path; the update path returns early.
final DataSource dataSource;
try {
// TODO: Remove this check for the ID and force updates to use the PUT to updateDataSource() for a more typical REST API
if (StringUtils.isNotEmpty(source.getId())) {
// A non-empty id means the caller is really updating an existing data source.
return updateDataSource(source);
} else {
try {
dataSource = dataSourceService.createDataSource(source, source.isDetectSimilarNiFiControllerServices());
} catch (final CatalogException e) {
// Validation-style failures (e.g. bad connector) map to HTTP 400.
if (log.isDebugEnabled()) {
log.debug("Cannot create data source from request: " + source, e);
}
throw new BadRequestException(getMessage(e));
}
}
} catch (PotentialControllerServiceConflictException e) {
// A similar NiFi controller service exists; report 409 with the conflict details
// so the client can decide whether to reuse it or force creation.
throw new WebApplicationException(Response.status(Status.CONFLICT).entity(e.getControllerServiceConflictEntity()).build());
} catch (DataSourceAlreadyExistsException e) {
// Duplicate data source name maps to HTTP 400.
throw new BadRequestException(e.getMessage());
}
return Response.ok(log.exit(dataSource)).build();
}
Use of com.thinkbiganalytics.kylo.catalog.rest.model.DataSource in project kylo by Teradata.
The class DataSourceController, method listTables.
/**
 * Lists the tables of the specified data source, optionally filtered by catalog and schema.
 *
 * @param dataSourceId id of the data source to query
 * @param catalogName  optional catalog filter
 * @param schemaName   optional schema filter
 * @return 200 with the table list, 403 if the user cannot access the data source
 */
@GET
@Path("{id}/tables")
@ApiOperation("List tables in a data source")
@ApiResponses({ @ApiResponse(code = 200, message = "List of tables", response = DataSetTable.class, responseContainer = "List"), @ApiResponse(code = 404, message = "Data source does not exist", response = RestResponseStatus.class), @ApiResponse(code = 500, message = "Failed to list tables", response = RestResponseStatus.class) })
public Response listTables(@PathParam("id") final String dataSourceId, @QueryParam("catalog") final String catalogName, @QueryParam("schema") final String schemaName) {
    log.entry(dataSourceId, catalogName, schemaName);
    // First check, as the calling user, whether the data source is visible to them.
    final boolean accessible = metadataService.read(() -> findDataSource(dataSourceId, true) != null);
    if (!accessible) {
        log.debug("Access denied accessing datasource : {}", dataSourceId);
        throw new ForbiddenException(getMessage("catalog.datasource.forbidden"));
    }
    // Access confirmed: re-read as the service principal to produce the table listing.
    return metadataService.read(() -> {
        final DataSource dataSource = findDataSource(dataSourceId, false);
        final List<DataSetTable> tables = doListTables(catalogName, schemaName, dataSource);
        return Response.ok(log.exit(tables)).build();
    }, MetadataAccess.SERVICE);
}
Use of com.thinkbiganalytics.kylo.catalog.rest.model.DataSource in project kylo by Teradata.
The class TransformService, method createShellTask.
/**
 * Creates a new Spark shell transformation by evaluating the request's script with the
 * bindings (data source providers, data set providers) it requires.
 *
 * @param request the transformation request, possibly referencing legacy data sources,
 *                catalog data sources, and catalog data sets
 * @return the {@code DataSet} produced by the script
 * @throws ScriptException if a required provider factory is unavailable or the script fails
 */
@Nonnull
private DataSet createShellTask(@Nonnull final TransformRequest request) throws ScriptException {
    log.entry(request);
    // Build the list of named bindings visible to the evaluated script.
    final List<NamedParam> bindings = new ArrayList<>();
    bindings.add(new NamedParamClass("sparkContextService", SparkContextService.class.getName(), sparkContextService));
    if ((request.getDatasources() != null && !request.getDatasources().isEmpty()) || (request.getCatalogDataSources() != null && !request.getCatalogDataSources().isEmpty())) {
        if (datasourceProviderFactory != null) {
            List<Datasource> legacyDataSources = request.getDatasources() != null ? request.getDatasources() : new ArrayList<Datasource>();
            List<DataSource> catalogDataSources = request.getCatalogDataSources() != null ? request.getCatalogDataSources() : new ArrayList<DataSource>();
            final DatasourceProvider datasourceProvider = datasourceProviderFactory.getDatasourceProvider(legacyDataSources, catalogDataSources);
            bindings.add(new NamedParamClass("datasourceProvider", DatasourceProvider.class.getName() + "[org.apache.spark.sql.DataFrame]", datasourceProvider));
        } else {
            throw log.throwing(new ScriptException("Script cannot be executed because no data source provider factory is available."));
        }
    }
    if (request.getCatalogDatasets() != null && !request.getCatalogDatasets().isEmpty()) {
        if (catalogDataSetProviderFactory != null) {
            log.info("Creating new Shell task with {} data sets ", request.getCatalogDatasets().size());
            final CatalogDataSetProvider catalogDataSetProvider = catalogDataSetProviderFactory.getDataSetProvider(request.getCatalogDatasets());
            bindings.add(new NamedParamClass("catalogDataSetProvider", CatalogDataSetProvider.class.getName() + "[org.apache.spark.sql.DataFrame]", catalogDataSetProvider));
        } else {
            // BUG FIX: this message previously blamed the "data source provider factory"
            // (copy-paste from the branch above); the missing dependency here is the
            // catalog data set provider factory.
            throw log.throwing(new ScriptException("Script cannot be executed because no catalog data set provider factory is available."));
        }
    }
    // Ensure a valid Hive SessionState is bound to this thread before evaluating.
    if (SessionState.get() == null && sessionState != null) {
        SessionState.setCurrentSessionState(sessionState);
    }
    // Execute the generated transform script.
    final Object result;
    try {
        result = this.engine.eval(toTransformScript(request), bindings);
    } catch (final Exception cause) {
        throw log.throwing(new ScriptException(cause));
    }
    if (result instanceof DataSet) {
        return log.exit((DataSet) result);
    } else {
        throw log.throwing(new IllegalStateException("Unexpected script result type: " + (result != null ? result.getClass() : null)));
    }
}
Use of com.thinkbiganalytics.kylo.catalog.rest.model.DataSource in project kylo by Teradata.
The class DataSourceProvider, method toDataSource.
/**
 * Converts a feed data source to a REST data source.
 *
 * <p>Returns {@code null} when the input is {@code null}, when the transformed metadata
 * model is not a JDBC data source, or when no JDBC connector id is configured.</p>
 */
@Nullable
@SuppressWarnings("squid:S3655")
private DataSource toDataSource(@Nullable final Datasource feedDataSource, @Nonnull final DatasourceModelTransform.Level level) {
    // Nothing to convert.
    if (feedDataSource == null) {
        return null;
    }

    // Transform the feed data source into the metadata REST model first.
    final com.thinkbiganalytics.metadata.rest.model.data.Datasource metadataModel = feedDataSourceTransform.toDatasource(feedDataSource, level);

    // Only JDBC data sources with a configured JDBC connector can be represented.
    if (!(metadataModel instanceof JdbcDatasource) || !getJdbcConnectorId().isPresent()) {
        return null;
    }

    // Connector and template derived from the JDBC metadata.
    final Connector connector = new Connector();
    connector.setId(getJdbcConnectorId().get());
    final DefaultDataSetTemplate template = createTemplate((JdbcDatasource) metadataModel);

    // Assemble the REST data source.
    final DataSource converted = new DataSource();
    converted.setId(metadataModel.getId());
    converted.setTitle(metadataModel.getName());
    converted.setConnector(connector);
    converted.setTemplate(template);
    return converted;
}
Aggregations