Example usage of com.thinkbiganalytics.spark.rest.model.JdbcDatasource in project kylo by Teradata: class AbstractDatasourceProvider, method getTableFromDatasource.
/**
 * Reads the specified table from the given data source.
 *
 * @param table      name of the table to read
 * @param datasource data source to read from; must be a {@link JdbcDatasource}
 * @param sqlContext Spark SQL context used for the read
 * @return the table contents
 * @throws IllegalArgumentException if the data source is not a JDBC data source
 */
@Nonnull
@Override
public final T getTableFromDatasource(@Nonnull final String table, @Nonnull final Datasource datasource, @Nonnull final SQLContext sqlContext) {
    // Only JDBC data sources expose named tables; reject anything else up front.
    if (!(datasource instanceof JdbcDatasource)) {
        throw new IllegalArgumentException("Datasource does not provide tables: " + datasource);
    }
    final JdbcDatasource jdbcDatasource = (JdbcDatasource) datasource;

    // Assemble JDBC connection properties; blank user/password entries are omitted.
    final Properties connectionProperties = new Properties();
    connectionProperties.put("driver", jdbcDatasource.getDatabaseDriverClassName());

    final String user = jdbcDatasource.getDatabaseUser();
    if (StringUtils.isNotBlank(user)) {
        connectionProperties.put("user", user);
    }

    final String password = jdbcDatasource.getPassword();
    if (StringUtils.isNotBlank(password)) {
        connectionProperties.put("password", password);
    }

    return readJdbcTable(jdbcDatasource.getDatabaseConnectionUrl(), table, connectionProperties, sqlContext);
}
Example usage of com.thinkbiganalytics.spark.rest.model.JdbcDatasource in project kylo by Teradata: class TransformService, method saveSql.
/**
 * Executes and saves a Spark SQL request.
 *
 * @param id   identifier of the transform whose SQL should be executed
 * @param save the save request describing the destination
 * @return the response for the submitted save job
 */
@Nonnull
public SaveResponse saveSql(@Nonnull final String id, @Nonnull final SaveRequest save) {
    log.entry(id, save);

    final TransformRequest transform = getTransformRequest(id);

    // Capture the transform's data source when it ran against exactly one JDBC source.
    final JdbcDatasource transformDatasource;
    if (transform.getDatasources() != null
        && transform.getDatasources().size() == 1
        && transform.getDatasources().get(0) instanceof JdbcDatasource) {
        transformDatasource = (JdbcDatasource) transform.getDatasources().get(0);
    } else {
        transformDatasource = null;
    }

    // Create task: when saving back into the same JDBC data source the transform ran
    // against, use the dedicated SQL save stage; otherwise run the transform and save
    // its result.
    final Supplier<SaveResult> task;
    final boolean saveToSameJdbc = transformDatasource != null
                                   && save.getJdbc() != null
                                   && Objects.equal(transformDatasource.getId(), save.getJdbc().getId());
    if (saveToSameJdbc) {
        Preconditions.checkArgument(save.getTableName() != null, "Missing target table name.");
        task = new SaveSqlStage(save.getTableName(), transform.getScript(), save.getJdbc());
    } else {
        task = createSaveTask(save, createSqlTask(transform));
    }

    // Submit job
    return log.exit(submitSaveJob(task));
}
Example usage of com.thinkbiganalytics.spark.rest.model.JdbcDatasource in project kylo by Teradata: class SparkShellProxyController, method resolveDatasources.
/**
 * Retrieves all details of the specified data sources.
 *
 * <p>Access is verified first; each data source is then resolved as the current user (failing
 * fast on unknown ids) and finally re-read as the service account at the ADMIN transform
 * level.</p>
 *
 * @param sources the data sources to resolve
 * @return the fully-detailed data sources
 * @throws BadRequestException if a data source does not exist or is not a supported JDBC type
 */
@Nonnull
private List<Datasource> resolveDatasources(@Nonnull final List<Datasource> sources) {
    // Verify access to data sources
    accessController.checkPermission(AccessController.SERVICES, FeedServicesAccessControl.ACCESS_DATASOURCES);

    // First pass (current user): resolve each id and ensure the data source exists.
    final List<com.thinkbiganalytics.metadata.api.datasource.Datasource.ID> datasourceIds = metadata.read(
        () -> sources.stream()
            .map(com.thinkbiganalytics.metadata.datasource.Datasource::getId)
            .map(datasourceProvider::resolve)
            .map(id -> {
                final com.thinkbiganalytics.metadata.api.datasource.Datasource found = datasourceProvider.getDatasource(id);
                if (found == null) {
                    throw new BadRequestException("No datasource exists with the given ID: " + id);
                }
                return found.getId();
            })
            .collect(Collectors.toList()));

    // Second pass (service account): load admin-level details and convert to the REST model.
    return metadata.read(
        () -> datasourceIds.stream()
            .map(datasourceProvider::getDatasource)
            .map(domain -> {
                if (!(domain instanceof com.thinkbiganalytics.metadata.api.datasource.UserDatasource)) {
                    throw new BadRequestException("Not a supported datasource: " + domain.getClass().getSimpleName() + " " + domain.getId());
                }
                return (com.thinkbiganalytics.metadata.datasource.Datasource) datasourceTransform.toDatasource(domain, DatasourceModelTransform.Level.ADMIN);
            })
            .map(details -> {
                if (!(details instanceof com.thinkbiganalytics.metadata.datasource.JdbcDatasource)) {
                    throw new BadRequestException("Not a supported datasource: " + details.getClass().getSimpleName());
                }
                return new JdbcDatasource((com.thinkbiganalytics.metadata.datasource.JdbcDatasource) details);
            })
            .collect(Collectors.toList()),
        MetadataAccess.SERVICE);
}
Aggregations