Example usage of com.thinkbiganalytics.spark.model.TransformResult in project Kylo by Teradata.
From the class TransformService, method submitTransformJob.
/**
 * Submits the specified task to be executed and returns the result.
 *
 * <p>The task is wrapped with optional validation and profiling stages, then submitted to the
 * job tracker. If the job finishes within 500 ms the completed response is returned and the job
 * is removed from the tracker; otherwise a {@code PENDING} response referencing the destination
 * table is returned so the caller can poll for the result later.</p>
 *
 * @param task     the transformation task to execute
 * @param policies field-level validation policies, or {@code null}/empty to skip validation
 * @return the completed transformation response, or a {@code PENDING} placeholder
 * @throws ScriptException if the script execution fails
 */
@Nonnull
private TransformResponse submitTransformJob(@Nonnull final Supplier<TransformResult> task, @Nullable final FieldPolicy[] policies) throws ScriptException {
    log.entry(task, policies);

    // Prepare script: compose optional validation and profiling stages around the task
    Supplier<TransformResult> result = task;
    if (policies != null && policies.length > 0 && validator != null) {
        result = Suppliers.compose(new ValidationStage(policies, validator), result);
    }
    if (profiler != null) {
        result = Suppliers.compose(new ProfileStage(profiler), result);
    }

    // Execute script asynchronously under a freshly-named destination table
    final String table = newTableName();
    final TransformJob job = new TransformJob(table, Suppliers.compose(new ResponseStage(table), result), engine.getSparkContext());
    tracker.submitJob(job);

    // Build response
    TransformResponse response;
    try {
        response = job.get(500, TimeUnit.MILLISECONDS);
        tracker.removeJob(table);
    } catch (final ExecutionException cause) {
        throw log.throwing(new ScriptException(cause));
    } catch (final InterruptedException e) {
        // Restore the interrupt status so callers further up the stack can observe it;
        // the original multi-catch silently swallowed the interruption.
        Thread.currentThread().interrupt();
        log.trace("Interrupted waiting for script result", e);
        response = newPendingResponse(table);
    } catch (final TimeoutException e) {
        log.trace("Timeout waiting for script result", e);
        response = newPendingResponse(table);
    }
    return log.exit(response);
}

/**
 * Builds a {@code PENDING} response referencing the destination table of a still-running job.
 *
 * @param table the destination table name the caller can poll against
 * @return a new response with zero progress and {@code PENDING} status
 */
@Nonnull
private TransformResponse newPendingResponse(@Nonnull final String table) {
    final TransformResponse response = new TransformResponse();
    response.setProgress(0.0);
    response.setStatus(TransformResponse.Status.PENDING);
    response.setTable(table);
    return response;
}
Example usage of com.thinkbiganalytics.spark.model.TransformResult in project Kylo by Teradata.
From the class TransformService, an overload of method submitTransformJob.
/**
 * Submits the specified task to be executed and returns the result.
 *
 * <p>Validation and profiling stages are layered onto the task only when the request asks for
 * them and the corresponding collaborator is available, then the composed pipeline is handed to
 * the paging-aware overload.</p>
 *
 * @param task    the transformation task to execute
 * @param request the transform request carrying validation/profiling flags and paging options
 * @return the transformation response
 * @throws ScriptException if the script execution fails
 */
@Nonnull
private TransformResponse submitTransformJob(final Supplier<TransformResult> task, @Nonnull final TransformRequest request) throws ScriptException {
    final FieldPolicy[] policies = getPolicies(request);
    final PageSpec pageSpec = request.getPageSpec();
    log.entry(task, policies);

    // Compose the optional stages around the task, innermost first
    Supplier<TransformResult> pipeline = task;

    final boolean validationRequested = request.isDoValidate() && policies != null && policies.length > 0 && validator != null;
    if (validationRequested) {
        pipeline = Suppliers.compose(new ValidationStage(policies, validator), pipeline);
    }

    final boolean profilingRequested = request.isDoProfile() && profiler != null;
    if (profilingRequested) {
        pipeline = Suppliers.compose(new ProfileStage(profiler), pipeline);
    }

    return submitTransformJob(pipeline, pageSpec);
}
Example usage of com.thinkbiganalytics.spark.model.TransformResult in project Kylo by Teradata.
From the class ShellTransformStage, method get.
/**
 * Produces the transform result for this stage.
 *
 * <p>The underlying data set is persisted in memory so later stages do not recompute it, and
 * the result columns are derived from the persisted data set's schema.</p>
 *
 * @return a new transform result holding the cached data set and its column descriptions
 */
@Override
public TransformResult get() {
    final TransformResult result = new TransformResult();

    // Cache the data set in memory before anything downstream iterates it
    result.setDataSet(dataSet.persist(StorageLevel.MEMORY_ONLY()));

    // Build column metadata from the schema of the (now persisted) data set
    final QueryResultRowTransform rowTransform = new QueryResultRowTransform(result.getDataSet().schema(), "", converterService);
    final QueryResultColumn[] columns = rowTransform.columns();
    result.setColumns(Arrays.<QueryResultColumn>asList(columns));

    return result;
}
Example usage of com.thinkbiganalytics.spark.model.TransformResult in project Kylo by Teradata.
From the class SqlTransformStage, method extractData.
/**
 * Extracts the transformation result from the specified JDBC result set.
 *
 * <p>Only the result set's metadata is consumed here (to build the schema); the rows
 * themselves are materialized lazily through a Spark {@code JdbcRDD} that re-executes
 * the query against the data source.</p>
 *
 * @param rs the result set whose metadata describes the query's columns
 * @return the transform result holding the schema and a Spark data set over the query rows
 * @throws SQLException if the result set metadata cannot be read
 */
@Override
public TransformResult extractData(@Nonnull final ResultSet rs) throws SQLException {
    final ResultSetMetaData metaData = rs.getMetaData();
    final TransformResult result = new TransformResult();
    final StructType schema = extractSchema(metaData, result);
    // Create data set
    // Each Spark partition obtains its own connection on demand from the data source.
    final Function0<Connection> getConnection = ScalaUtil.wrap(Suppliers.compose(JdbcUtil.getDataSourceConnection(), dataSource));
    final Function1<ResultSet, Row> mapRow = ScalaUtil.wrap(new RowTransform());
    // noinspection RedundantCast,unchecked
    final ClassTag<Row> classTag = (ClassTag) ClassTag$.MODULE$.apply(Row.class);
    // JdbcRDD substitutes its partition bounds into the two '?' placeholders; with
    // lowerBound = upperBound = numPartitions = 1 the clause becomes "WHERE 1 = 1",
    // i.e. a no-op filter that reads the whole query in a single partition.
    // NOTE(review): 'sql' is concatenated into the statement text — presumably validated
    // upstream; confirm it cannot carry untrusted input (SQL injection risk otherwise).
    final RDD<Row> rdd = new JdbcRDD<Row>(sqlContext.sparkContext(), getConnection, "SELECT * FROM (" + sql + ") rdd WHERE ? = ?", 1, 1, 1, mapRow, classTag);
    result.setDataSet(sparkContextService.toDataSet(sqlContext, rdd.toJavaRDD(), schema));
    return result;
}
Aggregations