Usage of com.thinkbiganalytics.spark.rest.model.TransformResponse in the kylo project by Teradata.
From class TransformService, method cacheTransform:
/**
 * Caches the given transformation request and reports immediate success.
 *
 * @param request the transformation to cache
 * @return a SUCCESS response referencing the generated table name
 */
@Nonnull
private TransformResponse cacheTransform(@Nonnull final TransformRequest request) {
    // Generate a unique table identifier and remember the request under it
    final String tableId = newTableName();
    transformCache.put(tableId, request);

    // The cached request is executed later on demand, so success is reported now
    final TransformResponse cachedResponse = new TransformResponse();
    cachedResponse.setStatus(TransformResponse.Status.SUCCESS);
    cachedResponse.setTable(tableId);
    return cachedResponse;
}
Usage of com.thinkbiganalytics.spark.rest.model.TransformResponse in the kylo project by Teradata.
From class TransformService, method submitTransformJob:
/**
 * Submits the specified task to be executed and returns the result.
 *
 * <p>The task is optionally wrapped with validation and profiling stages, then submitted
 * to the job tracker. If the job completes within a short wait the final response is
 * returned; otherwise a PENDING response referencing the job's table is returned so the
 * caller can poll for the result later.</p>
 *
 * @param task     supplies the transformation result
 * @param policies field-level validation policies, or {@code null}/empty to skip validation
 * @return the completed result, or a PENDING response if the job is still running
 * @throws ScriptException if executing the script fails
 */
@Nonnull
private TransformResponse submitTransformJob(@Nonnull final Supplier<TransformResult> task, @Nullable final FieldPolicy[] policies) throws ScriptException {
    log.entry(task, policies);

    // Prepare script: compose optional validation and profiling stages around the task
    Supplier<TransformResult> result = task;
    if (policies != null && policies.length > 0 && validator != null) {
        result = Suppliers.compose(new ValidationStage(policies, validator), result);
    }
    if (profiler != null) {
        result = Suppliers.compose(new ProfileStage(profiler), result);
    }

    // Execute script asynchronously via the job tracker
    final String table = newTableName();
    final TransformJob job = new TransformJob(table, Suppliers.compose(new ResponseStage(table), result), engine.getSparkContext());
    tracker.submitJob(job);

    // Build response: wait briefly for completion, otherwise report the job as pending
    TransformResponse response;
    try {
        response = job.get(500, TimeUnit.MILLISECONDS);
        tracker.removeJob(table);
    } catch (final ExecutionException cause) {
        throw log.throwing(new ScriptException(cause));
    } catch (final InterruptedException e) {
        // FIX: restore the interrupt flag so callers can observe the interruption;
        // the original multi-catch silently swallowed it
        Thread.currentThread().interrupt();
        log.trace("Interrupted waiting for script result", e);
        response = pendingResponse(table);
    } catch (final TimeoutException e) {
        log.trace("Timeout waiting for script result", e);
        response = pendingResponse(table);
    }
    return log.exit(response);
}

/**
 * Builds a PENDING response for the specified result table.
 */
@Nonnull
private TransformResponse pendingResponse(@Nonnull final String table) {
    final TransformResponse response = new TransformResponse();
    response.setProgress(0.0);
    response.setStatus(TransformResponse.Status.PENDING);
    response.setTable(table);
    return response;
}
Usage of com.thinkbiganalytics.spark.rest.model.TransformResponse in the kylo project by Teradata.
From class SparkShellTransformControllerTest, method createWithScriptException:
/**
* Verify response if a script exception is thrown.
*/
/**
 * Verify the controller's response when the transform service throws a script exception.
 */
@Test
public void createWithScriptException() throws Exception {
    // Build a request whose execution is stubbed to fail
    final TransformRequest request = new TransformRequest();
    request.setScript("sqlContext.sql(\"SELECT * FROM invalid\")");

    final TransformService transformService = Mockito.mock(TransformService.class);
    Mockito.when(transformService.execute(request)).thenThrow(new ScriptException("Invalid script"));

    // Invoke the controller with the failing service
    final SparkShellTransformController controller = new SparkShellTransformController();
    controller.transformService = transformService;
    final Response response = controller.create(request);

    // The failure should surface as a 500 with an ERROR entity carrying the message
    Assert.assertEquals(Response.Status.INTERNAL_SERVER_ERROR, response.getStatusInfo());
    final TransformResponse entity = (TransformResponse) response.getEntity();
    Assert.assertEquals("Invalid script", entity.getMessage());
    Assert.assertEquals(TransformResponse.Status.ERROR, entity.getStatus());
}
Usage of com.thinkbiganalytics.spark.rest.model.TransformResponse in the kylo project by Teradata.
From class TransformServiceTest, method execute:
/**
* Verify executing a transformation request.
*/
/**
 * Verify executing a transformation request.
 *
 * <p>Mocks the Spark script engine so no real Spark context is needed, executes a simple
 * request, and asserts that the generated script and engine bindings match expectations.</p>
 */
@Test
@SuppressWarnings("unchecked")
public void execute() throws Exception {
    // Mock data set returned by script evaluation
    final DataSet dataSet = Mockito.mock(DataSet.class);
    Mockito.when(dataSet.persist(Mockito.any(StorageLevel.class))).thenReturn(dataSet);
    Mockito.when(dataSet.schema()).thenReturn(new StructType());

    // Mock Spark context service
    final SparkContextService sparkContextService = Mockito.mock(SparkContextService.class);

    // Mock Spark script engine
    final SparkScriptEngine engine = Mockito.mock(SparkScriptEngine.class);
    Mockito.when(engine.eval(Mockito.anyString(), Mockito.anyListOf(NamedParam.class))).thenReturn(dataSet);
    Mockito.when(engine.getSparkContext()).thenReturn(Mockito.mock(SparkContext.class));

    // Test executing a request; the job tracker stub leaves it in the PENDING state
    final TransformRequest request = new TransformRequest();
    request.setScript("sqlContext.range(1,10)");

    final TransformService service = new TransformService(TransformScript.class, engine, sparkContextService, new MockJobTrackerService());
    final TransformResponse response = service.execute(request);
    Assert.assertEquals(TransformResponse.Status.PENDING, response.getStatus());

    // Capture the script and bindings passed to the engine
    final ArgumentCaptor<String> evalScript = ArgumentCaptor.forClass(String.class);
    final ArgumentCaptor<List> evalBindings = ArgumentCaptor.forClass(List.class);
    Mockito.verify(engine).eval(evalScript.capture(), evalBindings.capture());

    // FIX: check the resource stream for null BEFORE reading it. The original checked
    // the decoded string afterwards, but IOUtils.toString never returns null — a missing
    // resource would have surfaced as an NPE instead of the intended diagnostic.
    final String expectedScript;
    try (InputStream stream = getClass().getResourceAsStream("transform-service-script1.scala")) {
        if (stream == null) {
            throw new Exception("transform-service-script1.scala failed to load");
        }
        expectedScript = IOUtils.toString(stream, "UTF-8");
    }
    Assert.assertEquals(expectedScript, evalScript.getValue());

    // Only the Spark context service should be bound for a plain request
    final List<NamedParam> bindings = evalBindings.getValue();
    Assert.assertEquals(1, bindings.size());
    Assert.assertEquals("sparkContextService", bindings.get(0).name());
    Assert.assertEquals("com.thinkbiganalytics.spark.SparkContextService", bindings.get(0).tpe());
    Assert.assertEquals(sparkContextService, bindings.get(0).value());
}
Usage of com.thinkbiganalytics.spark.rest.model.TransformResponse in the kylo project by Teradata.
From class TransformServiceTest, method executeWithDatasourceProviderFactory:
/**
* Verify executing a transformation request with a data source provider factory.
*/
/**
 * Verify executing a transformation request with a data source provider factory.
 *
 * <p>Like {@code execute()}, but also configures a datasource provider factory and a
 * profiler, and asserts that the datasource provider is bound into the engine.</p>
 */
@Test
@SuppressWarnings("unchecked")
public void executeWithDatasourceProviderFactory() throws Exception {
    // Mock data set returned by script evaluation
    final DataSet dataSet = Mockito.mock(DataSet.class);
    Mockito.when(dataSet.persist(Mockito.any(StorageLevel.class))).thenReturn(dataSet);
    Mockito.when(dataSet.schema()).thenReturn(new StructType());

    // Mock Spark context service
    final SparkContextService sparkContextService = Mockito.mock(SparkContextService.class);

    // Mock Spark script engine
    final SparkScriptEngine engine = Mockito.mock(SparkScriptEngine.class);
    Mockito.when(engine.eval(Mockito.anyString(), Mockito.anyListOf(NamedParam.class))).thenReturn(dataSet);
    Mockito.when(engine.getSparkContext()).thenReturn(Mockito.mock(SparkContext.class));

    // Mock data source provider factory
    final DatasourceProvider datasourceProvider = Mockito.mock(DatasourceProvider.class);
    final DatasourceProviderFactory datasourceProviderFactory = Mockito.mock(DatasourceProviderFactory.class);
    Mockito.when(datasourceProviderFactory.getDatasourceProvider(Mockito.anyCollectionOf(Datasource.class))).thenReturn(datasourceProvider);

    // Mock profiler
    final Profiler profiler = Mockito.mock(Profiler.class);

    // Test executing a request with a datasource attached
    final TransformRequest request = new TransformRequest();
    request.setDatasources(Collections.singletonList(Mockito.mock(Datasource.class)));
    request.setScript("sqlContext.range(1,10)");

    final TransformService service = new TransformService(TransformScript.class, engine, sparkContextService, new MockJobTrackerService());
    service.setDatasourceProviderFactory(datasourceProviderFactory);
    service.setProfiler(profiler);

    final TransformResponse response = service.execute(request);
    Assert.assertEquals(TransformResponse.Status.PENDING, response.getStatus());

    // Capture the script and bindings passed to the engine
    final ArgumentCaptor<String> evalScript = ArgumentCaptor.forClass(String.class);
    final ArgumentCaptor<List> evalBindings = ArgumentCaptor.forClass(List.class);
    Mockito.verify(engine).eval(evalScript.capture(), evalBindings.capture());

    // FIX: use try-with-resources so the stream is closed even if reading throws;
    // the original leaked the InputStream on failure and was inconsistent with execute()
    final String expectedScript;
    try (InputStream inputStream = getClass().getResourceAsStream("transform-service-script1.scala")) {
        expectedScript = IOUtils.toString(inputStream, "UTF-8");
    }
    Assert.assertEquals(expectedScript, evalScript.getValue());

    // Both the Spark context service and the datasource provider should be bound
    final List<NamedParam> bindings = evalBindings.getValue();
    Assert.assertEquals(2, bindings.size());
    Assert.assertEquals("sparkContextService", bindings.get(0).name());
    Assert.assertEquals("com.thinkbiganalytics.spark.SparkContextService", bindings.get(0).tpe());
    Assert.assertEquals(sparkContextService, bindings.get(0).value());
    Assert.assertEquals("datasourceProvider", bindings.get(1).name());
    Assert.assertEquals("com.thinkbiganalytics.spark.shell.DatasourceProvider[org.apache.spark.sql.DataFrame]", bindings.get(1).tpe());
    Assert.assertEquals(datasourceProvider, bindings.get(1).value());
}
Aggregations