Use of org.pentaho.di.core.sql.SQL in project pdi-dataservice-server-plugin by pentaho.
From class ExecutorQueryService, method prepareQuery.
@Override
public Query prepareQuery(String sqlString, IDataServiceClientService.StreamingMode windowMode, long windowSize,
    long windowEvery, long windowLimit, final Map<String, String> parameters) throws KettleException {
  SQL sql = new SQL(sqlString);
  Query query;
  try {
    // Use the metastore from the locator if one is registered; otherwise pass null.
    IMetaStore metaStore = metastoreLocator != null ? metastoreLocator.getMetastore() : null;
    // Streaming query: the window configuration comes straight from the caller.
    DataServiceExecutor executor = resolver.createBuilder(sql)
        .rowLimit(0)
        .windowMode(windowMode)
        .windowSize(windowSize)
        .windowEvery(windowEvery)
        .windowLimit(windowLimit)
        .parameters(parameters)
        .metastore(metaStore)
        .build();
    query = new ExecutorQuery(executor);
  } catch (Exception e) {
    // Rethrow KettleExceptions as-is; wrap anything else.
    Throwables.propagateIfInstanceOf(e, KettleException.class);
    throw new KettleException(e);
  }
  return query;
}
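For context, a minimal caller-side sketch of this streaming overload, assuming an ExecutorQueryService instance named queryService is already wired up; the service name, window values, and empty parameter map are illustrative only, and imports of the plugin classes are omitted.

// Hedged sketch: prepare a streaming query over a rolling window of the
// last 100 rows, re-emitted after every new row. All values are illustrative.
Query query = queryService.prepareQuery(
    "SELECT * FROM my_streaming_service",                // hypothetical data service name
    IDataServiceClientService.StreamingMode.ROW_BASED,   // window measured in rows
    100,                                                 // windowSize
    1,                                                   // windowEvery
    0,                                                   // windowLimit
    java.util.Collections.<String, String>emptyMap());   // no parameter values to push down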
Use of org.pentaho.di.core.sql.SQL in project pdi-dataservice-server-plugin by pentaho.
From class ExecutorQueryService, method prepareQuery.
@Override
public Query prepareQuery(String sqlString, int maxRows, Map<String, String> parameters) throws KettleException {
  SQL sql = new SQL(sqlString);
  Query query;
  try {
    // Use the metastore from the locator if one is registered; otherwise pass null.
    IMetaStore metaStore = metastoreLocator != null ? metastoreLocator.getMetastore() : null;
    // Non-streaming query: maxRows becomes the executor's row limit.
    DataServiceExecutor executor = resolver.createBuilder(sql)
        .rowLimit(maxRows)
        .parameters(parameters)
        .metastore(metaStore)
        .build();
    query = new ExecutorQuery(executor);
  } catch (Exception e) {
    // Rethrow KettleExceptions as-is; wrap anything else.
    Throwables.propagateIfInstanceOf(e, KettleException.class);
    throw new KettleException(e);
  }
  return query;
}
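As a companion sketch, the SQL object built from the query string is what carries the parsed statement to the resolver. Assuming the getServiceName() accessor on org.pentaho.di.core.sql.SQL, the target data service is simply the table named in the FROM clause; the accessor name is an assumption here, not confirmed by the snippets above.

import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.sql.SQL;

// Hedged sketch: parse a query string the same way prepareQuery does and read
// back the data service it targets (getServiceName() is assumed).
static String targetService(String sqlString) throws KettleException {
  SQL sql = new SQL(sqlString);   // throws if the statement cannot be parsed
  return sql.getServiceName();    // the table in the FROM clause, i.e. the data service name
}
// targetService("SELECT id, name FROM my_service WHERE status = 'OPEN'") -> "my_service"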
Use of org.pentaho.di.core.sql.SQL in project pdi-dataservice-server-plugin by pentaho.
From class DataServiceExecutorTest, method testExecuteQueryNoResults.
@Test
public void testExecuteQueryNoResults() throws Exception {
  SQL sql = new SQL("SELECT * FROM " + DATA_SERVICE_NAME);
  StepInterface serviceStep = serviceTrans.findRunThread(DATA_SERVICE_STEP);
  StepInterface resultStep = genTrans.findRunThread(RESULT_STEP_NAME);
  when(serviceTrans.getTransMeta().listParameters()).thenReturn(new String[0]);
  PushDownOptimizationMeta optimization = mock(PushDownOptimizationMeta.class);
  when(optimization.isEnabled()).thenReturn(true);
  dataService.getPushDownOptimizationMeta().add(optimization);
  IMetaStore metastore = mock(IMetaStore.class);
  DataServiceExecutor executor = new DataServiceExecutor.Builder(sql, dataService, context)
      .serviceTrans(serviceTrans)
      .sqlTransGenerator(sqlTransGenerator)
      .genTrans(genTrans)
      .metastore(metastore)
      .build();
  ArgumentCaptor<String> objectIds = ArgumentCaptor.forClass(String.class);
  verify(serviceTrans).setContainerObjectId(objectIds.capture());
  when(serviceTrans.getContainerObjectId()).thenReturn(objectIds.getValue());
  verify(genTrans).setContainerObjectId(objectIds.capture());
  when(genTrans.getContainerObjectId()).thenReturn(objectIds.getValue());
  verify(serviceTrans).setMetaStore(metastore);
  verify(genTrans).setMetaStore(metastore);
  RowProducer sqlTransRowProducer = mock(RowProducer.class);
  when(genTrans.addRowProducer(INJECTOR_STEP_NAME, 0)).thenReturn(sqlTransRowProducer);
  ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
  // Start execution
  executor.executeQuery(new DataOutputStream(outputStream));
  // Check header was written
  assertThat(outputStream.size(), greaterThan(0));
  outputStream.reset();
  InOrder genTransStartup = inOrder(genTrans, resultStep);
  InOrder serviceTransStartup = inOrder(optimization, serviceTrans, serviceStep);
  ArgumentCaptor<RowListener> listenerArgumentCaptor = ArgumentCaptor.forClass(RowListener.class);
  ArgumentCaptor<StepListener> resultStepListener = ArgumentCaptor.forClass(StepListener.class);
  ArgumentCaptor<TransListener> transListenerCaptor = ArgumentCaptor.forClass(TransListener.class);
  genTransStartup.verify(genTrans).addTransListener(transListenerCaptor.capture());
  genTransStartup.verify(genTrans).addRowProducer(INJECTOR_STEP_NAME, 0);
  genTransStartup.verify(resultStep).addStepListener(resultStepListener.capture());
  genTransStartup.verify(resultStep).addRowListener(listenerArgumentCaptor.capture());
  RowListener clientRowListener = listenerArgumentCaptor.getValue();
  genTransStartup.verify(genTrans).startThreads();
  serviceTransStartup.verify(optimization).activate(executor);
  serviceTransStartup.verify(serviceStep).addRowListener(listenerArgumentCaptor.capture());
  serviceTransStartup.verify(serviceTrans).startThreads();
  // Verify linkage
  RowListener serviceRowListener = listenerArgumentCaptor.getValue();
  assertNotNull(serviceRowListener);
  // Push row from service to sql Trans
  RowMetaInterface rowMeta = genTrans.getTransMeta().getStepFields(RESULT_STEP_NAME);
  doReturn(true).when(serviceTrans).isRunning();
  resultStepListener.getValue().stepFinished(genTrans, resultStep.getStepMeta(), resultStep);
  verify(serviceTrans).stopAll();
  // Verify service trans finished
  ArgumentCaptor<StepListener> serviceStepListener = ArgumentCaptor.forClass(StepListener.class);
  verify(serviceStep).addStepListener(serviceStepListener.capture());
  serviceStepListener.getValue().stepFinished(serviceTrans, serviceStep.getStepMeta(), serviceStep);
  verify(sqlTransRowProducer).finished();
  // Finish the transformation, so that the listener runs
  transListenerCaptor.getValue().transFinished(genTrans);
  InOrder writeRows = inOrder(rowMeta);
  ArgumentCaptor<DataOutputStream> streamCaptor = ArgumentCaptor.forClass(DataOutputStream.class);
  writeRows.verify(rowMeta).writeMeta(streamCaptor.capture());
  DataOutputStream dataOutputStream = streamCaptor.getValue();
  writeRows.verify(rowMeta, times(0)).writeData(same(dataOutputStream), argThat(arrayWithSize(1)));
  writeRows.verifyNoMoreInteractions();
  executor.waitUntilFinished();
  verify(serviceTrans).waitUntilFinished();
  verify(genTrans).waitUntilFinished();
}
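Stripped of the Mockito plumbing, the execution path this test pins down looks roughly as follows. This is a hedged sketch that reuses the names from the test above; it is not additional production code from the plugin.

// Build an executor for a plain SELECT and stream the result into a buffer.
SQL sql = new SQL("SELECT * FROM " + DATA_SERVICE_NAME);
DataServiceExecutor executor = new DataServiceExecutor.Builder(sql, dataService, context)
    .serviceTrans(serviceTrans)
    .sqlTransGenerator(sqlTransGenerator)
    .genTrans(genTrans)
    .metastore(metastore)
    .build();
ByteArrayOutputStream buffer = new ByteArrayOutputStream();
executor.executeQuery(new DataOutputStream(buffer));   // header is written up front
executor.waitUntilFinished();                          // blocks until both transformations complete
// With no rows produced, only writeMeta() is ever invoked on the result row
// metadata; writeData() is never called, which is what the verifications assert.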
Use of org.pentaho.di.core.sql.SQL in project pdi-dataservice-server-plugin by pentaho.
From class DataServiceExecutorTest, method testTimeLimitAux.
private StreamingServiceTransExecutor testTimeLimitAux(long userLimit, long metaLimit, String kettleLimit) throws Exception {
  when(genTrans.isFinishedOrStopped()).thenReturn(true);
  SQL sql = new SQL("SELECT * FROM " + DATA_SERVICE_NAME);
  when(serviceTrans.getTransMeta().listParameters()).thenReturn(new String[0]);
  when(sqlTransGenerator.getSql()).thenReturn(sql);
  System.setProperty(DataServiceConstants.TIME_LIMIT_PROPERTY, kettleLimit);
  dataService.setStreaming(true);
  dataService.setTimeLimit(metaLimit);
  IMetaStore metastore = mock(IMetaStore.class);
  DataServiceExecutor executor = new DataServiceExecutor.Builder(sql, dataService, context)
      .serviceTrans(serviceTrans)
      .sqlTransGenerator(sqlTransGenerator)
      .genTrans(genTrans)
      .metastore(metastore)
      .timeLimit(userLimit)
      .windowMode(IDataServiceClientService.StreamingMode.ROW_BASED)
      .windowSize(1)
      .windowEvery(0)
      .windowLimit(0)
      .build();
  StreamingServiceTransExecutor exec = context.getServiceTransExecutor(dataService.getName());
  context.removeServiceTransExecutor(dataService.getName());
  return exec;
}
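A hedged usage sketch for the helper above: its three arguments exercise the three sources of a streaming time limit, namely the per-query limit passed to timeLimit(), the limit stored on the data service meta, and the DataServiceConstants.TIME_LIMIT_PROPERTY system property. The concrete values are illustrative only; how the limits are combined is presumably asserted by the test cases that call this helper.

// Hedged sketch with illustrative values.
StreamingServiceTransExecutor exec = testTimeLimitAux(
    5000L,      // userLimit, forwarded to Builder.timeLimit()
    10000L,     // metaLimit, stored via dataService.setTimeLimit()
    "20000");   // kettleLimit, set as the DataServiceConstants.TIME_LIMIT_PROPERTY system property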
Use of org.pentaho.di.core.sql.SQL in project pdi-dataservice-server-plugin by pentaho.
From class DataServiceExecutorTest, method testStopMixedconditions.
@Test
public void testStopMixedconditions() throws KettleException {
  String sql = "SELECT * FROM " + DATA_SERVICE_NAME;
  when(serviceTrans.isRunning()).thenReturn(true);
  when(genTrans.isRunning()).thenReturn(true);
  DataServiceExecutor executor = new DataServiceExecutor.Builder(new SQL(sql), dataService, context)
      .serviceTrans(serviceTrans)
      .sqlTransGenerator(sqlTransGenerator)
      .genTrans(genTrans)
      .build();
  // Only the generated transformation is running: only it should be stopped.
  when(serviceTrans.isRunning()).thenReturn(false);
  when(genTrans.isRunning()).thenReturn(true);
  executor.stop();
  verify(serviceTrans, times(0)).stopAll();
  verify(genTrans, times(1)).stopAll();
  // Only the service transformation is running: now it gets stopped as well.
  when(serviceTrans.isRunning()).thenReturn(true);
  when(genTrans.isRunning()).thenReturn(false);
  executor.stop();
  verify(serviceTrans, times(1)).stopAll();
  verify(genTrans, times(1)).stopAll();
}
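In caller terms, the contract this test pins down is that stop() consults isRunning() on each transformation and only forwards stopAll() to the ones still running, so stopping an executor whose service transformation has already finished is safe. A hedged sketch of that behavior, reusing the mocks from the test above:

// serviceTrans already finished, genTrans still running:
executor.stop();   // stops genTrans only; serviceTrans.stopAll() is not invoked
// later, with serviceTrans running and genTrans finished:
executor.stop();   // now stops serviceTrans; genTrans is left alone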