Use of org.pentaho.di.trans.step.StepAdapter in project pdi-dataservice-server-plugin by pentaho.
The class DefaultTransWiring, method run().
@Override
public void run() {
// This is where we will inject the rows from the service transformation step
//
final RowProducer rowProducer;
final Trans serviceTrans = dataServiceExecutor.getServiceTrans();
final Trans genTrans = dataServiceExecutor.getGenTrans();
try {
rowProducer = dataServiceExecutor.addRowProducer();
} catch (KettleException e) {
throw Throwables.propagate(e);
}
// Now connect the 2 transformations with listeners and injector
//
StepInterface serviceStep = serviceTrans.findRunThread(dataServiceExecutor.getService().getStepname());
if (serviceStep == null) {
throw Throwables.propagate(new KettleException("Service step is not accessible"));
}
serviceStep.addRowListener(new DefaultTransWiringRowAdapter(serviceTrans, genTrans, rowProducer));
// Let the other transformation know when there are no more rows
//
serviceStep.addStepListener(new StepAdapter() {
@Override
public void stepFinished(Trans trans, StepMeta stepMeta, StepInterface step) {
rowProducer.finished();
}
});
dataServiceExecutor.getGenTrans().findRunThread(dataServiceExecutor.getResultStepName()).addStepListener(new StepAdapter() {
@Override
public void stepFinished(Trans trans, StepMeta stepMeta, StepInterface step) {
if (serviceTrans.isRunning()) {
trans.getLogChannel().logBasic("Query finished, stopping service transformation");
serviceTrans.stopAll();
}
}
});
}
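Stripped of the data service specifics, the wiring relies on StepAdapter for two things: closing the RowProducer when the service step finishes, and stopping the service transformation when the generated (query) transformation is done. A minimal sketch of the first half, assuming a running Trans named serviceTrans, a hypothetical step name "Service Output", and a RowProducer named rowProducer obtained elsewhere:
import org.pentaho.di.trans.RowProducer;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.step.StepAdapter;
import org.pentaho.di.trans.step.StepInterface;
import org.pentaho.di.trans.step.StepMeta;
// Signal the consuming transformation that no more rows will arrive.
StepInterface serviceStep = serviceTrans.findRunThread("Service Output"); // assumed step name
serviceStep.addStepListener(new StepAdapter() {
  @Override
  public void stepFinished(Trans trans, StepMeta stepMeta, StepInterface step) {
    // The service step has written its last row; close the producer side of the injector.
    rowProducer.finished();
  }
});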
Use of org.pentaho.di.trans.step.StepAdapter in project pdi-dataservice-server-plugin by pentaho.
The class ServiceObserverTest, method verifyCachedRowIsCloned().
@Test
public void verifyCachedRowIsCloned() throws Exception {
when(stepInterface.isStopped()).thenReturn(false);
observer.run();
verify(stepInterface).addRowListener(rowAdapterCaptor.capture());
verify(stepInterface).addStepListener(stepAdapterCaptor.capture());
RowAdapter rowAdapter = rowAdapterCaptor.getValue();
StepAdapter stepAdapter = stepAdapterCaptor.getValue();
Object[] clonedRow = new Object[0];
when(rowMeta.cloneRow(row)).thenReturn(clonedRow);
rowAdapter.rowWrittenEvent(rowMeta, row);
verify(rowMeta).cloneRow(row);
stepAdapter.stepFinished(null, null, stepInterface);
CachedService cachedService = observer.get();
assertThat(cachedService.getRowMetaAndData().get(0).getData(), is(clonedRow));
}
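The rowAdapterCaptor and stepAdapterCaptor fields are not part of this excerpt; with Mockito they would typically be declared on the test class along the lines of the following sketch (names and annotations assumed, not taken from the actual test):
// Requires @RunWith(MockitoJUnitRunner.class) or MockitoAnnotations.initMocks(this).
@Captor
private ArgumentCaptor<RowAdapter> rowAdapterCaptor;
@Captor
private ArgumentCaptor<StepAdapter> stepAdapterCaptor;
// Or, without annotation processing:
// ArgumentCaptor<StepAdapter> stepAdapterCaptor = ArgumentCaptor.forClass(StepAdapter.class);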
Use of org.pentaho.di.trans.step.StepAdapter in project pdi-dataservice-server-plugin by pentaho.
The class ServiceObserver, method run().
@Override
public void run() {
StepInterface serviceStep = executor.getServiceTrans().findRunThread(executor.getService().getStepname());
serviceStep.addRowListener(new RowAdapter() {
@Override
public synchronized void rowWrittenEvent(RowMetaInterface rowMeta, Object[] row) {
Object[] clonedRow;
try {
clonedRow = rowMeta.cloneRow(row);
} catch (KettleValueException e) {
setException(e);
return;
}
rowMetaAndData.add(new RowMetaAndData(rowMeta, clonedRow));
latch.countDown();
}
});
serviceStep.addStepListener(new StepAdapter() {
@Override
public void stepFinished(Trans trans, StepMeta stepMeta, StepInterface step) {
isRunning = false;
if (executor.getGenTrans().getErrors() > 0) {
setException(new KettleException("Dynamic transformation finished with errors, could not cache results"));
} else if (step.isStopped()) {
set(CachedService.partial(rowMetaAndData, executor));
} else {
set(CachedService.complete(rowMetaAndData));
}
}
});
}
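The set and setException calls indicate that the observer completes a future-style result once the service step finishes. Based on the test above, a caller waits for the cached result roughly like this (a sketch; serviceObserver is a hypothetical variable name and checked-exception handling is omitted):
// Waits until stepFinished() has called set(...), or rethrows the exception passed to setException(...).
CachedService cachedService = serviceObserver.get();
for (RowMetaAndData rowMetaAndData : cachedService.getRowMetaAndData()) {
  // Work with the cached rows, e.g. replay them into another transformation.
}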
Use of org.pentaho.di.trans.step.StepAdapter in project pentaho-kettle by pentaho.
The class Trans, method startThreads().
/**
* Starts the threads prepared by prepareThreads(). Before you start the threads, you can add RowListeners to them.
*
* @throws KettleException if there is a communication error with a remote output socket.
*/
public void startThreads() throws KettleException {
// Now prepare to start all the threads...
//
nrOfFinishedSteps = 0;
nrOfActiveSteps = 0;
ExtensionPointHandler.callExtensionPoint(log, KettleExtensionPoint.TransformationStartThreads.id, this);
fireTransStartedListeners();
for (int i = 0; i < steps.size(); i++) {
final StepMetaDataCombi sid = steps.get(i);
sid.step.markStart();
sid.step.initBeforeStart();
// also attach a Step Listener to detect when we're done...
//
StepListener stepListener = new StepListener() {
@Override
public void stepActive(Trans trans, StepMeta stepMeta, StepInterface step) {
nrOfActiveSteps++;
if (nrOfActiveSteps == 1) {
// PDI-5229 sync added
synchronized (transListeners) {
for (TransListener listener : transListeners) {
listener.transActive(Trans.this);
}
}
}
}
@Override
public void stepFinished(Trans trans, StepMeta stepMeta, StepInterface step) {
synchronized (Trans.this) {
nrOfFinishedSteps++;
if (nrOfFinishedSteps >= steps.size()) {
// Set the finished flag
//
setFinished(true);
// Grab the performance statistics one last time (if enabled)
//
addStepPerformanceSnapShot();
try {
fireTransFinishedListeners();
} catch (Exception e) {
step.setErrors(step.getErrors() + 1L);
log.logError(getName() + " : " + BaseMessages.getString(PKG, "Trans.Log.UnexpectedErrorAtTransformationEnd"), e);
}
}
// If a step finished with errors, kill the remaining steps as well
//
if (step.getErrors() > 0) {
log.logMinimal(BaseMessages.getString(PKG, "Trans.Log.TransformationDetectedErrors"));
log.logMinimal(BaseMessages.getString(PKG, "Trans.Log.TransformationIsKillingTheOtherSteps"));
killAllNoWait();
}
}
}
};
// Register this listener first so it runs before any other step listeners
//
if (sid.step instanceof BaseStep) {
((BaseStep) sid.step).getStepListeners().add(0, stepListener);
} else {
sid.step.addStepListener(stepListener);
}
}
if (transMeta.isCapturingStepPerformanceSnapShots()) {
stepPerformanceSnapshotSeqNr = new AtomicInteger(0);
stepPerformanceSnapShots = new ConcurrentHashMap<>();
// Calculate the maximum number of snapshots to be kept in memory
//
String limitString = environmentSubstitute(transMeta.getStepPerformanceCapturingSizeLimit());
if (Utils.isEmpty(limitString)) {
limitString = EnvUtil.getSystemProperty(Const.KETTLE_STEP_PERFORMANCE_SNAPSHOT_LIMIT);
}
stepPerformanceSnapshotSizeLimit = Const.toInt(limitString, 0);
// Set a timer to collect the performance data from the running threads...
//
stepPerformanceSnapShotTimer = new Timer("stepPerformanceSnapShot Timer: " + transMeta.getName());
TimerTask timerTask = new TimerTask() {
@Override
public void run() {
if (!isFinished()) {
addStepPerformanceSnapShot();
}
}
};
stepPerformanceSnapShotTimer.schedule(timerTask, 100, transMeta.getStepPerformanceCapturingDelay());
}
// Now start a thread to monitor the running transformation...
//
setFinished(false);
setPaused(false);
setStopped(false);
transFinishedBlockingQueue = new ArrayBlockingQueue<>(TRANS_FINISHED_BLOCKING_QUEUE_SIZE);
TransListener transListener = new TransAdapter() {
@Override
public void transFinished(Trans trans) {
try {
shutdownHeartbeat(trans != null ? trans.heartbeat : null);
if (trans != null && transMeta.getParent() == null && trans.parentJob == null && trans.parentTrans == null) {
if (log.isDetailed() && transMeta.getMetaFileCache() != null) {
transMeta.getMetaFileCache().logCacheSummary(log);
}
transMeta.setMetaFileCache(null);
}
ExtensionPointHandler.callExtensionPoint(log, KettleExtensionPoint.TransformationFinish.id, trans);
} catch (KettleException e) {
throw new RuntimeException("Error calling extension point at end of transformation", e);
}
// If a step performance snapshot timer is running, stop it now
//
if (transMeta.isCapturingStepPerformanceSnapShots() && stepPerformanceSnapShotTimer != null) {
stepPerformanceSnapShotTimer.cancel();
}
transMeta.disposeEmbeddedMetastoreProvider();
setFinished(true);
// no longer running
setRunning(false);
log.snap(Metrics.METRIC_TRANSFORMATION_EXECUTION_STOP);
// If the user ran with metrics gathering enabled and a metrics logging table is configured, add another
// listener...
//
MetricsLogTable metricsLogTable = transMeta.getMetricsLogTable();
if (metricsLogTable.isDefined()) {
try {
writeMetricsInformation();
} catch (Exception e) {
log.logError("Error writing metrics information", e);
errors.incrementAndGet();
}
}
// Close the unique database connections if the transformation used them
//
if (transMeta.isUsingUniqueConnections()) {
trans.closeUniqueDatabaseConnections(getResult());
}
// release unused vfs connections
KettleVFS.freeUnusedResources();
}
};
// This should always be done first so that the other listeners achieve a clean state to start from (setFinished and
// so on)
//
transListeners.add(0, transListener);
setRunning(true);
switch(transMeta.getTransformationType()) {
case Normal:
// Start a run thread for every step
//
for (int i = 0; i < steps.size(); i++) {
final StepMetaDataCombi combi = steps.get(i);
RunThread runThread = new RunThread(combi);
Thread thread = new Thread(runThread);
thread.setName(getName() + " - " + combi.stepname);
ExtensionPointHandler.callExtensionPoint(log, KettleExtensionPoint.StepBeforeStart.id, combi);
// Call an extension point at the end of the step
//
combi.step.addStepListener(new StepAdapter() {
@Override
public void stepFinished(Trans trans, StepMeta stepMeta, StepInterface step) {
try {
ExtensionPointHandler.callExtensionPoint(log, KettleExtensionPoint.StepFinished.id, combi);
} catch (KettleException e) {
throw new RuntimeException("Unexpected error in calling extension point upon step finish", e);
}
}
});
thread.start();
}
break;
case SerialSingleThreaded:
new Thread(new Runnable() {
@Override
public void run() {
try {
// Disable thread priority management for all steps; everything runs on one thread
//
for (StepMetaDataCombi combi : steps) {
combi.step.setUsingThreadPriorityManagment(false);
}
//
// This is a single threaded version...
//
// Sort the steps from start to finish...
//
Collections.sort(steps, new Comparator<StepMetaDataCombi>() {
@Override
public int compare(StepMetaDataCombi c1, StepMetaDataCombi c2) {
boolean c1BeforeC2 = transMeta.findPrevious(c2.stepMeta, c1.stepMeta);
if (c1BeforeC2) {
return -1;
} else {
return 1;
}
}
});
boolean[] stepDone = new boolean[steps.size()];
int nrDone = 0;
while (nrDone < steps.size() && !isStopped()) {
for (int i = 0; i < steps.size() && !isStopped(); i++) {
StepMetaDataCombi combi = steps.get(i);
if (!stepDone[i]) {
boolean cont = combi.step.processRow(combi.meta, combi.data);
if (!cont) {
stepDone[i] = true;
nrDone++;
}
}
}
}
} catch (Exception e) {
errors.addAndGet(1);
log.logError("Error executing single threaded", e);
} finally {
for (StepMetaDataCombi combi : steps) {
combi.step.dispose(combi.meta, combi.data);
combi.step.markStop();
}
}
}
}).start();
break;
case SingleThreaded:
// Nothing is started here; a single threaded transformation is driven externally, one iteration at a time
//
break;
default:
break;
}
ExtensionPointHandler.callExtensionPoint(log, KettleExtensionPoint.TransformationStart.id, this);
heartbeat = startHeartbeat(getHeartbeatIntervalInSeconds());
if (steps.isEmpty()) {
fireTransFinishedListeners();
}
if (log.isDetailed()) {
log.logDetailed(BaseMessages.getString(PKG, "Trans.Log.TransformationHasAllocated", String.valueOf(steps.size()), String.valueOf(rowsets.size())));
}
}
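As the Javadoc notes, row (and step) listeners can be attached after preparing the execution and before startThreads(). A typical caller might look roughly like the following sketch (the .ktr file and the step name "Output" are assumptions; exception handling is omitted):
TransMeta transMeta = new TransMeta("my_transformation.ktr"); // hypothetical transformation file
Trans trans = new Trans(transMeta);
trans.prepareExecution(null);
// Attach listeners before the step threads are started.
StepInterface outputStep = trans.findRunThread("Output"); // hypothetical step name
outputStep.addStepListener(new StepAdapter() {
  @Override
  public void stepFinished(Trans t, StepMeta stepMeta, StepInterface step) {
    t.getLogChannel().logBasic("Step '" + step.getStepname() + "' finished");
  }
});
trans.startThreads();
trans.waitUntilFinished();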