Use of org.apache.hyracks.api.job.JobId in the asterixdb project (Apache): class AbstractIntegrationTest, method runTest.
/**
 * Submits the given job and blocks until the cluster controller reports that
 * it has completed.
 *
 * @param spec the job specification to execute
 * @throws Exception if submission or completion of the job fails
 */
protected void runTest(JobSpecification spec) throws Exception {
    hcc.waitForCompletion(executeTest(spec));
}
Use of org.apache.hyracks.api.job.JobId in the asterixdb project (Apache): class AbstractMultiNCIntegrationTest, method runTest.
/**
 * Runs the given job with runtime profiling enabled, drains the job's first
 * result set (collecting each tuple as a string record), waits for the job
 * to complete, and finally dumps the NC output files.
 *
 * @param spec the job specification to execute
 * @throws Exception if the job fails to start, results cannot be read, or
 *         the job does not complete successfully
 */
protected void runTest(JobSpecification spec) throws Exception {
if (LOGGER.isLoggable(Level.INFO)) {
LOGGER.info(spec.toJSON().asText());
}
// Start the job with PROFILE_RUNTIME so runtime statistics are gathered.
JobId jobId = hcc.startJob(spec, EnumSet.of(JobFlag.PROFILE_RUNTIME));
if (LOGGER.isLoggable(Level.INFO)) {
LOGGER.info(jobId.toString());
}
int nReaders = 1;
FrameManager resultDisplayFrameMgr = new FrameManager(spec.getFrameSize());
VSizeFrame resultFrame = new VSizeFrame(resultDisplayFrameMgr);
IFrameTupleAccessor frameTupleAccessor = new ResultFrameTupleAccessor();
// Only drain results if the job actually declares a result set.
if (!spec.getResultSetIds().isEmpty()) {
IHyracksDataset hyracksDataset = new HyracksDataset(hcc, spec.getFrameSize(), nReaders);
// Reads only the first result set id; any additional ids are ignored here.
IHyracksDatasetReader reader = hyracksDataset.createReader(jobId, spec.getResultSetIds().get(0));
ObjectMapper om = new ObjectMapper();
ArrayNode resultRecords = om.createArrayNode();
ByteBufferInputStream bbis = new ByteBufferInputStream();
int readSize = reader.read(resultFrame);
while (readSize > 0) {
try {
frameTupleAccessor.reset(resultFrame.getBuffer());
// Copy each tuple's bytes out of the frame and store it as a string record.
for (int tIndex = 0; tIndex < frameTupleAccessor.getTupleCount(); tIndex++) {
int start = frameTupleAccessor.getTupleStartOffset(tIndex);
int length = frameTupleAccessor.getTupleEndOffset(tIndex) - start;
bbis.setByteBuffer(resultFrame.getBuffer(), start);
byte[] recordBytes = new byte[length];
bbis.read(recordBytes, 0, length);
// NOTE(review): uses the platform default charset — confirm result records
// are ASCII/UTF-8, otherwise pass an explicit charset.
resultRecords.add(new String(recordBytes, 0, length));
}
} finally {
try {
// NOTE(review): bbis is closed on every loop iteration but then reused via
// setByteBuffer on the next pass — presumably close() is a no-op for this
// stream type; verify against ByteBufferInputStream's implementation.
bbis.close();
} catch (IOException e) {
throw new HyracksDataException(e);
}
}
readSize = reader.read(resultFrame);
}
}
hcc.waitForCompletion(jobId);
// Dump node-controller output files for post-run inspection.
dumpOutputFiles();
}
Use of org.apache.hyracks.api.job.JobId in the asterixdb project (Apache): class SleepOperatorDescriptor, method cancelAfterWaitForCompletion.
/**
 * Starts the given job, then cancels it from a separate thread while this
 * thread is blocked in waitForCompletion. Asserts that waitForCompletion
 * terminates with a HyracksException whose error code is JOB_CANCELED.
 *
 * @param spec the job specification to start and then cancel
 * @throws Exception if starting the job or joining the canceler thread fails
 */
private void cancelAfterWaitForCompletion(JobSpecification spec) throws Exception {
JobId jobId = startJob(spec);
// A thread for canceling the job.
Thread thread = new Thread(() -> {
try {
synchronized (this) {
// Make sure waitForCompletion be called first.
// NOTE(review): this is effectively a fixed 500 ms delay — no notify() is
// expected, so the ordering with waitForCompletion is timing-based rather
// than truly synchronized; a spurious wakeup would shorten the delay.
this.wait(500);
}
cancelJob(jobId);
} catch (Exception e) {
// Best-effort: if cancellation fails here, the assertion below will fail.
e.printStackTrace();
}
});
// Cancels the job.
thread.start();
// Checks the resulting Exception.
boolean exceptionMatched = false;
try {
waitForCompletion(jobId);
} catch (Exception e) {
exceptionMatched = true;
Assert.assertTrue(e instanceof HyracksException);
HyracksException hyracksException = (HyracksException) e;
Assert.assertTrue(hyracksException.getErrorCode() == ErrorCode.JOB_CANCELED);
} finally {
// Ensure the catch block actually ran — waitForCompletion must not succeed.
Assert.assertTrue(exceptionMatched);
}
thread.join();
}
Use of org.apache.hyracks.api.job.JobId in the asterixdb project (Apache): class SleepOperatorDescriptor, method cancelWithoutWait.
/**
 * Starts the given job and immediately requests its cancellation, without
 * waiting for the job to complete first.
 *
 * @param spec the job specification to start and cancel
 * @throws Exception if starting or canceling the job fails
 */
private void cancelWithoutWait(JobSpecification spec) throws Exception {
    cancelJob(startJob(spec));
}
Use of org.apache.hyracks.api.job.JobId in the asterixdb project (Apache): class JobStatusAPIIntegrationTest, method testJobRun.
/**
 * Verifies the job-run REST endpoint: starts a job, checks that the reported
 * run information carries the matching job id and activity-cluster data, then
 * stops the job and confirms it reaches the TERMINATED state.
 *
 * @throws Exception if the job cannot be started, queried, or stopped
 */
@Test
public void testJobRun() throws Exception {
    JobId jId = startJob();
    ObjectNode res = getJobRun(jId);
    Assert.assertTrue(res.has("result"));
    ObjectNode jobRun = (ObjectNode) res.get("result");
    Assert.assertTrue(jobRun.has("job-id"));
    // assertEquals reports expected vs. actual on failure, unlike
    // assertTrue(x.equals(y)) which only says "false".
    Assert.assertEquals(jId, JobId.parse(jobRun.get("job-id").asText()));
    checkActivityCluster(jobRun);
    stopJob(jId);
    Assert.assertEquals("TERMINATED", getJobStatus(jId));
}
Aggregations