Use of org.apache.hyracks.api.comm.IFrameWriter in project asterixdb by apache.
The class NestedPlansAccumulatingAggregatorFactory, method assemblePipeline:
private IFrameWriter assemblePipeline(AlgebricksPipeline subplan, IFrameWriter writer, IHyracksTaskContext ctx)
        throws HyracksDataException {
    // plug the operators together back to front: each runtime must be handed
    // its downstream writer before it can be wired in
    IFrameWriter start = writer;
    IPushRuntimeFactory[] runtimeFactories = subplan.getRuntimeFactories();
    RecordDescriptor[] recordDescriptors = subplan.getRecordDescriptors();
    for (int i = runtimeFactories.length - 1; i >= 0; i--) {
        IPushRuntime newRuntime = runtimeFactories[i].createPushRuntime(ctx);
        newRuntime.setFrameWriter(0, start, recordDescriptors[i]);
        if (i > 0) {
            newRuntime.setInputRecordDescriptor(0, recordDescriptors[i - 1]);
        } else {
            // the nested tuple source (NTS) has the same input and output record descriptor
            newRuntime.setInputRecordDescriptor(0, recordDescriptors[0]);
        }
        start = newRuntime;
    }
    return start;
}
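The back-to-front iteration is the essential detail: a push runtime must know its downstream writer at wiring time, so assembly has to start at the sink. The following minimal sketch shows the same reverse-chaining idiom with a hypothetical Stage interface, not the Hyracks API:

import java.util.List;

interface Stage {
    void push(String frame);
}

public final class ReverseChainDemo {

    // Wraps a downstream stage, analogous to newRuntime.setFrameWriter(0, start, ...).
    static Stage wrap(String name, Stage downstream) {
        return frame -> {
            System.out.println(name + " processed " + frame);
            downstream.push(frame);
        };
    }

    public static void main(String[] args) {
        Stage sink = frame -> System.out.println("sink received " + frame);
        List<String> names = List.of("scan", "select", "project");
        Stage start = sink;
        // Back to front, exactly like the factory loop: the last stage is
        // plugged into the sink first, and 'start' walks toward the source.
        for (int i = names.size() - 1; i >= 0; i--) {
            start = wrap(names.get(i), start);
        }
        start.push("frame-1"); // flows scan -> select -> project -> sink
    }
}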
Use of org.apache.hyracks.api.comm.IFrameWriter in project asterixdb by apache.
The class MultiThreadTaskEmulator, method runInParallel:
public void runInParallel(final IFrameWriter[] writers, final List<IFrame>[] inputFrames) throws Exception {
    final Semaphore sem = new Semaphore(writers.length - 1);
    List<Exception> exceptions = Collections.synchronizedList(new ArrayList<>());
    for (int i = 1; i < writers.length; i++) {
        sem.acquire();
        final IFrameWriter writer = writers[i];
        final List<IFrame> inputFrame = inputFrames[i];
        executor.execute(() -> {
            executeOneWriter(writer, inputFrame, exceptions);
            sem.release();
        });
    }
    // the calling thread processes the first writer itself
    final IFrameWriter writer = writers[0];
    final List<IFrame> inputFrame = inputFrames[0];
    executeOneWriter(writer, inputFrame, exceptions);
    // wait until every executor task has released its permit
    sem.acquire(writers.length - 1);
    // print every collected exception, then rethrow the last one
    for (int i = 0; i < exceptions.size(); i++) {
        exceptions.get(i).printStackTrace();
        if (i == exceptions.size() - 1) {
            throw exceptions.get(i);
        }
    }
}
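The method uses a common semaphore fan-out idiom: N - 1 writers run on the executor while the calling thread processes the first writer itself, and acquiring N - 1 permits at the end acts as a completion barrier. A minimal, self-contained sketch of that idiom with plain Runnables, not the AsterixDB types:

import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Semaphore;

public final class SemaphoreFanOutDemo {

    public static void main(String[] args) throws InterruptedException {
        List<Runnable> units = List.of(
                () -> System.out.println("unit 0 on " + Thread.currentThread().getName()),
                () -> System.out.println("unit 1 on " + Thread.currentThread().getName()),
                () -> System.out.println("unit 2 on " + Thread.currentThread().getName()));
        ExecutorService executor = Executors.newCachedThreadPool();
        Semaphore sem = new Semaphore(units.size() - 1);
        for (int i = 1; i < units.size(); i++) {
            sem.acquire(); // bounds the number of in-flight submissions
            Runnable unit = units.get(i);
            executor.execute(() -> {
                try {
                    unit.run();
                } finally {
                    sem.release(); // release even if the unit throws
                }
            });
        }
        units.get(0).run();            // the caller does one share of the work
        sem.acquire(units.size() - 1); // barrier: every helper has finished
        executor.shutdown();
    }
}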
Use of org.apache.hyracks.api.comm.IFrameWriter in project asterixdb by apache.
The class Task, method run:
@Override
public void run() {
    Thread ct = Thread.currentThread();
    String threadName = ct.getName();
    // register this thread so that an abort can still interrupt it;
    // registration fails if the task has already been aborted
    if (!addPendingThread(ct)) {
        exceptions.add(new InterruptedException("Task " + getTaskAttemptId() + " was aborted!"));
        ExceptionUtils.setNodeIds(exceptions, ncs.getId());
        ncs.getWorkQueue().schedule(new NotifyTaskFailureWork(ncs, this, exceptions));
        return;
    }
    ct.setName(displayName + ":" + taskAttemptId + ":" + 0);
    try {
        try {
            operator.initialize();
            if (collectors.length > 0) {
                final Semaphore sem = new Semaphore(collectors.length - 1);
                for (int i = 1; i < collectors.length; ++i) {
                    final IPartitionCollector collector = collectors[i];
                    final IFrameWriter writer = operator.getInputFrameWriter(i);
                    sem.acquire();
                    final int cIdx = i;
                    executorService.execute(new Runnable() {
                        @Override
                        public void run() {
                            Thread thread = Thread.currentThread();
                            // register the helper thread as well, so an abort can interrupt it
                            if (!addPendingThread(thread)) {
                                return;
                            }
                            String oldName = thread.getName();
                            thread.setName(displayName + ":" + taskAttemptId + ":" + cIdx);
                            thread.setPriority(Thread.MIN_PRIORITY);
                            try {
                                pushFrames(collector, inputChannelsFromConnectors.get(cIdx), writer);
                            } catch (HyracksDataException e) {
                                synchronized (Task.this) {
                                    exceptions.add(e);
                                }
                            } finally {
                                thread.setName(oldName);
                                sem.release();
                                removePendingThread(thread);
                            }
                        }
                    });
                }
                try {
                    // the task's own thread handles the first collector
                    pushFrames(collectors[0], inputChannelsFromConnectors.get(0), operator.getInputFrameWriter(0));
                } finally {
                    // wait for every helper thread to release its permit
                    sem.acquire(collectors.length - 1);
                }
            }
        } finally {
            operator.deinitialize();
        }
        NodeControllerService ncs = joblet.getNodeController();
        ncs.getWorkQueue().schedule(new NotifyTaskCompleteWork(ncs, this));
    } catch (Exception e) {
        exceptions.add(e);
    } finally {
        ct.setName(threadName);
        close();
        removePendingThread(ct);
    }
    if (!exceptions.isEmpty()) {
        for (Exception e : exceptions) {
            e.printStackTrace();
        }
        NodeControllerService ncs = joblet.getNodeController();
        ExceptionUtils.setNodeIds(exceptions, ncs.getId());
        ncs.getWorkQueue().schedule(new NotifyTaskFailureWork(ncs, this, exceptions));
    }
}
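Besides the same semaphore fan-out seen in MultiThreadTaskEmulator, Task.run registers every worker thread via addPendingThread so an abort can interrupt it, and registration fails once the task is aborted. A minimal sketch of that bookkeeping, with hypothetical names rather than the real Task internals:

import java.util.HashSet;
import java.util.Set;

public final class AbortableTaskDemo {

    private final Set<Thread> pending = new HashSet<>();
    private boolean aborted;

    synchronized boolean addPendingThread(Thread t) {
        if (aborted) {
            return false; // too late: the caller must bail out, as run() does above
        }
        pending.add(t);
        return true;
    }

    synchronized void removePendingThread(Thread t) {
        pending.remove(t);
    }

    synchronized void abort() {
        aborted = true;
        pending.forEach(Thread::interrupt); // wake every registered worker
    }

    public static void main(String[] args) {
        AbortableTaskDemo task = new AbortableTaskDemo();
        System.out.println(task.addPendingThread(Thread.currentThread())); // true
        task.abort();
        System.out.println(task.addPendingThread(new Thread()));           // false after abort
        Thread.interrupted(); // clear the interrupt flag abort() set on this thread
    }
}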
Use of org.apache.hyracks.api.comm.IFrameWriter in project asterixdb by apache.
The class DatasetPartitionManager, method createDatasetPartitionWriter:
@Override
public IFrameWriter createDatasetPartitionWriter(IHyracksTaskContext ctx, ResultSetId rsId, boolean orderedResult,
        boolean asyncMode, int partition, int nPartitions) throws HyracksException {
    DatasetPartitionWriter dpw;
    JobId jobId = ctx.getJobletContext().getJobId();
    synchronized (this) {
        dpw = new DatasetPartitionWriter(ctx, this, jobId, rsId, asyncMode, orderedResult, partition, nPartitions,
                datasetMemoryManager, fileFactory);
        ResultSetMap rsIdMap = (ResultSetMap) partitionResultStateMap.computeIfAbsent(jobId, k -> new ResultSetMap());
        ResultState[] resultStates = rsIdMap.createOrGetResultStates(rsId, nPartitions);
        resultStates[partition] = dpw.getResultState();
    }
    LOGGER.fine("Initialized partition writer: JobId: " + jobId + ":partition: " + partition);
    return dpw;
}
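The synchronized block publishes the new partition's result state into a per-job map that is created lazily with computeIfAbsent. A small sketch of the same registration idiom, using illustrative types rather than the AsterixDB ones:

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public final class PerJobStateDemo {

    static final class State {}

    private final Map<Long, State[]> statesPerJob = new ConcurrentHashMap<>();

    State register(long jobId, int nPartitions, int partition) {
        // the first caller for this job allocates the slot array; later callers reuse it
        State[] states = statesPerJob.computeIfAbsent(jobId, k -> new State[nPartitions]);
        State s = new State();
        synchronized (this) { // mirrors the synchronized publication in the method above
            states[partition] = s;
        }
        return s;
    }

    public static void main(String[] args) {
        PerJobStateDemo mgr = new PerJobStateDemo();
        mgr.register(42L, 4, 0);
        mgr.register(42L, 4, 1);
        System.out.println("registered 2 of 4 partitions for job 42");
    }
}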
Use of org.apache.hyracks.api.comm.IFrameWriter in project asterixdb by apache.
The class AbstractExternalSortRunMerger, method process:
public void process() throws HyracksDataException {
    IFrameWriter finalWriter = null;
    try {
        if (runs.isEmpty()) {
            // no runs were generated: the sorted data fits in memory, so flush it directly
            finalWriter = prepareSkipMergingFinalResultWriter(writer);
            finalWriter.open();
            if (sorter != null) {
                try {
                    if (sorter.hasRemaining()) {
                        sorter.flush(finalWriter);
                    }
                } finally {
                    sorter.close();
                }
            }
        } else {
            // recycle the sort buffer
            if (sorter != null) {
                sorter.close();
            }
            finalWriter = prepareFinalMergeResultWriter(writer);
            finalWriter.open();
            int maxMergeWidth = framesLimit - 1;
            inFrames = new ArrayList<>(maxMergeWidth);
            outputFrame = new VSizeFrame(ctx);
            List<GeneratedRunFileReader> partialRuns = new ArrayList<>(maxMergeWidth);
            int stop = runs.size();
            currentGenerationRunAvailable.set(0, stop);
            while (true) {
                int unUsed = selectPartialRuns(maxMergeWidth * ctx.getInitialFrameSize(), runs, partialRuns,
                        currentGenerationRunAvailable, stop);
                prepareFrames(unUsed, inFrames, partialRuns);
                if (!currentGenerationRunAvailable.isEmpty() || stop < runs.size()) {
                    GeneratedRunFileReader reader;
                    if (partialRuns.size() == 1) {
                        if (!currentGenerationRunAvailable.isEmpty()) {
                            throw new HyracksDataException("The record is too big to put into the merging frame, "
                                    + "please allocate more sorting memory");
                        } else {
                            reader = partialRuns.get(0);
                        }
                    } else {
                        // intermediate pass: merge the selected runs into a new run file
                        RunFileWriter mergeFileWriter = prepareIntermediateMergeRunFile();
                        IFrameWriter mergeResultWriter = prepareIntermediateMergeResultWriter(mergeFileWriter);
                        try {
                            mergeResultWriter.open();
                            merge(mergeResultWriter, partialRuns);
                        } catch (Throwable t) {
                            mergeResultWriter.fail();
                            throw t;
                        } finally {
                            mergeResultWriter.close();
                        }
                        reader = mergeFileWriter.createReader();
                    }
                    runs.add(reader);
                    if (currentGenerationRunAvailable.isEmpty()) {
                        if (LOGGER.isLoggable(Level.FINE)) {
                            LOGGER.fine("generated runs:" + stop);
                        }
                        // drop the fully consumed generation and start the next one
                        runs.subList(0, stop).clear();
                        currentGenerationRunAvailable.clear();
                        currentGenerationRunAvailable.set(0, runs.size());
                        stop = runs.size();
                    }
                } else {
                    if (LOGGER.isLoggable(Level.FINE)) {
                        LOGGER.fine("final runs:" + stop);
                    }
                    // final pass: all remaining runs fit into one merge
                    merge(finalWriter, partialRuns);
                    break;
                }
            }
        }
    } catch (Exception e) {
        if (finalWriter != null) {
            finalWriter.fail();
        }
        throw HyracksDataException.create(e);
    } finally {
        try {
            if (finalWriter != null) {
                finalWriter.close();
            }
        } finally {
            for (RunFileReader reader : runs) {
                try {
                    // close is idempotent
                    reader.close();
                } catch (Exception e) {
                    if (LOGGER.isLoggable(Level.WARNING)) {
                        LOGGER.log(Level.WARNING, e.getMessage(), e);
                    }
                }
            }
        }
    }
}
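process() is a multi-pass merge: while more runs exist than can be merged at once (maxMergeWidth, bounded by the frame budget), intermediate passes combine groups of runs into new, longer runs; the final pass merges the survivors straight into finalWriter. The sketch below illustrates that shape with integer lists standing in for run files and a fixed group size instead of the budget-driven selectPartialRuns; it is a simplification, not the AsterixDB algorithm:

import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.PriorityQueue;

public final class MultiPassMergeDemo {

    // k-way merge of sorted runs using a heap of {value, runIndex, position}.
    static List<Integer> merge(List<List<Integer>> runs) {
        PriorityQueue<int[]> heap = new PriorityQueue<>(Comparator.comparingInt((int[] e) -> e[0]));
        for (int r = 0; r < runs.size(); r++) {
            if (!runs.get(r).isEmpty()) {
                heap.add(new int[] { runs.get(r).get(0), r, 0 });
            }
        }
        List<Integer> out = new ArrayList<>();
        while (!heap.isEmpty()) {
            int[] e = heap.poll();
            out.add(e[0]);
            List<Integer> run = runs.get(e[1]);
            if (e[2] + 1 < run.size()) {
                heap.add(new int[] { run.get(e[2] + 1), e[1], e[2] + 1 });
            }
        }
        return out;
    }

    public static void main(String[] args) {
        List<List<Integer>> runs = new ArrayList<>(List.of(
                List.of(1, 5, 9), List.of(2, 6), List.of(3, 7), List.of(4, 8)));
        int width = 2; // analogous to maxMergeWidth = framesLimit - 1
        // Intermediate passes: replace each group of up to 'width' runs by one merged run.
        while (runs.size() > width) {
            List<List<Integer>> next = new ArrayList<>();
            for (int i = 0; i < runs.size(); i += width) {
                next.add(merge(runs.subList(i, Math.min(i + width, runs.size()))));
            }
            runs = next;
        }
        System.out.println(merge(runs)); // final pass: [1, 2, 3, 4, 5, 6, 7, 8, 9]
    }
}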