Use of org.apache.hyracks.api.dataflow.value.IRecordDescriptorProvider in project asterixdb by apache.
In the class TestNodeController, the method getInsertPipeline:
public Pair<LSMInsertDeleteOperatorNodePushable, CommitRuntime> getInsertPipeline(IHyracksTaskContext ctx,
        Dataset dataset, IAType[] primaryKeyTypes, ARecordType recordType, ARecordType metaType,
        ILSMMergePolicyFactory mergePolicyFactory, Map<String, String> mergePolicyProperties, int[] filterFields,
        int[] primaryKeyIndexes, List<Integer> primaryKeyIndicators,
        StorageComponentProvider storageComponentProvider) throws AlgebricksException, HyracksDataException {
    PrimaryIndexInfo primaryIndexInfo = new PrimaryIndexInfo(dataset, primaryKeyTypes, recordType, metaType,
            mergePolicyFactory, mergePolicyProperties, filterFields, primaryKeyIndexes, primaryKeyIndicators,
            storageComponentProvider);
    IndexOperation op = IndexOperation.INSERT;
    IModificationOperationCallbackFactory modOpCallbackFactory =
            new PrimaryIndexModificationOperationCallbackFactory(getTxnJobId(), dataset.getDatasetId(),
                    primaryIndexInfo.primaryKeyIndexes, TXN_SUBSYSTEM_PROVIDER, Operation.get(op),
                    ResourceType.LSM_BTREE);
    IRecordDescriptorProvider recordDescProvider = primaryIndexInfo.getInsertRecordDescriptorProvider();
    IIndexDataflowHelperFactory indexHelperFactory = new IndexDataflowHelperFactory(
            storageComponentProvider.getStorageManager(), primaryIndexInfo.fileSplitProvider);
    LSMInsertDeleteOperatorNodePushable insertOp = new LSMInsertDeleteOperatorNodePushable(ctx, PARTITION,
            primaryIndexInfo.primaryIndexInsertFieldsPermutations,
            recordDescProvider.getInputRecordDescriptor(new ActivityId(new OperatorDescriptorId(0), 0), 0), op,
            true, indexHelperFactory, modOpCallbackFactory, null);
    CommitRuntime commitOp = new CommitRuntime(ctx, getTxnJobId(), dataset.getDatasetId(),
            primaryIndexInfo.primaryKeyIndexes, false, true, PARTITION, true);
    insertOp.setOutputFrameWriter(0, commitOp, primaryIndexInfo.rDesc);
    commitOp.setInputRecordDescriptor(0, primaryIndexInfo.rDesc);
    return Pair.of(insertOp, commitOp);
}
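A minimal sketch of driving this pipeline from a test, assuming a TestNodeController instance nc, the arguments from the signature above already in scope, and a hypothetical ByteBuffer tupleFrame holding serialized tuples. LSMInsertDeleteOperatorNodePushable implements IFrameWriter, so the usual open/nextFrame/close protocol applies:

Pair<LSMInsertDeleteOperatorNodePushable, CommitRuntime> pipeline = nc.getInsertPipeline(ctx, dataset,
        primaryKeyTypes, recordType, metaType, mergePolicyFactory, mergePolicyProperties, filterFields,
        primaryKeyIndexes, primaryKeyIndicators, storageComponentProvider);
LSMInsertDeleteOperatorNodePushable insertOp = pipeline.getLeft();
insertOp.open();                // opens the LSM index and the downstream commit runtime
insertOp.nextFrame(tupleFrame); // pushes one frame of tuples into the primary index
insertOp.close();               // closes the index; the commit runtime commits the transaction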
Use of org.apache.hyracks.api.dataflow.value.IRecordDescriptorProvider in project asterixdb by apache.
In the class FramewriterTest, the method createWriters:
/**
 * @return a list of writers to test; these writers can be of the same type but behave differently
 *         based on the included mocks
 * @throws HyracksDataException
 */
public IFrameWriter[] createWriters() throws HyracksDataException {
    ArrayList<BTreeSearchOperatorNodePushable> writers = new ArrayList<>();
    Pair<IIndexDataflowHelperFactory, ISearchOperationCallbackFactory>[] pairs = pairs();
    IRecordDescriptorProvider[] recordDescProviders = mockRecDescProviders();
    int partition = 0;
    IHyracksTaskContext[] ctxs = mockIHyracksTaskContext();
    int[] keys = { 0 };
    boolean lowKeyInclusive = true;
    boolean highKeyInclusive = true;
    for (Pair<IIndexDataflowHelperFactory, ISearchOperationCallbackFactory> pair : pairs) {
        for (IRecordDescriptorProvider recordDescProvider : recordDescProviders) {
            for (IHyracksTaskContext ctx : ctxs) {
                BTreeSearchOperatorNodePushable writer = new BTreeSearchOperatorNodePushable(ctx, partition,
                        recordDescProvider.getInputRecordDescriptor(
                                new ActivityId(new OperatorDescriptorId(0), 0), 0),
                        keys, keys, lowKeyInclusive, highKeyInclusive, keys, keys, pair.getLeft(), false, false,
                        null, pair.getRight(), false);
                writers.add(writer);
            }
        }
    }
    // Create the frame writers using the mocks
    return writers.toArray(new IFrameWriter[writers.size()]);
}
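A hedged sketch (not part of the test class) of exercising these writers under the standard IFrameWriter lifecycle, where searchKeysFrame is a hypothetical ByteBuffer of serialized search keys:

for (IFrameWriter writer : createWriters()) {
    writer.open();
    try {
        writer.nextFrame(searchKeysFrame); // run one search pass against the mocked index
    } catch (Exception e) {
        writer.fail(); // propagate the failure downstream before closing
    } finally {
        writer.close();
    }
}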
Use of org.apache.hyracks.api.dataflow.value.IRecordDescriptorProvider in project asterixdb by apache.
In the class StartTasksWork, the method run:
@Override
public void run() {
    Task task = null;
    try {
        NCServiceContext serviceCtx = ncs.getContext();
        Joblet joblet = getOrCreateLocalJoblet(deploymentId, jobId, serviceCtx, acgBytes);
        final ActivityClusterGraph acg = joblet.getActivityClusterGraph();
        IRecordDescriptorProvider rdp = new IRecordDescriptorProvider() {

            @Override
            public RecordDescriptor getOutputRecordDescriptor(ActivityId aid, int outputIndex) {
                ActivityCluster ac = acg.getActivityMap().get(aid);
                IConnectorDescriptor conn = ac.getActivityOutputMap().get(aid).get(outputIndex);
                return ac.getConnectorRecordDescriptorMap().get(conn.getConnectorId());
            }

            @Override
            public RecordDescriptor getInputRecordDescriptor(ActivityId aid, int inputIndex) {
                ActivityCluster ac = acg.getActivityMap().get(aid);
                IConnectorDescriptor conn = ac.getActivityInputMap().get(aid).get(inputIndex);
                return ac.getConnectorRecordDescriptorMap().get(conn.getConnectorId());
            }
        };
        for (TaskAttemptDescriptor td : taskDescriptors) {
            TaskAttemptId taId = td.getTaskAttemptId();
            TaskId tid = taId.getTaskId();
            ActivityId aid = tid.getActivityId();
            ActivityCluster ac = acg.getActivityMap().get(aid);
            IActivity han = ac.getActivityMap().get(aid);
            if (LOGGER.isLoggable(Level.INFO)) {
                LOGGER.info("Initializing " + taId + " -> " + han);
            }
            final int partition = tid.getPartition();
            List<IConnectorDescriptor> inputs = ac.getActivityInputMap().get(aid);
            task = new Task(joblet, taId, han.getClass().getName(), ncs.getExecutor(), ncs,
                    createInputChannels(td, inputs));
            IOperatorNodePushable operator = han.createPushRuntime(task, rdp, partition, td.getPartitionCount());
            List<IPartitionCollector> collectors = new ArrayList<>();
            if (inputs != null) {
                for (int i = 0; i < inputs.size(); ++i) {
                    IConnectorDescriptor conn = inputs.get(i);
                    IConnectorPolicy cPolicy = connectorPoliciesMap.get(conn.getConnectorId());
                    if (LOGGER.isLoggable(Level.INFO)) {
                        LOGGER.info("input: " + i + ": " + conn.getConnectorId());
                    }
                    RecordDescriptor recordDesc = ac.getConnectorRecordDescriptorMap().get(conn.getConnectorId());
                    IPartitionCollector collector =
                            createPartitionCollector(td, partition, task, i, conn, recordDesc, cPolicy);
                    collectors.add(collector);
                }
            }
            List<IConnectorDescriptor> outputs = ac.getActivityOutputMap().get(aid);
            if (outputs != null) {
                for (int i = 0; i < outputs.size(); ++i) {
                    final IConnectorDescriptor conn = outputs.get(i);
                    RecordDescriptor recordDesc = ac.getConnectorRecordDescriptorMap().get(conn.getConnectorId());
                    IConnectorPolicy cPolicy = connectorPoliciesMap.get(conn.getConnectorId());
                    IPartitionWriterFactory pwFactory =
                            createPartitionWriterFactory(task, cPolicy, jobId, conn, partition, taId, flags);
                    if (LOGGER.isLoggable(Level.INFO)) {
                        LOGGER.info("output: " + i + ": " + conn.getConnectorId());
                    }
                    IFrameWriter writer = conn.createPartitioner(task, recordDesc, pwFactory, partition,
                            td.getPartitionCount(), td.getOutputPartitionCounts()[i]);
                    operator.setOutputFrameWriter(i, writer, recordDesc);
                }
            }
            task.setTaskRuntime(collectors.toArray(new IPartitionCollector[collectors.size()]), operator);
            joblet.addTask(task);
            task.start();
        }
    } catch (Exception e) {
        LOGGER.log(Level.WARNING, "Failure starting a task", e);
        // notify cc of start task failure
        List<Exception> exceptions = new ArrayList<>();
        exceptions.add(e); // fix: include the cause so the CC sees why the task failed to start
        ExceptionUtils.setNodeIds(exceptions, ncs.getId());
        ncs.getWorkQueue().schedule(new NotifyTaskFailureWork(ncs, task, exceptions));
    }
}
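For contrast with the graph-backed provider above, here is a minimal sketch (not from the source) of the IRecordDescriptorProvider contract: map an (activity, port) pair to a RecordDescriptor. The single UTF8 string field is illustrative only:

IRecordDescriptorProvider fixedSchemaProvider = new IRecordDescriptorProvider() {
    // One descriptor for every port: a single UTF8 string field (illustrative choice).
    private final RecordDescriptor rDesc = new RecordDescriptor(
            new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer() });

    @Override
    public RecordDescriptor getInputRecordDescriptor(ActivityId aid, int inputIndex) {
        return rDesc;
    }

    @Override
    public RecordDescriptor getOutputRecordDescriptor(ActivityId aid, int outputIndex) {
        return rDesc;
    }
};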
Use of org.apache.hyracks.api.dataflow.value.IRecordDescriptorProvider in project asterixdb by apache.
In the class FramewriterTest, the method mockRecDescProviders:
private IRecordDescriptorProvider[] mockRecDescProviders() {
    RecordDescriptor rDesc = mockRecordDescriptor();
    IRecordDescriptorProvider rDescProvider = Mockito.mock(IRecordDescriptorProvider.class);
    Mockito.when(rDescProvider.getInputRecordDescriptor(Mockito.any(), Mockito.anyInt())).thenReturn(rDesc);
    Mockito.when(rDescProvider.getOutputRecordDescriptor(Mockito.any(), Mockito.anyInt())).thenReturn(rDesc);
    return new IRecordDescriptorProvider[] { rDescProvider };
}
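A brief sketch of what this stubbing buys: the mock answers any (activity, port) query with the same descriptor, so the tests above need no real job graph:

IRecordDescriptorProvider provider = mockRecDescProviders()[0];
// Both calls return the mocked rDesc regardless of the activity id or port index.
RecordDescriptor in = provider.getInputRecordDescriptor(new ActivityId(new OperatorDescriptorId(0), 0), 0);
RecordDescriptor out = provider.getOutputRecordDescriptor(new ActivityId(new OperatorDescriptorId(7), 1), 3);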
Use of org.apache.hyracks.api.dataflow.value.IRecordDescriptorProvider in project asterixdb by apache.
In the class SuperActivity, the method createPushRuntime:
@Override
public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
        final IRecordDescriptorProvider recordDescProvider, final int partition, final int nPartitions)
        throws HyracksDataException {
    final Map<ActivityId, IActivity> startActivities = new HashMap<>();
    Map<ActivityId, IActivity> activities = getActivityMap();
    for (Entry<ActivityId, IActivity> entry : activities.entrySet()) {
        // extract the start activities (those with no input connectors)
        List<IConnectorDescriptor> conns = getActivityInputMap().get(entry.getKey());
        if (conns == null || conns.isEmpty()) {
            startActivities.put(entry.getKey(), entry.getValue());
        }
    }
    // wrap a RecordDescriptorProvider for the super activity
    IRecordDescriptorProvider wrappedRecDescProvider = new IRecordDescriptorProvider() {

        @Override
        public RecordDescriptor getInputRecordDescriptor(ActivityId aid, int inputIndex) {
            if (startActivities.get(aid) != null) {
                // the activity is a start (input boundary) activity
                int superActivityInputChannel = SuperActivity.this.getClusterInputIndex(Pair.of(aid, inputIndex));
                if (superActivityInputChannel >= 0) {
                    return recordDescProvider.getInputRecordDescriptor(activityId, superActivityInputChannel);
                }
            }
            if (SuperActivity.this.getActivityMap().get(aid) != null) {
                // the activity is an internal activity of the super activity
                IConnectorDescriptor conn = getActivityInputMap().get(aid).get(inputIndex);
                return getConnectorRecordDescriptorMap().get(conn.getConnectorId());
            }
            // the following handles the case where the activity is in another SuperActivity
            ActivityClusterGraph acg = SuperActivity.this.getActivityClusterGraph();
            for (Entry<ActivityClusterId, ActivityCluster> entry : acg.getActivityClusterMap().entrySet()) {
                ActivityCluster ac = entry.getValue();
                for (Entry<ActivityId, IActivity> saEntry : ac.getActivityMap().entrySet()) {
                    SuperActivity sa = (SuperActivity) saEntry.getValue();
                    if (sa.getActivityMap().get(aid) != null) {
                        List<IConnectorDescriptor> conns = sa.getActivityInputMap().get(aid);
                        // fix: use > rather than >= so conns.get(inputIndex) cannot go out of bounds
                        if (conns != null && conns.size() > inputIndex) {
                            IConnectorDescriptor conn = conns.get(inputIndex);
                            return sa.getConnectorRecordDescriptorMap().get(conn.getConnectorId());
                        } else {
                            int superActivityInputChannel = sa.getClusterInputIndex(Pair.of(aid, inputIndex));
                            if (superActivityInputChannel >= 0) {
                                return recordDescProvider.getInputRecordDescriptor(sa.getActivityId(),
                                        superActivityInputChannel);
                            }
                        }
                    }
                }
            }
            return null;
        }

        @Override
        public RecordDescriptor getOutputRecordDescriptor(ActivityId aid, int outputIndex) {
            // the activity may be an output-boundary activity
            int superActivityOutputChannel = SuperActivity.this.getClusterOutputIndex(Pair.of(aid, outputIndex));
            if (superActivityOutputChannel >= 0) {
                return recordDescProvider.getOutputRecordDescriptor(activityId, superActivityOutputChannel);
            }
            if (SuperActivity.this.getActivityMap().get(aid) != null) {
                // the activity is an internal activity of the super activity
                IConnectorDescriptor conn = getActivityOutputMap().get(aid).get(outputIndex);
                return getConnectorRecordDescriptorMap().get(conn.getConnectorId());
            }
            // the following handles the case where the activity is in another SuperActivity
            ActivityClusterGraph acg = SuperActivity.this.getActivityClusterGraph();
            for (Entry<ActivityClusterId, ActivityCluster> entry : acg.getActivityClusterMap().entrySet()) {
                ActivityCluster ac = entry.getValue();
                for (Entry<ActivityId, IActivity> saEntry : ac.getActivityMap().entrySet()) {
                    SuperActivity sa = (SuperActivity) saEntry.getValue();
                    if (sa.getActivityMap().get(aid) != null) {
                        List<IConnectorDescriptor> conns = sa.getActivityOutputMap().get(aid);
                        // fix: use > rather than >= so conns.get(outputIndex) cannot go out of bounds
                        if (conns != null && conns.size() > outputIndex) {
                            IConnectorDescriptor conn = conns.get(outputIndex);
                            return sa.getConnectorRecordDescriptorMap().get(conn.getConnectorId());
                        } else {
                            superActivityOutputChannel = sa.getClusterOutputIndex(Pair.of(aid, outputIndex));
                            if (superActivityOutputChannel >= 0) {
                                return recordDescProvider.getOutputRecordDescriptor(sa.getActivityId(),
                                        superActivityOutputChannel);
                            }
                        }
                    }
                }
            }
            return null;
        }
    };
    return new SuperActivityOperatorNodePushable(this, startActivities, ctx, wrappedRecDescProvider, partition,
            nPartitions);
}
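In short, the wrapped provider resolves a descriptor in three steps: if the (activity, port) pair maps to a boundary channel of this super activity, it delegates to the outer recordDescProvider; if the activity is internal, it reads the descriptor off the local connector map; otherwise it scans the other super activities in the cluster graph and repeats the same lookup there. A hypothetical illustration, assuming wrappedRecDescProvider were visible to the caller:

ActivityId someAid = new ActivityId(new OperatorDescriptorId(1), 0); // hypothetical ids
RecordDescriptor rd = wrappedRecDescProvider.getInputRecordDescriptor(someAid, 0);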