Use of org.apache.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor in the Apache AsterixDB project.
Method createAssignOp of class SecondaryIndexOperationsHelper.
/**
 * Builds an ASSIGN operator that evaluates the secondary-key (and optional filter)
 * field-access expressions and projects the tuple into the secondary-index layout:
 * [secondary keys..., primary keys..., filter field (at most one, if present)].
 *
 * @param spec                  the job specification the operator is added to
 * @param numSecondaryKeyFields number of secondary-key fields produced by the assign
 * @param secondaryRecDesc      record descriptor of the assign operator's output
 * @return the assign operator, partitioned like the primary index
 * @throws AlgebricksException if the operator cannot be constructed
 */
protected AlgebricksMetaOperatorDescriptor createAssignOp(JobSpecification spec, int numSecondaryKeyFields, RecordDescriptor secondaryRecDesc) throws AlgebricksException {
    // Evaluated columns are appended after the incoming primary-key columns.
    int[] outColumns = new int[numSecondaryKeyFields + numFilterFields];
    int[] projectionList = new int[numSecondaryKeyFields + numPrimaryKeys + numFilterFields];
    for (int i = 0; i < numSecondaryKeyFields + numFilterFields; i++) {
        outColumns[i] = numPrimaryKeys + i;
    }
    // Projection reorders the tuple: secondary keys first, then primary keys.
    int projCount = 0;
    for (int i = 0; i < numSecondaryKeyFields; i++) {
        projectionList[projCount++] = numPrimaryKeys + i;
    }
    for (int i = 0; i < numPrimaryKeys; i++) {
        projectionList[projCount++] = i;
    }
    if (numFilterFields > 0) {
        // At most one filter field is supported; it goes in the last projection slot.
        projectionList[projCount] = numPrimaryKeys + numSecondaryKeyFields;
    }
    // clone() replaces the original element-by-element copy loop; it yields an
    // identical shallow copy of the factory array.
    IScalarEvaluatorFactory[] sefs = secondaryFieldAccessEvalFactories.clone();
    AssignRuntimeFactory assign = new AssignRuntimeFactory(outColumns, sefs, projectionList);
    AlgebricksMetaOperatorDescriptor asterixAssignOp = new AlgebricksMetaOperatorDescriptor(spec, 1, 1, new IPushRuntimeFactory[] { assign }, new RecordDescriptor[] { secondaryRecDesc });
    // Keep the assign co-partitioned with the primary index it scans.
    AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, asterixAssignOp, primaryPartitionConstraint);
    return asterixAssignOp;
}
Use of org.apache.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor in the Apache AsterixDB project.
Method createCastOp of class SecondaryIndexOperationsHelper.
/**
 * Builds an ASSIGN operator that casts the scanned record to the enforced
 * (open-type) schema, replacing the record column in place while keeping the
 * primary-key columns (and the meta record, if the dataset has one).
 *
 * @param spec   the job specification the operator belongs to
 * @param dsType dataset type; external scans emit the record first, internal
 *               scans emit it after the primary keys
 * @return the cast operator (not yet partition-constrained)
 */
protected AlgebricksMetaOperatorDescriptor createCastOp(JobSpecification spec, DatasetType dsType) {
    CastTypeDescriptor castFuncDesc = (CastTypeDescriptor) CastTypeDescriptor.FACTORY.createFunctionDescriptor();
    castFuncDesc.setImmutableStates(enforcedItemType, itemType);
    // External datascan returns the record as the first field; internal scans
    // place it after the primary keys. The cast output overwrites that same column.
    final int recordIdx = dsType == DatasetType.EXTERNAL ? 0 : numPrimaryKeys;
    int[] outColumns = { recordIdx };
    // The projection is an identity map over every incoming column:
    // primary keys + record (+ meta record when present).
    final int projSize = numPrimaryKeys + (dataset.hasMetaPart() ? 2 : 1);
    int[] projectionList = new int[projSize];
    for (int col = 0; col < projSize; col++) {
        projectionList[col] = col;
    }
    IScalarEvaluatorFactory[] castArgs = { new ColumnAccessEvalFactory(recordIdx) };
    IScalarEvaluatorFactory[] sefs = { castFuncDesc.createEvaluatorFactory(castArgs) };
    AssignRuntimeFactory castAssign = new AssignRuntimeFactory(outColumns, sefs, projectionList);
    return new AlgebricksMetaOperatorDescriptor(spec, 1, 1, new IPushRuntimeFactory[] { castAssign }, new RecordDescriptor[] { enforcedRecDesc });
}
Use of org.apache.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor in the Apache AsterixDB project.
Method flushDataset of class FlushDatasetUtil.
/**
 * Builds and synchronously runs a tiny Hyracks job that flushes a dataset's
 * in-memory component: an empty-tuple source feeding a flush operator,
 * partitioned like the given index.
 *
 * @param hcc              client connection used to submit the job
 * @param metadataProvider provides compiler config and dataset metadata
 * @param dataverseName    dataverse of the dataset to flush
 * @param datasetName      name of the dataset to flush
 * @param indexName        index whose partition constraint drives placement
 * @throws Exception if metadata lookup or job execution fails
 */
public static void flushDataset(IHyracksClientConnection hcc, MetadataProvider metadataProvider, String dataverseName, String datasetName, String indexName) throws Exception {
    final int frameSize = metadataProvider.getApplicationContext().getCompilerProperties().getFrameSize();
    JobSpecification spec = new JobSpecification(frameSize);
    // The flush needs no input data: a zero-input source emitting empty tuples.
    RecordDescriptor[] emptyRecDescs = { new RecordDescriptor(new ISerializerDeserializer[] {}) };
    AlgebricksMetaOperatorDescriptor sourceOp = new AlgebricksMetaOperatorDescriptor(spec, 0, 1, new IPushRuntimeFactory[] { new EmptyTupleSourceRuntimeFactory() }, emptyRecDescs);
    // Allocate a transaction job id so the flush participates in the txn subsystem.
    org.apache.asterix.common.transactions.JobId txnJobId = JobIdFactory.generateJobId();
    Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
    FlushDatasetOperatorDescriptor flushOp = new FlushDatasetOperatorDescriptor(spec, txnJobId, dataset.getDatasetId());
    spec.connect(new OneToOneConnectorDescriptor(spec), sourceOp, 0, flushOp, 0);
    // Run the source on the same partitions that host the index, so every
    // partition of the dataset gets flushed.
    Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint = metadataProvider.getSplitProviderAndConstraints(dataset, indexName);
    AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, sourceOp, splitsAndConstraint.second);
    // 'true' commits the transaction when the job completes.
    spec.setJobletEventListenerFactory(new JobEventListenerFactory(txnJobId, true));
    JobUtils.runJob(hcc, spec, true);
}
Use of org.apache.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor in the Apache AsterixDB project.
Method buildAsterixComponents of class JobBuilder.
/**
 * Groups the micro operators into meta-runtime skeletons, then materializes
 * each skeleton as an AlgebricksMetaOperatorDescriptor keyed by its id.
 */
private void buildAsterixComponents() {
    for (ILogicalOperator aop : microOps.keySet()) {
        addMicroOpToMetaRuntimeOp(aop);
    }
    // Iterate entries directly instead of keySet()+get(): one map lookup per
    // skeleton rather than two.
    for (java.util.Map.Entry<Integer, List<Pair<IPushRuntimeFactory, RecordDescriptor>>> entry : metaAsterixOpSkeletons.entrySet()) {
        metaAsterixOps.put(entry.getKey(), buildMetaAsterixOpDesc(entry.getValue()));
    }
}
Use of org.apache.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor in the Apache AsterixDB project.
Method buildMetaAsterixOpDesc of class JobBuilder.
/**
 * Materializes one meta operator from an ordered pipeline of push-runtime
 * factories and their record descriptors. The operator's input arity comes
 * from the in-edges of the pipeline's first logical operator, and its output
 * arity from the out-edges of the last.
 *
 * @param opContents ordered (factory, record descriptor) pairs forming the pipeline
 * @return the assembled meta operator descriptor
 */
private AlgebricksMetaOperatorDescriptor buildMetaAsterixOpDesc(List<Pair<IPushRuntimeFactory, RecordDescriptor>> opContents) {
    final int count = opContents.size();
    IPushRuntimeFactory[] factories = new IPushRuntimeFactory[count];
    RecordDescriptor[] recDescs = new RecordDescriptor[count];
    for (int idx = 0; idx < count; idx++) {
        Pair<IPushRuntimeFactory, RecordDescriptor> entry = opContents.get(idx);
        factories[idx] = entry.first;
        recDescs[idx] = entry.second;
    }
    // Output arity: out-edges of the logical operator behind the last factory.
    ArrayList<ILogicalOperator> outOps = outEdges.get(revMicroOpMap.get(factories[count - 1]));
    int outArity = outOps == null ? 0 : outOps.size();
    // Input arity: in-edges of the logical operator behind the first factory.
    ArrayList<ILogicalOperator> inOps = inEdges.get(revMicroOpMap.get(factories[0]));
    int inArity = inOps == null ? 0 : inOps.size();
    return new AlgebricksMetaOperatorDescriptor(jobSpec, inArity, outArity, factories, recDescs);
}
Aggregations