
Example 46 with IBinaryComparatorFactory

use of org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory in project asterixdb by apache.

the class AbstractMinMaxAggregateFunction method step.

@Override
public void step(IFrameTupleReference tuple) throws HyracksDataException {
    if (skipStep()) {
        return;
    }
    eval.evaluate(tuple, inputVal);
    ATypeTag typeTag = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(inputVal.getByteArray()[inputVal.getStartOffset()]);
    if (typeTag == ATypeTag.MISSING || typeTag == ATypeTag.NULL) {
        processNull();
        return;
    } else if (aggType == ATypeTag.SYSTEM_NULL) {
        if (typeTag == ATypeTag.SYSTEM_NULL) {
            // Ignore.
            return;
        }
        // First value encountered. Set type, comparator, and initial value.
        aggType = typeTag;
        // Set comparator.
        IBinaryComparatorFactory cmpFactory = BinaryComparatorFactoryProvider.INSTANCE.getBinaryComparatorFactory(aggType, isMin);
        cmp = cmpFactory.createBinaryComparator();
        // Initialize min value.
        outputVal.assign(inputVal);
    } else if (typeTag != ATypeTag.SYSTEM_NULL && !ATypeHierarchy.isCompatible(typeTag, aggType)) {
        throw new IncompatibleTypeException("min/max", typeTag.serialize(), aggType.serialize());
    } else {
        // If a SYSTEM_NULL is encountered locally, it is an error; if it is seen
        // by a global aggregator, it is simply ignored.
        if (typeTag == ATypeTag.SYSTEM_NULL) {
            processSystemNull();
            return;
        }
        if (ATypeHierarchy.canPromote(aggType, typeTag)) {
            tpc = ATypeHierarchy.getTypePromoteComputer(aggType, typeTag);
            aggType = typeTag;
            cmp = BinaryComparatorFactoryProvider.INSTANCE.getBinaryComparatorFactory(aggType, isMin).createBinaryComparator();
            if (tpc != null) {
                tempValForCasting.reset();
                try {
                    tpc.convertType(outputVal.getByteArray(), outputVal.getStartOffset() + 1, outputVal.getLength() - 1, tempValForCasting.getDataOutput());
                } catch (IOException e) {
                    throw new HyracksDataException(e);
                }
                outputVal.assign(tempValForCasting);
            }
            if (cmp.compare(inputVal.getByteArray(), inputVal.getStartOffset(), inputVal.getLength(), outputVal.getByteArray(), outputVal.getStartOffset(), outputVal.getLength()) < 0) {
                outputVal.assign(inputVal);
            }
        } else {
            tpc = ATypeHierarchy.getTypePromoteComputer(typeTag, aggType);
            if (tpc != null) {
                tempValForCasting.reset();
                try {
                    tpc.convertType(inputVal.getByteArray(), inputVal.getStartOffset() + 1, inputVal.getLength() - 1, tempValForCasting.getDataOutput());
                } catch (IOException e) {
                    throw new HyracksDataException(e);
                }
                if (cmp.compare(tempValForCasting.getByteArray(), tempValForCasting.getStartOffset(), tempValForCasting.getLength(), outputVal.getByteArray(), outputVal.getStartOffset(), outputVal.getLength()) < 0) {
                    outputVal.assign(tempValForCasting);
                }
            } else {
                if (cmp.compare(inputVal.getByteArray(), inputVal.getStartOffset(), inputVal.getLength(), outputVal.getByteArray(), outputVal.getStartOffset(), outputVal.getLength()) < 0) {
                    outputVal.assign(inputVal);
                }
            }
        }
    }
}
Also used : ATypeTag(org.apache.asterix.om.types.ATypeTag) IBinaryComparatorFactory(org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory) IncompatibleTypeException(org.apache.asterix.runtime.exceptions.IncompatibleTypeException) IOException(java.io.IOException) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException)
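For reference, a minimal, hypothetical sketch (not from the project; it assumes the hyracks-data-std module is on the classpath, and the class name is made up) of the IBinaryComparatorFactory contract that step() relies on: the factory is serializable job metadata, and createBinaryComparator() yields a per-task comparator that works directly on serialized (byte[], start, length) ranges, the same call shape as the cmp.compare(...) calls above.

import org.apache.hyracks.api.dataflow.value.IBinaryComparator;
import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
import org.apache.hyracks.data.std.primitive.IntegerPointable;

public class ComparatorFactorySketch {
    public static void main(String[] args) throws HyracksDataException {
        // Build a factory for 4-byte big-endian integers; in a real job this ships to each task.
        IBinaryComparatorFactory factory = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
        IBinaryComparator cmp = factory.createBinaryComparator();
        byte[] a = { 0, 0, 0, 5 }; // serialized int 5
        byte[] b = { 0, 0, 0, 9 }; // serialized int 9
        // compare() takes (byte[], start, length) triples, exactly as in step() above.
        System.out.println(cmp.compare(a, 0, a.length, b, 0, b.length) < 0); // prints true
    }
}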

Example 47 with IBinaryComparatorFactory

use of org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory in project asterixdb by apache.

the class LSMInvertedIndexTestUtils method compareActualAndExpectedIndexesRangeSearch.

/**
 * Compares actual and expected indexes using the rangeSearch() method of the inverted-index accessor.
 */
public static void compareActualAndExpectedIndexesRangeSearch(LSMInvertedIndexTestContext testCtx) throws HyracksDataException {
    IInvertedIndex invIndex = (IInvertedIndex) testCtx.getIndex();
    int tokenFieldCount = invIndex.getTokenTypeTraits().length;
    int invListFieldCount = invIndex.getInvListTypeTraits().length;
    IInvertedIndexAccessor invIndexAccessor = (IInvertedIndexAccessor) invIndex.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
    IIndexCursor invIndexCursor = invIndexAccessor.createRangeSearchCursor();
    MultiComparator tokenCmp = MultiComparator.create(invIndex.getTokenCmpFactories());
    IBinaryComparatorFactory[] tupleCmpFactories = new IBinaryComparatorFactory[tokenFieldCount + invListFieldCount];
    for (int i = 0; i < tokenFieldCount; i++) {
        tupleCmpFactories[i] = invIndex.getTokenCmpFactories()[i];
    }
    for (int i = 0; i < invListFieldCount; i++) {
        tupleCmpFactories[tokenFieldCount + i] = invIndex.getInvListCmpFactories()[i];
    }
    MultiComparator tupleCmp = MultiComparator.create(tupleCmpFactories);
    RangePredicate nullPred = new RangePredicate(null, null, true, true, tokenCmp, tokenCmp);
    invIndexAccessor.rangeSearch(invIndexCursor, nullPred);
    // Helpers for generating a serialized inverted-list element from a CheckTuple from the expected index.
    ISerializerDeserializer[] fieldSerdes = testCtx.getFieldSerdes();
    ArrayTupleBuilder expectedBuilder = new ArrayTupleBuilder(fieldSerdes.length);
    ArrayTupleReference expectedTuple = new ArrayTupleReference();
    Iterator<CheckTuple> expectedIter = testCtx.getCheckTuples().iterator();
    // Compare index elements.
    try {
        while (invIndexCursor.hasNext() && expectedIter.hasNext()) {
            invIndexCursor.next();
            ITupleReference actualTuple = invIndexCursor.getTuple();
            CheckTuple expected = expectedIter.next();
            OrderedIndexTestUtils.createTupleFromCheckTuple(expected, expectedBuilder, expectedTuple, fieldSerdes);
            if (tupleCmp.compare(actualTuple, expectedTuple) != 0) {
                fail("Index entries differ for token '" + expected.getField(0) + "'.");
            }
        }
        if (expectedIter.hasNext()) {
            fail("Indexes do not match. Actual index is missing entries.");
        }
        if (invIndexCursor.hasNext()) {
            fail("Indexes do not match. Actual index contains too many entries.");
        }
    } finally {
        invIndexCursor.close();
    }
}
Also used : RangePredicate(org.apache.hyracks.storage.am.btree.impls.RangePredicate) MultiComparator(org.apache.hyracks.storage.common.MultiComparator) ArrayTupleReference(org.apache.hyracks.dataflow.common.comm.io.ArrayTupleReference) IBinaryComparatorFactory(org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory) IInvertedIndexAccessor(org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexAccessor) ArrayTupleBuilder(org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder) ISerializerDeserializer(org.apache.hyracks.api.dataflow.value.ISerializerDeserializer) CheckTuple(org.apache.hyracks.storage.am.common.CheckTuple) ITupleReference(org.apache.hyracks.dataflow.common.data.accessors.ITupleReference) IIndexCursor(org.apache.hyracks.storage.common.IIndexCursor) IInvertedIndex(org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndex)
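A minimal, hypothetical sketch of the MultiComparator pattern used above (assuming the hyracks-dataflow-common and hyracks-data-std dependencies; the single-int-field tuples and class name are made up for illustration): build the comparator once from a per-field IBinaryComparatorFactory array, then compare whole tuples with the same call shape as tupleCmp.compare(actualTuple, expectedTuple).

import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
import org.apache.hyracks.data.std.primitive.IntegerPointable;
import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleReference;
import org.apache.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
import org.apache.hyracks.storage.common.MultiComparator;

public class MultiComparatorSketch {
    // Build a one-field tuple holding a serialized int.
    private static ArrayTupleReference intTuple(int value) throws HyracksDataException {
        ArrayTupleBuilder tb = new ArrayTupleBuilder(1);
        tb.reset();
        IntegerSerializerDeserializer.INSTANCE.serialize(value, tb.getDataOutput());
        tb.addFieldEndOffset();
        ArrayTupleReference tuple = new ArrayTupleReference();
        tuple.reset(tb.getFieldEndOffsets(), tb.getByteArray());
        return tuple;
    }

    public static void main(String[] args) throws HyracksDataException {
        IBinaryComparatorFactory[] cmpFactories =
                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY) };
        MultiComparator tupleCmp = MultiComparator.create(cmpFactories);
        // Negative result => first tuple sorts earlier, mirroring the equality check in the test above.
        System.out.println(tupleCmp.compare(intTuple(1), intTuple(2)));
    }
}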

Example 48 with IBinaryComparatorFactory

use of org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory in project asterixdb by apache.

the class SortGroupByPOperator method contributeRuntimeOperator.

@Override
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op, IOperatorSchema opSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema) throws AlgebricksException {
    List<LogicalVariable> gbyCols = getGbyColumns();
    int[] keys = JobGenHelper.variablesToFieldIndexes(gbyCols, inputSchemas[0]);
    GroupByOperator gby = (GroupByOperator) op;
    int numFds = gby.getDecorList().size();
    int[] fdColumns = new int[numFds];
    int j = 0;
    for (Pair<LogicalVariable, Mutable<ILogicalExpression>> p : gby.getDecorList()) {
        ILogicalExpression expr = p.second.getValue();
        if (expr.getExpressionTag() != LogicalExpressionTag.VARIABLE) {
            throw new AlgebricksException("Sort group-by expects variable references.");
        }
        VariableReferenceExpression v = (VariableReferenceExpression) expr;
        LogicalVariable decor = v.getVariableReference();
        fdColumns[j++] = inputSchemas[0].findVariable(decor);
    }
    if (gby.getNestedPlans().size() != 1) {
        throw new AlgebricksException("Sort group-by currently works only for one nested plan with one root containing" + "an aggregate and a nested-tuple-source.");
    }
    ILogicalPlan p0 = gby.getNestedPlans().get(0);
    if (p0.getRoots().size() != 1) {
        throw new AlgebricksException("Sort group-by currently works only for one nested plan with one root containing" + "an aggregate and a nested-tuple-source.");
    }
    Mutable<ILogicalOperator> r0 = p0.getRoots().get(0);
    AggregateOperator aggOp = (AggregateOperator) r0.getValue();
    IPartialAggregationTypeComputer partialAggregationTypeComputer = context.getPartialAggregationTypeComputer();
    List<Object> intermediateTypes = new ArrayList<Object>();
    int n = aggOp.getExpressions().size();
    IAggregateEvaluatorFactory[] aff = new IAggregateEvaluatorFactory[n];
    int i = 0;
    IExpressionRuntimeProvider expressionRuntimeProvider = context.getExpressionRuntimeProvider();
    IVariableTypeEnvironment aggOpInputEnv = context.getTypeEnvironment(aggOp.getInputs().get(0).getValue());
    IVariableTypeEnvironment outputEnv = context.getTypeEnvironment(op);
    for (Mutable<ILogicalExpression> exprRef : aggOp.getExpressions()) {
        AggregateFunctionCallExpression aggFun = (AggregateFunctionCallExpression) exprRef.getValue();
        aff[i++] = expressionRuntimeProvider.createAggregateFunctionFactory(aggFun, aggOpInputEnv, inputSchemas, context);
        intermediateTypes.add(partialAggregationTypeComputer.getType(aggFun, aggOpInputEnv, context.getMetadataProvider()));
    }
    int[] keyAndDecFields = new int[keys.length + fdColumns.length];
    for (i = 0; i < keys.length; ++i) {
        keyAndDecFields[i] = keys[i];
    }
    for (i = 0; i < fdColumns.length; i++) {
        keyAndDecFields[keys.length + i] = fdColumns[i];
    }
    List<LogicalVariable> keyAndDecVariables = new ArrayList<LogicalVariable>();
    for (Pair<LogicalVariable, Mutable<ILogicalExpression>> p : gby.getGroupByList()) {
        keyAndDecVariables.add(p.first);
    }
    for (Pair<LogicalVariable, Mutable<ILogicalExpression>> p : gby.getDecorList()) {
        keyAndDecVariables.add(GroupByOperator.getDecorVariable(p));
    }
    for (LogicalVariable var : keyAndDecVariables) {
        aggOpInputEnv.setVarType(var, outputEnv.getVarType(var));
    }
    compileSubplans(inputSchemas[0], gby, opSchema, context);
    IOperatorDescriptorRegistry spec = builder.getJobSpec();
    IBinaryComparatorFactory[] compFactories = new IBinaryComparatorFactory[gbyCols.size()];
    IBinaryComparatorFactoryProvider bcfProvider = context.getBinaryComparatorFactoryProvider();
    i = 0;
    for (LogicalVariable v : gbyCols) {
        Object type = aggOpInputEnv.getVarType(v);
        if (orderColumns[i].getOrder() == OrderKind.ASC) {
            compFactories[i] = bcfProvider.getBinaryComparatorFactory(type, true);
        } else {
            compFactories[i] = bcfProvider.getBinaryComparatorFactory(type, false);
        }
        i++;
    }
    RecordDescriptor recordDescriptor = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), opSchema, context);
    IAggregateEvaluatorFactory[] merges = new IAggregateEvaluatorFactory[n];
    List<LogicalVariable> usedVars = new ArrayList<LogicalVariable>();
    IOperatorSchema[] localInputSchemas = new IOperatorSchema[1];
    localInputSchemas[0] = new OperatorSchemaImpl();
    for (i = 0; i < n; i++) {
        AggregateFunctionCallExpression aggFun = (AggregateFunctionCallExpression) aggOp.getMergeExpressions().get(i).getValue();
        aggFun.getUsedVariables(usedVars);
    }
    i = 0;
    for (Object type : intermediateTypes) {
        aggOpInputEnv.setVarType(usedVars.get(i++), type);
    }
    for (LogicalVariable keyVar : keyAndDecVariables) {
        localInputSchemas[0].addVariable(keyVar);
    }
    for (LogicalVariable usedVar : usedVars) {
        localInputSchemas[0].addVariable(usedVar);
    }
    for (i = 0; i < n; i++) {
        AggregateFunctionCallExpression mergeFun = (AggregateFunctionCallExpression) aggOp.getMergeExpressions().get(i).getValue();
        merges[i] = expressionRuntimeProvider.createAggregateFunctionFactory(mergeFun, aggOpInputEnv, localInputSchemas, context);
    }
    RecordDescriptor partialAggRecordDescriptor = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), localInputSchemas[0], context);
    IAggregatorDescriptorFactory aggregatorFactory = new SimpleAlgebricksAccumulatingAggregatorFactory(aff, keyAndDecFields);
    IAggregatorDescriptorFactory mergeFactory = new SimpleAlgebricksAccumulatingAggregatorFactory(merges, keyAndDecFields);
    INormalizedKeyComputerFactory normalizedKeyFactory = null;
    INormalizedKeyComputerFactoryProvider nkcfProvider = context.getNormalizedKeyComputerFactoryProvider();
    // Only build a normalized-key computer when a provider is available; otherwise leave it null.
    if (nkcfProvider != null) {
        Object type = aggOpInputEnv.getVarType(gbyCols.get(0));
        normalizedKeyFactory = orderColumns[0].getOrder() == OrderKind.ASC
                ? nkcfProvider.getNormalizedKeyComputerFactory(type, true)
                : nkcfProvider.getNormalizedKeyComputerFactory(type, false);
    }
    SortGroupByOperatorDescriptor gbyOpDesc = new SortGroupByOperatorDescriptor(spec, frameLimit, keys, keyAndDecFields, normalizedKeyFactory, compFactories, aggregatorFactory, mergeFactory, partialAggRecordDescriptor, recordDescriptor, false);
    contributeOpDesc(builder, gby, gbyOpDesc);
    ILogicalOperator src = op.getInputs().get(0).getValue();
    builder.contributeGraphEdge(src, 0, op, 0);
}
Also used : RecordDescriptor(org.apache.hyracks.api.dataflow.value.RecordDescriptor) IOperatorSchema(org.apache.hyracks.algebricks.core.algebra.operators.logical.IOperatorSchema) ArrayList(java.util.ArrayList) SimpleAlgebricksAccumulatingAggregatorFactory(org.apache.hyracks.algebricks.runtime.operators.aggreg.SimpleAlgebricksAccumulatingAggregatorFactory) IAggregateEvaluatorFactory(org.apache.hyracks.algebricks.runtime.base.IAggregateEvaluatorFactory) IBinaryComparatorFactoryProvider(org.apache.hyracks.algebricks.data.IBinaryComparatorFactoryProvider) IExpressionRuntimeProvider(org.apache.hyracks.algebricks.core.algebra.expressions.IExpressionRuntimeProvider) AggregateOperator(org.apache.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator) IPartialAggregationTypeComputer(org.apache.hyracks.algebricks.core.algebra.expressions.IPartialAggregationTypeComputer) LogicalVariable(org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable) AggregateFunctionCallExpression(org.apache.hyracks.algebricks.core.algebra.expressions.AggregateFunctionCallExpression) GroupByOperator(org.apache.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator) ILogicalOperator(org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) IBinaryComparatorFactory(org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory) IOperatorDescriptorRegistry(org.apache.hyracks.api.job.IOperatorDescriptorRegistry) OperatorSchemaImpl(org.apache.hyracks.algebricks.core.jobgen.impl.OperatorSchemaImpl) IAggregatorDescriptorFactory(org.apache.hyracks.dataflow.std.group.IAggregatorDescriptorFactory) Mutable(org.apache.commons.lang3.mutable.Mutable) ILogicalExpression(org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression) INormalizedKeyComputerFactory(org.apache.hyracks.api.dataflow.value.INormalizedKeyComputerFactory) INormalizedKeyComputerFactoryProvider(org.apache.hyracks.algebricks.data.INormalizedKeyComputerFactoryProvider) VariableReferenceExpression(org.apache.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression) ILogicalPlan(org.apache.hyracks.algebricks.core.algebra.base.ILogicalPlan) IVariableTypeEnvironment(org.apache.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment) SortGroupByOperatorDescriptor(org.apache.hyracks.dataflow.std.group.sort.SortGroupByOperatorDescriptor)

Example 49 with IBinaryComparatorFactory

use of org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory in project asterixdb by apache.

the class SortMergeExchangePOperator method createConnectorDescriptor.

@Override
public Pair<IConnectorDescriptor, TargetConstraint> createConnectorDescriptor(IConnectorDescriptorRegistry spec, ILogicalOperator op, IOperatorSchema opSchema, JobGenContext context) throws AlgebricksException {
    int n = sortColumns.length;
    int[] sortFields = new int[n];
    IBinaryComparatorFactory[] comps = new IBinaryComparatorFactory[n];
    IBinaryHashFunctionFactory[] hashFuns = new IBinaryHashFunctionFactory[n];
    IVariableTypeEnvironment env = context.getTypeEnvironment(op);
    INormalizedKeyComputerFactoryProvider nkcfProvider = context.getNormalizedKeyComputerFactoryProvider();
    INormalizedKeyComputerFactory nkcf = null;
    for (int i = 0; i < n; i++) {
        sortFields[i] = opSchema.findVariable(sortColumns[i].getColumn());
        Object type = env.getVarType(sortColumns[i].getColumn());
        IBinaryComparatorFactoryProvider bcfp = context.getBinaryComparatorFactoryProvider();
        comps[i] = bcfp.getBinaryComparatorFactory(type, sortColumns[i].getOrder() == OrderKind.ASC);
        IBinaryHashFunctionFactoryProvider bhffp = context.getBinaryHashFunctionFactoryProvider();
        hashFuns[i] = bhffp.getBinaryHashFunctionFactory(type);
        if (i == 0 && nkcfProvider != null && type != null) {
            nkcf = nkcfProvider.getNormalizedKeyComputerFactory(type, sortColumns[i].getOrder() == OrderKind.ASC);
        }
    }
    ITuplePartitionComputerFactory tpcf = new FieldHashPartitionComputerFactory(sortFields, hashFuns);
    IConnectorDescriptor conn = new MToNPartitioningMergingConnectorDescriptor(spec, tpcf, sortFields, comps, nkcf);
    return new Pair<IConnectorDescriptor, TargetConstraint>(conn, TargetConstraint.ONE);
}
Also used : ITuplePartitionComputerFactory(org.apache.hyracks.api.dataflow.value.ITuplePartitionComputerFactory) IConnectorDescriptor(org.apache.hyracks.api.dataflow.IConnectorDescriptor) IBinaryHashFunctionFactoryProvider(org.apache.hyracks.algebricks.data.IBinaryHashFunctionFactoryProvider) MToNPartitioningMergingConnectorDescriptor(org.apache.hyracks.dataflow.std.connectors.MToNPartitioningMergingConnectorDescriptor) IBinaryComparatorFactory(org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory) IBinaryHashFunctionFactory(org.apache.hyracks.api.dataflow.value.IBinaryHashFunctionFactory) IBinaryComparatorFactoryProvider(org.apache.hyracks.algebricks.data.IBinaryComparatorFactoryProvider) FieldHashPartitionComputerFactory(org.apache.hyracks.dataflow.common.data.partition.FieldHashPartitionComputerFactory) TargetConstraint(org.apache.hyracks.algebricks.core.algebra.base.IHyracksJobBuilder.TargetConstraint) INormalizedKeyComputerFactory(org.apache.hyracks.api.dataflow.value.INormalizedKeyComputerFactory) INormalizedKeyComputerFactoryProvider(org.apache.hyracks.algebricks.data.INormalizedKeyComputerFactoryProvider) IVariableTypeEnvironment(org.apache.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment) Pair(org.apache.hyracks.algebricks.common.utils.Pair)

Example 50 with IBinaryComparatorFactory

use of org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory in project asterixdb by apache.

the class JobGenHelper method variablesToAscBinaryComparatorFactories.

public static IBinaryComparatorFactory[] variablesToAscBinaryComparatorFactories(List<LogicalVariable> varLogical, int start, int size, IVariableTypeEnvironment env, JobGenContext context) throws AlgebricksException {
    IBinaryComparatorFactory[] compFactories = new IBinaryComparatorFactory[size];
    IBinaryComparatorFactoryProvider bcfProvider = context.getBinaryComparatorFactoryProvider();
    for (int i = 0; i < size; i++) {
        Object type = env.getVarType(varLogical.get(start + i));
        compFactories[i] = bcfProvider.getBinaryComparatorFactory(type, true);
    }
    return compFactories;
}
Also used : IBinaryComparatorFactory(org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory) IBinaryComparatorFactoryProvider(org.apache.hyracks.algebricks.data.IBinaryComparatorFactoryProvider)
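The factories returned by the providers in Examples 48 through 50 all implement the same small interface. Below is a hypothetical, hand-rolled implementation (not part of Hyracks or AsterixDB; the descending-int behavior and class name are purely illustrative) that makes the contract explicit: the factory is Serializable job metadata, and createBinaryComparator() produces the per-task comparator over serialized (byte[], start, length) ranges.

import org.apache.hyracks.api.dataflow.value.IBinaryComparator;
import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;

public class DescendingIntBinaryComparatorFactory implements IBinaryComparatorFactory {
    private static final long serialVersionUID = 1L;

    @Override
    public IBinaryComparator createBinaryComparator() {
        return new IBinaryComparator() {
            @Override
            public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
                // Read 4-byte big-endian ints and reverse the natural order (descending).
                int v1 = ((b1[s1] & 0xff) << 24) | ((b1[s1 + 1] & 0xff) << 16)
                        | ((b1[s1 + 2] & 0xff) << 8) | (b1[s1 + 3] & 0xff);
                int v2 = ((b2[s2] & 0xff) << 24) | ((b2[s2 + 1] & 0xff) << 16)
                        | ((b2[s2 + 2] & 0xff) << 8) | (b2[s2 + 3] & 0xff);
                return Integer.compare(v2, v1);
            }
        };
    }
}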

Aggregations

IBinaryComparatorFactory (org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory) 86
ITypeTraits (org.apache.hyracks.api.dataflow.value.ITypeTraits) 45
RecordDescriptor (org.apache.hyracks.api.dataflow.value.RecordDescriptor) 25
ArrayTupleBuilder (org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder) 25
Test (org.junit.Test) 25
ISerializerDeserializer (org.apache.hyracks.api.dataflow.value.ISerializerDeserializer) 24
ArrayTupleReference (org.apache.hyracks.dataflow.common.comm.io.ArrayTupleReference) 22
HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException) 19
UTF8StringSerializerDeserializer (org.apache.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer) 19
ITreeIndex (org.apache.hyracks.storage.am.common.api.ITreeIndex) 17
IIndexAccessor (org.apache.hyracks.storage.common.IIndexAccessor) 16
JobSpecification (org.apache.hyracks.api.job.JobSpecification) 15
OneToOneConnectorDescriptor (org.apache.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor) 15
IBinaryComparatorFactoryProvider (org.apache.hyracks.algebricks.data.IBinaryComparatorFactoryProvider) 14
FieldHashPartitionComputerFactory (org.apache.hyracks.dataflow.common.data.partition.FieldHashPartitionComputerFactory) 14
IFileSplitProvider (org.apache.hyracks.dataflow.std.file.IFileSplitProvider) 13
LogicalVariable (org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable) 11
IVariableTypeEnvironment (org.apache.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment) 11
MToNPartitioningMergingConnectorDescriptor (org.apache.hyracks.dataflow.std.connectors.MToNPartitioningMergingConnectorDescriptor) 10
IPrimitiveValueProviderFactory (org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory) 10