
Example 6 with INormalizedKeyComputerFactoryProvider

Use of org.apache.hyracks.algebricks.data.INormalizedKeyComputerFactoryProvider in project asterixdb by apache.

From the class SortMergeExchangePOperator, method createConnectorDescriptor:

@Override
public Pair<IConnectorDescriptor, TargetConstraint> createConnectorDescriptor(IConnectorDescriptorRegistry spec, ILogicalOperator op, IOperatorSchema opSchema, JobGenContext context) throws AlgebricksException {
    int n = sortColumns.length;
    int[] sortFields = new int[n];
    IBinaryComparatorFactory[] comps = new IBinaryComparatorFactory[n];
    IBinaryHashFunctionFactory[] hashFuns = new IBinaryHashFunctionFactory[n];
    IVariableTypeEnvironment env = context.getTypeEnvironment(op);
    INormalizedKeyComputerFactoryProvider nkcfProvider = context.getNormalizedKeyComputerFactoryProvider();
    INormalizedKeyComputerFactory nkcf = null;
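    // Resolve each sort column's schema position, comparator factory, and hash function factory.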
    for (int i = 0; i < n; i++) {
        sortFields[i] = opSchema.findVariable(sortColumns[i].getColumn());
        Object type = env.getVarType(sortColumns[i].getColumn());
        IBinaryComparatorFactoryProvider bcfp = context.getBinaryComparatorFactoryProvider();
        comps[i] = bcfp.getBinaryComparatorFactory(type, sortColumns[i].getOrder() == OrderKind.ASC);
        IBinaryHashFunctionFactoryProvider bhffp = context.getBinaryHashFunctionFactoryProvider();
        hashFuns[i] = bhffp.getBinaryHashFunctionFactory(type);
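        // The normalized key computer is derived from the leading sort column only.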
        if (i == 0 && nkcfProvider != null && type != null) {
            nkcf = nkcfProvider.getNormalizedKeyComputerFactory(type, sortColumns[i].getOrder() == OrderKind.ASC);
        }
    }
    ITuplePartitionComputerFactory tpcf = new FieldHashPartitionComputerFactory(sortFields, hashFuns);
    IConnectorDescriptor conn = new MToNPartitioningMergingConnectorDescriptor(spec, tpcf, sortFields, comps, nkcf);
    return new Pair<IConnectorDescriptor, TargetConstraint>(conn, TargetConstraint.ONE);
}
Also used: ITuplePartitionComputerFactory (org.apache.hyracks.api.dataflow.value.ITuplePartitionComputerFactory), IConnectorDescriptor (org.apache.hyracks.api.dataflow.IConnectorDescriptor), IBinaryHashFunctionFactoryProvider (org.apache.hyracks.algebricks.data.IBinaryHashFunctionFactoryProvider), MToNPartitioningMergingConnectorDescriptor (org.apache.hyracks.dataflow.std.connectors.MToNPartitioningMergingConnectorDescriptor), IBinaryComparatorFactory (org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory), IBinaryHashFunctionFactory (org.apache.hyracks.api.dataflow.value.IBinaryHashFunctionFactory), IBinaryComparatorFactoryProvider (org.apache.hyracks.algebricks.data.IBinaryComparatorFactoryProvider), FieldHashPartitionComputerFactory (org.apache.hyracks.dataflow.common.data.partition.FieldHashPartitionComputerFactory), TargetConstraint (org.apache.hyracks.algebricks.core.algebra.base.IHyracksJobBuilder.TargetConstraint), INormalizedKeyComputerFactory (org.apache.hyracks.api.dataflow.value.INormalizedKeyComputerFactory), INormalizedKeyComputerFactoryProvider (org.apache.hyracks.algebricks.data.INormalizedKeyComputerFactoryProvider), IVariableTypeEnvironment (org.apache.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment), Pair (org.apache.hyracks.algebricks.common.utils.Pair)
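
This example and the two that follow share the same guarded lookup: the normalized key computer factory is derived from the first sort column only, and it simply stays null when the context offers no provider or no type information. Below is a minimal sketch of that shared step as a stand-alone helper; the method name is hypothetical and not part of Algebricks.

private static INormalizedKeyComputerFactory firstKeyNormalizedKeyComputerFactory(INormalizedKeyComputerFactoryProvider nkcfProvider, Object firstKeyType, boolean ascending) throws AlgebricksException {
    if (nkcfProvider == null || firstKeyType == null) {
        // The consumers of the factory in these examples (the merging connector and the sort
        // runtimes) accept null, which just disables the normalized-key fast path.
        return null;
    }
    return nkcfProvider.getNormalizedKeyComputerFactory(firstKeyType, ascending);
}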

Example 7 with INormalizedKeyComputerFactoryProvider

Use of org.apache.hyracks.algebricks.data.INormalizedKeyComputerFactoryProvider in project asterixdb by apache.

From the class HashPartitionMergeExchangePOperator, method createConnectorDescriptor:

@Override
public Pair<IConnectorDescriptor, TargetConstraint> createConnectorDescriptor(IConnectorDescriptorRegistry spec, ILogicalOperator op, IOperatorSchema opSchema, JobGenContext context) throws AlgebricksException {
    int[] keys = new int[partitionFields.size()];
    IBinaryHashFunctionFactory[] hashFunctionFactories = new IBinaryHashFunctionFactory[partitionFields.size()];
    IVariableTypeEnvironment env = context.getTypeEnvironment(op);
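    // First, map each partitioning variable to its field index and a type-specific hash function factory.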
    {
        int i = 0;
        IBinaryHashFunctionFactoryProvider hashFunProvider = context.getBinaryHashFunctionFactoryProvider();
        for (LogicalVariable v : partitionFields) {
            keys[i] = opSchema.findVariable(v);
            hashFunctionFactories[i] = hashFunProvider.getBinaryHashFunctionFactory(env.getVarType(v));
            ++i;
        }
    }
    ITuplePartitionComputerFactory tpcf = new FieldHashPartitionComputerFactory(keys, hashFunctionFactories);
    int n = orderColumns.size();
    int[] sortFields = new int[n];
    IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[n];
    INormalizedKeyComputerFactoryProvider nkcfProvider = context.getNormalizedKeyComputerFactoryProvider();
    INormalizedKeyComputerFactory nkcf = null;
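    // Then resolve the merge ordering: sort fields, comparators, and a normalized key computer for the leading order column.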
    int j = 0;
    for (OrderColumn oc : orderColumns) {
        LogicalVariable var = oc.getColumn();
        sortFields[j] = opSchema.findVariable(var);
        Object type = env.getVarType(var);
        IBinaryComparatorFactoryProvider bcfp = context.getBinaryComparatorFactoryProvider();
        comparatorFactories[j] = bcfp.getBinaryComparatorFactory(type, oc.getOrder() == OrderKind.ASC);
        if (j == 0 && nkcfProvider != null && type != null) {
            nkcf = nkcfProvider.getNormalizedKeyComputerFactory(type, oc.getOrder() == OrderKind.ASC);
        }
        j++;
    }
    IConnectorDescriptor conn = new MToNPartitioningMergingConnectorDescriptor(spec, tpcf, sortFields, comparatorFactories, nkcf);
    return new Pair<IConnectorDescriptor, TargetConstraint>(conn, null);
}
Also used: LogicalVariable (org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable), ITuplePartitionComputerFactory (org.apache.hyracks.api.dataflow.value.ITuplePartitionComputerFactory), IConnectorDescriptor (org.apache.hyracks.api.dataflow.IConnectorDescriptor), IBinaryHashFunctionFactoryProvider (org.apache.hyracks.algebricks.data.IBinaryHashFunctionFactoryProvider), MToNPartitioningMergingConnectorDescriptor (org.apache.hyracks.dataflow.std.connectors.MToNPartitioningMergingConnectorDescriptor), OrderColumn (org.apache.hyracks.algebricks.core.algebra.properties.OrderColumn), IBinaryComparatorFactory (org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory), IBinaryHashFunctionFactory (org.apache.hyracks.api.dataflow.value.IBinaryHashFunctionFactory), FieldHashPartitionComputerFactory (org.apache.hyracks.dataflow.common.data.partition.FieldHashPartitionComputerFactory), IBinaryComparatorFactoryProvider (org.apache.hyracks.algebricks.data.IBinaryComparatorFactoryProvider), TargetConstraint (org.apache.hyracks.algebricks.core.algebra.base.IHyracksJobBuilder.TargetConstraint), INormalizedKeyComputerFactory (org.apache.hyracks.api.dataflow.value.INormalizedKeyComputerFactory), INormalizedKeyComputerFactoryProvider (org.apache.hyracks.algebricks.data.INormalizedKeyComputerFactoryProvider), IVariableTypeEnvironment (org.apache.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment), Pair (org.apache.hyracks.algebricks.common.utils.Pair)
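
The first block in this method only builds the hash-partitioning side of the connector. As a rough sketch, assuming partitionFields is a java.util.List<LogicalVariable> as in the operator above, that part could be factored out like this (the helper name is hypothetical):

private static ITuplePartitionComputerFactory hashPartitionComputerFactory(IOperatorSchema opSchema, IVariableTypeEnvironment env, IBinaryHashFunctionFactoryProvider hashFunProvider, List<LogicalVariable> partitionFields) throws AlgebricksException {
    int[] keys = new int[partitionFields.size()];
    IBinaryHashFunctionFactory[] hashFunctionFactories = new IBinaryHashFunctionFactory[partitionFields.size()];
    int i = 0;
    for (LogicalVariable v : partitionFields) {
        // Map each partitioning variable to its field index in the operator schema
        // and choose a hash function factory based on the variable's type.
        keys[i] = opSchema.findVariable(v);
        hashFunctionFactories[i] = hashFunProvider.getBinaryHashFunctionFactory(env.getVarType(v));
        ++i;
    }
    return new FieldHashPartitionComputerFactory(keys, hashFunctionFactories);
}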

Example 8 with INormalizedKeyComputerFactoryProvider

Use of org.apache.hyracks.algebricks.data.INormalizedKeyComputerFactoryProvider in project asterixdb by apache.

From the class InMemoryStableSortPOperator, method contributeRuntimeOperator:

@Override
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op, IOperatorSchema opSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema) throws AlgebricksException {
    RecordDescriptor recDescriptor = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), opSchema, context);
    int n = sortColumns.length;
    int[] sortFields = new int[n];
    IBinaryComparatorFactory[] comps = new IBinaryComparatorFactory[n];
    int i = 0;
    INormalizedKeyComputerFactoryProvider nkcfProvider = context.getNormalizedKeyComputerFactoryProvider();
    INormalizedKeyComputerFactory nkcf = null;
    IVariableTypeEnvironment env = context.getTypeEnvironment(op);
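    // Resolve sort fields and comparators; the normalized key computer comes from the first order column.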
    for (OrderColumn oc : sortColumns) {
        LogicalVariable var = oc.getColumn();
        sortFields[i] = opSchema.findVariable(var);
        Object type = env.getVarType(var);
        OrderKind order = oc.getOrder();
        if (i == 0 && nkcfProvider != null && type != null) {
            nkcf = nkcfProvider.getNormalizedKeyComputerFactory(type, order == OrderKind.ASC);
        }
        IBinaryComparatorFactoryProvider bcfp = context.getBinaryComparatorFactoryProvider();
        comps[i] = bcfp.getBinaryComparatorFactory(type, order == OrderKind.ASC);
        i++;
    }
    IPushRuntimeFactory runtime = new InMemorySortRuntimeFactory(sortFields, nkcf, comps, null);
    builder.contributeMicroOperator(op, runtime, recDescriptor);
    ILogicalOperator src = op.getInputs().get(0).getValue();
    builder.contributeGraphEdge(src, 0, op, 0);
}
Also used: LogicalVariable (org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable), InMemorySortRuntimeFactory (org.apache.hyracks.algebricks.runtime.operators.sort.InMemorySortRuntimeFactory), RecordDescriptor (org.apache.hyracks.api.dataflow.value.RecordDescriptor), OrderColumn (org.apache.hyracks.algebricks.core.algebra.properties.OrderColumn), ILogicalOperator (org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator), IBinaryComparatorFactory (org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory), IBinaryComparatorFactoryProvider (org.apache.hyracks.algebricks.data.IBinaryComparatorFactoryProvider), IPushRuntimeFactory (org.apache.hyracks.algebricks.runtime.base.IPushRuntimeFactory), INormalizedKeyComputerFactory (org.apache.hyracks.api.dataflow.value.INormalizedKeyComputerFactory), OrderKind (org.apache.hyracks.algebricks.core.algebra.operators.logical.OrderOperator.IOrder.OrderKind), INormalizedKeyComputerFactoryProvider (org.apache.hyracks.algebricks.data.INormalizedKeyComputerFactoryProvider), IVariableTypeEnvironment (org.apache.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment)
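
All three examples walk their ordered columns the same way: fill parallel arrays of sort field indexes and comparator factories, and take the normalized key computer factory from the leading column only. Here is a hedged sketch of that loop as a reusable helper; the name and signature are ours, not Algebricks API. It fills the caller-supplied arrays and returns the factory, which may be null.

private static INormalizedKeyComputerFactory resolveSortColumns(OrderColumn[] sortColumns, IOperatorSchema opSchema, IVariableTypeEnvironment env, JobGenContext context, int[] sortFields, IBinaryComparatorFactory[] comparatorFactories) throws AlgebricksException {
    INormalizedKeyComputerFactoryProvider nkcfProvider = context.getNormalizedKeyComputerFactoryProvider();
    IBinaryComparatorFactoryProvider bcfp = context.getBinaryComparatorFactoryProvider();
    INormalizedKeyComputerFactory nkcf = null;
    for (int i = 0; i < sortColumns.length; i++) {
        OrderColumn oc = sortColumns[i];
        boolean ascending = oc.getOrder() == OrderKind.ASC;
        // Resolve the column's position in the operator schema and its type.
        sortFields[i] = opSchema.findVariable(oc.getColumn());
        Object type = env.getVarType(oc.getColumn());
        comparatorFactories[i] = bcfp.getBinaryComparatorFactory(type, ascending);
        if (i == 0 && nkcfProvider != null && type != null) {
            // Only the leading sort key benefits from a normalized key computer.
            nkcf = nkcfProvider.getNormalizedKeyComputerFactory(type, ascending);
        }
    }
    return nkcf;
}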

Aggregations

INormalizedKeyComputerFactoryProvider (org.apache.hyracks.algebricks.data.INormalizedKeyComputerFactoryProvider): 8 uses
IBinaryComparatorFactory (org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory): 8 uses
INormalizedKeyComputerFactory (org.apache.hyracks.api.dataflow.value.INormalizedKeyComputerFactory): 8 uses
IVariableTypeEnvironment (org.apache.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment): 7 uses
IBinaryComparatorFactoryProvider (org.apache.hyracks.algebricks.data.IBinaryComparatorFactoryProvider): 7 uses
LogicalVariable (org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable): 6 uses
OrderColumn (org.apache.hyracks.algebricks.core.algebra.properties.OrderColumn): 5 uses
Pair (org.apache.hyracks.algebricks.common.utils.Pair): 4 uses
TargetConstraint (org.apache.hyracks.algebricks.core.algebra.base.IHyracksJobBuilder.TargetConstraint): 4 uses
OrderKind (org.apache.hyracks.algebricks.core.algebra.operators.logical.OrderOperator.IOrder.OrderKind): 4 uses
IConnectorDescriptor (org.apache.hyracks.api.dataflow.IConnectorDescriptor): 4 uses
ITuplePartitionComputerFactory (org.apache.hyracks.api.dataflow.value.ITuplePartitionComputerFactory): 4 uses
RecordDescriptor (org.apache.hyracks.api.dataflow.value.RecordDescriptor): 4 uses
ILogicalOperator (org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator): 3 uses
IOperatorDescriptorRegistry (org.apache.hyracks.api.job.IOperatorDescriptorRegistry): 3 uses
MToNPartitioningMergingConnectorDescriptor (org.apache.hyracks.dataflow.std.connectors.MToNPartitioningMergingConnectorDescriptor): 3 uses
AlgebricksException (org.apache.hyracks.algebricks.common.exceptions.AlgebricksException): 2 uses
IBinaryHashFunctionFactoryProvider (org.apache.hyracks.algebricks.data.IBinaryHashFunctionFactoryProvider): 2 uses
IBinaryHashFunctionFactory (org.apache.hyracks.api.dataflow.value.IBinaryHashFunctionFactory): 2 uses
FieldHashPartitionComputerFactory (org.apache.hyracks.dataflow.common.data.partition.FieldHashPartitionComputerFactory): 2 uses