Use of org.apache.hyracks.api.dataflow.value.INormalizedKeyComputerFactory in project asterixdb by apache.
The class SortMergeExchangePOperator, method createConnectorDescriptor: builds a hash-partitioning, merging connector whose sort fields, comparators, and first-column normalized-key computer all come from the operator's sort columns.
@Override
public Pair<IConnectorDescriptor, TargetConstraint> createConnectorDescriptor(IConnectorDescriptorRegistry spec,
        ILogicalOperator op, IOperatorSchema opSchema, JobGenContext context) throws AlgebricksException {
    int n = sortColumns.length;
    int[] sortFields = new int[n];
    IBinaryComparatorFactory[] comps = new IBinaryComparatorFactory[n];
    IBinaryHashFunctionFactory[] hashFuns = new IBinaryHashFunctionFactory[n];
    IVariableTypeEnvironment env = context.getTypeEnvironment(op);
    INormalizedKeyComputerFactoryProvider nkcfProvider = context.getNormalizedKeyComputerFactoryProvider();
    INormalizedKeyComputerFactory nkcf = null;
    for (int i = 0; i < n; i++) {
        sortFields[i] = opSchema.findVariable(sortColumns[i].getColumn());
        Object type = env.getVarType(sortColumns[i].getColumn());
        IBinaryComparatorFactoryProvider bcfp = context.getBinaryComparatorFactoryProvider();
        comps[i] = bcfp.getBinaryComparatorFactory(type, sortColumns[i].getOrder() == OrderKind.ASC);
        IBinaryHashFunctionFactoryProvider bhffp = context.getBinaryHashFunctionFactoryProvider();
        hashFuns[i] = bhffp.getBinaryHashFunctionFactory(type);
        // The normalized-key computer is built only for the first (primary) sort column.
        if (i == 0 && nkcfProvider != null && type != null) {
            nkcf = nkcfProvider.getNormalizedKeyComputerFactory(type, sortColumns[i].getOrder() == OrderKind.ASC);
        }
    }
    ITuplePartitionComputerFactory tpcf = new FieldHashPartitionComputerFactory(sortFields, hashFuns);
    IConnectorDescriptor conn = new MToNPartitioningMergingConnectorDescriptor(spec, tpcf, sortFields, comps, nkcf);
    // The merging connector feeds a single consumer (TargetConstraint.ONE).
    return new Pair<IConnectorDescriptor, TargetConstraint>(conn, TargetConstraint.ONE);
}
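These operators always obtain the factory from the context's INormalizedKeyComputerFactoryProvider. For orientation, here is a minimal sketch of what a hand-written factory for ascending 32-bit integer keys could look like. It assumes the one-word int normalize(byte[], int, int) form of INormalizedKeyComputer used by this generation of Hyracks and the big-endian integer layout of Java's DataOutput; the class name and byte layout are illustrative, not taken from the AsterixDB sources.

import org.apache.hyracks.api.dataflow.value.INormalizedKeyComputer;
import org.apache.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;

public class IntAscNormalizedKeyComputerFactory implements INormalizedKeyComputerFactory {
    private static final long serialVersionUID = 1L;

    @Override
    public INormalizedKeyComputer createNormalizedKeyComputer() {
        return new INormalizedKeyComputer() {
            @Override
            public int normalize(byte[] bytes, int start, int length) {
                // Read the big-endian int (assumed layout) and flip the sign bit
                // so the prefix compares correctly as an unsigned value.
                int v = ((bytes[start] & 0xff) << 24) | ((bytes[start + 1] & 0xff) << 16)
                        | ((bytes[start + 2] & 0xff) << 8) | (bytes[start + 3] & 0xff);
                return v ^ Integer.MIN_VALUE;
            }
        };
    }
}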
Use of org.apache.hyracks.api.dataflow.value.INormalizedKeyComputerFactory in project asterixdb by apache.
The class HashPartitionMergeExchangePOperator, method createConnectorDescriptor: partitions on the partition fields but merges on the order columns, again normalizing only the first order column.
@Override
public Pair<IConnectorDescriptor, TargetConstraint> createConnectorDescriptor(IConnectorDescriptorRegistry spec,
        ILogicalOperator op, IOperatorSchema opSchema, JobGenContext context) throws AlgebricksException {
    int[] keys = new int[partitionFields.size()];
    IBinaryHashFunctionFactory[] hashFunctionFactories = new IBinaryHashFunctionFactory[partitionFields.size()];
    IVariableTypeEnvironment env = context.getTypeEnvironment(op);
    {
        int i = 0;
        IBinaryHashFunctionFactoryProvider hashFunProvider = context.getBinaryHashFunctionFactoryProvider();
        for (LogicalVariable v : partitionFields) {
            keys[i] = opSchema.findVariable(v);
            hashFunctionFactories[i] = hashFunProvider.getBinaryHashFunctionFactory(env.getVarType(v));
            ++i;
        }
    }
    ITuplePartitionComputerFactory tpcf = new FieldHashPartitionComputerFactory(keys, hashFunctionFactories);
    int n = orderColumns.size();
    int[] sortFields = new int[n];
    IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[n];
    INormalizedKeyComputerFactoryProvider nkcfProvider = context.getNormalizedKeyComputerFactoryProvider();
    INormalizedKeyComputerFactory nkcf = null;
    int j = 0;
    for (OrderColumn oc : orderColumns) {
        LogicalVariable var = oc.getColumn();
        sortFields[j] = opSchema.findVariable(var);
        Object type = env.getVarType(var);
        IBinaryComparatorFactoryProvider bcfp = context.getBinaryComparatorFactoryProvider();
        comparatorFactories[j] = bcfp.getBinaryComparatorFactory(type, oc.getOrder() == OrderKind.ASC);
        if (j == 0 && nkcfProvider != null && type != null) {
            nkcf = nkcfProvider.getNormalizedKeyComputerFactory(type, oc.getOrder() == OrderKind.ASC);
        }
        j++;
    }
    IConnectorDescriptor conn =
            new MToNPartitioningMergingConnectorDescriptor(spec, tpcf, sortFields, comparatorFactories, nkcf);
    return new Pair<IConnectorDescriptor, TargetConstraint>(conn, null);
}
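Note that both connector descriptors request a normalized-key computer only for the first order column (the i == 0 / j == 0 branches). The intent, sketched below with a hypothetical helper that is not part of the Hyracks sources, is that merging can compare the cheap one-word prefixes first and consult the full IBinaryComparator chain only on ties; the sketch assumes normalized keys compare as unsigned integers, matching the sign-bit flip in the previous sketch.

import java.util.function.IntSupplier;

final class NormalizedKeyCompareSketch {
    // Hypothetical helper, not from the Hyracks sources: decide the order with
    // the one-word normalized-key prefix when it differs, otherwise fall back
    // to the caller-supplied full comparison over all sort fields.
    static int compare(int normKey1, int normKey2, IntSupplier fullCompare) {
        int c = Integer.compareUnsigned(normKey1, normKey2); // assumed unsigned semantics
        return c != 0 ? c : fullCompare.getAsInt();
    }
}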
Use of org.apache.hyracks.api.dataflow.value.INormalizedKeyComputerFactory in project asterixdb by apache.
The class ExternalGroupByPOperator, method contributeRuntimeOperator: wires up an external (spilling) group-by whose normalized-key factory covers the ascending group-by columns.
@Override
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
        IOperatorSchema opSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
        throws AlgebricksException {
    List<LogicalVariable> gbyCols = getGbyColumns();
    int[] keys = JobGenHelper.variablesToFieldIndexes(gbyCols, inputSchemas[0]);
    GroupByOperator gby = (GroupByOperator) op;
    int numFds = gby.getDecorList().size();
    int[] fdColumns = new int[numFds];
    int j = 0;
    for (Pair<LogicalVariable, Mutable<ILogicalExpression>> p : gby.getDecorList()) {
        ILogicalExpression expr = p.second.getValue();
        if (expr.getExpressionTag() != LogicalExpressionTag.VARIABLE) {
            throw new AlgebricksException("pre-sorted group-by expects variable references.");
        }
        VariableReferenceExpression v = (VariableReferenceExpression) expr;
        LogicalVariable decor = v.getVariableReference();
        fdColumns[j++] = inputSchemas[0].findVariable(decor);
    }
    if (gby.getNestedPlans().size() != 1) {
        throw new AlgebricksException("External group-by currently works only for one nested plan with one root "
                + "containing an aggregate and a nested-tuple-source.");
    }
    ILogicalPlan p0 = gby.getNestedPlans().get(0);
    if (p0.getRoots().size() != 1) {
        throw new AlgebricksException("External group-by currently works only for one nested plan with one root "
                + "containing an aggregate and a nested-tuple-source.");
    }
    Mutable<ILogicalOperator> r0 = p0.getRoots().get(0);
    AggregateOperator aggOp = (AggregateOperator) r0.getValue();
    IPartialAggregationTypeComputer partialAggregationTypeComputer = context.getPartialAggregationTypeComputer();
    List<Object> intermediateTypes = new ArrayList<Object>();
    int n = aggOp.getExpressions().size();
    ISerializedAggregateEvaluatorFactory[] aff = new ISerializedAggregateEvaluatorFactory[n];
    int i = 0;
    IExpressionRuntimeProvider expressionRuntimeProvider = context.getExpressionRuntimeProvider();
    IVariableTypeEnvironment aggOpInputEnv = context.getTypeEnvironment(aggOp.getInputs().get(0).getValue());
    IVariableTypeEnvironment outputEnv = context.getTypeEnvironment(op);
    for (Mutable<ILogicalExpression> exprRef : aggOp.getExpressions()) {
        AggregateFunctionCallExpression aggFun = (AggregateFunctionCallExpression) exprRef.getValue();
        aff[i++] = expressionRuntimeProvider.createSerializableAggregateFunctionFactory(aggFun, aggOpInputEnv,
                inputSchemas, context);
        intermediateTypes
                .add(partialAggregationTypeComputer.getType(aggFun, aggOpInputEnv, context.getMetadataProvider()));
    }
    // Layout: group-by key fields first, then the decor (functionally determined) fields.
    int[] keyAndDecFields = new int[keys.length + fdColumns.length];
    for (i = 0; i < keys.length; ++i) {
        keyAndDecFields[i] = keys[i];
    }
    for (i = 0; i < fdColumns.length; i++) {
        keyAndDecFields[keys.length + i] = fdColumns[i];
    }
    List<LogicalVariable> keyAndDecVariables = new ArrayList<LogicalVariable>();
    for (Pair<LogicalVariable, Mutable<ILogicalExpression>> p : gby.getGroupByList()) {
        keyAndDecVariables.add(p.first);
    }
    for (Pair<LogicalVariable, Mutable<ILogicalExpression>> p : gby.getDecorList()) {
        keyAndDecVariables.add(GroupByOperator.getDecorVariable(p));
    }
    for (LogicalVariable var : keyAndDecVariables) {
        aggOpInputEnv.setVarType(var, outputEnv.getVarType(var));
    }
    compileSubplans(inputSchemas[0], gby, opSchema, context);
    IOperatorDescriptorRegistry spec = builder.getJobSpec();
    IBinaryComparatorFactory[] comparatorFactories =
            JobGenHelper.variablesToAscBinaryComparatorFactories(gbyCols, aggOpInputEnv, context);
    RecordDescriptor recordDescriptor =
            JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), opSchema, context);
    IBinaryHashFunctionFamily[] hashFunctionFactories =
            JobGenHelper.variablesToBinaryHashFunctionFamilies(gbyCols, aggOpInputEnv, context);
    ISerializedAggregateEvaluatorFactory[] merges = new ISerializedAggregateEvaluatorFactory[n];
    List<LogicalVariable> usedVars = new ArrayList<LogicalVariable>();
    IOperatorSchema[] localInputSchemas = new IOperatorSchema[1];
    localInputSchemas[0] = new OperatorSchemaImpl();
    // The merge expressions read the partial aggregates, so the variables they
    // use are typed with the intermediate types computed above and exposed
    // through a local schema of keys, decors, and those variables.
    for (i = 0; i < n; i++) {
        AggregateFunctionCallExpression aggFun =
                (AggregateFunctionCallExpression) aggOp.getMergeExpressions().get(i).getValue();
        aggFun.getUsedVariables(usedVars);
    }
    i = 0;
    for (Object type : intermediateTypes) {
        aggOpInputEnv.setVarType(usedVars.get(i++), type);
    }
    for (LogicalVariable keyVar : keyAndDecVariables) {
        localInputSchemas[0].addVariable(keyVar);
    }
    for (LogicalVariable usedVar : usedVars) {
        localInputSchemas[0].addVariable(usedVar);
    }
    for (i = 0; i < n; i++) {
        AggregateFunctionCallExpression mergeFun =
                (AggregateFunctionCallExpression) aggOp.getMergeExpressions().get(i).getValue();
        merges[i] = expressionRuntimeProvider.createSerializableAggregateFunctionFactory(mergeFun, aggOpInputEnv,
                localInputSchemas, context);
    }
    IAggregatorDescriptorFactory aggregatorFactory = new SerializableAggregatorDescriptorFactory(aff);
    IAggregatorDescriptorFactory mergeFactory = new SerializableAggregatorDescriptorFactory(merges);
    INormalizedKeyComputerFactory normalizedKeyFactory =
            JobGenHelper.variablesToAscNormalizedKeyComputerFactory(gbyCols, aggOpInputEnv, context);
    // Calculates the hash table size (# of unique hash values) based on the budget and a tuple size.
    int memoryBudgetInBytes = context.getFrameSize() * frameLimit;
    int groupByColumnsCount = gby.getGroupByList().size() + numFds;
    int hashTableSize = ExternalGroupOperatorDescriptor.calculateGroupByTableCardinality(memoryBudgetInBytes,
            groupByColumnsCount, context.getFrameSize());
    ExternalGroupOperatorDescriptor gbyOpDesc = new ExternalGroupOperatorDescriptor(spec, hashTableSize, inputSize,
            keyAndDecFields, frameLimit, comparatorFactories, normalizedKeyFactory, aggregatorFactory, mergeFactory,
            recordDescriptor, recordDescriptor, new HashSpillableTableFactory(hashFunctionFactories));
    contributeOpDesc(builder, gby, gbyOpDesc);
    ILogicalOperator src = op.getInputs().get(0).getValue();
    builder.contributeGraphEdge(src, 0, op, 0);
}
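The budget arithmetic near the end of the method is plain frame math: the operator may hold frameLimit frames of context.getFrameSize() bytes each. A worked example with assumed, illustrative numbers:

public class GroupByBudgetExample {
    public static void main(String[] args) {
        int frameSize = 32 * 1024; // bytes per frame (assumed value)
        int frameLimit = 128;      // frames the operator may use (assumed value)
        int memoryBudgetInBytes = frameSize * frameLimit;
        // 4,194,304 bytes, i.e. a 4 MB budget, from which
        // calculateGroupByTableCardinality derives the hash-table slot count.
        System.out.println(memoryBudgetInBytes);
    }
}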
Use of org.apache.hyracks.api.dataflow.value.INormalizedKeyComputerFactory in project asterixdb by apache.
The class InMemoryStableSortPOperator, method contributeRuntimeOperator: contributes an in-memory stable sort micro-operator, passing the first-column normalized-key computer to the sort runtime.
@Override
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
        IOperatorSchema opSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
        throws AlgebricksException {
    RecordDescriptor recDescriptor =
            JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), opSchema, context);
    int n = sortColumns.length;
    int[] sortFields = new int[n];
    IBinaryComparatorFactory[] comps = new IBinaryComparatorFactory[n];
    int i = 0;
    INormalizedKeyComputerFactoryProvider nkcfProvider = context.getNormalizedKeyComputerFactoryProvider();
    INormalizedKeyComputerFactory nkcf = null;
    IVariableTypeEnvironment env = context.getTypeEnvironment(op);
    for (OrderColumn oc : sortColumns) {
        LogicalVariable var = oc.getColumn();
        sortFields[i] = opSchema.findVariable(var);
        Object type = env.getVarType(var);
        OrderKind order = oc.getOrder();
        if (i == 0 && nkcfProvider != null && type != null) {
            nkcf = nkcfProvider.getNormalizedKeyComputerFactory(type, order == OrderKind.ASC);
        }
        IBinaryComparatorFactoryProvider bcfp = context.getBinaryComparatorFactoryProvider();
        comps[i] = bcfp.getBinaryComparatorFactory(type, oc.getOrder() == OrderKind.ASC);
        i++;
    }
    IPushRuntimeFactory runtime = new InMemorySortRuntimeFactory(sortFields, nkcf, comps, null);
    builder.contributeMicroOperator(op, runtime, recDescriptor);
    ILogicalOperator src = op.getInputs().get(0).getValue();
    builder.contributeGraphEdge(src, 0, op, 0);
}
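The per-column loop here repeats the pattern of the two exchange operators above. A hypothetical refactoring, not present in the code base, could isolate the first-column normalized-key lookup using only types that already appear in these snippets:

// Hypothetical helper, not part of AsterixDB: mirrors the i == 0 / j == 0
// branches in the operators above for an OrderColumn[] sort specification.
private static INormalizedKeyComputerFactory firstKeyNormalizer(OrderColumn[] sortColumns,
        IVariableTypeEnvironment env, JobGenContext context) throws AlgebricksException {
    INormalizedKeyComputerFactoryProvider provider = context.getNormalizedKeyComputerFactoryProvider();
    if (provider == null || sortColumns.length == 0) {
        return null;
    }
    Object type = env.getVarType(sortColumns[0].getColumn());
    if (type == null) {
        return null;
    }
    return provider.getNormalizedKeyComputerFactory(type, sortColumns[0].getOrder() == OrderKind.ASC);
}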