
Example 31 with ILogicalOperator

Use of org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator in project asterixdb by apache.

From the class AssignPOperator, method computeDeliveredProperties.

@Override
public void computeDeliveredProperties(ILogicalOperator op, IOptimizationContext context) {
    AssignOperator assignOp = (AssignOperator) op;
    ILogicalOperator op2 = op.getInputs().get(0).getValue();
    deliveredProperties = op2.getDeliveredPhysicalProperties().clone();
    if (assignOp.getExplicitOrderingProperty() != null) {
        deliveredProperties.getLocalProperties().add(assignOp.getExplicitOrderingProperty());
    }
}
Also used: ILogicalOperator (org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator), AssignOperator (org.apache.hyracks.algebricks.core.algebra.operators.logical.AssignOperator)
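
The snippet above shows the usual delivered-properties idiom for Algebricks physical operators: clone the child's vector before touching it, then append any extra local property the operator guarantees. Below is a minimal sketch of that idiom as a standalone helper, assuming the hyracks-algebricks-core types listed above are on the classpath and that getDeliveredPhysicalProperties().clone() yields an IPhysicalPropertiesVector (as the assignment above suggests); the helper name and its ILocalStructuralProperty parameter are ours, for illustration only.

// Sketch only (not asterixdb code): forward the first child's delivered properties,
// optionally appending one extra local property such as an explicit ordering.
public static IPhysicalPropertiesVector propagateFromFirstChild(ILogicalOperator op,
        ILocalStructuralProperty extraLocalProperty) {
    ILogicalOperator child = op.getInputs().get(0).getValue();
    // clone() so that the addition below does not mutate the child's own vector
    IPhysicalPropertiesVector delivered = child.getDeliveredPhysicalProperties().clone();
    if (extraLocalProperty != null) {
        delivered.getLocalProperties().add(extraLocalProperty);
    }
    return delivered;
}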

Example 32 with ILogicalOperator

Use of org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator in project asterixdb by apache.

From the class BulkloadPOperator, method contributeRuntimeOperator.

@SuppressWarnings({ "rawtypes", "unchecked" })
@Override
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op, IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema) throws AlgebricksException {
    InsertDeleteUpsertOperator insertDeleteOp = (InsertDeleteUpsertOperator) op;
    assert insertDeleteOp.getOperation() == Kind.INSERT;
    assert insertDeleteOp.isBulkload();
    IMetadataProvider mp = context.getMetadataProvider();
    IVariableTypeEnvironment typeEnv = context.getTypeEnvironment(op);
    JobSpecification spec = builder.getJobSpec();
    RecordDescriptor inputDesc = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op.getInputs().get(0).getValue()), inputSchemas[0], context);
    Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> runtimeAndConstraints = mp.getInsertRuntime(dataSource, propagatedSchema, typeEnv, primaryKeys, payload, additionalFilteringKeys, additionalNonFilterVars, inputDesc, context, spec, true);
    builder.contributeHyracksOperator(insertDeleteOp, runtimeAndConstraints.first);
    builder.contributeAlgebricksPartitionConstraint(runtimeAndConstraints.first, runtimeAndConstraints.second);
    ILogicalOperator src = insertDeleteOp.getInputs().get(0).getValue();
    builder.contributeGraphEdge(src, 0, insertDeleteOp, 0);
}
Also used: InsertDeleteUpsertOperator (org.apache.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteUpsertOperator), RecordDescriptor (org.apache.hyracks.api.dataflow.value.RecordDescriptor), IOperatorDescriptor (org.apache.hyracks.api.dataflow.IOperatorDescriptor), ILogicalOperator (org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator), AlgebricksPartitionConstraint (org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint), JobSpecification (org.apache.hyracks.api.job.JobSpecification), IMetadataProvider (org.apache.hyracks.algebricks.core.algebra.metadata.IMetadataProvider), IVariableTypeEnvironment (org.apache.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment)
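
Examples 32, 33, and 35 share the same job-construction recipe: obtain an IOperatorDescriptor plus AlgebricksPartitionConstraint (here from the metadata provider), register both with the IHyracksJobBuilder, then connect a graph edge from the operator's logical input. The following condensed sketch of that recipe for a single-input operator uses only the builder calls shown in these examples; the helper name is ours.

// Sketch only: register a runtime descriptor/constraint pair and connect the input edge.
static void wireSingleInputOperator(IHyracksJobBuilder builder, ILogicalOperator op,
        Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> runtimeAndConstraints) {
    builder.contributeHyracksOperator(op, runtimeAndConstraints.first);
    // The constraint may be null (the scan example below checks for this), so guard it.
    if (runtimeAndConstraints.second != null) {
        builder.contributeAlgebricksPartitionConstraint(runtimeAndConstraints.first, runtimeAndConstraints.second);
    }
    // Output 0 of the logical input feeds input 0 of this operator.
    ILogicalOperator src = op.getInputs().get(0).getValue();
    builder.contributeGraphEdge(src, 0, op, 0);
}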

Example 33 with ILogicalOperator

Use of org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator in project asterixdb by apache.

From the class DataSourceScanPOperator, method contributeRuntimeOperator.

@SuppressWarnings("unchecked")
@Override
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op, IOperatorSchema opSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema) throws AlgebricksException {
    DataSourceScanOperator scan = (DataSourceScanOperator) op;
    IMetadataProvider mp = context.getMetadataProvider();
    IVariableTypeEnvironment typeEnv = context.getTypeEnvironment(op);
    List<LogicalVariable> vars = scan.getVariables();
    List<LogicalVariable> projectVars = scan.getProjectVariables();
    Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> p = mp.getScannerRuntime(dataSource, vars, projectVars, scan.isProjectPushed(), scan.getMinFilterVars(), scan.getMaxFilterVars(), opSchema, typeEnv, context, builder.getJobSpec(), implConfig);
    builder.contributeHyracksOperator(scan, p.first);
    if (p.second != null) {
        builder.contributeAlgebricksPartitionConstraint(p.first, p.second);
    }
    ILogicalOperator srcExchange = scan.getInputs().get(0).getValue();
    builder.contributeGraphEdge(srcExchange, 0, scan, 0);
}
Also used: LogicalVariable (org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable), DataSourceScanOperator (org.apache.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator), IOperatorDescriptor (org.apache.hyracks.api.dataflow.IOperatorDescriptor), ILogicalOperator (org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator), AlgebricksPartitionConstraint (org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint), IMetadataProvider (org.apache.hyracks.algebricks.core.algebra.metadata.IMetadataProvider), IVariableTypeEnvironment (org.apache.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment)

Example 34 with ILogicalOperator

Use of org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator in project asterixdb by apache.

From the class DataSourceScanPOperator, method getRequiredPropertiesForChildren.

@Override
public PhysicalRequirements getRequiredPropertiesForChildren(ILogicalOperator op, IPhysicalPropertiesVector reqdByParent, IOptimizationContext context) {
    if (op.getInputs().isEmpty()) {
        return emptyUnaryRequirements();
    }
    ILogicalOperator childOp = op.getInputs().get(0).getValue();
    // Empty tuple source is a special case that can be partitioned in the same way as the data scan.
    if (childOp.getOperatorTag() == LogicalOperatorTag.EMPTYTUPLESOURCE) {
        return emptyUnaryRequirements();
    }
    INodeDomain domain = dataSource.getDomain();
    return new PhysicalRequirements(new StructuralPropertiesVector[] { new StructuralPropertiesVector(new BroadcastPartitioningProperty(domain), null) }, IPartitioningRequirementsCoordinator.NO_COORDINATION);
}
Also used: StructuralPropertiesVector (org.apache.hyracks.algebricks.core.algebra.properties.StructuralPropertiesVector), BroadcastPartitioningProperty (org.apache.hyracks.algebricks.core.algebra.properties.BroadcastPartitioningProperty), ILogicalOperator (org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator), INodeDomain (org.apache.hyracks.algebricks.core.algebra.properties.INodeDomain), PhysicalRequirements (org.apache.hyracks.algebricks.core.algebra.properties.PhysicalRequirements)
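
The return value packs one required-properties vector per child plus a coordinator describing how the children's requirements relate; with a single child there is nothing to coordinate. Here is a small sketch that factors the construction above into a hypothetical helper (the name is ours), using only the constructors that appear in the example.

// Sketch only: require the single child to be broadcast across the given node domain.
static PhysicalRequirements broadcastChildRequirement(INodeDomain domain) {
    StructuralPropertiesVector[] perChild = new StructuralPropertiesVector[] {
            new StructuralPropertiesVector(new BroadcastPartitioningProperty(domain), null) };
    // One vector per child; NO_COORDINATION because there is only one child to satisfy.
    return new PhysicalRequirements(perChild, IPartitioningRequirementsCoordinator.NO_COORDINATION);
}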

Example 35 with ILogicalOperator

Use of org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator in project asterixdb by apache.

From the class NestedLoopJoinPOperator, method contributeRuntimeOperator.

@Override
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op, IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema) throws AlgebricksException {
    AbstractBinaryJoinOperator join = (AbstractBinaryJoinOperator) op;
    RecordDescriptor recDescriptor = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), propagatedSchema, context);
    IOperatorSchema[] conditionInputSchemas = new IOperatorSchema[1];
    conditionInputSchemas[0] = propagatedSchema;
    IExpressionRuntimeProvider expressionRuntimeProvider = context.getExpressionRuntimeProvider();
    IScalarEvaluatorFactory cond = expressionRuntimeProvider.createEvaluatorFactory(join.getCondition().getValue(), context.getTypeEnvironment(op), conditionInputSchemas, context);
    ITuplePairComparatorFactory comparatorFactory = new TuplePairEvaluatorFactory(cond, context.getBinaryBooleanInspectorFactory());
    IOperatorDescriptorRegistry spec = builder.getJobSpec();
    IOperatorDescriptor opDesc = null;
    switch(kind) {
        case INNER:
            {
                opDesc = new NestedLoopJoinOperatorDescriptor(spec, comparatorFactory, recDescriptor, memSize, false, null);
                break;
            }
        case LEFT_OUTER:
            {
                IMissingWriterFactory[] nonMatchWriterFactories = new IMissingWriterFactory[inputSchemas[1].getSize()];
                for (int j = 0; j < nonMatchWriterFactories.length; j++) {
                    nonMatchWriterFactories[j] = context.getMissingWriterFactory();
                }
                opDesc = new NestedLoopJoinOperatorDescriptor(spec, comparatorFactory, recDescriptor, memSize, true, nonMatchWriterFactories);
                break;
            }
        default:
            {
                throw new NotImplementedException();
            }
    }
    contributeOpDesc(builder, (AbstractLogicalOperator) op, opDesc);
    ILogicalOperator src1 = op.getInputs().get(0).getValue();
    builder.contributeGraphEdge(src1, 0, op, 0);
    ILogicalOperator src2 = op.getInputs().get(1).getValue();
    builder.contributeGraphEdge(src2, 0, op, 1);
}
Also used: RecordDescriptor (org.apache.hyracks.api.dataflow.value.RecordDescriptor), IOperatorSchema (org.apache.hyracks.algebricks.core.algebra.operators.logical.IOperatorSchema), NotImplementedException (org.apache.hyracks.algebricks.common.exceptions.NotImplementedException), ILogicalOperator (org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator), IOperatorDescriptorRegistry (org.apache.hyracks.api.job.IOperatorDescriptorRegistry), AbstractBinaryJoinOperator (org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractBinaryJoinOperator), IScalarEvaluatorFactory (org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory), IExpressionRuntimeProvider (org.apache.hyracks.algebricks.core.algebra.expressions.IExpressionRuntimeProvider), IOperatorDescriptor (org.apache.hyracks.api.dataflow.IOperatorDescriptor), NestedLoopJoinOperatorDescriptor (org.apache.hyracks.dataflow.std.join.NestedLoopJoinOperatorDescriptor), ITuplePairComparatorFactory (org.apache.hyracks.api.dataflow.value.ITuplePairComparatorFactory)
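
One detail in the LEFT_OUTER branch is easy to miss: the join needs one IMissingWriterFactory per column of the inner input's schema so that unmatched outer tuples can be padded with missing values. Below is a small sketch of that loop as a hypothetical helper (the name is ours), using only calls that appear in the example above.

// Sketch only: one missing-writer factory per column of the inner (build-side) schema.
static IMissingWriterFactory[] missingWritersForSchema(IOperatorSchema innerSchema, JobGenContext context) {
    IMissingWriterFactory[] factories = new IMissingWriterFactory[innerSchema.getSize()];
    for (int i = 0; i < factories.length; i++) {
        factories[i] = context.getMissingWriterFactory();
    }
    return factories;
}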

Aggregations

ILogicalOperator (org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator) 355
LogicalVariable (org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable) 196
ILogicalExpression (org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression) 130
Mutable (org.apache.commons.lang3.mutable.Mutable) 125
ArrayList (java.util.ArrayList) 119
AbstractLogicalOperator (org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator) 117
VariableReferenceExpression (org.apache.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression) 86
ILogicalPlan (org.apache.hyracks.algebricks.core.algebra.base.ILogicalPlan) 73
AssignOperator (org.apache.hyracks.algebricks.core.algebra.operators.logical.AssignOperator) 57
Pair (org.apache.hyracks.algebricks.common.utils.Pair) 53
MutableObject (org.apache.commons.lang3.mutable.MutableObject) 51
HashSet (java.util.HashSet) 46
AbstractFunctionCallExpression (org.apache.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression) 43
ScalarFunctionCallExpression (org.apache.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression) 36
GroupByOperator (org.apache.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator) 36
RecordDescriptor (org.apache.hyracks.api.dataflow.value.RecordDescriptor) 33
AlgebricksException (org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) 29
AggregateOperator (org.apache.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator) 28
SubplanOperator (org.apache.hyracks.algebricks.core.algebra.operators.logical.SubplanOperator) 26
AggregateFunctionCallExpression (org.apache.hyracks.algebricks.core.algebra.expressions.AggregateFunctionCallExpression) 25