
Example 6 with AssignRuntimeFactory

Use of org.apache.hyracks.algebricks.runtime.operators.std.AssignRuntimeFactory in project asterixdb by Apache.

The class PushRuntimeTest, method etsAssignSubplanProjectWrite.

@Test
public void etsAssignSubplanProjectWrite() throws Exception {
    JobSpecification spec = new JobSpecification(FRAME_SIZE);
    IntegerConstantEvalFactory const1 = new IntegerConstantEvalFactory(400);
    IntegerConstantEvalFactory const2 = new IntegerConstantEvalFactory(3);
    EmptyTupleSourceRuntimeFactory ets = new EmptyTupleSourceRuntimeFactory();
    RecordDescriptor etsDesc = new RecordDescriptor(new ISerializerDeserializer[] {});
    AssignRuntimeFactory assign1 = new AssignRuntimeFactory(new int[] { 0 }, new IScalarEvaluatorFactory[] { const1 }, new int[] { 0 });
    RecordDescriptor assign1Desc = new RecordDescriptor(new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
    NestedTupleSourceRuntimeFactory nts = new NestedTupleSourceRuntimeFactory();
    AssignRuntimeFactory assign2 = new AssignRuntimeFactory(new int[] { 1 },
            new IScalarEvaluatorFactory[] { new IntegerAddEvalFactory(new TupleFieldEvaluatorFactory(0), const2) },
            new int[] { 0, 1 });
    RecordDescriptor assign2Desc = new RecordDescriptor(new ISerializerDeserializer[] {
            IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE });
    StreamProjectRuntimeFactory project1 = new StreamProjectRuntimeFactory(new int[] { 1 });
    RecordDescriptor project1Desc = new RecordDescriptor(new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
    AlgebricksPipeline pipeline = new AlgebricksPipeline(new IPushRuntimeFactory[] { nts, assign2, project1 },
            new RecordDescriptor[] { assign1Desc, assign2Desc, project1Desc });
    SubplanRuntimeFactory subplan = new SubplanRuntimeFactory(pipeline,
            new IMissingWriterFactory[] { NoopMissingWriterFactory.INSTANCE }, assign1Desc, null);
    RecordDescriptor subplanDesc = new RecordDescriptor(new ISerializerDeserializer[] {
            IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE });
    StreamProjectRuntimeFactory project2 = new StreamProjectRuntimeFactory(new int[] { 1 });
    RecordDescriptor project2Desc = new RecordDescriptor(new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
    String filePath = PATH_ACTUAL + SEPARATOR + "etsAssignSubplanProjectWrite.out";
    File outFile = new File(filePath);
    SinkWriterRuntimeFactory writer = new SinkWriterRuntimeFactory(new int[] { 0 },
            new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE }, outFile,
            PrinterBasedWriterFactory.INSTANCE, project2Desc);
    AlgebricksMetaOperatorDescriptor algebricksOp = new AlgebricksMetaOperatorDescriptor(spec, 0, 0,
            new IPushRuntimeFactory[] { ets, assign1, subplan, project2, writer },
            new RecordDescriptor[] { etsDesc, assign1Desc, subplanDesc, project2Desc, null });
    PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, algebricksOp, DEFAULT_NODES);
    spec.addRoot(algebricksOp);
    AlgebricksHyracksIntegrationUtil.runJob(spec);
    StringBuilder buf = new StringBuilder();
    readFileToString(outFile, buf);
    Assert.assertEquals("403", buf.toString());
    outFile.delete();
}
Also used : TupleFieldEvaluatorFactory(org.apache.hyracks.algebricks.runtime.evaluators.TupleFieldEvaluatorFactory) RecordDescriptor(org.apache.hyracks.api.dataflow.value.RecordDescriptor) AlgebricksMetaOperatorDescriptor(org.apache.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor) SubplanRuntimeFactory(org.apache.hyracks.algebricks.runtime.operators.meta.SubplanRuntimeFactory) AssignRuntimeFactory(org.apache.hyracks.algebricks.runtime.operators.std.AssignRuntimeFactory) NestedTupleSourceRuntimeFactory(org.apache.hyracks.algebricks.runtime.operators.std.NestedTupleSourceRuntimeFactory) SinkWriterRuntimeFactory(org.apache.hyracks.algebricks.runtime.operators.std.SinkWriterRuntimeFactory) EmptyTupleSourceRuntimeFactory(org.apache.hyracks.algebricks.runtime.operators.std.EmptyTupleSourceRuntimeFactory) StreamProjectRuntimeFactory(org.apache.hyracks.algebricks.runtime.operators.std.StreamProjectRuntimeFactory) JobSpecification(org.apache.hyracks.api.job.JobSpecification) AlgebricksPipeline(org.apache.hyracks.algebricks.runtime.base.AlgebricksPipeline) File(java.io.File) Test(org.junit.Test)
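
A note on the subplan wiring above: the AlgebricksPipeline lists the runtimes of the inner plan in order, paired with what appears to be each runtime's output RecordDescriptor, and the SubplanRuntimeFactory runs that pipeline once per outer tuple, with the NestedTupleSourceRuntimeFactory re-injecting the outer tuple and the missing-writer factory presumably covering the case where the inner plan emits nothing. The same two calls, restated with explanatory comments (the comments are mine, not from the project):

    // Inner plan, run once per outer tuple:
    //   nts      - re-injects the outer tuple ([400]) into the nested pipeline
    //   assign2  - appends field 1 = field 0 + 3
    //   project1 - keeps only field 1 (the sum, 403)
    AlgebricksPipeline pipeline = new AlgebricksPipeline(
            new IPushRuntimeFactory[] { nts, assign2, project1 },
            new RecordDescriptor[] { assign1Desc, assign2Desc, project1Desc });

    SubplanRuntimeFactory subplan = new SubplanRuntimeFactory(pipeline,
            new IMissingWriterFactory[] { NoopMissingWriterFactory.INSTANCE }, // invoked when the inner plan produces no tuple (assumption)
            assign1Desc, // schema of the tuples flowing into the subplan
            null);

subplanDesc declares two integer fields, so the subplan output carries the original 400 alongside the computed 403; project2 then keeps field 1, which is the "403" the test asserts.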

Example 7 with AssignRuntimeFactory

Use of org.apache.hyracks.algebricks.runtime.operators.std.AssignRuntimeFactory in project asterixdb by Apache.

The class PushRuntimeTest, method etsAssignProjectWrite.

@Test
public void etsAssignProjectWrite() throws Exception {
    JobSpecification spec = new JobSpecification(FRAME_SIZE);
    IntegerConstantEvalFactory const1 = new IntegerConstantEvalFactory(400);
    IntegerConstantEvalFactory const2 = new IntegerConstantEvalFactory(3);
    EmptyTupleSourceRuntimeFactory ets = new EmptyTupleSourceRuntimeFactory();
    RecordDescriptor etsDesc = new RecordDescriptor(new ISerializerDeserializer[] {});
    AssignRuntimeFactory assign = new AssignRuntimeFactory(new int[] { 0, 1 },
            new IScalarEvaluatorFactory[] { const1, const2 }, new int[] { 0, 1 });
    RecordDescriptor assignDesc = new RecordDescriptor(new ISerializerDeserializer[] {
            IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE });
    StreamProjectRuntimeFactory project = new StreamProjectRuntimeFactory(new int[] { 1 });
    RecordDescriptor projectDesc = new RecordDescriptor(new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
    String filePath = PATH_ACTUAL + SEPARATOR + "etsAssignProjectWrite.out";
    File outFile = new File(filePath);
    SinkWriterRuntimeFactory writer = new SinkWriterRuntimeFactory(new int[] { 0 },
            new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE }, outFile,
            PrinterBasedWriterFactory.INSTANCE, projectDesc);
    AlgebricksMetaOperatorDescriptor algebricksOp = new AlgebricksMetaOperatorDescriptor(spec, 0, 0,
            new IPushRuntimeFactory[] { ets, assign, project, writer },
            new RecordDescriptor[] { etsDesc, assignDesc, projectDesc, null });
    PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, algebricksOp, DEFAULT_NODES);
    spec.addRoot(algebricksOp);
    AlgebricksHyracksIntegrationUtil.runJob(spec);
    StringBuilder buf = new StringBuilder();
    readFileToString(outFile, buf);
    Assert.assertEquals("3", buf.toString());
    outFile.delete();
}
Also used : SinkWriterRuntimeFactory(org.apache.hyracks.algebricks.runtime.operators.std.SinkWriterRuntimeFactory) RecordDescriptor(org.apache.hyracks.api.dataflow.value.RecordDescriptor) EmptyTupleSourceRuntimeFactory(org.apache.hyracks.algebricks.runtime.operators.std.EmptyTupleSourceRuntimeFactory) StreamProjectRuntimeFactory(org.apache.hyracks.algebricks.runtime.operators.std.StreamProjectRuntimeFactory) AlgebricksMetaOperatorDescriptor(org.apache.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor) JobSpecification(org.apache.hyracks.api.job.JobSpecification) AssignRuntimeFactory(org.apache.hyracks.algebricks.runtime.operators.std.AssignRuntimeFactory) File(java.io.File) Test(org.junit.Test)
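
The same constructor pattern runs through all of these examples: AssignRuntimeFactory takes the positions of the columns it writes, one IScalarEvaluatorFactory per written column, and a projection list that fixes the emitted field order, while AlgebricksMetaOperatorDescriptor packs the push runtimes into a single operator, apparently pairing each runtime with its output RecordDescriptor (null for the terminal writer). An annotated restatement of the two calls above (comments are mine, not from the project):

    // assign: field 0 = 400, field 1 = 3; both fields are emitted.
    AssignRuntimeFactory assign = new AssignRuntimeFactory(
            new int[] { 0, 1 }, // outColumns: positions written by the evaluators
            new IScalarEvaluatorFactory[] { const1, const2 }, // one evaluator per out column
            new int[] { 0, 1 }); // projectionList: fields emitted, in order

    // Source-to-sink micro-operator (input arity 0, output arity 0): ets -> assign -> project -> writer.
    AlgebricksMetaOperatorDescriptor algebricksOp = new AlgebricksMetaOperatorDescriptor(spec, 0, 0,
            new IPushRuntimeFactory[] { ets, assign, project, writer },
            new RecordDescriptor[] { etsDesc, assignDesc, projectDesc, null });

The helper methods in the following examples use the same pattern with input and output arity 1, so the resulting micro-operator can be wired between other Hyracks operators.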

Example 8 with AssignRuntimeFactory

Use of org.apache.hyracks.algebricks.runtime.operators.std.AssignRuntimeFactory in project asterixdb by Apache.

The class SecondaryIndexOperationsHelper, method createAssignOp.

protected AlgebricksMetaOperatorDescriptor createAssignOp(JobSpecification spec, int numSecondaryKeyFields, RecordDescriptor secondaryRecDesc) throws AlgebricksException {
    int[] outColumns = new int[numSecondaryKeyFields + numFilterFields];
    int[] projectionList = new int[numSecondaryKeyFields + numPrimaryKeys + numFilterFields];
    for (int i = 0; i < numSecondaryKeyFields + numFilterFields; i++) {
        outColumns[i] = numPrimaryKeys + i;
    }
    int projCount = 0;
    for (int i = 0; i < numSecondaryKeyFields; i++) {
        projectionList[projCount++] = numPrimaryKeys + i;
    }
    for (int i = 0; i < numPrimaryKeys; i++) {
        projectionList[projCount++] = i;
    }
    if (numFilterFields > 0) {
        projectionList[projCount] = numPrimaryKeys + numSecondaryKeyFields;
    }
    IScalarEvaluatorFactory[] sefs = new IScalarEvaluatorFactory[secondaryFieldAccessEvalFactories.length];
    for (int i = 0; i < secondaryFieldAccessEvalFactories.length; ++i) {
        sefs[i] = secondaryFieldAccessEvalFactories[i];
    }
    AssignRuntimeFactory assign = new AssignRuntimeFactory(outColumns, sefs, projectionList);
    AlgebricksMetaOperatorDescriptor asterixAssignOp = new AlgebricksMetaOperatorDescriptor(spec, 1, 1,
            new IPushRuntimeFactory[] { assign }, new RecordDescriptor[] { secondaryRecDesc });
    AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, asterixAssignOp, primaryPartitionConstraint);
    return asterixAssignOp;
}
Also used : AlgebricksMetaOperatorDescriptor(org.apache.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor) AssignRuntimeFactory(org.apache.hyracks.algebricks.runtime.operators.std.AssignRuntimeFactory) AlgebricksPartitionConstraint(org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint) IScalarEvaluatorFactory(org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory)
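
To make the index arithmetic concrete, here is what the three loops produce for illustrative sizes that are not taken from the project (numPrimaryKeys = 1, numSecondaryKeyFields = 2, numFilterFields = 1):

    // Hypothetical sizes: 1 primary key, 2 secondary keys, 1 filter field.
    int[] outColumns     = { 1, 2, 3 };    // computed secondary keys and filter land after the primary key(s)
    int[] projectionList = { 1, 2, 0, 3 }; // emitted order: secondary keys, primary keys, filter

The evaluators in secondaryFieldAccessEvalFactories fill the out columns, and the projection puts the secondary keys ahead of the primary keys, which is the layout described by the secondaryRecDesc passed in.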

Example 9 with AssignRuntimeFactory

Use of org.apache.hyracks.algebricks.runtime.operators.std.AssignRuntimeFactory in project asterixdb by Apache.

The class SecondaryIndexOperationsHelper, method createCastOp.

protected AlgebricksMetaOperatorDescriptor createCastOp(JobSpecification spec, DatasetType dsType) {
    CastTypeDescriptor castFuncDesc = (CastTypeDescriptor) CastTypeDescriptor.FACTORY.createFunctionDescriptor();
    castFuncDesc.setImmutableStates(enforcedItemType, itemType);
    int[] outColumns = new int[1];
    int[] projectionList = new int[(dataset.hasMetaPart() ? 2 : 1) + numPrimaryKeys];
    int recordIdx;
    // the external data-scan operator returns the record as the first field, instead of the last as in the internal case
    if (dsType == DatasetType.EXTERNAL) {
        recordIdx = 0;
        outColumns[0] = 0;
    } else {
        recordIdx = numPrimaryKeys;
        outColumns[0] = numPrimaryKeys;
    }
    for (int i = 0; i <= numPrimaryKeys; i++) {
        projectionList[i] = i;
    }
    if (dataset.hasMetaPart()) {
        projectionList[numPrimaryKeys + 1] = numPrimaryKeys + 1;
    }
    IScalarEvaluatorFactory[] castEvalFact = new IScalarEvaluatorFactory[] { new ColumnAccessEvalFactory(recordIdx) };
    IScalarEvaluatorFactory[] sefs = new IScalarEvaluatorFactory[1];
    sefs[0] = castFuncDesc.createEvaluatorFactory(castEvalFact);
    AssignRuntimeFactory castAssign = new AssignRuntimeFactory(outColumns, sefs, projectionList);
    return new AlgebricksMetaOperatorDescriptor(spec, 1, 1, new IPushRuntimeFactory[] { castAssign }, new RecordDescriptor[] { enforcedRecDesc });
}
Also used : CastTypeDescriptor(org.apache.asterix.runtime.evaluators.functions.CastTypeDescriptor) AlgebricksMetaOperatorDescriptor(org.apache.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor) ColumnAccessEvalFactory(org.apache.hyracks.algebricks.runtime.evaluators.ColumnAccessEvalFactory) AssignRuntimeFactory(org.apache.hyracks.algebricks.runtime.operators.std.AssignRuntimeFactory) AlgebricksPartitionConstraint(org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint) IScalarEvaluatorFactory(org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory)
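
The cast assign follows the same pattern with a single evaluator: ColumnAccessEvalFactory picks the record column out of the input, the cast function descriptor wraps it in an evaluator that produces the record cast to the enforced type, and that result takes the record's place in the output while the projection keeps every field. A hand-worked trace for an internal dataset with two primary keys and no meta part (illustrative sizes, not from the project):

    // Hypothetical: internal dataset, numPrimaryKeys = 2, no meta part.
    int recordIdx = 2;                  // internal case: the record sits after the primary keys
    int[] outColumns     = { 2 };       // the cast result replaces the record field
    int[] projectionList = { 0, 1, 2 }; // keep both primary keys and the (cast) record
    // External case: the record comes first, so recordIdx = 0 and outColumns = { 0 }.

With a meta part, projectionList gains one more entry (numPrimaryKeys + 1), so the meta record is carried through unchanged.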

Example 10 with AssignRuntimeFactory

Use of org.apache.hyracks.algebricks.runtime.operators.std.AssignRuntimeFactory in project asterixdb by Apache.

The class SecondaryIndexOperationsHelper, method createExternalAssignOp.

protected AlgebricksMetaOperatorDescriptor createExternalAssignOp(JobSpecification spec, int numSecondaryKeys, RecordDescriptor secondaryRecDesc) throws AlgebricksException {
    int[] outColumns = new int[numSecondaryKeys];
    int[] projectionList = new int[numSecondaryKeys + numPrimaryKeys];
    for (int i = 0; i < numSecondaryKeys; i++) {
        outColumns[i] = i + numPrimaryKeys + 1;
        projectionList[i] = i + numPrimaryKeys + 1;
    }
    IScalarEvaluatorFactory[] sefs = new IScalarEvaluatorFactory[secondaryFieldAccessEvalFactories.length];
    for (int i = 0; i < secondaryFieldAccessEvalFactories.length; ++i) {
        sefs[i] = secondaryFieldAccessEvalFactories[i];
    }
    // add the external RIDs to the projection list
    for (int i = 0; i < numPrimaryKeys; i++) {
        projectionList[numSecondaryKeys + i] = i + 1;
    }
    AssignRuntimeFactory assign = new AssignRuntimeFactory(outColumns, sefs, projectionList);
    return new AlgebricksMetaOperatorDescriptor(spec, 1, 1, new IPushRuntimeFactory[] { assign }, new RecordDescriptor[] { secondaryRecDesc });
}
Also used : AlgebricksMetaOperatorDescriptor(org.apache.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor) AssignRuntimeFactory(org.apache.hyracks.algebricks.runtime.operators.std.AssignRuntimeFactory) AlgebricksPartitionConstraint(org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint) IScalarEvaluatorFactory(org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory)
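
In the external-dataset variant every index is shifted by one because, as noted in createCastOp, the external scan returns the record as the first field; the primary keys here are the external RIDs. A hand-worked trace with illustrative sizes that are not from the project (numPrimaryKeys = 2, numSecondaryKeys = 1):

    // Hypothetical: numPrimaryKeys = 2 (external RIDs), numSecondaryKeys = 1.
    // The record occupies field 0, the RIDs fields 1 and 2.
    int[] outColumns     = { 3 };       // the computed secondary key goes after record + RIDs
    int[] projectionList = { 3, 1, 2 }; // emit the secondary key, then the external RIDs; drop the record

That mirrors the internal createAssignOp above: secondary keys first, then the keys identifying the source record, with the record itself projected away.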

Aggregations

AssignRuntimeFactory (org.apache.hyracks.algebricks.runtime.operators.std.AssignRuntimeFactory): 10 uses
AlgebricksMetaOperatorDescriptor (org.apache.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor): 9 uses
RecordDescriptor (org.apache.hyracks.api.dataflow.value.RecordDescriptor): 6 uses
JobSpecification (org.apache.hyracks.api.job.JobSpecification): 6 uses
EmptyTupleSourceRuntimeFactory (org.apache.hyracks.algebricks.runtime.operators.std.EmptyTupleSourceRuntimeFactory): 5 uses
Test (org.junit.Test): 5 uses
File (java.io.File): 4 uses
AlgebricksPartitionConstraint (org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint): 4 uses
IScalarEvaluatorFactory (org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory): 4 uses
SinkWriterRuntimeFactory (org.apache.hyracks.algebricks.runtime.operators.std.SinkWriterRuntimeFactory): 4 uses
StreamProjectRuntimeFactory (org.apache.hyracks.algebricks.runtime.operators.std.StreamProjectRuntimeFactory): 2 uses
ArrayList (java.util.ArrayList): 1 use
HashMap (java.util.HashMap): 1 use
List (java.util.List): 1 use
Map (java.util.Map): 1 use
LSMTreeInsertDeleteOperatorDescriptor (org.apache.asterix.common.dataflow.LSMTreeInsertDeleteOperatorDescriptor): 1 use
JobId (org.apache.asterix.common.transactions.JobId): 1 use
FeedConnectionId (org.apache.asterix.external.feed.management.FeedConnectionId): 1 use
FeedCollectOperatorDescriptor (org.apache.asterix.external.operators.FeedCollectOperatorDescriptor): 1 use
FeedIntakeOperatorDescriptor (org.apache.asterix.external.operators.FeedIntakeOperatorDescriptor): 1 use