Example 56 with IBinaryComparatorFactory

Use of org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory in project asterixdb by apache, in the class OrderedIndexExamplesTest, method bulkOrderVerificationExample.

/**
     * Bulk load failure example. Repeatedly bulk loads a tree with 1,000
     * tuples; in each round, the tuple at one position violates the expected
     * sort order, and the bulk load is expected to fail with an exception.
     */
@Test
public void bulkOrderVerificationExample() throws Exception {
    if (LOGGER.isLoggable(Level.INFO)) {
        LOGGER.info("Bulk load order verification example");
    }
    // Declare fields.
    int fieldCount = 2;
    ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
    typeTraits[0] = IntegerPointable.TYPE_TRAITS;
    typeTraits[1] = IntegerPointable.TYPE_TRAITS;
    // declare keys
    int keyFieldCount = 1;
    IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
    cmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
    Random rnd = new Random();
    ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
    ArrayTupleReference tuple = new ArrayTupleReference();
    // This is only used for the LSM-BTree.
    int[] bloomFilterKeyFields = new int[keyFieldCount];
    bloomFilterKeyFields[0] = 0;
    int ins = 1000;
    for (int i = 1; i < ins; i++) {
        ITreeIndex treeIndex = createTreeIndex(typeTraits, cmpFactories, bloomFilterKeyFields, null, null, null, null);
        treeIndex.create();
        treeIndex.activate();
        // Load sorted records, and expect to fail at tuple i.
        IIndexBulkLoader bulkLoader = treeIndex.createBulkLoader(0.7f, true, ins, true);
        for (int j = 0; j < ins; j++) {
            if (j > i) {
                fail("Bulk load failure test unexpectedly succeeded past tuple: " + j);
            }
            int key = j;
            if (j == i) {
                int swapElementCase = Math.abs(rnd.nextInt()) % 2;
                if (swapElementCase == 0) {
                    // Element equal to previous element.
                    key--;
                } else {
                    // Element smaller than previous element.
                    key -= Math.abs(rnd.nextInt() % (ins - 1)) + 1;
                }
            }
            TupleUtils.createIntegerTuple(tb, tuple, key, 5);
            try {
                bulkLoader.add(tuple);
            } catch (HyracksDataException e) {
                if (e.getErrorCode() == ErrorCode.UNSORTED_LOAD_INPUT || e.getErrorCode() == ErrorCode.DUPLICATE_KEY || e.getErrorCode() == ErrorCode.DUPLICATE_LOAD_INPUT) {
                    if (j != i) {
                        fail("Unexpected exception: " + e.getMessage());
                    }
                    // Success.
                    break;
                } else {
                    throw e;
                }
            }
        }
        treeIndex.deactivate();
        treeIndex.destroy();
    }
}
Also used: ITypeTraits (org.apache.hyracks.api.dataflow.value.ITypeTraits), Random (java.util.Random), IIndexBulkLoader (org.apache.hyracks.storage.common.IIndexBulkLoader), ArrayTupleReference (org.apache.hyracks.dataflow.common.comm.io.ArrayTupleReference), IBinaryComparatorFactory (org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory), ITreeIndex (org.apache.hyracks.storage.am.common.api.ITreeIndex), ArrayTupleBuilder (org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder), HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException), Test (org.junit.Test)
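The bulk loader never sees the factory directly: it calls cmpFactories[0].createBinaryComparator() and compares the raw bytes of the key fields. Below is a minimal, self-contained sketch of that contract, assuming only that the hyracks-api and hyracks-data-std artifacts are on the classpath; the class name ComparatorSketch is invented for illustration.

import java.nio.ByteBuffer;

import org.apache.hyracks.api.dataflow.value.IBinaryComparator;
import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import org.apache.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
import org.apache.hyracks.data.std.primitive.IntegerPointable;

public class ComparatorSketch {
    public static void main(String[] args) throws Exception {
        // The same factory the test stores in cmpFactories[0].
        IBinaryComparatorFactory factory = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
        IBinaryComparator cmp = factory.createBinaryComparator();
        // IntegerPointable reads 4-byte big-endian integers, the same layout
        // that IntegerSerializerDeserializer writes.
        byte[] a = ByteBuffer.allocate(4).putInt(42).array();
        byte[] b = ByteBuffer.allocate(4).putInt(77).array();
        // A negative result means the first operand sorts before the second.
        System.out.println(cmp.compare(a, 0, 4, b, 0, 4));
    }
}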

Example 57 with IBinaryComparatorFactory

Use of org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory in project asterixdb by apache, in the class OrderedIndexExamplesTest, method twoFixedLengthKeysOneFixedLengthValueExample.

/**
     * Composite Key Example (Non-Unique Index). Create a tree index with two
     * fixed-length key fields and one fixed-length value field. Fill the index
     * with random values using insertions (not bulk load), then perform scans
     * and a range search.
     */
@Test
public void twoFixedLengthKeysOneFixedLengthValueExample() throws Exception {
    if (LOGGER.isLoggable(Level.INFO)) {
        LOGGER.info("Composite Key Test");
    }
    // Declare fields.
    int fieldCount = 3;
    ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
    typeTraits[0] = IntegerPointable.TYPE_TRAITS;
    typeTraits[1] = IntegerPointable.TYPE_TRAITS;
    typeTraits[2] = IntegerPointable.TYPE_TRAITS;
    // Declare field serdes.
    ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
    // declare keys
    int keyFieldCount = 2;
    IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
    cmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
    cmpFactories[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
    // This is only used for the LSM-BTree.
    int[] bloomFilterKeyFields = new int[keyFieldCount];
    bloomFilterKeyFields[0] = 0;
    bloomFilterKeyFields[1] = 1;
    ITreeIndex treeIndex = createTreeIndex(typeTraits, cmpFactories, bloomFilterKeyFields, null, null, null, null);
    treeIndex.create();
    treeIndex.activate();
    long start = System.currentTimeMillis();
    if (LOGGER.isLoggable(Level.INFO)) {
        LOGGER.info("Inserting into tree...");
    }
    ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
    ArrayTupleReference tuple = new ArrayTupleReference();
    IIndexAccessor indexAccessor = treeIndex.createAccessor(TestOperationCallback.INSTANCE, TestOperationCallback.INSTANCE);
    int numInserts = 10000;
    for (int i = 0; i < numInserts; i++) {
        int f0 = rnd.nextInt() % 2000;
        int f1 = rnd.nextInt() % 1000;
        int f2 = 5;
        TupleUtils.createIntegerTuple(tb, tuple, f0, f1, f2);
        if (LOGGER.isLoggable(Level.INFO)) {
            if (i % 1000 == 0) {
                LOGGER.info("Inserting " + i + " : " + f0 + " " + f1 + " " + f2);
            }
        }
        try {
            indexAccessor.insert(tuple);
        } catch (HyracksDataException e) {
            if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
                throw e;
            }
        }
    }
    long end = System.currentTimeMillis();
    if (LOGGER.isLoggable(Level.INFO)) {
        LOGGER.info(numInserts + " inserts in " + (end - start) + "ms");
    }
    orderedScan(indexAccessor, fieldSerdes);
    diskOrderScan(indexAccessor, fieldSerdes);
    // Build low key.
    ArrayTupleBuilder lowKeyTb = new ArrayTupleBuilder(1);
    ArrayTupleReference lowKey = new ArrayTupleReference();
    TupleUtils.createIntegerTuple(lowKeyTb, lowKey, -3);
    // Build high key.
    ArrayTupleBuilder highKeyTb = new ArrayTupleBuilder(1);
    ArrayTupleReference highKey = new ArrayTupleReference();
    TupleUtils.createIntegerTuple(highKeyTb, highKey, 3);
    // Prefix-Range search in [-3, 3]
    rangeSearch(cmpFactories, indexAccessor, fieldSerdes, lowKey, highKey, null, null);
    treeIndex.validate();
    treeIndex.deactivate();
    treeIndex.destroy();
}
Also used: ITypeTraits (org.apache.hyracks.api.dataflow.value.ITypeTraits), ArrayTupleReference (org.apache.hyracks.dataflow.common.comm.io.ArrayTupleReference), IBinaryComparatorFactory (org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory), ArrayTupleBuilder (org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder), ISerializerDeserializer (org.apache.hyracks.api.dataflow.value.ISerializerDeserializer), IIndexAccessor (org.apache.hyracks.storage.common.IIndexAccessor), HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException), ITreeIndex (org.apache.hyracks.storage.am.common.api.ITreeIndex), Test (org.junit.Test)
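Note that the range search above is a prefix search: the low and high keys carry only the first of the two key fields, so only cmpFactories[0] is consulted. The composite ordering itself is plain lexicographic comparison, field by field. The sketch below illustrates that rule outside the index; the compareKeys helper and the fixed 4-byte field layout are invented for the example and are not Hyracks API.

import java.nio.ByteBuffer;

import org.apache.hyracks.api.dataflow.value.IBinaryComparator;
import org.apache.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
import org.apache.hyracks.data.std.primitive.IntegerPointable;

public class CompositeKeySketch {
    // Compare two keys of cmps.length int fields each, laid out back to back.
    static int compareKeys(IBinaryComparator[] cmps, byte[] k1, byte[] k2) throws Exception {
        for (int f = 0; f < cmps.length; f++) {
            int c = cmps[f].compare(k1, f * 4, 4, k2, f * 4, 4);
            if (c != 0) {
                return c; // the first differing field decides the order
            }
        }
        return 0;
    }

    public static void main(String[] args) throws Exception {
        IBinaryComparator[] cmps = {
                PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY).createBinaryComparator(),
                PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY).createBinaryComparator() };
        byte[] k1 = ByteBuffer.allocate(8).putInt(1).putInt(9).array();
        byte[] k2 = ByteBuffer.allocate(8).putInt(1).putInt(3).array();
        // Positive: (1, 9) sorts after (1, 3) because the second field decides.
        System.out.println(compareKeys(cmps, k1, k2));
    }
}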

Example 58 with IBinaryComparatorFactory

Use of org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory in project asterixdb by apache, in the class OrderedIndexExamplesTest, method updateExample.

/**
     * Update example. Create a BTree with one variable-length key field and
     * one variable-length value field. Fill the BTree with random values using
     * insertions, then update the entries one by one. Repeat the procedure a
     * few times on the same BTree.
     */
@Test
public void updateExample() throws Exception {
    if (LOGGER.isLoggable(Level.INFO)) {
        LOGGER.info("Update example");
    }
    // Declare fields.
    int fieldCount = 2;
    ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
    typeTraits[0] = UTF8StringPointable.TYPE_TRAITS;
    typeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
    // Declare field serdes.
    ISerializerDeserializer[] fieldSerdes = { new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() };
    // Declare keys.
    int keyFieldCount = 1;
    IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
    cmpFactories[0] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);
    // This is only used for the LSM-BTree.
    int[] bloomFilterKeyFields = new int[keyFieldCount];
    bloomFilterKeyFields[0] = 0;
    ITreeIndex treeIndex = createTreeIndex(typeTraits, cmpFactories, bloomFilterKeyFields, null, null, null, null);
    treeIndex.create();
    treeIndex.activate();
    if (LOGGER.isLoggable(Level.INFO)) {
        LOGGER.info("Inserting into tree...");
    }
    IIndexAccessor indexAccessor = treeIndex.createAccessor(TestOperationCallback.INSTANCE, TestOperationCallback.INSTANCE);
    ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
    ArrayTupleReference tuple = new ArrayTupleReference();
    int maxLength = 10;
    int ins = 10000;
    String[] keys = new String[ins];
    for (int i = 0; i < ins; i++) {
        String f0 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
        String f1 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
        TupleUtils.createTuple(tb, tuple, fieldSerdes, f0, f1);
        keys[i] = f0;
        if (LOGGER.isLoggable(Level.INFO)) {
            if (i % 1000 == 0) {
                LOGGER.info("Inserting " + i);
            }
        }
        try {
            indexAccessor.insert(tuple);
        } catch (HyracksDataException e) {
            if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
                throw e;
            }
        }
    }
    // Print before doing any updates.
    orderedScan(indexAccessor, fieldSerdes);
    int runs = 3;
    for (int run = 0; run < runs; run++) {
        if (LOGGER.isLoggable(Level.INFO)) {
            LOGGER.info("Update test run: " + (run + 1) + "/" + runs);
            LOGGER.info("Updating BTree");
        }
        for (int i = 0; i < ins; i++) {
            // Generate a new random value for f1.
            String f1 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
            TupleUtils.createTuple(tb, tuple, fieldSerdes, keys[i], f1);
            if (LOGGER.isLoggable(Level.INFO)) {
                if (i % 1000 == 0) {
                    LOGGER.info("Updating " + i);
                }
            }
            indexAccessor.update(tuple);
        }
        // Do another scan after a round of updates.
        orderedScan(indexAccessor, fieldSerdes);
    }
    treeIndex.validate();
    treeIndex.deactivate();
    treeIndex.destroy();
}
Also used: ITypeTraits (org.apache.hyracks.api.dataflow.value.ITypeTraits), ArrayTupleReference (org.apache.hyracks.dataflow.common.comm.io.ArrayTupleReference), IBinaryComparatorFactory (org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory), ArrayTupleBuilder (org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder), UTF8StringSerializerDeserializer (org.apache.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer), ISerializerDeserializer (org.apache.hyracks.api.dataflow.value.ISerializerDeserializer), IIndexAccessor (org.apache.hyracks.storage.common.IIndexAccessor), HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException), ITreeIndex (org.apache.hyracks.storage.am.common.api.ITreeIndex), Test (org.junit.Test)
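The same machinery handles the variable-length UTF8 keys used here, because each comparison receives the exact start offset and length of the serialized field. A small sketch of comparing two serialized string fields, reusing only classes the example already imports; the one caveat is TupleUtils, whose package has moved between Hyracks versions, so the import below is an assumption about a recent tree.

import org.apache.hyracks.api.dataflow.value.IBinaryComparator;
import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
import org.apache.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
import org.apache.hyracks.data.std.primitive.UTF8StringPointable;
import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleReference;
import org.apache.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
// Assumed path; older trees use org.apache.hyracks.dataflow.common.util.TupleUtils.
import org.apache.hyracks.dataflow.common.utils.TupleUtils;

public class Utf8CompareSketch {
    public static void main(String[] args) throws Exception {
        ISerializerDeserializer[] serdes = { new UTF8StringSerializerDeserializer() };
        // One builder per tuple: an ArrayTupleReference points into its builder's buffer.
        ArrayTupleBuilder tb1 = new ArrayTupleBuilder(1);
        ArrayTupleBuilder tb2 = new ArrayTupleBuilder(1);
        ArrayTupleReference t1 = new ArrayTupleReference();
        ArrayTupleReference t2 = new ArrayTupleReference();
        TupleUtils.createTuple(tb1, t1, serdes, "apple");
        TupleUtils.createTuple(tb2, t2, serdes, "banana");
        IBinaryComparator cmp =
                PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY).createBinaryComparator();
        // Negative: "apple" sorts before "banana".
        System.out.println(cmp.compare(t1.getFieldData(0), t1.getFieldStart(0), t1.getFieldLength(0),
                t2.getFieldData(0), t2.getFieldStart(0), t2.getFieldLength(0)));
    }
}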

Example 59 with IBinaryComparatorFactory

Use of org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory in project asterixdb by apache, in the class OrderedIndexExamplesTest, method pageSplitTestExample.

/**
     * Tests the BTree page split. Originally this test failed because the
     * BTree split pages by cardinality rather than by size, so there could be
     * too little space to insert the new tuple after the split; that threw an
     * error, the split was not propagated to the upper level, and the tree
     * was corrupted. Pages are now split by size, which is the correct
     * behavior for abnormally large keys/values.
     */
@Test
public void pageSplitTestExample() throws Exception {
    if (LOGGER.isLoggable(Level.INFO)) {
        LOGGER.info("BTree page split test.");
    }
    // Declare fields.
    int fieldCount = 2;
    ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
    typeTraits[0] = UTF8StringPointable.TYPE_TRAITS;
    typeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
    // Declare field serdes.
    ISerializerDeserializer[] fieldSerdes = { new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() };
    // Declare keys.
    int keyFieldCount = 1;
    IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
    cmpFactories[0] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);
    // This is only used for the LSM-BTree.
    int[] bloomFilterKeyFields = new int[keyFieldCount];
    bloomFilterKeyFields[0] = 0;
    ITreeIndex treeIndex = createTreeIndex(typeTraits, cmpFactories, bloomFilterKeyFields, null, null, null, null);
    treeIndex.create();
    treeIndex.activate();
    ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
    ArrayTupleReference tuple = new ArrayTupleReference();
    IIndexAccessor indexAccessor = treeIndex.createAccessor(TestOperationCallback.INSTANCE, TestOperationCallback.INSTANCE);
    // Three short values, then three abnormally large ones, to force page splits by size.
    String[][] records = { { "111", "XXX" }, { "222", "XXX" }, { "333", "XXX" },
            { "444", "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX" },
            { "555", "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX" },
            { "666", "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX" } };
    for (String[] record : records) {
        TupleUtils.createTuple(tb, tuple, fieldSerdes, record[0], record[1]);
        indexAccessor.insert(tuple);
    }
    treeIndex.validate();
    treeIndex.deactivate();
    treeIndex.destroy();
}
Also used: ITypeTraits (org.apache.hyracks.api.dataflow.value.ITypeTraits), ArrayTupleReference (org.apache.hyracks.dataflow.common.comm.io.ArrayTupleReference), IBinaryComparatorFactory (org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory), ArrayTupleBuilder (org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder), UTF8StringSerializerDeserializer (org.apache.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer), ISerializerDeserializer (org.apache.hyracks.api.dataflow.value.ISerializerDeserializer), IIndexAccessor (org.apache.hyracks.storage.common.IIndexAccessor), ITreeIndex (org.apache.hyracks.storage.am.common.api.ITreeIndex), Test (org.junit.Test)
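The fix the comment describes amounts to picking the split point by accumulated bytes instead of tuple count. The toy method below illustrates that policy in isolation; it is not the actual BTree split code, and chooseSplitPoint and the sizes array are invented for the example.

public class SplitPolicySketch {
    // Index of the last tuple kept on the left page, splitting by size:
    // advance until the left side holds at least half of the total bytes.
    static int chooseSplitPoint(int[] tupleSizes) {
        int total = 0;
        for (int size : tupleSizes) {
            total += size;
        }
        int left = 0;
        for (int i = 0; i < tupleSizes.length; i++) {
            left += tupleSizes[i];
            if (left * 2 >= total) {
                return i;
            }
        }
        return tupleSizes.length - 1;
    }

    public static void main(String[] args) {
        // Four small tuples and two abnormally large ones, as in the test above.
        int[] sizes = { 10, 10, 10, 10, 100, 100 };
        // Splitting at half the cardinality (index 2) would leave 30 bytes on
        // the left and 210 on the right; splitting by size returns index 4,
        // giving a far more balanced 140 versus 100.
        System.out.println(chooseSplitPoint(sizes));
    }
}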

Example 60 with IBinaryComparatorFactory

Use of org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory in project asterixdb by apache, in the class WordCountMain, method createJob.

private static JobSpecification createJob(FileSplit[] inSplits, FileSplit[] outSplits, String algo, int htSize, int frameLimit, String format, int frameSize) {
    JobSpecification spec = new JobSpecification(frameSize);
    IFileSplitProvider splitsProvider = new ConstantFileSplitProvider(inSplits);
    RecordDescriptor wordDesc = new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer() });
    FileScanOperatorDescriptor wordScanner = new FileScanOperatorDescriptor(spec, splitsProvider, new WordTupleParserFactory(), wordDesc);
    createPartitionConstraint(spec, wordScanner, inSplits);
    RecordDescriptor groupResultDesc = new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(), IntegerSerializerDeserializer.INSTANCE });
    IOperatorDescriptor gBy;
    int[] keys = new int[] { 0 };
    if ("hash".equalsIgnoreCase(algo)) {
        // fileSize is a static field of WordCountMain (an input-size hint) not shown in this excerpt.
        gBy = new ExternalGroupOperatorDescriptor(spec, htSize, fileSize, keys, frameLimit,
                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
                new UTF8StringNormalizedKeyComputerFactory(),
                new MultiFieldsAggregatorFactory(new IFieldAggregateDescriptorFactory[] {
                        new IntSumFieldAggregatorFactory(1, false), new IntSumFieldAggregatorFactory(3, false),
                        new FloatSumFieldAggregatorFactory(5, false) }),
                new MultiFieldsAggregatorFactory(new IFieldAggregateDescriptorFactory[] {
                        new IntSumFieldAggregatorFactory(1, false), new IntSumFieldAggregatorFactory(2, false),
                        new FloatSumFieldAggregatorFactory(3, false) }),
                groupResultDesc, groupResultDesc,
                new HashSpillableTableFactory(
                        new IBinaryHashFunctionFamily[] { UTF8StringBinaryHashFunctionFamily.INSTANCE }));
        createPartitionConstraint(spec, gBy, outSplits);
        IConnectorDescriptor scanGroupConn = new MToNPartitioningConnectorDescriptor(spec, new FieldHashPartitionComputerFactory(keys, new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
        spec.connect(scanGroupConn, wordScanner, 0, gBy, 0);
    } else {
        IBinaryComparatorFactory[] cfs = new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) };
        IOperatorDescriptor sorter = "memsort".equalsIgnoreCase(algo)
                ? new InMemorySortOperatorDescriptor(spec, keys, new UTF8StringNormalizedKeyComputerFactory(), cfs,
                        wordDesc)
                : new ExternalSortOperatorDescriptor(spec, frameLimit, keys,
                        new UTF8StringNormalizedKeyComputerFactory(), cfs, wordDesc);
        createPartitionConstraint(spec, sorter, outSplits);
        IConnectorDescriptor scanSortConn = new MToNPartitioningConnectorDescriptor(spec, new FieldHashPartitionComputerFactory(keys, new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
        spec.connect(scanSortConn, wordScanner, 0, sorter, 0);
        gBy = new PreclusteredGroupOperatorDescriptor(spec, keys,
                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
                new MultiFieldsAggregatorFactory(
                        new IFieldAggregateDescriptorFactory[] { new CountFieldAggregatorFactory(true) }),
                groupResultDesc);
        createPartitionConstraint(spec, gBy, outSplits);
        OneToOneConnectorDescriptor sortGroupConn = new OneToOneConnectorDescriptor(spec);
        spec.connect(sortGroupConn, sorter, 0, gBy, 0);
    }
    IFileSplitProvider outSplitProvider = new ConstantFileSplitProvider(outSplits);
    IOperatorDescriptor writer = "text".equalsIgnoreCase(format)
            ? new PlainFileWriterOperatorDescriptor(spec, outSplitProvider, ",")
            : new FrameFileWriterOperatorDescriptor(spec, outSplitProvider);
    createPartitionConstraint(spec, writer, outSplits);
    IConnectorDescriptor gbyPrinterConn = new OneToOneConnectorDescriptor(spec);
    spec.connect(gbyPrinterConn, gBy, 0, writer, 0);
    spec.addRoot(writer);
    return spec;
}
Also used: WordTupleParserFactory (org.apache.hyracks.examples.text.WordTupleParserFactory), IFileSplitProvider (org.apache.hyracks.dataflow.std.file.IFileSplitProvider), RecordDescriptor (org.apache.hyracks.api.dataflow.value.RecordDescriptor), HashSpillableTableFactory (org.apache.hyracks.dataflow.std.group.HashSpillableTableFactory), CountFieldAggregatorFactory (org.apache.hyracks.dataflow.std.group.aggregators.CountFieldAggregatorFactory), OneToOneConnectorDescriptor (org.apache.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor), UTF8StringSerializerDeserializer (org.apache.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer), IBinaryHashFunctionFactory (org.apache.hyracks.api.dataflow.value.IBinaryHashFunctionFactory), FileScanOperatorDescriptor (org.apache.hyracks.dataflow.std.file.FileScanOperatorDescriptor), JobSpecification (org.apache.hyracks.api.job.JobSpecification), UTF8StringNormalizedKeyComputerFactory (org.apache.hyracks.dataflow.common.data.normalizers.UTF8StringNormalizedKeyComputerFactory), IConnectorDescriptor (org.apache.hyracks.api.dataflow.IConnectorDescriptor), MultiFieldsAggregatorFactory (org.apache.hyracks.dataflow.std.group.aggregators.MultiFieldsAggregatorFactory), ExternalGroupOperatorDescriptor (org.apache.hyracks.dataflow.std.group.external.ExternalGroupOperatorDescriptor), InMemorySortOperatorDescriptor (org.apache.hyracks.dataflow.std.sort.InMemorySortOperatorDescriptor), ConstantFileSplitProvider (org.apache.hyracks.dataflow.std.file.ConstantFileSplitProvider), IBinaryComparatorFactory (org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory), MToNPartitioningConnectorDescriptor (org.apache.hyracks.dataflow.std.connectors.MToNPartitioningConnectorDescriptor), FieldHashPartitionComputerFactory (org.apache.hyracks.dataflow.common.data.partition.FieldHashPartitionComputerFactory), IOperatorDescriptor (org.apache.hyracks.api.dataflow.IOperatorDescriptor), PlainFileWriterOperatorDescriptor (org.apache.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor), FloatSumFieldAggregatorFactory (org.apache.hyracks.dataflow.std.group.aggregators.FloatSumFieldAggregatorFactory), ExternalSortOperatorDescriptor (org.apache.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor), PreclusteredGroupOperatorDescriptor (org.apache.hyracks.dataflow.std.group.preclustered.PreclusteredGroupOperatorDescriptor), IntSumFieldAggregatorFactory (org.apache.hyracks.dataflow.std.group.aggregators.IntSumFieldAggregatorFactory), FrameFileWriterOperatorDescriptor (org.apache.hyracks.dataflow.std.file.FrameFileWriterOperatorDescriptor)
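createJob only assembles the dataflow graph; in the full WordCountMain the returned JobSpecification is submitted to a Hyracks cluster controller. Below is a hedged sketch of that submission step using the standard Hyracks client API; the host, port, and wrapper class are placeholders, not part of the excerpt above.

import org.apache.hyracks.api.client.HyracksConnection;
import org.apache.hyracks.api.client.IHyracksClientConnection;
import org.apache.hyracks.api.job.JobId;
import org.apache.hyracks.api.job.JobSpecification;

public class SubmitSketch {
    public static void run(JobSpecification spec) throws Exception {
        // Placeholder endpoint: point this at a running cluster controller.
        IHyracksClientConnection hcc = new HyracksConnection("localhost", 1098);
        JobId jobId = hcc.startJob(spec);
        hcc.waitForCompletion(jobId);
    }
}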

Aggregations

IBinaryComparatorFactory (org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory): 86 usages
ITypeTraits (org.apache.hyracks.api.dataflow.value.ITypeTraits): 45 usages
RecordDescriptor (org.apache.hyracks.api.dataflow.value.RecordDescriptor): 25 usages
ArrayTupleBuilder (org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder): 25 usages
Test (org.junit.Test): 25 usages
ISerializerDeserializer (org.apache.hyracks.api.dataflow.value.ISerializerDeserializer): 24 usages
ArrayTupleReference (org.apache.hyracks.dataflow.common.comm.io.ArrayTupleReference): 22 usages
HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException): 19 usages
UTF8StringSerializerDeserializer (org.apache.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer): 19 usages
ITreeIndex (org.apache.hyracks.storage.am.common.api.ITreeIndex): 17 usages
IIndexAccessor (org.apache.hyracks.storage.common.IIndexAccessor): 16 usages
JobSpecification (org.apache.hyracks.api.job.JobSpecification): 15 usages
OneToOneConnectorDescriptor (org.apache.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor): 15 usages
IBinaryComparatorFactoryProvider (org.apache.hyracks.algebricks.data.IBinaryComparatorFactoryProvider): 14 usages
FieldHashPartitionComputerFactory (org.apache.hyracks.dataflow.common.data.partition.FieldHashPartitionComputerFactory): 14 usages
IFileSplitProvider (org.apache.hyracks.dataflow.std.file.IFileSplitProvider): 13 usages
LogicalVariable (org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable): 11 usages
IVariableTypeEnvironment (org.apache.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment): 11 usages
MToNPartitioningMergingConnectorDescriptor (org.apache.hyracks.dataflow.std.connectors.MToNPartitioningMergingConnectorDescriptor): 10 usages
IPrimitiveValueProviderFactory (org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory): 10 usages