Example 1 with WritableComparator

Use of org.apache.hadoop.io.WritableComparator in project hadoop by apache.

The class OverrideRecordReader, method fillJoinCollector (this appears to be the new-API org.apache.hadoop.mapreduce.lib.join variant; note the InterruptedException in the throws clause).

/**
   * Instead of filling the JoinCollector with iterators from all
   * data sources, fill only the rightmost for this key.
   * This not only saves space by discarding the other sources, but
   * it also emits the number of key-value pairs in the preferred
   * RecordReader instead of repeating that stream n times, where
   * n is the cardinality of the cross product of the discarded
   * streams for the given key.
   */
protected void fillJoinCollector(K iterkey) throws IOException, InterruptedException {
    final PriorityQueue<ComposableRecordReader<K, ?>> q = getRecordReaderQueue();
    if (q != null && !q.isEmpty()) {
        int highpos = -1;
        ArrayList<ComposableRecordReader<K, ?>> list = new ArrayList<ComposableRecordReader<K, ?>>(kids.length);
        q.peek().key(iterkey);
        final WritableComparator cmp = getComparator();
        // Gather every reader whose head key equals iterkey, remembering the
        // position of the reader with the highest id (the rightmost source).
        while (0 == cmp.compare(q.peek().key(), iterkey)) {
            ComposableRecordReader<K, ?> t = q.poll();
            if (-1 == highpos || list.get(highpos).id() < t.id()) {
                highpos = list.size();
            }
            list.add(t);
            if (q.isEmpty())
                break;
        }
        // Feed only the rightmost reader's values for this key to the collector...
        ComposableRecordReader<K, ?> t = list.remove(highpos);
        t.accept(jc, iterkey);
        // ...and fast-forward the discarded readers past the key.
        for (ComposableRecordReader<K, ?> rr : list) {
            rr.skip(iterkey);
        }
        list.add(t);
        // Re-queue every reader that still has records.
        for (ComposableRecordReader<K, ?> rr : list) {
            if (rr.hasNext()) {
                q.add(rr);
            }
        }
    }
}
Also used : WritableComparator(org.apache.hadoop.io.WritableComparator) ArrayList(java.util.ArrayList)
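
What makes WritableComparator attractive in a merge like this is that the comparator registered for a key class can order keys in their serialized form. A minimal standalone sketch of that byte-level comparison (assumed illustration code, not part of the Hadoop example above; IntWritable and DataOutputBuffer are chosen for convenience):

import java.io.IOException;

import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.WritableComparator;

public class RawCompareSketch {
    public static void main(String[] args) throws IOException {
        // Serialize two keys into reusable byte buffers.
        DataOutputBuffer left = new DataOutputBuffer();
        new IntWritable(7).write(left);
        DataOutputBuffer right = new DataOutputBuffer();
        new IntWritable(42).write(right);
        // Look up the comparator registered for the key class and compare
        // the serialized bytes directly; nothing is deserialized.
        WritableComparator cmp = WritableComparator.get(IntWritable.class);
        int c = cmp.compare(left.getData(), 0, left.getLength(),
                            right.getData(), 0, right.getLength());
        System.out.println(c < 0 ? "7 sorts before 42" : "unexpected ordering");
    }
}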

Example 2 with WritableComparator

Use of org.apache.hadoop.io.WritableComparator in project hadoop by apache.

The class OverrideRecordReader, method fillJoinCollector (this appears to be the old-API org.apache.hadoop.mapred.join variant of Example 1: fillJoinCollector declares only IOException and the queue null check is absent; the logic is otherwise the same).

/**
   * Instead of filling the JoinCollector with iterators from all
   * data sources, fill only the rightmost for this key.
   * This not only saves space by discarding the other sources, but
   * it also emits the number of key-value pairs in the preferred
   * RecordReader instead of repeating that stream n times, where
   * n is the cardinality of the cross product of the discarded
   * streams for the given key.
   */
protected void fillJoinCollector(K iterkey) throws IOException {
    final PriorityQueue<ComposableRecordReader<K, ?>> q = getRecordReaderQueue();
    if (!q.isEmpty()) {
        int highpos = -1;
        ArrayList<ComposableRecordReader<K, ?>> list = new ArrayList<ComposableRecordReader<K, ?>>(kids.length);
        q.peek().key(iterkey);
        final WritableComparator cmp = getComparator();
        while (0 == cmp.compare(q.peek().key(), iterkey)) {
            ComposableRecordReader<K, ?> t = q.poll();
            if (-1 == highpos || list.get(highpos).id() < t.id()) {
                highpos = list.size();
            }
            list.add(t);
            if (q.isEmpty())
                break;
        }
        ComposableRecordReader<K, ?> t = list.remove(highpos);
        t.accept(jc, iterkey);
        for (ComposableRecordReader<K, ?> rr : list) {
            rr.skip(iterkey);
        }
        list.add(t);
        for (ComposableRecordReader<K, ?> rr : list) {
            if (rr.hasNext()) {
                q.add(rr);
            }
        }
    }
}
Also used : WritableComparator(org.apache.hadoop.io.WritableComparator) ArrayList(java.util.ArrayList)
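
Both variants depend on getRecordReaderQueue() returning a PriorityQueue ordered by key, so q.peek() always exposes the smallest outstanding key. A minimal sketch of that ordering idea, using Text values instead of the actual ComposableRecordReaders (an assumption for illustration):

import java.util.PriorityQueue;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparator;

public class QueueOrderSketch {
    @SuppressWarnings("unchecked")
    public static void main(String[] args) {
        // WritableComparator implements java.util.Comparator, so it can
        // drive any ordered collection, including a PriorityQueue.
        PriorityQueue<Text> q =
            new PriorityQueue<Text>(11, WritableComparator.get(Text.class));
        q.add(new Text("banana"));
        q.add(new Text("apple"));
        q.add(new Text("cherry"));
        // Drains in comparator order: apple, banana, cherry.
        while (!q.isEmpty()) {
            System.out.println(q.poll());
        }
    }
}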

Example 3 with WritableComparator

Use of org.apache.hadoop.io.WritableComparator in project hive by apache.

The class TestVectorBetweenIn, method doBetweenInVariation.

private boolean doBetweenInVariation(Random random, String typeName, boolean tryDecimal64, BetweenInVariation betweenInVariation, int subVariation) throws Exception {
    TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(typeName);
    boolean isDecimal64 = checkDecimal64(tryDecimal64, typeInfo);
    DataTypePhysicalVariation dataTypePhysicalVariation = (isDecimal64 ? DataTypePhysicalVariation.DECIMAL_64 : DataTypePhysicalVariation.NONE);
    final int decimal64Scale = (isDecimal64 ? ((DecimalTypeInfo) typeInfo).getScale() : 0);
    // ----------------------------------------------------------------------------------------------
    ObjectInspector objectInspector = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(typeInfo);
    final int valueCount = 10 + random.nextInt(10);
    List<Object> valueList = new ArrayList<Object>(valueCount);
    for (int i = 0; i < valueCount; i++) {
        valueList.add(VectorRandomRowSource.randomWritable(random, typeInfo, objectInspector, dataTypePhysicalVariation, /* allowNull */ false));
    }
    final boolean isBetween = (betweenInVariation == BetweenInVariation.FILTER_BETWEEN || betweenInVariation == BetweenInVariation.FILTER_NOT_BETWEEN || betweenInVariation == BetweenInVariation.PROJECTION_BETWEEN || betweenInVariation == BetweenInVariation.PROJECTION_NOT_BETWEEN);
    List<Object> compareList = new ArrayList<Object>();
    List<Object> sortedList = new ArrayList<Object>(valueCount);
    sortedList.addAll(valueList);
    Object exampleObject = valueList.get(0);
    // Sort with the comparator registered for the writables' concrete class.
    WritableComparator writableComparator = WritableComparator.get((Class<? extends WritableComparable>) exampleObject.getClass());
    sortedList.sort(writableComparator);
    final boolean isInvert;
    if (isBetween) {
        // FILTER_BETWEEN
        // FILTER_NOT_BETWEEN
        // PROJECTION_BETWEEN
        // PROJECTION_NOT_BETWEEN
        isInvert = (betweenInVariation == BetweenInVariation.FILTER_NOT_BETWEEN || betweenInVariation == BetweenInVariation.PROJECTION_NOT_BETWEEN);
        switch(subVariation) {
            case 0:
                // Range covers all values exactly.
                compareList.add(sortedList.get(0));
                compareList.add(sortedList.get(valueCount - 1));
                break;
            case 1:
                // Exclude the first and last sorted.
                compareList.add(sortedList.get(1));
                compareList.add(sortedList.get(valueCount - 2));
                break;
            case 2:
                // Only last 2 sorted.
                compareList.add(sortedList.get(valueCount - 2));
                compareList.add(sortedList.get(valueCount - 1));
                break;
            case 3:
            case 4:
            case 5:
            case 6:
                {
                    // Choose 2 adjacent in the middle.
                    Object min = sortedList.get(5);
                    Object max = sortedList.get(6);
                    compareList.add(min);
                    compareList.add(max);
                    if (subVariation == 4) {
                        removeValue(valueList, min);
                    } else if (subVariation == 5) {
                        removeValue(valueList, max);
                    } else if (subVariation == 6) {
                        removeValue(valueList, min);
                        removeValue(valueList, max);
                    }
                }
                break;
            default:
                return false;
        }
    } else {
        // FILTER_IN.
        // PROJECTION_IN.
        isInvert = false;
        switch(subVariation) {
            case 0:
                // All values.
                compareList.addAll(valueList);
                break;
            case 1:
                // Don't include the first and last sorted.
                for (int i = 1; i < valueCount - 1; i++) {
                    compareList.add(valueList.get(i));
                }
                break;
            case 2:
                // The even ones.
                for (int i = 2; i < valueCount; i += 2) {
                    compareList.add(valueList.get(i));
                }
                break;
            case 3:
                {
                    // Choose 2 adjacent in the middle.
                    Object min = sortedList.get(5);
                    Object max = sortedList.get(6);
                    compareList.add(min);
                    compareList.add(max);
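                    // Note: within case 3, subVariation is always 3, so the
                    // checks below can never fire; they mirror the BETWEEN
                    // branch's cases 4..6 above.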
                    if (subVariation == 4) {
                        removeValue(valueList, min);
                    } else if (subVariation == 5) {
                        removeValue(valueList, max);
                    } else if (subVariation == 6) {
                        removeValue(valueList, min);
                        removeValue(valueList, max);
                    }
                }
                break;
            default:
                return false;
        }
    }
    // ----------------------------------------------------------------------------------------------
    GenerationSpec generationSpec = GenerationSpec.createValueList(typeInfo, valueList);
    List<GenerationSpec> generationSpecList = new ArrayList<GenerationSpec>();
    List<DataTypePhysicalVariation> explicitDataTypePhysicalVariationList = new ArrayList<DataTypePhysicalVariation>();
    generationSpecList.add(generationSpec);
    explicitDataTypePhysicalVariationList.add(dataTypePhysicalVariation);
    VectorRandomRowSource rowSource = new VectorRandomRowSource();
    rowSource.initGenerationSpecSchema(random, generationSpecList, /* maxComplexDepth */ 0, /* allowNull */ true, /* isUnicodeOk */ true, explicitDataTypePhysicalVariationList);
    List<String> columns = new ArrayList<String>();
    String col1Name = rowSource.columnNames().get(0);
    columns.add(col1Name);
    final ExprNodeDesc col1Expr = new ExprNodeColumnDesc(typeInfo, col1Name, "table", false);
    List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
    if (isBetween) {
        children.add(new ExprNodeConstantDesc(Boolean.valueOf(isInvert)));
    }
    children.add(col1Expr);
    for (Object compareObject : compareList) {
        ExprNodeConstantDesc constDesc = new ExprNodeConstantDesc(typeInfo, VectorRandomRowSource.getNonWritableObject(compareObject, typeInfo, objectInspector));
        children.add(constDesc);
    }
    String[] columnNames = columns.toArray(new String[0]);
    Object[][] randomRows = rowSource.randomRows(100000);
    VectorRandomBatchSource batchSource = VectorRandomBatchSource.createInterestingBatches(random, rowSource, randomRows, null);
    final GenericUDF udf;
    final ObjectInspector outputObjectInspector;
    if (isBetween) {
        udf = new GenericUDFBetween();
        // First argument is boolean invert. Arguments 1..3 are inspectors for range limits...
        ObjectInspector[] argumentOIs = new ObjectInspector[4];
        argumentOIs[0] = PrimitiveObjectInspectorFactory.writableBooleanObjectInspector;
        argumentOIs[1] = objectInspector;
        argumentOIs[2] = objectInspector;
        argumentOIs[3] = objectInspector;
        outputObjectInspector = udf.initialize(argumentOIs);
    } else {
        final int compareCount = compareList.size();
        udf = new GenericUDFIn();
        ObjectInspector[] argumentOIs = new ObjectInspector[compareCount];
        ConstantObjectInspector constantObjectInspector = (ConstantObjectInspector) children.get(1).getWritableObjectInspector();
        for (int i = 0; i < compareCount; i++) {
            argumentOIs[i] = constantObjectInspector;
        }
        outputObjectInspector = udf.initialize(argumentOIs);
    }
    TypeInfo outputTypeInfo = TypeInfoUtils.getTypeInfoFromObjectInspector(outputObjectInspector);
    ExprNodeGenericFuncDesc exprDesc = new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo, udf, children);
    return executeTestModesAndVerify(typeInfo, betweenInVariation, compareList, columns, columnNames, children, udf, exprDesc, randomRows, rowSource, batchSource, outputTypeInfo, /* skipAdaptor */ false);
}
Also used : ArrayList(java.util.ArrayList) DataTypePhysicalVariation(org.apache.hadoop.hive.common.type.DataTypePhysicalVariation) ExprNodeColumnDesc(org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc) ExprNodeDesc(org.apache.hadoop.hive.ql.plan.ExprNodeDesc) GenericUDFBetween(org.apache.hadoop.hive.ql.udf.generic.GenericUDFBetween) StandardStructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) ConstantObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector) ExprNodeConstantDesc(org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc) WritableComparator(org.apache.hadoop.io.WritableComparator) VectorRandomBatchSource(org.apache.hadoop.hive.ql.exec.vector.VectorRandomBatchSource) ExprNodeGenericFuncDesc(org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc) StructTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) VarcharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo) DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) CharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo) GenerationSpec(org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource.GenerationSpec) GenericUDF(org.apache.hadoop.hive.ql.udf.generic.GenericUDF) GenericUDFIn(org.apache.hadoop.hive.ql.udf.generic.GenericUDFIn) VectorRandomRowSource(org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource)
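
The get()/sort() sequence above is the page's recurring pattern: WritableComparator.get() returns a comparator that ordinary java.util collections accept. A minimal standalone sketch, with LongWritable standing in for the test's random writables (an assumption):

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.WritableComparator;

public class SortWritablesSketch {
    @SuppressWarnings("unchecked")
    public static void main(String[] args) {
        List<LongWritable> values = new ArrayList<LongWritable>();
        values.add(new LongWritable(30));
        values.add(new LongWritable(10));
        values.add(new LongWritable(20));
        // Sort with the comparator registered for LongWritable.
        values.sort(WritableComparator.get(LongWritable.class));
        // Prints [10, 20, 30].
        System.out.println(values);
    }
}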

Example 4 with WritableComparator

Use of org.apache.hadoop.io.WritableComparator in project hive by apache.

The class TestVectorIndex, method doIndexOnRandomDataType.

private boolean doIndexOnRandomDataType(Random random, boolean isList, String keyTypeName, String elementRootTypeName, boolean allowNulls, boolean isScalarIndex) throws Exception {
    String elementTypeName = VectorRandomRowSource.getDecoratedTypeName(random, elementRootTypeName, SupportedTypes.ALL, /* allowedTypeNameSet */ null, /* depth */ 0, /* maxDepth */ 3);
    TypeInfo elementTypeInfo = TypeInfoUtils.getTypeInfoFromTypeString(elementTypeName);
    ObjectInspector elementObjectInspector = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(elementTypeInfo);
    // ----------------------------------------------------------------------------------------------
    final TypeInfo keyTypeInfo;
    if (isList) {
        keyTypeInfo = TypeInfoFactory.intTypeInfo;
    } else {
        keyTypeInfo = TypeInfoUtils.getTypeInfoFromTypeString(keyTypeName);
    }
    final ObjectInspector keyObjectInspector = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(keyTypeInfo);
    Object exampleObject = (isList ? ((WritableIntObjectInspector) keyObjectInspector).create(0) : VectorRandomRowSource.randomWritable(random, keyTypeInfo, keyObjectInspector, DataTypePhysicalVariation.NONE, /* allowNull */ false));
    // The comparator registered for the key's concrete writable class drives the TreeSet ordering below.
    WritableComparator writableComparator = WritableComparator.get((Class<? extends WritableComparable>) exampleObject.getClass());
    final int allKeyCount = 10 + random.nextInt(10);
    final int keyCount = 5 + random.nextInt(allKeyCount / 2);
    List<Object> allKeyList = new ArrayList<Object>(allKeyCount);
    Set<Object> allKeyTreeSet = new TreeSet<Object>(writableComparator);
    int fillAllKeyCount = 0;
    while (fillAllKeyCount < allKeyCount) {
        Object object;
        if (isList) {
            WritableIntObjectInspector writableOI = (WritableIntObjectInspector) keyObjectInspector;
            int index = random.nextInt(keyCount);
            object = writableOI.create(index);
            while (allKeyTreeSet.contains(object)) {
                index = (random.nextBoolean() ? random.nextInt() : (random.nextBoolean() ? -1 : keyCount));
                object = writableOI.create(index);
            }
        } else {
            do {
                object = VectorRandomRowSource.randomWritable(random, keyTypeInfo, keyObjectInspector, DataTypePhysicalVariation.NONE, /* allowNull */ false);
            } while (allKeyTreeSet.contains(object));
        }
        allKeyList.add(object);
        allKeyTreeSet.add(object);
        fillAllKeyCount++;
    }
    List<Object> keyList = new ArrayList<Object>();
    Set<Object> keyTreeSet = new TreeSet<Object>(writableComparator);
    int fillKeyCount = 0;
    while (fillKeyCount < keyCount) {
        Object newKey = allKeyList.get(random.nextInt(allKeyCount));
        if (keyTreeSet.contains(newKey)) {
            continue;
        }
        keyList.add(newKey);
        keyTreeSet.add(newKey);
        fillKeyCount++;
    }
    // ----------------------------------------------------------------------------------------------
    final TypeInfo typeInfo;
    if (isList) {
        ListTypeInfo listTypeInfo = new ListTypeInfo();
        listTypeInfo.setListElementTypeInfo(elementTypeInfo);
        typeInfo = listTypeInfo;
    } else {
        MapTypeInfo mapTypeInfo = new MapTypeInfo();
        mapTypeInfo.setMapKeyTypeInfo(keyTypeInfo);
        mapTypeInfo.setMapValueTypeInfo(elementTypeInfo);
        typeInfo = mapTypeInfo;
    }
    final String typeName = typeInfo.getTypeName();
    final ObjectInspector objectInspector = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(typeInfo);
    // ----------------------------------------------------------------------------------------------
    GenerationSpec generationSpec = GenerationSpec.createSameType(typeInfo);
    List<GenerationSpec> generationSpecList = new ArrayList<GenerationSpec>();
    List<DataTypePhysicalVariation> explicitDataTypePhysicalVariationList = new ArrayList<DataTypePhysicalVariation>();
    List<String> columns = new ArrayList<String>();
    List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
    int columnNum = 1;
    ExprNodeDesc keyColExpr;
    if (!isScalarIndex) {
        generationSpecList.add(GenerationSpec.createValueList(keyTypeInfo, keyList));
        explicitDataTypePhysicalVariationList.add(DataTypePhysicalVariation.NONE);
        String columnName = "col" + columnNum++;
        columns.add(columnName);
        keyColExpr = new ExprNodeColumnDesc(keyTypeInfo, columnName, "table", false);
    } else {
        Object scalarWritable = keyList.get(random.nextInt(keyCount));
        final Object scalarObject = VectorRandomRowSource.getNonWritableObject(scalarWritable, keyTypeInfo, keyObjectInspector);
        keyColExpr = new ExprNodeConstantDesc(keyTypeInfo, scalarObject);
    }
    /*
    System.out.println("*DEBUG* typeName " + typeName);
    System.out.println("*DEBUG* keyColExpr " + keyColExpr.toString());
    System.out.println("*DEBUG* keyList " + keyList.toString());
    System.out.println("*DEBUG* allKeyList " + allKeyList.toString());
    */
    generationSpecList.add(GenerationSpec.createValueList(typeInfo, keyList));
    explicitDataTypePhysicalVariationList.add(DataTypePhysicalVariation.NONE);
    String columnName = "col" + columnNum++;
    columns.add(columnName);
    ExprNodeDesc listOrMapColExpr;
    listOrMapColExpr = new ExprNodeColumnDesc(typeInfo, columnName, "table", false);
    children.add(listOrMapColExpr);
    children.add(keyColExpr);
    VectorRandomRowSource rowSource = new VectorRandomRowSource();
    rowSource.initGenerationSpecSchema(random, generationSpecList, /* maxComplexDepth */ 0, /* allowNull */ allowNulls, /* isUnicodeOk */ true, explicitDataTypePhysicalVariationList);
    String[] columnNames = columns.toArray(new String[0]);
    Object[][] randomRows = rowSource.randomRows(100000);
    VectorRandomBatchSource batchSource = VectorRandomBatchSource.createInterestingBatches(random, rowSource, randomRows, null);
    final GenericUDF udf = new GenericUDFIndex();
    ObjectInspector[] argumentOIs = new ObjectInspector[2];
    argumentOIs[0] = objectInspector;
    argumentOIs[1] = keyObjectInspector;
    final ObjectInspector outputObjectInspector = udf.initialize(argumentOIs);
    TypeInfo outputTypeInfo = TypeInfoUtils.getTypeInfoFromObjectInspector(outputObjectInspector);
    ExprNodeGenericFuncDesc exprDesc = new ExprNodeGenericFuncDesc(elementTypeInfo, udf, children);
    System.out.println("here");
    final int rowCount = randomRows.length;
    Object[][] resultObjectsArray = new Object[IndexTestMode.count][];
    for (int i = 0; i < IndexTestMode.count; i++) {
        Object[] resultObjects = new Object[rowCount];
        resultObjectsArray[i] = resultObjects;
        IndexTestMode indexTestMode = IndexTestMode.values()[i];
        switch(indexTestMode) {
            case ROW_MODE:
                if (!doRowCastTest(typeInfo, columns, children, udf, exprDesc, randomRows, rowSource.rowStructObjectInspector(), elementObjectInspector, outputTypeInfo, resultObjects)) {
                    return false;
                }
                break;
            case ADAPTOR:
            case VECTOR_EXPRESSION:
                if (!doVectorCastTest(typeInfo, columns, columnNames, rowSource.typeInfos(), rowSource.dataTypePhysicalVariations(), children, udf, exprDesc, indexTestMode, batchSource, exprDesc.getWritableObjectInspector(), outputTypeInfo, resultObjects)) {
                    return false;
                }
                break;
            default:
                throw new RuntimeException("Unexpected index test mode " + indexTestMode);
        }
    }
    for (int i = 0; i < rowCount; i++) {
        // Row-mode is the expected value.
        Object expectedResult = resultObjectsArray[0][i];
        for (int v = 1; v < IndexTestMode.count; v++) {
            Object vectorResult = resultObjectsArray[v][i];
            IndexTestMode indexTestMode = IndexTestMode.values()[v];
            if (expectedResult == null || vectorResult == null) {
                if (expectedResult != null || vectorResult != null) {
                    Assert.fail("Row " + i + " sourceTypeName " + typeName + " " + indexTestMode + " result is NULL " + (vectorResult == null ? "YES" : "NO result " + vectorResult.toString()) + " does not match row-mode expected result is NULL " + (expectedResult == null ? "YES" : "NO result " + expectedResult.toString()) + " row values " + Arrays.toString(randomRows[i]) + " exprDesc " + exprDesc.toString());
                }
            } else {
                if (!expectedResult.equals(vectorResult)) {
                    Assert.fail("Row " + i + " sourceTypeName " + typeName + " " + indexTestMode + " result " + vectorResult.toString() + " (" + vectorResult.getClass().getSimpleName() + ")" + " does not match row-mode expected result " + expectedResult.toString() + " (" + expectedResult.getClass().getSimpleName() + ")" + " row values " + Arrays.toString(randomRows[i]) + " exprDesc " + exprDesc.toString());
                }
            }
        }
    }
    return true;
}
Also used : ArrayList(java.util.ArrayList) DataTypePhysicalVariation(org.apache.hadoop.hive.common.type.DataTypePhysicalVariation) TreeSet(java.util.TreeSet) ExprNodeColumnDesc(org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc) ExprNodeDesc(org.apache.hadoop.hive.ql.plan.ExprNodeDesc) WritableIntObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableIntObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) ConstantObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector) ExprNodeConstantDesc(org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc) WritableComparator(org.apache.hadoop.io.WritableComparator) VectorRandomBatchSource(org.apache.hadoop.hive.ql.exec.vector.VectorRandomBatchSource) ExprNodeGenericFuncDesc(org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc) MapTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo) ListTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo) DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) GenerationSpec(org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource.GenerationSpec) GenericUDFIndex(org.apache.hadoop.hive.ql.udf.generic.GenericUDFIndex) GenericUDF(org.apache.hadoop.hive.ql.udf.generic.GenericUDF) VectorRandomRowSource(org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource)
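
In this test the comparator also decides set membership: TreeSet treats two writables that compare equal as duplicates, whether or not equals() agrees. A minimal sketch of the same pattern, assuming IntWritable keys:

import java.util.TreeSet;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.WritableComparator;

public class KeySetSketch {
    @SuppressWarnings("unchecked")
    public static void main(String[] args) {
        TreeSet<IntWritable> keys =
            new TreeSet<IntWritable>(WritableComparator.get(IntWritable.class));
        keys.add(new IntWritable(5));
        keys.add(new IntWritable(1));
        keys.add(new IntWritable(5)); // equal under compare(); rejected
        // Prints [1, 5]: ordered and deduplicated by the comparator.
        System.out.println(keys);
    }
}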

Example 5 with WritableComparator

Use of org.apache.hadoop.io.WritableComparator in project Cloud9 by lintool.

The class TripleOfIntsTest, method testComparison2.

@Test
public void testComparison2() throws IOException {
    WritableComparator comparator = new TripleOfInts.Comparator();
    TripleOfInts threeInts1 = new TripleOfInts(1, 2, 3);
    TripleOfInts threeInts2 = new TripleOfInts(1, 2, 3);
    TripleOfInts threeInts3 = new TripleOfInts(1, 2, 2);
    TripleOfInts threeInts4 = new TripleOfInts(1, 1, 3);
    TripleOfInts threeInts5 = new TripleOfInts(0, 2, 3);
    assertTrue(WritableComparatorTestHarness.compare(comparator, threeInts1, threeInts2) == 0);
    assertTrue(WritableComparatorTestHarness.compare(comparator, threeInts1, threeInts3) > 0);
    assertTrue(WritableComparatorTestHarness.compare(comparator, threeInts1, threeInts4) > 0);
    assertTrue(WritableComparatorTestHarness.compare(comparator, threeInts1, threeInts5) > 0);
    assertTrue(WritableComparatorTestHarness.compare(comparator, threeInts2, threeInts3) > 0);
    assertTrue(WritableComparatorTestHarness.compare(comparator, threeInts2, threeInts4) > 0);
}
Also used : WritableComparator(org.apache.hadoop.io.WritableComparator) Test(org.junit.Test)
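
TripleOfInts.Comparator itself is not shown on this page, but comparators like it follow a standard recipe: subclass WritableComparator, override the byte-range compare(), and register the subclass with WritableComparator.define(). A hedged sketch of that recipe with a hypothetical two-field PairOfInts (illustration only, not the actual Cloud9 class):

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.io.WritableComparator;

public class PairOfInts implements WritableComparable<PairOfInts> {
    private int left;
    private int right;

    public PairOfInts() {
    }

    public PairOfInts(int left, int right) {
        this.left = left;
        this.right = right;
    }

    public void write(DataOutput out) throws IOException {
        out.writeInt(left);
        out.writeInt(right);
    }

    public void readFields(DataInput in) throws IOException {
        left = in.readInt();
        right = in.readInt();
    }

    public int compareTo(PairOfInts other) {
        int c = Integer.compare(left, other.left);
        return (c != 0) ? c : Integer.compare(right, other.right);
    }

    // Raw comparator: orders records by their serialized bytes, field by
    // field, without deserializing either operand.
    public static class Comparator extends WritableComparator {
        public Comparator() {
            super(PairOfInts.class);
        }

        @Override
        public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
            // writeInt() stores each field big-endian, so readInt() recovers
            // it at a fixed offset: first field at 0, second at 4.
            int c = Integer.compare(readInt(b1, s1), readInt(b2, s2));
            return (c != 0) ? c : Integer.compare(readInt(b1, s1 + 4), readInt(b2, s2 + 4));
        }
    }

    static {
        // Make WritableComparator.get(PairOfInts.class) return the raw
        // comparator instead of the deserializing default.
        WritableComparator.define(PairOfInts.class, new Comparator());
    }
}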

Aggregations

WritableComparator (org.apache.hadoop.io.WritableComparator): 5 uses
ArrayList (java.util.ArrayList): 4 uses
DataTypePhysicalVariation (org.apache.hadoop.hive.common.type.DataTypePhysicalVariation): 2 uses
VectorRandomBatchSource (org.apache.hadoop.hive.ql.exec.vector.VectorRandomBatchSource): 2 uses
VectorRandomRowSource (org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource): 2 uses
GenerationSpec (org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource.GenerationSpec): 2 uses
ExprNodeColumnDesc (org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc): 2 uses
ExprNodeConstantDesc (org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc): 2 uses
ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc): 2 uses
ExprNodeGenericFuncDesc (org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc): 2 uses
GenericUDF (org.apache.hadoop.hive.ql.udf.generic.GenericUDF): 2 uses
ConstantObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector): 2 uses
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector): 2 uses
DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo): 2 uses
TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo): 2 uses
TreeSet (java.util.TreeSet): 1 use
GenericUDFBetween (org.apache.hadoop.hive.ql.udf.generic.GenericUDFBetween): 1 use
GenericUDFIn (org.apache.hadoop.hive.ql.udf.generic.GenericUDFIn): 1 use
GenericUDFIndex (org.apache.hadoop.hive.ql.udf.generic.GenericUDFIndex): 1 use
StandardStructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector): 1 use