
Example 6 with Complex

Use of org.apache.hadoop.hive.serde2.thrift.test.Complex in project hive by apache.

From the class TestThriftObjectInspectors, the method testThriftObjectInspectors:

public void testThriftObjectInspectors() throws Throwable {
    try {
        ObjectInspector oi1 = ObjectInspectorFactory.getReflectionObjectInspector(Complex.class, ObjectInspectorFactory.ObjectInspectorOptions.THRIFT);
        ObjectInspector oi2 = ObjectInspectorFactory.getReflectionObjectInspector(Complex.class, ObjectInspectorFactory.ObjectInspectorOptions.THRIFT);
        assertEquals(oi1, oi2);
        // metadata
        assertEquals(Category.STRUCT, oi1.getCategory());
        StructObjectInspector soi = (StructObjectInspector) oi1;
        List<? extends StructField> fields = soi.getAllStructFieldRefs();
        assertEquals(10, fields.size());
        assertEquals(fields.get(0), soi.getStructFieldRef("aint"));
        // null
        for (int i = 0; i < fields.size(); i++) {
            assertNull(soi.getStructFieldData(null, fields.get(i)));
        }
        ObjectInspector oi = ObjectInspectorFactory.getReflectionObjectInspector(PropValueUnion.class, ObjectInspectorFactory.ObjectInspectorOptions.THRIFT);
        assertNotNull(oi.toString());
        // real object
        Complex c = new Complex();
        c.setAint(1);
        c.setAString("test");
        List<Integer> c2 = Arrays.asList(1, 2, 3);
        c.setLint(c2);
        List<String> c3 = Arrays.asList("one", "two");
        c.setLString(c3);
        List<IntString> c4 = new ArrayList<IntString>();
        c.setLintString(c4);
        c.setMStringString(null);
        c.setAttributes(null);
        c.setUnionField1(null);
        c.setUnionField2(null);
        c.setUnionField3(null);
        assertEquals(1, soi.getStructFieldData(c, fields.get(0)));
        assertEquals("test", soi.getStructFieldData(c, fields.get(1)));
        assertEquals(c2, soi.getStructFieldData(c, fields.get(2)));
        assertEquals(c3, soi.getStructFieldData(c, fields.get(3)));
        assertEquals(c4, soi.getStructFieldData(c, fields.get(4)));
        assertNull(soi.getStructFieldData(c, fields.get(5)));
        assertNull(soi.getStructFieldData(c, fields.get(6)));
        assertNull(soi.getStructFieldData(c, fields.get(7)));
        assertNull(soi.getStructFieldData(c, fields.get(8)));
        assertNull(soi.getStructFieldData(c, fields.get(9)));
        ArrayList<Object> cfields = new ArrayList<Object>();
        for (int i = 0; i < 10; i++) {
            cfields.add(soi.getStructFieldData(c, fields.get(i)));
        }
        assertEquals(cfields, soi.getStructFieldsDataAsList(c));
        // sub fields
        assertEquals(PrimitiveObjectInspectorFactory.javaIntObjectInspector, fields.get(0).getFieldObjectInspector());
        assertEquals(PrimitiveObjectInspectorFactory.javaStringObjectInspector, fields.get(1).getFieldObjectInspector());
        assertEquals(ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.javaIntObjectInspector), fields.get(2).getFieldObjectInspector());
        assertEquals(ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.javaStringObjectInspector), fields.get(3).getFieldObjectInspector());
        assertEquals(ObjectInspectorFactory.getStandardListObjectInspector(ObjectInspectorFactory.getReflectionObjectInspector(IntString.class, ObjectInspectorFactory.ObjectInspectorOptions.THRIFT)), fields.get(4).getFieldObjectInspector());
        assertEquals(ObjectInspectorFactory.getStandardMapObjectInspector(PrimitiveObjectInspectorFactory.javaStringObjectInspector, PrimitiveObjectInspectorFactory.javaStringObjectInspector), fields.get(5).getFieldObjectInspector());
    } catch (Throwable e) {
        e.printStackTrace();
        throw e;
    }
}
Also used : ArrayList (java.util.ArrayList), Complex (org.apache.hadoop.hive.serde2.thrift.test.Complex), IntString (org.apache.hadoop.hive.serde2.thrift.test.IntString), SetIntString (org.apache.hadoop.hive.serde2.thrift.test.SetIntString)
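
As a companion to the assertions above, here is a minimal sketch (not part of the Hive test) of how the same reflection inspector can walk an arbitrary Thrift object's fields; printStructFields is a hypothetical helper.

import java.util.List;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;

// Hypothetical helper: dump every struct field of a Thrift object using the
// reflection-based inspector exercised by the test above.
public static void printStructFields(Object thriftObj, Class<?> thriftClass) {
    ObjectInspector oi = ObjectInspectorFactory.getReflectionObjectInspector(
        thriftClass, ObjectInspectorFactory.ObjectInspectorOptions.THRIFT);
    StructObjectInspector soi = (StructObjectInspector) oi;
    List<? extends StructField> fields = soi.getAllStructFieldRefs();
    for (StructField field : fields) {
        // getStructFieldData returns the field's raw Java value, or null.
        Object data = soi.getStructFieldData(thriftObj, field);
        System.out.println(field.getFieldName() + " = " + data);
    }
}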

Example 7 with Complex

Use of org.apache.hadoop.hive.serde2.thrift.test.Complex in project hive by apache.

From the class CreateSequenceFile, the method main:

public static void main(String[] args) throws Exception {
    // Read parameters
    int lines = 10;
    List<String> extraArgs = new ArrayList<String>();
    for (int ai = 0; ai < args.length; ai++) {
        if (args[ai].equals("-line") && ai + 1 < args.length) {
            lines = Integer.parseInt(args[ai + 1]);
            ai++;
        } else {
            extraArgs.add(args[ai]);
        }
    }
    if (extraArgs.size() != 1) {
        usage();
    }
    JobConf conf = new JobConf(CreateSequenceFile.class);
    ThriftSerializer serializer = new ThriftSerializer();
    // Open files
    SequenceFile.Writer writer = new SequenceFile.Writer(FileSystem.get(conf), conf, new Path(extraArgs.get(0)), BytesWritable.class, BytesWritable.class);
    // write to file
    BytesWritable key = new BytesWritable();
    Random rand = new Random(20081215);
    for (int i = 0; i < lines; i++) {
        ArrayList<Integer> alist = new ArrayList<Integer>();
        alist.add(i);
        alist.add(i * 2);
        alist.add(i * 3);
        ArrayList<String> slist = new ArrayList<String>();
        slist.add("" + i * 10);
        slist.add("" + i * 100);
        slist.add("" + i * 1000);
        ArrayList<IntString> islist = new ArrayList<IntString>();
        islist.add(new IntString(i * i, "" + i * i * i, i));
        HashMap<String, String> hash = new HashMap<String, String>();
        hash.put("key_" + i, "value_" + i);
        Map<String, Map<String, Map<String, PropValueUnion>>> unionMap = new HashMap<String, Map<String, Map<String, PropValueUnion>>>();
        Map<String, Map<String, PropValueUnion>> erMap = new HashMap<String, Map<String, PropValueUnion>>();
        Map<String, PropValueUnion> attrMap = new HashMap<String, PropValueUnion>();
        erMap.put("erVal" + i, attrMap);
        attrMap.put("value_" + i, PropValueUnion.doubleValue(1.0));
        unionMap.put("key_" + i, erMap);
        Complex complex = new Complex(rand.nextInt(), "record_" + i, alist, slist, islist, hash, unionMap, PropValueUnion.stringValue("test" + i), PropValueUnion.unionMStringString(hash), PropValueUnion.lString(slist));
        Writable value = serializer.serialize(complex);
        writer.append(key, value);
    }
    // Add an all-null record
    Complex complex = new Complex(0, null, null, null, null, null, null, null, null, null);
    Writable value = serializer.serialize(complex);
    writer.append(key, value);
    // Close files
    writer.close();
}
Also used : HashMap (java.util.HashMap), Map (java.util.Map), ArrayList (java.util.ArrayList), Random (java.util.Random), PropValueUnion (org.apache.hadoop.hive.serde2.thrift.test.PropValueUnion), IntString (org.apache.hadoop.hive.serde2.thrift.test.IntString), Complex (org.apache.hadoop.hive.serde2.thrift.test.Complex), Writable (org.apache.hadoop.io.Writable), BytesWritable (org.apache.hadoop.io.BytesWritable), SequenceFile (org.apache.hadoop.io.SequenceFile), JobConf (org.apache.hadoop.mapred.JobConf), Path (org.apache.hadoop.fs.Path)
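
For completeness, a hedged read-back sketch for the file written above, assuming the same BytesWritable key/value layout; dumpSequenceFile is a hypothetical helper, and the key is empty because the writer never sets it.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.SequenceFile;

// Hypothetical helper: iterate the records produced by CreateSequenceFile.
public static void dumpSequenceFile(String pathStr) throws Exception {
    Configuration conf = new Configuration();
    SequenceFile.Reader reader = new SequenceFile.Reader(FileSystem.get(conf), new Path(pathStr), conf);
    BytesWritable key = new BytesWritable();
    BytesWritable value = new BytesWritable();
    int row = 0;
    while (reader.next(key, value)) {
        // Each value holds one Thrift-serialized Complex record.
        System.out.println("row " + row++ + ": " + value.getLength() + " bytes");
    }
    reader.close();
}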

Example 8 with Complex

Use of org.apache.hadoop.hive.serde2.proto.test.Complexpb.Complex in project hive by apache.

From the class Vectorizer, the method getValidateDataTypeErrorMsg:

public static String getValidateDataTypeErrorMsg(String type, VectorExpressionDescriptor.Mode mode, boolean allowComplex, boolean isVectorizationComplexTypesEnabled) {
    type = type.toLowerCase();
    boolean result = supportedDataTypesPattern.matcher(type).matches();
    if (result && mode == VectorExpressionDescriptor.Mode.PROJECTION && type.equals("void")) {
        return "Vectorizing data type void not supported when mode = PROJECTION";
    }
    if (!result) {
        TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(type);
        if (typeInfo.getCategory() != Category.PRIMITIVE) {
            if (allowComplex && isVectorizationComplexTypesEnabled) {
                return null;
            } else if (!allowComplex) {
                return "Vectorizing complex type " + typeInfo.getCategory() + " not supported";
            } else {
                return "Vectorizing complex type " + typeInfo.getCategory() + " not enabled (" + type + ") since " + GroupByDesc.getComplexTypeEnabledCondition(isVectorizationComplexTypesEnabled);
            }
        }
    }
    return (result ? null : "Vectorizing data type " + type + " not supported");
}
Also used : TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo), StructTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo), DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo), PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo)
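
A usage sketch with illustrative inputs; the expected results follow directly from the method body above, and probeVectorizerValidation is a hypothetical wrapper.

import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
import org.apache.hadoop.hive.ql.optimizer.physical.Vectorizer;

// Hypothetical wrapper: probe the validator with a few type names.
public static void probeVectorizerValidation() {
    // Primitive types match supportedDataTypesPattern, so null ("valid") is returned.
    String ok = Vectorizer.getValidateDataTypeErrorMsg("int", VectorExpressionDescriptor.Mode.PROJECTION, false, false);
    // ok == null
    // Complex types are rejected outright when allowComplex is false...
    String rejected = Vectorizer.getValidateDataTypeErrorMsg("array<int>", VectorExpressionDescriptor.Mode.FILTER, false, false);
    // rejected == "Vectorizing complex type LIST not supported"
    // ...but pass when both allowComplex and the enable flag are set.
    String allowed = Vectorizer.getValidateDataTypeErrorMsg("array<int>", VectorExpressionDescriptor.Mode.FILTER, true, true);
    // allowed == null
}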

Example 9 with Complex

Use of org.apache.hadoop.hive.serde2.proto.test.Complexpb.Complex in project hive by apache.

From the class VerifyFast, the method doVerifyDeserializeRead:

public static void doVerifyDeserializeRead(DeserializeRead deserializeRead, TypeInfo typeInfo, Object object, boolean isNull) throws IOException {
    if (isNull) {
        if (object != null) {
            TestCase.fail("Field reports null but object is not null (class " + object.getClass().getName() + ", " + object.toString() + ")");
        }
        return;
    } else if (object == null) {
        TestCase.fail("Field report not null but object is null");
    }
    switch(typeInfo.getCategory()) {
        case PRIMITIVE:
            {
                PrimitiveTypeInfo primitiveTypeInfo = (PrimitiveTypeInfo) typeInfo;
                switch(primitiveTypeInfo.getPrimitiveCategory()) {
                    case BOOLEAN:
                        {
                            boolean value = deserializeRead.currentBoolean;
                            if (!(object instanceof BooleanWritable)) {
                                TestCase.fail("Boolean expected writable not Boolean");
                            }
                            boolean expected = ((BooleanWritable) object).get();
                            if (value != expected) {
                                TestCase.fail("Boolean field mismatch (expected " + expected + " found " + value + ")");
                            }
                        }
                        break;
                    case BYTE:
                        {
                            byte value = deserializeRead.currentByte;
                            if (!(object instanceof ByteWritable)) {
                                TestCase.fail("Byte expected writable not Byte");
                            }
                            byte expected = ((ByteWritable) object).get();
                            if (value != expected) {
                                TestCase.fail("Byte field mismatch (expected " + (int) expected + " found " + (int) value + ")");
                            }
                        }
                        break;
                    case SHORT:
                        {
                            short value = deserializeRead.currentShort;
                            if (!(object instanceof ShortWritable)) {
                                TestCase.fail("Short expected writable not Short");
                            }
                            short expected = ((ShortWritable) object).get();
                            if (value != expected) {
                                TestCase.fail("Short field mismatch (expected " + expected + " found " + value + ")");
                            }
                        }
                        break;
                    case INT:
                        {
                            int value = deserializeRead.currentInt;
                            if (!(object instanceof IntWritable)) {
                                TestCase.fail("Integer expected writable not Integer");
                            }
                            int expected = ((IntWritable) object).get();
                            if (value != expected) {
                                TestCase.fail("Int field mismatch (expected " + expected + " found " + value + ")");
                            }
                        }
                        break;
                    case LONG:
                        {
                            long value = deserializeRead.currentLong;
                            if (!(object instanceof LongWritable)) {
                                TestCase.fail("Long expected writable not Long");
                            }
                            long expected = ((LongWritable) object).get();
                            if (value != expected) {
                                TestCase.fail("Long field mismatch (expected " + expected + " found " + value + ")");
                            }
                        }
                        break;
                    case FLOAT:
                        {
                            float value = deserializeRead.currentFloat;
                            if (!(object instanceof FloatWritable)) {
                                TestCase.fail("Float expected writable not Float");
                            }
                            float expected = ((FloatWritable) object).get();
                            if (value != expected) {
                                TestCase.fail("Float field mismatch (expected " + expected + " found " + value + ")");
                            }
                        }
                        break;
                    case DOUBLE:
                        {
                            double value = deserializeRead.currentDouble;
                            if (!(object instanceof DoubleWritable)) {
                                TestCase.fail("Double expected writable not Double");
                            }
                            double expected = ((DoubleWritable) object).get();
                            if (value != expected) {
                                TestCase.fail("Double field mismatch (expected " + expected + " found " + value + ")");
                            }
                        }
                        break;
                    case STRING:
                        {
                            byte[] stringBytes = Arrays.copyOfRange(deserializeRead.currentBytes, deserializeRead.currentBytesStart, deserializeRead.currentBytesStart + deserializeRead.currentBytesLength);
                            Text text = new Text(stringBytes);
                            String string = text.toString();
                            String expected = ((Text) object).toString();
                            if (!string.equals(expected)) {
                                TestCase.fail("String field mismatch (expected '" + expected + "' found '" + string + "')");
                            }
                        }
                        break;
                    case CHAR:
                        {
                            byte[] stringBytes = Arrays.copyOfRange(deserializeRead.currentBytes, deserializeRead.currentBytesStart, deserializeRead.currentBytesStart + deserializeRead.currentBytesLength);
                            Text text = new Text(stringBytes);
                            String string = text.toString();
                            HiveChar hiveChar = new HiveChar(string, ((CharTypeInfo) primitiveTypeInfo).getLength());
                            HiveChar expected = ((HiveCharWritable) object).getHiveChar();
                            if (!hiveChar.equals(expected)) {
                                TestCase.fail("Char field mismatch (expected '" + expected + "' found '" + hiveChar + "')");
                            }
                        }
                        break;
                    case VARCHAR:
                        {
                            byte[] stringBytes = Arrays.copyOfRange(deserializeRead.currentBytes, deserializeRead.currentBytesStart, deserializeRead.currentBytesStart + deserializeRead.currentBytesLength);
                            Text text = new Text(stringBytes);
                            String string = text.toString();
                            HiveVarchar hiveVarchar = new HiveVarchar(string, ((VarcharTypeInfo) primitiveTypeInfo).getLength());
                            HiveVarchar expected = ((HiveVarcharWritable) object).getHiveVarchar();
                            if (!hiveVarchar.equals(expected)) {
                                TestCase.fail("Varchar field mismatch (expected '" + expected + "' found '" + hiveVarchar + "')");
                            }
                        }
                        break;
                    case DECIMAL:
                        {
                            HiveDecimal value = deserializeRead.currentHiveDecimalWritable.getHiveDecimal();
                            if (value == null) {
                                TestCase.fail("Decimal field evaluated to NULL");
                            }
                            HiveDecimal expected = ((HiveDecimalWritable) object).getHiveDecimal();
                            if (!value.equals(expected)) {
                                DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) primitiveTypeInfo;
                                int precision = decimalTypeInfo.getPrecision();
                                int scale = decimalTypeInfo.getScale();
                                TestCase.fail("Decimal field mismatch (expected " + expected.toString() + " found " + value.toString() + ") precision " + precision + ", scale " + scale);
                            }
                        }
                        break;
                    case DATE:
                        {
                            Date value = deserializeRead.currentDateWritable.get();
                            Date expected = ((DateWritable) object).get();
                            if (!value.equals(expected)) {
                                TestCase.fail("Date field mismatch (expected " + expected.toString() + " found " + value.toString() + ")");
                            }
                        }
                        break;
                    case TIMESTAMP:
                        {
                            Timestamp value = deserializeRead.currentTimestampWritable.getTimestamp();
                            Timestamp expected = ((TimestampWritable) object).getTimestamp();
                            if (!value.equals(expected)) {
                                TestCase.fail("Timestamp field mismatch (expected " + expected.toString() + " found " + value.toString() + ")");
                            }
                        }
                        break;
                    case INTERVAL_YEAR_MONTH:
                        {
                            HiveIntervalYearMonth value = deserializeRead.currentHiveIntervalYearMonthWritable.getHiveIntervalYearMonth();
                            HiveIntervalYearMonth expected = ((HiveIntervalYearMonthWritable) object).getHiveIntervalYearMonth();
                            if (!value.equals(expected)) {
                                TestCase.fail("HiveIntervalYearMonth field mismatch (expected " + expected.toString() + " found " + value.toString() + ")");
                            }
                        }
                        break;
                    case INTERVAL_DAY_TIME:
                        {
                            HiveIntervalDayTime value = deserializeRead.currentHiveIntervalDayTimeWritable.getHiveIntervalDayTime();
                            HiveIntervalDayTime expected = ((HiveIntervalDayTimeWritable) object).getHiveIntervalDayTime();
                            if (!value.equals(expected)) {
                                TestCase.fail("HiveIntervalDayTime field mismatch (expected " + expected.toString() + " found " + value.toString() + ")");
                            }
                        }
                        break;
                    case BINARY:
                        {
                            byte[] byteArray = Arrays.copyOfRange(deserializeRead.currentBytes, deserializeRead.currentBytesStart, deserializeRead.currentBytesStart + deserializeRead.currentBytesLength);
                            BytesWritable bytesWritable = (BytesWritable) object;
                            byte[] expected = Arrays.copyOfRange(bytesWritable.getBytes(), 0, bytesWritable.getLength());
                            if (byteArray.length != expected.length) {
                                TestCase.fail("Byte Array field mismatch (expected " + Arrays.toString(expected) + " found " + Arrays.toString(byteArray) + ")");
                            }
                            for (int b = 0; b < byteArray.length; b++) {
                                if (byteArray[b] != expected[b]) {
                                    TestCase.fail("Byte Array field mismatch (expected " + Arrays.toString(expected) + " found " + Arrays.toString(byteArray) + ")");
                                }
                            }
                        }
                        break;
                    default:
                        throw new Error("Unknown primitive category " + primitiveTypeInfo.getPrimitiveCategory());
                }
            }
            break;
        case LIST:
        case MAP:
        case STRUCT:
        case UNION:
            throw new Error("Complex types need to be handled separately");
        default:
            throw new Error("Unknown category " + typeInfo.getCategory());
    }
}
Also used : HiveChar (org.apache.hadoop.hive.common.type.HiveChar), HiveVarchar (org.apache.hadoop.hive.common.type.HiveVarchar), HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal), HiveIntervalYearMonth (org.apache.hadoop.hive.common.type.HiveIntervalYearMonth), HiveIntervalDayTime (org.apache.hadoop.hive.common.type.HiveIntervalDayTime), Timestamp (java.sql.Timestamp), Date (java.sql.Date), TimestampWritable (org.apache.hadoop.hive.serde2.io.TimestampWritable), DateWritable (org.apache.hadoop.hive.serde2.io.DateWritable), DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable), ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable), ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable), HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable), HiveCharWritable (org.apache.hadoop.hive.serde2.io.HiveCharWritable), HiveVarcharWritable (org.apache.hadoop.hive.serde2.io.HiveVarcharWritable), HiveIntervalYearMonthWritable (org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable), HiveIntervalDayTimeWritable (org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable), PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo), DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo), LongWritable (org.apache.hadoop.io.LongWritable), IntWritable (org.apache.hadoop.io.IntWritable), FloatWritable (org.apache.hadoop.io.FloatWritable), BooleanWritable (org.apache.hadoop.io.BooleanWritable), BytesWritable (org.apache.hadoop.io.BytesWritable), Text (org.apache.hadoop.io.Text)
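
A minimal calling sketch covering the INT branch; verifyIntField is a hypothetical wrapper, the DeserializeRead must already be positioned on the field (so currentInt is populated), and the import paths are assumptions based on the Hive serde2 tree.

import java.io.IOException;
import org.apache.hadoop.hive.serde2.fast.DeserializeRead;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.io.IntWritable;

// Hypothetical wrapper: verify one int field against an expected value.
public static void verifyIntField(DeserializeRead deserializeRead, int expected, boolean isNull) throws IOException {
    TypeInfo intTypeInfo = TypeInfoUtils.getTypeInfoFromTypeString("int");
    // The INT branch above requires the expected value wrapped in an IntWritable;
    // a null object must be paired with isNull == true.
    VerifyFast.doVerifyDeserializeRead(deserializeRead, intTypeInfo,
        isNull ? null : new IntWritable(expected), isNull);
}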

Example 10 with Complex

Use of org.apache.hadoop.hive.serde2.proto.test.Complexpb.Complex in project hive by apache.

From the class TestBinarySortableFast, the method testBinarySortableFastCase:

private void testBinarySortableFastCase(int caseNum, boolean doNonRandomFill, Random r, SerdeRandomRowSource.SupportedTypes supportedTypes, int depth) throws Throwable {
    SerdeRandomRowSource source = new SerdeRandomRowSource();
    // UNDONE: Until Fast BinarySortable supports complex types -- disable.
    source.init(r, supportedTypes, depth);
    int rowCount = 1000;
    Object[][] rows = source.randomRows(rowCount);
    if (doNonRandomFill) {
        MyTestClass.nonRandomRowFill(rows, source.primitiveCategories());
    }
    // We need to operate on sorted data to fully test BinarySortable.
    source.sort(rows);
    StructObjectInspector rowStructObjectInspector = source.rowStructObjectInspector();
    TypeInfo[] typeInfos = source.typeInfos();
    int columnCount = typeInfos.length;
    int writeColumnCount = columnCount;
    StructObjectInspector writeRowStructObjectInspector = rowStructObjectInspector;
    boolean doWriteFewerColumns = r.nextBoolean();
    if (doWriteFewerColumns) {
        writeColumnCount = 1 + r.nextInt(columnCount);
        if (writeColumnCount == columnCount) {
            doWriteFewerColumns = false;
        } else {
            writeRowStructObjectInspector = source.partialRowStructObjectInspector(writeColumnCount);
        }
    }
    String fieldNames = ObjectInspectorUtils.getFieldNames(rowStructObjectInspector);
    String fieldTypes = ObjectInspectorUtils.getFieldTypes(rowStructObjectInspector);
    String order = StringUtils.leftPad("", columnCount, '+');
    String nullOrder = StringUtils.leftPad("", columnCount, 'a');
    AbstractSerDe serde_ascending = TestBinarySortableSerDe.getSerDe(fieldNames, fieldTypes, order, nullOrder);
    AbstractSerDe serde_ascending_fewer = null;
    if (doWriteFewerColumns) {
        String partialFieldNames = ObjectInspectorUtils.getFieldNames(writeRowStructObjectInspector);
        String partialFieldTypes = ObjectInspectorUtils.getFieldTypes(writeRowStructObjectInspector);
        serde_ascending_fewer = TestBinarySortableSerDe.getSerDe(partialFieldNames, partialFieldTypes, order, nullOrder);
    }
    order = StringUtils.leftPad("", columnCount, '-');
    nullOrder = StringUtils.leftPad("", columnCount, 'z');
    AbstractSerDe serde_descending = TestBinarySortableSerDe.getSerDe(fieldNames, fieldTypes, order, nullOrder);
    AbstractSerDe serde_descending_fewer = null;
    if (doWriteFewerColumns) {
        String partialFieldNames = ObjectInspectorUtils.getFieldNames(writeRowStructObjectInspector);
        String partialFieldTypes = ObjectInspectorUtils.getFieldTypes(writeRowStructObjectInspector);
        serde_descending_fewer = TestBinarySortableSerDe.getSerDe(partialFieldNames, partialFieldTypes, order, nullOrder);
    }
    boolean[] columnSortOrderIsDesc = new boolean[columnCount];
    Arrays.fill(columnSortOrderIsDesc, false);
    byte[] columnNullMarker = new byte[columnCount];
    Arrays.fill(columnNullMarker, BinarySortableSerDe.ZERO);
    byte[] columnNotNullMarker = new byte[columnCount];
    Arrays.fill(columnNotNullMarker, BinarySortableSerDe.ONE);
    /*
     * Ascending.
     */
    testBinarySortableFast(source, rows, columnSortOrderIsDesc, columnNullMarker, columnNotNullMarker,
            serde_ascending, rowStructObjectInspector, serde_ascending_fewer, writeRowStructObjectInspector,
            /* ascending */ true, typeInfos, /* useIncludeColumns */ false, /* doWriteFewerColumns */ false, r);
    testBinarySortableFast(source, rows, columnSortOrderIsDesc, columnNullMarker, columnNotNullMarker,
            serde_ascending, rowStructObjectInspector, serde_ascending_fewer, writeRowStructObjectInspector,
            /* ascending */ true, typeInfos, /* useIncludeColumns */ true, /* doWriteFewerColumns */ false, r);
    if (doWriteFewerColumns) {
        testBinarySortableFast(source, rows, columnSortOrderIsDesc, columnNullMarker, columnNotNullMarker,
                serde_ascending, rowStructObjectInspector, serde_ascending_fewer, writeRowStructObjectInspector,
                /* ascending */ true, typeInfos, /* useIncludeColumns */ false, /* doWriteFewerColumns */ true, r);
        testBinarySortableFast(source, rows, columnSortOrderIsDesc, columnNullMarker, columnNotNullMarker,
                serde_ascending, rowStructObjectInspector, serde_ascending_fewer, writeRowStructObjectInspector,
                /* ascending */ true, typeInfos, /* useIncludeColumns */ true, /* doWriteFewerColumns */ true, r);
    }
    /*
     * Descending.
     */
    Arrays.fill(columnSortOrderIsDesc, true);
    testBinarySortableFast(source, rows, columnSortOrderIsDesc, columnNullMarker, columnNotNullMarker,
            serde_descending, rowStructObjectInspector, serde_ascending_fewer, writeRowStructObjectInspector,
            /* ascending */ false, typeInfos, /* useIncludeColumns */ false, /* doWriteFewerColumns */ false, r);
    testBinarySortableFast(source, rows, columnSortOrderIsDesc, columnNullMarker, columnNotNullMarker,
            serde_descending, rowStructObjectInspector, serde_ascending_fewer, writeRowStructObjectInspector,
            /* ascending */ false, typeInfos, /* useIncludeColumns */ true, /* doWriteFewerColumns */ false, r);
    if (doWriteFewerColumns) {
        testBinarySortableFast(source, rows, columnSortOrderIsDesc, columnNullMarker, columnNotNullMarker,
                serde_descending, rowStructObjectInspector, serde_descending_fewer, writeRowStructObjectInspector,
                /* ascending */ false, typeInfos, /* useIncludeColumns */ false, /* doWriteFewerColumns */ true, r);
        testBinarySortableFast(source, rows, columnSortOrderIsDesc, columnNullMarker, columnNotNullMarker,
                serde_descending, rowStructObjectInspector, serde_descending_fewer, writeRowStructObjectInspector,
                /* ascending */ false, typeInfos, /* useIncludeColumns */ true, /* doWriteFewerColumns */ true, r);
    }
}
Also used : SerdeRandomRowSource (org.apache.hadoop.hive.serde2.SerdeRandomRowSource), TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo), AbstractSerDe (org.apache.hadoop.hive.serde2.AbstractSerDe), StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector)
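
The order and null-order strings encode one flag character per column, built with commons-lang padding; a small illustrative sketch (printOrderStrings is hypothetical; '+'/'-' select sort direction, and 'a'/'z' conventionally place NULLs first/last in BinarySortableSerDe).

import org.apache.commons.lang.StringUtils;

// Hypothetical helper: show the per-column flag strings for a given width.
// (The import may be org.apache.commons.lang3 depending on the Hive version.)
public static void printOrderStrings(int columnCount) {
    String ascending = StringUtils.leftPad("", columnCount, '+');   // e.g. "++++"
    String nullsFirst = StringUtils.leftPad("", columnCount, 'a');  // e.g. "aaaa"
    String descending = StringUtils.leftPad("", columnCount, '-');  // e.g. "----"
    String nullsLast = StringUtils.leftPad("", columnCount, 'z');   // e.g. "zzzz"
    System.out.println(ascending + " " + nullsFirst + " " + descending + " " + nullsLast);
}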

Aggregations

PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo): 12
ArrayList (java.util.ArrayList): 10
StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector): 10
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector): 9
TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo): 8
ConstantObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector): 7
ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable): 5
DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable): 5
ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable): 5
BooleanWritable (org.apache.hadoop.io.BooleanWritable): 5
FloatWritable (org.apache.hadoop.io.FloatWritable): 5
IntWritable (org.apache.hadoop.io.IntWritable): 5
LongWritable (org.apache.hadoop.io.LongWritable): 5
Text (org.apache.hadoop.io.Text): 5
Path (org.apache.hadoop.fs.Path): 3
HiveVarchar (org.apache.hadoop.hive.common.type.HiveVarchar): 3
CalciteSemanticException (org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSemanticException): 3
ExprNodeColumnDesc (org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc): 3
ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc): 3
DateWritable (org.apache.hadoop.hive.serde2.io.DateWritable): 3