Example 1 with TimestampWritable

Use of org.apache.hadoop.hive.serde2.io.TimestampWritable in project hive by apache.

From the class GenericUDFTrunc, the method evaluateDate:

private Object evaluateDate(DeferredObject[] arguments) throws UDFArgumentLengthException, HiveException, UDFArgumentTypeException, UDFArgumentException {
    if (arguments.length != 2) {
        throw new UDFArgumentLengthException("trunc() requires 2 arguments, got " + arguments.length);
    }
    if (arguments[0].get() == null || arguments[1].get() == null) {
        return null;
    }
    if (textConverter2 != null) {
        fmtInput = textConverter2.convert(arguments[1].get()).toString();
    }
    Date date;
    switch(inputType1) {
        case STRING:
            String dateString = textConverter1.convert(arguments[0].get()).toString();
            try {
                date = formatter.parse(dateString);
            } catch (ParseException e) {
                return null;
            }
            break;
        case TIMESTAMP:
            Timestamp ts = ((TimestampWritable) timestampConverter.convert(arguments[0].get())).getTimestamp();
            date = ts;
            break;
        case DATE:
            DateWritable dw = (DateWritable) dateWritableConverter.convert(arguments[0].get());
            date = dw.get();
            break;
        default:
            throw new UDFArgumentTypeException(0, "TRUNC() only takes STRING/TIMESTAMP/DATEWRITABLE types, got " + inputType1);
    }
    if (evalDate(date) == null) {
        return null;
    }
    Date newDate = calendar.getTime();
    output.set(formatter.format(newDate));
    return output;
}
Also used: UDFArgumentLengthException (org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException), DateWritable (org.apache.hadoop.hive.serde2.io.DateWritable), UDFArgumentTypeException (org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException), TimestampWritable (org.apache.hadoop.hive.serde2.io.TimestampWritable), ParseException (java.text.ParseException), Timestamp (java.sql.Timestamp), Date (java.util.Date)
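
The TIMESTAMP branch above compiles because java.sql.Timestamp extends java.util.Date, so the value unwrapped from the TimestampWritable can be used directly as the Date to truncate. A minimal standalone sketch of that relationship (the class name TruncDateSketch and the literal timestamp are illustrative, not from the Hive source):

import java.sql.Timestamp;
import java.util.Date;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;

public class TruncDateSketch {
    public static void main(String[] args) {
        // TimestampWritable wraps a java.sql.Timestamp.
        TimestampWritable tw = new TimestampWritable(Timestamp.valueOf("2017-03-15 10:34:27"));
        // java.sql.Timestamp extends java.util.Date, which is why the
        // TIMESTAMP case can assign getTimestamp() straight to a Date.
        Date date = tw.getTimestamp();
        System.out.println(date);
    }
}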

Example 2 with TimestampWritable

Use of org.apache.hadoop.hive.serde2.io.TimestampWritable in project hive by apache.

From the class GenericUDFFromUtcTimestamp, the method evaluate:

@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
    Object o0 = arguments[0].get();
    if (o0 == null) {
        return null;
    }
    Object o1 = arguments[1].get();
    if (o1 == null) {
        return null;
    }
    Object convertedO0 = timestampConverter.convert(o0);
    if (convertedO0 == null) {
        return null;
    }
    Timestamp inputTs = ((TimestampWritable) convertedO0).getTimestamp();
    String tzStr = textConverter.convert(o1).toString();
    TimeZone timezone = TimeZone.getTimeZone(tzStr);
    TimeZone fromTz;
    TimeZone toTz;
    if (invert()) {
        fromTz = timezone;
        toTz = tzUTC;
    } else {
        fromTz = tzUTC;
        toTz = timezone;
    }
    // inputTs is the year/month/day/hour/minute/second in the local timezone.
    // For this UDF we want it in the timezone represented by fromTz
    Timestamp fromTs = timestampFromString(inputTs.toString(), fromTz);
    if (fromTs == null) {
        return null;
    }
    // Now output this timestamp's millis value to the equivalent toTz.
    dateFormat.setTimeZone(toTz);
    Timestamp result = Timestamp.valueOf(dateFormat.format(fromTs));
    if (inputTs.getNanos() != 0) {
        result.setNanos(inputTs.getNanos());
    }
    return result;
}
Also used: TimeZone (java.util.TimeZone), TimestampWritable (org.apache.hadoop.hive.serde2.io.TimestampWritable), Timestamp (java.sql.Timestamp)
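
The conversion is essentially textual: interpret the wall-clock string under the "from" zone to get an instant, then render that instant under the "to" zone. A standalone sketch of the same idea (class name, zone IDs, and the literal timestamp are assumptions for illustration; the UDF itself uses its own dateFormat member and timestampFromString helper):

import java.sql.Timestamp;
import java.text.SimpleDateFormat;
import java.util.TimeZone;

public class UtcShiftSketch {
    public static void main(String[] args) throws Exception {
        // Parse the wall-clock string as if it were in the "from" zone (UTC here)...
        SimpleDateFormat parser = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        parser.setTimeZone(TimeZone.getTimeZone("UTC"));
        long utcMillis = parser.parse("2017-03-15 10:34:27").getTime();

        // ...then render the same instant in the "to" zone, mirroring
        // dateFormat.setTimeZone(toTz) followed by format() in the UDF.
        SimpleDateFormat renderer = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        renderer.setTimeZone(TimeZone.getTimeZone("America/Los_Angeles"));
        Timestamp shifted = Timestamp.valueOf(renderer.format(new java.util.Date(utcMillis)));
        System.out.println(shifted); // expected 2017-03-15 03:34:27.0 (PDT is UTC-7 on this date)
    }
}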

Example 3 with TimestampWritable

Use of org.apache.hadoop.hive.serde2.io.TimestampWritable in project hive by apache.

From the class TestVectorSerDeRow, the method serializeRow:

private Output serializeRow(Object[] row, VectorRandomRowSource source, SerializeWrite serializeWrite) throws HiveException, IOException {
    Output output = new Output();
    serializeWrite.set(output);
    PrimitiveTypeInfo[] primitiveTypeInfos = source.primitiveTypeInfos();
    for (int i = 0; i < primitiveTypeInfos.length; i++) {
        Object object = row[i];
        PrimitiveCategory primitiveCategory = primitiveTypeInfos[i].getPrimitiveCategory();
        switch(primitiveCategory) {
            case BOOLEAN:
                {
                    BooleanWritable expectedWritable = (BooleanWritable) object;
                    boolean value = expectedWritable.get();
                    serializeWrite.writeBoolean(value);
                }
                break;
            case BYTE:
                {
                    ByteWritable expectedWritable = (ByteWritable) object;
                    byte value = expectedWritable.get();
                    serializeWrite.writeByte(value);
                }
                break;
            case SHORT:
                {
                    ShortWritable expectedWritable = (ShortWritable) object;
                    short value = expectedWritable.get();
                    serializeWrite.writeShort(value);
                }
                break;
            case INT:
                {
                    IntWritable expectedWritable = (IntWritable) object;
                    int value = expectedWritable.get();
                    serializeWrite.writeInt(value);
                }
                break;
            case LONG:
                {
                    LongWritable expectedWritable = (LongWritable) object;
                    long value = expectedWritable.get();
                    serializeWrite.writeLong(value);
                }
                break;
            case DATE:
                {
                    DateWritable expectedWritable = (DateWritable) object;
                    Date value = expectedWritable.get();
                    serializeWrite.writeDate(value);
                }
                break;
            case FLOAT:
                {
                    FloatWritable expectedWritable = (FloatWritable) object;
                    float value = expectedWritable.get();
                    serializeWrite.writeFloat(value);
                }
                break;
            case DOUBLE:
                {
                    DoubleWritable expectedWritable = (DoubleWritable) object;
                    double value = expectedWritable.get();
                    serializeWrite.writeDouble(value);
                }
                break;
            case STRING:
                {
                    Text text = (Text) object;
                    serializeWrite.writeString(text.getBytes(), 0, text.getLength());
                }
                break;
            case CHAR:
                {
                    HiveCharWritable expectedWritable = (HiveCharWritable) object;
                    HiveChar value = expectedWritable.getHiveChar();
                    serializeWrite.writeHiveChar(value);
                }
                break;
            case VARCHAR:
                {
                    HiveVarcharWritable expectedWritable = (HiveVarcharWritable) object;
                    HiveVarchar value = expectedWritable.getHiveVarchar();
                    serializeWrite.writeHiveVarchar(value);
                }
                break;
            case BINARY:
                {
                    BytesWritable expectedWritable = (BytesWritable) object;
                    byte[] bytes = expectedWritable.getBytes();
                    int length = expectedWritable.getLength();
                    serializeWrite.writeBinary(bytes, 0, length);
                }
                break;
            case TIMESTAMP:
                {
                    TimestampWritable expectedWritable = (TimestampWritable) object;
                    Timestamp value = expectedWritable.getTimestamp();
                    serializeWrite.writeTimestamp(value);
                }
                break;
            case INTERVAL_YEAR_MONTH:
                {
                    HiveIntervalYearMonthWritable expectedWritable = (HiveIntervalYearMonthWritable) object;
                    HiveIntervalYearMonth value = expectedWritable.getHiveIntervalYearMonth();
                    serializeWrite.writeHiveIntervalYearMonth(value);
                }
                break;
            case INTERVAL_DAY_TIME:
                {
                    HiveIntervalDayTimeWritable expectedWritable = (HiveIntervalDayTimeWritable) object;
                    HiveIntervalDayTime value = expectedWritable.getHiveIntervalDayTime();
                    serializeWrite.writeHiveIntervalDayTime(value);
                }
                break;
            case DECIMAL:
                {
                    HiveDecimalWritable expectedWritable = (HiveDecimalWritable) object;
                    HiveDecimal value = expectedWritable.getHiveDecimal();
                    serializeWrite.writeHiveDecimal(value, ((DecimalTypeInfo) primitiveTypeInfos[i]).scale());
                }
                break;
            default:
                throw new HiveException("Unexpected primitive category " + primitiveCategory);
        }
    }
    return output;
}
Also used: HiveException (org.apache.hadoop.hive.ql.metadata.HiveException), HiveChar (org.apache.hadoop.hive.common.type.HiveChar), TimestampWritable (org.apache.hadoop.hive.serde2.io.TimestampWritable), DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable), ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable), Timestamp (java.sql.Timestamp), PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo), Output (org.apache.hadoop.hive.serde2.ByteStream.Output), HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal), LongWritable (org.apache.hadoop.io.LongWritable), PrimitiveCategory (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory), ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable), IntWritable (org.apache.hadoop.io.IntWritable), HiveIntervalDayTime (org.apache.hadoop.hive.common.type.HiveIntervalDayTime), DateWritable (org.apache.hadoop.hive.serde2.io.DateWritable), HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable), HiveCharWritable (org.apache.hadoop.hive.serde2.io.HiveCharWritable), HiveVarcharWritable (org.apache.hadoop.hive.serde2.io.HiveVarcharWritable), Text (org.apache.hadoop.io.Text), BytesWritable (org.apache.hadoop.io.BytesWritable), HiveIntervalDayTimeWritable (org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable), HiveVarchar (org.apache.hadoop.hive.common.type.HiveVarchar), HiveIntervalYearMonthWritable (org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable), Date (java.sql.Date), DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo), FloatWritable (org.apache.hadoop.io.FloatWritable), HiveIntervalYearMonth (org.apache.hadoop.hive.common.type.HiveIntervalYearMonth), BooleanWritable (org.apache.hadoop.io.BooleanWritable)
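
The pattern in serializeRow is: point a SerializeWrite at an Output buffer, then write each field in column order. A hedged one-field sketch using BinarySortableSerializeWrite from Hive's serde2 module (the class name SerializeTimestampSketch and the literal value are illustrative; writeTimestamp taking java.sql.Timestamp assumes the same Hive 2.x era as this example):

import java.sql.Timestamp;
import org.apache.hadoop.hive.serde2.ByteStream.Output;
import org.apache.hadoop.hive.serde2.binarysortable.fast.BinarySortableSerializeWrite;

public class SerializeTimestampSketch {
    public static void main(String[] args) throws Exception {
        // One-field row, serialized the same way the test harness does it:
        // bind the writer to an Output buffer, then write fields in order.
        BinarySortableSerializeWrite writer = new BinarySortableSerializeWrite(1);
        Output output = new Output();
        writer.set(output);
        writer.writeTimestamp(Timestamp.valueOf("2017-03-15 10:34:27"));
        System.out.println("serialized " + output.getLength() + " bytes");
    }
}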

Example 4 with TimestampWritable

Use of org.apache.hadoop.hive.serde2.io.TimestampWritable in project hive by apache.

From the class TestVectorSerDeRow, the method deserializeAndVerify:

void deserializeAndVerify(Output output, DeserializeRead deserializeRead, VectorRandomRowSource source, Object[] expectedRow) throws HiveException, IOException {
    deserializeRead.set(output.getData(), 0, output.getLength());
    PrimitiveCategory[] primitiveCategories = source.primitiveCategories();
    for (int i = 0; i < primitiveCategories.length; i++) {
        Object expected = expectedRow[i];
        PrimitiveCategory primitiveCategory = primitiveCategories[i];
        PrimitiveTypeInfo primitiveTypeInfo = source.primitiveTypeInfos()[i];
        if (!deserializeRead.readNextField()) {
            throw new HiveException("Unexpected NULL when reading primitiveCategory " + primitiveCategory + " expected (" + expected.getClass().getName() + ", " + expected.toString() + ") " + " deserializeRead " + deserializeRead.getClass().getName());
        }
        switch(primitiveCategory) {
            case BOOLEAN:
                {
                    Boolean value = deserializeRead.currentBoolean;
                    BooleanWritable expectedWritable = (BooleanWritable) expected;
                    if (!value.equals(expectedWritable.get())) {
                        TestCase.fail("Boolean field mismatch (expected " + expected + " found " + value + ")");
                    }
                }
                break;
            case BYTE:
                {
                    Byte value = deserializeRead.currentByte;
                    ByteWritable expectedWritable = (ByteWritable) expected;
                    if (!value.equals(expectedWritable.get())) {
                        TestCase.fail("Byte field mismatch (expected " + (int) expected + " found " + (int) value + ")");
                    }
                }
                break;
            case SHORT:
                {
                    Short value = deserializeRead.currentShort;
                    ShortWritable expectedWritable = (ShortWritable) expected;
                    if (!value.equals(expectedWritable.get())) {
                        TestCase.fail("Short field mismatch (expected " + expected + " found " + value + ")");
                    }
                }
                break;
            case INT:
                {
                    Integer value = deserializeRead.currentInt;
                    IntWritable expectedWritable = (IntWritable) expected;
                    if (!value.equals(expectedWritable.get())) {
                        TestCase.fail("Int field mismatch (expected " + expected + " found " + value + ")");
                    }
                }
                break;
            case LONG:
                {
                    Long value = deserializeRead.currentLong;
                    LongWritable expectedWritable = (LongWritable) expected;
                    if (!value.equals(expectedWritable.get())) {
                        TestCase.fail("Long field mismatch (expected " + expected + " found " + value + ")");
                    }
                }
                break;
            case DATE:
                {
                    DateWritable value = deserializeRead.currentDateWritable;
                    DateWritable expectedWritable = (DateWritable) expected;
                    if (!value.equals(expectedWritable)) {
                        TestCase.fail("Date field mismatch (expected " + expected.toString() + " found " + value.toString() + ")");
                    }
                }
                break;
            case FLOAT:
                {
                    Float value = deserializeRead.currentFloat;
                    FloatWritable expectedWritable = (FloatWritable) expected;
                    if (!value.equals(expectedWritable.get())) {
                        TestCase.fail("Float field mismatch (expected " + expected + " found " + value + ")");
                    }
                }
                break;
            case DOUBLE:
                {
                    Double value = deserializeRead.currentDouble;
                    DoubleWritable expectedWritable = (DoubleWritable) expected;
                    if (!value.equals(expectedWritable.get())) {
                        TestCase.fail("Double field mismatch (expected " + expected + " found " + value + ")");
                    }
                }
                break;
            case STRING:
            case CHAR:
            case VARCHAR:
            case BINARY:
                {
                    byte[] stringBytes = Arrays.copyOfRange(deserializeRead.currentBytes, deserializeRead.currentBytesStart, deserializeRead.currentBytesStart + deserializeRead.currentBytesLength);
                    Text text = new Text(stringBytes);
                    String string = text.toString();
                    switch(primitiveCategory) {
                        case STRING:
                            {
                                Text expectedWritable = (Text) expected;
                                if (!string.equals(expectedWritable.toString())) {
                                    TestCase.fail("String field mismatch (expected '" + expectedWritable.toString() + "' found '" + string + "')");
                                }
                            }
                            break;
                        case CHAR:
                            {
                                HiveChar hiveChar = new HiveChar(string, ((CharTypeInfo) primitiveTypeInfo).getLength());
                                HiveCharWritable expectedWritable = (HiveCharWritable) expected;
                                if (!hiveChar.equals(expectedWritable.getHiveChar())) {
                                    TestCase.fail("Char field mismatch (expected '" + expectedWritable.getHiveChar() + "' found '" + hiveChar + "')");
                                }
                            }
                            break;
                        case VARCHAR:
                            {
                                HiveVarchar hiveVarchar = new HiveVarchar(string, ((VarcharTypeInfo) primitiveTypeInfo).getLength());
                                HiveVarcharWritable expectedWritable = (HiveVarcharWritable) expected;
                                if (!hiveVarchar.equals(expectedWritable.getHiveVarchar())) {
                                    TestCase.fail("Varchar field mismatch (expected '" + expectedWritable.getHiveVarchar() + "' found '" + hiveVarchar + "')");
                                }
                            }
                            break;
                        case BINARY:
                            {
                                BytesWritable expectedWritable = (BytesWritable) expected;
                                if (stringBytes.length != expectedWritable.getLength()) {
                                    TestCase.fail("Byte Array field mismatch (expected " + expected + " found " + stringBytes + ")");
                                }
                                byte[] expectedBytes = expectedWritable.getBytes();
                                for (int b = 0; b < stringBytes.length; b++) {
                                    if (stringBytes[b] != expectedBytes[b]) {
                                        TestCase.fail("Byte Array field mismatch (expected " + expected + " found " + stringBytes + ")");
                                    }
                                }
                            }
                            break;
                        default:
                            throw new HiveException("Unexpected primitive category " + primitiveCategory);
                    }
                }
                break;
            case DECIMAL:
                {
                    HiveDecimal value = deserializeRead.currentHiveDecimalWritable.getHiveDecimal();
                    if (value == null) {
                        TestCase.fail("Decimal field evaluated to NULL");
                    }
                    HiveDecimalWritable expectedWritable = (HiveDecimalWritable) expected;
                    if (!value.equals(expectedWritable.getHiveDecimal())) {
                        DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) primitiveTypeInfo;
                        int precision = decimalTypeInfo.getPrecision();
                        int scale = decimalTypeInfo.getScale();
                        TestCase.fail("Decimal field mismatch (expected " + expectedWritable.getHiveDecimal() + " found " + value.toString() + ") precision " + precision + ", scale " + scale);
                    }
                }
                break;
            case TIMESTAMP:
                {
                    Timestamp value = deserializeRead.currentTimestampWritable.getTimestamp();
                    TimestampWritable expectedWritable = (TimestampWritable) expected;
                    if (!value.equals(expectedWritable.getTimestamp())) {
                        TestCase.fail("Timestamp field mismatch (expected " + expectedWritable.getTimestamp() + " found " + value.toString() + ")");
                    }
                }
                break;
            case INTERVAL_YEAR_MONTH:
                {
                    HiveIntervalYearMonth value = deserializeRead.currentHiveIntervalYearMonthWritable.getHiveIntervalYearMonth();
                    HiveIntervalYearMonthWritable expectedWritable = (HiveIntervalYearMonthWritable) expected;
                    HiveIntervalYearMonth expectedValue = expectedWritable.getHiveIntervalYearMonth();
                    if (!value.equals(expectedValue)) {
                        TestCase.fail("HiveIntervalYearMonth field mismatch (expected " + expectedValue + " found " + value.toString() + ")");
                    }
                }
                break;
            case INTERVAL_DAY_TIME:
                {
                    HiveIntervalDayTime value = deserializeRead.currentHiveIntervalDayTimeWritable.getHiveIntervalDayTime();
                    HiveIntervalDayTimeWritable expectedWritable = (HiveIntervalDayTimeWritable) expected;
                    HiveIntervalDayTime expectedValue = expectedWritable.getHiveIntervalDayTime();
                    if (!value.equals(expectedValue)) {
                        TestCase.fail("HiveIntervalDayTime field mismatch (expected " + expectedValue + " found " + value.toString() + ")");
                    }
                }
                break;
            default:
                throw new HiveException("Unexpected primitive category " + primitiveCategory);
        }
    }
    TestCase.assertTrue(deserializeRead.isEndOfInputReached());
}
Also used: HiveException (org.apache.hadoop.hive.ql.metadata.HiveException), HiveChar (org.apache.hadoop.hive.common.type.HiveChar), TimestampWritable (org.apache.hadoop.hive.serde2.io.TimestampWritable), DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable), ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable), Timestamp (java.sql.Timestamp), PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo), HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal), LongWritable (org.apache.hadoop.io.LongWritable), PrimitiveCategory (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory), ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable), IntWritable (org.apache.hadoop.io.IntWritable), HiveIntervalDayTime (org.apache.hadoop.hive.common.type.HiveIntervalDayTime), DateWritable (org.apache.hadoop.hive.serde2.io.DateWritable), HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable), HiveCharWritable (org.apache.hadoop.hive.serde2.io.HiveCharWritable), HiveVarcharWritable (org.apache.hadoop.hive.serde2.io.HiveVarcharWritable), Text (org.apache.hadoop.io.Text), BytesWritable (org.apache.hadoop.io.BytesWritable), HiveIntervalDayTimeWritable (org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable), HiveVarchar (org.apache.hadoop.hive.common.type.HiveVarchar), HiveIntervalYearMonthWritable (org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable), DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo), FloatWritable (org.apache.hadoop.io.FloatWritable), HiveIntervalYearMonth (org.apache.hadoop.hive.common.type.HiveIntervalYearMonth), BooleanWritable (org.apache.hadoop.io.BooleanWritable)
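
Putting the two test methods together, a minimal timestamp round trip through SerializeWrite and DeserializeRead might look like the sketch below. Assumptions: the BinarySortableDeserializeRead constructor taking a type-info array plus a useExternalBuffer flag (as in the Hive 2.x era of this example), and illustrative names and values throughout.

import java.sql.Timestamp;
import org.apache.hadoop.hive.serde2.ByteStream.Output;
import org.apache.hadoop.hive.serde2.binarysortable.fast.BinarySortableDeserializeRead;
import org.apache.hadoop.hive.serde2.binarysortable.fast.BinarySortableSerializeWrite;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class RoundTripTimestampSketch {
    public static void main(String[] args) throws Exception {
        // Serialize one timestamp field into an Output buffer...
        BinarySortableSerializeWrite writer = new BinarySortableSerializeWrite(1);
        Output output = new Output();
        writer.set(output);
        Timestamp original = Timestamp.valueOf("2017-03-15 10:34:27");
        writer.writeTimestamp(original);

        // ...then read it back the way deserializeAndVerify does: position the
        // reader on the buffer, advance one field, and pull the current value.
        BinarySortableDeserializeRead reader = new BinarySortableDeserializeRead(
            new PrimitiveTypeInfo[] { TypeInfoFactory.timestampTypeInfo },
            /* useExternalBuffer */ false);
        reader.set(output.getData(), 0, output.getLength());
        if (reader.readNextField()) {
            Timestamp roundTripped = reader.currentTimestampWritable.getTimestamp();
            System.out.println(original.equals(roundTripped)); // expect true
        }
    }
}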

Example 5 with TimestampWritable

Use of org.apache.hadoop.hive.serde2.io.TimestampWritable in project hive by apache.

From the class TestVectorGroupByOperator, the method testMultiKey:

private void testMultiKey(String aggregateName, FakeVectorRowBatchFromObjectIterables data, HashMap<Object, Object> expected) throws HiveException {
    Map<String, Integer> mapColumnNames = new HashMap<String, Integer>();
    ArrayList<String> outputColumnNames = new ArrayList<String>();
    ArrayList<ExprNodeDesc> keysDesc = new ArrayList<ExprNodeDesc>();
    Set<Object> keys = new HashSet<Object>();
    // The types array tells us the number of columns in the data
    final String[] columnTypes = data.getTypes();
    // Columns 0..N-1 are keys. Column N is the aggregate value input
    int i = 0;
    for (; i < columnTypes.length - 1; ++i) {
        String columnName = String.format("_col%d", i);
        mapColumnNames.put(columnName, i);
        outputColumnNames.add(columnName);
    }
    mapColumnNames.put("value", i);
    outputColumnNames.add("value");
    VectorizationContext ctx = new VectorizationContext("name", outputColumnNames);
    ArrayList<AggregationDesc> aggs = new ArrayList<AggregationDesc>(1);
    aggs.add(buildAggregationDesc(ctx, aggregateName, GenericUDAFEvaluator.Mode.PARTIAL1, "value", TypeInfoFactory.getPrimitiveTypeInfo(columnTypes[i])));
    for (i = 0; i < columnTypes.length - 1; ++i) {
        String columnName = String.format("_col%d", i);
        keysDesc.add(buildColumnDesc(ctx, columnName, TypeInfoFactory.getPrimitiveTypeInfo(columnTypes[i])));
    }
    GroupByDesc desc = new GroupByDesc();
    desc.setVectorDesc(new VectorGroupByDesc());
    desc.setOutputColumnNames(outputColumnNames);
    desc.setAggregators(aggs);
    desc.setKeys(keysDesc);
    ((VectorGroupByDesc) desc.getVectorDesc()).setProcessingMode(ProcessingMode.HASH);
    CompilationOpContext cCtx = new CompilationOpContext();
    Operator<? extends OperatorDesc> groupByOp = OperatorFactory.get(cCtx, desc);
    VectorGroupByOperator vgo = (VectorGroupByOperator) Vectorizer.vectorizeGroupByOperator(groupByOp, ctx);
    FakeCaptureOutputOperator out = FakeCaptureOutputOperator.addCaptureOutputChild(cCtx, vgo);
    vgo.initialize(hconf, null);
    out.setOutputInspector(new FakeCaptureOutputOperator.OutputInspector() {

        private int rowIndex;

        private String aggregateName;

        private Map<Object, Object> expected;

        private Set<Object> keys;

        @Override
        public void inspectRow(Object row, int tag) throws HiveException {
            assertTrue(row instanceof Object[]);
            Object[] fields = (Object[]) row;
            assertEquals(columnTypes.length, fields.length);
            ArrayList<Object> keyValue = new ArrayList<Object>(columnTypes.length - 1);
            for (int i = 0; i < columnTypes.length - 1; ++i) {
                Object key = fields[i];
                if (null == key) {
                    keyValue.add(null);
                } else if (key instanceof Text) {
                    Text txKey = (Text) key;
                    keyValue.add(txKey.toString());
                } else if (key instanceof ByteWritable) {
                    ByteWritable bwKey = (ByteWritable) key;
                    keyValue.add(bwKey.get());
                } else if (key instanceof ShortWritable) {
                    ShortWritable swKey = (ShortWritable) key;
                    keyValue.add(swKey.get());
                } else if (key instanceof IntWritable) {
                    IntWritable iwKey = (IntWritable) key;
                    keyValue.add(iwKey.get());
                } else if (key instanceof LongWritable) {
                    LongWritable lwKey = (LongWritable) key;
                    keyValue.add(lwKey.get());
                } else if (key instanceof TimestampWritable) {
                    TimestampWritable twKey = (TimestampWritable) key;
                    keyValue.add(twKey.getTimestamp());
                } else if (key instanceof DoubleWritable) {
                    DoubleWritable dwKey = (DoubleWritable) key;
                    keyValue.add(dwKey.get());
                } else if (key instanceof FloatWritable) {
                    FloatWritable fwKey = (FloatWritable) key;
                    keyValue.add(fwKey.get());
                } else if (key instanceof BooleanWritable) {
                    BooleanWritable bwKey = (BooleanWritable) key;
                    keyValue.add(bwKey.get());
                } else {
                    Assert.fail(String.format("Not implemented key output type %s: %s", key.getClass().getName(), key));
                }
            }
            String keyAsString = Arrays.deepToString(keyValue.toArray());
            assertTrue(expected.containsKey(keyValue));
            Object expectedValue = expected.get(keyValue);
            Object value = fields[columnTypes.length - 1];
            Validator validator = getValidator(aggregateName);
            validator.validate(keyAsString, expectedValue, new Object[] { value });
            keys.add(keyValue);
        }

        private FakeCaptureOutputOperator.OutputInspector init(String aggregateName, Map<Object, Object> expected, Set<Object> keys) {
            this.aggregateName = aggregateName;
            this.expected = expected;
            this.keys = keys;
            return this;
        }
    }.init(aggregateName, expected, keys));
    for (VectorizedRowBatch unit : data) {
        vgo.process(unit, 0);
    }
    vgo.close(false);
    List<Object> outBatchList = out.getCapturedRows();
    assertNotNull(outBatchList);
    assertEquals(expected.size(), outBatchList.size());
    assertEquals(expected.size(), keys.size());
}
Also used: Set (java.util.Set), HashSet (java.util.HashSet), HashMap (java.util.HashMap), ArrayList (java.util.ArrayList), TimestampWritable (org.apache.hadoop.hive.serde2.io.TimestampWritable), DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable), ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable), ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc), LongWritable (org.apache.hadoop.io.LongWritable), VectorGroupByDesc (org.apache.hadoop.hive.ql.plan.VectorGroupByDesc), GroupByDesc (org.apache.hadoop.hive.ql.plan.GroupByDesc), ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable), IntWritable (org.apache.hadoop.io.IntWritable), FakeCaptureOutputOperator (org.apache.hadoop.hive.ql.exec.vector.util.FakeCaptureOutputOperator), Text (org.apache.hadoop.io.Text), FloatWritable (org.apache.hadoop.io.FloatWritable), BooleanWritable (org.apache.hadoop.io.BooleanWritable), CompilationOpContext (org.apache.hadoop.hive.ql.CompilationOpContext), AggregationDesc (org.apache.hadoop.hive.ql.plan.AggregationDesc), Map (java.util.Map)
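
Note that the inspector unwraps each TimestampWritable key with getTimestamp() before probing the expected-results map, so that map can be keyed by plain java.sql.Timestamp values. A small sketch of why that works (class name and literal values are illustrative):

import java.sql.Timestamp;
import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;

public class TimestampKeySketch {
    public static void main(String[] args) {
        // Build the expected map with plain Timestamp keys, as the test does.
        Map<Object, Object> expected = new HashMap<Object, Object>();
        expected.put(Timestamp.valueOf("2017-03-15 10:34:27"), 42L);

        // The operator emits TimestampWritable; unwrapping it yields a
        // Timestamp whose equals/hashCode match the map's keys.
        TimestampWritable fromOperator =
            new TimestampWritable(Timestamp.valueOf("2017-03-15 10:34:27"));
        System.out.println(expected.containsKey(fromOperator.getTimestamp())); // expect true
    }
}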

Aggregations

Classes most often used together with TimestampWritable across these examples, with occurrence counts:

TimestampWritable (org.apache.hadoop.hive.serde2.io.TimestampWritable): 77
IntWritable (org.apache.hadoop.io.IntWritable): 33
DateWritable (org.apache.hadoop.hive.serde2.io.DateWritable): 26
Text (org.apache.hadoop.io.Text): 25
Timestamp (java.sql.Timestamp): 24
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector): 24
DeferredJavaObject (org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject): 20
DeferredObject (org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject): 20
HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable): 20
LongWritable (org.apache.hadoop.io.LongWritable): 20
ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable): 18
Test (org.junit.Test): 18
ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable): 17
BytesWritable (org.apache.hadoop.io.BytesWritable): 17
FloatWritable (org.apache.hadoop.io.FloatWritable): 17
DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable): 16
HiveIntervalDayTimeWritable (org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable): 16
BooleanWritable (org.apache.hadoop.io.BooleanWritable): 16
HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal): 12
HiveIntervalYearMonthWritable (org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable): 12