Search in sources :

Example 11 with DateWritableV2

Use of org.apache.hadoop.hive.serde2.io.DateWritableV2 in the Apache Hive project.

The class TestVectorUDFDatetimeLegacyHybridCalendar, method compareToUDFDatetimeLegacyHybridCalendar:

/**
 * Evaluates {@code udf} on a single date given as days since the epoch and
 * asserts that the result equals the expected epoch-day value.
 *
 * @param udf the date-conversion GenericUDF under test
 * @param in  input date, expressed as days since the epoch
 * @param out expected output date, expressed as days since the epoch
 * @throws HiveException if UDF evaluation fails
 */
private void compareToUDFDatetimeLegacyHybridCalendar(GenericUDF udf, long in, long out) throws HiveException {
    DateWritableV2 dateWInput = new DateWritableV2((int) in);
    DateWritableV2 dateWOutput = (DateWritableV2) udf.evaluate(
        new GenericUDF.DeferredObject[] { new GenericUDF.DeferredJavaObject(dateWInput) });
    // JUnit's assertEquals contract is (expected, actual); the original call had
    // the two swapped, which yields a misleading message on failure.
    Assert.assertEquals(Date.ofEpochDay((int) out), dateWOutput.get());
}
Also used : DateWritableV2(org.apache.hadoop.hive.serde2.io.DateWritableV2)

Example 12 with DateWritableV2

Use of org.apache.hadoop.hive.serde2.io.DateWritableV2 in the Apache Hive project.

The class VectorHashKeyWrapperGeneral, method stringifyKeys:

/*
 * Renders this hash-key wrapper's column values as a human-readable string,
 * grouped by physical storage type (longs, doubles, byte lengths, decimals,
 * timestamps, interval day-times). Null keys print as "null"; DATE-typed long
 * keys additionally print their calendar form.
 *
 * This method is mainly intended for debug display purposes.
 */
@Override
public String stringifyKeys(VectorColumnSetInfo columnSetInfo) {
    StringBuilder sb = new StringBuilder();
    // True until the first non-empty group is emitted; later groups are
    // prefixed with ", " to separate them.
    boolean isFirstKey = true;
    if (longValues.length > 0) {
        isFirstKey = false;
        sb.append("longs ");
        boolean isFirstValue = true;
        for (int i = 0; i < columnSetInfo.longIndices.length; i++) {
            if (isFirstValue) {
                isFirstValue = false;
            } else {
                sb.append(", ");
            }
            int keyIndex = columnSetInfo.longIndices[i];
            if (isNull[keyIndex]) {
                sb.append("null");
            } else {
                sb.append(longValues[i]);
                PrimitiveTypeInfo primitiveTypeInfo = (PrimitiveTypeInfo) columnSetInfo.typeInfos[keyIndex];
                // FUTURE: Add INTERVAL_YEAR_MONTH, etc, as desired.
                switch (primitiveTypeInfo.getPrimitiveCategory()) {
                    case DATE:
                        {
                            // DATE keys are stored as epoch days in the long slot;
                            // show the calendar form alongside the raw value.
                            Date dt = new Date(DateWritableV2.daysToMillis((int) longValues[i]));
                            sb.append(" date ");
                            sb.append(dt.toString());
                        }
                        break;
                    default:
                        // Add nothing more.
                        break;
                }
            }
        }
    }
    if (doubleValues.length > 0) {
        if (isFirstKey) {
            isFirstKey = false;
        } else {
            sb.append(", ");
        }
        sb.append("doubles ");
        boolean isFirstValue = true;
        for (int i = 0; i < columnSetInfo.doubleIndices.length; i++) {
            if (isFirstValue) {
                isFirstValue = false;
            } else {
                sb.append(", ");
            }
            int keyIndex = columnSetInfo.doubleIndices[i];
            if (isNull[keyIndex]) {
                sb.append("null");
            } else {
                sb.append(doubleValues[i]);
            }
        }
    }
    if (byteValues.length > 0) {
        if (isFirstKey) {
            isFirstKey = false;
        } else {
            sb.append(", ");
        }
        sb.append("byte lengths ");
        boolean isFirstValue = true;
        for (int i = 0; i < columnSetInfo.stringIndices.length; i++) {
            if (isFirstValue) {
                isFirstValue = false;
            } else {
                sb.append(", ");
            }
            int keyIndex = columnSetInfo.stringIndices[i];
            if (isNull[keyIndex]) {
                sb.append("null");
            } else {
                sb.append(byteLengths[i]);
            }
        }
    }
    if (decimalValues.length > 0) {
        if (isFirstKey) {
            // BUG FIX: this branch previously set isFirstKey = true, so when
            // decimals was the first group the next group was emitted without
            // its ", " separator. All sibling branches set it to false.
            isFirstKey = false;
        } else {
            sb.append(", ");
        }
        sb.append("decimals ");
        boolean isFirstValue = true;
        for (int i = 0; i < columnSetInfo.decimalIndices.length; i++) {
            if (isFirstValue) {
                isFirstValue = false;
            } else {
                sb.append(", ");
            }
            int keyIndex = columnSetInfo.decimalIndices[i];
            if (isNull[keyIndex]) {
                sb.append("null");
            } else {
                sb.append(decimalValues[i]);
            }
        }
    }
    if (timestampValues.length > 0) {
        if (isFirstKey) {
            isFirstKey = false;
        } else {
            sb.append(", ");
        }
        sb.append("timestamps ");
        boolean isFirstValue = true;
        for (int i = 0; i < columnSetInfo.timestampIndices.length; i++) {
            if (isFirstValue) {
                isFirstValue = false;
            } else {
                sb.append(", ");
            }
            int keyIndex = columnSetInfo.timestampIndices[i];
            if (isNull[keyIndex]) {
                sb.append("null");
            } else {
                sb.append(timestampValues[i]);
            }
        }
    }
    if (intervalDayTimeValues.length > 0) {
        if (isFirstKey) {
            isFirstKey = false;
        } else {
            sb.append(", ");
        }
        sb.append("interval day times ");
        boolean isFirstValue = true;
        for (int i = 0; i < columnSetInfo.intervalDayTimeIndices.length; i++) {
            if (isFirstValue) {
                isFirstValue = false;
            } else {
                sb.append(", ");
            }
            int keyIndex = columnSetInfo.intervalDayTimeIndices[i];
            if (isNull[keyIndex]) {
                sb.append("null");
            } else {
                sb.append(intervalDayTimeValues[i]);
            }
        }
    }
    return sb.toString();
}
Also used : PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) Date(java.sql.Date)

Example 13 with DateWritableV2

Use of org.apache.hadoop.hive.serde2.io.DateWritableV2 in the Apache Hive project.

The class BatchToRowReader, method nextDate:

/**
 * Extracts the date value at {@code row} from a (long-backed) column vector,
 * reusing {@code previous} as the result holder when it is exactly a
 * DateWritableV2. Returns null when the cell is null.
 */
public static DateWritableV2 nextDate(ColumnVector vector, int row, Object previous) {
    // A repeating vector stores its single value at index 0.
    int idx = vector.isRepeating ? 0 : row;
    if (!vector.noNulls && vector.isNull[idx]) {
        return null;
    }
    // Reuse the caller-supplied holder only when its runtime class matches
    // exactly (a subclass instance is not reused).
    DateWritableV2 result;
    if (previous != null && previous.getClass() == DateWritableV2.class) {
        result = (DateWritableV2) previous;
    } else {
        result = new DateWritableV2();
    }
    result.set((int) ((LongColumnVector) vector).vector[idx]);
    return result;
}
Also used : DateWritableV2(org.apache.hadoop.hive.serde2.io.DateWritableV2) LongColumnVector(org.apache.hadoop.hive.ql.exec.vector.LongColumnVector)

Example 14 with DateWritableV2

Use of org.apache.hadoop.hive.serde2.io.DateWritableV2 in the Apache Hive project.

The class GenericUDFInBloomFilter, method evaluate:

/**
 * Tests whether the first argument's value is (probably) contained in the
 * Bloom filter carried by the second argument.
 *
 * @param arguments [0] the value to probe, [1] the serialized BloomKFilter bytes
 * @return Boolean membership result, or null when either argument is null
 * @throws HiveException if the Bloom filter bytes cannot be deserialized
 */
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
    // Return if either of the arguments is null
    if (arguments[0].get() == null || arguments[1].get() == null) {
        return null;
    }
    if (!initializedBloomFilter) {
        // Setup the bloom filter once; subsequent rows reuse the parsed filter.
        // NOTE(review): this lazy init is not synchronized — presumably a UDF
        // instance is confined to one thread; confirm before sharing instances.
        InputStream in = null;
        try {
            BytesWritable bw = (BytesWritable) arguments[1].get();
            byte[] bytes = new byte[bw.getLength()];
            System.arraycopy(bw.getBytes(), 0, bytes, 0, bw.getLength());
            in = new NonSyncByteArrayInputStream(bytes);
            bloomFilter = BloomKFilter.deserialize(in);
        } catch (IOException e) {
            throw new HiveException(e);
        } finally {
            IOUtils.closeStream(in);
        }
        initializedBloomFilter = true;
    }
    // Check if the value is in bloom filter, hashing by primitive category.
    switch (((PrimitiveObjectInspector) valObjectInspector).getTypeInfo().getPrimitiveCategory()) {
        case BOOLEAN:
            boolean vBoolean = ((BooleanObjectInspector) valObjectInspector).get(arguments[0].get());
            return bloomFilter.testLong(vBoolean ? 1 : 0);
        case BYTE:
            byte vByte = ((ByteObjectInspector) valObjectInspector).get(arguments[0].get());
            return bloomFilter.testLong(vByte);
        case SHORT:
            short vShort = ((ShortObjectInspector) valObjectInspector).get(arguments[0].get());
            return bloomFilter.testLong(vShort);
        case INT:
            int vInt = ((IntObjectInspector) valObjectInspector).get(arguments[0].get());
            return bloomFilter.testLong(vInt);
        case LONG:
            long vLong = ((LongObjectInspector) valObjectInspector).get(arguments[0].get());
            return bloomFilter.testLong(vLong);
        case FLOAT:
            float vFloat = ((FloatObjectInspector) valObjectInspector).get(arguments[0].get());
            return bloomFilter.testDouble(vFloat);
        case DOUBLE:
            double vDouble = ((DoubleObjectInspector) valObjectInspector).get(arguments[0].get());
            return bloomFilter.testDouble(vDouble);
        case DECIMAL:
            HiveDecimalWritable vDecimal = ((HiveDecimalObjectInspector) valObjectInspector).getPrimitiveWritableObject(arguments[0].get());
            int startIdx = vDecimal.toBytes(scratchBuffer);
            return bloomFilter.testBytes(scratchBuffer, startIdx, scratchBuffer.length - startIdx);
        case DATE:
            DateWritableV2 vDate = ((DateObjectInspector) valObjectInspector).getPrimitiveWritableObject(arguments[0].get());
            return bloomFilter.testLong(vDate.getDays());
        case TIMESTAMP:
            Timestamp vTimeStamp = ((TimestampObjectInspector) valObjectInspector).getPrimitiveJavaObject(arguments[0].get());
            return bloomFilter.testLong(vTimeStamp.toEpochMilli());
        case CHAR:
            Text vChar = ((HiveCharObjectInspector) valObjectInspector).getPrimitiveWritableObject(arguments[0].get()).getStrippedValue();
            return bloomFilter.testBytes(vChar.getBytes(), 0, vChar.getLength());
        case VARCHAR:
            Text vVarchar = ((HiveVarcharObjectInspector) valObjectInspector).getPrimitiveWritableObject(arguments[0].get()).getTextValue();
            return bloomFilter.testBytes(vVarchar.getBytes(), 0, vVarchar.getLength());
        case STRING:
            Text vString = ((StringObjectInspector) valObjectInspector).getPrimitiveWritableObject(arguments[0].get());
            return bloomFilter.testBytes(vString.getBytes(), 0, vString.getLength());
        case BINARY:
            BytesWritable vBytes = ((BinaryObjectInspector) valObjectInspector).getPrimitiveWritableObject(arguments[0].get());
            return bloomFilter.testBytes(vBytes.getBytes(), 0, vBytes.getLength());
        default:
            // BUG FIX: the original cast valObjectInspector (an ObjectInspector,
            // as established by the switch subject above) directly to
            // PrimitiveTypeInfo, which would throw ClassCastException here
            // instead of the intended UDFArgumentTypeException.
            throw new UDFArgumentTypeException(0, "Bad primitive category "
                + ((PrimitiveObjectInspector) valObjectInspector).getTypeInfo().getPrimitiveCategory());
    }
}
Also used : HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) Timestamp(org.apache.hadoop.hive.common.type.Timestamp) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) NonSyncByteArrayInputStream(org.apache.hadoop.hive.common.io.NonSyncByteArrayInputStream) InputStream(java.io.InputStream) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) UDFArgumentTypeException(org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException) DateWritableV2(org.apache.hadoop.hive.serde2.io.DateWritableV2) BytesWritable(org.apache.hadoop.io.BytesWritable) NonSyncByteArrayInputStream(org.apache.hadoop.hive.common.io.NonSyncByteArrayInputStream) Text(org.apache.hadoop.io.Text) IOException(java.io.IOException)

Example 15 with DateWritableV2

Use of org.apache.hadoop.hive.serde2.io.DateWritableV2 in the Apache Hive project.

The class GenericUDFDateAdd, method evaluate:

/**
 * Adds (or subtracts, per {@code signModifier}) a number of days to the date
 * in the first argument. Returns null when either argument is null, when the
 * day count is not a byte/short/int, or when a STRING date fails to parse.
 */
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
    if (arguments[0].get() == null) {
        return null;
    }
    Object daysArg = daysConverter.convert(arguments[1].get());
    if (daysArg == null) {
        return null;
    }
    // The day count may arrive as any of the small integral writables.
    int numDays;
    if (daysArg instanceof ByteWritable) {
        numDays = ((ByteWritable) daysArg).get();
    } else if (daysArg instanceof ShortWritable) {
        numDays = ((ShortWritable) daysArg).get();
    } else if (daysArg instanceof IntWritable) {
        numDays = ((IntWritable) daysArg).get();
    } else {
        return null;
    }
    // Normalize the first param into `output` as a day count (DateWritableV2).
    switch (inputType1) {
        case STRING:
            String dateString = dateConverter.convert(arguments[0].get()).toString();
            if (!DateParser.parseDate(dateString, dateVal)) {
                return null;
            }
            output.set(dateVal);
            break;
        case TIMESTAMP:
            Timestamp ts = ((TimestampWritableV2) dateConverter.convert(arguments[0].get())).getTimestamp();
            output.set(DateWritableV2.millisToDays(ts.toEpochMilli()));
            break;
        case DATE:
            DateWritableV2 dateWritable = (DateWritableV2) dateConverter.convert(arguments[0].get());
            output.set(dateWritable.getDays());
            break;
        default:
            throw new UDFArgumentException("DATE_ADD() only takes STRING/TIMESTAMP/DATEWRITABLE types, got " + inputType1);
    }
    // signModifier distinguishes DATE_ADD (+1) from DATE_SUB (-1).
    output.set(output.getDays() + (signModifier * numDays));
    return output;
}
Also used : UDFArgumentException(org.apache.hadoop.hive.ql.exec.UDFArgumentException) DateWritableV2(org.apache.hadoop.hive.serde2.io.DateWritableV2) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) ByteWritable(org.apache.hadoop.hive.serde2.io.ByteWritable) Timestamp(org.apache.hadoop.hive.common.type.Timestamp) TimestampWritableV2(org.apache.hadoop.hive.serde2.io.TimestampWritableV2) IntWritable(org.apache.hadoop.io.IntWritable)

Aggregations

DateWritableV2 (org.apache.hadoop.hive.serde2.io.DateWritableV2)73 Test (org.junit.Test)36 ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector)34 DeferredJavaObject (org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject)29 DeferredObject (org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject)29 Text (org.apache.hadoop.io.Text)29 TimestampWritableV2 (org.apache.hadoop.hive.serde2.io.TimestampWritableV2)28 Date (org.apache.hadoop.hive.common.type.Date)24 HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable)23 Timestamp (org.apache.hadoop.hive.common.type.Timestamp)21 PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo)21 BytesWritable (org.apache.hadoop.io.BytesWritable)20 IntWritable (org.apache.hadoop.io.IntWritable)17 HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal)16 HiveIntervalDayTimeWritable (org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable)16 ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable)15 HiveIntervalYearMonthWritable (org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable)15 HiveChar (org.apache.hadoop.hive.common.type.HiveChar)14 HiveVarchar (org.apache.hadoop.hive.common.type.HiveVarchar)14 DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable)14