Search in sources :

Example 26 with HiveIntervalDayTime

use of org.apache.hadoop.hive.common.type.HiveIntervalDayTime in project hive by apache.

The class TestHiveIntervalDayTimeWritable defines the method testConstructor.

@Test
@Concurrent(count = 4)
@Repeating(repetition = 100)
public void testConstructor() throws Exception {
    // Parse an interval, wrap it in a writable, then copy-construct a second
    // writable from the first; the copy must compare equal to the original.
    HiveIntervalDayTime interval = HiveIntervalDayTime.valueOf("3 4:5:6.12345");
    HiveIntervalDayTimeWritable original = new HiveIntervalDayTimeWritable(interval);
    HiveIntervalDayTimeWritable copy = new HiveIntervalDayTimeWritable(original);
    assertEquals(original, copy);
}
Also used : HiveIntervalDayTime(org.apache.hadoop.hive.common.type.HiveIntervalDayTime)

Example 27 with HiveIntervalDayTime

use of org.apache.hadoop.hive.common.type.HiveIntervalDayTime in project hive by apache.

The class TestDateTimeMath defines the method checkIntervalDayTimeArithmetic.

/**
 * Parses the two interval operands, applies the requested arithmetic operation
 * via {@link DateTimeMath}, and asserts the result equals the expected interval.
 *
 * @param left          left interval string, or null for a null operand
 * @param operationType '+' or '-'
 * @param right         right interval string, or null for a null operand
 * @param expected      expected result string, or null if the result should be null
 */
private static void checkIntervalDayTimeArithmetic(String left, char operationType, String right, String expected) throws Exception {
    HiveIntervalDayTime lhs = (left == null) ? null : HiveIntervalDayTime.valueOf(left);
    HiveIntervalDayTime rhs = (right == null) ? null : HiveIntervalDayTime.valueOf(right);
    HiveIntervalDayTime want = (expected == null) ? null : HiveIntervalDayTime.valueOf(expected);
    DateTimeMath math = new DateTimeMath();
    HiveIntervalDayTime got;
    if (operationType == '+') {
        got = math.add(lhs, rhs);
    } else if (operationType == '-') {
        got = math.subtract(lhs, rhs);
    } else {
        throw new IllegalArgumentException("Invalid operation " + operationType);
    }
    assertEquals(String.format("%s %s %s", lhs, operationType, rhs), want, got);
}
Also used : DateTimeMath(org.apache.hadoop.hive.ql.util.DateTimeMath) HiveIntervalDayTime(org.apache.hadoop.hive.common.type.HiveIntervalDayTime)

Example 28 with HiveIntervalDayTime

use of org.apache.hadoop.hive.common.type.HiveIntervalDayTime in project hive by apache.

The class TestDateTimeMath defines the method checkTsArithmetic.

/**
 * Parses two timestamp strings, subtracts them with
 * {@link DateTimeMath#subtract}, and asserts the resulting interval equals the
 * expected value.
 *
 * @param left     left timestamp string, or null for a null operand
 * @param right    right timestamp string, or null for a null operand
 * @param expected expected interval string, or null if the result should be null
 */
private static void checkTsArithmetic(String left, String right, String expected) throws Exception {
    Timestamp leftTs = null;
    if (left != null) {
        leftTs = Timestamp.valueOf(left);
    }
    Timestamp rightTs = null;
    // BUG FIX: this guard previously tested 'left' instead of 'right', so a
    // non-null left with a null right passed null to Timestamp.valueOf and
    // threw, instead of exercising null-right subtraction as intended.
    if (right != null) {
        rightTs = Timestamp.valueOf(right);
    }
    HiveIntervalDayTime expectedResult = null;
    if (expected != null) {
        expectedResult = HiveIntervalDayTime.valueOf(expected);
    }
    DateTimeMath dtm = new DateTimeMath();
    HiveIntervalDayTime testResult = dtm.subtract(leftTs, rightTs);
    assertEquals(String.format("%s - %s", leftTs, rightTs), expectedResult, testResult);
}
Also used : DateTimeMath(org.apache.hadoop.hive.ql.util.DateTimeMath) Timestamp(java.sql.Timestamp) HiveIntervalDayTime(org.apache.hadoop.hive.common.type.HiveIntervalDayTime)

Example 29 with HiveIntervalDayTime

use of org.apache.hadoop.hive.common.type.HiveIntervalDayTime in project hive by apache.

The class VectorPTFOperator defines the method setCurrentPartition.

/**
 * Snapshots the partition-key values of the given batch into the per-type
 * "currentPartition*" scratch arrays (longs, doubles, bytes, decimals,
 * timestamps, interval-day-times), so subsequent batches can be compared
 * against them.  Partition columns are repeating within a batch, so only
 * element 0 of each column vector is inspected.  Previously allocated
 * writables and byte buffers are reused where possible to avoid garbage.
 *
 * @param batch the row batch whose partition columns are sampled
 * @throws RuntimeException if a partition column has an unsupported vector type
 */
private void setCurrentPartition(VectorizedRowBatch batch) {
    final int count = partitionColumnMap.length;
    for (int i = 0; i < count; i++) {
        ColumnVector colVector = batch.cols[partitionColumnMap[i]];
        // Partition columns are repeated -- so we test element 0.
        final boolean isNull = !colVector.noNulls && colVector.isNull[0];
        currentPartitionIsNull[i] = isNull;
        if (isNull) {
            // Null partition key: there is no value to copy for this column.
            continue;
        }
        switch(partitionColumnVectorTypes[i]) {
            case LONG:
                currentPartitionLongs[i] = ((LongColumnVector) colVector).vector[0];
                break;
            case DOUBLE:
                currentPartitionDoubles[i] = ((DoubleColumnVector) colVector).vector[0];
                break;
            case BYTES:
                {
                    BytesColumnVector byteColVector = (BytesColumnVector) colVector;
                    byte[] bytes = byteColVector.vector[0];
                    final int start = byteColVector.start[0];
                    final int length = byteColVector.length[0];
                    // Reuse the existing buffer when the recorded length shows it is
                    // large enough; otherwise allocate a fresh copy of exactly
                    // 'length' bytes.  (The buffer's capacity is always >= the
                    // recorded length, since it was sized by a prior allocation.)
                    if (currentPartitionByteArrays[i] == null || currentPartitionByteLengths[i] < length) {
                        currentPartitionByteArrays[i] = Arrays.copyOfRange(bytes, start, start + length);
                    } else {
                        System.arraycopy(bytes, start, currentPartitionByteArrays[i], 0, length);
                    }
                    currentPartitionByteLengths[i] = length;
                }
                break;
            case DECIMAL:
                // Lazily allocate the writable once, then overwrite it in place.
                if (currentPartitionDecimals[i] == null) {
                    currentPartitionDecimals[i] = new HiveDecimalWritable();
                }
                currentPartitionDecimals[i].set(((DecimalColumnVector) colVector).vector[0]);
                break;
            case TIMESTAMP:
                // Lazily allocate the Timestamp once, then update it in place.
                if (currentPartitionTimestamps[i] == null) {
                    currentPartitionTimestamps[i] = new Timestamp(0);
                }
                ((TimestampColumnVector) colVector).timestampUpdate(currentPartitionTimestamps[i], 0);
                break;
            case INTERVAL_DAY_TIME:
                // Lazily allocate the interval holder once, then update it in place.
                if (currentPartitionIntervalDayTimes[i] == null) {
                    currentPartitionIntervalDayTimes[i] = new HiveIntervalDayTime();
                }
                ((IntervalDayTimeColumnVector) colVector).intervalDayTimeUpdate(currentPartitionIntervalDayTimes[i], 0);
                break;
            default:
                throw new RuntimeException("Unexpected column vector type " + partitionColumnVectorTypes[i]);
        }
    }
}
Also used : TimestampColumnVector(org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector) DecimalColumnVector(org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector) DoubleColumnVector(org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) IntervalDayTimeColumnVector(org.apache.hadoop.hive.ql.exec.vector.IntervalDayTimeColumnVector) Timestamp(java.sql.Timestamp) DecimalColumnVector(org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector) BytesColumnVector(org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector) LongColumnVector(org.apache.hadoop.hive.ql.exec.vector.LongColumnVector) ColumnVector(org.apache.hadoop.hive.ql.exec.vector.ColumnVector) TimestampColumnVector(org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector) IntervalDayTimeColumnVector(org.apache.hadoop.hive.ql.exec.vector.IntervalDayTimeColumnVector) DoubleColumnVector(org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector) BytesColumnVector(org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector) LongColumnVector(org.apache.hadoop.hive.ql.exec.vector.LongColumnVector) HiveIntervalDayTime(org.apache.hadoop.hive.common.type.HiveIntervalDayTime)

Example 30 with HiveIntervalDayTime

use of org.apache.hadoop.hive.common.type.HiveIntervalDayTime in project hive by apache.

The class VectorRandomRowSource defines the method getRandIntervalDayTime.

/**
 * Builds a random day-time interval string — optionally negative, optionally
 * with a nine-digit fractional-seconds part — and parses it into a
 * {@link HiveIntervalDayTime}.
 *
 * @param r the random source
 * @return a randomly generated interval value
 */
public static HiveIntervalDayTime getRandIntervalDayTime(Random r) {
    // Half of the time, append a random nanosecond fraction.
    String nanosPart = "";
    if (r.nextInt(2) == 1) {
        nanosPart = String.format(".%09d", r.nextInt(DateUtils.NANOS_PER_SEC));
    }
    // Half of the time, negate the whole interval.
    final String sign = r.nextInt(2) == 0 ? "" : "-";
    final String intervalStr = String.format("%s%d %02d:%02d:%02d%s",
        sign,
        1 + r.nextInt(28),  // day
        r.nextInt(24),      // hour
        r.nextInt(60),      // minute
        r.nextInt(60),      // second
        nanosPart);
    return HiveIntervalDayTime.valueOf(intervalStr);
}
Also used : HiveIntervalDayTime(org.apache.hadoop.hive.common.type.HiveIntervalDayTime)

Aggregations

HiveIntervalDayTime (org.apache.hadoop.hive.common.type.HiveIntervalDayTime)35 Timestamp (java.sql.Timestamp)18 HiveIntervalYearMonth (org.apache.hadoop.hive.common.type.HiveIntervalYearMonth)18 HiveChar (org.apache.hadoop.hive.common.type.HiveChar)14 HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal)14 HiveVarchar (org.apache.hadoop.hive.common.type.HiveVarchar)14 BytesWritable (org.apache.hadoop.io.BytesWritable)14 Text (org.apache.hadoop.io.Text)14 Date (java.sql.Date)12 ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable)12 ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable)12 DateWritable (org.apache.hadoop.hive.serde2.io.DateWritable)11 DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable)11 FloatWritable (org.apache.hadoop.io.FloatWritable)11 IntWritable (org.apache.hadoop.io.IntWritable)11 LongWritable (org.apache.hadoop.io.LongWritable)11 HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable)10 TimestampWritable (org.apache.hadoop.hive.serde2.io.TimestampWritable)10 PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo)10 BooleanWritable (org.apache.hadoop.io.BooleanWritable)10