Use of org.apache.hadoop.hive.serde2.io.TimestampWritableV2 in project hive by apache.
From the class TestTeradataBinarySerdeForTimeStamp, method testTimestampPrecision6.
@Test
public void testTimestampPrecision6() throws Exception {
  props.setProperty(TeradataBinarySerde.TD_TIMESTAMP_PRECISION, "6");
  serde.initialize(null, props, null);
  // 2012-10-01 12:00:00.110000
  BytesWritable in = new BytesWritable(
      BaseEncoding.base16().lowerCase().decode("00323031322d31302d30312031323a30303a30302e313130303030"));
  List<Object> row = (List<Object>) serde.deserialize(in);
  Timestamp ts = ((TimestampWritableV2) row.get(0)).getTimestamp();
  Assert.assertEquals(ts.getYear(), 2012);
  Assert.assertEquals(ts.getMonth(), 10);
  Assert.assertEquals(ts.getDay(), 1);
  Assert.assertEquals(ts.getHours(), 12);
  Assert.assertEquals(ts.getMinutes(), 0);
  Assert.assertEquals(ts.getSeconds(), 0);
  Assert.assertEquals(ts.getNanos(), 110000000);
  BytesWritable res = (BytesWritable) serde.serialize(row, serde.getObjectInspector());
  Assert.assertTrue(Arrays.equals(in.copyBytes(), res.copyBytes()));
}
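The hex literal is easy to verify by hand: the record starts with a null-indicator byte (0x00 here, meaning the single column is present), followed by the timestamp rendered as an ASCII string at the configured precision. A stand-alone sketch of that decoding (the variable names are illustrative, not from the test):

import com.google.common.io.BaseEncoding;
import java.nio.charset.StandardCharsets;

byte[] bytes = BaseEncoding.base16().lowerCase()
    .decode("00323031322d31302d30312031323a30303a30302e313130303030");
// Skip the leading indicator byte; the remainder is plain ASCII.
String text = new String(bytes, 1, bytes.length - 1, StandardCharsets.US_ASCII);
System.out.println(text);  // prints 2012-10-01 12:00:00.110000, matching the asserted fields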
Use of org.apache.hadoop.hive.serde2.io.TimestampWritableV2 in project hive by apache.
From the class TestTeradataBinarySerdeGeneral, method testDeserializeAndSerialize.
@Test
public void testDeserializeAndSerialize() throws Exception {
  BytesWritable in = new BytesWritable(BaseEncoding.base16().lowerCase().decode(
      "00004e6f762020202020201b006120646179203d2031312f31312f31312020202020202020203435ec10000000000000c5feffff"
          + "7707010000000000002a40ef2b3dab0d14e6531c8908a72700000007b20100313931312d31312d31312031393a32303a32312e34"
          + "33333230301b00746573743a20202020202020343333322020202020202020333135"));
  List<Object> row = (List<Object>) serde.deserialize(in);
  Assert.assertEquals("Nov", ((HiveCharWritable) row.get(0)).toString());
  Assert.assertEquals("a day = 11/11/11         45", ((HiveVarcharWritable) row.get(1)).toString());
  Assert.assertEquals(4332L, ((LongWritable) row.get(2)).get());
  Assert.assertEquals(-315, ((IntWritable) row.get(3)).get());
  Assert.assertEquals((short) 1911, ((ShortWritable) row.get(4)).get());
  Assert.assertEquals((byte) 1, ((ByteWritable) row.get(5)).get());
  Assert.assertEquals((double) 13, ((DoubleWritable) row.get(6)).get(), 0);
  Assert.assertEquals(30, ((HiveDecimalWritable) row.get(7)).getScale());
  Assert.assertEquals((double) 3.141592653589793238462643383279, ((HiveDecimalWritable) row.get(7)).getHiveDecimal().doubleValue(), 0);
  Assert.assertEquals("1911-11-11", ((DateWritableV2) row.get(8)).toString());
  Assert.assertEquals("1911-11-11 19:20:21.4332", ((TimestampWritableV2) row.get(9)).toString());
  Assert.assertEquals(27, ((BytesWritable) row.get(10)).getLength());
  BytesWritable res = (BytesWritable) serde.serialize(row, serde.getObjectInspector());
  Assert.assertTrue(Arrays.equals(in.copyBytes(), res.copyBytes()));
}
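Both Teradata tests rely on a setUp that is not shown here, where props is populated with column names and types before serde.initialize runs. A minimal sketch of that wiring, assuming the standard serdeConstants keys; the column names and exact type parameters below are illustrative, chosen only to line up with the eleven fields asserted above:

Properties props = new Properties();
TeradataBinarySerde serde = new TeradataBinarySerde();
// Hypothetical column list matching the asserted value types.
props.setProperty(serdeConstants.LIST_COLUMNS,
    "c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11");
props.setProperty(serdeConstants.LIST_COLUMN_TYPES,
    "char(3),varchar(100),bigint,int,smallint,tinyint,double,decimal(31,30),date,timestamp,binary");
serde.initialize(null, props, null);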
Use of org.apache.hadoop.hive.serde2.io.TimestampWritableV2 in project carbondata by apache.
From the class WritableReadSupport, method createWritablePrimitive.
/**
 * This method will create the Writable objects for primitives.
 *
 * @param obj          the primitive value read from the Carbon store
 * @param carbonColumn the column metadata describing the value's data type
 * @return the corresponding Hadoop Writable, or null for null values
 * @throws IOException if the data type is not supported
 */
private Writable createWritablePrimitive(Object obj, CarbonColumn carbonColumn)
    throws IOException {
  DataType dataType = carbonColumn.getDataType();
  if (obj == null) {
    return null;
  }
  if (dataType == DataTypes.NULL) {
    return null;
  } else if (dataType == DataTypes.DOUBLE) {
    return new DoubleWritable((double) obj);
  } else if (dataType == DataTypes.INT) {
    return new IntWritable((int) obj);
  } else if (dataType == DataTypes.LONG) {
    return new LongWritable((long) obj);
  } else if (dataType == DataTypes.SHORT) {
    return new ShortWritable((short) obj);
  } else if (dataType == DataTypes.BYTE) {
    return new ByteWritable((byte) obj);
  } else if (dataType == DataTypes.BOOLEAN) {
    return new BooleanWritable((boolean) obj);
  } else if (dataType == DataTypes.VARCHAR) {
    return new Text(obj.toString());
  } else if (dataType == DataTypes.BINARY) {
    return new BytesWritable((byte[]) obj);
  } else if (dataType == DataTypes.DATE) {
    return new DateWritableV2((Integer) obj);
  } else if (dataType == DataTypes.TIMESTAMP) {
    SimpleDateFormat dateFormat =
        new SimpleDateFormat(CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT);
    String formattedTime = dateFormat.format((long) obj / 1000);
    org.apache.hadoop.hive.common.type.Timestamp t =
        org.apache.hadoop.hive.common.type.Timestamp.valueOf(formattedTime);
    return new TimestampWritableV2(t);
  } else if (dataType == DataTypes.STRING) {
    return new Text(obj.toString());
  } else if (DataTypes.isArrayType(dataType)) {
    return createArray(obj, carbonColumn);
  } else if (DataTypes.isStructType(dataType)) {
    return createStruct(obj, carbonColumn);
  } else if (DataTypes.isMapType(dataType)) {
    return createMap(obj, carbonColumn);
  } else if (DataTypes.isDecimal(dataType)) {
    return new HiveDecimalWritable(HiveDecimal.create(new java.math.BigDecimal(obj.toString())));
  } else if (dataType == DataTypes.FLOAT) {
    return new FloatWritable((float) obj);
  } else {
    throw new IOException("unsupported data type:" + dataType);
  }
}
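In the TIMESTAMP branch the incoming value appears to be microseconds since epoch: dividing by 1000 yields the milliseconds that DateFormat.format(Object) expects for a Number argument, and the formatted string is then re-parsed into Hive's Timestamp. A stand-alone sketch of that round trip, assuming CARBON_TIMESTAMP_DEFAULT_FORMAT is the usual "yyyy-MM-dd HH:mm:ss" pattern (the pattern literal and sample value are assumptions of this sketch):

// Sketch only: 'micros' and the pattern literal are illustrative.
long micros = 1320002421000000L;  // microseconds since epoch
SimpleDateFormat fmt = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
String formatted = fmt.format(new java.util.Date(micros / 1000));
org.apache.hadoop.hive.common.type.Timestamp t =
    org.apache.hadoop.hive.common.type.Timestamp.valueOf(formatted);
TimestampWritableV2 writable = new TimestampWritableV2(t);

Note that SimpleDateFormat renders in the JVM's default time zone, so the resulting Timestamp is zone-dependent, and a pattern with no fractional-seconds field truncates any sub-second precision.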
Use of org.apache.hadoop.hive.serde2.io.TimestampWritableV2 in project hive by apache.
From the class KuduSerDe, method deserialize.
/**
 * Deserialize an object out of a Writable blob.
 */
@Override
public Object deserialize(Writable writable) throws SerDeException {
  KuduWritable input = (KuduWritable) writable;
  List<Object> output = new ArrayList<>();
  for (int i = 0; i < schema.getColumnCount(); i++) {
    // If the column isn't set, skip it.
    if (!input.isSet(i)) {
      continue;
    }
    Object javaObj = input.getValueObject(i);
    ColumnSchema col = schema.getColumnByIndex(i);
    PrimitiveTypeInfo typeInfo = toHiveType(col.getType(), col.getTypeAttributes());
    if (javaObj == null) {
      output.add(null);
    } else {
      switch (typeInfo.getPrimitiveCategory()) {
        case BOOLEAN:
          output.add(new BooleanWritable((boolean) javaObj));
          break;
        case BYTE:
          output.add(new ByteWritable((byte) javaObj));
          break;
        case SHORT:
          output.add(new ShortWritable((short) javaObj));
          break;
        case INT:
          output.add(new IntWritable((int) javaObj));
          break;
        case LONG:
          output.add(new LongWritable((long) javaObj));
          break;
        case TIMESTAMP:
          java.sql.Timestamp sqlTs = (java.sql.Timestamp) javaObj;
          Timestamp hiveTs = Timestamp.ofEpochMilli(sqlTs.getTime(), sqlTs.getNanos());
          output.add(new TimestampWritableV2(hiveTs));
          break;
        case DECIMAL:
          HiveDecimal hiveDecimal = HiveDecimal.create((BigDecimal) javaObj);
          output.add(new HiveDecimalWritable(hiveDecimal));
          break;
        case FLOAT:
          output.add(new FloatWritable((float) javaObj));
          break;
        case DOUBLE:
          output.add(new DoubleWritable((double) javaObj));
          break;
        case STRING:
          output.add(new Text((String) javaObj));
          break;
        case BINARY:
          output.add(new BytesWritable((byte[]) javaObj));
          break;
        default:
          throw new SerDeException("Unsupported type: " + typeInfo.getPrimitiveCategory());
      }
    }
  }
  return output;
}
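The TIMESTAMP case is the subtle conversion here: java.sql.Timestamp exposes whole epoch milliseconds via getTime() and a separate nanos-of-second field via getNanos(), and Hive's Timestamp.ofEpochMilli(long, int) recombines the two into the zone-free Hive type. A small stand-alone illustration (the sample value is arbitrary):

java.sql.Timestamp sqlTs = java.sql.Timestamp.valueOf("2012-10-01 12:00:00.11");
// getTime() carries the epoch millis; getNanos() carries 110000000.
Timestamp hiveTs = Timestamp.ofEpochMilli(sqlTs.getTime(), sqlTs.getNanos());
TimestampWritableV2 w = new TimestampWritableV2(hiveTs);
// In a JVM whose default zone is UTC this prints 2012-10-01 12:00:00.11;
// java.sql.Timestamp.valueOf parses in the local zone, while Hive's type is zone-free.
System.out.println(w);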
Use of org.apache.hadoop.hive.serde2.io.TimestampWritableV2 in project hive by apache.
From the class RecordReaderImpl, method nextTimestamp.
static TimestampWritableV2 nextTimestamp(ColumnVector vector, int row, Object previous) {
  if (vector.isRepeating) {
    row = 0;
  }
  if (vector.noNulls || !vector.isNull[row]) {
    TimestampWritableV2 result;
    if (previous == null || previous.getClass() != TimestampWritableV2.class) {
      result = new TimestampWritableV2();
    } else {
      result = (TimestampWritableV2) previous;
    }
    TimestampColumnVector tcv = (TimestampColumnVector) vector;
    result.setInternal(tcv.time[row], tcv.nanos[row]);
    return result;
  } else {
    return null;
  }
}
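The previous parameter implements the reader's object-reuse convention: the writable returned for one row is handed back in for the next, and setInternal overwrites it in place instead of allocating a fresh object per row (isRepeating collapses a run-length-encoded vector to row 0). An illustrative caller loop, where batch is a VectorizedRowBatch and process is a hypothetical consumer, neither taken from RecordReaderImpl:

// Hypothetical consumer loop showing the reuse pattern.
Object reuse = null;
for (int r = 0; r < batch.size; r++) {
  reuse = nextTimestamp(batch.cols[0], r, reuse);
  if (reuse != null) {
    process((TimestampWritableV2) reuse);  // consume before the next call overwrites it
  }
}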