Use of org.apache.hadoop.hive.common.type.Date in project hive by apache:
class DateParser, method parseDate.
/**
 * Parses a date from a text string such as {@code 2021-02-21} and loads it
 * into {@code result}. When parsing of {@code text} fails, {@code result}
 * is left untouched.
 *
 * @param text the string to parse
 * @param result the {@code Date} instance that receives the parsed value
 * @return {@code true} when parsing succeeded; {@code false} otherwise
 * @throws NullPointerException if {@code text} or {@code result} is null
 */
public static boolean parseDate(final String text, final Date result) {
    Objects.requireNonNull(text);
    Objects.requireNonNull(result);
    try {
        // The cache lookup throws when the string is not a valid date; any
        // such failure is reported to the caller as an unsuccessful parse.
        final Date parsed = DATE_CACHE.get(text);
        result.setTimeInMillis(parsed.toEpochMilli());
        return true;
    } catch (Exception e) {
        return false;
    }
}
Use of org.apache.hadoop.hive.common.type.Date in project hive by apache:
class JdbcSerDe, method deserialize.
/**
 * Deserializes a JDBC row (delivered as a {@link MapWritable} keyed by column
 * name) into the reusable {@code row} list, converting each raw JDBC value to
 * the Hive primitive type declared for that column.
 *
 * @param blob the incoming row; must be a {@code MapWritable}
 * @return the shared {@code row} list populated with converted column values
 * @throws SerDeException if {@code blob} is not a {@code MapWritable} or the
 *         SerDe has not been initialized
 */
@Override
public Object deserialize(Writable blob) throws SerDeException {
    log.trace("Deserializing from SerDe");
    if (!(blob instanceof MapWritable)) {
        throw new SerDeException("Expected MapWritable. Got " + blob.getClass().getName());
    }
    if ((row == null) || (hiveColumnNames == null)) {
        throw new SerDeException("JDBC SerDe hasn't been initialized properly");
    }
    row.clear();
    MapWritable input = (MapWritable) blob;
    Text columnKey = new Text();
    for (int i = 0; i < hiveColumnNames.length; i++) {
        columnKey.set(hiveColumnNames[i]);
        Writable value = input.get(columnKey);
        Object rowVal;
        if (value instanceof NullWritable) {
            rowVal = null;
        } else {
            rowVal = ((ObjectWritable) value).get();
            switch (hiveColumnTypes[i].getPrimitiveCategory()) {
                case INT:
                    if (rowVal instanceof Number) {
                        rowVal = ((Number) rowVal).intValue();
                    } else {
                        rowVal = Integer.valueOf(rowVal.toString());
                    }
                    break;
                case SHORT:
                    if (rowVal instanceof Number) {
                        rowVal = ((Number) rowVal).shortValue();
                    } else {
                        rowVal = Short.valueOf(rowVal.toString());
                    }
                    break;
                case BYTE:
                    if (rowVal instanceof Number) {
                        rowVal = ((Number) rowVal).byteValue();
                    } else {
                        rowVal = Byte.valueOf(rowVal.toString());
                    }
                    break;
                case LONG:
                    // Fixed: test for Number (not Long) like every other numeric
                    // branch, so Integer/BigDecimal values convert directly instead
                    // of round-tripping through toString().
                    if (rowVal instanceof Number) {
                        rowVal = ((Number) rowVal).longValue();
                    } else {
                        rowVal = Long.valueOf(rowVal.toString());
                    }
                    break;
                case FLOAT:
                    if (rowVal instanceof Number) {
                        rowVal = ((Number) rowVal).floatValue();
                    } else {
                        rowVal = Float.valueOf(rowVal.toString());
                    }
                    break;
                case DOUBLE:
                    if (rowVal instanceof Number) {
                        rowVal = ((Number) rowVal).doubleValue();
                    } else {
                        rowVal = Double.valueOf(rowVal.toString());
                    }
                    break;
                case DECIMAL:
                    int scale = ((DecimalTypeInfo) hiveColumnTypes[i]).getScale();
                    // Fixed: HiveDecimal is immutable — setScale returns a new
                    // instance, so its result must be assigned back; the original
                    // code discarded it and the scaling silently did nothing.
                    rowVal = HiveDecimal.create(rowVal.toString())
                            .setScale(scale, BigDecimal.ROUND_HALF_EVEN);
                    break;
                case BOOLEAN:
                    if (rowVal instanceof Number) {
                        // Fixed: cast the unwrapped rowVal, not the ObjectWritable
                        // wrapper 'value' — the old cast always threw
                        // ClassCastException on this path.
                        rowVal = ((Number) rowVal).intValue() != 0;
                    } else {
                        rowVal = Boolean.valueOf(rowVal.toString());
                    }
                    break;
                case CHAR:
                case VARCHAR:
                case STRING:
                    if (rowVal instanceof java.sql.Date) {
                        rowVal = DateUtils.getDateFormat().format((java.sql.Date) rowVal);
                    } else {
                        rowVal = rowVal.toString();
                    }
                    break;
                case DATE:
                    if (rowVal instanceof java.sql.Date) {
                        LocalDate localDate = ((java.sql.Date) rowVal).toLocalDate();
                        rowVal = Date.of(localDate.getYear(), localDate.getMonthValue(),
                                localDate.getDayOfMonth());
                    } else {
                        rowVal = Date.valueOf(rowVal.toString());
                    }
                    break;
                case TIMESTAMP:
                    if (rowVal instanceof java.sql.Timestamp) {
                        LocalDateTime localDateTime = ((java.sql.Timestamp) rowVal).toLocalDateTime();
                        rowVal = Timestamp.ofEpochSecond(localDateTime.toEpochSecond(UTC),
                                localDateTime.getNano());
                    } else {
                        rowVal = Timestamp.valueOf(rowVal.toString());
                    }
                    break;
                default:
                    // Unhandled categories are passed through unconverted.
                    break;
            }
        }
        row.add(rowVal);
    }
    return row;
}
Use of org.apache.hadoop.hive.common.type.Date in project hive by apache:
class TeradataBinarySerde, method deserializeField.
/**
 * Deserializes a single field from the Teradata binary record stream into the
 * corresponding Hive writable.
 *
 * Note: the stream is positioned sequentially, so this method always consumes
 * the field's bytes from {@code in} even when {@code isNull} is true; only the
 * returned value is suppressed in that case.
 *
 * @param in the Teradata binary input stream positioned at this field
 * @param type the Hive type info describing the field
 * @param reuse a writable returned from a previous call to reuse, or null
 * @param isNull whether the row's null indicator is set for this field
 * @return a populated Hive writable, or null when {@code isNull} is true
 * @throws IOException if reading the field bytes fails
 * @throws ParseException if the field bytes cannot be parsed
 * @throws SerDeException if the field's Hive type is not supported
 */
private Object deserializeField(TeradataBinaryDataInputStream in, TypeInfo type, Object reuse, boolean isNull) throws IOException, ParseException, SerDeException {
// In this case, you cannot avoid reading the bytes even it is not used.
switch(type.getCategory()) {
case PRIMITIVE:
PrimitiveTypeInfo ptype = (PrimitiveTypeInfo) type;
switch(ptype.getPrimitiveCategory()) {
case // Teradata Type: VARCHAR
VARCHAR:
String st = in.readVarchar();
if (isNull) {
return null;
} else {
// Reuse the caller's writable when provided; enforce the declared length.
HiveVarcharWritable r = reuse == null ? new HiveVarcharWritable() : (HiveVarcharWritable) reuse;
r.set(st, ((VarcharTypeInfo) type).getLength());
return r;
}
case // Teradata Type: INT
INT:
int i = in.readInt();
if (isNull) {
return null;
} else {
IntWritable r = reuse == null ? new IntWritable() : (IntWritable) reuse;
r.set(i);
return r;
}
case // Teradata Type: TIMESTAMP
TIMESTAMP:
// Byte width of the timestamp depends on the configured precision.
Timestamp ts = in.readTimestamp(getTimeStampByteNum(timestampPrecision));
if (isNull) {
return null;
} else {
TimestampWritableV2 r = reuse == null ? new TimestampWritableV2() : (TimestampWritableV2) reuse;
r.set(ts);
return r;
}
case // Teradata Type: FLOAT
DOUBLE:
double d = in.readDouble();
if (isNull) {
return null;
} else {
DoubleWritable r = reuse == null ? new DoubleWritable() : (DoubleWritable) reuse;
r.set(d);
return r;
}
case // Teradata Type: DATE
DATE:
Date dt = in.readDate();
if (isNull) {
return null;
} else {
DateWritableV2 r = reuse == null ? new DateWritableV2() : (DateWritableV2) reuse;
r.set(dt);
return r;
}
case // Teradata Type: BYTEINT
BYTE:
byte bt = in.readByte();
if (isNull) {
return null;
} else {
ByteWritable r = reuse == null ? new ByteWritable() : (ByteWritable) reuse;
r.set(bt);
return r;
}
case // Teradata Type: BIGINT
LONG:
long l = in.readLong();
if (isNull) {
return null;
} else {
LongWritable r = reuse == null ? new LongWritable() : (LongWritable) reuse;
r.set(l);
return r;
}
case // Teradata Type: CHAR
CHAR:
CharTypeInfo ctype = (CharTypeInfo) type;
int length = ctype.getLength();
// Fixed-width CHAR: bytes consumed = declared length * bytes-per-char
// for the configured charset.
String c = in.readChar(length * getCharByteNum(charCharset));
if (isNull) {
return null;
} else {
HiveCharWritable r = reuse == null ? new HiveCharWritable() : (HiveCharWritable) reuse;
r.set(c, length);
return r;
}
case // Teradata Type: DECIMAL
DECIMAL:
DecimalTypeInfo dtype = (DecimalTypeInfo) type;
int precision = dtype.precision();
int scale = dtype.scale();
// Byte width of the decimal is derived from its precision.
HiveDecimal hd = in.readDecimal(scale, getDecimalByteNum(precision));
if (isNull) {
return null;
} else {
HiveDecimalWritable r = (reuse == null ? new HiveDecimalWritable() : (HiveDecimalWritable) reuse);
r.set(hd);
return r;
}
case // Teradata Type: SMALLINT
SHORT:
short s = in.readShort();
if (isNull) {
return null;
} else {
ShortWritable r = reuse == null ? new ShortWritable() : (ShortWritable) reuse;
r.set(s);
return r;
}
case // Teradata Type: VARBYTE
BINARY:
byte[] content = in.readVarbyte();
// NOTE(review): unlike the other branches this one never reuses the
// caller-supplied writable — it always allocates a fresh BytesWritable.
if (isNull) {
return null;
} else {
BytesWritable r = new BytesWritable();
r.set(content, 0, content.length);
return r;
}
default:
throw new SerDeException("Unrecognized type: " + ptype.getPrimitiveCategory());
}
// Currently, deserialization of complex types is not supported
case LIST:
case MAP:
case STRUCT:
default:
throw new SerDeException("Unsupported category: " + type.getCategory());
}
}
Use of org.apache.hadoop.hive.common.type.Date in project hive by apache:
class TestDateWritableV2, method testGettersSetters.
@Test
@Concurrent(count = 4)
@Repeating(repetition = 100)
public void testGettersSetters() {
    // Exercise DateWritableV2 getters and setters with random dates.
    final Date a = Date.valueOf(getRandomDateString());
    final Date b = Date.valueOf(getRandomDateString());
    final Date c = Date.valueOf(getRandomDateString());

    final DateWritableV2 wa = new DateWritableV2(a);
    final DateWritableV2 wb = new DateWritableV2(b);
    final DateWritableV2 wc = new DateWritableV2(c);
    final DateWritableV2 target = new DateWritableV2();

    // Getters: a writable hands back the Date it was constructed with.
    assertEquals(a, wa.get());
    assertEquals(a.toEpochSecond(), wa.getTimeInSeconds());

    // getDays() reports days elapsed since the epoch (1970-01-01).
    target.set(Date.valueOf("1970-01-02"));
    assertEquals(1, target.getDays());
    target.set(Date.valueOf("1971-01-01"));
    assertEquals(365, target.getDays());

    // Setters: day count, Date instance, and another writable all round-trip.
    target.set(wa.getDays());
    assertEquals(wa, target);
    target.set(wb.get());
    assertEquals(wb, target);
    target.set(wc);
    assertEquals(wc, target);
}
Use of org.apache.hadoop.hive.common.type.Date in project hive by apache:
class TestDateWritableV2, method setupDateStrings.
/**
 * Populates {@code dateStrings} with 365 consecutive daily date strings
 * starting at 2014-01-01, for use as random test input.
 */
@BeforeClass
public static void setupDateStrings() {
    Date initialDate = Date.valueOf("2014-01-01");
    Calendar cal = Calendar.getInstance();
    cal.setTimeInMillis(initialDate.toEpochMilli());
    for (int idx = 0; idx < 365; ++idx) {
        dateStrings[idx] = Date.ofEpochMilli(cal.getTimeInMillis()).toString();
        // Fixed: Calendar.add(int field, int amount) — the original call
        // cal.add(1, Calendar.DAY_OF_YEAR) had the arguments swapped, adding
        // 6 (the value of DAY_OF_YEAR) to field 1 (YEAR), i.e. jumping six
        // years per iteration instead of advancing one day.
        cal.add(Calendar.DAY_OF_YEAR, 1);
    }
}
Aggregations