Use of org.apache.hadoop.hive.common.type.Timestamp in project hive by apache.
Class TestOrcFile, method testTimestamp.
@Test
public void testTimestamp() throws Exception {
  ObjectInspector inspector;
  synchronized (TestOrcFile.class) {
    inspector = ObjectInspectorFactory.getReflectionObjectInspector(
        Timestamp.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  Writer writer = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf).inspector(inspector).stripeSize(100000)
          .bufferSize(10000).version(OrcFile.Version.V_0_11));
  List<Timestamp> tslist = Lists.newArrayList();
  tslist.add(Timestamp.valueOf("2037-01-01 00:00:00.000999"));
  tslist.add(Timestamp.valueOf("2003-01-01 00:00:00.000000222"));
  tslist.add(Timestamp.valueOf("1999-01-01 00:00:00.999999999"));
  tslist.add(Timestamp.valueOf("1995-01-01 00:00:00.688888888"));
  tslist.add(Timestamp.valueOf("2002-01-01 00:00:00.1"));
  tslist.add(Timestamp.valueOf("2010-03-02 00:00:00.000009001"));
  tslist.add(Timestamp.valueOf("2005-01-01 00:00:00.000002229"));
  tslist.add(Timestamp.valueOf("2006-01-01 00:00:00.900203003"));
  tslist.add(Timestamp.valueOf("2003-01-01 00:00:00.800000007"));
  tslist.add(Timestamp.valueOf("1996-08-02 00:00:00.723100809"));
  tslist.add(Timestamp.valueOf("1998-11-02 00:00:00.857340643"));
  tslist.add(Timestamp.valueOf("2008-10-02 00:00:00"));
  for (Timestamp ts : tslist) {
    writer.addRow(ts);
  }
  writer.close();
  Reader reader = OrcFile.createReader(testFilePath, OrcFile.readerOptions(conf).filesystem(fs));
  RecordReader rows = reader.rows();
  int idx = 0;
  while (rows.hasNext()) {
    Object row = rows.next(null);
    Timestamp tlistTimestamp = tslist.get(idx++);
    assertEquals(tlistTimestamp.getNanos(), ((TimestampWritableV2) row).getNanos());
  }
  assertEquals(0, writer.getSchema().getMaximumId());
  boolean[] expected = new boolean[] { false };
  boolean[] included = OrcUtils.includeColumns("", writer.getSchema());
  assertTrue(Arrays.equals(expected, included));
  rows.close();
}
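The round trip above hinges on two Timestamp behaviors: valueOf parses up to nine fractional digits, and getNanos exposes the full sub-second part that the test compares after the ORC write/read cycle. A minimal standalone sketch of just those two calls (the class name is illustrative):

import org.apache.hadoop.hive.common.type.Timestamp;

public class NanosRoundTripSketch {
  public static void main(String[] args) {
    // valueOf accepts 0 to 9 fractional digits.
    Timestamp coarse = Timestamp.valueOf("2002-01-01 00:00:00.1");
    Timestamp fine = Timestamp.valueOf("1999-01-01 00:00:00.999999999");
    // getNanos() reports the fraction in nanoseconds regardless of how
    // many digits were written, which is what the test asserts on.
    System.out.println(coarse.getNanos()); // 100000000
    System.out.println(fine.getNanos());   // 999999999
  }
}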
Use of org.apache.hadoop.hive.common.type.Timestamp in project hive by apache.
Class TestArrowColumnarBatchSerDe, method testPrimitiveRandomTimestamp.
@Test
public void testPrimitiveRandomTimestamp() throws SerDeException {
  String[][] schema = { { "timestamp1", "timestamp" } };
  int size = HiveConf.getIntVar(conf, HiveConf.ConfVars.HIVE_ARROW_BATCH_SIZE);
  Random rand = new Random(294722773L);
  Object[][] rows = new Object[size][];
  for (int i = 0; i < size; i++) {
    long millis = ((long) rand.nextInt(Integer.MAX_VALUE)) * 1000;
    Timestamp timestamp = Timestamp.ofEpochMilli(rand.nextBoolean() ? millis : -millis);
    timestamp.setNanos(rand.nextInt(1000) * 1000);
    rows[i] = new Object[] { new TimestampWritableV2(timestamp) };
  }
  initAndSerializeAndDeserialize(schema, rows);
}
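The generator leans on ofEpochMilli accepting negative values (pre-epoch instants) and on setNanos overwriting the fractional second. A short sketch under the assumption that setNanos replaces the whole sub-second field, as java.sql.Timestamp does; the class name is illustrative:

import org.apache.hadoop.hive.common.type.Timestamp;

public class EpochMilliSketch {
  public static void main(String[] args) {
    // Negative millis yield pre-epoch timestamps, which is why the test
    // flips the sign at random.
    Timestamp ts = Timestamp.ofEpochMilli(-1500L); // 1.5s before 1970-01-01 UTC
    // Overwrite the fraction with 999 microseconds, matching the test's
    // rand.nextInt(1000) * 1000 pattern (assumed to replace, not add).
    ts.setNanos(999 * 1000);
    System.out.println(ts.getNanos()); // 999000
  }
}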
Use of org.apache.hadoop.hive.common.type.Timestamp in project hive by apache.
Class RexNodeConverter, method convert.
protected RexNode convert(ExprNodeConstantDesc literal) throws CalciteSemanticException {
  final RelDataTypeFactory dtFactory = rexBuilder.getTypeFactory();
  final PrimitiveTypeInfo hiveType = (PrimitiveTypeInfo) literal.getTypeInfo();
  final RelDataType calciteDataType = TypeConverter.convert(hiveType, dtFactory);
  PrimitiveCategory hiveTypeCategory = hiveType.getPrimitiveCategory();
  ConstantObjectInspector coi = literal.getWritableObjectInspector();
  Object value = ObjectInspectorUtils.copyToStandardJavaObject(coi.getWritableConstantValue(), coi);
  RexNode calciteLiteral = null;
  // If value is null, the type should also be VOID.
  if (value == null) {
    hiveTypeCategory = PrimitiveCategory.VOID;
  }
  // TODO: Verify if we need to use ConstantObjectInspector to unwrap data
  switch (hiveTypeCategory) {
    case BOOLEAN:
      calciteLiteral = rexBuilder.makeLiteral(((Boolean) value).booleanValue());
      break;
    case BYTE:
      calciteLiteral = rexBuilder.makeExactLiteral(new BigDecimal((Byte) value), calciteDataType);
      break;
    case SHORT:
      calciteLiteral = rexBuilder.makeExactLiteral(new BigDecimal((Short) value), calciteDataType);
      break;
    case INT:
      calciteLiteral = rexBuilder.makeExactLiteral(new BigDecimal((Integer) value));
      break;
    case LONG:
      calciteLiteral = rexBuilder.makeBigintLiteral(new BigDecimal((Long) value));
      break;
    case DECIMAL:
      if (value instanceof HiveDecimal) {
        value = ((HiveDecimal) value).bigDecimalValue();
      } else if (value instanceof Decimal128) {
        value = ((Decimal128) value).toBigDecimal();
      }
      if (value == null) {
        // An invalid decimal value was found while enforcing precision and
        // scale; CBO cannot proceed in the presence of invalid decimal
        // literals.
        throw new CalciteSemanticException("Expression " + literal.getExprString()
            + " is not a valid decimal", UnsupportedFeature.Invalid_decimal);
        // TODO: return createNullLiteral(literal);
      }
      calciteLiteral = rexBuilder.makeExactLiteral((BigDecimal) value, calciteDataType);
      break;
    case FLOAT:
      calciteLiteral = rexBuilder.makeApproxLiteral(
          new BigDecimal(Float.toString((Float) value)), calciteDataType);
      break;
    case DOUBLE:
      // TODO: The best solution is to support NaN in expression reduction.
      if (Double.isNaN((Double) value)) {
        throw new CalciteSemanticException("NaN", UnsupportedFeature.Invalid_decimal);
      }
      calciteLiteral = rexBuilder.makeApproxLiteral(
          new BigDecimal(Double.toString((Double) value)), calciteDataType);
      break;
    case CHAR:
      if (value instanceof HiveChar) {
        value = ((HiveChar) value).getValue();
      }
      final int lengthChar = TypeInfoUtils.getCharacterLengthForType(hiveType);
      RelDataType charType = rexBuilder.getTypeFactory().createTypeWithCharsetAndCollation(
          rexBuilder.getTypeFactory().createSqlType(SqlTypeName.CHAR, lengthChar),
          Charset.forName(ConversionUtil.NATIVE_UTF16_CHARSET_NAME), SqlCollation.IMPLICIT);
      calciteLiteral = rexBuilder.makeLiteral(
          RexNodeExprFactory.makeHiveUnicodeString((String) value), charType, false);
      break;
    case VARCHAR:
      if (value instanceof HiveVarchar) {
        value = ((HiveVarchar) value).getValue();
      }
      final int lengthVarchar = TypeInfoUtils.getCharacterLengthForType(hiveType);
      RelDataType varcharType = rexBuilder.getTypeFactory().createTypeWithCharsetAndCollation(
          rexBuilder.getTypeFactory().createSqlType(SqlTypeName.VARCHAR, lengthVarchar),
          Charset.forName(ConversionUtil.NATIVE_UTF16_CHARSET_NAME), SqlCollation.IMPLICIT);
      calciteLiteral = rexBuilder.makeLiteral(
          RexNodeExprFactory.makeHiveUnicodeString((String) value), varcharType, true);
      break;
    case STRING:
      RelDataType stringType = rexBuilder.getTypeFactory().createTypeWithCharsetAndCollation(
          rexBuilder.getTypeFactory().createSqlType(SqlTypeName.VARCHAR, Integer.MAX_VALUE),
          Charset.forName(ConversionUtil.NATIVE_UTF16_CHARSET_NAME), SqlCollation.IMPLICIT);
      calciteLiteral = rexBuilder.makeLiteral(
          RexNodeExprFactory.makeHiveUnicodeString((String) value), stringType, true);
      break;
    case DATE:
      final Date date = (Date) value;
      calciteLiteral = rexBuilder.makeDateLiteral(DateString.fromDaysSinceEpoch(date.toEpochDay()));
      break;
    case TIMESTAMP:
      final TimestampString tsString;
      if (value instanceof Calendar) {
        tsString = TimestampString.fromCalendarFields((Calendar) value);
      } else {
        final Timestamp ts = (Timestamp) value;
        tsString = TimestampString.fromMillisSinceEpoch(ts.toEpochMilli()).withNanos(ts.getNanos());
      }
      // Must call makeLiteral, not makeTimestampLiteral,
      // to have the RexBuilder.roundTime logic kick in.
      calciteLiteral = rexBuilder.makeLiteral(tsString,
          rexBuilder.getTypeFactory().createSqlType(SqlTypeName.TIMESTAMP,
              rexBuilder.getTypeFactory().getTypeSystem().getDefaultPrecision(SqlTypeName.TIMESTAMP)),
          false);
      break;
    case TIMESTAMPLOCALTZ:
      final TimestampString tsLocalTZString;
      Instant i = ((TimestampTZ) value).getZonedDateTime().toInstant();
      tsLocalTZString = TimestampString.fromMillisSinceEpoch(i.toEpochMilli()).withNanos(i.getNano());
      calciteLiteral = rexBuilder.makeTimestampWithLocalTimeZoneLiteral(tsLocalTZString,
          rexBuilder.getTypeFactory().getTypeSystem().getDefaultPrecision(
              SqlTypeName.TIMESTAMP_WITH_LOCAL_TIME_ZONE));
      break;
    case INTERVAL_YEAR_MONTH:
      // Calcite year-month literal value is months as BigDecimal
      BigDecimal totalMonths = BigDecimal.valueOf(((HiveIntervalYearMonth) value).getTotalMonths());
      calciteLiteral = rexBuilder.makeIntervalLiteral(totalMonths,
          new SqlIntervalQualifier(TimeUnit.YEAR, TimeUnit.MONTH, new SqlParserPos(1, 1)));
      break;
    case INTERVAL_DAY_TIME:
      // Calcite day-time interval is millis value as BigDecimal
      // Seconds converted to millis
      BigDecimal secsValueBd = BigDecimal.valueOf(((HiveIntervalDayTime) value).getTotalSeconds() * 1000);
      // Nanos converted to millis
      BigDecimal nanosValueBd = BigDecimal.valueOf(((HiveIntervalDayTime) value).getNanos(), 6);
      calciteLiteral = rexBuilder.makeIntervalLiteral(secsValueBd.add(nanosValueBd),
          new SqlIntervalQualifier(TimeUnit.MILLISECOND, null, new SqlParserPos(1, 1)));
      break;
    case VOID:
      calciteLiteral = rexBuilder.makeLiteral(null, calciteDataType, true);
      break;
    case BINARY:
    case UNKNOWN:
    default:
      throw new RuntimeException("Unsupported Literal");
  }
  return calciteLiteral;
}
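The TIMESTAMP branch is the one that touches Hive's Timestamp: it rebuilds a Calcite TimestampString from epoch millis and then restores full precision with withNanos, since fromMillisSinceEpoch alone stops at milliseconds. A standalone sketch of just that conversion, using only the calls that appear in convert():

import org.apache.calcite.util.TimestampString;
import org.apache.hadoop.hive.common.type.Timestamp;

public class TimestampLiteralSketch {
  public static void main(String[] args) {
    Timestamp ts = Timestamp.valueOf("2003-01-01 00:00:00.800000007");
    // withNanos replaces the fractional second of the TimestampString,
    // so the nanoseconds dropped by toEpochMilli() are reinstated.
    TimestampString s = TimestampString.fromMillisSinceEpoch(ts.toEpochMilli())
        .withNanos(ts.getNanos());
    System.out.println(s); // 2003-01-01 00:00:00.800000007
  }
}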
Use of org.apache.hadoop.hive.common.type.Timestamp in project hive by apache.
Class UDFSecond, method evaluate.
@Override
public Object evaluate(GenericUDF.DeferredObject[] arguments) throws HiveException {
  switch (inputTypes[0]) {
    case INTERVAL_DAY_TIME:
      HiveIntervalDayTime intervalDayTime = getIntervalDayTimeValue(arguments, 0, inputTypes, converters);
      if (intervalDayTime == null) {
        return null;
      }
      output.set(intervalDayTime.getSeconds());
      break;
    case STRING:
    case CHAR:
    case VARCHAR:
    case DATE:
    case TIMESTAMP:
    case TIMESTAMPLOCALTZ:
    case VOID:
      Timestamp ts = getTimestampValue(arguments, 0, converters);
      if (ts == null) {
        return null;
      }
      calendar.setTimeInMillis(ts.toEpochMilli());
      output.set(calendar.get(Calendar.SECOND));
  }
  return output;
}
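For the timestamp-like types, the UDF reduces to loading epoch millis into a reusable java.util.Calendar and reading the SECOND field back. A sketch of that core; the real UDF's calendar zone depends on session configuration, so UTC is assumed here for determinism:

import java.util.Calendar;
import java.util.TimeZone;
import org.apache.hadoop.hive.common.type.Timestamp;

public class SecondFieldSketch {
  public static void main(String[] args) {
    Timestamp ts = Timestamp.valueOf("2008-10-02 10:11:42");
    // Hive's Timestamp is zone-agnostic; pairing toEpochMilli() with a UTC
    // calendar keeps the field extraction deterministic.
    Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
    calendar.setTimeInMillis(ts.toEpochMilli());
    System.out.println(calendar.get(Calendar.SECOND)); // 42
  }
}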
Use of org.apache.hadoop.hive.common.type.Timestamp in project hive by apache.
Class UDFWeekOfYear, method evaluate.
public IntWritable evaluate(TimestampWritableV2 t) {
  if (t == null) {
    return null;
  }
  Timestamp ts = t.getTimestamp();
  calendar.setTimeInMillis(ts.toEpochMilli());
  result.set(calendar.get(Calendar.WEEK_OF_YEAR));
  return result;
}
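The method relies on the surrounding class having already configured its reusable calendar for ISO-style weeks. A sketch of the full computation; the Monday-start and minimum-four-days settings are an assumption about how UDFWeekOfYear initializes its calendar, and UTC is chosen for determinism:

import java.util.Calendar;
import java.util.TimeZone;
import org.apache.hadoop.hive.common.type.Timestamp;

public class WeekOfYearSketch {
  public static void main(String[] args) {
    Timestamp ts = Timestamp.valueOf("1996-08-02 00:00:00"); // a Friday
    Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
    // ISO-8601-style week numbering: weeks start on Monday, and week 1 is
    // the first week with at least four days in the new year.
    calendar.setFirstDayOfWeek(Calendar.MONDAY);
    calendar.setMinimalDaysInFirstWeek(4);
    calendar.setTimeInMillis(ts.toEpochMilli());
    System.out.println(calendar.get(Calendar.WEEK_OF_YEAR)); // 31
  }
}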