Use of org.apache.hadoop.io.ObjectWritable in project cdap by caskdata.
Class TaggedWritable, method write.
@Override
public void write(DataOutput out) throws IOException {
  Text.writeString(out, stageName);
  ObjectWritable recordWritable = new ObjectWritable(record);
  recordWritable.write(out);
}
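ObjectWritable is Configurable: on read it needs a Configuration so it can reflectively instantiate the wrapped class. A minimal sketch of the matching readFields counterpart, assuming a conf field alongside the stageName and record fields implied by the write method above (this sketch is not the CDAP source):

@Override
public void readFields(DataInput in) throws IOException {
  stageName = Text.readString(in);
  ObjectWritable recordWritable = new ObjectWritable();
  // Without a Configuration, ObjectWritable cannot instantiate the
  // record's class reflectively while reading.
  recordWritable.setConf(conf); // conf is an assumed field on this writable
  recordWritable.readFields(in);
  record = recordWritable.get();
}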
Use of org.apache.hadoop.io.ObjectWritable in project hive by apache.
Class JdbcSerDe, method deserialize.
@Override
public Object deserialize(Writable blob) throws SerDeException {
  log.trace("Deserializing from SerDe");
  if (!(blob instanceof MapWritable)) {
    throw new SerDeException("Expected MapWritable. Got " + blob.getClass().getName());
  }
  if ((row == null) || (hiveColumnNames == null)) {
    throw new SerDeException("JDBC SerDe hasn't been initialized properly");
  }
  row.clear();
  MapWritable input = (MapWritable) blob;
  Text columnKey = new Text();
  for (int i = 0; i < hiveColumnNames.length; i++) {
    columnKey.set(hiveColumnNames[i]);
    Writable value = input.get(columnKey);
    Object rowVal;
    if (value instanceof NullWritable) {
      rowVal = null;
    } else {
      // Unbox the raw JDBC value, then coerce it to the declared Hive type.
      rowVal = ((ObjectWritable) value).get();
      switch (hiveColumnTypes[i].getPrimitiveCategory()) {
        case INT:
          if (rowVal instanceof Number) {
            rowVal = ((Number) rowVal).intValue();
          } else {
            rowVal = Integer.valueOf(rowVal.toString());
          }
          break;
        case SHORT:
          if (rowVal instanceof Number) {
            rowVal = ((Number) rowVal).shortValue();
          } else {
            rowVal = Short.valueOf(rowVal.toString());
          }
          break;
        case BYTE:
          if (rowVal instanceof Number) {
            rowVal = ((Number) rowVal).byteValue();
          } else {
            rowVal = Byte.valueOf(rowVal.toString());
          }
          break;
        case LONG:
          if (rowVal instanceof Number) {
            rowVal = ((Number) rowVal).longValue();
          } else {
            rowVal = Long.valueOf(rowVal.toString());
          }
          break;
        case FLOAT:
          if (rowVal instanceof Number) {
            rowVal = ((Number) rowVal).floatValue();
          } else {
            rowVal = Float.valueOf(rowVal.toString());
          }
          break;
        case DOUBLE:
          if (rowVal instanceof Number) {
            rowVal = ((Number) rowVal).doubleValue();
          } else {
            rowVal = Double.valueOf(rowVal.toString());
          }
          break;
        case DECIMAL:
          int scale = ((DecimalTypeInfo) hiveColumnTypes[i]).getScale();
          // HiveDecimal is immutable: setScale returns a new value, so keep it.
          rowVal = HiveDecimal.create(rowVal.toString()).setScale(scale, BigDecimal.ROUND_HALF_EVEN);
          break;
        case BOOLEAN:
          if (rowVal instanceof Number) {
            rowVal = ((Number) rowVal).intValue() != 0;
          } else {
            rowVal = Boolean.valueOf(rowVal.toString());
          }
          break;
        case CHAR:
        case VARCHAR:
        case STRING:
          if (rowVal instanceof java.sql.Date) {
            rowVal = DateUtils.getDateFormat().format((java.sql.Date) rowVal);
          } else {
            rowVal = rowVal.toString();
          }
          break;
        case DATE:
          if (rowVal instanceof java.sql.Date) {
            LocalDate localDate = ((java.sql.Date) rowVal).toLocalDate();
            rowVal = Date.of(localDate.getYear(), localDate.getMonthValue(), localDate.getDayOfMonth());
          } else {
            rowVal = Date.valueOf(rowVal.toString());
          }
          break;
        case TIMESTAMP:
          if (rowVal instanceof java.sql.Timestamp) {
            LocalDateTime localDateTime = ((java.sql.Timestamp) rowVal).toLocalDateTime();
            rowVal = Timestamp.ofEpochSecond(localDateTime.toEpochSecond(ZoneOffset.UTC), localDateTime.getNano());
          } else {
            rowVal = Timestamp.valueOf(rowVal.toString());
          }
          break;
        default:
          // Leave the value exactly as the JDBC driver returned it.
          break;
      }
    }
    row.add(rowVal);
  }
  return row;
}
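The DECIMAL branch above depends on HiveDecimal being immutable: setScale returns a rescaled value rather than mutating the receiver, so its result must be assigned. A minimal standalone illustration (the class name is ours; assumes the Hive storage-api on the classpath):

import java.math.BigDecimal;
import org.apache.hadoop.hive.common.type.HiveDecimal;

public class DecimalScaleDemo {
  public static void main(String[] args) {
    HiveDecimal d = HiveDecimal.create("12.3456");
    // setScale returns a rescaled copy; d itself is unchanged.
    HiveDecimal scaled = d.setScale(2, BigDecimal.ROUND_HALF_EVEN);
    System.out.println(d);      // 12.3456
    System.out.println(scaled); // 12.35
  }
}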
Use of org.apache.hadoop.io.ObjectWritable in project hive by apache.
Class JdbcRecordReader, method next.
@Override
public boolean next(LongWritable key, MapWritable value) throws IOException {
  try {
    LOGGER.trace("JdbcRecordReader.next called");
    if (dbAccessor == null) {
      // Lazily open the JDBC cursor for this split on the first call.
      dbAccessor = DatabaseAccessorFactory.getAccessor(conf);
      iterator = dbAccessor.getRecordIterator(conf, split.getPartitionColumn(), split.getLowerBound(),
          split.getUpperBound(), split.getLimit(), split.getOffset());
    }
    if (iterator.hasNext()) {
      LOGGER.trace("JdbcRecordReader has more records to read.");
      key.set(pos);
      pos++;
      Map<String, Object> record = iterator.next();
      if ((record != null) && (!record.isEmpty())) {
        // Box each column so it fits the MapWritable contract:
        // NullWritable for SQL NULLs, ObjectWritable for everything else.
        for (Entry<String, Object> entry : record.entrySet()) {
          value.put(new Text(entry.getKey()),
              entry.getValue() == null ? NullWritable.get() : new ObjectWritable(entry.getValue()));
        }
        return true;
      } else {
        LOGGER.debug("JdbcRecordReader got null record.");
        return false;
      }
    } else {
      LOGGER.debug("JdbcRecordReader has no more records to read.");
      return false;
    }
  } catch (Exception e) {
    throw new IOException(e);
  }
}
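Taken together, the two Hive methods form a simple handoff: next boxes each column value (NullWritable for SQL NULLs, ObjectWritable for everything else), and deserialize unboxes with get. Because the MapWritable is consumed in the same process rather than written out, the box also works for values that ObjectWritable's write path could not serialize (such as java.sql.Timestamp). A self-contained sketch of that contract (class and column names are illustrative only, not from the Hive source):

import org.apache.hadoop.io.MapWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.ObjectWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;

public class BoxingContractDemo {
  public static void main(String[] args) {
    // Box values the way JdbcRecordReader.next does.
    MapWritable row = new MapWritable();
    row.put(new Text("id"), new ObjectWritable(42));
    row.put(new Text("comment"), NullWritable.get());

    // Unbox them the way JdbcSerDe.deserialize does.
    for (Text col : new Text[] { new Text("id"), new Text("comment") }) {
      Writable boxed = row.get(col);
      Object raw = (boxed instanceof NullWritable) ? null : ((ObjectWritable) boxed).get();
      System.out.println(col + " -> " + raw);
    }
  }
}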