Search in sources :

Example 6 with ObjectWritable

use of org.apache.hadoop.io.ObjectWritable in project cdap by caskdata.

The write method of the TaggedWritable class.

@Override
public void write(DataOutput out) throws IOException {
    // Serialize the stage name first so readers can recover the tag,
    // then let ObjectWritable handle the type-prefixed record payload.
    Text.writeString(out, stageName);
    new ObjectWritable(record).write(out);
}
Also used : ObjectWritable(org.apache.hadoop.io.ObjectWritable)

Example 7 with ObjectWritable

use of org.apache.hadoop.io.ObjectWritable in project hive by apache.

The deserialize method of the JdbcSerDe class.

@Override
public Object deserialize(Writable blob) throws SerDeException {
    log.trace("Deserializing from SerDe");
    if (!(blob instanceof MapWritable)) {
        throw new SerDeException("Expected MapWritable. Got " + blob.getClass().getName());
    }
    if ((row == null) || (hiveColumnNames == null)) {
        throw new SerDeException("JDBC SerDe hasn't been initialized properly");
    }
    row.clear();
    MapWritable input = (MapWritable) blob;
    Text columnKey = new Text();
    for (int i = 0; i < hiveColumnNames.length; i++) {
        columnKey.set(hiveColumnNames[i]);
        Writable value = input.get(columnKey);
        Object rowVal;
        if (value instanceof NullWritable) {
            rowVal = null;
        } else {
            // Unwrap the JDBC value, then coerce it to the Hive column type.
            rowVal = ((ObjectWritable) value).get();
            switch(hiveColumnTypes[i].getPrimitiveCategory()) {
                case INT:
                    if (rowVal instanceof Number) {
                        rowVal = ((Number) rowVal).intValue();
                    } else {
                        rowVal = Integer.valueOf(rowVal.toString());
                    }
                    break;
                case SHORT:
                    if (rowVal instanceof Number) {
                        rowVal = ((Number) rowVal).shortValue();
                    } else {
                        rowVal = Short.valueOf(rowVal.toString());
                    }
                    break;
                case BYTE:
                    if (rowVal instanceof Number) {
                        rowVal = ((Number) rowVal).byteValue();
                    } else {
                        rowVal = Byte.valueOf(rowVal.toString());
                    }
                    break;
                case LONG:
                    // FIX: test against Number (not Long) so Integer/Short values
                    // from the driver take the fast path, consistent with the
                    // other numeric cases above.
                    if (rowVal instanceof Number) {
                        rowVal = ((Number) rowVal).longValue();
                    } else {
                        rowVal = Long.valueOf(rowVal.toString());
                    }
                    break;
                case FLOAT:
                    if (rowVal instanceof Number) {
                        rowVal = ((Number) rowVal).floatValue();
                    } else {
                        rowVal = Float.valueOf(rowVal.toString());
                    }
                    break;
                case DOUBLE:
                    if (rowVal instanceof Number) {
                        rowVal = ((Number) rowVal).doubleValue();
                    } else {
                        rowVal = Double.valueOf(rowVal.toString());
                    }
                    break;
                case DECIMAL:
                    int scale = ((DecimalTypeInfo) hiveColumnTypes[i]).getScale();
                    // FIX: HiveDecimal is immutable — setScale returns a new
                    // instance, so the result must be assigned back or the
                    // column's declared scale is silently ignored.
                    rowVal = HiveDecimal.create(rowVal.toString())
                        .setScale(scale, BigDecimal.ROUND_HALF_EVEN);
                    break;
                case BOOLEAN:
                    if (rowVal instanceof Number) {
                        // FIX: was ((Number) value).intValue() — 'value' is the
                        // ObjectWritable wrapper, not a Number, so the cast
                        // always threw ClassCastException. Use the unwrapped
                        // rowVal instead.
                        rowVal = ((Number) rowVal).intValue() != 0;
                    } else {
                        rowVal = Boolean.valueOf(rowVal.toString());
                    }
                    break;
                case CHAR:
                case VARCHAR:
                case STRING:
                    if (rowVal instanceof java.sql.Date) {
                        rowVal = DateUtils.getDateFormat().format((java.sql.Date) rowVal);
                    } else {
                        rowVal = rowVal.toString();
                    }
                    break;
                case DATE:
                    if (rowVal instanceof java.sql.Date) {
                        LocalDate localDate = ((java.sql.Date) rowVal).toLocalDate();
                        rowVal = Date.of(localDate.getYear(), localDate.getMonthValue(), localDate.getDayOfMonth());
                    } else {
                        rowVal = Date.valueOf(rowVal.toString());
                    }
                    break;
                case TIMESTAMP:
                    if (rowVal instanceof java.sql.Timestamp) {
                        LocalDateTime localDateTime = ((java.sql.Timestamp) rowVal).toLocalDateTime();
                        rowVal = Timestamp.ofEpochSecond(localDateTime.toEpochSecond(UTC), localDateTime.getNano());
                    } else {
                        rowVal = Timestamp.valueOf(rowVal.toString());
                    }
                    break;
                default:
                    // Pass remaining types through unchanged.
                    break;
            }
        }
        row.add(rowVal);
    }
    return row;
}
Also used : LocalDateTime(java.time.LocalDateTime) NullWritable(org.apache.hadoop.io.NullWritable) Writable(org.apache.hadoop.io.Writable) MapWritable(org.apache.hadoop.io.MapWritable) ObjectWritable(org.apache.hadoop.io.ObjectWritable) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) Text(org.apache.hadoop.io.Text) MapWritable(org.apache.hadoop.io.MapWritable) NullWritable(org.apache.hadoop.io.NullWritable) LocalDate(java.time.LocalDate) Timestamp(org.apache.hadoop.hive.common.type.Timestamp) Date(org.apache.hadoop.hive.common.type.Date) LocalDate(java.time.LocalDate) DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) HiveDecimal(org.apache.hadoop.hive.common.type.HiveDecimal) SerDeException(org.apache.hadoop.hive.serde2.SerDeException)

Example 8 with ObjectWritable

use of org.apache.hadoop.io.ObjectWritable in project hive by apache.

The next method of the JdbcRecordReader class.

@Override
public boolean next(LongWritable key, MapWritable value) throws IOException {
    try {
        LOGGER.trace("JdbcRecordReader.next called");
        // Lazily build the accessor and record iterator on first use.
        if (dbAccessor == null) {
            dbAccessor = DatabaseAccessorFactory.getAccessor(conf);
            iterator = dbAccessor.getRecordIterator(conf, split.getPartitionColumn(), split.getLowerBound(), split.getUpperBound(), split.getLimit(), split.getOffset());
        }
        if (!iterator.hasNext()) {
            LOGGER.debug("JdbcRecordReader has no more records to read.");
            return false;
        }
        LOGGER.trace("JdbcRecordReader has more records to read.");
        key.set(pos);
        pos++;
        Map<String, Object> record = iterator.next();
        if ((record == null) || record.isEmpty()) {
            LOGGER.debug("JdbcRecordReader got null record.");
            return false;
        }
        // Copy each column into the output map, wrapping nulls explicitly.
        for (Entry<String, Object> column : record.entrySet()) {
            value.put(new Text(column.getKey()), column.getValue() == null ? NullWritable.get() : new ObjectWritable(column.getValue()));
        }
        return true;
    } catch (Exception e) {
        throw new IOException(e);
    }
}
Also used : Text(org.apache.hadoop.io.Text) ObjectWritable(org.apache.hadoop.io.ObjectWritable) IOException(java.io.IOException) IOException(java.io.IOException)

Aggregations

ObjectWritable (org.apache.hadoop.io.ObjectWritable)8 SerDeException (org.apache.hadoop.hive.serde2.SerDeException)3 IOException (java.io.IOException)2 MapWritable (org.apache.hadoop.io.MapWritable)2 Text (org.apache.hadoop.io.Text)2 Writable (org.apache.hadoop.io.Writable)2 StreamEvent (co.cask.cdap.api.flow.flowlet.StreamEvent)1 ByteArrayInputStream (java.io.ByteArrayInputStream)1 ObjectInputStream (java.io.ObjectInputStream)1 InvocationTargetException (java.lang.reflect.InvocationTargetException)1 LocalDate (java.time.LocalDate)1 LocalDateTime (java.time.LocalDateTime)1 ArrayList (java.util.ArrayList)1 List (java.util.List)1 Packet (net.ripe.hadoop.pcap.packet.Packet)1 Scan (org.apache.hadoop.hbase.client.Scan)1 ClientProtos (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos)1 Date (org.apache.hadoop.hive.common.type.Date)1 HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal)1 Timestamp (org.apache.hadoop.hive.common.type.Timestamp)1