
Example 41 with AbstractSerDe

Use of org.apache.hadoop.hive.serde2.AbstractSerDe in project hive by apache.

The class MapJoinOperator, method generateMapMetaData:

public void generateMapMetaData() throws HiveException {
    try {
        // Build the SerDe context for the join key from the key TableDesc.
        TableDesc keyTableDesc = conf.getKeyTblDesc();
        AbstractSerDe keySerializer = (AbstractSerDe) ReflectionUtil.newInstance(keyTableDesc.getDeserializerClass(), null);
        SerDeUtils.initializeSerDe(keySerializer, null, keyTableDesc.getProperties(), null);
        MapJoinObjectSerDeContext keyContext = new MapJoinObjectSerDeContext(keySerializer, false);
        // Build a value SerDe context for every small-table input; the big table is streamed, not loaded.
        for (int pos = 0; pos < order.length; pos++) {
            if (pos == posBigTable) {
                continue;
            }
            // When the join has an outer side, values carry filter tags, so the filtered value TableDescs are used.
            TableDesc valueTableDesc;
            if (conf.getNoOuterJoin()) {
                valueTableDesc = conf.getValueTblDescs().get(pos);
            } else {
                valueTableDesc = conf.getValueFilteredTblDescs().get(pos);
            }
            AbstractSerDe valueSerDe = (AbstractSerDe) ReflectionUtil.newInstance(valueTableDesc.getDeserializerClass(), null);
            SerDeUtils.initializeSerDe(valueSerDe, null, valueTableDesc.getProperties(), null);
            MapJoinObjectSerDeContext valueContext = new MapJoinObjectSerDeContext(valueSerDe, hasFilter(pos));
            mapJoinTableSerdes[pos] = new MapJoinTableContainerSerDe(keyContext, valueContext);
        }
    } catch (SerDeException e) {
        throw new HiveException(e);
    }
}
Also used: MapJoinTableContainerSerDe (org.apache.hadoop.hive.ql.exec.persistence.MapJoinTableContainerSerDe), HiveException (org.apache.hadoop.hive.ql.metadata.HiveException), MapJoinObjectSerDeContext (org.apache.hadoop.hive.ql.exec.persistence.MapJoinObjectSerDeContext), TableDesc (org.apache.hadoop.hive.ql.plan.TableDesc), AbstractSerDe (org.apache.hadoop.hive.serde2.AbstractSerDe), SerDeException (org.apache.hadoop.hive.serde2.SerDeException)
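
For reference, the core pattern here (look up the SerDe class named in the TableDesc, instantiate it reflectively, and initialize it with the table properties) can be sketched in isolation. The snippet below is a minimal, hypothetical sketch: it uses plain Java reflection instead of Hive's ReflectionUtil, and the two-argument serde.initialize(conf, props) call that also appears in Example 45; the class name passed in is a placeholder, not something read from a real query plan.

import java.util.Properties;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.serde2.AbstractSerDe;
import org.apache.hadoop.hive.serde2.SerDeException;

public class SerDeReflectionSketch {

    // Instantiate a SerDe by class name and initialize it with table properties,
    // mirroring what generateMapMetaData() does via ReflectionUtil and SerDeUtils.
    public static AbstractSerDe instantiate(String serDeClassName, Properties tableProps)
            throws SerDeException {
        try {
            Class<?> clazz = Class.forName(serDeClassName);
            AbstractSerDe serDe = (AbstractSerDe) clazz.getDeclaredConstructor().newInstance();
            serDe.initialize(new Configuration(), tableProps);
            return serDe;
        } catch (ReflectiveOperationException e) {
            throw new SerDeException(e);
        }
    }
}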

Example 42 with AbstractSerDe

Use of org.apache.hadoop.hive.serde2.AbstractSerDe in project hive by apache.

The class HybridHashTableContainer, method setSerde:

@Override
public void setSerde(MapJoinObjectSerDeContext keyCtx, MapJoinObjectSerDeContext valCtx) throws SerDeException {
    AbstractSerDe keySerde = keyCtx.getSerDe(), valSerde = valCtx.getSerDe();
    if (writeHelper == null) {
        LOG.info("Initializing container with " + keySerde.getClass().getName() + " and " + valSerde.getClass().getName());
        // We assume this hashtable is loaded only when tez is enabled
        LazyBinaryStructObjectInspector valSoi = (LazyBinaryStructObjectInspector) valSerde.getObjectInspector();
        writeHelper = new MapJoinBytesTableContainer.LazyBinaryKvWriter(keySerde, valSoi, valCtx.hasFilterTag());
        if (internalValueOi == null) {
            internalValueOi = valSoi;
        }
        // The key SerDe is expected to be a BinarySortableSerDe here; its sort orders
        // and null markers are needed to interpret the binary-sortable key bytes.
        if (sortableSortOrders == null) {
            sortableSortOrders = ((BinarySortableSerDe) keySerde).getSortOrders();
        }
        if (nullMarkers == null) {
            nullMarkers = ((BinarySortableSerDe) keySerde).getNullMarkers();
        }
        if (notNullMarkers == null) {
            notNullMarkers = ((BinarySortableSerDe) keySerde).getNotNullMarkers();
        }
    }
}
Also used: AbstractSerDe (org.apache.hadoop.hive.serde2.AbstractSerDe), LazyBinaryStructObjectInspector (org.apache.hadoop.hive.serde2.lazybinary.objectinspector.LazyBinaryStructObjectInspector)

Example 43 with AbstractSerDe

Use of org.apache.hadoop.hive.serde2.AbstractSerDe in project hive by apache.

The class MapJoinBytesTableContainer, method setSerde:

@Override
public void setSerde(MapJoinObjectSerDeContext keyContext, MapJoinObjectSerDeContext valueContext) throws SerDeException {
    AbstractSerDe keySerde = keyContext.getSerDe(), valSerde = valueContext.getSerDe();
    if (writeHelper == null) {
        LOG.info("Initializing container with " + keySerde.getClass().getName() + " and " + valSerde.getClass().getName());
        if (keySerde instanceof BinarySortableSerDe && valSerde instanceof LazyBinarySerDe) {
            // Fast path: keys are binary-sortable and values are lazy-binary, so the
            // serialized bytes can be written directly without re-encoding.
            LazyBinaryStructObjectInspector valSoi = (LazyBinaryStructObjectInspector) valSerde.getObjectInspector();
            writeHelper = new LazyBinaryKvWriter(keySerde, valSoi, valueContext.hasFilterTag());
            internalValueOi = valSoi;
            sortableSortOrders = ((BinarySortableSerDe) keySerde).getSortOrders();
            nullMarkers = ((BinarySortableSerDe) keySerde).getNullMarkers();
            notNullMarkers = ((BinarySortableSerDe) keySerde).getNotNullMarkers();
        } else {
            // Generic path: fall back to a writer that re-serializes key/value pairs
            // through the provided SerDes.
            writeHelper = new KeyValueWriter(keySerde, valSerde, valueContext.hasFilterTag());
            internalValueOi = createInternalOi(valueContext);
            sortableSortOrders = null;
            nullMarkers = null;
            notNullMarkers = null;
        }
    }
}
Also used: BinarySortableSerDe (org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe), LazyBinarySerDe (org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe), AbstractSerDe (org.apache.hadoop.hive.serde2.AbstractSerDe), LazyBinaryStructObjectInspector (org.apache.hadoop.hive.serde2.lazybinary.objectinspector.LazyBinaryStructObjectInspector)
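
A hedged sketch of the caller side may help: the key and value contexts are built with the constructor shown in Example 41, around a BinarySortableSerDe key SerDe and a LazyBinarySerDe value SerDe, so that setSerde takes the fast LazyBinaryKvWriter branch above. The column names/types and the already-constructed container are illustrative assumptions, not taken from Hive's actual loading code.

import java.util.Properties;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.ql.exec.persistence.MapJoinBytesTableContainer;
import org.apache.hadoop.hive.ql.exec.persistence.MapJoinObjectSerDeContext;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe;
import org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe;

public class SetSerdeCallerSketch {

    // Hypothetical wiring: initialize a binary-sortable key SerDe and a lazy-binary
    // value SerDe, wrap them in SerDe contexts, and hand them to the container.
    static void wireSerdes(MapJoinBytesTableContainer container, Configuration conf)
            throws SerDeException {
        Properties keyProps = new Properties();
        keyProps.put(serdeConstants.LIST_COLUMNS, "joinkey");
        keyProps.put(serdeConstants.LIST_COLUMN_TYPES, "int");
        BinarySortableSerDe keySerde = new BinarySortableSerDe();
        keySerde.initialize(conf, keyProps);

        Properties valProps = new Properties();
        valProps.put(serdeConstants.LIST_COLUMNS, "val");
        valProps.put(serdeConstants.LIST_COLUMN_TYPES, "string");
        LazyBinarySerDe valSerde = new LazyBinarySerDe();
        valSerde.initialize(conf, valProps);

        // Constructor shown in Example 41: (serde, hasFilterTag).
        MapJoinObjectSerDeContext keyCtx = new MapJoinObjectSerDeContext(keySerde, false);
        MapJoinObjectSerDeContext valCtx = new MapJoinObjectSerDeContext(valSerde, false);

        // Both instanceof checks above pass, so a LazyBinaryKvWriter is installed.
        container.setSerde(keyCtx, valCtx);
    }
}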

Example 44 with AbstractSerDe

Use of org.apache.hadoop.hive.serde2.AbstractSerDe in project hive by apache.

The class MapJoinKeyObject, method write:

@Override
public void write(MapJoinObjectSerDeContext context, ObjectOutputStream out) throws IOException, SerDeException {
    // Serialize the join key with the context's SerDe and standard object inspector,
    // then write the resulting Writable directly to the output stream.
    AbstractSerDe serde = context.getSerDe();
    ObjectInspector objectInspector = context.getStandardOI();
    Writable container = serde.serialize(key, objectInspector);
    container.write(out);
}
Also used: ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector), Writable (org.apache.hadoop.io.Writable), AbstractSerDe (org.apache.hadoop.hive.serde2.AbstractSerDe)
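
For context, the inverse operation would read the Writable back from the stream and hand it to the same SerDe. The sketch below is hypothetical and only mirrors the write() method above conceptually; it is not the actual MapJoinKeyObject read implementation. It relies on AbstractSerDe.getSerializedClass() to know which Writable to materialize, and on ObjectInputStream implementing DataInput so readFields() can consume it.

import java.io.IOException;
import java.io.ObjectInputStream;

import org.apache.hadoop.hive.ql.exec.persistence.MapJoinObjectSerDeContext;
import org.apache.hadoop.hive.serde2.AbstractSerDe;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.io.Writable;

public class KeyReadSketch {

    // Hypothetical inverse of write(): create an empty Writable of the SerDe's
    // serialized class, populate it from the stream, and deserialize it into a row.
    static Object readKey(MapJoinObjectSerDeContext context, ObjectInputStream in)
            throws IOException, SerDeException {
        AbstractSerDe serde = context.getSerDe();
        try {
            Writable container = serde.getSerializedClass().getDeclaredConstructor().newInstance();
            container.readFields(in);
            return serde.deserialize(container);
        } catch (ReflectiveOperationException e) {
            throw new SerDeException(e);
        }
    }
}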

Example 45 with AbstractSerDe

Use of org.apache.hadoop.hive.serde2.AbstractSerDe in project hive by apache.

The class LlapRowRecordReader, method initSerDe:

protected AbstractSerDe initSerDe(Configuration conf) throws SerDeException {
    Properties props = new Properties();
    // Build comma-separated column name and type lists from the row schema.
    StringBuffer columnsBuffer = new StringBuffer();
    StringBuffer typesBuffer = new StringBuffer();
    boolean isFirst = true;
    for (FieldDesc colDesc : schema.getColumns()) {
        if (!isFirst) {
            columnsBuffer.append(',');
            typesBuffer.append(',');
        }
        columnsBuffer.append(colDesc.getName());
        typesBuffer.append(colDesc.getTypeDesc().toString());
        isFirst = false;
    }
    String columns = columnsBuffer.toString();
    String types = typesBuffer.toString();
    // Configure a LazySimpleSerDe with the column names, types, and escape character,
    // then initialize it for deserializing the delimited LLAP rows.
    props.put(serdeConstants.LIST_COLUMNS, columns);
    props.put(serdeConstants.LIST_COLUMN_TYPES, types);
    props.put(serdeConstants.ESCAPE_CHAR, "\\");
    AbstractSerDe serde = new LazySimpleSerDe();
    serde.initialize(conf, props);
    return serde;
}
Also used: LazySimpleSerDe (org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe), Properties (java.util.Properties), AbstractSerDe (org.apache.hadoop.hive.serde2.AbstractSerDe), FieldDesc (org.apache.hadoop.hive.llap.FieldDesc)
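
A short, hedged usage sketch of a SerDe configured the way initSerDe() does it: the schema here is an assumed two-column example ("name string, id int"), and the input line uses LazySimpleSerDe's default Ctrl-A (\u0001) field delimiter, since initSerDe() does not override it.

import java.util.Properties;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.AbstractSerDe;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.io.Text;

public class LazySimpleSerDeUsageSketch {

    public static void main(String[] args) throws SerDeException {
        // Assumed schema; initSerDe() would derive these strings from the LLAP schema.
        Properties props = new Properties();
        props.put(serdeConstants.LIST_COLUMNS, "name,id");
        props.put(serdeConstants.LIST_COLUMN_TYPES, "string,int");

        AbstractSerDe serde = new LazySimpleSerDe();
        serde.initialize(new Configuration(), props);

        // One row, columns separated by the default Ctrl-A delimiter.
        Text line = new Text("alice\u000142");
        Object row = serde.deserialize(line);

        // Read the lazily materialized column values through the object inspector.
        StructObjectInspector soi = (StructObjectInspector) serde.getObjectInspector();
        System.out.println(soi.getStructFieldsDataAsList(row));
    }
}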

Aggregations

AbstractSerDe (org.apache.hadoop.hive.serde2.AbstractSerDe): 40
StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector): 22
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector): 16
SerDeException (org.apache.hadoop.hive.serde2.SerDeException): 15
Properties (java.util.Properties): 12
ArrayList (java.util.ArrayList): 10
BytesWritable (org.apache.hadoop.io.BytesWritable): 9
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 8
Writable (org.apache.hadoop.io.Writable): 8
IOException (java.io.IOException): 7
MapObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector): 7
InputSplit (org.apache.hadoop.mapred.InputSplit): 7
Test (org.junit.Test): 7
AbstractPrimitiveLazyObjectInspector (org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.AbstractPrimitiveLazyObjectInspector): 6
LazyBinaryMapObjectInspector (org.apache.hadoop.hive.serde2.lazybinary.objectinspector.LazyBinaryMapObjectInspector): 6
JavaBinaryObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.JavaBinaryObjectInspector): 6
WritableBinaryObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableBinaryObjectInspector): 6
Path (org.apache.hadoop.fs.Path): 5
RecordWriter (org.apache.hadoop.mapred.RecordWriter): 5
ConcurrentModificationException (java.util.ConcurrentModificationException): 4