Use of org.apache.hadoop.hive.serde2.AbstractSerDe in the Apache Hive project: class MapJoinOperator, method generateMapMetaData.
/**
 * Builds the key/value serde contexts used to (de)serialize the small-table
 * sides of the map join, one {@link MapJoinTableContainerSerDe} per position.
 *
 * @throws HiveException if a serde cannot be instantiated or initialized
 */
public void generateMapMetaData() throws HiveException {
try {
// The key-side context is shared by every table; keys never carry a filter tag.
TableDesc keyDesc = conf.getKeyTblDesc();
AbstractSerDe keySerDe = (AbstractSerDe) ReflectionUtil.newInstance(keyDesc.getDeserializerClass(), null);
SerDeUtils.initializeSerDe(keySerDe, null, keyDesc.getProperties(), null);
MapJoinObjectSerDeContext keyCtx = new MapJoinObjectSerDeContext(keySerDe, false);
for (int pos = 0; pos < order.length; pos++) {
// The big table is streamed, not loaded into a hashtable — skip it.
if (pos == posBigTable) {
continue;
}
// Outer joins use the filtered value descriptors (extra filter-tag column).
TableDesc valueDesc = conf.getNoOuterJoin()
? conf.getValueTblDescs().get(pos)
: conf.getValueFilteredTblDescs().get(pos);
AbstractSerDe valueSerDe = (AbstractSerDe) ReflectionUtil.newInstance(valueDesc.getDeserializerClass(), null);
SerDeUtils.initializeSerDe(valueSerDe, null, valueDesc.getProperties(), null);
MapJoinObjectSerDeContext valueCtx = new MapJoinObjectSerDeContext(valueSerDe, hasFilter(pos));
mapJoinTableSerdes[pos] = new MapJoinTableContainerSerDe(keyCtx, valueCtx);
}
} catch (SerDeException e) {
// Wrap so callers only need to handle HiveException; cause is preserved.
throw new HiveException(e);
}
}
Use of org.apache.hadoop.hive.serde2.AbstractSerDe in the Apache Hive project: class HybridHashTableContainer, method setSerde.
/**
 * Lazily initializes the container's key/value write helper and the
 * sort-order / null-marker metadata derived from the key serde.
 * No-op once {@code writeHelper} has been set.
 */
@Override
public void setSerde(MapJoinObjectSerDeContext keyCtx, MapJoinObjectSerDeContext valCtx) throws SerDeException {
if (writeHelper != null) {
return; // already initialized by an earlier call
}
AbstractSerDe keySerde = keyCtx.getSerDe();
AbstractSerDe valSerde = valCtx.getSerDe();
LOG.info("Initializing container with " + keySerde.getClass().getName() + " and " + valSerde.getClass().getName());
// We assume this hashtable is loaded only when tez is enabled
LazyBinaryStructObjectInspector valSoi = (LazyBinaryStructObjectInspector) valSerde.getObjectInspector();
writeHelper = new MapJoinBytesTableContainer.LazyBinaryKvWriter(keySerde, valSoi, valCtx.hasFilterTag());
if (internalValueOi == null) {
internalValueOi = valSoi;
}
// The following fields are only filled in if not already provided; the key
// serde is expected to be a BinarySortableSerDe in that case.
if (sortableSortOrders == null) {
sortableSortOrders = ((BinarySortableSerDe) keySerde).getSortOrders();
}
if (nullMarkers == null) {
nullMarkers = ((BinarySortableSerDe) keySerde).getNullMarkers();
}
if (notNullMarkers == null) {
notNullMarkers = ((BinarySortableSerDe) keySerde).getNotNullMarkers();
}
}
Use of org.apache.hadoop.hive.serde2.AbstractSerDe in the Apache Hive project: class MapJoinBytesTableContainer, method setSerde.
/**
 * Lazily picks a write helper for this container based on the serde pair:
 * the optimized lazy-binary path when the key is binary-sortable and the
 * value is lazy-binary, otherwise a generic key/value writer.
 * No-op once {@code writeHelper} has been set.
 */
@Override
public void setSerde(MapJoinObjectSerDeContext keyContext, MapJoinObjectSerDeContext valueContext) throws SerDeException {
if (writeHelper != null) {
return; // already initialized
}
AbstractSerDe keySerde = keyContext.getSerDe();
AbstractSerDe valSerde = valueContext.getSerDe();
LOG.info("Initializing container with " + keySerde.getClass().getName() + " and " + valSerde.getClass().getName());
if (keySerde instanceof BinarySortableSerDe && valSerde instanceof LazyBinarySerDe) {
// Fast path: reuse the value serde's lazy-binary inspector directly and
// pull sort-order / null-marker metadata off the binary-sortable key serde.
LazyBinaryStructObjectInspector valSoi = (LazyBinaryStructObjectInspector) valSerde.getObjectInspector();
writeHelper = new LazyBinaryKvWriter(keySerde, valSoi, valueContext.hasFilterTag());
internalValueOi = valSoi;
BinarySortableSerDe sortableKeySerde = (BinarySortableSerDe) keySerde;
sortableSortOrders = sortableKeySerde.getSortOrders();
nullMarkers = sortableKeySerde.getNullMarkers();
notNullMarkers = sortableKeySerde.getNotNullMarkers();
} else {
// Generic path: no binary-sortable metadata is available.
writeHelper = new KeyValueWriter(keySerde, valSerde, valueContext.hasFilterTag());
internalValueOi = createInternalOi(valueContext);
sortableSortOrders = null;
nullMarkers = null;
notNullMarkers = null;
}
}
Use of org.apache.hadoop.hive.serde2.AbstractSerDe in the Apache Hive project: class MapJoinKeyObject, method write.
/**
 * Serializes this join key through the context's serde (using the context's
 * standard object inspector) and writes the resulting Writable to the stream.
 *
 * @param context supplies the serde and object inspector for the key
 * @param out     destination stream
 * @throws IOException    on stream failure
 * @throws SerDeException if serialization fails
 */
@Override
public void write(MapJoinObjectSerDeContext context, ObjectOutputStream out) throws IOException, SerDeException {
Writable serialized = context.getSerDe().serialize(key, context.getStandardOI());
serialized.write(out);
}
Use of org.apache.hadoop.hive.serde2.AbstractSerDe in the Apache Hive project: class LlapRowRecordReader, method initSerDe.
/**
 * Creates and initializes a {@link LazySimpleSerDe} configured with the
 * column names and types taken from this reader's {@code schema}.
 *
 * @param conf Hadoop configuration passed to the serde's initialize call
 * @return an initialized serde ready to deserialize row data
 * @throws SerDeException if serde initialization fails
 */
protected AbstractSerDe initSerDe(Configuration conf) throws SerDeException {
// StringBuilder instead of StringBuffer: these are method-local, so the
// synchronization StringBuffer provides is pure overhead. The non-empty
// check replaces the original manual isFirst flag for comma placement.
StringBuilder columnsBuffer = new StringBuilder();
StringBuilder typesBuffer = new StringBuilder();
for (FieldDesc colDesc : schema.getColumns()) {
if (columnsBuffer.length() > 0) {
columnsBuffer.append(',');
typesBuffer.append(',');
}
columnsBuffer.append(colDesc.getName());
typesBuffer.append(colDesc.getTypeDesc().toString());
}
Properties props = new Properties();
props.put(serdeConstants.LIST_COLUMNS, columnsBuffer.toString());
props.put(serdeConstants.LIST_COLUMN_TYPES, typesBuffer.toString());
// Use backslash as the escape character for the text serde.
props.put(serdeConstants.ESCAPE_CHAR, "\\");
AbstractSerDe serde = new LazySimpleSerDe();
serde.initialize(conf, props);
return serde;
}
Aggregations