Use of org.apache.hadoop.hive.serde2.AbstractSerDe in project hive by apache.
In the class SQLOperation, the method getSerDe:
private AbstractSerDe getSerDe() throws SQLException {
  if (serde != null) {
    return serde;
  }
  try {
    // Build comma-separated column-name and column-type lists from the result schema.
    List<FieldSchema> fieldSchemas = mResultSchema.getFieldSchemas();
    StringBuilder namesSb = new StringBuilder();
    StringBuilder typesSb = new StringBuilder();
    if (fieldSchemas != null && !fieldSchemas.isEmpty()) {
      for (int pos = 0; pos < fieldSchemas.size(); pos++) {
        if (pos != 0) {
          namesSb.append(",");
          typesSb.append(",");
        }
        namesSb.append(fieldSchemas.get(pos).getName());
        typesSb.append(fieldSchemas.get(pos).getType());
      }
    }
    String names = namesSb.toString();
    String types = typesSb.toString();

    // Initialize a LazySimpleSerDe with the schema passed as table properties.
    serde = new LazySimpleSerDe();
    Properties props = new Properties();
    if (names.length() > 0) {
      LOG.debug("Column names: " + names);
      props.setProperty(serdeConstants.LIST_COLUMNS, names);
    }
    if (types.length() > 0) {
      LOG.debug("Column types: " + types);
      props.setProperty(serdeConstants.LIST_COLUMN_TYPES, types);
    }
    SerDeUtils.initializeSerDe(serde, queryState.getConf(), props, null);
  } catch (Exception ex) {
    ex.printStackTrace();
    throw new SQLException("Could not create ResultSet: " + ex.getMessage(), ex);
  }
  return serde;
}
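For context, here is a minimal, self-contained sketch of the same initialization pattern: a LazySimpleSerDe configured purely through LIST_COLUMNS / LIST_COLUMN_TYPES table properties, then used to deserialize one delimited row. The class name, column schema, and sample data are invented for illustration, and it assumes the hive-serde and hadoop-common jars are on the classpath.

import java.util.Properties;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.SerDeUtils;
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
import org.apache.hadoop.io.Text;

public class SerDeInitSketch {
  public static void main(String[] args) throws Exception {
    LazySimpleSerDe serde = new LazySimpleSerDe();
    Properties props = new Properties();
    // The same two properties getSerDe() builds from the result schema.
    props.setProperty(serdeConstants.LIST_COLUMNS, "id,name");
    props.setProperty(serdeConstants.LIST_COLUMN_TYPES, "int,string");
    SerDeUtils.initializeSerDe(serde, new Configuration(), props, null);

    // LazySimpleSerDe parses delimited text; the default field separator is Ctrl-A (\001).
    Object row = serde.deserialize(new Text("42\001alice"));
    System.out.println(serde.getObjectInspector().getTypeName()); // struct<id:int,name:string>
    System.out.println(SerDeUtils.getJSONString(row, serde.getObjectInspector()));
  }
}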
Use of org.apache.hadoop.hive.serde2.AbstractSerDe in project hive by apache.
In the class FlatRowContainer, the method add:
/** Called when loading the hashtable. */
public void add(MapJoinObjectSerDeContext context, BytesWritable value) throws HiveException {
  AbstractSerDe serde = context.getSerDe();
  // has tag => need to set later
  isAliasFilterSet = !context.hasFilterTag();
  if (rowLength == UNKNOWN) {
    try {
      rowLength = ObjectInspectorUtils.getStructSize(serde.getObjectInspector());
    } catch (SerDeException ex) {
      throw new HiveException("Get structure size error", ex);
    }
    if (rowLength == 0) {
      array = EMPTY_OBJECT_ARRAY;
    }
  }
  if (rowLength > 0) {
    int rowCount = (array.length / rowLength);
    listRealloc(array.length + rowLength);
    read(serde, value, rowCount);
  } else {
    // see rowLength javadoc
    --rowLength;
  }
}
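The only AbstractSerDe call here is serde.getObjectInspector(), whose struct size fixes the stride of the flat object array. A small sketch of what getStructSize reports, using a hand-built standard struct inspector in place of a serde's (class and field names are invented):

import java.util.Arrays;

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class StructSizeSketch {
  public static void main(String[] args) throws Exception {
    // A two-field struct inspector standing in for serde.getObjectInspector().
    ObjectInspector oi = ObjectInspectorFactory.getStandardStructObjectInspector(
        Arrays.asList("id", "name"),
        Arrays.asList(
            (ObjectInspector) PrimitiveObjectInspectorFactory.javaIntObjectInspector,
            PrimitiveObjectInspectorFactory.javaStringObjectInspector));
    // getStructSize returns the field count; FlatRowContainer uses it as rowLength.
    System.out.println(ObjectInspectorUtils.getStructSize(oi)); // prints 2
  }
}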
Use of org.apache.hadoop.hive.serde2.AbstractSerDe in project hive by apache.
In the class MapJoinEagerRowContainer, the method read:
@SuppressWarnings("unchecked")
public void read(MapJoinObjectSerDeContext context, Writable currentValue) throws SerDeException {
  AbstractSerDe serde = context.getSerDe();
  List<Object> value = (List<Object>) ObjectInspectorUtils.copyToStandardObject(
      serde.deserialize(currentValue), serde.getObjectInspector(), ObjectInspectorCopyOption.WRITABLE);
  if (value == null) {
    addRow(toList(EMPTY_OBJECT_ARRAY));
  } else {
    Object[] valuesArray = value.toArray();
    if (context.hasFilterTag()) {
      // When present, the filter tag is carried as the last column of the value row.
      aliasFilter &= ((ShortWritable) valuesArray[valuesArray.length - 1]).get();
    }
    addRow(toList(valuesArray));
  }
}
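The copyToStandardObject call is what detaches the row from the serde's lazy, buffer-backed representation so it can be cached safely. A runnable sketch of that deserialize-then-copy pattern (schema and sample row invented):

import java.util.List;
import java.util.Properties;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.SerDeUtils;
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
import org.apache.hadoop.io.Text;

public class CopyToStandardSketch {
  @SuppressWarnings("unchecked")
  public static void main(String[] args) throws Exception {
    LazySimpleSerDe serde = new LazySimpleSerDe();
    Properties props = new Properties();
    props.setProperty(serdeConstants.LIST_COLUMNS, "id,name");
    props.setProperty(serdeConstants.LIST_COLUMN_TYPES, "int,string");
    SerDeUtils.initializeSerDe(serde, new Configuration(), props, null);

    // deserialize() returns a lazy struct that still points into the input buffer;
    // copyToStandardObject(..., WRITABLE) turns it into a standalone list of Writables.
    Object lazyRow = serde.deserialize(new Text("7\001bob"));
    List<Object> standard = (List<Object>) ObjectInspectorUtils.copyToStandardObject(
        lazyRow, serde.getObjectInspector(), ObjectInspectorCopyOption.WRITABLE);
    System.out.println(standard); // [7, bob] as IntWritable and Text
  }
}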
Use of org.apache.hadoop.hive.serde2.AbstractSerDe in project hive by apache.
In the class MapJoinEagerRowContainer, the method write:
@Override
public void write(MapJoinObjectSerDeContext context, ObjectOutputStream out) throws IOException, SerDeException {
  AbstractSerDe serde = context.getSerDe();
  ObjectInspector valueObjectInspector = context.getStandardOI();
  long numRows = rowCount();
  long numRowsWritten = 0L;
  out.writeLong(numRows);
  for (List<Object> row = first(); row != null; row = next()) {
    serde.serialize(row.toArray(), valueObjectInspector).write(out);
    ++numRowsWritten;
  }
  if (numRows != rowCount()) {
    throw new ConcurrentModificationException("Values was modified while persisting");
  }
  if (numRowsWritten != numRows) {
    throw new IllegalStateException("Expected to write " + numRows + " but wrote " + numRowsWritten);
  }
}
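A sketch of the same persist loop outside the container: a couple of in-memory rows serialized through a standard struct inspector into an ObjectOutputStream, with the row-count header write() uses (all names and data invented). Writable.write(DataOutput) is what makes serde.serialize(...).write(out) legal, since ObjectOutputStream implements DataOutput.

import java.io.ByteArrayOutputStream;
import java.io.ObjectOutputStream;
import java.util.Arrays;
import java.util.List;
import java.util.Properties;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.SerDeUtils;
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class WriteRowsSketch {
  public static void main(String[] args) throws Exception {
    LazySimpleSerDe serde = new LazySimpleSerDe();
    Properties props = new Properties();
    props.setProperty(serdeConstants.LIST_COLUMNS, "id,name");
    props.setProperty(serdeConstants.LIST_COLUMN_TYPES, "int,string");
    SerDeUtils.initializeSerDe(serde, new Configuration(), props, null);

    // A standard struct inspector matching the rows below, in the role of
    // context.getStandardOI() from the snippet above.
    ObjectInspector standardOI = ObjectInspectorFactory.getStandardStructObjectInspector(
        Arrays.asList("id", "name"),
        Arrays.asList(
            (ObjectInspector) PrimitiveObjectInspectorFactory.javaIntObjectInspector,
            PrimitiveObjectInspectorFactory.javaStringObjectInspector));

    List<Object[]> rows = Arrays.asList(new Object[] {1, "alice"}, new Object[] {2, "bob"});
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    try (ObjectOutputStream out = new ObjectOutputStream(bytes)) {
      out.writeLong(rows.size()); // row-count header, as in write()
      for (Object[] row : rows) {
        serde.serialize(row, standardOI).write(out);
      }
    }
    System.out.println(bytes.size() + " bytes persisted");
  }
}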
Use of org.apache.hadoop.hive.serde2.AbstractSerDe in project hive by apache.
In the class MapJoinKey, the method read:
@SuppressWarnings("deprecation")
public static MapJoinKey read(Output output, MapJoinObjectSerDeContext context, Writable writable)
    throws SerDeException, HiveException {
  AbstractSerDe serde = context.getSerDe();
  Object obj = serde.deserialize(writable);
  MapJoinKeyObject result = new MapJoinKeyObject();
  result.read(serde.getObjectInspector(), obj);
  return result;
}
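And a sketch of the consuming side of that deserialize/getObjectInspector pairing: reading key bytes back and walking the struct fields through the inspector, roughly what MapJoinKeyObject.read does with the pair it is handed (schema and sample key invented).

import java.util.Properties;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.SerDeUtils;
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.io.Text;

public class KeyReadSketch {
  public static void main(String[] args) throws Exception {
    LazySimpleSerDe serde = new LazySimpleSerDe();
    Properties props = new Properties();
    props.setProperty(serdeConstants.LIST_COLUMNS, "k1,k2");
    props.setProperty(serdeConstants.LIST_COLUMN_TYPES, "int,string");
    SerDeUtils.initializeSerDe(serde, new Configuration(), props, null);

    // Deserialize the key bytes, then read each field via the struct inspector.
    Object key = serde.deserialize(new Text("3\001west"));
    StructObjectInspector soi = (StructObjectInspector) serde.getObjectInspector();
    for (StructField field : soi.getAllStructFieldRefs()) {
      System.out.println(field.getFieldName() + " = " + soi.getStructFieldData(key, field));
    }
  }
}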