Use of org.apache.hadoop.hive.serde2.AbstractSerDe in project hive by apache.
From the class HiveMetaStoreUtils, method getDeserializer.
public static Deserializer getDeserializer(Configuration conf,
    org.apache.hadoop.hive.metastore.api.Table table, String metaTable,
    boolean skipConfError, String lib) throws MetaException {
  AbstractSerDe deserializer;
  // Instantiate the SerDe class named by 'lib' reflectively.
  try {
    deserializer = ReflectionUtil.newInstance(
        conf.getClassByName(lib).asSubclass(AbstractSerDe.class), conf);
  } catch (Exception e) {
    throw new MetaException(e.getClass().getName() + " " + e.getMessage());
  }
  // Initialize it with the table's metadata; optionally tolerate config errors.
  try {
    Properties properties = MetaStoreUtils.getTableMetadata(table);
    if (metaTable != null) {
      properties.put("metaTable", metaTable);
    }
    deserializer.initialize(conf, properties, null);
  } catch (SerDeException e) {
    if (!skipConfError) {
      LOG.error("error in initSerDe: " + e.getClass().getName() + " " + e.getMessage(), e);
      throw new MetaException(e.getClass().getName() + " " + e.getMessage());
    }
  }
  return deserializer;
}
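For orientation, here is a minimal sketch of the same reflective instantiate-then-initialize pattern outside the metastore path. The SerDe class name and the two-column schema are illustrative choices, not values from the snippet above; LazySimpleSerDe is a real Hive SerDe.

import java.util.Properties;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.AbstractSerDe;
import org.apache.hadoop.hive.serde2.SerDeException;

public class SerDeBootstrapSketch {
  public static AbstractSerDe newSerDe(Configuration conf, String lib)
      throws SerDeException, ClassNotFoundException {
    // Same reflective construction as getDeserializer above.
    AbstractSerDe serDe = org.apache.hive.common.util.ReflectionUtil.newInstance(
        conf.getClassByName(lib).asSubclass(AbstractSerDe.class), conf);
    Properties props = new Properties();
    props.setProperty(serdeConstants.LIST_COLUMNS, "id,name");         // illustrative schema
    props.setProperty(serdeConstants.LIST_COLUMN_TYPES, "int,string"); // illustrative schema
    serDe.initialize(conf, props, null); // same three-argument initialize as above
    return serDe;
  }

  public static void main(String[] args) throws Exception {
    AbstractSerDe serDe =
        newSerDe(new Configuration(), "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe");
    System.out.println("Initialized: " + serDe.getClass().getSimpleName());
  }
}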
Use of org.apache.hadoop.hive.serde2.AbstractSerDe in project hive by apache.
From the class MapJoinKeyObject, method write.
@Override
public void write(MapJoinObjectSerDeContext context, ObjectOutputStream out)
    throws IOException, SerDeException {
  AbstractSerDe serde = context.getSerDe();
  ObjectInspector objectInspector = context.getStandardOI();
  // Serialize the join key with the context's SerDe, then let the resulting
  // Writable write itself to the stream.
  Writable container = serde.serialize(key, objectInspector);
  container.write(out);
}
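A self-contained sketch of the same serialize-then-write pattern, assuming a LazyBinarySerDe and a hand-built one-column struct inspector; this is illustrative and not Hive's actual MapJoin context wiring.

import java.io.ByteArrayOutputStream;
import java.io.ObjectOutputStream;
import java.util.Arrays;
import java.util.Properties;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.AbstractSerDe;
import org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.Writable;

public class KeyWriteSketch {
  public static void main(String[] args) throws Exception {
    AbstractSerDe serde = new LazyBinarySerDe();
    Properties props = new Properties();
    props.setProperty(serdeConstants.LIST_COLUMNS, "key");
    props.setProperty(serdeConstants.LIST_COLUMN_TYPES, "int");
    serde.initialize(new Configuration(), props, null);

    // Struct inspector describing the key row: a single Java int column.
    ObjectInspector keyOI = ObjectInspectorFactory.getStandardStructObjectInspector(
        Arrays.asList("key"),
        Arrays.asList((ObjectInspector) PrimitiveObjectInspectorFactory.javaIntObjectInspector));

    // serialize() yields a Writable; the Writable writes itself to the stream,
    // mirroring MapJoinKeyObject.write above.
    Writable container = serde.serialize(Arrays.asList((Object) 42), keyOI);
    try (ObjectOutputStream out = new ObjectOutputStream(new ByteArrayOutputStream())) {
      container.write(out);
    }
  }
}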
Use of org.apache.hadoop.hive.serde2.AbstractSerDe in project hive by apache.
From the class PTFTranslator, method createLazyBinarySerDe.
/*
 * OI & SerDe helper methods
 */
protected static AbstractSerDe createLazyBinarySerDe(Configuration cfg,
    StructObjectInspector oi, Map<String, String> serdePropsMap) throws SerDeException {
  serdePropsMap = serdePropsMap == null ? new LinkedHashMap<String, String>() : serdePropsMap;
  // Derive column names/types from the struct ObjectInspector.
  PTFDeserializer.addOIPropertiestoSerDePropsMap(oi, serdePropsMap);
  AbstractSerDe serDe = new LazyBinarySerDe();
  Properties p = new Properties();
  p.setProperty(org.apache.hadoop.hive.serde.serdeConstants.LIST_COLUMNS,
      serdePropsMap.get(org.apache.hadoop.hive.serde.serdeConstants.LIST_COLUMNS));
  p.setProperty(org.apache.hadoop.hive.serde.serdeConstants.LIST_COLUMN_TYPES,
      serdePropsMap.get(org.apache.hadoop.hive.serde.serdeConstants.LIST_COLUMN_TYPES));
  serDe.initialize(cfg, p, null);
  return serDe;
}
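A hedged sketch of how a caller might use this helper: build a struct ObjectInspector and let the method derive matching column/type properties. The method name createRowSerDe and the two-column inspector are hypothetical; since the helper is protected static, such a caller would live in PTFTranslator or a subclass.

protected static AbstractSerDe createRowSerDe(Configuration cfg) throws SerDeException {
  // Illustrative two-column row shape: (a: string, b: bigint).
  StructObjectInspector rowOI = ObjectInspectorFactory.getStandardStructObjectInspector(
      Arrays.asList("a", "b"),
      Arrays.asList((ObjectInspector) PrimitiveObjectInspectorFactory.writableStringObjectInspector,
          PrimitiveObjectInspectorFactory.writableLongObjectInspector));
  // Passing null lets createLazyBinarySerDe allocate the props map itself.
  return createLazyBinarySerDe(cfg, rowOI, null);
}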
Use of org.apache.hadoop.hive.serde2.AbstractSerDe in project hive by apache.
From the class LlapRowRecordReader, method initSerDe.
protected AbstractSerDe initSerDe(Configuration conf) throws SerDeException {
  Properties props = new Properties();
  // Build comma-separated column name and type lists from the schema.
  StringBuilder columnsBuffer = new StringBuilder();
  StringBuilder typesBuffer = new StringBuilder();
  boolean isFirst = true;
  for (FieldDesc colDesc : schema.getColumns()) {
    if (!isFirst) {
      columnsBuffer.append(',');
      typesBuffer.append(',');
    }
    columnsBuffer.append(colDesc.getName());
    typesBuffer.append(colDesc.getTypeInfo().toString());
    isFirst = false;
  }
  String columns = columnsBuffer.toString();
  String types = typesBuffer.toString();
  props.put(serdeConstants.LIST_COLUMNS, columns);
  props.put(serdeConstants.LIST_COLUMN_TYPES, types);
  props.put(serdeConstants.ESCAPE_CHAR, "\\");
  AbstractSerDe createdSerDe = createSerDe();
  createdSerDe.initialize(conf, props, null);
  return createdSerDe;
}
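The two comma-joined lists can also be built with streams; a brief equivalent sketch, assuming the same schema field and FieldDesc accessors as in the method above:

// Equivalent construction of the column/type lists using java.util.stream.
String columns = schema.getColumns().stream()
    .map(FieldDesc::getName)
    .collect(java.util.stream.Collectors.joining(","));
String types = schema.getColumns().stream()
    .map(fd -> fd.getTypeInfo().toString())
    .collect(java.util.stream.Collectors.joining(","));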
Use of org.apache.hadoop.hive.serde2.AbstractSerDe in project hive by apache.
From the class DynamicValueRegistryTez, method init.
@Override
public void init(RegistryConf conf) throws Exception {
  RegistryConfTez rct = (RegistryConfTez) conf;
  for (String inputSourceName : rct.baseWork.getInputSourceToRuntimeValuesInfo().keySet()) {
    LOG.info("Runtime value source: " + inputSourceName);
    LogicalInput runtimeValueInput = rct.inputs.get(inputSourceName);
    RuntimeValuesInfo runtimeValuesInfo =
        rct.baseWork.getInputSourceToRuntimeValuesInfo().get(inputSourceName);
    // Set up the deserializer/object inspector for the incoming data source.
    AbstractSerDe serDe =
        ReflectionUtils.newInstance(runtimeValuesInfo.getTableDesc().getSerDeClass(), null);
    serDe.initialize(rct.conf, runtimeValuesInfo.getTableDesc().getProperties(), null);
    ObjectInspector inspector = serDe.getObjectInspector();
    // Set up column expressions for the dynamic values using this input.
    List<ExprNodeEvaluator> colExprEvaluators = new ArrayList<ExprNodeEvaluator>();
    for (ExprNodeDesc expr : runtimeValuesInfo.getColExprs()) {
      ExprNodeEvaluator exprEval = ExprNodeEvaluatorFactory.get(expr, null);
      exprEval.initialize(inspector);
      colExprEvaluators.add(exprEval);
    }
    runtimeValueInput.start();
    List<Input> inputList = new ArrayList<Input>();
    inputList.add(runtimeValueInput);
    rct.processorContext.waitForAllInputsReady(inputList);
    KeyValueReader kvReader = (KeyValueReader) runtimeValueInput.getReader();
    long rowCount = 0;
    while (kvReader.next()) {
      Object row = serDe.deserialize((Writable) kvReader.getCurrentValue());
      rowCount++;
      for (int colIdx = 0; colIdx < colExprEvaluators.size(); ++colIdx) {
        // Evaluate each expression and save the result to the value registry.
        ExprNodeEvaluator eval = colExprEvaluators.get(colIdx);
        Object val = eval.evaluate(row);
        setValue(runtimeValuesInfo.getDynamicValueIDs().get(colIdx), val);
      }
    }
    // For now, expect a single row (min/max, aggregated bloom filter) or no rows.
    if (rowCount == 0) {
      LOG.debug("No input rows from " + inputSourceName + ", filling dynamic values with nulls");
      for (int colIdx = 0; colIdx < colExprEvaluators.size(); ++colIdx) {
        setValue(runtimeValuesInfo.getDynamicValueIDs().get(colIdx), null);
      }
    } else if (rowCount > 1) {
      throw new IllegalStateException("Expected 0 or 1 rows from " + inputSourceName
          + ", got " + rowCount);
    }
  }
}
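The core of the loop above is the compile/initialize/evaluate cycle applied to each column expression. A minimal hedged sketch of that cycle in isolation; the column name _col0 and the bigint type are illustrative, and serDe is assumed to be an already-initialized AbstractSerDe as in the method above.

// Sketch of the evaluator pattern used in init(): compile an expression
// against the SerDe's ObjectInspector, then evaluate it on a deserialized row.
static Object evaluateFirstColumn(AbstractSerDe serDe, Writable rawRow) throws Exception {
  ExprNodeDesc colExpr =
      new ExprNodeColumnDesc(TypeInfoFactory.longTypeInfo, "_col0", null, false);
  ExprNodeEvaluator eval = ExprNodeEvaluatorFactory.get(colExpr, null);
  eval.initialize(serDe.getObjectInspector());
  return eval.evaluate(serDe.deserialize(rawRow));
}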