Example 61 with AbstractSerDe

use of org.apache.hadoop.hive.serde2.AbstractSerDe in project hive by apache.

the class HiveMetaStoreUtils method getDeserializer.

public static Deserializer getDeserializer(Configuration conf, org.apache.hadoop.hive.metastore.api.Table table, String metaTable, boolean skipConfError, String lib) throws MetaException {
    AbstractSerDe deserializer;
    try {
        // Instantiate the SerDe class named by 'lib' via reflection.
        deserializer = ReflectionUtil.newInstance(conf.getClassByName(lib).asSubclass(AbstractSerDe.class), conf);
    } catch (Exception e) {
        throw new MetaException(e.getClass().getName() + " " + e.getMessage());
    }
    try {
        // Initialize with the table's schema properties; a metaTable name, when
        // present, is passed through as an extra property.
        Properties properties = MetaStoreUtils.getTableMetadata(table);
        if (metaTable != null) {
            properties.put("metaTable", metaTable);
        }
        deserializer.initialize(conf, properties, null);
    } catch (SerDeException e) {
        // Initialization failures are fatal unless the caller opted to skip
        // SerDe configuration errors.
        if (!skipConfError) {
            LOG.error("error in initSerDe: " + e.getClass().getName() + " " + e.getMessage(), e);
            throw new MetaException(e.getClass().getName() + " " + e.getMessage());
        }
    }
    return deserializer;
}
Also used : Properties(java.util.Properties) AbstractSerDe(org.apache.hadoop.hive.serde2.AbstractSerDe) LoginException(javax.security.auth.login.LoginException) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) IOException(java.io.IOException) SerDeException(org.apache.hadoop.hive.serde2.SerDeException)
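
A minimal caller sketch for the method above, assuming an already-connected IMetaStoreClient and placeholder database/table names; the wrapper class, helper name, and the import path for HiveMetaStoreUtils are assumptions for illustration, not verbatim Hive code.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.HiveMetaStoreUtils; // package path assumed
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.serde2.Deserializer;

public class DeserializerLoadSketch {
    // Hypothetical caller: look up the table, read its SerDe class name from
    // the storage descriptor, and hand both to getDeserializer.
    static Deserializer load(IMetaStoreClient client, Configuration conf) throws Exception {
        org.apache.hadoop.hive.metastore.api.Table table = client.getTable("default", "t");
        String serdeLib = table.getSd().getSerdeInfo().getSerializationLib();
        // metaTable = null (no metadata sub-table), skipConfError = false (fail fast)
        return HiveMetaStoreUtils.getDeserializer(conf, table, null, false, serdeLib);
    }
}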

Example 62 with AbstractSerDe

use of org.apache.hadoop.hive.serde2.AbstractSerDe in project hive by apache.

the class MapJoinKeyObject method write.

@Override
public void write(MapJoinObjectSerDeContext context, ObjectOutputStream out) throws IOException, SerDeException {
    AbstractSerDe serde = context.getSerDe();
    ObjectInspector objectInspector = context.getStandardOI();
    // Serialize the join key with the context's SerDe, then write the resulting
    // Writable straight to the stream.
    Writable container = serde.serialize(key, objectInspector);
    container.write(out);
}
Also used : ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) Writable(org.apache.hadoop.io.Writable) AbstractSerDe(org.apache.hadoop.hive.serde2.AbstractSerDe)
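
The context pairs the SerDe with the standard ObjectInspector it was initialized against, so the same pair can decode the key on the way back in. Below is a hedged sketch of the reverse step; the method name and wiring are illustrative, not the verbatim Hive read path.

// Reverse-direction sketch: refill the Writable from the stream, then let the
// SerDe decode it. 'container' is assumed to be the same Writable type that
// serialize() produced; ObjectInputStream implements DataInput, which is what
// Writable.readFields expects.
public Object readKey(MapJoinObjectSerDeContext context, ObjectInputStream in, Writable container)
        throws IOException, SerDeException {
    container.readFields(in);                          // stream bytes -> Writable
    return context.getSerDe().deserialize(container);  // Writable -> key object
}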

Example 63 with AbstractSerDe

use of org.apache.hadoop.hive.serde2.AbstractSerDe in project hive by apache.

the class PTFTranslator method createLazyBinarySerDe.

/*
   * OI & Serde helper methods
   */
protected static AbstractSerDe createLazyBinarySerDe(Configuration cfg, StructObjectInspector oi, Map<String, String> serdePropsMap) throws SerDeException {
    serdePropsMap = serdePropsMap == null ? new LinkedHashMap<String, String>() : serdePropsMap;
    // Derive the column name/type properties from the struct ObjectInspector.
    PTFDeserializer.addOIPropertiestoSerDePropsMap(oi, serdePropsMap);
    AbstractSerDe serDe = new LazyBinarySerDe();
    Properties p = new Properties();
    p.setProperty(org.apache.hadoop.hive.serde.serdeConstants.LIST_COLUMNS, serdePropsMap.get(org.apache.hadoop.hive.serde.serdeConstants.LIST_COLUMNS));
    p.setProperty(org.apache.hadoop.hive.serde.serdeConstants.LIST_COLUMN_TYPES, serdePropsMap.get(org.apache.hadoop.hive.serde.serdeConstants.LIST_COLUMN_TYPES));
    serDe.initialize(cfg, p, null);
    return serDe;
}
Also used : LazyBinarySerDe(org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe) Properties(java.util.Properties) AbstractSerDe(org.apache.hadoop.hive.serde2.AbstractSerDe) LinkedHashMap(java.util.LinkedHashMap)
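
Stripped of the PTF plumbing, the helper reduces to: name the columns and their types in serde properties, then initialize. A standalone sketch follows, for two assumed columns (id int, name string); the wrapper class is illustrative.

import java.util.Properties;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.AbstractSerDe;
import org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe;

public class LazyBinarySerDeSketch {
    // Initialize a LazyBinarySerDe for the assumed schema (id int, name string).
    static AbstractSerDe create(Configuration cfg) throws Exception {
        Properties p = new Properties();
        p.setProperty(serdeConstants.LIST_COLUMNS, "id,name");
        p.setProperty(serdeConstants.LIST_COLUMN_TYPES, "int,string");
        AbstractSerDe serDe = new LazyBinarySerDe();
        serDe.initialize(cfg, p, null); // third argument: partition properties (none here)
        return serDe;
    }
}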

Example 64 with AbstractSerDe

use of org.apache.hadoop.hive.serde2.AbstractSerDe in project hive by apache.

the class LlapRowRecordReader method initSerDe.

protected AbstractSerDe initSerDe(Configuration conf) throws SerDeException {
    Properties props = new Properties();
    // Build comma-separated column name and type lists from the LLAP schema.
    StringBuilder columnsBuffer = new StringBuilder();
    StringBuilder typesBuffer = new StringBuilder();
    boolean isFirst = true;
    for (FieldDesc colDesc : schema.getColumns()) {
        if (!isFirst) {
            columnsBuffer.append(',');
            typesBuffer.append(',');
        }
        columnsBuffer.append(colDesc.getName());
        typesBuffer.append(colDesc.getTypeInfo().toString());
        isFirst = false;
    }
    String columns = columnsBuffer.toString();
    String types = typesBuffer.toString();
    props.put(serdeConstants.LIST_COLUMNS, columns);
    props.put(serdeConstants.LIST_COLUMN_TYPES, types);
    props.put(serdeConstants.ESCAPE_CHAR, "\\");
    AbstractSerDe createdSerDe = createSerDe();
    createdSerDe.initialize(conf, props, null);
    return createdSerDe;
}
Also used : Properties(java.util.Properties) AbstractSerDe(org.apache.hadoop.hive.serde2.AbstractSerDe) FieldDesc(org.apache.hadoop.hive.llap.FieldDesc)
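
The first-element flag and two StringBuilders can be written more compactly with java.util.StringJoiner. A stylistic sketch of the same list construction, producing identical output ('schema', 'props', FieldDesc, and serdeConstants as in the method above):

// Equivalent column/type list construction using StringJoiner.
StringJoiner columns = new StringJoiner(",");
StringJoiner types = new StringJoiner(",");
for (FieldDesc colDesc : schema.getColumns()) {
    columns.add(colDesc.getName());
    types.add(colDesc.getTypeInfo().toString());
}
props.put(serdeConstants.LIST_COLUMNS, columns.toString());
props.put(serdeConstants.LIST_COLUMN_TYPES, types.toString());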

Example 65 with AbstractSerDe

use of org.apache.hadoop.hive.serde2.AbstractSerDe in project hive by apache.

the class DynamicValueRegistryTez method init.

@Override
public void init(RegistryConf conf) throws Exception {
    RegistryConfTez rct = (RegistryConfTez) conf;
    for (String inputSourceName : rct.baseWork.getInputSourceToRuntimeValuesInfo().keySet()) {
        LOG.info("Runtime value source: " + inputSourceName);
        LogicalInput runtimeValueInput = rct.inputs.get(inputSourceName);
        RuntimeValuesInfo runtimeValuesInfo = rct.baseWork.getInputSourceToRuntimeValuesInfo().get(inputSourceName);
        // Setup deserializer/obj inspectors for the incoming data source
        AbstractSerDe serDe = ReflectionUtils.newInstance(runtimeValuesInfo.getTableDesc().getSerDeClass(), null);
        serDe.initialize(rct.conf, runtimeValuesInfo.getTableDesc().getProperties(), null);
        ObjectInspector inspector = serDe.getObjectInspector();
        // Set up col expressions for the dynamic values using this input
        List<ExprNodeEvaluator> colExprEvaluators = new ArrayList<ExprNodeEvaluator>();
        for (ExprNodeDesc expr : runtimeValuesInfo.getColExprs()) {
            ExprNodeEvaluator exprEval = ExprNodeEvaluatorFactory.get(expr, null);
            exprEval.initialize(inspector);
            colExprEvaluators.add(exprEval);
        }
        runtimeValueInput.start();
        List<Input> inputList = new ArrayList<Input>();
        inputList.add(runtimeValueInput);
        rct.processorContext.waitForAllInputsReady(inputList);
        KeyValueReader kvReader = (KeyValueReader) runtimeValueInput.getReader();
        long rowCount = 0;
        while (kvReader.next()) {
            Object row = serDe.deserialize((Writable) kvReader.getCurrentValue());
            rowCount++;
            for (int colIdx = 0; colIdx < colExprEvaluators.size(); ++colIdx) {
                // Read each expression and save it to the value registry
                ExprNodeEvaluator eval = colExprEvaluators.get(colIdx);
                Object val = eval.evaluate(row);
                setValue(runtimeValuesInfo.getDynamicValueIDs().get(colIdx), val);
            }
        }
        // For now, expecting a single row (min/max, aggregated bloom filter), or no rows
        if (rowCount == 0) {
            LOG.debug("No input rows from " + inputSourceName + ", filling dynamic values with nulls");
            for (int colIdx = 0; colIdx < colExprEvaluators.size(); ++colIdx) {
                setValue(runtimeValuesInfo.getDynamicValueIDs().get(colIdx), null);
            }
        } else if (rowCount > 1) {
            throw new IllegalStateException("Expected 0 or 1 rows from " + inputSourceName + ", got " + rowCount);
        }
    }
}
Also used : ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) KeyValueReader(org.apache.tez.runtime.library.api.KeyValueReader) ExprNodeEvaluator(org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator) ArrayList(java.util.ArrayList) AbstractSerDe(org.apache.hadoop.hive.serde2.AbstractSerDe) RuntimeValuesInfo(org.apache.hadoop.hive.ql.parse.RuntimeValuesInfo) LogicalInput(org.apache.tez.runtime.api.LogicalInput) Input(org.apache.tez.runtime.api.Input) ExprNodeDesc(org.apache.hadoop.hive.ql.plan.ExprNodeDesc)
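
At its core, init() runs a generic "deserialize each record, then project each column expression" loop. Isolated as a sketch, with 'serDe' and 'colExprEvaluators' assumed to be prepared as in the method above, and 'records' standing in for the values pulled off the KeyValueReader:

// Pattern sketch, not the verbatim Hive loop.
for (Writable record : records) {
    Object row = serDe.deserialize(record);           // Writable -> row object
    for (ExprNodeEvaluator eval : colExprEvaluators) {
        Object val = eval.evaluate(row);              // project one column expression
        // hand 'val' to the consumer (setValue(...) in the real code)
    }
}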

Aggregations

AbstractSerDe (org.apache.hadoop.hive.serde2.AbstractSerDe): 61 usages
StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector): 31 usages
SerDeException (org.apache.hadoop.hive.serde2.SerDeException): 26 usages
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector): 23 usages
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 19 usages
ArrayList (java.util.ArrayList): 18 usages
Properties (java.util.Properties): 16 usages
IOException (java.io.IOException): 15 usages
BytesWritable (org.apache.hadoop.io.BytesWritable): 12 usages
TableDesc (org.apache.hadoop.hive.ql.plan.TableDesc): 9 usages
Writable (org.apache.hadoop.io.Writable): 8 usages
Test (org.junit.Test): 8 usages
Path (org.apache.hadoop.fs.Path): 7 usages
MapObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector): 7 usages
AbstractPrimitiveLazyObjectInspector (org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.AbstractPrimitiveLazyObjectInspector): 6 usages
LazyBinaryMapObjectInspector (org.apache.hadoop.hive.serde2.lazybinary.objectinspector.LazyBinaryMapObjectInspector): 6 usages
JavaBinaryObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.JavaBinaryObjectInspector): 6 usages
WritableBinaryObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableBinaryObjectInspector): 6 usages
LinkedHashMap (java.util.LinkedHashMap): 5 usages
MetaException (org.apache.hadoop.hive.metastore.api.MetaException): 5 usages