Search in sources :

Example 11 with AbstractSerDe

use of org.apache.hadoop.hive.serde2.AbstractSerDe in project hive by apache.

In the class SQLOperation, the method getSerDe:

/**
 * Lazily creates and caches the serde used to materialize result rows.
 *
 * <p>On first call, builds comma-separated column name and type lists from
 * {@code mResultSchema}, then initializes a {@link LazySimpleSerDe} with
 * those properties. Subsequent calls return the cached instance.
 *
 * @return the cached or newly initialized serde
 * @throws SQLException if the serde cannot be created or initialized
 */
private AbstractSerDe getSerDe() throws SQLException {
    if (serde != null) {
        return serde;
    }
    try {
        List<FieldSchema> fieldSchemas = mResultSchema.getFieldSchemas();
        // Build "a,b,c" style lists of column names and types, in schema order.
        StringBuilder namesSb = new StringBuilder();
        StringBuilder typesSb = new StringBuilder();
        if (fieldSchemas != null && !fieldSchemas.isEmpty()) {
            for (int pos = 0; pos < fieldSchemas.size(); pos++) {
                if (pos != 0) {
                    namesSb.append(",");
                    typesSb.append(",");
                }
                namesSb.append(fieldSchemas.get(pos).getName());
                typesSb.append(fieldSchemas.get(pos).getType());
            }
        }
        String names = namesSb.toString();
        String types = typesSb.toString();
        serde = new LazySimpleSerDe();
        Properties props = new Properties();
        // Only set the serde properties when the schema actually has columns;
        // LazySimpleSerDe falls back to its defaults otherwise.
        if (!names.isEmpty()) {
            LOG.debug("Column names: " + names);
            props.setProperty(serdeConstants.LIST_COLUMNS, names);
        }
        if (!types.isEmpty()) {
            LOG.debug("Column types: " + types);
            props.setProperty(serdeConstants.LIST_COLUMN_TYPES, types);
        }
        SerDeUtils.initializeSerDe(serde, queryState.getConf(), props, null);
    } catch (Exception ex) {
        // Log through the class logger instead of printStackTrace(), and
        // preserve the original exception as the cause of the SQLException.
        LOG.error("Failed to initialize result-set serde", ex);
        throw new SQLException("Could not create ResultSet: " + ex.getMessage(), ex);
    }
    return serde;
}
Also used : SQLException(java.sql.SQLException) HiveSQLException(org.apache.hive.service.cli.HiveSQLException) LazySimpleSerDe(org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) Properties(java.util.Properties) JsonGenerationException(org.codehaus.jackson.JsonGenerationException) UnsupportedEncodingException(java.io.UnsupportedEncodingException) CommandNeedRetryException(org.apache.hadoop.hive.ql.CommandNeedRetryException) SQLException(java.sql.SQLException) RejectedExecutionException(java.util.concurrent.RejectedExecutionException) JsonMappingException(org.codehaus.jackson.map.JsonMappingException) HiveSQLException(org.apache.hive.service.cli.HiveSQLException) IOException(java.io.IOException) SerDeException(org.apache.hadoop.hive.serde2.SerDeException)

Example 12 with AbstractSerDe

use of org.apache.hadoop.hive.serde2.AbstractSerDe in project hive by apache.

In the class FlatRowContainer, the method add:

/**
 * Called when loading the hashtable. Appends one serialized row ({@code value})
 * to the flat backing array, discovering the per-row field count from the
 * serde's object inspector on the first call.
 */
public void add(MapJoinObjectSerDeContext context, BytesWritable value) throws HiveException {
    AbstractSerDe serde = context.getSerDe();
    // has tag => need to set later
    isAliasFilterSet = !context.hasFilterTag();
    if (rowLength == UNKNOWN) {
        // First row: determine how many fields each row occupies in `array`.
        try {
            rowLength = ObjectInspectorUtils.getStructSize(serde.getObjectInspector());
        } catch (SerDeException ex) {
            throw new HiveException("Get structure size error", ex);
        }
        if (rowLength == 0) {
            array = EMPTY_OBJECT_ARRAY;
        }
    }
    if (rowLength > 0) {
        // Grow the flat array by one row's worth of slots, then deserialize
        // the value directly into the newly reserved region.
        int rowCount = (array.length / rowLength);
        listRealloc(array.length + rowLength);
        read(serde, value, rowCount);
    } else {
        // see rowLength javadoc
        // Zero-width rows: rowLength goes negative, apparently encoding the
        // row count as -(rows) since no slots are stored — confirm against
        // the rowLength field's javadoc elsewhere in this class.
        --rowLength;
    }
}
Also used : HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) AbstractSerDe(org.apache.hadoop.hive.serde2.AbstractSerDe) SerDeException(org.apache.hadoop.hive.serde2.SerDeException)

Example 13 with AbstractSerDe

use of org.apache.hadoop.hive.serde2.AbstractSerDe in project hive by apache.

In the class MapJoinEagerRowContainer, the method read:

/**
 * Deserializes {@code currentValue} with the context's serde, converts it to a
 * standard-object row, and appends it to this container. When the context
 * carries a filter tag, the trailing field is folded into {@code aliasFilter}.
 */
@SuppressWarnings("unchecked")
public void read(MapJoinObjectSerDeContext context, Writable currentValue) throws SerDeException {
    AbstractSerDe deserializer = context.getSerDe();
    Object raw = deserializer.deserialize(currentValue);
    List<Object> standardized = (List<Object>) ObjectInspectorUtils.copyToStandardObject(
        raw, deserializer.getObjectInspector(), ObjectInspectorCopyOption.WRITABLE);
    if (standardized == null) {
        // A null row is stored as an empty row.
        addRow(toList(EMPTY_OBJECT_ARRAY));
        return;
    }
    Object[] fields = standardized.toArray();
    if (context.hasFilterTag()) {
        // The filter tag rides in the last field of the row.
        aliasFilter &= ((ShortWritable) fields[fields.length - 1]).get();
    }
    addRow(toList(fields));
}
Also used : AbstractList(java.util.AbstractList) ArrayList(java.util.ArrayList) List(java.util.List) AbstractSerDe(org.apache.hadoop.hive.serde2.AbstractSerDe)

Example 14 with AbstractSerDe

use of org.apache.hadoop.hive.serde2.AbstractSerDe in project hive by apache.

In the class MapJoinEagerRowContainer, the method write:

/**
 * Serializes every row in this container to {@code out}, preceded by the row
 * count. Fails if the container was mutated during iteration or if the number
 * of rows written disagrees with the announced count.
 */
@Override
public void write(MapJoinObjectSerDeContext context, ObjectOutputStream out) throws IOException, SerDeException {
    AbstractSerDe serializer = context.getSerDe();
    ObjectInspector standardOI = context.getStandardOI();
    long expectedRows = rowCount();
    // Announce the row count up front so the reader knows how many to expect.
    out.writeLong(expectedRows);
    long written = 0L;
    List<Object> row = first();
    while (row != null) {
        serializer.serialize(row.toArray(), standardOI).write(out);
        written++;
        row = next();
    }
    if (expectedRows != rowCount()) {
        throw new ConcurrentModificationException("Values was modified while persisting");
    }
    if (written != expectedRows) {
        throw new IllegalStateException("Expected to write " + expectedRows + " but wrote " + written);
    }
}
Also used : ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) ConcurrentModificationException(java.util.ConcurrentModificationException) AbstractSerDe(org.apache.hadoop.hive.serde2.AbstractSerDe)

Example 15 with AbstractSerDe

use of org.apache.hadoop.hive.serde2.AbstractSerDe in project hive by apache.

In the class MapJoinKey, the method read:

/**
 * Deserializes {@code writable} with the context's serde and wraps the result
 * in a new {@link MapJoinKeyObject}.
 *
 * @return the populated key object
 */
@SuppressWarnings("deprecation")
public static MapJoinKey read(Output output, MapJoinObjectSerDeContext context, Writable writable) throws SerDeException, HiveException {
    AbstractSerDe keySerDe = context.getSerDe();
    Object deserialized = keySerDe.deserialize(writable);
    MapJoinKeyObject key = new MapJoinKeyObject();
    key.read(keySerDe.getObjectInspector(), deserialized);
    return key;
}
Also used : AbstractSerDe(org.apache.hadoop.hive.serde2.AbstractSerDe)

Aggregations

AbstractSerDe (org.apache.hadoop.hive.serde2.AbstractSerDe)40 StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector)22 ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector)16 SerDeException (org.apache.hadoop.hive.serde2.SerDeException)15 Properties (java.util.Properties)12 ArrayList (java.util.ArrayList)10 BytesWritable (org.apache.hadoop.io.BytesWritable)9 HiveException (org.apache.hadoop.hive.ql.metadata.HiveException)8 Writable (org.apache.hadoop.io.Writable)8 IOException (java.io.IOException)7 MapObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector)7 InputSplit (org.apache.hadoop.mapred.InputSplit)7 Test (org.junit.Test)7 AbstractPrimitiveLazyObjectInspector (org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.AbstractPrimitiveLazyObjectInspector)6 LazyBinaryMapObjectInspector (org.apache.hadoop.hive.serde2.lazybinary.objectinspector.LazyBinaryMapObjectInspector)6 JavaBinaryObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.JavaBinaryObjectInspector)6 WritableBinaryObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableBinaryObjectInspector)6 Path (org.apache.hadoop.fs.Path)5 RecordWriter (org.apache.hadoop.mapred.RecordWriter)5 ConcurrentModificationException (java.util.ConcurrentModificationException)4