Example 81 with SerDeException

Use of org.apache.hadoop.hive.serde2.SerDeException in project hive by apache.

From class DruidSerDe, method inferSchema.

/* Select query */
private void inferSchema(SelectQuery query, List<String> columnNames, List<PrimitiveTypeInfo> columnTypes, String address) throws SerDeException {
    // Timestamp column
    columnNames.add(DruidTable.DEFAULT_TIMESTAMP_COLUMN);
    columnTypes.add(TypeInfoFactory.timestampTypeInfo);
    // Dimension columns
    for (DimensionSpec ds : query.getDimensions()) {
        columnNames.add(ds.getOutputName());
        columnTypes.add(TypeInfoFactory.stringTypeInfo);
    }
    // The types of the metric columns are not explicit in the query, so we
    // issue a segment metadata query against Druid to discover them
    SegmentMetadataQueryBuilder builder = new Druids.SegmentMetadataQueryBuilder();
    builder.dataSource(query.getDataSource());
    builder.merge(true);
    builder.analysisTypes();
    SegmentMetadataQuery metadataQuery = builder.build();
    // Execute query in Druid
    SegmentAnalysis schemaInfo;
    try {
        schemaInfo = submitMetadataRequest(address, metadataQuery);
    } catch (IOException e) {
        throw new SerDeException(e);
    }
    if (schemaInfo == null) {
        throw new SerDeException("Connected to Druid but could not retrieve datasource information");
    }
    for (String metric : query.getMetrics()) {
        columnNames.add(metric);
        columnTypes.add(DruidSerDeUtils.convertDruidToHiveType(schemaInfo.getColumns().get(metric).getType()));
    }
}
Also used: DimensionSpec(io.druid.query.dimension.DimensionSpec) SegmentMetadataQuery(io.druid.query.metadata.metadata.SegmentMetadataQuery) SegmentMetadataQueryBuilder(io.druid.query.Druids.SegmentMetadataQueryBuilder) SegmentAnalysis(io.druid.query.metadata.metadata.SegmentAnalysis) IOException(java.io.IOException) SerDeException(org.apache.hadoop.hive.serde2.SerDeException)
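
The wrap-and-rethrow shape above is the usual way checked I/O failures surface as SerDeException: catch the low-level exception, wrap it so the root cause stays attached, and treat a null result as its own error. A minimal, self-contained sketch of the same pattern (fetchSchema is a hypothetical stand-in for submitMetadataRequest, not a Hive API):

import java.io.IOException;
import org.apache.hadoop.hive.serde2.SerDeException;

public class MetadataFetchSketch {

    // Hypothetical stand-in for DruidSerDe.submitMetadataRequest
    private String fetchSchema(String address) throws IOException {
        throw new IOException("broker unreachable: " + address);
    }

    public String schemaOrThrow(String address) throws SerDeException {
        String schema;
        try {
            schema = fetchSchema(address);
        } catch (IOException e) {
            // Wrap the checked I/O failure so callers see a SerDeException
            // while the original cause remains inspectable
            throw new SerDeException(e);
        }
        if (schema == null) {
            throw new SerDeException("Connected to Druid but could not retrieve datasource information");
        }
        return schema;
    }
}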

Example 82 with SerDeException

Use of org.apache.hadoop.hive.serde2.SerDeException in project hive by apache.

From class HBaseSerDeParameters, method createValueFactories.

private List<HBaseValueFactory> createValueFactories(Configuration conf, Properties tbl) throws SerDeException {
    List<HBaseValueFactory> valueFactories = new ArrayList<HBaseValueFactory>();
    try {
        for (int i = 0; i < columnMappings.size(); i++) {
            // Each column mapping declares how its values are serialized in HBase
            String serType = getSerializationType(conf, tbl, columnMappings.getColumnsMapping()[i]);
            if (AVRO_SERIALIZATION_TYPE.equals(serType)) {
                // Avro column: resolve its schema and build an Avro-aware factory
                Schema schema = getSchema(conf, tbl, columnMappings.getColumnsMapping()[i]);
                valueFactories.add(new AvroHBaseValueFactory(i, schema));
            } else if (STRUCT_SERIALIZATION_TYPE.equals(serType)) {
                // Struct column: a custom serializer class is mandatory
                String structValueClassName = tbl.getProperty(HBaseSerDe.HBASE_STRUCT_SERIALIZER_CLASS);
                if (structValueClassName == null) {
                    throw new IllegalArgumentException(HBaseSerDe.HBASE_STRUCT_SERIALIZER_CLASS + " must be set for hbase columns of type [" + STRUCT_SERIALIZATION_TYPE + "]");
                }
                Class<?> structValueClass = loadClass(structValueClassName, job);
                valueFactories.add(new StructHBaseValueFactory(i, structValueClass));
            } else {
                // Anything else falls back to the default lazy factory
                valueFactories.add(new DefaultHBaseValueFactory(i));
            }
        }
    } catch (Exception e) {
        throw new SerDeException(e);
    }
    return valueFactories;
}
Also used: DefaultHBaseValueFactory(org.apache.hadoop.hive.hbase.struct.DefaultHBaseValueFactory) Schema(org.apache.avro.Schema) ArrayList(java.util.ArrayList) SerDeException(org.apache.hadoop.hive.serde2.SerDeException) StructHBaseValueFactory(org.apache.hadoop.hive.hbase.struct.StructHBaseValueFactory) HBaseValueFactory(org.apache.hadoop.hive.hbase.struct.HBaseValueFactory) AvroHBaseValueFactory(org.apache.hadoop.hive.hbase.struct.AvroHBaseValueFactory)
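
Note that the struct branch refuses to proceed unless the table properties name a serializer class. A minimal sketch of supplying that property (the serializer class name below is a hypothetical placeholder):

import java.util.Properties;

import org.apache.hadoop.hive.hbase.HBaseSerDe;

public class StructSerializerPropertySketch {
    public static void main(String[] args) {
        Properties tbl = new Properties();
        // Without this property, createValueFactories throws an
        // IllegalArgumentException, which its catch block wraps into a SerDeException
        tbl.setProperty(HBaseSerDe.HBASE_STRUCT_SERIALIZER_CLASS,
                "com.example.MyHBaseStructValue"); // hypothetical serializer class
        System.out.println(tbl.getProperty(HBaseSerDe.HBASE_STRUCT_SERIALIZER_CLASS));
    }
}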

Example 83 with SerDeException

Use of org.apache.hadoop.hive.serde2.SerDeException in project hive by apache.

From class TestHBaseSerDe, method deserializeAndSerialize.

private void deserializeAndSerialize(HBaseSerDe serDe, Result r, Put p, Object[] expectedFieldsData) throws SerDeException {
    // Get the row structure
    StructObjectInspector oi = (StructObjectInspector) serDe.getObjectInspector();
    List<? extends StructField> fieldRefs = oi.getAllStructFieldRefs();
    assertEquals(9, fieldRefs.size());
    // Deserialize
    Object row = serDe.deserialize(new ResultWritable(r));
    for (int i = 0; i < fieldRefs.size(); i++) {
        Object fieldData = oi.getStructFieldData(row, fieldRefs.get(i));
        if (fieldData != null) {
            fieldData = ((LazyPrimitive<?, ?>) fieldData).getWritableObject();
        }
        assertEquals("Field " + i, expectedFieldsData[i], fieldData);
    }
    // Serialize
    assertEquals(PutWritable.class, serDe.getSerializedClass());
    PutWritable serializedPut = (PutWritable) serDe.serialize(row, oi);
    assertEquals("Serialized data", p.toString(), String.valueOf(serializedPut.getPut()));
}
Also used: StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector)
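
The deserialize-inspect-serialize round trip above is a standard shape for SerDe tests. A self-contained sketch of the same round trip using LazySimpleSerDe instead of HBaseSerDe (assuming the two-argument initialize available in this Hive line, and the default ^A field delimiter):

import java.util.Properties;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.io.Text;

public class SerDeRoundTripSketch {
    public static void main(String[] args) throws Exception {
        Properties tbl = new Properties();
        tbl.setProperty(serdeConstants.LIST_COLUMNS, "a,b");
        tbl.setProperty(serdeConstants.LIST_COLUMN_TYPES, "int,string");

        LazySimpleSerDe serde = new LazySimpleSerDe();
        serde.initialize(new Configuration(), tbl);

        // Deserialize a delimited line, then serialize it back through the same inspector
        Object row = serde.deserialize(new Text("1\u0001hello")); // \u0001 is the default delimiter
        StructObjectInspector oi = (StructObjectInspector) serde.getObjectInspector();
        Text back = (Text) serde.serialize(row, oi);
        System.out.println(back); // prints the original delimited line
    }
}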

Example 84 with SerDeException

Use of org.apache.hadoop.hive.serde2.SerDeException in project hive by apache.

From class TestHBaseSerDe, method deserializeAndSerializeHiveStructColumnFamily.

private void deserializeAndSerializeHiveStructColumnFamily(HBaseSerDe serDe, Result r, Put p, Object[] expectedFieldsData, int[] expectedMapSize, List<Object> expectedQualifiers, Object notPresentKey) throws SerDeException, IOException {
    StructObjectInspector soi = (StructObjectInspector) serDe.getObjectInspector();
    List<? extends StructField> fieldRefs = soi.getAllStructFieldRefs();
    Object row = serDe.deserialize(new ResultWritable(r));
    int k = 0;
    for (int i = 0; i < fieldRefs.size(); i++) {
        Object fieldData = soi.getStructFieldData(row, fieldRefs.get(i));
        assertNotNull(fieldData);
        if (fieldData instanceof LazyPrimitive<?, ?>) {
            assertEquals(expectedFieldsData[i], ((LazyPrimitive<?, ?>) fieldData).getWritableObject());
        } else if (fieldData instanceof LazyHBaseCellMap) {
            for (int j = 0; j < ((LazyHBaseCellMap) fieldData).getMapSize(); j++) {
                assertEquals(expectedFieldsData[k + 1], ((LazyHBaseCellMap) fieldData).getMapValueElement(expectedQualifiers.get(k)).toString().trim());
                k++;
            }
            assertEquals(expectedMapSize[i - 1], ((LazyHBaseCellMap) fieldData).getMapSize());
            // Make sure that the unwanted key is not present in the map
            assertNull(((LazyHBaseCellMap) fieldData).getMapValueElement(notPresentKey));
        } else {
            fail("Error: field data not an instance of LazyPrimitive<?, ?> or LazyHBaseCellMap");
        }
    }
    SerDeUtils.getJSONString(row, soi);
    // Now serialize
    Put put = ((PutWritable) serDe.serialize(row, soi)).getPut();
    assertNotNull(put);
}
Also used: LazyPrimitive(org.apache.hadoop.hive.serde2.lazy.LazyPrimitive) Put(org.apache.hadoop.hbase.client.Put) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector)
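
The SerDeUtils.getJSONString call near the end doubles as a smoke check that the deserialized row is fully traversable through its inspector. A minimal sketch of that call in isolation, with a hand-built list inspector:

import java.util.Arrays;

import org.apache.hadoop.hive.serde2.SerDeUtils;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class JsonStringSketch {
    public static void main(String[] args) {
        ObjectInspector listOI = ObjectInspectorFactory.getStandardListObjectInspector(
                PrimitiveObjectInspectorFactory.javaStringObjectInspector);
        // Walks the object through its inspector and renders it as JSON text
        System.out.println(SerDeUtils.getJSONString(Arrays.asList("a", "b"), listOI)); // ["a","b"]
    }
}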

Example 85 with SerDeException

Use of org.apache.hadoop.hive.serde2.SerDeException in project hive by apache.

From class DelimitedInputWriter, method encode.

@Override
public Object encode(byte[] record) throws SerializationError {
    try {
        // Wrap the raw delimited bytes and let the SerDe parse them into a Hive row object
        BytesWritable blob = new BytesWritable();
        blob.set(record, 0, record.length);
        return serde.deserialize(blob);
    } catch (SerDeException e) {
        throw new SerializationError("Unable to convert byte[] record into Object", e);
    }
}
Also used: BytesWritable(org.apache.hadoop.io.BytesWritable) SerDeException(org.apache.hadoop.hive.serde2.SerDeException)
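
The only subtlety here is the BytesWritable wrapping step: set() copies the given byte range into the writable's own buffer rather than aliasing the caller's array. A minimal sketch:

import org.apache.hadoop.io.BytesWritable;

public class BytesWritableSketch {
    public static void main(String[] args) {
        byte[] record = "1,foo".getBytes();
        BytesWritable blob = new BytesWritable();
        // Copies record[0..length) into the writable's internal buffer
        blob.set(record, 0, record.length);
        System.out.println(blob.getLength()); // prints 5
    }
}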

Aggregations

SerDeException (org.apache.hadoop.hive.serde2.SerDeException): 124 usages
StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector): 108 usages
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector): 100 usages
ArrayList (java.util.ArrayList): 98 usages
Properties (java.util.Properties): 59 usages
Test (org.junit.Test): 59 usages
Configuration (org.apache.hadoop.conf.Configuration): 52 usages
TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo): 52 usages
Text (org.apache.hadoop.io.Text): 50 usages
IOException (java.io.IOException): 37 usages
ListObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector): 33 usages
Schema (org.apache.avro.Schema): 31 usages
StructField (org.apache.hadoop.hive.serde2.objectinspector.StructField): 31 usages
MapObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector): 28 usages
PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector): 28 usages
PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo): 24 usages
Put (org.apache.hadoop.hbase.client.Put): 22 usages
LazySerDeParameters (org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters): 22 usages
IntWritable (org.apache.hadoop.io.IntWritable): 22 usages
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 21 usages