Search in sources :

Example 1 with TypeDesc

Use of org.apache.hadoop.hive.llap.TypeDesc in project hive by apache.

The class GenericUDTFGetSplits, method convertTypeString.

/**
 * Maps a Hive type string (e.g. "int", "varchar(10)", "decimal(10,2)") to the
 * corresponding LLAP {@link TypeDesc}. Char/varchar length and decimal
 * precision/scale are carried over from the parsed type info.
 *
 * @param typeString Hive type name as produced by the metastore
 * @return the equivalent LLAP TypeDesc
 * @throws HiveException if the primitive category has no LLAP mapping
 */
private TypeDesc convertTypeString(String typeString) throws HiveException {
    TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(typeString);
    // Only primitive columns are supported here; complex types fail fast.
    Preconditions.checkState(typeInfo.getCategory() == ObjectInspector.Category.PRIMITIVE, "Unsupported non-primitive type " + typeString);
    PrimitiveTypeInfo primitiveTypeInfo = (PrimitiveTypeInfo) typeInfo;
    switch (primitiveTypeInfo.getPrimitiveCategory()) {
        case BOOLEAN:
            return new TypeDesc(TypeDesc.Type.BOOLEAN);
        case BYTE:
            return new TypeDesc(TypeDesc.Type.TINYINT);
        case SHORT:
            return new TypeDesc(TypeDesc.Type.SMALLINT);
        case INT:
            return new TypeDesc(TypeDesc.Type.INT);
        case LONG:
            return new TypeDesc(TypeDesc.Type.BIGINT);
        case FLOAT:
            return new TypeDesc(TypeDesc.Type.FLOAT);
        case DOUBLE:
            return new TypeDesc(TypeDesc.Type.DOUBLE);
        case STRING:
            return new TypeDesc(TypeDesc.Type.STRING);
        case CHAR:
            // Fixed-length char keeps its declared maximum length.
            return new TypeDesc(TypeDesc.Type.CHAR, ((CharTypeInfo) primitiveTypeInfo).getLength());
        case VARCHAR:
            return new TypeDesc(TypeDesc.Type.VARCHAR, ((VarcharTypeInfo) primitiveTypeInfo).getLength());
        case DATE:
            return new TypeDesc(TypeDesc.Type.DATE);
        case TIMESTAMP:
            return new TypeDesc(TypeDesc.Type.TIMESTAMP);
        case BINARY:
            return new TypeDesc(TypeDesc.Type.BINARY);
        case DECIMAL:
            // Decimal carries both precision and scale.
            DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) primitiveTypeInfo;
            return new TypeDesc(TypeDesc.Type.DECIMAL, decimalTypeInfo.getPrecision(), decimalTypeInfo.getScale());
        default:
            throw new HiveException("Unsupported type " + typeString);
    }
}
Also used : DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) VarcharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) CharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo) TypeDesc(org.apache.hadoop.hive.llap.TypeDesc) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) VarcharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo) DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) CharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo)

Example 2 with TypeDesc

Use of org.apache.hadoop.hive.llap.TypeDesc in project hive by apache.

The class GenericUDTFGetSplits, method convertSchema.

/**
 * Converts a metastore {@code org.apache.hadoop.hive.metastore.api.Schema}
 * into an LLAP {@link Schema} by translating each field's name and type
 * string into a {@link FieldDesc}.
 *
 * @param obj the metastore schema (passed as Object; cast internally)
 * @return the equivalent LLAP Schema
 * @throws HiveException if any column has an unsupported type
 */
private Schema convertSchema(Object obj) throws HiveException {
    org.apache.hadoop.hive.metastore.api.Schema schema = (org.apache.hadoop.hive.metastore.api.Schema) obj;
    List<FieldDesc> colDescs = new ArrayList<FieldDesc>();
    for (FieldSchema fs : schema.getFieldSchemas()) {
        colDescs.add(new FieldDesc(fs.getName(), convertTypeString(fs.getType())));
    }
    // Return directly: the original bound the result to a local named
    // "Schema", which shadowed the type name and broke naming conventions.
    return new Schema(colDescs);
}
Also used : FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) Schema(org.apache.hadoop.hive.llap.Schema) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) ArrayList(java.util.ArrayList) TypeDesc(org.apache.hadoop.hive.llap.TypeDesc) FieldDesc(org.apache.hadoop.hive.llap.FieldDesc)

Example 3 with TypeDesc

Use of org.apache.hadoop.hive.llap.TypeDesc in project hive by apache.

The class TestLlapInputSplit, method testWritable.

@Test
public void testWritable() throws Exception {
    // Build a split with representative payloads. NOTE(review): getBytes()
    // uses the platform charset; safe here because the literals are ASCII.
    int splitNum = 88;
    byte[] planBytes = "0123456789987654321".getBytes();
    byte[] fragmentBytes = "abcdefghijklmnopqrstuvwxyz".getBytes();
    byte[] tokenBytes = new byte[] { 1 };
    SplitLocationInfo[] locations = { new SplitLocationInfo("location1", false), new SplitLocationInfo("location2", false) };
    ArrayList<FieldDesc> fields = new ArrayList<FieldDesc>();
    fields.add(new FieldDesc("col1", new TypeDesc(TypeDesc.Type.STRING)));
    fields.add(new FieldDesc("col2", new TypeDesc(TypeDesc.Type.INT)));
    Schema schema = new Schema(fields);
    LlapInputSplit original = new LlapInputSplit(splitNum, planBytes, fragmentBytes, null, locations, schema, "hive", tokenBytes);

    // Serialize via Writable, then deserialize into a fresh instance.
    ByteArrayOutputStream serialized = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(serialized);
    original.write(out);
    ByteArrayInputStream byteIn = new ByteArrayInputStream(serialized.toByteArray());
    DataInputStream in = new DataInputStream(byteIn);
    LlapInputSplit roundTripped = new LlapInputSplit();
    roundTripped.readFields(in);

    // readFields must consume exactly the bytes write produced.
    assertEquals(0, byteIn.available());
    checkLlapSplits(original, roundTripped);
}
Also used : SplitLocationInfo(org.apache.hadoop.mapred.SplitLocationInfo) DataOutputStream(java.io.DataOutputStream) Schema(org.apache.hadoop.hive.llap.Schema) ArrayList(java.util.ArrayList) TypeDesc(org.apache.hadoop.hive.llap.TypeDesc) ByteArrayOutputStream(java.io.ByteArrayOutputStream) DataInputStream(java.io.DataInputStream) LlapInputSplit(org.apache.hadoop.hive.llap.LlapInputSplit) ByteArrayInputStream(java.io.ByteArrayInputStream) FieldDesc(org.apache.hadoop.hive.llap.FieldDesc) Test(org.junit.Test)

Aggregations

TypeDesc (org.apache.hadoop.hive.llap.TypeDesc)3 ArrayList (java.util.ArrayList)2 FieldDesc (org.apache.hadoop.hive.llap.FieldDesc)2 Schema (org.apache.hadoop.hive.llap.Schema)2 ByteArrayInputStream (java.io.ByteArrayInputStream)1 ByteArrayOutputStream (java.io.ByteArrayOutputStream)1 DataInputStream (java.io.DataInputStream)1 DataOutputStream (java.io.DataOutputStream)1 LlapInputSplit (org.apache.hadoop.hive.llap.LlapInputSplit)1 FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema)1 HiveException (org.apache.hadoop.hive.ql.metadata.HiveException)1 CharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo)1 DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo)1 PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo)1 TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo)1 VarcharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo)1 SplitLocationInfo (org.apache.hadoop.mapred.SplitLocationInfo)1 Test (org.junit.Test)1