Example 86 with TypeInfoFactory.stringTypeInfo

Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.stringTypeInfo in project hive by apache.

From the class TestGenericUDFDateFormat, method testWrongDateStr.

@Test
public void testWrongDateStr() throws HiveException {
    GenericUDFDateFormat udf = new GenericUDFDateFormat();
    ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
    // "EEEE" is the full day-of-week name pattern
    Text fmtText = new Text("EEEE");
    ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(TypeInfoFactory.stringTypeInfo, fmtText);
    ObjectInspector[] arguments = { valueOI0, valueOI1 };
    udf.initialize(arguments);
    runAndVerifyStr("2016-02-30 10:30:45", fmtText, null, udf);
    runAndVerifyStr("2016-02-30 10:30:45", fmtText, null, udf);
    runAndVerifyStr("2014-01-32", fmtText, null, udf);
    runAndVerifyStr("01/14/2014", fmtText, null, udf);
    runAndVerifyStr(null, fmtText, null, udf);
}
Also used: ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector), Text (org.apache.hadoop.io.Text), Test (org.junit.Test)
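
The runAndVerifyStr helper these tests call is not shown on this page. A minimal sketch of what such a helper typically looks like for these string-returning UDF tests, assuming JUnit's assertEquals and Hive's GenericUDF.DeferredJavaObject wrapper (names and message text are illustrative):

// Sketch only: the real helper lives in TestGenericUDFDateFormat and may
// differ in detail.
private void runAndVerifyStr(String str, Text fmtText, String expResult, GenericUDF udf)
        throws HiveException {
    GenericUDF.DeferredObject valueObj0 =
            new GenericUDF.DeferredJavaObject(str != null ? new Text(str) : null);
    GenericUDF.DeferredObject valueObj1 = new GenericUDF.DeferredJavaObject(fmtText);
    GenericUDF.DeferredObject[] args = { valueObj0, valueObj1 };
    Text output = (Text) udf.evaluate(args);
    assertEquals("date_format() test", expResult, output != null ? output.toString() : null);
}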

Example 87 with TypeInfoFactory.stringTypeInfo

Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.stringTypeInfo in project hive by apache.

From the class TestGenericUDFAesDecrypt, method testAesDec256ConstStr.

@Test
public void testAesDec256ConstStr() throws HiveException, NoSuchAlgorithmException {
    int maxKeyLen = Cipher.getMaxAllowedKeyLength("AES");
    // Skip the test when the JCE Unlimited Strength Jurisdiction Policy Files
    // are not installed and the allowed AES key length is capped below 256 bits.
    if (maxKeyLen < 256) {
        return;
    }
    GenericUDFAesDecrypt udf = new GenericUDFAesDecrypt();
    ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableBinaryObjectInspector;
    // two 16-character halves concatenated: a 32-byte key selects AES-256
    Text keyWr = new Text("1234567890123456" + "1234567890123456");
    ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(TypeInfoFactory.stringTypeInfo, keyWr);
    ObjectInspector[] arguments = { valueOI0, valueOI1 };
    udf.initialize(arguments);
    runAndVerifyStr("nYfCuJeRd5eD60yXDw7WEA==", keyWr, "ABC", udf);
    runAndVerifyStr("mVClVqZ6W4VF6b842FOgCA==", keyWr, "", udf);
    // a null ciphertext decrypts to null
    runAndVerifyStr(null, keyWr, null, udf);
}
Also used: ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector), Text (org.apache.hadoop.io.Text), Test (org.junit.Test)
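
Here the first argument is a binary object inspector, so the AES tests' runAndVerifyStr variant (also not shown on this page) must Base64-decode the ciphertext string into a BytesWritable before invoking the UDF. A hedged sketch, assuming java.util.Base64 and the same DeferredJavaObject pattern as above:

// Sketch only: the real helper is defined in TestGenericUDFAesDecrypt and may
// differ in detail.
private void runAndVerifyStr(String ciphertextB64, Text keyWr, String expResult, GenericUDF udf)
        throws HiveException {
    BytesWritable cipherWr = ciphertextB64 != null
            ? new BytesWritable(java.util.Base64.getDecoder().decode(ciphertextB64))
            : null;
    GenericUDF.DeferredObject[] args = {
            new GenericUDF.DeferredJavaObject(cipherWr),
            new GenericUDF.DeferredJavaObject(keyWr) };
    Text output = (Text) udf.evaluate(args);
    assertEquals("aes_decrypt() test", expResult, output != null ? output.toString() : null);
}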

Example 88 with TypeInfoFactory.stringTypeInfo

Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.stringTypeInfo in project hive by apache.

From the class TestGenericUDFAesDecrypt, method testAesDec128ConstStr.

@Test
public void testAesDec128ConstStr() throws HiveException {
    GenericUDFAesDecrypt udf = new GenericUDFAesDecrypt();
    ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableBinaryObjectInspector;
    // a 16-byte key selects AES-128
    Text keyWr = new Text("1234567890123456");
    ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(TypeInfoFactory.stringTypeInfo, keyWr);
    ObjectInspector[] arguments = { valueOI0, valueOI1 };
    udf.initialize(arguments);
    runAndVerifyStr("y6Ss+zCYObpCbgfWfyNWTw==", keyWr, "ABC", udf);
    runAndVerifyStr("BQGHoM3lqYcsurCRq3PlUw==", keyWr, "", udf);
    // a null ciphertext decrypts to null
    runAndVerifyStr(null, keyWr, null, udf);
}
Also used: ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector), Text (org.apache.hadoop.io.Text), Test (org.junit.Test)
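
The 128-bit test vector above can be reproduced outside Hive. This standalone sketch assumes Hive's aes_decrypt corresponds to Cipher.getInstance("AES"), which resolves to AES/ECB/PKCS5Padding on standard JDKs (consistent with MySQL's AES_DECRYPT); treat it as an illustration rather than a statement of Hive's implementation:

import java.nio.charset.StandardCharsets;
import java.util.Base64;
import javax.crypto.Cipher;
import javax.crypto.spec.SecretKeySpec;

public class AesVectorCheck {
    public static void main(String[] args) throws Exception {
        byte[] key = "1234567890123456".getBytes(StandardCharsets.UTF_8); // 16 bytes -> AES-128
        byte[] ct = Base64.getDecoder().decode("y6Ss+zCYObpCbgfWfyNWTw==");
        Cipher cipher = Cipher.getInstance("AES");
        cipher.init(Cipher.DECRYPT_MODE, new SecretKeySpec(key, "AES"));
        // Expected output: ABC
        System.out.println(new String(cipher.doFinal(ct), StandardCharsets.UTF_8));
    }
}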

Example 89 with TypeInfoFactory.stringTypeInfo

Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.stringTypeInfo in project hive by apache.

From the class ShowPartitionsOperation, method getPartitionNames.

// Get partition names if order or filter is specified.
private List<String> getPartitionNames(Table tbl) throws HiveException {
    List<String> partNames;
    ExprNodeDesc predicate = desc.getCond();
    if (desc.getPartSpec() != null) {
        List<FieldSchema> fieldSchemas = tbl.getPartitionKeys();
        Map<String, String> colTypes = new HashMap<String, String>();
        for (FieldSchema fs : fieldSchemas) {
            colTypes.put(fs.getName().toLowerCase(), fs.getType());
        }
        for (Map.Entry<String, String> entry : desc.getPartSpec().entrySet()) {
            String type = colTypes.get(entry.getKey().toLowerCase());
            PrimitiveTypeInfo pti = TypeInfoFactory.getPrimitiveTypeInfo(type);
            Object val = entry.getValue();
            if (!pti.equals(TypeInfoFactory.stringTypeInfo)) {
                Object converted = ObjectInspectorConverters.getConverter(TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(TypeInfoFactory.stringTypeInfo), TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(pti)).convert(val);
                if (converted == null) {
                    throw new HiveException("Cannot convert to " + type + " from string, value: " + val);
                }
                val = converted;
            }
            List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
            children.add(new ExprNodeColumnDesc(pti, entry.getKey().toLowerCase(), null, true));
            children.add(new ExprNodeConstantDesc(pti, val));
            ExprNodeDesc exprNodeDesc = ExprNodeGenericFuncDesc.newInstance(new GenericUDFOPEqual(), children);
            predicate = (predicate == null) ? exprNodeDesc : ExprNodeDescUtils.mergePredicates(exprNodeDesc, predicate);
        }
    }
    partNames = context.getDb().getPartitionNames(tbl, (ExprNodeGenericFuncDesc) predicate, desc.getOrder(), desc.getLimit());
    return partNames;
}
Also used: ExprNodeConstantDesc (org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc), HiveException (org.apache.hadoop.hive.ql.metadata.HiveException), HashMap (java.util.HashMap), FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema), ArrayList (java.util.ArrayList), ExprNodeGenericFuncDesc (org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc), PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo), ExprNodeColumnDesc (org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc), GenericUDFOPEqual (org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual), ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc), Map (java.util.Map)
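
For illustration, a standalone sketch (hypothetical column name and value) of the equality expression each partition-spec entry contributes before being merged into the overall predicate:

// Hypothetical: build the expression `ds = '2024-01-01'` for a string
// partition column, mirroring the loop body of getPartitionNames above.
PrimitiveTypeInfo pti = TypeInfoFactory.stringTypeInfo;
List<ExprNodeDesc> children = new ArrayList<>();
children.add(new ExprNodeColumnDesc(pti, "ds", null, true)); // partition column reference
children.add(new ExprNodeConstantDesc(pti, "2024-01-01"));   // literal value
// newInstance throws UDFArgumentException (a HiveException) on arity/type errors.
ExprNodeDesc equalsExpr = ExprNodeGenericFuncDesc.newInstance(new GenericUDFOPEqual(), children);
// Further entries are AND-ed in via ExprNodeDescUtils.mergePredicates(equalsExpr, predicate).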

Example 90 with TypeInfoFactory.stringTypeInfo

Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.stringTypeInfo in project hive by apache.

From the class ReplUtils, method genPartSpecs.

public static Map<Integer, List<ExprNodeGenericFuncDesc>> genPartSpecs(Table table, List<Map<String, String>> partitions) throws SemanticException {
    Map<Integer, List<ExprNodeGenericFuncDesc>> partSpecs = new HashMap<>();
    int partPrefixLength = 0;
    if (partitions.size() > 0) {
        // Pick the length of the first ptn; we expect all ptns listed to have
        // the same number of key-vals.
        partPrefixLength = partitions.get(0).size();
    }
    List<ExprNodeGenericFuncDesc> partitionDesc = new ArrayList<>();
    for (Map<String, String> ptn : partitions) {
        // Convert each key-value map into an equality predicate.
        ExprNodeGenericFuncDesc expr = null;
        for (Map.Entry<String, String> kvp : ptn.entrySet()) {
            String key = kvp.getKey();
            Object val = kvp.getValue();
            String type = table.getPartColByName(key).getType();
            PrimitiveTypeInfo pti = TypeInfoFactory.getPrimitiveTypeInfo(type);
            ExprNodeColumnDesc column = new ExprNodeColumnDesc(pti, key, null, true);
            ExprNodeGenericFuncDesc op = PartitionUtils.makeBinaryPredicate("=", column, new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, val));
            expr = (expr == null) ? op : PartitionUtils.makeBinaryPredicate("and", expr, op);
        }
        if (expr != null) {
            partitionDesc.add(expr);
        }
    }
    if (partitionDesc.size() > 0) {
        partSpecs.put(partPrefixLength, partitionDesc);
    }
    return partSpecs;
}
Also used: ExprNodeConstantDesc (org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc), HashMap (java.util.HashMap), ArrayList (java.util.ArrayList), ExprNodeGenericFuncDesc (org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc), PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo), ExprNodeColumnDesc (org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc), List (java.util.List), Map (java.util.Map)
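
A brief usage sketch (hypothetical Table and partition maps) showing the shape of the returned map; note that, unlike getPartitionNames above, the constant side of each equality here is always typed as stringTypeInfo:

// Hypothetical: a table partitioned by (ds, hr), with two partitions listed.
List<Map<String, String>> partitions = new ArrayList<>();
partitions.add(Map.of("ds", "2024-01-01", "hr", "00"));
partitions.add(Map.of("ds", "2024-01-01", "hr", "01"));
Map<Integer, List<ExprNodeGenericFuncDesc>> specs = ReplUtils.genPartSpecs(table, partitions);
// Each map has two key-vals, so both predicates land under key 2:
// { 2 -> [ (ds = '2024-01-01') and (hr = '00'), (ds = '2024-01-01') and (hr = '01') ] }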

Aggregations

Test (org.junit.Test): 65
TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo): 44
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector): 36
Text (org.apache.hadoop.io.Text): 34
ArrayList (java.util.ArrayList): 19
HiveConf (org.apache.hadoop.hive.conf.HiveConf): 19
ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc): 17
ExprNodeColumnDesc (org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc): 16
ExprNodeGenericFuncDesc (org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc): 16
VectorizedRowBatch (org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch): 14
ExprNodeConstantDesc (org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc): 14
SmallTableGenerationParameters (org.apache.hadoop.hive.ql.exec.vector.mapjoin.MapJoinTestDescription.SmallTableGenerationParameters): 13
DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo): 13
PrimitiveCategory (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory): 12
PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo): 12
Properties (java.util.Properties): 11
BytesColumnVector (org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector): 11
LongColumnVector (org.apache.hadoop.hive.ql.exec.vector.LongColumnVector): 11
TestVectorizedRowBatch (org.apache.hadoop.hive.ql.exec.vector.TestVectorizedRowBatch): 11
Configuration (org.apache.hadoop.conf.Configuration): 10