Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.stringTypeInfo in project hive by apache.
From the class TestGenericUDFDateFormat, method testWrongDateStr.
@Test
public void testWrongDateStr() throws HiveException {
  GenericUDFDateFormat udf = new GenericUDFDateFormat();
  ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
  Text fmtText = new Text("EEEE");
  ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory
      .getPrimitiveWritableConstantObjectInspector(TypeInfoFactory.stringTypeInfo, fmtText);
  ObjectInspector[] arguments = { valueOI0, valueOI1 };
  udf.initialize(arguments);
  // inputs that cannot be parsed as dates should all return null
  runAndVerifyStr("2016-02-30 10:30:45", fmtText, null, udf);
runAndVerifyStr("2016-02-30 10:30:45", fmtText, null, udf);
runAndVerifyStr("2014-01-32", fmtText, null, udf);
runAndVerifyStr("01/14/2014", fmtText, null, udf);
runAndVerifyStr(null, fmtText, null, udf);
}
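The runAndVerifyStr helper is not shown on this page. A minimal sketch of what such a helper typically looks like in Hive's generic UDF tests, assuming the usual GenericUDF.DeferredJavaObject plumbing and JUnit's assertEquals (the exact signature and message in the Hive source may differ):

private void runAndVerifyStr(String str, Text fmtText, String expResult, GenericUDF udf)
    throws HiveException {
  // wrap the inputs as deferred objects, evaluate the UDF, and compare the (possibly null) result
  DeferredObject valueObj0 = new DeferredJavaObject(str != null ? new Text(str) : null);
  DeferredObject valueObj1 = new DeferredJavaObject(fmtText);
  Text output = (Text) udf.evaluate(new DeferredObject[] { valueObj0, valueObj1 });
  assertEquals("date_format() test", expResult, output != null ? output.toString() : null);
}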
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.stringTypeInfo in project hive by apache.
From the class TestGenericUDFAesDecrypt, method testAesDec256ConstStr.
@Test
public void testAesDec256ConstStr() throws HiveException, NoSuchAlgorithmException {
  int maxKeyLen = Cipher.getMaxAllowedKeyLength("AES");
  // skip if the JCE Unlimited Strength Jurisdiction Policy Files are not installed,
  // since 256-bit AES keys are unavailable in that case
  if (maxKeyLen < 256) {
    return;
  }
  GenericUDFAesDecrypt udf = new GenericUDFAesDecrypt();
  ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableBinaryObjectInspector;
  Text keyWr = new Text("1234567890123456" + "1234567890123456");
  ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory
      .getPrimitiveWritableConstantObjectInspector(TypeInfoFactory.stringTypeInfo, keyWr);
  ObjectInspector[] arguments = { valueOI0, valueOI1 };
  udf.initialize(arguments);
  runAndVerifyStr("nYfCuJeRd5eD60yXDw7WEA==", keyWr, "ABC", udf);
  runAndVerifyStr("mVClVqZ6W4VF6b842FOgCA==", keyWr, "", udf);
  // null ciphertext decrypts to null
  runAndVerifyStr(null, keyWr, null, udf);
}
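Note that valueOI0 here is the writable binary ObjectInspector, so the test helper for aes_decrypt() presumably base64-decodes the ciphertext literal into a BytesWritable before evaluation. A hedged sketch of that variant (the actual Hive helper may use a different Base64 utility and message text):

// Hypothetical variant of runAndVerifyStr for aes_decrypt(): the first argument is
// binary, so the base64 ciphertext string is decoded into a BytesWritable first.
private void runAndVerifyStr(String str, Text keyWr, String expResult, GenericUDFAesDecrypt udf)
    throws HiveException {
  DeferredObject valueObj0 = new DeferredJavaObject(
      str != null ? new BytesWritable(java.util.Base64.getDecoder().decode(str)) : null);
  DeferredObject valueObj1 = new DeferredJavaObject(keyWr);
  Text output = (Text) udf.evaluate(new DeferredObject[] { valueObj0, valueObj1 });
  assertEquals("aes_decrypt() test", expResult, output != null ? output.toString() : null);
}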
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.stringTypeInfo in project hive by apache.
From the class TestGenericUDFAesDecrypt, method testAesDec128ConstStr.
@Test
public void testAesDec128ConstStr() throws HiveException {
  GenericUDFAesDecrypt udf = new GenericUDFAesDecrypt();
  ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableBinaryObjectInspector;
  Text keyWr = new Text("1234567890123456");
  ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory
      .getPrimitiveWritableConstantObjectInspector(TypeInfoFactory.stringTypeInfo, keyWr);
  ObjectInspector[] arguments = { valueOI0, valueOI1 };
  udf.initialize(arguments);
  runAndVerifyStr("y6Ss+zCYObpCbgfWfyNWTw==", keyWr, "ABC", udf);
  runAndVerifyStr("BQGHoM3lqYcsurCRq3PlUw==", keyWr, "", udf);
  // null
  runAndVerifyStr(null, keyWr, null, udf);
}
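For reference, the ciphertext literals can be reproduced outside Hive, assuming aes_encrypt()/aes_decrypt() use the JDK's default "AES" transformation (AES/ECB/PKCS5Padding). A minimal standalone sketch under that assumption:

import java.nio.charset.StandardCharsets;
import java.util.Base64;
import javax.crypto.Cipher;
import javax.crypto.spec.SecretKeySpec;

public class AesCiphertextCheck {
  public static void main(String[] args) throws Exception {
    // 16-byte key => AES-128, matching testAesDec128ConstStr above
    SecretKeySpec key = new SecretKeySpec("1234567890123456".getBytes(StandardCharsets.UTF_8), "AES");
    Cipher cipher = Cipher.getInstance("AES"); // SunJCE default: AES/ECB/PKCS5Padding
    cipher.init(Cipher.ENCRYPT_MODE, key);
    byte[] encrypted = cipher.doFinal("ABC".getBytes(StandardCharsets.UTF_8));
    // Expected to print y6Ss+zCYObpCbgfWfyNWTw== if the assumption about the transformation holds
    System.out.println(Base64.getEncoder().encodeToString(encrypted));
  }
}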
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.stringTypeInfo in project hive by apache.
From the class ShowPartitionsOperation, method getPartitionNames.
// Get partition names, applying any filter, order, or limit that was specified.
private List<String> getPartitionNames(Table tbl) throws HiveException {
  List<String> partNames;
  ExprNodeDesc predicate = desc.getCond();
  if (desc.getPartSpec() != null) {
    List<FieldSchema> fieldSchemas = tbl.getPartitionKeys();
    Map<String, String> colTypes = new HashMap<String, String>();
    for (FieldSchema fs : fieldSchemas) {
      colTypes.put(fs.getName().toLowerCase(), fs.getType());
    }
    for (Map.Entry<String, String> entry : desc.getPartSpec().entrySet()) {
      String type = colTypes.get(entry.getKey().toLowerCase());
      PrimitiveTypeInfo pti = TypeInfoFactory.getPrimitiveTypeInfo(type);
      Object val = entry.getValue();
      if (!pti.equals(TypeInfoFactory.stringTypeInfo)) {
        // convert the string literal from the partition spec to the column's native type
        Object converted = ObjectInspectorConverters.getConverter(
            TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(TypeInfoFactory.stringTypeInfo),
            TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(pti)).convert(val);
        if (converted == null) {
          throw new HiveException("Cannot convert to " + type + " from string, value: " + val);
        }
        val = converted;
      }
      // build "partCol = value" and AND it into the overall predicate
      List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
      children.add(new ExprNodeColumnDesc(pti, entry.getKey().toLowerCase(), null, true));
      children.add(new ExprNodeConstantDesc(pti, val));
      ExprNodeDesc exprNodeDesc = ExprNodeGenericFuncDesc.newInstance(new GenericUDFOPEqual(), children);
      predicate = (predicate == null) ? exprNodeDesc : ExprNodeDescUtils.mergePredicates(exprNodeDesc, predicate);
    }
  }
  partNames = context.getDb().getPartitionNames(tbl, (ExprNodeGenericFuncDesc) predicate,
      desc.getOrder(), desc.getLimit());
  return partNames;
}
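To illustrate the conversion step in isolation, here is a small fragment using the same classes as the method above; the "int" column type and the literal "42" are made up for the example:

// Hypothetical example of the conversion step: turn the partition-spec string "42"
// into the partition column's native type (int here).
PrimitiveTypeInfo pti = TypeInfoFactory.getPrimitiveTypeInfo("int");
Object converted = ObjectInspectorConverters.getConverter(
    TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(TypeInfoFactory.stringTypeInfo),
    TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(pti)).convert("42");
// converted is the Integer 42; a non-numeric value would typically convert to null,
// which getPartitionNames reports as a HiveException.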
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.stringTypeInfo in project hive by apache.
From the class ReplUtils, method genPartSpecs.
public static Map<Integer, List<ExprNodeGenericFuncDesc>> genPartSpecs(
    Table table, List<Map<String, String>> partitions) throws SemanticException {
  Map<Integer, List<ExprNodeGenericFuncDesc>> partSpecs = new HashMap<>();
  int partPrefixLength = 0;
  if (partitions.size() > 0) {
    // pick the length of the first partition spec; all listed partitions are expected
    // to have the same number of key-value pairs
    partPrefixLength = partitions.get(0).size();
  }
  List<ExprNodeGenericFuncDesc> partitionDesc = new ArrayList<>();
  for (Map<String, String> ptn : partitions) {
    // convert each key-value map into an equality predicate, AND-ed together
    ExprNodeGenericFuncDesc expr = null;
    for (Map.Entry<String, String> kvp : ptn.entrySet()) {
      String key = kvp.getKey();
      Object val = kvp.getValue();
      String type = table.getPartColByName(key).getType();
      PrimitiveTypeInfo pti = TypeInfoFactory.getPrimitiveTypeInfo(type);
      ExprNodeColumnDesc column = new ExprNodeColumnDesc(pti, key, null, true);
      ExprNodeGenericFuncDesc op = PartitionUtils.makeBinaryPredicate("=", column,
          new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, val));
      expr = (expr == null) ? op : PartitionUtils.makeBinaryPredicate("and", expr, op);
    }
    if (expr != null) {
      partitionDesc.add(expr);
    }
  }
  if (partitionDesc.size() > 0) {
    partSpecs.put(partPrefixLength, partitionDesc);
  }
  return partSpecs;
}
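As an illustration of what the inner loop builds, the following fragment constructs the expression that would result from the hypothetical partition spec {year=2021, month=05}; the column names and values are made up, and PartitionUtils.makeBinaryPredicate is used exactly as in the method above:

// Hypothetical illustration of the expression built for {year=2021, month=05},
// assuming both partition columns are strings. May throw SemanticException.
ExprNodeColumnDesc yearCol = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "year", null, true);
ExprNodeGenericFuncDesc yearEq = PartitionUtils.makeBinaryPredicate("=", yearCol,
    new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "2021"));
ExprNodeColumnDesc monthCol = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "month", null, true);
ExprNodeGenericFuncDesc monthEq = PartitionUtils.makeBinaryPredicate("=", monthCol,
    new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "05"));
ExprNodeGenericFuncDesc expr = PartitionUtils.makeBinaryPredicate("and", yearEq, monthEq);
// Equivalent to the predicate: (year = '2021') and (month = '05')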