Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.stringTypeInfo in project hive by apache: the class KeyWrapperFactory, method getKeyWrapper.
public KeyWrapper getKeyWrapper() {
  if (keyFields.length == 1
      && TypeInfoUtils.getTypeInfoFromObjectInspector(keyObjectInspectors[0])
          .equals(TypeInfoFactory.stringTypeInfo)) {
    assert (TypeInfoUtils.getTypeInfoFromObjectInspector(currentKeyObjectInspectors[0])
        .equals(TypeInfoFactory.stringTypeInfo));
    soi_new = (StringObjectInspector) keyObjectInspectors[0];
    soi_copy = (StringObjectInspector) currentKeyObjectInspectors[0];
    return new TextKeyWrapper(false);
  } else {
    currentStructEqualComparer =
        new ListObjectsEqualComparer(currentKeyObjectInspectors, currentKeyObjectInspectors);
    newKeyStructEqualComparer =
        new ListObjectsEqualComparer(currentKeyObjectInspectors, keyObjectInspectors);
    return new ListKeyWrapper(false);
  }
}
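The fast-path decision above boils down to a single TypeInfo equality check. A minimal standalone sketch of that check, assuming a Hive classpath (the class and method names StringKeyCheck and isSingleStringKey are illustrative, not part of Hive):

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class StringKeyCheck {
  // Same predicate as the factory: exactly one key column, and its
  // ObjectInspector describes a string.
  public static boolean isSingleStringKey(ObjectInspector[] keyObjectInspectors) {
    return keyObjectInspectors.length == 1
        && TypeInfoUtils.getTypeInfoFromObjectInspector(keyObjectInspectors[0])
            .equals(TypeInfoFactory.stringTypeInfo);
  }

  public static void main(String[] args) {
    ObjectInspector stringOI = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
    // Prints true: a single string key would take the TextKeyWrapper path.
    System.out.println(isSingleStringKey(new ObjectInspector[] { stringOI }));
  }
}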
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.stringTypeInfo in project hive by apache: the class NumericOpMethodResolver, method getEvalMethod.
/*
 * (non-Javadoc)
 *
 * @see
 * org.apache.hadoop.hive.ql.exec.UDFMethodResolver#getEvalMethod(java.util
 * .List)
 */
@Override
public Method getEvalMethod(List<TypeInfo> argTypeInfos) throws UDFArgumentException {
  assert (argTypeInfos.size() == 2);
  List<TypeInfo> pTypeInfos = null;
  List<TypeInfo> modArgTypeInfos = new ArrayList<TypeInfo>();
  // If either argument is a string, convert both sides to double or decimal:
  // a number in string form should always be convertible into either of those.
  if (argTypeInfos.get(0).equals(TypeInfoFactory.stringTypeInfo)
      || argTypeInfos.get(1).equals(TypeInfoFactory.stringTypeInfo)) {
    // Default is double, but if one of the sides is already decimal we
    // complete the operation in that type.
    if (argTypeInfos.get(0).equals(TypeInfoFactory.decimalTypeInfo)
        || argTypeInfos.get(1).equals(TypeInfoFactory.decimalTypeInfo)) {
      modArgTypeInfos.add(TypeInfoFactory.decimalTypeInfo);
      modArgTypeInfos.add(TypeInfoFactory.decimalTypeInfo);
    } else {
      modArgTypeInfos.add(TypeInfoFactory.doubleTypeInfo);
      modArgTypeInfos.add(TypeInfoFactory.doubleTypeInfo);
    }
  } else {
    // Otherwise keep the argument types (mapping void, i.e. a NULL literal,
    // to byte) and let getCommonClass below resolve them to a common type T.
    for (int i = 0; i < 2; i++) {
      if (argTypeInfos.get(i).equals(TypeInfoFactory.voidTypeInfo)) {
        modArgTypeInfos.add(TypeInfoFactory.byteTypeInfo);
      } else {
        modArgTypeInfos.add(argTypeInfos.get(i));
      }
    }
  }
  TypeInfo commonType = FunctionRegistry.getCommonClass(modArgTypeInfos.get(0), modArgTypeInfos.get(1));
  if (commonType == null) {
    throw new UDFArgumentException("Unable to find a common class between types "
        + modArgTypeInfos.get(0).getTypeName() + " and " + modArgTypeInfos.get(1).getTypeName());
  }
  pTypeInfos = new ArrayList<TypeInfo>();
  pTypeInfos.add(commonType);
  pTypeInfos.add(commonType);
  Method udfMethod = null;
  for (Method m : Arrays.asList(udfClass.getMethods())) {
    if (m.getName().equals("evaluate")) {
      List<TypeInfo> argumentTypeInfos = TypeInfoUtils.getParameterTypeInfos(m, pTypeInfos.size());
      if (argumentTypeInfos == null) {
        // null means the method does not accept the number of arguments passed.
        continue;
      }
      boolean match = (argumentTypeInfos.size() == pTypeInfos.size());
      for (int i = 0; i < pTypeInfos.size() && match; i++) {
        TypeInfo accepted = argumentTypeInfos.get(i);
        if (!accepted.accept(pTypeInfos.get(i))) {
          match = false;
        }
      }
      if (match) {
        if (udfMethod != null) {
          throw new AmbiguousMethodException(udfClass, argTypeInfos,
              Arrays.asList(new Method[] { udfMethod, m }));
        } else {
          udfMethod = m;
        }
      }
    }
  }
  if (udfMethod == null) {
    throw new NoMatchingMethodException(udfClass, argTypeInfos, null);
  }
  return udfMethod;
}
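To see the string-promotion rule in isolation, here is a hedged sketch that applies the same mapping as the resolver's first branch to a pair of argument types (NumericPromotionSketch and promote are invented names, not Hive API):

import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class NumericPromotionSketch {
  // Mirrors the resolver's first branch: any string operand promotes both
  // sides to double, unless one side is already decimal.
  static List<TypeInfo> promote(TypeInfo left, TypeInfo right) {
    List<TypeInfo> out = new ArrayList<TypeInfo>();
    if (left.equals(TypeInfoFactory.stringTypeInfo)
        || right.equals(TypeInfoFactory.stringTypeInfo)) {
      TypeInfo target = (left.equals(TypeInfoFactory.decimalTypeInfo)
          || right.equals(TypeInfoFactory.decimalTypeInfo))
          ? TypeInfoFactory.decimalTypeInfo : TypeInfoFactory.doubleTypeInfo;
      out.add(target);
      out.add(target);
    } else {
      out.add(left);
      out.add(right);
    }
    return out;
  }

  public static void main(String[] args) {
    // string + int: both sides become double before the method lookup.
    System.out.println(promote(TypeInfoFactory.stringTypeInfo, TypeInfoFactory.intTypeInfo));
  }
}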
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.stringTypeInfo in project hive by apache: the class MapJoinMultiKeyBenchBase, method doSetup.
public void doSetup(VectorMapJoinVariation vectorMapJoinVariation,
    MapJoinTestImplementation mapJoinImplementation) throws Exception {
  HiveConf hiveConf = new HiveConf();
  long seed = 2543;
  int rowCount = 100000; // 100,000 rows.
  String[] bigTableColumnNames = new String[] { "b1", "b2", "b3" };
  TypeInfo[] bigTableTypeInfos = new TypeInfo[] {
      TypeInfoFactory.intTypeInfo, TypeInfoFactory.longTypeInfo, TypeInfoFactory.stringTypeInfo };
  int[] bigTableKeyColumnNums = new int[] { 0, 1, 2 };
  String[] smallTableValueColumnNames = new String[] { "sv1" };
  TypeInfo[] smallTableValueTypeInfos = new TypeInfo[] { TypeInfoFactory.stringTypeInfo };
  int[] bigTableRetainColumnNums = new int[] { 0, 1, 2 };
  int[] smallTableRetainKeyColumnNums = new int[] {};
  int[] smallTableRetainValueColumnNums = new int[] { 0 };
  SmallTableGenerationParameters smallTableGenerationParameters = new SmallTableGenerationParameters();
  smallTableGenerationParameters.setValueOption(ValueOption.ONLY_ONE);
  setupMapJoin(hiveConf, seed, rowCount, vectorMapJoinVariation, mapJoinImplementation,
      bigTableColumnNames, bigTableTypeInfos, bigTableKeyColumnNums,
      smallTableValueColumnNames, smallTableValueTypeInfos,
      bigTableRetainColumnNums, smallTableRetainKeyColumnNums,
      smallTableRetainValueColumnNums, smallTableGenerationParameters);
}
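One note on the TypeInfoFactory fields used above: the same primitive TypeInfos can also be obtained by type name, which is convenient when column types come from configuration rather than code. A minimal sketch (TypeInfoLookupDemo is an illustrative name):

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class TypeInfoLookupDemo {
  public static void main(String[] args) {
    TypeInfo byField = TypeInfoFactory.stringTypeInfo;
    TypeInfo byName = TypeInfoFactory.getPrimitiveTypeInfo("string");
    // Primitive TypeInfos are cached by the factory, so the two compare equal.
    System.out.println(byField.equals(byName)); // true
  }
}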
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.stringTypeInfo in project hive by apache: the class TestOperators, method testScriptOperator.
@Test
public void testScriptOperator() throws Throwable {
  try {
    System.out.println("Testing Script Operator");
    // First projected column: col1 as-is.
    ExprNodeDesc exprDesc1 = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "col1", "", false);
    // Second projected column: concat(col0, "1").
    ExprNodeDesc expr1 = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "col0", "", false);
    ExprNodeDesc expr2 = new ExprNodeConstantDesc("1");
    ExprNodeDesc exprDesc2 = ExprNodeTypeCheck.getExprNodeDefaultExprProcessor()
        .getFuncExprNodeDesc("concat", expr1, expr2);
    // Select operator to project these two columns.
    ArrayList<ExprNodeDesc> earr = new ArrayList<ExprNodeDesc>();
    earr.add(exprDesc1);
    earr.add(exprDesc2);
    ArrayList<String> outputCols = new ArrayList<String>();
    for (int i = 0; i < earr.size(); i++) {
      outputCols.add("_col" + i);
    }
    SelectDesc selectCtx = new SelectDesc(earr, outputCols);
    Operator<SelectDesc> op = OperatorFactory.get(new CompilationOpContext(), SelectDesc.class);
    op.setConf(selectCtx);
    // Script operator ("cat") to echo the output of the select.
    TableDesc scriptOutput = PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "a,b");
    TableDesc scriptInput = PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "a,b");
    ScriptDesc sd = new ScriptDesc("cat", scriptOutput, TextRecordWriter.class,
        scriptInput, TextRecordReader.class, TextRecordReader.class,
        PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "key"));
    Operator<ScriptDesc> sop = OperatorFactory.getAndMakeChild(sd, op);
    // Collect operator to observe the output of the script.
    CollectDesc cd = new CollectDesc(Integer.valueOf(10));
    CollectOperator cdop = (CollectOperator) OperatorFactory.getAndMakeChild(cd, sop);
    op.initialize(new JobConf(TestOperators.class), new ObjectInspector[] { r[0].oi });
    // Evaluate on five rows.
    for (int i = 0; i < 5; i++) {
      op.process(r[i].o, 0);
    }
    op.close(false);
    InspectableObject io = new InspectableObject();
    for (int i = 0; i < 5; i++) {
      cdop.retrieve(io);
      System.out.println("[" + i + "] io.o=" + io.o);
      System.out.println("[" + i + "] io.oi=" + io.oi);
      StructObjectInspector soi = (StructObjectInspector) io.oi;
      assert (soi != null);
      StructField a = soi.getStructFieldRef("a");
      StructField b = soi.getStructFieldRef("b");
      assertEquals("" + (i + 1), ((PrimitiveObjectInspector) a.getFieldObjectInspector())
          .getPrimitiveJavaObject(soi.getStructFieldData(io.o, a)));
      assertEquals((i) + "1", ((PrimitiveObjectInspector) b.getFieldObjectInspector())
          .getPrimitiveJavaObject(soi.getStructFieldData(io.o, b)));
    }
    System.out.println("Script Operator ok");
  } catch (Throwable e) {
    e.printStackTrace();
    throw e;
  }
}
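The reusable piece of this test is the expression setup at the top. A hedged, minimal sketch of just that part (ExprSketch, "mycol", and "t" are placeholder names):

import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class ExprSketch {
  public static void main(String[] args) {
    // A string-typed column reference, as in the test above.
    ExprNodeDesc col = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "mycol", "t", false);
    // A string constant; its TypeInfo is inferred from the Java String value.
    ExprNodeDesc one = new ExprNodeConstantDesc("1");
    System.out.println(col.getExprString() + ", " + one.getExprString());
  }
}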
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.stringTypeInfo in project hive by apache: the class TestGenericUDFRegexp, method testEmptyConstant.
@Test
public void testEmptyConstant() throws HiveException {
  GenericUDFRegExp udf = new GenericUDFRegExp();
  ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
  Text regexText = new Text("");
  ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory
      .getPrimitiveWritableConstantObjectInspector(TypeInfoFactory.stringTypeInfo, regexText);
  ObjectInspector[] arguments = { valueOI0, valueOI1 };
  udf.initialize(arguments);
  // An empty regex never matches (and should log one WARN message).
  runAndVerifyConst("foo", regexText, false, udf);
  runAndVerifyConst("bar", regexText, false, udf);
  // A null input yields a null result.
  runAndVerifyConst(null, regexText, null, udf);
}
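The constant-ObjectInspector pattern used for the regex argument also works outside a test harness. A hedged sketch (ConstantStringOIDemo is an invented name; "a.*b" is an arbitrary pattern):

import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.io.Text;

public class ConstantStringOIDemo {
  public static void main(String[] args) {
    ConstantObjectInspector oi =
        PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
            TypeInfoFactory.stringTypeInfo, new Text("a.*b"));
    // The constant value is retrievable directly, without a row object.
    System.out.println(oi.getWritableConstantValue()); // a.*b
  }
}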