Usage of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category.PRIMITIVE in project hive by apache — from class GenericUDFFormatNumber, method initialize:
/**
 * Validates the two arguments of FORMAT_NUMBER(X, D or F) and returns the
 * output inspector (always a writable string). The first argument must be a
 * numeric primitive; the second is either an integer digit count or a
 * constant format string, in which case the pattern is applied once here.
 */
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  if (arguments.length != 2) {
    throw new UDFArgumentLengthException("The function FORMAT_NUMBER(X, D or F) needs two arguments.");
  }
  // Both arguments must be primitives before narrowing to concrete categories.
  if (arguments[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
    throw new UDFArgumentTypeException(0, "Argument 1 of function FORMAT_NUMBER must be \""
        + serdeConstants.TINYINT_TYPE_NAME + "\" or \"" + serdeConstants.SMALLINT_TYPE_NAME
        + "\" or \"" + serdeConstants.INT_TYPE_NAME + "\" or \"" + serdeConstants.BIGINT_TYPE_NAME
        + "\" or \"" + serdeConstants.DOUBLE_TYPE_NAME + "\" or \"" + serdeConstants.FLOAT_TYPE_NAME
        + "\" or \"" + serdeConstants.DECIMAL_TYPE_NAME + "\", but \""
        + arguments[0].getTypeName() + "\" was found.");
  }
  if (arguments[1].getCategory() != ObjectInspector.Category.PRIMITIVE) {
    throw new UDFArgumentTypeException(1, "Argument 2 of function FORMAT_NUMBER must be \""
        + serdeConstants.TINYINT_TYPE_NAME + "\" or \"" + serdeConstants.SMALLINT_TYPE_NAME
        + "\" or \"" + serdeConstants.INT_TYPE_NAME + "\" or \"" + serdeConstants.BIGINT_TYPE_NAME
        + "\" or \"" + serdeConstants.STRING_TYPE_NAME + "\", but \""
        + arguments[1].getTypeName() + "\" was found.");
  }
  PrimitiveObjectInspector xOI = (PrimitiveObjectInspector) arguments[0];
  PrimitiveObjectInspector dOI = (PrimitiveObjectInspector) arguments[1];
  // X: any numeric primitive (VOID tolerated for NULL literals).
  switch (xOI.getPrimitiveCategory()) {
    case VOID:
    case BYTE:
    case SHORT:
    case INT:
    case LONG:
    case DOUBLE:
    case FLOAT:
    case DECIMAL:
      break;
    default:
      throw new UDFArgumentTypeException(0, "Argument 1 of function FORMAT_NUMBER must be \""
          + serdeConstants.TINYINT_TYPE_NAME + "\" or \"" + serdeConstants.SMALLINT_TYPE_NAME
          + "\" or \"" + serdeConstants.INT_TYPE_NAME + "\" or \"" + serdeConstants.BIGINT_TYPE_NAME
          + "\" or \"" + serdeConstants.DOUBLE_TYPE_NAME + "\" or \"" + serdeConstants.FLOAT_TYPE_NAME
          + "\" or \"" + serdeConstants.DECIMAL_TYPE_NAME + "\", but \""
          + arguments[0].getTypeName() + "\" was found.");
  }
  dType = dOI.getPrimitiveCategory();
  switch (dType) {
    case VOID:
    case BYTE:
    case SHORT:
    case INT:
    case LONG:
      // Integer flavor: D is a digit count handled at evaluate time.
      break;
    case STRING:
      // Format-string flavor: the pattern must be a compile-time constant so
      // it can be validated and applied to numberFormat once, here.
      if (!(arguments[1] instanceof ConstantObjectInspector)) {
        throw new UDFArgumentTypeException(1, "Format string passed must be a constant STRING." + arguments[1].toString());
      }
      ConstantObjectInspector constOI = (ConstantObjectInspector) arguments[1];
      String pattern = constOI.getWritableConstantValue().toString();
      // Round-trip through DecimalFormat to fail fast on an invalid pattern.
      DecimalFormat userFormat = new DecimalFormat(pattern);
      numberFormat.applyPattern(userFormat.toPattern());
      break;
    default:
      throw new UDFArgumentTypeException(1, "Argument 2 of function FORMAT_NUMBER must be \""
          + serdeConstants.TINYINT_TYPE_NAME + "\" or \"" + serdeConstants.SMALLINT_TYPE_NAME
          + "\" or \"" + serdeConstants.INT_TYPE_NAME + "\" or \"" + serdeConstants.BIGINT_TYPE_NAME
          + "\" or \"" + serdeConstants.STRING_TYPE_NAME + "\", but \""
          + arguments[1].getTypeName() + "\" was found.");
  }
  argumentOIs = arguments;
  return PrimitiveObjectInspectorFactory.writableStringObjectInspector;
}
Usage of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category.PRIMITIVE in project hive by apache — from class GenericUDFCharacterLength, method initialize:
/**
 * Validates the single argument of CHARACTER_LENGTH and returns a writable
 * int inspector. Accepts STRING/CHAR/VARCHAR (character count) and BINARY
 * (byte count), distinguished via the isInputString flag used at evaluate time.
 */
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  if (arguments.length != 1) {
    throw new UDFArgumentLengthException("CHARACTER_LENGTH requires 1 argument, got " + arguments.length);
  }
  if (arguments[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
    // BUG FIX: the original read the field argumentOI here, but argumentOI is
    // only assigned below — on this error path it was still unset (NPE risk /
    // stale type name). Report the actual offending argument instead.
    throw new UDFArgumentException("CHARACTER_LENGTH only takes primitive types, got " + arguments[0].getTypeName());
  }
  argumentOI = (PrimitiveObjectInspector) arguments[0];
  stringConverter = new PrimitiveObjectInspectorConverter.StringConverter(argumentOI);
  PrimitiveObjectInspector.PrimitiveCategory inputType = argumentOI.getPrimitiveCategory();
  switch (inputType) {
    case CHAR:
    case VARCHAR:
    case STRING:
      isInputString = true;
      break;
    case BINARY:
      isInputString = false;
      break;
    default:
      throw new UDFArgumentException(" CHARACTER_LENGTH() only takes STRING/CHAR/VARCHAR/BINARY types as first argument, got " + inputType);
  }
  return PrimitiveObjectInspectorFactory.writableIntObjectInspector;
}
Usage of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category.PRIMITIVE in project hive by apache — from class TestVectorTimestampExtract, method doRowCastTest:
/**
 * Evaluates the extract expression row-by-row (non-vectorized reference path)
 * and stores each result, copied to a standard writable int, into
 * resultObjects. Returns false when the evaluator cannot be initialized for
 * the given row inspector, signalling the caller to skip this combination.
 */
private boolean doRowCastTest(TypeInfo dateTimeStringTypeInfo, List<String> columns, List<ExprNodeDesc> children, ExprNodeGenericFuncDesc exprDesc, Object[][] randomRows, ObjectInspector rowInspector, Object[] resultObjects) throws Exception {
  HiveConf hiveConf = new HiveConf();
  ExprNodeEvaluator evaluator = ExprNodeEvaluatorFactory.get(exprDesc, hiveConf);
  try {
    evaluator.initialize(rowInspector);
  } catch (HiveException e) {
    // Initialization failure: this type/expression pairing is unsupported here.
    return false;
  }
  ObjectInspector intWritableOI = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(TypeInfoFactory.intTypeInfo);
  PrimitiveCategory category = ((PrimitiveTypeInfo) dateTimeStringTypeInfo).getPrimitiveCategory();
  for (int i = 0; i < randomRows.length; i++) {
    Object value = randomRows[i][0];
    Object result;
    if (category == PrimitiveCategory.TIMESTAMP) {
      result = evaluator.evaluate((TimestampWritableV2) value);
    } else if (category == PrimitiveCategory.DATE) {
      result = evaluator.evaluate((DateWritableV2) value);
    } else if (category == PrimitiveCategory.STRING) {
      // String rows may arrive as null, a plain String, or an existing Text.
      Text text;
      if (value == null) {
        text = null;
      } else if (value instanceof String) {
        text = new Text();
        text.set((String) value);
      } else {
        text = (Text) value;
      }
      result = evaluator.evaluate(text);
    } else {
      throw new RuntimeException("Unexpected date timestamp string primitive category " + category);
    }
    // Copy to a detached standard writable so later evaluations cannot clobber it.
    resultObjects[i] = ObjectInspectorUtils.copyToStandardObject(result, intWritableOI, ObjectInspectorCopyOption.WRITABLE);
  }
  return true;
}
Usage of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category.PRIMITIVE in project hive by apache — from class TestParquetSerDe, method testParquetHiveSerDe:
/**
 * Round-trip test for ParquetHiveSerDe: builds one row containing every
 * supported primitive, a binary, a string-keyed map, and a string array,
 * then deserializes and re-serializes it via deserializeAndSerializeLazySimple.
 *
 * @throws Throwable rethrown after printing, so the test still fails visibly.
 */
@Test
public void testParquetHiveSerDe() throws Throwable {
  try {
    // Create the SerDe
    System.out.println("test: testParquetHiveSerDe");
    final ParquetHiveSerDe serDe = new ParquetHiveSerDe();
    final Configuration conf = new Configuration();
    final Properties tbl = createProperties();
    serDe.initialize(conf, tbl, null);
    // Row data: slots 0-6 are primitives, 7 is a map, 8 is an array.
    final Writable[] arr = new Writable[9];
    arr[0] = new ByteWritable((byte) 123);
    arr[1] = new ShortWritable((short) 456);
    arr[2] = new IntWritable(789);
    // Fixed: use an uppercase 'L' long suffix — lowercase 'l' reads as the digit 1.
    arr[3] = new LongWritable(1000L);
    arr[4] = new DoubleWritable(5.3);
    arr[5] = new BytesWritable("hive and hadoop and parquet. Big family.".getBytes("UTF-8"));
    arr[6] = new BytesWritable("parquetSerde binary".getBytes("UTF-8"));
    // Map of three key_i -> i entries; each entry is a two-element key/value pair.
    final Writable[] map = new Writable[3];
    for (int i = 0; i < 3; ++i) {
      final Writable[] pair = new Writable[2];
      pair[0] = new BytesWritable(("key_" + i).getBytes("UTF-8"));
      pair[1] = new IntWritable(i);
      map[i] = new ArrayWritable(Writable.class, pair);
    }
    arr[7] = new ArrayWritable(Writable.class, map);
    // Array of five elem_i strings.
    final Writable[] array = new Writable[5];
    for (int i = 0; i < 5; ++i) {
      array[i] = new BytesWritable(("elem_" + i).getBytes("UTF-8"));
    }
    arr[8] = new ArrayWritable(Writable.class, array);
    final ArrayWritable arrWritable = new ArrayWritable(Writable.class, arr);
    // Test
    deserializeAndSerializeLazySimple(serDe, arrWritable);
    System.out.println("test: testParquetHiveSerDe - OK");
  } catch (final Throwable e) {
    e.printStackTrace();
    throw e;
  }
}
Usage of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category.PRIMITIVE in project hive by apache — from class ExprNodeDescExprFactory, method toExpr:
/**
 * {@inheritDoc}
 */
@Override
protected ExprNodeDesc toExpr(ColumnInfo colInfo, RowResolver rowResolver, int offset) throws SemanticException {
  ObjectInspector inspector = colInfo.getObjectInspector();
  // A constant column folds to a constant descriptor, but only when every
  // component type involved is primitive; the checks below mirror that rule
  // for scalar, list, map, and struct inspectors in turn.
  if (inspector instanceof ConstantObjectInspector) {
    if (inspector instanceof PrimitiveObjectInspector) {
      return toPrimitiveConstDesc(colInfo, inspector);
    }
    if (inspector instanceof ListObjectInspector) {
      ObjectInspector elementOI = ((ListObjectInspector) inspector).getListElementObjectInspector();
      if (elementOI instanceof PrimitiveObjectInspector) {
        return toListConstDesc(colInfo, inspector, elementOI);
      }
    }
    if (inspector instanceof MapObjectInspector) {
      ObjectInspector keyOI = ((MapObjectInspector) inspector).getMapKeyObjectInspector();
      ObjectInspector valueOI = ((MapObjectInspector) inspector).getMapValueObjectInspector();
      if (keyOI instanceof PrimitiveObjectInspector && valueOI instanceof PrimitiveObjectInspector) {
        return toMapConstDesc(colInfo, inspector, keyOI, valueOI);
      }
    }
    if (inspector instanceof StructObjectInspector) {
      List<? extends StructField> fields = ((StructObjectInspector) inspector).getAllStructFieldRefs();
      boolean allPrimitive = true;
      for (StructField field : fields) {
        allPrimitive = allPrimitive && field.getFieldObjectInspector() instanceof PrimitiveObjectInspector;
      }
      if (allPrimitive) {
        return toStructConstDesc(colInfo, inspector, fields);
      }
    }
  }
  // Non-constant, or constant with non-primitive parts: fall back to a column reference.
  ExprNodeColumnDesc column = new ExprNodeColumnDesc(colInfo);
  column.setSkewedCol(colInfo.isSkewedCol());
  return column;
}
Aggregations