Use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector in project hive by apache.
The class GenericUDFToUnixTimeStamp, method initializeInput.
protected void initializeInput(ObjectInspector[] arguments) throws UDFArgumentException {
  if (arguments.length < 1) {
    throw new UDFArgumentLengthException("The function " + getName().toUpperCase()
        + " requires at least one argument");
  }
  // Every argument must be a primitive type.
  for (ObjectInspector argument : arguments) {
    if (argument.getCategory() != Category.PRIMITIVE) {
      throw new UDFArgumentException(getName().toUpperCase()
          + " only takes string/date/timestamp types, got " + argument.getTypeName());
    }
  }
  PrimitiveObjectInspector arg1OI = (PrimitiveObjectInspector) arguments[0];
  switch (arg1OI.getPrimitiveCategory()) {
    case CHAR:
    case VARCHAR:
    case STRING:
      inputTextConverter = ObjectInspectorConverters.getConverter(arg1OI,
          PrimitiveObjectInspectorFactory.javaStringObjectInspector);
      // An optional second argument supplies the time pattern and must be a string.
      if (arguments.length > 1) {
        PrimitiveObjectInspector arg2OI = (PrimitiveObjectInspector) arguments[1];
        if (PrimitiveObjectInspectorUtils.getPrimitiveGrouping(arg2OI.getPrimitiveCategory())
            != PrimitiveGrouping.STRING_GROUP) {
          throw new UDFArgumentException("The time pattern for " + getName().toUpperCase()
              + " should be string type");
        }
        patternConverter = ObjectInspectorConverters.getConverter(arg2OI,
            PrimitiveObjectInspectorFactory.javaStringObjectInspector);
      }
      break;
    case DATE:
      inputDateOI = (DateObjectInspector) arguments[0];
      break;
    case TIMESTAMP:
      inputTimestampOI = (TimestampObjectInspector) arguments[0];
      break;
    default:
      throw new UDFArgumentException("The function " + getName().toUpperCase()
          + " takes only string/date/timestamp types");
  }
}
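The two converters cached here normalize the value and the optional pattern before evaluation. A minimal sketch of driving this setup directly, assuming a bare UDF instance outside a query plan (not Hive's own test harness):

// Hypothetical test-style setup: exercises the STRING branch with a value
// argument plus a pattern argument; initialize() delegates to initializeInput().
GenericUDFToUnixTimeStamp udf = new GenericUDFToUnixTimeStamp();
ObjectInspector[] args = new ObjectInspector[] {
    PrimitiveObjectInspectorFactory.javaStringObjectInspector,   // value to parse
    PrimitiveObjectInspectorFactory.javaStringObjectInspector }; // time pattern
ObjectInspector resultOI = udf.initialize(args);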
Use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector in project hive by apache.
The class GenericUDFToVarchar, method initialize.
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  if (arguments.length != 1) {
    throw new UDFArgumentException("VARCHAR cast requires a value argument");
  }
  try {
    argumentOI = (PrimitiveObjectInspector) arguments[0];
  } catch (ClassCastException e) {
    throw new UDFArgumentException("The function VARCHAR takes only primitive types");
  }
  // Check if this UDF has been provided with type params for the output varchar type
  SettableHiveVarcharObjectInspector outputOI = (SettableHiveVarcharObjectInspector)
      PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(typeInfo);
  converter = new HiveVarcharConverter(argumentOI, outputOI);
  return outputOI;
}
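Because typeInfo carries the target varchar length, a caller sets it before initialize runs. A hedged sketch of a varchar(10) cast setup, assuming the SettableUDF.setTypeInfo hook this class implements; the surrounding harness is hypothetical:

// Sketch: configure the target type, then initialize against a string input.
GenericUDFToVarchar udf = new GenericUDFToVarchar();
udf.setTypeInfo(TypeInfoFactory.getVarcharTypeInfo(10)); // output type params
ObjectInspector outOI = udf.initialize(new ObjectInspector[] {
    PrimitiveObjectInspectorFactory.javaStringObjectInspector });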
Use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector in project hive by apache.
The class MatchPath, method createSelectListOI.
protected static StructObjectInspector createSelectListOI(MatchPath evaluator, PTFInputDef inpDef) {
  StructObjectInspector inOI = inpDef.getOutputShape().getOI();
  ArrayList<String> inputColumnNames = new ArrayList<String>();
  ArrayList<String> selectListNames = new ArrayList<String>();
  ArrayList<ObjectInspector> fieldOIs = new ArrayList<ObjectInspector>();
  for (StructField f : inOI.getAllStructFieldRefs()) {
    String inputColName = evaluator.inputColumnNamesMap.get(f.getFieldName());
    if (inputColName != null) {
      inputColumnNames.add(inputColName);
      selectListNames.add(f.getFieldName());
      fieldOIs.add(f.getFieldObjectInspector());
    }
  }
  // The path attribute is a list of structs, one struct per row on the matched path.
  StandardListObjectInspector pathAttrOI = ObjectInspectorFactory.getStandardListObjectInspector(
      ObjectInspectorFactory.getStandardStructObjectInspector(inputColumnNames, fieldOIs));
  ArrayList<ObjectInspector> selectFieldOIs = new ArrayList<ObjectInspector>();
  selectFieldOIs.addAll(fieldOIs);
  selectFieldOIs.add(pathAttrOI);
  selectListNames.add(MatchPath.PATHATTR_NAME);
  return ObjectInspectorFactory.getStandardStructObjectInspector(selectListNames, selectFieldOIs);
}
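Two factory calls carry the whole construction: a standard struct OI describes one row on the matched path, and a standard list OI wraps it for the path attribute column. A standalone illustration with made-up field names:

// Hypothetical field names; same factory calls as pathAttrOI above.
List<String> names = Arrays.asList("sym", "price");
List<ObjectInspector> ois = Arrays.asList(
    (ObjectInspector) PrimitiveObjectInspectorFactory.javaStringObjectInspector,
    PrimitiveObjectInspectorFactory.javaDoubleObjectInspector);
StructObjectInspector rowOI =
    ObjectInspectorFactory.getStandardStructObjectInspector(names, ois);
StandardListObjectInspector pathOI =
    ObjectInspectorFactory.getStandardListObjectInspector(rowOI);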
Use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector in project hive by apache.
The class VectorRandomRowSource, method getWritableObject.
public static Object getWritableObject(int column, Object object,
    List<ObjectInspector> primitiveObjectInspectorList,
    PrimitiveCategory[] primitiveCategories, PrimitiveTypeInfo[] primitiveTypeInfos) {
  ObjectInspector objectInspector = primitiveObjectInspectorList.get(column);
  PrimitiveCategory primitiveCategory = primitiveCategories[column];
  PrimitiveTypeInfo primitiveTypeInfo = primitiveTypeInfos[column];
  switch (primitiveCategory) {
    case BOOLEAN:
      return ((WritableBooleanObjectInspector) objectInspector).create((boolean) object);
    case BYTE:
      return ((WritableByteObjectInspector) objectInspector).create((byte) object);
    case SHORT:
      return ((WritableShortObjectInspector) objectInspector).create((short) object);
    case INT:
      return ((WritableIntObjectInspector) objectInspector).create((int) object);
    case LONG:
      return ((WritableLongObjectInspector) objectInspector).create((long) object);
    case DATE:
      return ((WritableDateObjectInspector) objectInspector).create((Date) object);
    case FLOAT:
      return ((WritableFloatObjectInspector) objectInspector).create((float) object);
    case DOUBLE:
      return ((WritableDoubleObjectInspector) objectInspector).create((double) object);
    case STRING:
      return ((WritableStringObjectInspector) objectInspector).create((String) object);
    case CHAR: {
      // CHAR carries a max length, so the inspector is built from the type info.
      WritableHiveCharObjectInspector writableCharObjectInspector =
          new WritableHiveCharObjectInspector((CharTypeInfo) primitiveTypeInfo);
      return writableCharObjectInspector.create((HiveChar) object);
    }
    case VARCHAR: {
      WritableHiveVarcharObjectInspector writableVarcharObjectInspector =
          new WritableHiveVarcharObjectInspector((VarcharTypeInfo) primitiveTypeInfo);
      return writableVarcharObjectInspector.create((HiveVarchar) object);
    }
    case BINARY:
      return PrimitiveObjectInspectorFactory.writableBinaryObjectInspector.create((byte[]) object);
    case TIMESTAMP:
      return ((WritableTimestampObjectInspector) objectInspector).create((Timestamp) object);
    case INTERVAL_YEAR_MONTH:
      return ((WritableHiveIntervalYearMonthObjectInspector) objectInspector)
          .create((HiveIntervalYearMonth) object);
    case INTERVAL_DAY_TIME:
      return ((WritableHiveIntervalDayTimeObjectInspector) objectInspector)
          .create((HiveIntervalDayTime) object);
    case DECIMAL: {
      // DECIMAL precision/scale also come from the type info.
      WritableHiveDecimalObjectInspector writableDecimalObjectInspector =
          new WritableHiveDecimalObjectInspector((DecimalTypeInfo) primitiveTypeInfo);
      return (HiveDecimalWritable) writableDecimalObjectInspector.create((HiveDecimal) object);
    }
    default:
      throw new Error("Unknown primitive category " + primitiveCategory);
  }
}
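Every branch applies the same pattern: pick the writable ObjectInspector matching the column's primitive category and let its create method wrap the plain Java value in the corresponding Writable. Isolated, the pattern looks like this (a sketch, not part of the class):

// A plain long wrapped into a LongWritable via the writable OI.
WritableLongObjectInspector longOI =
    PrimitiveObjectInspectorFactory.writableLongObjectInspector;
LongWritable wrapped = (LongWritable) longOI.create(42L);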
Use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector in project hive by apache.
The class OrcFileGenerator, method generateOrcFile.
/**
 * Generates an ORC file based on the provided record class in the specified file system
 * at the output path.
 *
 * @param conf the configuration used to initialize the orc writer
 * @param fs the file system that will contain the generated ORC file
 * @param outputPath the path where the generated ORC file will be placed
 * @param recordClass a class that defines the record format for the generated ORC file;
 * this class must have exactly one constructor.
 */
public static void generateOrcFile(Configuration conf, FileSystem fs, Path outputPath,
    Class recordClass) throws IOException, InstantiationException, IllegalAccessException,
    InvocationTargetException {
  ObjectInspector inspector;
  synchronized (TestVectorizedORCReader.class) {
    inspector = ObjectInspectorFactory.getReflectionObjectInspector(recordClass,
        ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  Writer writer = OrcFile.createWriter(fs, outputPath, conf, inspector, 100000,
      CompressionKind.ZLIB, 10000, 10000);
  try {
    Constructor[] constructors = recordClass.getConstructors();
    if (constructors.length != 1) {
      throw new UnsupportedOperationException(
          "The provided recordClass must have exactly one constructor.");
    }
    BatchDataDistribution[] dataDist = BatchDataDistribution.values();
    Class[] columns = constructors[0].getParameterTypes();
    for (int i = 0; i < dataDist.length * 3; i++) {
      // Generate one batch of column-major data, rotating distributions per column.
      Object[][] rows = new Object[columns.length][VectorizedRowBatch.DEFAULT_SIZE];
      for (int c = 0; c < columns.length; c++) {
        if (!TYPE_TO_BATCH_GEN_MAP.containsKey(columns[c])) {
          throw new UnsupportedOperationException(
              "No batch generator defined for type " + columns[c].getName());
        }
        rows[c] = TYPE_TO_BATCH_GEN_MAP.get(columns[c])
            .generateBatch(dataDist[(i + c) % dataDist.length]);
      }
      // Transpose to row-major order and write one record per row.
      for (int r = 0; r < VectorizedRowBatch.DEFAULT_SIZE; r++) {
        Object[] row = new Object[columns.length];
        for (int c = 0; c < columns.length; c++) {
          row[c] = rows[c][r];
        }
        writer.addRow(constructors[0].newInstance(row));
      }
    }
  } finally {
    writer.close();
  }
}
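Once a record class exists, generation is a single call. A hedged usage sketch; MyRecord is a hypothetical class with one constructor whose parameter types all have entries in TYPE_TO_BATCH_GEN_MAP, and the output path is arbitrary:

// Hypothetical driver code; MyRecord and the path are assumptions.
Configuration conf = new Configuration();
FileSystem fs = FileSystem.getLocal(conf);
OrcFileGenerator.generateOrcFile(conf, fs, new Path("/tmp/generated.orc"), MyRecord.class);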