Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaStringObjectInspector in project hive by apache.
In the class TestGenericUDFDecode, the method verifyDecode:
public void verifyDecode(String string, String charsetName) throws UnsupportedEncodingException, HiveException {
  GenericUDFDecode udf = new GenericUDFDecode();
  byte[] bytes = string.getBytes(charsetName);
  ObjectInspector valueOI = PrimitiveObjectInspectorFactory.javaByteArrayObjectInspector;
  ObjectInspector charsetOI = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
  ObjectInspector[] initArguments = { valueOI, charsetOI };
  udf.initialize(initArguments);
  DeferredObject valueObj = new DeferredJavaObject(bytes);
  DeferredObject charsetObj = new DeferredJavaObject(charsetName);
  DeferredObject[] arguments = { valueObj, charsetObj };
  String output = (String) udf.evaluate(arguments);
  assertEquals("Decoding failed for CharSet: " + charsetName, string, output);
}
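For context, javaStringObjectInspector is the factory's singleton inspector for plain java.lang.String values. The following is a minimal standalone sketch of how it bridges the Java and Hadoop Writable representations; the class name JavaStringOIDemo is hypothetical, and the example assumes the Hive serde2 jars are on the classpath.

import org.apache.hadoop.hive.serde2.objectinspector.primitive.JavaStringObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.Text;

public class JavaStringOIDemo {
  public static void main(String[] args) {
    // Singleton inspector for java.lang.String values.
    JavaStringObjectInspector oi = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
    // Inspect a plain String and obtain its Writable form (Text) and its Java form (String).
    Text writable = oi.getPrimitiveWritableObject("hello");
    String javaValue = oi.getPrimitiveJavaObject("hello");
    System.out.println(writable + " / " + javaValue);
  }
}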
Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaStringObjectInspector in project hive by apache.
In the class TestGenericUDFEncode, the method verifyEncode:
public void verifyEncode(String string, String charsetName) throws UnsupportedEncodingException, HiveException {
  GenericUDFEncode udf = new GenericUDFEncode();
  byte[] expected = string.getBytes(charsetName);
  ObjectInspector valueOI = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
  ObjectInspector charsetOI = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
  ObjectInspector[] initArguments = { valueOI, charsetOI };
  udf.initialize(initArguments);
  DeferredObject valueObj = new DeferredJavaObject(string);
  DeferredObject charsetObj = new DeferredJavaObject(charsetName);
  DeferredObject[] arguments = { valueObj, charsetObj };
  BytesWritable outputWritable = (BytesWritable) udf.evaluate(arguments);
  byte[] output = outputWritable.getBytes();
  assertTrue("Encoding failed for CharSet: " + charsetName, expected.length == outputWritable.getLength());
  for (int i = 0; i < expected.length; i++) {
    assertEquals("Encoding failed for CharSet: " + charsetName, expected[i], output[i]);
  }
}
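A sketch of how such a helper might be driven from a test method; the method name and the charset list below are illustrative, not taken from the Hive tests, and the same shape applies to verifyDecode above.

// Hypothetical driver for verifyEncode; the listed charsets are chosen for illustration.
public void testEncodeForSeveralCharsets() throws UnsupportedEncodingException, HiveException {
  String[] charsets = { "UTF-8", "UTF-16", "ISO-8859-1", "US-ASCII" };
  for (String charset : charsets) {
    verifyEncode("Hello World", charset);
  }
}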
Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaStringObjectInspector in project hive by apache.
In the class TestGenericUDFDateAdd, the method testStringToDate:
public void testStringToDate() throws HiveException {
  GenericUDFDateAdd udf = new GenericUDFDateAdd();
  ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
  ObjectInspector valueOI2 = PrimitiveObjectInspectorFactory.javaIntObjectInspector;
  ObjectInspector[] arguments = { valueOI1, valueOI2 };
  udf.initialize(arguments);
  DeferredObject valueObj1 = new DeferredJavaObject(new Text("2009-07-20 04:17:52"));
  DeferredObject valueObj2 = new DeferredJavaObject(new Integer("2"));
  DeferredObject[] args = { valueObj1, valueObj2 };
  DateWritable output = (DateWritable) udf.evaluate(args);
  assertEquals("date_add() test for STRING failed ", "2009-07-22", output.toString());
  // Test with null args
  args = new DeferredObject[] { new DeferredJavaObject(null), valueObj2 };
  assertNull("date_add() 1st arg null", udf.evaluate(args));
  args = new DeferredObject[] { valueObj1, new DeferredJavaObject(null) };
  assertNull("date_add() 2nd arg null", udf.evaluate(args));
  args = new DeferredObject[] { new DeferredJavaObject(null), new DeferredJavaObject(null) };
  assertNull("date_add() both args null", udf.evaluate(args));
}
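The same two-phase pattern works when the first argument is a DATE rather than a STRING. The sketch below is a hypothetical variant of the test above, assuming the same Hive version as the snippet, where org.apache.hadoop.hive.serde2.io.DateWritable wraps a java.sql.Date.

// Hypothetical DATE-typed variant of the STRING test above.
public void testDateWritableToDate() throws HiveException {
  GenericUDFDateAdd udf = new GenericUDFDateAdd();
  // Declare the argument types: DATE (writable) and INT (java).
  ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.writableDateObjectInspector;
  ObjectInspector valueOI2 = PrimitiveObjectInspectorFactory.javaIntObjectInspector;
  udf.initialize(new ObjectInspector[] { valueOI1, valueOI2 });
  // Evaluate with the concrete values wrapped as DeferredObjects.
  DeferredObject[] args = {
      new DeferredJavaObject(new DateWritable(Date.valueOf("2009-07-20"))),
      new DeferredJavaObject(Integer.valueOf(2)) };
  DateWritable output = (DateWritable) udf.evaluate(args);
  assertEquals("date_add() test for DATE failed", "2009-07-22", output.toString());
}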
Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaStringObjectInspector in project hive by apache.
In the class TestGenericUDFDateDiff, the method testStringToDate:
public void testStringToDate() throws HiveException {
  GenericUDFDateDiff udf = new GenericUDFDateDiff();
  ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
  ObjectInspector valueOI2 = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
  ObjectInspector[] arguments = { valueOI1, valueOI2 };
  udf.initialize(arguments);
  DeferredObject valueObj1 = new DeferredJavaObject(new Text("2009-07-20"));
  DeferredObject valueObj2 = new DeferredJavaObject(new Text("2009-07-22"));
  DeferredObject[] args = { valueObj1, valueObj2 };
  IntWritable output = (IntWritable) udf.evaluate(args);
  assertEquals("datediff() test for STRING failed ", "-2", output.toString());
  // Test with null args
  args = new DeferredObject[] { new DeferredJavaObject(null), valueObj2 };
  assertNull("datediff() 1st arg null", udf.evaluate(args));
  args = new DeferredObject[] { valueObj1, new DeferredJavaObject(null) };
  assertNull("datediff() 2nd arg null", udf.evaluate(args));
  args = new DeferredObject[] { new DeferredJavaObject(null), new DeferredJavaObject(null) };
  assertNull("datediff() both args null", udf.evaluate(args));
}
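All four tests above follow the same two-phase GenericUDF contract: initialize() is given ObjectInspectors describing the argument types, and evaluate() is given the actual values wrapped as DeferredObjects. A minimal, hypothetical helper capturing that pattern is sketched below; runUdf is not part of the Hive code base.

// Hypothetical helper; not part of Hive. Assumes the same imports as the tests above.
private static Object runUdf(GenericUDF udf, ObjectInspector[] argOIs, Object... values)
    throws HiveException {
  // Phase 1: resolve argument and return types from the ObjectInspectors.
  udf.initialize(argOIs);
  // Phase 2: evaluate with the concrete values wrapped as DeferredObjects.
  DeferredObject[] args = new DeferredObject[values.length];
  for (int i = 0; i < values.length; i++) {
    args[i] = new DeferredJavaObject(values[i]);
  }
  return udf.evaluate(args);
}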
Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaStringObjectInspector in project hive by apache.
In the class MapOperator, the method initObjectInspector:
private MapOpCtx initObjectInspector(Configuration hconf, MapOpCtx opCtx, StructObjectInspector tableRowOI) throws Exception {
  PartitionDesc pd = opCtx.partDesc;
  TableDesc td = pd.getTableDesc();
  // Use table properties in case of unpartitioned tables,
  // and the union of table properties and partition properties, with partition
  // taking precedence, in the case of partitioned tables
  Properties overlayedProps = SerDeUtils.createOverlayedProperties(td.getProperties(), pd.getProperties());
  Map<String, String> partSpec = pd.getPartSpec();
  opCtx.tableName = String.valueOf(overlayedProps.getProperty("name"));
  opCtx.partName = String.valueOf(partSpec);
  opCtx.deserializer = pd.getDeserializer(hconf);
  StructObjectInspector partRawRowObjectInspector;
  boolean isAcid = AcidUtils.isTablePropertyTransactional(td.getProperties());
  if (Utilities.isSchemaEvolutionEnabled(hconf, isAcid) && Utilities.isInputFileFormatSelfDescribing(pd)) {
    partRawRowObjectInspector = tableRowOI;
  } else {
    partRawRowObjectInspector = (StructObjectInspector) opCtx.deserializer.getObjectInspector();
  }
  opCtx.partTblObjectInspectorConverter = ObjectInspectorConverters.getConverter(partRawRowObjectInspector, tableRowOI);
  // Next check if this table has partitions and if so
  // get the list of partition names as well as allocate
  // the serdes for the partition columns
  String pcols = overlayedProps.getProperty(hive_metastoreConstants.META_TABLE_PARTITION_COLUMNS);
  if (pcols != null && pcols.length() > 0) {
    String[] partKeys = pcols.trim().split("/");
    String pcolTypes = overlayedProps.getProperty(hive_metastoreConstants.META_TABLE_PARTITION_COLUMN_TYPES);
    String[] partKeyTypes = pcolTypes.trim().split(":");
    if (partKeys.length > partKeyTypes.length) {
      throw new HiveException("Internal error : partKeys length, " + partKeys.length + " greater than partKeyTypes length, " + partKeyTypes.length);
    }
    List<String> partNames = new ArrayList<String>(partKeys.length);
    Object[] partValues = new Object[partKeys.length];
    List<ObjectInspector> partObjectInspectors = new ArrayList<ObjectInspector>(partKeys.length);
    for (int i = 0; i < partKeys.length; i++) {
      String key = partKeys[i];
      partNames.add(key);
      ObjectInspector oi = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(partKeyTypes[i]));
      // Partitions do not exist for this table
      if (partSpec == null) {
        // for partitionless table, initialize partValue to null
        partValues[i] = null;
      } else {
        partValues[i] = ObjectInspectorConverters.getConverter(PrimitiveObjectInspectorFactory.javaStringObjectInspector, oi).convert(partSpec.get(key));
      }
      partObjectInspectors.add(oi);
    }
    opCtx.rowWithPart = new Object[] { null, partValues };
    opCtx.partObjectInspector = ObjectInspectorFactory.getStandardStructObjectInspector(partNames, partObjectInspectors);
  }
  // The operator here may not be a TableScan (it can be a Select, for example);
  // in that case the rowOI need not be amended for virtual columns.
  if (opCtx.op instanceof TableScanOperator) {
    TableScanOperator tsOp = (TableScanOperator) opCtx.op;
    TableScanDesc tsDesc = tsOp.getConf();
    if (tsDesc != null && tsDesc.hasVirtualCols()) {
      opCtx.vcs = tsDesc.getVirtualCols();
      opCtx.vcValues = new Object[opCtx.vcs.size()];
      opCtx.vcsObjectInspector = VirtualColumn.getVCSObjectInspector(opCtx.vcs);
      if (opCtx.isPartitioned()) {
        opCtx.rowWithPartAndVC = Arrays.copyOfRange(opCtx.rowWithPart, 0, 3);
      } else {
        opCtx.rowWithPartAndVC = new Object[2];
      }
    }
  }
  if (!opCtx.hasVC() && !opCtx.isPartitioned()) {
    opCtx.rowObjectInspector = tableRowOI;
    return opCtx;
  }
  List<StructObjectInspector> inspectors = new ArrayList<StructObjectInspector>();
  inspectors.add(tableRowOI);
  if (opCtx.isPartitioned()) {
    inspectors.add(opCtx.partObjectInspector);
  }
  if (opCtx.hasVC()) {
    inspectors.add(opCtx.vcsObjectInspector);
  }
  opCtx.rowObjectInspector = ObjectInspectorFactory.getUnionStructObjectInspector(inspectors);
  return opCtx;
}
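In this method, javaStringObjectInspector is used to convert string-typed partition values from the partition spec into the Writable type declared for each partition column. A standalone sketch of that conversion follows; the class name PartitionValueConversionDemo and the example value are hypothetical, while the API calls mirror the ones in the method above.

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class PartitionValueConversionDemo {
  public static void main(String[] args) {
    // Target type declared for the partition column in the table metadata, e.g. "int".
    ObjectInspector targetOI = PrimitiveObjectInspectorFactory
        .getPrimitiveWritableObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo("int"));
    // Partition values arrive as Strings, so convert from the java String inspector.
    Object converted = ObjectInspectorConverters
        .getConverter(PrimitiveObjectInspectorFactory.javaStringObjectInspector, targetOI)
        .convert("42");
    // Prints the Writable wrapper produced by the converter, e.g. IntWritable: 42.
    System.out.println(converted.getClass().getSimpleName() + ": " + converted);
  }
}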