Use of org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe in project hive by apache.
The class TestCrossMapEqualComparer, method testIncompatibleType:
public void testIncompatibleType() throws SerDeException, IOException {
  // empty maps
  StringTextMapHolder o1 = new StringTextMapHolder();
  StructObjectInspector oi1 = (StructObjectInspector) ObjectInspectorFactory.getReflectionObjectInspector(StringTextMapHolder.class, ObjectInspectorOptions.JAVA);
  LazySimpleSerDe serde = new LazySimpleSerDe();
  Configuration conf = new Configuration();
  Properties tbl = new Properties();
  tbl.setProperty(serdeConstants.LIST_COLUMNS, ObjectInspectorUtils.getFieldNames(oi1));
  tbl.setProperty(serdeConstants.LIST_COLUMN_TYPES, ObjectInspectorUtils.getFieldTypes(oi1));
  LazySerDeParameters serdeParams = new LazySerDeParameters(conf, tbl, LazySimpleSerDe.class.getName());
  SerDeUtils.initializeSerDe(serde, conf, tbl, null);
  ObjectInspector oi2 = serde.getObjectInspector();
  Object o2 = serializeAndDeserialize(o1, oi1, serde, serdeParams);
  int rc = ObjectInspectorUtils.compare(o1, oi1, o2, oi2, new CrossMapEqualComparer());
  assertEquals(0, rc);
  // equal maps
  o1.mMap.put("42", new Text("The answer to Life, Universe And Everything"));
  o1.mMap.put("1729", new Text("A taxi cab number"));
  o2 = serializeAndDeserialize(o1, oi1, serde, serdeParams);
  rc = ObjectInspectorUtils.compare(o1, oi1, o2, oi2, new CrossMapEqualComparer());
  assertEquals(0, rc);
  // unequal maps
  o1.mMap.put("1729", new Text("Hardy-Ramanujan Number"));
  rc = ObjectInspectorUtils.compare(o1, oi1, o2, oi2, new CrossMapEqualComparer());
  assertFalse(0 == rc);
}
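The serializeAndDeserialize helper is not included in this excerpt. A minimal sketch of what such a round-trip helper could look like, assuming it only needs to push the Java object through the SerDe and back (the serdeParams argument is kept for signature compatibility; the real helper may use it for a lower-level serialization path):

  // Hypothetical sketch, not the original helper: serialize the reflection-backed
  // object with the LazySimpleSerDe, then deserialize it back into lazy objects.
  private static Object serializeAndDeserialize(Object row, StructObjectInspector rowOI,
      LazySimpleSerDe serde, LazySerDeParameters serdeParams) throws SerDeException {
    Writable serialized = serde.serialize(row, rowOI);
    return serde.deserialize(serialized);
  }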
Use of org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe in project hive by apache.
The class TestHCatRecordSerDe, method testRW:
public void testRW() throws Exception {
  Configuration conf = new Configuration();
  for (Entry<Properties, HCatRecord> e : getData().entrySet()) {
    Properties tblProps = e.getKey();
    HCatRecord r = e.getValue();
    HCatRecordSerDe hrsd = new HCatRecordSerDe();
    SerDeUtils.initializeSerDe(hrsd, conf, tblProps, null);
    LOG.info("ORIG: {}", r);
    Writable s = hrsd.serialize(r, hrsd.getObjectInspector());
    LOG.info("ONE: {}", s);
    HCatRecord r2 = (HCatRecord) hrsd.deserialize(s);
    Assert.assertTrue(HCatDataCheckUtil.recordsEqual(r, r2));
    // If serialization went through correctly, then s is also an HCatRecord,
    // equal to the original and a deep copy, and this holds true through
    // multiple further levels of serialization as well.
    Writable s2 = hrsd.serialize(s, hrsd.getObjectInspector());
    LOG.info("TWO: {}", s2);
    Assert.assertTrue(HCatDataCheckUtil.recordsEqual(r, (HCatRecord) s));
    Assert.assertTrue(HCatDataCheckUtil.recordsEqual(r, (HCatRecord) s2));
    // Serialize using another serde, and read out that object representation.
    LazySimpleSerDe testSD = new LazySimpleSerDe();
    SerDeUtils.initializeSerDe(testSD, conf, tblProps, null);
    Writable s3 = testSD.serialize(s, hrsd.getObjectInspector());
    LOG.info("THREE: {}", s3);
    Object o3 = testSD.deserialize(s3);
    Assert.assertFalse(r.getClass().equals(o3.getClass()));
    // Then serialize again using hrsd, and compare results.
    HCatRecord s4 = (HCatRecord) hrsd.serialize(o3, testSD.getObjectInspector());
    LOG.info("FOUR: {}", s4);
    // Test LazyHCatRecord init and read.
    LazyHCatRecord s5 = new LazyHCatRecord(o3, testSD.getObjectInspector());
    LOG.info("FIVE: {}", s5);
    LazyHCatRecord s6 = new LazyHCatRecord(s4, hrsd.getObjectInspector());
    LOG.info("SIX: {}", s6);
  }
}
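The getData() fixture is not shown here. For both HCatRecordSerDe and LazySimpleSerDe to initialize from the same table properties, each Properties key presumably carries at least the column names and types; a hypothetical entry (illustrative only, not the actual fixture) might look like:

  Properties tblProps = new Properties();
  // Assumed minimal schema properties; the real fixture defines richer nested types.
  tblProps.setProperty(serdeConstants.LIST_COLUMNS, "an_int,a_string");
  tblProps.setProperty(serdeConstants.LIST_COLUMN_TYPES, "int:string");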
Use of org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe in project hive by apache.
The class TestLazySimpleSerDe, method deserializeAndSerialize:
private void deserializeAndSerialize(LazySimpleSerDe serDe, Text t, String s, Object[] expectedFieldsData) throws SerDeException {
  // Get the row structure
  StructObjectInspector oi = (StructObjectInspector) serDe.getObjectInspector();
  List<? extends StructField> fieldRefs = oi.getAllStructFieldRefs();
  assertEquals(expectedFieldsData.length, fieldRefs.size());
  // Deserialize
  Object row = serDe.deserialize(t);
  for (int i = 0; i < fieldRefs.size(); i++) {
    Object fieldData = oi.getStructFieldData(row, fieldRefs.get(i));
    if (fieldData != null) {
      fieldData = ((LazyPrimitive) fieldData).getWritableObject();
    }
    assertEquals("Field " + i, expectedFieldsData[i], fieldData);
  }
  // Serialize
  assertEquals(Text.class, serDe.getSerializedClass());
  Text serializedText = (Text) serDe.serialize(row, oi);
  assertEquals("Serialized data", s, serializedText.toString());
}
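A hedged example of how this helper might be driven, using a hypothetical two-string-column layout and an explicit tab field delimiter (the actual test class configures its SerDe and rows differently):

  Properties tbl = new Properties();
  tbl.setProperty(serdeConstants.LIST_COLUMNS, "k,v");
  tbl.setProperty(serdeConstants.LIST_COLUMN_TYPES, "string:string");
  tbl.setProperty(serdeConstants.FIELD_DELIM, "\t");
  LazySimpleSerDe serDe = new LazySimpleSerDe();
  SerDeUtils.initializeSerDe(serDe, new Configuration(), tbl, null);
  // Each string field deserializes to a LazyString whose writable object is a Text.
  deserializeAndSerialize(serDe, new Text("hello\tworld"), "hello\tworld",
      new Object[] { new Text("hello"), new Text("world") });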
Use of org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe in project hive by apache.
The class TestStatsSerde, method deserializeAndSerializeLazySimple:
private void deserializeAndSerializeLazySimple(LazySimpleSerDe serDe, Text t) throws SerDeException {
  // Get the row structure
  StructObjectInspector oi = (StructObjectInspector) serDe.getObjectInspector();
  // Deserialize
  Object row = serDe.deserialize(t);
  assertEquals("serialized size correct after deserialization", serDe.getSerDeStats().getRawDataSize(), t.getLength());
  // Serialize
  Text serializedText = (Text) serDe.serialize(row, oi);
  assertEquals("serialized size correct after serialization", serDe.getSerDeStats().getRawDataSize(), serializedText.toString().length());
}
Use of org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe in project SQLWindowing by hbutani.
The class Utils, method createLazySimpleSerDe:
public static LazySimpleSerDe createLazySimpleSerDe(String columns, String types, String fieldDelim) throws BaseException {
  try {
    Properties p = new Properties();
    p.put(Constants.FIELD_DELIM, fieldDelim);
    p.put(Constants.LIST_COLUMNS, columns);
    p.put(Constants.LIST_COLUMN_TYPES, types);
    LazySimpleSerDe sd = new LazySimpleSerDe();
    sd.initialize(null, p);
    return sd;
  } catch (SerDeException se) {
    throw new BaseException(se);
  }
}
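A hedged usage sketch of this factory (hypothetical column names, types, and delimiter; error handling for the checked BaseException omitted):

  // Build a SerDe for two columns "name" (string) and "age" (int), comma-delimited.
  LazySimpleSerDe sd = Utils.createLazySimpleSerDe("name,age", "string:int", ",");
  Object row = sd.deserialize(new Text("alice,30"));
  StructObjectInspector rowOI = (StructObjectInspector) sd.getObjectInspector();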