Use of org.apache.hadoop.hive.serde2.io.ParquetHiveRecord in project hive by apache.
From the class TestParquetSerDe, the method deserializeAndSerializeLazySimple:
private void deserializeAndSerializeLazySimple(final ParquetHiveSerDe serDe, final ArrayWritable t) throws SerDeException {
  // Get the row structure
  final StructObjectInspector oi = (StructObjectInspector) serDe.getObjectInspector();
  // Deserialize
  final Object row = serDe.deserialize(t);
  assertEquals("deserialization gives the wrong object class", row.getClass(), ArrayWritable.class);
  assertEquals("size correct after deserialization", serDe.getSerDeStats().getRawDataSize(), t.get().length);
  assertEquals("deserialization gives the wrong object", t, row);
  // Serialize
  final ParquetHiveRecord serializedArr = (ParquetHiveRecord) serDe.serialize(row, oi);
  assertEquals("size correct after serialization", serDe.getSerDeStats().getRawDataSize(),
      ((ArrayWritable) serializedArr.getObject()).get().length);
  assertTrue("serialized object should be equal to starting object",
      arrayWritableEquals(t, (ArrayWritable) serializedArr.getObject()));
}
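For context, a caller of this helper would first initialize the SerDe and build the input ArrayWritable. The following is a minimal sketch, not taken from the Hive test itself; the column names, types, and values are illustrative assumptions.

// Hypothetical table definition: an int column and a string column (illustrative).
Properties tbl = new Properties();
tbl.setProperty("columns", "id,name");
tbl.setProperty("columns.types", "int:string");

ParquetHiveSerDe serDe = new ParquetHiveSerDe();
SerDeUtils.initializeSerDe(serDe, new Configuration(), tbl, null);

// One row, represented as an ArrayWritable holding one Writable per column.
ArrayWritable row = new ArrayWritable(Writable.class,
    new Writable[] { new IntWritable(1), new Text("alice") });

deserializeAndSerializeLazySimple(serDe, row);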
Use of org.apache.hadoop.hive.serde2.io.ParquetHiveRecord in project hive by apache.
From the class TestDataWritableWriter, the method getParquetWritable:
private ParquetHiveRecord getParquetWritable(String columnNames, String columnTypes, ArrayWritable record) throws SerDeException {
  // Describe the table schema through the standard SerDe properties.
  Properties recordProperties = new Properties();
  recordProperties.setProperty("columns", columnNames);
  recordProperties.setProperty("columns.types", columnTypes);
  ParquetHiveSerDe serDe = new ParquetHiveSerDe();
  SerDeUtils.initializeSerDe(serDe, new Configuration(), recordProperties, null);
  // Wrap the deserialized row together with its inspector so the writer can consume it.
  return new ParquetHiveRecord(serDe.deserialize(record), getObjectInspector(columnNames, columnTypes));
}
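A test could then obtain a ParquetHiveRecord for a single-column row as follows. The column name and value here are illustrative assumptions, not taken from TestDataWritableWriter:

// Hypothetical record with one int column named "a".
ArrayWritable record = new ArrayWritable(Writable.class,
    new Writable[] { new IntWritable(42) });
ParquetHiveRecord writable = getParquetWritable("a", "int", record);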
Use of org.apache.hadoop.hive.serde2.io.ParquetHiveRecord in project hive by apache.
From the class TestMapredParquetOutputFormat, the method testGetHiveRecordWriter:
@SuppressWarnings("unchecked")
@Test
public void testGetHiveRecordWriter() throws IOException {
  Properties tableProps = new Properties();
  tableProps.setProperty("columns", "foo,bar");
  tableProps.setProperty("columns.types", "int:int");
  final Progressable mockProgress = mock(Progressable.class);
  final ParquetOutputFormat<ParquetHiveRecord> outputFormat =
      (ParquetOutputFormat<ParquetHiveRecord>) mock(ParquetOutputFormat.class);
  JobConf jobConf = new JobConf();
  try {
    // Anonymous subclass intercepts the protected factory method to assert on its arguments.
    new MapredParquetOutputFormat(outputFormat) {
      @Override
      protected ParquetRecordWriterWrapper getParquerRecordWriterWrapper(
          ParquetOutputFormat<ParquetHiveRecord> realOutputFormat, JobConf jobConf, String finalOutPath,
          Progressable progress, Properties tableProperties) throws IOException {
        assertEquals(outputFormat, realOutputFormat);
        assertNotNull(jobConf.get(DataWritableWriteSupport.PARQUET_HIVE_SCHEMA));
        assertEquals("/foo", finalOutPath.toString());
        assertEquals(mockProgress, progress);
        throw new RuntimeException("passed tests");
      }
    }.getHiveRecordWriter(jobConf, new Path("/foo"), null, false, tableProps, mockProgress);
    fail("should throw runtime exception.");
  } catch (RuntimeException e) {
    assertEquals("passed tests", e.getMessage());
  }
}
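The anonymous subclass here overrides the protected factory hook and throws a sentinel RuntimeException once its assertions pass, so the test can verify the arguments threaded through getHiveRecordWriter without constructing a real Parquet writer. For comparison, obtaining a real writer might look like the following minimal sketch; the output path and column definitions are illustrative assumptions, and error handling is omitted:

Properties tableProps = new Properties();
tableProps.setProperty("columns", "foo,bar");
tableProps.setProperty("columns.types", "int:int");

// Returns a writer that accepts ParquetHiveRecord values for the given (illustrative) path.
FileSinkOperator.RecordWriter writer = new MapredParquetOutputFormat()
    .getHiveRecordWriter(new JobConf(), new Path("/tmp/out.parquet"),
        null, false, tableProps, Reporter.NULL);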