Use of org.apache.hadoop.io.ArrayWritable in project hive by apache.
From the class TestStandardParquetHiveMapInspector, method testNullContainer.
@Test
public void testNullContainer() {
  final ArrayWritable map = new ArrayWritable(ArrayWritable.class, null);
  assertNull("Should be null", inspector.getMapValueElement(map, new IntWritable(0)));
}
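For context, a minimal sketch of the fixture this test relies on. The two-argument StandardParquetHiveMapInspector constructor and the choice of writableIntObjectInspector are assumptions inferred from the IntWritable lookups in the tests, not quoted from the Hive suite.

import org.apache.hadoop.hive.ql.io.parquet.serde.StandardParquetHiveMapInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.junit.Before;

// Hypothetical fixture sketch, assuming the inspector is built from a
// (key ObjectInspector, value ObjectInspector) pair; int keys, int values.
private StandardParquetHiveMapInspector inspector;

@Before
public void setUp() {
  inspector = new StandardParquetHiveMapInspector(
      PrimitiveObjectInspectorFactory.writableIntObjectInspector,
      PrimitiveObjectInspectorFactory.writableIntObjectInspector);
}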
Use of org.apache.hadoop.io.ArrayWritable in project hive by apache.
From the class TestStandardParquetHiveMapInspector, method testEmptyContainer.
@Test
public void testEmptyContainer() {
  final ArrayWritable map = new ArrayWritable(ArrayWritable.class, new ArrayWritable[0]);
  assertNull("Should be null", inspector.getMapValueElement(map, new IntWritable(0)));
}
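Both the null and the empty container return null rather than throwing. For contrast, a hypothetical happy-path companion test (same assumed fixture as above, not from the Hive suite) shows a lookup returning a stored value:

// Hypothetical companion test; one inner ArrayWritable holds one {key, value} pair.
@Test
public void testPopulatedContainer() {
  final Writable[] entry = new Writable[] { new IntWritable(2), new IntWritable(3) };
  final ArrayWritable map = new ArrayWritable(ArrayWritable.class,
      new Writable[] { new ArrayWritable(Writable.class, entry) });
  // The standard inspector compares the probe key against the stored key Writable.
  assertEquals(new IntWritable(3), inspector.getMapValueElement(map, new IntWritable(2)));
}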
Use of org.apache.hadoop.io.ArrayWritable in project hive by apache.
From the class TestAbstractParquetMapInspector, method testRegularMap.
@Test
public void testRegularMap() {
  final Writable[] entry1 = new Writable[] { new IntWritable(0), new IntWritable(1) };
  final Writable[] entry2 = new Writable[] { new IntWritable(2), new IntWritable(3) };
  final ArrayWritable map = new ArrayWritable(ArrayWritable.class,
      new Writable[] { new ArrayWritable(Writable.class, entry1),
          new ArrayWritable(Writable.class, entry2) });
  final Map<Writable, Writable> expected = new HashMap<Writable, Writable>();
  expected.put(new IntWritable(0), new IntWritable(1));
  expected.put(new IntWritable(2), new IntWritable(3));
  assertEquals("Wrong size", 2, inspector.getMapSize(map));
  assertEquals("Wrong result of inspection", expected, inspector.getMap(map));
}
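The nested shape used above, an outer ArrayWritable whose elements are two-element {key, value} ArrayWritables, is how these inspectors expect a map to arrive. A small hypothetical helper (not part of Hive) makes the encoding explicit:

// Hypothetical helper: encodes a java.util.Map in the
// outer-array-of-{key, value}-pairs layout the Parquet map inspectors read.
private static ArrayWritable toMapWritable(final Map<Writable, Writable> m) {
  final Writable[] entries = new Writable[m.size()];
  int i = 0;
  for (final Map.Entry<Writable, Writable> e : m.entrySet()) {
    entries[i++] = new ArrayWritable(Writable.class,
        new Writable[] { e.getKey(), e.getValue() });
  }
  return new ArrayWritable(ArrayWritable.class, entries);
}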
Use of org.apache.hadoop.io.ArrayWritable in project hive by apache.
From the class TestDeepParquetHiveMapInspector, method testNullContainer.
@Test
public void testNullContainer() {
  final ArrayWritable map = new ArrayWritable(ArrayWritable.class, null);
  assertNull("Should be null", inspector.getMapValueElement(map, new ShortWritable((short) 0)));
}
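The ShortWritable probe makes sense here because the deep inspector inspects keys through its key ObjectInspector before comparing, rather than relying on raw Writable equality. A hypothetical positive case, assuming a DeepParquetHiveMapInspector fixture built with short keys and int values (both the fixture and the comparison semantics are assumptions, not quoted from the Hive suite):

// Hypothetical positive case for the deep inspector.
@Test
public void testLookupByInspectedKey() {
  final Writable[] entry = new Writable[] { new ShortWritable((short) 2), new IntWritable(3) };
  final ArrayWritable map = new ArrayWritable(ArrayWritable.class,
      new Writable[] { new ArrayWritable(Writable.class, entry) });
  // Keys are compared after inspection through the key ObjectInspector.
  assertEquals(new IntWritable(3),
      inspector.getMapValueElement(map, new ShortWritable((short) 2)));
}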
Use of org.apache.hadoop.io.ArrayWritable in project hive by apache.
From the class ParquetRecordReaderWrapper, method next.
@Override
public boolean next(final NullWritable key, final ArrayWritable value) throws IOException {
  if (eof) {
    return false;
  }
  try {
    if (firstRecord) {
      // key & value are already read.
      firstRecord = false;
    } else if (!realReader.nextKeyValue()) {
      // strictly not required, just for consistency
      eof = true;
      return false;
    }
    final ArrayWritable tmpCurValue = realReader.getCurrentValue();
    if (value != tmpCurValue) {
      // Fail fast on a null destination before dereferencing it.
      if (value == null) {
        throw new IOException("DeprecatedParquetHiveInput can not support RecordReaders that"
            + " don't return the same key & value and value is null");
      }
      final Writable[] arrValue = value.get();
      final Writable[] arrCurrent = tmpCurValue.get();
      if (arrValue.length != arrCurrent.length) {
        throw new IOException("DeprecatedParquetHiveInput : size of object differs. Value"
            + " size : " + arrValue.length + ", Current Object size : " + arrCurrent.length);
      }
      // Copy the freshly read record into the caller-supplied value in place.
      System.arraycopy(arrCurrent, 0, arrValue, 0, arrCurrent.length);
    }
    return true;
  } catch (final InterruptedException e) {
    throw new IOException(e);
  }
}
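As a usage sketch, the wrapper follows the old org.apache.hadoop.mapred.RecordReader contract, so a caller allocates one key/value pair and reuses it across the whole scan while next() copies each record into it in place. Reader construction is elided; createKey, createValue, next, and close are the standard RecordReader methods, not anything specific to this snippet.

// Minimal consumption loop, assuming 'reader' is a
// RecordReader<NullWritable, ArrayWritable> such as ParquetRecordReaderWrapper.
final NullWritable key = reader.createKey();
final ArrayWritable value = reader.createValue();  // reused; next() fills it in place
while (reader.next(key, value)) {
  for (final Writable field : value.get()) {
    // process one column of the current row
    System.out.println(field);
  }
}
reader.close();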