Use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.getReflectionObjectInspector in project hive by apache.
From the class TestReflectionObjectInspectors, method testObjectInspectorThreadSafety:
public void testObjectInspectorThreadSafety() throws InterruptedException {
  // 5 workers to run getReflectionObjectInspector concurrently
  final int workerCount = 5;
  final ScheduledExecutorService executorService = Executors.newScheduledThreadPool(workerCount);
  final MutableObject exception = new MutableObject();
  Thread runner = new Thread(new Runnable() {

    @Override
    @SuppressWarnings("unchecked")
    public void run() {
      Future<ObjectInspector>[] results = (Future<ObjectInspector>[]) new Future[workerCount];
      ObjectPair<Type, ObjectInspectorFactory.ObjectInspectorOptions>[] types =
          (ObjectPair<Type, ObjectInspectorFactory.ObjectInspectorOptions>[]) new ObjectPair[] {
              new ObjectPair<Type, ObjectInspectorFactory.ObjectInspectorOptions>(
                  Complex.class, ObjectInspectorFactory.ObjectInspectorOptions.THRIFT),
              new ObjectPair<Type, ObjectInspectorFactory.ObjectInspectorOptions>(
                  MyStruct.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA) };
      try {
        // repeat 20 times
        for (int i = 0; i < 20; i++) {
          for (final ObjectPair<Type, ObjectInspectorFactory.ObjectInspectorOptions> t : types) {
            ObjectInspectorFactory.objectInspectorCache.clear();
            for (int k = 0; k < workerCount; k++) {
              results[k] = executorService.schedule(new Callable<ObjectInspector>() {

                @Override
                public ObjectInspector call() throws Exception {
                  return ObjectInspectorFactory.getReflectionObjectInspector(t.getFirst(), t.getSecond());
                }
              }, 50, TimeUnit.MILLISECONDS);
            }
            ObjectInspector oi = results[0].get();
            for (int k = 1; k < workerCount; k++) {
              assertEquals(oi, results[k].get());
            }
          }
        }
      } catch (Throwable e) {
        exception.setValue(e);
      }
    }
  });
  try {
    runner.start();
    // timeout in 5 minutes
    long endTime = System.currentTimeMillis() + 300000;
    while (runner.isAlive()) {
      if (System.currentTimeMillis() > endTime) {
        // Interrupt the runner thread
        runner.interrupt();
        fail("Timed out waiting for the runner to finish");
      }
      runner.join(10000);
    }
    if (exception.getValue() != null) {
      fail("Got exception: " + exception.getValue());
    }
  } finally {
    executorService.shutdownNow();
  }
}
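The property the test above relies on is that getReflectionObjectInspector caches its result, so repeated lookups for the same type and option resolve to an equal ObjectInspector even when the calls race on a cleared cache. A minimal single-threaded sketch of that contract, using a hypothetical SimpleBean class that is not part of the Hive test sources:

import java.lang.reflect.Type;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;

public class ReflectionInspectorCacheSketch {

  // Hypothetical bean used only for this sketch; any plain Java class with public fields works.
  public static class SimpleBean {
    public int myint;
    public String mystring;
  }

  public static void main(String[] args) {
    Type t = SimpleBean.class;
    ObjectInspector first = ObjectInspectorFactory.getReflectionObjectInspector(
        t, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
    ObjectInspector second = ObjectInspectorFactory.getReflectionObjectInspector(
        t, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
    // Both lookups should resolve to an equal inspector; the concurrent test above asserts
    // the same equality across worker threads.
    System.out.println(first.equals(second));
  }
}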
Use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.getReflectionObjectInspector in project hive by apache.
From the class TestSimpleMapEqualComparer, method testCompatibleType:
public void testCompatibleType() throws SerDeException, IOException {
  // empty maps
  TextStringMapHolder o1 = new TextStringMapHolder();
  StructObjectInspector oi1 = (StructObjectInspector) ObjectInspectorFactory.getReflectionObjectInspector(
      TextStringMapHolder.class, ObjectInspectorOptions.JAVA);
  LazySimpleSerDe serde = new LazySimpleSerDe();
  Configuration conf = new Configuration();
  Properties tbl = new Properties();
  tbl.setProperty(serdeConstants.LIST_COLUMNS, ObjectInspectorUtils.getFieldNames(oi1));
  tbl.setProperty(serdeConstants.LIST_COLUMN_TYPES, ObjectInspectorUtils.getFieldTypes(oi1));
  LazySerDeParameters serdeParams = new LazySerDeParameters(conf, tbl, LazySimpleSerDe.class.getName());
  SerDeUtils.initializeSerDe(serde, conf, tbl, null);
  ObjectInspector oi2 = serde.getObjectInspector();
  Object o2 = serializeAndDeserialize(o1, oi1, serde, serdeParams);
  int rc = ObjectInspectorUtils.compare(o1, oi1, o2, oi2, new SimpleMapEqualComparer());
  assertEquals(0, rc);
  // equal maps
  o1.mMap.put(new Text("42"), "The answer to Life, Universe And Everything");
  o1.mMap.put(new Text("1729"), "A taxi cab number");
  o2 = serializeAndDeserialize(o1, oi1, serde, serdeParams);
  rc = ObjectInspectorUtils.compare(o1, oi1, o2, oi2, new SimpleMapEqualComparer());
  assertEquals(0, rc);
  // unequal maps
  o1.mMap.put(new Text("1729"), "Hardy-Ramanujan Number");
  rc = ObjectInspectorUtils.compare(o1, oi1, o2, oi2, new SimpleMapEqualComparer());
  assertFalse(0 == rc);
}
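For reference, the SerDe wiring in the test above can be condensed as follows. The PointHolder bean and the buildSerDe helper are hypothetical names introduced for this sketch; the ObjectInspectorFactory, ObjectInspectorUtils, and SerDeUtils calls mirror the ones used in testCompatibleType:

import java.util.Properties;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.SerDeUtils;
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;

public class LazySerDeFromReflectionSketch {

  // Hypothetical bean standing in for TextStringMapHolder.
  public static class PointHolder {
    public int x;
    public int y;
  }

  public static LazySimpleSerDe buildSerDe() throws Exception {
    // Reflect the bean into a struct inspector, then derive column metadata from it.
    StructObjectInspector oi = (StructObjectInspector) ObjectInspectorFactory.getReflectionObjectInspector(
        PointHolder.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
    Properties tbl = new Properties();
    tbl.setProperty(serdeConstants.LIST_COLUMNS, ObjectInspectorUtils.getFieldNames(oi));
    tbl.setProperty(serdeConstants.LIST_COLUMN_TYPES, ObjectInspectorUtils.getFieldTypes(oi));
    LazySimpleSerDe serde = new LazySimpleSerDe();
    SerDeUtils.initializeSerDe(serde, new Configuration(), tbl, null);
    return serde;
  }
}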
Use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.getReflectionObjectInspector in project hive by apache.
From the class TestThriftObjectInspectors, method testThriftObjectInspectors:
public void testThriftObjectInspectors() throws Throwable {
  try {
    ObjectInspector oi1 = ObjectInspectorFactory.getReflectionObjectInspector(
        Complex.class, ObjectInspectorFactory.ObjectInspectorOptions.THRIFT);
    ObjectInspector oi2 = ObjectInspectorFactory.getReflectionObjectInspector(
        Complex.class, ObjectInspectorFactory.ObjectInspectorOptions.THRIFT);
    assertEquals(oi1, oi2);
    // metadata
    assertEquals(Category.STRUCT, oi1.getCategory());
    StructObjectInspector soi = (StructObjectInspector) oi1;
    List<? extends StructField> fields = soi.getAllStructFieldRefs();
    assertEquals(10, fields.size());
    assertEquals(fields.get(0), soi.getStructFieldRef("aint"));
    // null
    for (int i = 0; i < fields.size(); i++) {
      assertNull(soi.getStructFieldData(null, fields.get(i)));
    }
    ObjectInspector oi = ObjectInspectorFactory.getReflectionObjectInspector(
        PropValueUnion.class, ObjectInspectorFactory.ObjectInspectorOptions.THRIFT);
    assertNotNull(oi.toString());
    // real object
    Complex c = new Complex();
    c.setAint(1);
    c.setAString("test");
    List<Integer> c2 = Arrays.asList(new Integer[] { 1, 2, 3 });
    c.setLint(c2);
    List<String> c3 = Arrays.asList(new String[] { "one", "two" });
    c.setLString(c3);
    List<IntString> c4 = new ArrayList<IntString>();
    c.setLintString(c4);
    c.setMStringString(null);
    c.setAttributes(null);
    c.setUnionField1(null);
    c.setUnionField2(null);
    c.setUnionField3(null);
    assertEquals(1, soi.getStructFieldData(c, fields.get(0)));
    assertEquals("test", soi.getStructFieldData(c, fields.get(1)));
    assertEquals(c2, soi.getStructFieldData(c, fields.get(2)));
    assertEquals(c3, soi.getStructFieldData(c, fields.get(3)));
    assertEquals(c4, soi.getStructFieldData(c, fields.get(4)));
    assertNull(soi.getStructFieldData(c, fields.get(5)));
    assertNull(soi.getStructFieldData(c, fields.get(6)));
    assertNull(soi.getStructFieldData(c, fields.get(7)));
    assertNull(soi.getStructFieldData(c, fields.get(8)));
    assertNull(soi.getStructFieldData(c, fields.get(9)));
    ArrayList<Object> cfields = new ArrayList<Object>();
    for (int i = 0; i < 10; i++) {
      cfields.add(soi.getStructFieldData(c, fields.get(i)));
    }
    assertEquals(cfields, soi.getStructFieldsDataAsList(c));
    // sub fields
    assertEquals(PrimitiveObjectInspectorFactory.javaIntObjectInspector, fields.get(0).getFieldObjectInspector());
    assertEquals(PrimitiveObjectInspectorFactory.javaStringObjectInspector, fields.get(1).getFieldObjectInspector());
    assertEquals(ObjectInspectorFactory.getStandardListObjectInspector(
        PrimitiveObjectInspectorFactory.javaIntObjectInspector), fields.get(2).getFieldObjectInspector());
    assertEquals(ObjectInspectorFactory.getStandardListObjectInspector(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector), fields.get(3).getFieldObjectInspector());
    assertEquals(ObjectInspectorFactory.getStandardListObjectInspector(
        ObjectInspectorFactory.getReflectionObjectInspector(
            IntString.class, ObjectInspectorFactory.ObjectInspectorOptions.THRIFT)),
        fields.get(4).getFieldObjectInspector());
    assertEquals(ObjectInspectorFactory.getStandardMapObjectInspector(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector,
        PrimitiveObjectInspectorFactory.javaStringObjectInspector), fields.get(5).getFieldObjectInspector());
  } catch (Throwable e) {
    e.printStackTrace();
    throw e;
  }
}
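A compact sketch of the introspection pattern this test exercises: obtain a THRIFT-backed reflection inspector and walk its struct fields. Complex is the Thrift-generated test class used above; the package in the import is assumed from Hive's serde test sources, and any other Thrift struct class would work the same way:

import java.util.List;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.thrift.test.Complex;  // assumed package for the test Thrift class

public class ThriftInspectorWalkSketch {
  public static void main(String[] args) {
    StructObjectInspector soi = (StructObjectInspector) ObjectInspectorFactory.getReflectionObjectInspector(
        Complex.class, ObjectInspectorFactory.ObjectInspectorOptions.THRIFT);
    List<? extends StructField> fields = soi.getAllStructFieldRefs();
    for (StructField f : fields) {
      // Each field pairs a name with its own ObjectInspector (primitive, list, map, union, ...).
      System.out.println(f.getFieldName() + " -> " + f.getFieldObjectInspector().getTypeName());
    }
  }
}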
Use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.getReflectionObjectInspector in project hive by apache.
From the class TestOrcSerDeStats, method testOrcSerDeStatsComplexOldFormat:
@Test
public void testOrcSerDeStatsComplexOldFormat() throws Exception {
  ObjectInspector inspector;
  synchronized (TestOrcSerDeStats.class) {
    inspector = ObjectInspectorFactory.getReflectionObjectInspector(
        BigRow.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  Writer writer = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf).inspector(inspector).stripeSize(100000)
          .version(OrcFile.Version.V_0_11).bufferSize(10000));
  // 1 + 2 + 4 + 8 + 4 + 8 + 5 + 2 + 4 + 3 + 4 + 4 + 4 + 4 + 4 + 3 = 64
  writer.addRow(new BigRow(false, (byte) 1, (short) 1024, 65536, Long.MAX_VALUE, (float) 1.0, -15.0,
      bytes(0, 1, 2, 3, 4), "hi", new MiddleStruct(inner(1, "bye"), inner(2, "sigh")),
      list(inner(3, "good"), inner(4, "bad")), map(), Timestamp.valueOf("2000-03-12 15:00:00"),
      HiveDecimal.create("12345678.6547456")));
  // 1 + 2 + 4 + 8 + 4 + 8 + 3 + 4 + 3 + 4 + 4 + 4 + 3 + 4 + 2 + 4 + 3 + 5 + 4 + 5 + 7 + 4 + 7 = 97
  writer.addRow(new BigRow(true, (byte) 100, (short) 2048, 65536, Long.MAX_VALUE, (float) 2.0, -5.0,
      bytes(), "bye", new MiddleStruct(inner(1, "bye"), inner(2, "sigh")),
      list(inner(100000000, "cat"), inner(-100000, "in"), inner(1234, "hat")),
      map(inner(5, "chani"), inner(1, "mauddib")), Timestamp.valueOf("2000-03-11 15:00:00"),
      HiveDecimal.create("12345678.6547452")));
  writer.close();
  long rowCount = writer.getNumberOfRows();
  long rawDataSize = writer.getRawDataSize();
  assertEquals(2, rowCount);
  assertEquals(1740, rawDataSize);
  Reader reader = OrcFile.createReader(testFilePath, OrcFile.readerOptions(conf).filesystem(fs));
  assertEquals(2, reader.getNumberOfRows());
  assertEquals(1740, reader.getRawDataSize());
  assertEquals(8, reader.getRawDataSizeOfColumns(Lists.newArrayList("boolean1")));
  assertEquals(8, reader.getRawDataSizeOfColumns(Lists.newArrayList("byte1")));
  assertEquals(8, reader.getRawDataSizeOfColumns(Lists.newArrayList("short1")));
  assertEquals(8, reader.getRawDataSizeOfColumns(Lists.newArrayList("int1")));
  assertEquals(16, reader.getRawDataSizeOfColumns(Lists.newArrayList("long1")));
  assertEquals(8, reader.getRawDataSizeOfColumns(Lists.newArrayList("float1")));
  assertEquals(16, reader.getRawDataSizeOfColumns(Lists.newArrayList("double1")));
  assertEquals(5, reader.getRawDataSizeOfColumns(Lists.newArrayList("bytes1")));
  assertEquals(172, reader.getRawDataSizeOfColumns(Lists.newArrayList("string1")));
  assertEquals(455, reader.getRawDataSizeOfColumns(Lists.newArrayList("list")));
  assertEquals(368, reader.getRawDataSizeOfColumns(Lists.newArrayList("map")));
  assertEquals(364, reader.getRawDataSizeOfColumns(Lists.newArrayList("middle")));
  assertEquals(80, reader.getRawDataSizeOfColumns(Lists.newArrayList("ts")));
  assertEquals(224, reader.getRawDataSizeOfColumns(Lists.newArrayList("decimal1")));
  assertEquals(88, reader.getRawDataSizeOfColumns(Lists.newArrayList("ts", "int1")));
  assertEquals(1195, reader.getRawDataSizeOfColumns(Lists.newArrayList("middle", "list", "map", "float1")));
  assertEquals(185, reader.getRawDataSizeOfColumns(Lists.newArrayList("bytes1", "byte1", "string1")));
  assertEquals(rawDataSize, reader.getRawDataSizeOfColumns(Lists.newArrayList("boolean1", "byte1", "short1",
      "int1", "long1", "float1", "double1", "bytes1", "string1", "list", "map", "middle", "ts", "decimal1")));
  // check the stats
  ColumnStatistics[] stats = reader.getStatistics();
  assertEquals(2, stats[1].getNumberOfValues());
  assertEquals(1, ((BooleanColumnStatistics) stats[1]).getFalseCount());
  assertEquals(1, ((BooleanColumnStatistics) stats[1]).getTrueCount());
  assertEquals("count: 2 hasNull: false true: 1", stats[1].toString());
  assertEquals(2048, ((IntegerColumnStatistics) stats[3]).getMaximum());
  assertEquals(1024, ((IntegerColumnStatistics) stats[3]).getMinimum());
  assertEquals(true, ((IntegerColumnStatistics) stats[3]).isSumDefined());
  assertEquals(3072, ((IntegerColumnStatistics) stats[3]).getSum());
  assertEquals("count: 2 hasNull: false min: 1024 max: 2048 sum: 3072", stats[3].toString());
  assertEquals(Long.MAX_VALUE, ((IntegerColumnStatistics) stats[5]).getMaximum());
  assertEquals(Long.MAX_VALUE, ((IntegerColumnStatistics) stats[5]).getMinimum());
  assertEquals(false, ((IntegerColumnStatistics) stats[5]).isSumDefined());
  assertEquals("count: 2 hasNull: false min: 9223372036854775807 max: 9223372036854775807", stats[5].toString());
  assertEquals(-15.0, ((DoubleColumnStatistics) stats[7]).getMinimum());
  assertEquals(-5.0, ((DoubleColumnStatistics) stats[7]).getMaximum());
  assertEquals(-20.0, ((DoubleColumnStatistics) stats[7]).getSum(), 0.00001);
  assertEquals("count: 2 hasNull: false min: -15.0 max: -5.0 sum: -20.0", stats[7].toString());
  assertEquals(5, ((BinaryColumnStatistics) stats[8]).getSum());
  assertEquals("count: 2 hasNull: false sum: 5", stats[8].toString());
  assertEquals("bye", ((StringColumnStatistics) stats[9]).getMinimum());
  assertEquals("hi", ((StringColumnStatistics) stats[9]).getMaximum());
  assertEquals(5, ((StringColumnStatistics) stats[9]).getSum());
  assertEquals("count: 2 hasNull: false min: bye max: hi sum: 5", stats[9].toString());
}
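Stripped of the assertions, the write-then-read-stats round trip above follows a small pattern. Here is a sketch of it with a hypothetical TinyRow class in place of BigRow; the path and Configuration are assumed to be set up the way the test fixture sets up testFilePath and conf:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.io.orc.OrcFile;
import org.apache.hadoop.hive.ql.io.orc.Reader;
import org.apache.hadoop.hive.ql.io.orc.Writer;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;

public class OrcStatsSketch {

  // Hypothetical two-field row type used only for this sketch.
  public static class TinyRow {
    public int id;
    public String name;
    public TinyRow(int id, String name) { this.id = id; this.name = name; }
  }

  public static void writeAndReport(Configuration conf, Path path) throws Exception {
    ObjectInspector inspector;
    synchronized (OrcStatsSketch.class) {  // same guard the test places around the factory call
      inspector = ObjectInspectorFactory.getReflectionObjectInspector(
          TinyRow.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
    }
    Writer writer = OrcFile.createWriter(path, OrcFile.writerOptions(conf).inspector(inspector));
    writer.addRow(new TinyRow(1, "hi"));
    writer.addRow(new TinyRow(2, "bye"));
    writer.close();
    Reader reader = OrcFile.createReader(path, OrcFile.readerOptions(conf));
    // Row count and raw data size are available from both the writer and the reader.
    System.out.println(reader.getNumberOfRows() + " rows, raw size " + reader.getRawDataSize());
  }
}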
Use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.getReflectionObjectInspector in project hive by apache.
From the class TestOrcSerDeStats, method testOrcSerDeStatsSimpleWithNulls:
@Test
public void testOrcSerDeStatsSimpleWithNulls() throws Exception {
  ObjectInspector inspector;
  synchronized (TestOrcSerDeStats.class) {
    inspector = ObjectInspectorFactory.getReflectionObjectInspector(
        SimpleStruct.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
  }
  Writer writer = OrcFile.createWriter(testFilePath,
      OrcFile.writerOptions(conf).inspector(inspector).stripeSize(10000).bufferSize(10000));
  for (int row = 0; row < 1000; row++) {
    if (row % 2 == 0) {
      writer.addRow(new SimpleStruct(new BytesWritable(new byte[] { 1, 2, 3 }), "hi"));
    } else {
      writer.addRow(null);
    }
  }
  writer.close();
  // stats from writer
  assertEquals(1000, writer.getNumberOfRows());
  assertEquals(44500, writer.getRawDataSize());
  Reader reader = OrcFile.createReader(testFilePath, OrcFile.readerOptions(conf).filesystem(fs));
  // stats from reader
  assertEquals(1000, reader.getNumberOfRows());
  assertEquals(44500, reader.getRawDataSize());
  assertEquals(1500, reader.getRawDataSizeOfColumns(Lists.newArrayList("bytes1")));
  assertEquals(43000, reader.getRawDataSizeOfColumns(Lists.newArrayList("string1")));
  assertEquals(44500, reader.getRawDataSizeOfColumns(Lists.newArrayList("bytes1", "string1")));
}
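The per-column size checks above boil down to one call. A small sketch, assuming an already-opened Reader for the file written in this test; the column names bytes1 and string1 come from SimpleStruct, and the printColumnSizes helper is hypothetical:

import java.util.Arrays;
import org.apache.hadoop.hive.ql.io.orc.Reader;

public class ColumnSizeSketch {
  public static void printColumnSizes(Reader reader) {
    // getRawDataSizeOfColumns sums the raw sizes of the named top-level columns.
    System.out.println("bytes1:  " + reader.getRawDataSizeOfColumns(Arrays.asList("bytes1")));
    System.out.println("string1: " + reader.getRawDataSizeOfColumns(Arrays.asList("string1")));
    System.out.println("both:    " + reader.getRawDataSizeOfColumns(Arrays.asList("bytes1", "string1")));
  }
}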