Use of org.apache.parquet.schema.MessageType in the Apache Hive project.
From the class TestDataWritableWriter, method writeParquetRecord:
/**
 * Serializes {@code record} into the mocked Parquet record consumer.
 *
 * @param schema   Parquet schema in its textual (message) representation
 * @param record   Hive record to write
 * @param timeZone time zone used by the writer for temporal values
 * @throws SerDeException if the writer fails to serialize the record
 */
private void writeParquetRecord(String schema, ParquetHiveRecord record, TimeZone timeZone) throws SerDeException {
  // Parse the textual schema, then stream the record straight into the mock consumer.
  final MessageType parsedSchema = MessageTypeParser.parseMessageType(schema);
  new DataWritableWriter(mockRecordConsumer, parsedSchema, timeZone).write(record);
}
Use of org.apache.parquet.schema.MessageType in the Apache Hive project.
From the class TestConvertAstToSearchArg, method testExpression10:
@Test
public void testExpression10() throws Exception {
/* id >= 10 and not (10 > id) */
// XMLDecoder-serialized ExprNodeGenericFuncDesc tree for the predicate above:
// an AND whose children are (id >= 10) and NOT(10 > id, encoded as id < 10).
String exprStr = "<?xml version=\"1.0\" encoding=\"UTF-8\"?> \n" + "<java version=\"1.6.0_31\" class=\"java.beans.XMLDecoder\"> \n" + " <object class=\"org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc\"> \n" + " <void property=\"children\"> \n" + " <object class=\"java.util.ArrayList\"> \n" + " <void method=\"add\"> \n" + " <object class=\"org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc\"> \n" + " <void property=\"children\"> \n" + " <object class=\"java.util.ArrayList\"> \n" + " <void method=\"add\"> \n" + " <object class=\"org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc\"> \n" + " <void property=\"column\"> \n" + " <string>id</string> \n" + " </void> \n" + " <void property=\"tabAlias\"> \n" + " <string>orc_people</string> \n" + " </void> \n" + " <void property=\"typeInfo\"> \n" + " <object id=\"PrimitiveTypeInfo0\" class=\"org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo\"> \n" + " <void property=\"typeName\"> \n" + " <string>int</string> \n" + " </void> \n" + " </object> \n" + " </void> \n" + " </object> \n" + " </void> \n" + " <void method=\"add\"> \n" + " <object class=\"org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc\"> \n" + " <void property=\"typeInfo\"> \n" + " <object idref=\"PrimitiveTypeInfo0\"/> \n" + " </void> \n" + " <void property=\"value\"> \n" + " <int>10</int> \n" + " </void> \n" + " </object> \n" + " </void> \n" + " </object> \n" + " </void> \n" + " <void property=\"genericUDF\"> \n" + " <object class=\"org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqualOrGreaterThan\"/> \n" + " </void> \n" + " <void property=\"typeInfo\"> \n" + " <object id=\"PrimitiveTypeInfo1\" class=\"org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo\"> \n" + " <void property=\"typeName\"> \n" + " <string>boolean</string> \n" + " </void> \n" + " </object> \n" + " </void> \n" + " </object> \n" + " </void> \n" + " <void method=\"add\"> \n" + " <object class=\"org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc\"> \n" + " <void 
property=\"children\"> \n" + " <object class=\"java.util.ArrayList\"> \n" + " <void method=\"add\"> \n" + " <object class=\"org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc\"> \n" + " <void property=\"children\"> \n" + " <object class=\"java.util.ArrayList\"> \n" + " <void method=\"add\"> \n" + " <object class=\"org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc\"> \n" + " <void property=\"column\"> \n" + " <string>id</string> \n" + " </void> \n" + " <void property=\"tabAlias\"> \n" + " <string>orc_people</string> \n" + " </void> \n" + " <void property=\"typeInfo\"> \n" + " <object idref=\"PrimitiveTypeInfo0\"/> \n" + " </void> \n" + " </object> \n" + " </void> \n" + " <void method=\"add\"> \n" + " <object class=\"org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc\"> \n" + " <void property=\"typeInfo\"> \n" + " <object idref=\"PrimitiveTypeInfo0\"/> \n" + " </void> \n" + " <void property=\"value\"> \n" + " <int>10</int> \n" + " </void> \n" + " </object> \n" + " </void> \n" + " </object> \n" + " </void> \n" + " <void property=\"genericUDF\"> \n" + " <object class=\"org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPLessThan\"/> \n" + " </void> \n" + " <void property=\"typeInfo\"> \n" + " <object idref=\"PrimitiveTypeInfo1\"/> \n" + " </void> \n" + " </object> \n" + " </void> \n" + " </object> \n" + " </void> \n" + " <void property=\"genericUDF\"> \n" + " <object class=\"org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNot\"/> \n" + " </void> \n" + " <void property=\"typeInfo\"> \n" + " <object idref=\"PrimitiveTypeInfo1\"/> \n" + " </void> \n" + " </object> \n" + " </void> \n" + " </object> \n" + " </void> \n" + " <void property=\"genericUDF\"> \n" + " <object class=\"org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPAnd\"/> \n" + " </void> \n" + " <void property=\"typeInfo\"> \n" + " <object idref=\"PrimitiveTypeInfo1\"/> \n" + " </void> \n" + " </object> \n" + "</java>";
// Both conjuncts normalize to the same leaf: NOT(id < 10).
SearchArgumentImpl sarg = (SearchArgumentImpl) ConvertAstToSearchArg.create(conf, getFuncDesc(exprStr));
List<PredicateLeaf> leaves = sarg.getLeaves();
// The two textually different sub-expressions deduplicate into a single leaf.
assertEquals(1, leaves.size());
MessageType schema = MessageTypeParser.parseMessageType("message test { required int32 id;" + " required binary first_name; }");
// Converting to a Parquet filter keeps both AND branches, each as not(lt(id, 10)).
FilterPredicate p = ParquetFilterPredicateConverter.toFilterPredicate(sarg, schema);
String expected = "and(not(lt(id, 10)), not(lt(id, 10)))";
assertEquals(expected, p.toString());
// The int literal 10 is widened to LONG by the SARG builder.
assertEquals(PredicateLeaf.Type.LONG, leaves.get(0).getType());
assertEquals(PredicateLeaf.Operator.LESS_THAN, leaves.get(0).getOperator());
assertEquals("id", leaves.get(0).getColumnName());
assertEquals(10L, leaves.get(0).getLiteral());
assertEquals("(and (not leaf-0) (not leaf-0))", sarg.getExpression().toString());
// Each AND branch must own its own NOT node (no shared subtrees).
assertNoSharedNodes(sarg.getExpression(), Sets.<ExpressionTree>newIdentityHashSet());
// Truth-table evaluation of (NOT leaf-0) AND (NOT leaf-0) over all leaf values.
assertEquals(TruthValue.NO, sarg.evaluate(values(TruthValue.YES)));
assertEquals(TruthValue.YES, sarg.evaluate(values(TruthValue.NO)));
assertEquals(TruthValue.NULL, sarg.evaluate(values(TruthValue.NULL)));
assertEquals(TruthValue.NO_NULL, sarg.evaluate(values(TruthValue.YES_NULL)));
assertEquals(TruthValue.YES_NULL, sarg.evaluate(values(TruthValue.NO_NULL)));
assertEquals(TruthValue.YES_NO, sarg.evaluate(values(TruthValue.YES_NO)));
assertEquals(TruthValue.YES_NO_NULL, sarg.evaluate(values(TruthValue.YES_NO_NULL)));
}
Use of org.apache.parquet.schema.MessageType in the Apache Hive project.
From the class TestDataWritableReadSupport, method testGetProjectedSchema3:
@Test
public void testGetProjectedSchema3() throws Exception {
// File schema: a two-field struct plus a top-level boolean.
String fileSchema = "message hive_schema {\n" + " optional group structCol {\n" + " optional int32 a;\n" + " optional double b;\n" + " }\n" + " optional boolean c;\n" + "}\n";
MessageType fullSchema = MessageTypeParser.parseMessageType(fileSchema);
// Project structCol and c, but request only the nested path structCol.b,
// so structCol.a must be pruned from the projected schema.
MessageType projected = DataWritableReadSupport.getProjectedSchema(fullSchema, Arrays.asList("structCol", "c"), Arrays.asList(0, 1), Sets.newHashSet("structCol.b", "c"));
testConversion("structCol,c", "struct<b:double>,boolean", projected.toString());
}
Use of org.apache.parquet.schema.MessageType in the Apache Hive project.
From the class TestDataWritableReadSupport, method testGetProjectedSchema5:
@Test
public void testGetProjectedSchema5() throws Exception {
// File schema: a struct containing a scalar plus a nested struct, and a top-level boolean.
String fileSchema = "message hive_schema {\n" + " optional group structCol {\n" + " optional int32 a;\n" + " optional group subStructCol {\n" + " optional int64 b;\n" + " optional boolean c;\n" + " }\n" + " }\n" + " optional boolean d;\n" + "}\n";
MessageType fullSchema = MessageTypeParser.parseMessageType(fileSchema);
// Project only structCol, requesting the whole nested subStructCol;
// the sibling scalar structCol.a and top-level d must be pruned.
MessageType projected = DataWritableReadSupport.getProjectedSchema(fullSchema, Arrays.asList("structCol"), Arrays.asList(0), Sets.newHashSet("structCol.subStructCol", "structCol.subStructCol.b", "structCol.subStructCol.c"));
testConversion("structCol", "struct<subStructCol:struct<b:bigint,c:boolean>>", projected.toString());
}
Use of org.apache.parquet.schema.MessageType in the Apache Hive project.
From the class TestParquetFilterPredicate, method testFilterColumnsThatDoNoExistOnSchema:
@Test
public void testFilterColumnsThatDoNoExistOnSchema() {
// The file schema only knows columns "a" and "stinger"; predicates on the
// unknown columns "y" and "z" must be dropped during conversion.
MessageType schema = MessageTypeParser.parseMessageType("message test { required int32 a; required binary stinger; }");
SearchArgument sarg = SearchArgumentFactory.newBuilder()
    .startNot()
    .startOr()
    .isNull("a", PredicateLeaf.Type.LONG)
    // Column will be removed from filter
    .between("y", PredicateLeaf.Type.LONG, 10L, 20L)
    // Column will be removed from filter
    .in("z", PredicateLeaf.Type.LONG, 1L, 2L, 3L)
    .nullSafeEquals("a", PredicateLeaf.Type.STRING, "stinger")
    .end()
    .end()
    .build();
// After dropping y and z, NOT(OR(...)) collapses to AND of the two negated "a" leaves.
FilterPredicate p = ParquetFilterPredicateConverter.toFilterPredicate(sarg, schema);
assertEquals("and(not(eq(a, null)), not(eq(a, Binary{\"stinger\"})))", p.toString());
}
Aggregations of MessageType usages end here.