Example 31 with MessageType

Use of org.apache.parquet.schema.MessageType in project hive by apache.

From the class TestDataWritableWriter, the method writeParquetRecord:

private void writeParquetRecord(String schema, ParquetHiveRecord record, TimeZone timeZone) throws SerDeException {
    MessageType fileSchema = MessageTypeParser.parseMessageType(schema);
    DataWritableWriter hiveParquetWriter = new DataWritableWriter(mockRecordConsumer, fileSchema, timeZone);
    hiveParquetWriter.write(record);
}
Also used: DataWritableWriter (org.apache.hadoop.hive.ql.io.parquet.write.DataWritableWriter), MessageType (org.apache.parquet.schema.MessageType)
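For context, the schema string handed to writeParquetRecord is parsed with MessageTypeParser. The short sketch below shows that parse step in isolation and how the resulting MessageType can be inspected; the class name and schema text are illustrative and not part of the Hive test.

import org.apache.parquet.schema.MessageType;
import org.apache.parquet.schema.MessageTypeParser;
import org.apache.parquet.schema.Type;

public class MessageTypeParseSketch {
    public static void main(String[] args) {
        // Parse a Parquet schema from its textual representation, as the test does.
        MessageType schema = MessageTypeParser.parseMessageType(
            "message hive_schema {\n"
            + "  required int32 id;\n"
            + "  optional binary name (UTF8);\n"
            + "}");
        // Walk the top-level fields and print each name with its primitive type.
        for (Type field : schema.getFields()) {
            System.out.println(field.getName() + " : " + field.asPrimitiveType().getPrimitiveTypeName());
        }
    }
}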

Example 32 with MessageType

Use of org.apache.parquet.schema.MessageType in project hive by apache.

From the class TestConvertAstToSearchArg, the method testExpression10:

@Test
public void testExpression10() throws Exception {
    /* id >= 10 and not (10 > id) */
    String exprStr = "<?xml version=\"1.0\" encoding=\"UTF-8\"?> \n" + "<java version=\"1.6.0_31\" class=\"java.beans.XMLDecoder\"> \n" + " <object class=\"org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc\"> \n" + "  <void property=\"children\"> \n" + "   <object class=\"java.util.ArrayList\"> \n" + "    <void method=\"add\"> \n" + "     <object class=\"org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc\"> \n" + "      <void property=\"children\"> \n" + "       <object class=\"java.util.ArrayList\"> \n" + "        <void method=\"add\"> \n" + "         <object class=\"org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc\"> \n" + "          <void property=\"column\"> \n" + "           <string>id</string> \n" + "          </void> \n" + "          <void property=\"tabAlias\"> \n" + "           <string>orc_people</string> \n" + "          </void> \n" + "          <void property=\"typeInfo\"> \n" + "           <object id=\"PrimitiveTypeInfo0\" class=\"org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo\"> \n" + "            <void property=\"typeName\"> \n" + "             <string>int</string> \n" + "            </void> \n" + "           </object> \n" + "          </void> \n" + "         </object> \n" + "        </void> \n" + "        <void method=\"add\"> \n" + "         <object class=\"org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc\"> \n" + "          <void property=\"typeInfo\"> \n" + "           <object idref=\"PrimitiveTypeInfo0\"/> \n" + "          </void> \n" + "          <void property=\"value\"> \n" + "           <int>10</int> \n" + "          </void> \n" + "         </object> \n" + "        </void> \n" + "       </object> \n" + "      </void> \n" + "      <void property=\"genericUDF\"> \n" + "       <object class=\"org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqualOrGreaterThan\"/> \n" + "      </void> \n" + "      <void property=\"typeInfo\"> \n" + "       <object id=\"PrimitiveTypeInfo1\" class=\"org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo\"> \n" + "        <void property=\"typeName\"> \n" + "         <string>boolean</string> \n" + "        </void> \n" + "       </object> \n" + "      </void> \n" + "     </object> \n" + "    </void> \n" + "    <void method=\"add\"> \n" + "     <object class=\"org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc\"> \n" + "      <void property=\"children\"> \n" + "       <object class=\"java.util.ArrayList\"> \n" + "        <void method=\"add\"> \n" + "         <object class=\"org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc\"> \n" + "          <void property=\"children\"> \n" + "           <object class=\"java.util.ArrayList\"> \n" + "            <void method=\"add\"> \n" + "             <object class=\"org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc\"> \n" + "              <void property=\"column\"> \n" + "               <string>id</string> \n" + "              </void> \n" + "              <void property=\"tabAlias\"> \n" + "               <string>orc_people</string> \n" + "              </void> \n" + "              <void property=\"typeInfo\"> \n" + "               <object idref=\"PrimitiveTypeInfo0\"/> \n" + "              </void> \n" + "             </object> \n" + "            </void> \n" + "            <void method=\"add\"> \n" + "             <object class=\"org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc\"> \n" + "              <void property=\"typeInfo\"> \n" + "               <object idref=\"PrimitiveTypeInfo0\"/> \n" + "              </void> \n" + "              <void property=\"value\"> \n" + 
"               <int>10</int> \n" + "              </void> \n" + "             </object> \n" + "            </void> \n" + "           </object> \n" + "          </void> \n" + "          <void property=\"genericUDF\"> \n" + "           <object class=\"org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPLessThan\"/> \n" + "          </void> \n" + "          <void property=\"typeInfo\"> \n" + "           <object idref=\"PrimitiveTypeInfo1\"/> \n" + "          </void> \n" + "         </object> \n" + "        </void> \n" + "       </object> \n" + "      </void> \n" + "      <void property=\"genericUDF\"> \n" + "       <object class=\"org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNot\"/> \n" + "      </void> \n" + "      <void property=\"typeInfo\"> \n" + "       <object idref=\"PrimitiveTypeInfo1\"/> \n" + "      </void> \n" + "     </object> \n" + "    </void> \n" + "   </object> \n" + "  </void> \n" + "  <void property=\"genericUDF\"> \n" + "   <object class=\"org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPAnd\"/> \n" + "  </void> \n" + "  <void property=\"typeInfo\"> \n" + "   <object idref=\"PrimitiveTypeInfo1\"/> \n" + "  </void> \n" + " </object> \n" + "</java>";
    SearchArgumentImpl sarg = (SearchArgumentImpl) ConvertAstToSearchArg.create(conf, getFuncDesc(exprStr));
    List<PredicateLeaf> leaves = sarg.getLeaves();
    assertEquals(1, leaves.size());
    MessageType schema = MessageTypeParser.parseMessageType("message test { required int32 id;" + " required binary first_name; }");
    FilterPredicate p = ParquetFilterPredicateConverter.toFilterPredicate(sarg, schema);
    String expected = "and(not(lt(id, 10)), not(lt(id, 10)))";
    assertEquals(expected, p.toString());
    assertEquals(PredicateLeaf.Type.LONG, leaves.get(0).getType());
    assertEquals(PredicateLeaf.Operator.LESS_THAN, leaves.get(0).getOperator());
    assertEquals("id", leaves.get(0).getColumnName());
    assertEquals(10L, leaves.get(0).getLiteral());
    assertEquals("(and (not leaf-0) (not leaf-0))", sarg.getExpression().toString());
    assertNoSharedNodes(sarg.getExpression(), Sets.<ExpressionTree>newIdentityHashSet());
    assertEquals(TruthValue.NO, sarg.evaluate(values(TruthValue.YES)));
    assertEquals(TruthValue.YES, sarg.evaluate(values(TruthValue.NO)));
    assertEquals(TruthValue.NULL, sarg.evaluate(values(TruthValue.NULL)));
    assertEquals(TruthValue.NO_NULL, sarg.evaluate(values(TruthValue.YES_NULL)));
    assertEquals(TruthValue.YES_NULL, sarg.evaluate(values(TruthValue.NO_NULL)));
    assertEquals(TruthValue.YES_NO, sarg.evaluate(values(TruthValue.YES_NO)));
    assertEquals(TruthValue.YES_NO_NULL, sarg.evaluate(values(TruthValue.YES_NO_NULL)));
}
Also used: FilterPredicate (org.apache.parquet.filter2.predicate.FilterPredicate), MessageType (org.apache.parquet.schema.MessageType), Test (org.junit.Test)
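The XML blob above is just a serialized ExprNodeGenericFuncDesc for "id >= 10 and not (10 > id)". A minimal sketch of the same conversion, building the SearchArgument directly with the builder API instead of decoding XML; the import path for ParquetFilterPredicateConverter is assumed to match the test's own imports from Hive's parquet read package.

import org.apache.hadoop.hive.ql.io.parquet.read.ParquetFilterPredicateConverter;
import org.apache.hadoop.hive.ql.io.sarg.PredicateLeaf;
import org.apache.hadoop.hive.ql.io.sarg.SearchArgument;
import org.apache.hadoop.hive.ql.io.sarg.SearchArgumentFactory;
import org.apache.parquet.filter2.predicate.FilterPredicate;
import org.apache.parquet.schema.MessageType;
import org.apache.parquet.schema.MessageTypeParser;

public class SargConversionSketch {
    public static void main(String[] args) {
        // "id >= 10 and not (10 > id)": both sides normalize to not(lt(id, 10)),
        // so the SearchArgument ends up with a single leaf, as the test asserts.
        SearchArgument sarg = SearchArgumentFactory.newBuilder()
            .startAnd()
              .startNot().lessThan("id", PredicateLeaf.Type.LONG, 10L).end()
              .startNot().lessThan("id", PredicateLeaf.Type.LONG, 10L).end()
            .end()
            .build();
        MessageType schema = MessageTypeParser.parseMessageType(
            "message test { required int32 id; required binary first_name; }");
        FilterPredicate p = ParquetFilterPredicateConverter.toFilterPredicate(sarg, schema);
        // Prints: and(not(lt(id, 10)), not(lt(id, 10)))
        System.out.println(p);
    }
}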

Example 33 with MessageType

Use of org.apache.parquet.schema.MessageType in project hive by apache.

From the class TestDataWritableReadSupport, the method testGetProjectedSchema3:

@Test
public void testGetProjectedSchema3() throws Exception {
    MessageType originalMsg = MessageTypeParser.parseMessageType("message hive_schema {\n" + "  optional group structCol {\n" + "    optional int32 a;\n" + "    optional double b;\n" + "  }\n" + "  optional boolean c;\n" + "}\n");
    testConversion("structCol,c", "struct<b:double>,boolean", DataWritableReadSupport.getProjectedSchema(originalMsg, Arrays.asList("structCol", "c"), Arrays.asList(0, 1), Sets.newHashSet("structCol.b", "c")).toString());
}
Also used: MessageType (org.apache.parquet.schema.MessageType), Test (org.junit.Test)
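The expected projection "struct<b:double>,boolean" keeps only the leaves structCol.b and c. As a rough illustration (not part of the test), the projected Parquet schema can also be written out explicitly with the parquet-mr Types builder:

import org.apache.parquet.schema.MessageType;
import org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName;
import org.apache.parquet.schema.Types;

public class ProjectedSchemaSketch {
    public static void main(String[] args) {
        // Equivalent of projecting structCol.b and c out of the original message.
        MessageType projected = Types.buildMessage()
            .optionalGroup()
                .optional(PrimitiveTypeName.DOUBLE).named("b")
                .named("structCol")
            .optional(PrimitiveTypeName.BOOLEAN).named("c")
            .named("hive_schema");
        System.out.println(projected);
    }
}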

Example 34 with MessageType

Use of org.apache.parquet.schema.MessageType in project hive by apache.

From the class TestDataWritableReadSupport, the method testGetProjectedSchema5:

@Test
public void testGetProjectedSchema5() throws Exception {
    MessageType originalMsg = MessageTypeParser.parseMessageType("message hive_schema {\n" + "  optional group structCol {\n" + "    optional int32 a;\n" + "    optional group subStructCol {\n" + "      optional int64 b;\n" + "      optional boolean c;\n" + "    }\n" + "  }\n" + "  optional boolean d;\n" + "}\n");
    testConversion("structCol", "struct<subStructCol:struct<b:bigint,c:boolean>>", DataWritableReadSupport.getProjectedSchema(originalMsg, Arrays.asList("structCol"), Arrays.asList(0), Sets.newHashSet("structCol.subStructCol", "structCol.subStructCol.b", "structCol.subStructCol.c")).toString());
}
Also used: MessageType (org.apache.parquet.schema.MessageType), Test (org.junit.Test)
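The nested-column set passed to getProjectedSchema is keyed by dotted leaf paths such as "structCol.subStructCol.b". A small, illustrative sketch of listing those leaf paths for the schema used above:

import org.apache.parquet.schema.MessageType;
import org.apache.parquet.schema.MessageTypeParser;

public class LeafPathsSketch {
    public static void main(String[] args) {
        MessageType msg = MessageTypeParser.parseMessageType(
            "message hive_schema {\n"
            + "  optional group structCol {\n"
            + "    optional int32 a;\n"
            + "    optional group subStructCol {\n"
            + "      optional int64 b;\n"
            + "      optional boolean c;\n"
            + "    }\n"
            + "  }\n"
            + "  optional boolean d;\n"
            + "}");
        // Each leaf column is identified by its dotted path, matching the
        // nested-column names handed to getProjectedSchema in the test.
        for (String[] path : msg.getPaths()) {
            System.out.println(String.join(".", path));
        }
    }
}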

Example 35 with MessageType

Use of org.apache.parquet.schema.MessageType in project hive by apache.

From the class TestParquetFilterPredicate, the method testFilterColumnsThatDoNoExistOnSchema:

@Test
public void testFilterColumnsThatDoNoExistOnSchema() {
    MessageType schema = MessageTypeParser.parseMessageType("message test { required int32 a; required binary stinger; }");
    SearchArgument sarg = SearchArgumentFactory.newBuilder()
        .startNot()
          .startOr()
            .isNull("a", PredicateLeaf.Type.LONG)
            // Column will be removed from filter
            .between("y", PredicateLeaf.Type.LONG, 10L, 20L)
            // Column will be removed from filter
            .in("z", PredicateLeaf.Type.LONG, 1L, 2L, 3L)
            .nullSafeEquals("a", PredicateLeaf.Type.STRING, "stinger")
          .end()
        .end()
        .build();
    FilterPredicate p = ParquetFilterPredicateConverter.toFilterPredicate(sarg, schema);
    String expected = "and(not(eq(a, null)), not(eq(a, Binary{\"stinger\"})))";
    assertEquals(expected, p.toString());
}
Also used: SearchArgument (org.apache.hadoop.hive.ql.io.sarg.SearchArgument), FilterPredicate (org.apache.parquet.filter2.predicate.FilterPredicate), MessageType (org.apache.parquet.schema.MessageType), Test (org.junit.Test)
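Columns y and z are dropped from the filter because they do not exist in the Parquet schema, leaving only the predicates on a. A brief sketch of how such a converted FilterPredicate is typically handed to Parquet so that non-matching row groups can be skipped at read time (the wrapper class and method name are illustrative):

import org.apache.hadoop.conf.Configuration;
import org.apache.parquet.filter2.predicate.FilterPredicate;
import org.apache.parquet.hadoop.ParquetInputFormat;

public class PushDownSketch {
    // Register the predicate in the job configuration; ParquetInputFormat
    // applies it when planning and reading splits.
    static void pushDown(Configuration conf, FilterPredicate p) {
        ParquetInputFormat.setFilterPredicate(conf, p);
    }
}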

Aggregations

MessageType (org.apache.parquet.schema.MessageType): 46
Test (org.junit.Test): 25
FilterPredicate (org.apache.parquet.filter2.predicate.FilterPredicate): 15
SearchArgument (org.apache.hadoop.hive.ql.io.sarg.SearchArgument): 9
Path (org.apache.hadoop.fs.Path): 8
Type (org.apache.parquet.schema.Type): 7
GroupType (org.apache.parquet.schema.GroupType): 6
Configuration (org.apache.hadoop.conf.Configuration): 5
BlockMetaData (org.apache.parquet.hadoop.metadata.BlockMetaData): 5
OriginalType (org.apache.parquet.schema.OriginalType): 4
ArrayList (java.util.ArrayList): 3
HashMap (java.util.HashMap): 3
SchemaPath (org.apache.drill.common.expression.SchemaPath): 3
HiveChar (org.apache.hadoop.hive.common.type.HiveChar): 3
ParquetMetadata (org.apache.parquet.hadoop.metadata.ParquetMetadata): 3
PrimitiveType (org.apache.parquet.schema.PrimitiveType): 3
DimensionSchema (io.druid.data.input.impl.DimensionSchema): 2
File (java.io.File): 2
HashSet (java.util.HashSet): 2
FileSystem (org.apache.hadoop.fs.FileSystem): 2