Example usage of org.apache.drill.exec.vector.complex.writer.BaseWriter.DictWriter in the Apache Drill project, taken from the createFieldConverter method of the DrillParquetGroupConverter class.
/**
 * Builds the Parquet-to-Drill converter for a single field of the schema.
 *
 * <p>Primitive fields get a primitive converter directly. Group fields are
 * dispatched, in order, to: a logical LIST converter, a logical MAP (dict)
 * converter (only when the map-support option is enabled), a repeated-group
 * converter (either skipped-intermediate or a repeated map), or a plain map
 * converter. The order of these checks is significant and preserved.
 *
 * @param skipRepeated when true, a REPEATED group is handled by an
 *                     intermediate converter writing into {@code baseWriter}
 * @param fieldType    Parquet type of the field being converted
 * @param name         field name used to obtain the child writer
 * @param colNextChild projection path segment below this field, if any
 * @return converter appropriate for {@code fieldType}
 */
private Converter createFieldConverter(boolean skipRepeated, Type fieldType, String name, PathSegment colNextChild) {
  if (fieldType.isPrimitive()) {
    return getConverterForType(name, fieldType.asPrimitiveType());
  }

  // Walk past unnamed (array-index) segments to the first named child, if present.
  PathSegment namedChild = colNextChild;
  while (namedChild != null && !namedChild.isNamed()) {
    namedChild = namedChild.getChild();
  }
  Collection<SchemaPath> columns;
  if (namedChild == null) {
    columns = Collections.emptyList();
  } else {
    columns = Collections.singletonList(new SchemaPath(namedChild.getNameSegment()));
  }

  GroupType groupType = fieldType.asGroupType();

  if (ParquetReaderUtility.isLogicalListType(groupType)) {
    // Parquet logical LIST annotation -> Drill list writer.
    BaseWriter listWriter = getWriter(name, MapWriter::list, ListWriter::list);
    return new DrillParquetGroupConverter(mutator, listWriter, groupType, columns, options, containsCorruptedDates, true, converterName);
  }

  if (options.getOption(ExecConstants.PARQUET_READER_ENABLE_MAP_SUPPORT_VALIDATOR)
      && ParquetReaderUtility.isLogicalMapType(groupType)) {
    // Parquet logical MAP annotation -> Drill dict writer (option-gated).
    BaseWriter dictWriter = getWriter(name, MapWriter::dict, ListWriter::dict);
    return new DrillParquetMapGroupConverter(mutator, (DictWriter) dictWriter, groupType, options, containsCorruptedDates);
  }

  if (fieldType.isRepetition(Repetition.REPEATED)) {
    if (skipRepeated) {
      // Caller asked to skip the repetition level: write through baseWriter.
      return new DrillIntermediateParquetGroupConverter(mutator, baseWriter, groupType, columns, options, containsCorruptedDates, false, converterName);
    }
    // Repeated group materializes as a list of maps.
    BaseWriter repeatedMapWriter = getWriter(name, (m, s) -> m.list(s).map(), l -> l.list().map());
    return new DrillParquetGroupConverter(mutator, repeatedMapWriter, groupType, columns, options, containsCorruptedDates, false, converterName);
  }

  // Ordinary (non-repeated, non-annotated) group -> plain map writer.
  BaseWriter mapWriter = getWriter(name, MapWriter::map, ListWriter::map);
  return new DrillParquetGroupConverter(mutator, mapWriter, groupType, columns, options, containsCorruptedDates, false, converterName);
}
Aggregations