Use of org.apache.drill.exec.vector.complex.reader.FieldReader in the Apache Drill project.
Example from the class JsonWriter, method writeValue.
/**
 * Recursively serializes the value currently positioned under {@code reader}
 * to the JSON output generator {@code gen}.
 * <p>
 * Dispatch is two-level: first on the {@link DataMode} (OPTIONAL/REQUIRED
 * values are written as scalars or objects, REPEATED values as a JSON array),
 * then on the {@link MinorType} to pick the matching generator method.
 *
 * @param reader field reader positioned on the value to write
 * @throws JsonGenerationException if the generator rejects the output sequence
 * @throws IOException if writing to the underlying output fails
 * @throws IllegalStateException if the minor type is not handled
 */
private void writeValue(FieldReader reader) throws JsonGenerationException, IOException {
  final DataMode m = reader.getType().getMode();
  final MinorType mt = reader.getType().getMinorType();
  switch (m) {
  case OPTIONAL:
  case REQUIRED:
    switch (mt) {
    case FLOAT4:
      gen.writeFloat(reader);
      break;
    case FLOAT8:
      gen.writeDouble(reader);
      break;
    case INT:
      gen.writeInt(reader);
      break;
    case SMALLINT:
      gen.writeSmallInt(reader);
      break;
    case TINYINT:
      gen.writeTinyInt(reader);
      break;
    case BIGINT:
      gen.writeBigInt(reader);
      break;
    case BIT:
      gen.writeBoolean(reader);
      break;
    case DATE:
      gen.writeDate(reader);
      break;
    case TIME:
      gen.writeTime(reader);
      break;
    case TIMESTAMP:
      gen.writeTimestamp(reader);
      break;
    case INTERVALYEAR:
    case INTERVALDAY:
    case INTERVAL:
      gen.writeInterval(reader);
      break;
    case DECIMAL28DENSE:
    case DECIMAL28SPARSE:
    case DECIMAL38DENSE:
    case DECIMAL38SPARSE:
    case DECIMAL9:
    case DECIMAL18:
    case VARDECIMAL:
      gen.writeDecimal(reader);
      break;
    case LIST:
      // LIST is a pseudo type: this reader doesn't hold the real element
      // reader, so iterate with next() and recurse into each element reader.
      gen.writeStartArray();
      while (reader.next()) {
        writeValue(reader.reader());
      }
      gen.writeEndArray();
      break;
    case MAP:
      gen.writeStartObject();
      if (reader.isSet()) {
        for (String name : reader) {
          FieldReader childReader = reader.reader(name);
          // Unset (null) children are skipped entirely rather than emitted
          // as explicit JSON nulls.
          if (childReader.isSet()) {
            gen.writeFieldName(name);
            writeValue(childReader);
          }
        }
      }
      gen.writeEndObject();
      break;
    case NULL:
    case LATE:
      gen.writeUntypedNull();
      break;
    case VAR16CHAR:
      gen.writeVar16Char(reader);
      break;
    case VARBINARY:
      gen.writeBinary(reader);
      break;
    default:
      // Fail fast on unhandled types, mirroring the REPEATED branch below.
      // Silently falling through could leave the generator in an invalid
      // state (e.g. a field name with no value after writeFieldName() in
      // the MAP branch above).
      throw new IllegalStateException(String.format("Unable to handle type %s.", mt));
    }
    break;
  case REPEATED:
    gen.writeStartArray();
    switch (mt) {
    case FLOAT4:
      for (int i = 0; i < reader.size(); i++) {
        gen.writeFloat(i, reader);
      }
      break;
    case FLOAT8:
      for (int i = 0; i < reader.size(); i++) {
        gen.writeDouble(i, reader);
      }
      break;
    case INT:
      for (int i = 0; i < reader.size(); i++) {
        gen.writeInt(i, reader);
      }
      break;
    case SMALLINT:
      for (int i = 0; i < reader.size(); i++) {
        gen.writeSmallInt(i, reader);
      }
      break;
    case TINYINT:
      for (int i = 0; i < reader.size(); i++) {
        gen.writeTinyInt(i, reader);
      }
      break;
    case BIGINT:
      for (int i = 0; i < reader.size(); i++) {
        gen.writeBigInt(i, reader);
      }
      break;
    case BIT:
      for (int i = 0; i < reader.size(); i++) {
        gen.writeBoolean(i, reader);
      }
      break;
    case DATE:
      for (int i = 0; i < reader.size(); i++) {
        gen.writeDate(i, reader);
      }
      break;
    case TIME:
      for (int i = 0; i < reader.size(); i++) {
        gen.writeTime(i, reader);
      }
      break;
    case TIMESTAMP:
      for (int i = 0; i < reader.size(); i++) {
        gen.writeTimestamp(i, reader);
      }
      break;
    case INTERVALYEAR:
    case INTERVALDAY:
    case INTERVAL:
      for (int i = 0; i < reader.size(); i++) {
        gen.writeInterval(i, reader);
      }
      break;
    case DECIMAL28DENSE:
    case DECIMAL28SPARSE:
    case DECIMAL38DENSE:
    case DECIMAL38SPARSE:
    case DECIMAL9:
    case DECIMAL18:
    case VARDECIMAL:
      for (int i = 0; i < reader.size(); i++) {
        gen.writeDecimal(i, reader);
      }
      break;
    case LIST:
      // NOTE(review): the outer size() loop wraps a next()-driven inner
      // loop; once the first pass exhausts the reader, later iterations
      // presumably see no remaining elements — confirm against the LIST
      // reader contract before simplifying.
      for (int i = 0; i < reader.size(); i++) {
        while (reader.next()) {
          writeValue(reader.reader());
        }
      }
      break;
    case MAP:
      // Each repeated MAP element becomes its own JSON object within the array.
      while (reader.next()) {
        gen.writeStartObject();
        for (String name : reader) {
          FieldReader mapField = reader.reader(name);
          if (mapField.isSet()) {
            gen.writeFieldName(name);
            writeValue(mapField);
          }
        }
        gen.writeEndObject();
      }
      break;
    case NULL:
      // A repeated NULL contributes nothing; the array is simply left empty.
      break;
    case VAR16CHAR:
      for (int i = 0; i < reader.size(); i++) {
        gen.writeVar16Char(i, reader);
      }
      break;
    case VARBINARY:
      for (int i = 0; i < reader.size(); i++) {
        gen.writeBinary(i, reader);
      }
      break;
    default:
      throw new IllegalStateException(String.format("Unable to handle type %s.", mt));
    }
    gen.writeEndArray();
    break;
  }
}
Use of org.apache.drill.exec.vector.complex.reader.FieldReader in the Apache Drill project.
Example from the class FunctionConverter, method getHolder.
/**
 * Builds a {@link DrillFuncHolder} from an annotated UDF class description.
 * <p>
 * Validates the {@code @FunctionTemplate} annotation, classifies every field of
 * the class as exactly one of {@code @Param}, {@code @Output}, {@code @Workspace}
 * or {@code @Inject}, collects parameter/workspace/output references, and finally
 * selects the holder implementation based on the template's function scope.
 * Any validation problem is reported via {@code failure(...)} rather than thrown.
 *
 * @param func        descriptor of the annotated function class
 * @param classLoader class loader used to initialize the function's source
 * @return the constructed holder, or the result of {@code failure(...)} on any
 *         validation or construction error
 */
public DrillFuncHolder getHolder(AnnotatedClassDescriptor func, ClassLoader classLoader) {
FunctionTemplate template = func.getAnnotationProxy(FunctionTemplate.class);
if (template == null) {
return failure("Class does not declare FunctionTemplate annotation.", func);
}
// Exactly one of 'name' or 'names' must be supplied on the template.
String name = template.name();
List<String> names = Arrays.asList(template.names());
if (name.isEmpty() && names.isEmpty()) {
// none set
return failure("Must define 'name' or 'names'", func);
}
if (!name.isEmpty() && !names.isEmpty()) {
// both are set
return failure("Must use only one annotations 'name' or 'names', not both", func);
}
// start by getting field information.
List<ValueReference> params = Lists.newArrayList();
List<WorkspaceReference> workspaceFields = Lists.newArrayList();
ValueReference outputField = null;
// Tracks whether a vararg (array-typed) parameter has been seen; at most one
// is allowed and it must be the last parameter.
int varArgsCount = 0;
for (FieldDescriptor field : func.getFields()) {
Param param = field.getAnnotationProxy(Param.class);
Output output = field.getAnnotationProxy(Output.class);
Workspace workspace = field.getAnnotationProxy(Workspace.class);
Inject inject = field.getAnnotationProxy(Inject.class);
// Each field must carry exactly one of the four role annotations.
Annotation[] annotations = { param, output, workspace, inject };
int annotationCount = 0;
for (Annotation annotationDescriptor : annotations) {
if (annotationDescriptor != null) {
annotationCount += 1;
}
}
if (annotationCount == 0) {
return failure("The field must be either a @Param, @Output, @Inject or @Workspace field.", func, field);
} else if (annotationCount > 1) {
return failure("The field must be only one of @Param, @Output, @Inject or @Workspace. It currently has more than one of these annotations.", func, field);
}
// TODO(Julien): verify there are a few of those and we can load them
Class<?> fieldClass = field.getFieldClass();
if (param != null || output != null) {
// An array-typed @Param is a vararg; unwrap to the element type for the
// checks below.
if (Object[].class.isAssignableFrom(fieldClass)) {
fieldClass = fieldClass.getComponentType();
varArgsCount++;
} else if (varArgsCount > 0 && param != null) {
// A non-array @Param appearing after the vararg is an ordering error.
return failure("Vararg should be the last argument in the function.", func, field);
}
if (varArgsCount > 1) {
return failure("Function should contain single vararg argument", func, field);
}
// Special processing for @Param FieldReader
if (param != null && FieldReader.class.isAssignableFrom(fieldClass)) {
ValueReference fieldReaderRef = ValueReference.createFieldReaderRef(field.getName());
fieldReaderRef.setVarArg(varArgsCount > 0);
params.add(fieldReaderRef);
continue;
}
// Special processing for @Output ComplexWriter
if (output != null && ComplexWriter.class.isAssignableFrom(fieldClass)) {
if (outputField != null) {
return failure("You've declared more than one @Output field.\n" + "You must declare one and only @Output field per Function class.", func, field);
} else {
outputField = ValueReference.createComplexWriterRef(field.getName());
}
continue;
}
// check that param and output are value holders.
if (!ValueHolder.class.isAssignableFrom(fieldClass)) {
return failure(String.format("The field doesn't holds value of type %s which does not implement the ValueHolder or ComplexWriter interfaces.\n" + "All fields of type @Param or @Output must extend this interface.", fieldClass), func, field);
}
// get the type field from the value holder.
// Every ValueHolder class is expected to expose a static TYPE field
// describing its MajorType; read it reflectively.
MajorType type;
try {
type = getStaticFieldValue("TYPE", fieldClass, MajorType.class);
} catch (Exception e) {
return failure("Failure while trying to access the ValueHolder's TYPE static variable. All ValueHolders must contain a static TYPE variable that defines their MajorType.", e, func, field);
}
ValueReference p = new ValueReference(type, field.getName());
if (param != null) {
p.setConstant(param.constant());
p.setVarArg(varArgsCount > 0);
params.add(p);
} else {
// @Output on a ValueHolder field; only one output is allowed.
if (outputField != null) {
return failure("You've declared more than one @Output field. You must declare one and only @Output field per Function class.", func, field);
} else {
outputField = p;
}
}
} else {
// workspace work.
// @Workspace and @Inject fields both become WorkspaceReferences; the
// isInject flag distinguishes them.
boolean isInject = inject != null;
if (isInject && UdfUtilities.INJECTABLE_GETTER_METHODS.get(fieldClass) == null) {
return failure(String.format("A %s cannot be injected into a %s," + " available injectable classes are: %s.", fieldClass, DrillFunc.class.getSimpleName(), Joiner.on(",").join(UdfUtilities.INJECTABLE_GETTER_METHODS.keySet())), func, field);
}
WorkspaceReference wsReference = new WorkspaceReference(fieldClass, field.getName(), isInject);
// Point-aggregate workspace variables must be Holder types so their
// state can be managed by the aggregate framework.
if (!isInject && template.scope() == FunctionScope.POINT_AGGREGATE && !ValueHolder.class.isAssignableFrom(fieldClass)) {
return failure(String.format("Aggregate function '%s' workspace variable '%s' is of type '%s'. Please change it to Holder type.", func.getClassName(), field.getName(), fieldClass), func, field);
}
// If the workspace var is of Holder type, get its MajorType and assign to WorkspaceReference.
if (ValueHolder.class.isAssignableFrom(fieldClass)) {
MajorType majorType;
try {
majorType = getStaticFieldValue("TYPE", fieldClass, MajorType.class);
} catch (Exception e) {
return failure("Failure while trying to access the ValueHolder's TYPE static variable. All ValueHolders must contain a static TYPE variable that defines their MajorType.", e, func, field);
}
wsReference.setMajorType(majorType);
}
workspaceFields.add(wsReference);
}
}
if (outputField == null) {
return failure("This function declares zero output fields. A function must declare one output field.", func);
}
FunctionInitializer initializer = new FunctionInitializer(func.getClassName(), classLoader);
try {
// return holder
ValueReference[] ps = params.toArray(new ValueReference[0]);
WorkspaceReference[] works = workspaceFields.toArray(new WorkspaceReference[0]);
FunctionAttributes functionAttributes = new FunctionAttributes(template, ps, outputField, works);
// Pick the holder implementation from the declared scope; a ComplexWriter
// output selects the complex-writer variant of the holder.
switch(template.scope()) {
case POINT_AGGREGATE:
return outputField.isComplexWriter() ? new DrillComplexWriterAggFuncHolder(functionAttributes, initializer) : new DrillAggFuncHolder(functionAttributes, initializer);
case SIMPLE:
return outputField.isComplexWriter() ? new DrillComplexWriterFuncHolder(functionAttributes, initializer) : new DrillSimpleFuncHolder(functionAttributes, initializer);
case HOLISTIC_AGGREGATE:
case RANGE_AGGREGATE:
default:
return failure("Unsupported Function Type.", func);
}
} catch (Exception | NoSuchFieldError | AbstractMethodError ex) {
// Holder construction loads and inspects the function source; any linkage
// or reflection error is reported as a failure rather than propagated.
return failure("Failure while creating function holder.", ex, func);
}
}
Use of org.apache.drill.exec.vector.complex.reader.FieldReader in the Apache Drill project.
Example from the class MetadataControllerBatch, method appendStatistics.
/**
 * Writes every record of the right-side incoming batch into the given
 * statistics collector, one converted field at a time.
 * <p>
 * Does nothing unless the {@code STATISTICS_USE} planner option is enabled.
 *
 * @param statisticsCollector sink that receives the per-record statistics
 */
private void appendStatistics(StatisticsRecordCollector statisticsCollector) {
  // Statistics output is gated on the planner option.
  if (!context.getOptions().getOption(PlannerSettings.STATISTICS_USE)) {
    return;
  }
  // Build one converter per data column, preserving column order.
  List<FieldConverter> converters = new ArrayList<>();
  int nextFieldId = 0;
  for (VectorWrapper<?> vectorWrapper : right) {
    String fieldName = vectorWrapper.getField().getName();
    // The partition comparator column is an internal marker, not user data.
    if (fieldName.equalsIgnoreCase(WriterPrel.PARTITION_COMPARATOR_FIELD)) {
      continue;
    }
    FieldReader fieldReader = vectorWrapper.getValueVector().getReader();
    converters.add(StatisticsRecordWriterImpl.getConverter(statisticsCollector, nextFieldId++, fieldName, fieldReader));
  }
  try {
    for (int row = 0; row < right.getRecordCount(); row++) {
      statisticsCollector.startStatisticsRecord();
      // write the current record
      for (FieldConverter converter : converters) {
        converter.setPosition(row);
        converter.startField();
        converter.writeField();
        converter.endField();
      }
      statisticsCollector.endStatisticsRecord();
    }
  } catch (IOException e) {
    throw UserException.dataWriteError(e).addContext("Failed to write metadata").build(logger);
  }
}
Use of org.apache.drill.exec.vector.complex.reader.FieldReader in the Apache Drill project.
Example from the class TestBsonRecordReader, method testArrayType.
@Test
public void testArrayType() throws IOException {
  // Build the BSON document {"arrayKey": [1, 2, 3]}.
  BsonDocument document = new BsonDocument();
  BsonWriter documentWriter = new BsonDocumentWriter(document);
  documentWriter.writeStartDocument();
  documentWriter.writeName("arrayKey");
  documentWriter.writeStartArray();
  for (int element = 1; element <= 3; element++) {
    documentWriter.writeInt32(element);
  }
  documentWriter.writeEndArray();
  documentWriter.writeEndDocument();
  documentWriter.flush();
  // Feed the document through the reader under test, then verify that the
  // resulting array field reports all three elements.
  bsonReader.write(writer, new BsonDocumentReader(document));
  SingleMapReaderImpl mapReader = (SingleMapReaderImpl) writer.getMapVector().getReader();
  FieldReader arrayReader = mapReader.reader("arrayKey");
  assertEquals(3, arrayReader.size());
}
Use of org.apache.drill.exec.vector.complex.reader.FieldReader in the Apache Drill project.
Example from the class RowConstructorFunction, method eval.
@Override
public void eval() {
  // Inputs arrive as alternating (name, value) pairs: in[0] is the first
  // field name, in[1] its value, and so on. Write each pair into the root map.
  org.apache.drill.exec.vector.complex.writer.BaseWriter.MapWriter mapWriter = out.rootAsMap();
  for (int pairStart = 0; pairStart + 1 < in.length; pairStart += 2) {
    String fieldName = in[pairStart].readObject().toString();
    FieldReader valueReader = in[pairStart + 1];
    org.apache.drill.exec.vector.complex.MapUtility.writeToMapFromReader(valueReader, mapWriter, fieldName, "RowConstructorFunction");
  }
}
Aggregations