Example 1 with ValueWriter

Use of org.apache.drill.exec.vector.accessor.ValueWriter in the Apache Drill project.

From the class CompliantTextBatchReader, method buildWithSchema.

/**
 * The file has headers and a schema is provided. Convert from the VARCHAR
 * input type to the provided output type, but only if the column is projected.
 */
private FieldVarCharOutput buildWithSchema(ColumnsSchemaNegotiator schemaNegotiator, String[] fieldNames) {
    TupleMetadata readerSchema = mergeSchemas(schemaNegotiator.providedSchema(), fieldNames);
    schemaNegotiator.tableSchema(readerSchema, true);
    writer = schemaNegotiator.build().writer();
    StandardConversions conversions = conversions(schemaNegotiator.providedSchema());
    ValueWriter[] colWriters = new ValueWriter[fieldNames.length];
    for (int i = 0; i < fieldNames.length; i++) {
        ScalarWriter colWriter = writer.scalar(fieldNames[i]);
        if (colWriter.isProjected()) {
            // Projected column: insert a shim that converts the VARCHAR
            // text input to the column's declared output type.
            colWriters[i] = conversions.converterFor(colWriter, MinorType.VARCHAR);
        } else {
            // Unprojected column: writes are discarded, so the plain
            // writer is used without conversion.
            colWriters[i] = colWriter;
        }
    }
    return new FieldVarCharOutput(writer, colWriters);
}
Also used: TupleMetadata (org.apache.drill.exec.record.metadata.TupleMetadata), ValueWriter (org.apache.drill.exec.vector.accessor.ValueWriter), StandardConversions (org.apache.drill.exec.physical.impl.scan.convert.StandardConversions), ScalarWriter (org.apache.drill.exec.vector.accessor.ScalarWriter)
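
The projection check above determines which writers get conversion shims. As a minimal sketch, not from the Drill source, the following shows one row written through those writers; it assumes the provided schema declared the first column as INT (so colWriters[0] is a conversion shim) and the second as VARCHAR, and that RowSetLoader (org.apache.drill.exec.physical.resultSet.RowSetLoader) is the row writer built by the negotiator.

// Hypothetical sketch: write one row through the converters built above.
// Assumes column 0 was declared INT and column 1 VARCHAR in the provided schema.
private void writeOneRow(RowSetLoader writer, ValueWriter[] colWriters) {
    writer.start();
    colWriters[0].setString("42");      // the shim parses the text into the INT vector
    colWriters[1].setString("sample");  // plain VARCHAR writer; no conversion
    writer.save();
}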

Example 2 with ValueWriter

Use of org.apache.drill.exec.vector.accessor.ValueWriter in the Apache Drill project.

From the class CompliantTextBatchReader, method buildFromColumnHeaders.

/**
 * The file has column headers but no provided schema. Build the schema
 * from the column headers.
 */
private FieldVarCharOutput buildFromColumnHeaders(ColumnsSchemaNegotiator schemaNegotiator, String[] fieldNames) {
    final TupleMetadata schema = new TupleSchema();
    for (final String colName : fieldNames) {
        schema.addColumn(textColumn(colName));
    }
    schemaNegotiator.tableSchema(schema, true);
    writer = schemaNegotiator.build().writer();
    // Every column is VARCHAR, so the plain scalar writers are used
    // directly; no conversion shims are needed.
    ValueWriter[] colWriters = new ValueWriter[fieldNames.length];
    for (int i = 0; i < fieldNames.length; i++) {
        colWriters[i] = writer.column(i).scalar();
    }
    return new FieldVarCharOutput(writer, colWriters);
}
Also used: TupleMetadata (org.apache.drill.exec.record.metadata.TupleMetadata), ValueWriter (org.apache.drill.exec.vector.accessor.ValueWriter), TupleSchema (org.apache.drill.exec.record.metadata.TupleSchema)
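
The textColumn helper is not shown in the snippet. A plausible reconstruction, assuming Drill's MetadataUtils factory (org.apache.drill.exec.record.metadata.MetadataUtils) and the TypeProtos enums, is that each header name becomes a required VARCHAR column:

// Hypothetical reconstruction of the textColumn() helper used above:
// every header name maps to a non-nullable VARCHAR column.
private ColumnMetadata textColumn(String colName) {
    return MetadataUtils.newScalar(colName, MinorType.VARCHAR, DataMode.REQUIRED);
}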

Example 3 with ValueWriter

Use of org.apache.drill.exec.vector.accessor.ValueWriter in the Apache Drill project.

From the class CompliantTextBatchReader, method buildWithSchema (the overload that takes only the schema negotiator).

/**
 * The provided schema alone defines the columns. Build a converter from
 * the VARCHAR text input for every column in that schema.
 */
private FieldVarCharOutput buildWithSchema(ColumnsSchemaNegotiator schemaNegotiator) {
    TupleMetadata providedSchema = schemaNegotiator.providedSchema();
    schemaNegotiator.tableSchema(providedSchema, true);
    writer = schemaNegotiator.build().writer();
    StandardConversions conversions = conversions(providedSchema);
    ValueWriter[] colWriters = new ValueWriter[providedSchema.size()];
    for (int i = 0; i < colWriters.length; i++) {
        colWriters[i] = conversions.converterFor(writer.scalar(providedSchema.metadata(i).name()), MinorType.VARCHAR);
    }
    return new ConstrainedFieldOutput(writer, colWriters);
}
Also used: TupleMetadata (org.apache.drill.exec.record.metadata.TupleMetadata), ValueWriter (org.apache.drill.exec.vector.accessor.ValueWriter), StandardConversions (org.apache.drill.exec.physical.impl.scan.convert.StandardConversions)
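
The conversions helper is likewise not shown. A plausible sketch, assuming StandardConversions exposes a builder that accepts the provided schema (so per-column properties such as format strings can influence the generated converters):

// Hypothetical sketch of the conversions() helper used in these examples.
private StandardConversions conversions(TupleMetadata providedSchema) {
    return StandardConversions.builder()
        .withSchema(providedSchema)
        .build();
}

Note that when a provided column is itself VARCHAR, no conversion is needed, and converterFor can hand back the underlying writer unchanged.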

Example 4 with ValueWriter

Use of org.apache.drill.exec.vector.accessor.ValueWriter in the Apache Drill project.

From the class BaseFieldOutput, method writeToVector.

/**
 * Write a buffer of data to the underlying vector using the
 * column writer. The buffer holds a complete or partial chunk
 * of data for the field. If this is the first data for the field,
 * write the bytes; if this is a second buffer for the same field,
 * append the bytes. The append works if the underlying vector is
 * VarChar, but fails if a type conversion shim is in between.
 * (This is generally OK: with a shim in place, the previous setBytes
 * should already have failed, since byte-wise writes are not supported
 * for types such as large ints or dates.)
 */
protected void writeToVector() {
    // Unprojected fields are simply dropped.
    if (!fieldProjected) {
        return;
    }
    ValueWriter colWriter = columnWriter();
    if (fieldWriteCount == 0) {
        // First buffer for this field: write the bytes.
        colWriter.setBytes(fieldBytes, currentDataPointer);
    } else {
        // Continuation buffer for the same field: append the bytes.
        colWriter.appendBytes(fieldBytes, currentDataPointer);
    }
    fieldWriteCount += currentDataPointer;
    currentDataPointer = 0;
}
Also used: ValueWriter (org.apache.drill.exec.vector.accessor.ValueWriter)
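
The following is a minimal sketch (hypothetical buffers; StandardCharsets is java.nio.charset.StandardCharsets) of the two-call pattern the comment describes: the first chunk of a field is written with setBytes, and a continuation chunk is appended, which succeeds only when the underlying vector is VarChar with no conversion shim in between.

// Hypothetical: a field value arriving split across two read buffers.
byte[] first = "2023-0".getBytes(StandardCharsets.UTF_8);
byte[] rest = "1-15".getBytes(StandardCharsets.UTF_8);
colWriter.setBytes(first, first.length);   // first buffer: write the bytes
colWriter.appendBytes(rest, rest.length);  // same field: append the bytes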

Aggregations

ValueWriter (org.apache.drill.exec.vector.accessor.ValueWriter): 4 examples
TupleMetadata (org.apache.drill.exec.record.metadata.TupleMetadata): 3 examples
StandardConversions (org.apache.drill.exec.physical.impl.scan.convert.StandardConversions): 2 examples
TupleSchema (org.apache.drill.exec.record.metadata.TupleSchema): 1 example
ScalarWriter (org.apache.drill.exec.vector.accessor.ScalarWriter): 1 example