Example 26 with ScalarWriter

use of org.apache.drill.exec.vector.accessor.ScalarWriter in project drill by apache.

the class HDF5BatchReader method writeBooleanColumn.

/**
 * Helper function to write a 1D boolean column
 *
 * @param rowWriter The row to which the data will be written
 * @param name The column name
 * @param value The value to be written
 */
private void writeBooleanColumn(TupleWriter rowWriter, String name, boolean value) {
    ScalarWriter colWriter = getColWriter(rowWriter, name, TypeProtos.MinorType.BIT);
    colWriter.setBoolean(value);
}
Also used : ScalarWriter(org.apache.drill.exec.vector.accessor.ScalarWriter)
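
The getColWriter helper called above is not shown on this page. As a rough sketch of what such a helper usually does with the EVF writer API (the real HDF5BatchReader code may differ in details), it looks up the column in the row writer's schema, creates it as a nullable scalar on first use, and returns the scalar writer. MetadataUtils and ColumnMetadata from org.apache.drill.exec.record.metadata are assumed to be available.

private ScalarWriter getColWriter(TupleWriter rowWriter, String name, TypeProtos.MinorType type) {
    // Sketch only: look the column up by name and create it on first use.
    int index = rowWriter.tupleSchema().index(name);
    if (index == -1) {
        ColumnMetadata colSchema = MetadataUtils.newScalar(name, type, TypeProtos.DataMode.OPTIONAL);
        index = rowWriter.addColumn(colSchema);
    }
    return rowWriter.scalar(index);
}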

Example 27 with ScalarWriter

use of org.apache.drill.exec.vector.accessor.ScalarWriter in project drill by apache.

the class HDF5BatchReader method intMatrixHelper.

private void intMatrixHelper(int[][] colData, int cols, int rows, RowSetLoader rowWriter) {
    // This is the case where a dataset is projected in a metadata query.
    // The result should be a list of lists
    TupleMetadata nestedSchema = new SchemaBuilder()
        .addRepeatedList(INT_COLUMN_NAME)
        .addArray(TypeProtos.MinorType.INT)
        .resumeSchema()
        .buildSchema();
    int index = rowWriter.tupleSchema().index(INT_COLUMN_NAME);
    if (index == -1) {
        index = rowWriter.addColumn(nestedSchema.column(INT_COLUMN_NAME));
    }
    // The outer array
    ArrayWriter listWriter = rowWriter.column(index).array();
    // The inner array
    ArrayWriter innerWriter = listWriter.array();
    // The int values within the inner array
    ScalarWriter intWriter = innerWriter.scalar();
    int maxElements = Math.min(rows, PREVIEW_ROW_LIMIT);
    for (int i = 0; i < maxElements; i++) {
        for (int k = 0; k < cols; k++) {
            intWriter.setInt(colData[i][k]);
        }
        listWriter.save();
    }
}
Also used : TupleMetadata(org.apache.drill.exec.record.metadata.TupleMetadata) SchemaBuilder(org.apache.drill.exec.record.metadata.SchemaBuilder) ArrayWriter(org.apache.drill.exec.vector.accessor.ArrayWriter) ScalarWriter(org.apache.drill.exec.vector.accessor.ScalarWriter)
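
For context, a helper like intMatrixHelper is called while a row is open on the RowSetLoader; the caller starts and saves the row. The calling sequence below is a hypothetical illustration (readIntMatrixFromDataset and the surrounding setup are not taken from HDF5BatchReader):

// Hypothetical caller; the real next() logic in HDF5BatchReader is more involved.
int[][] matrix = readIntMatrixFromDataset();  // assumed helper that loads the HDF5 dataset
rowWriter.start();                            // open one row in the result set
intMatrixHelper(matrix, matrix[0].length, matrix.length, rowWriter);
rowWriter.save();                             // commit the row holding the repeated list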

Example 28 with ScalarWriter

use of org.apache.drill.exec.vector.accessor.ScalarWriter in project drill by apache.

the class HDF5BatchReader method floatMatrixHelper.

private void floatMatrixHelper(float[][] colData, int cols, int rows, RowSetLoader rowWriter) {
    // This is the case where a dataset is projected in a metadata query.
    // The result should be a list of lists.
    TupleMetadata nestedSchema = new SchemaBuilder()
        .addRepeatedList(FLOAT_COLUMN_NAME)
        .addArray(TypeProtos.MinorType.FLOAT4)
        .resumeSchema()
        .buildSchema();
    int index = rowWriter.tupleSchema().index(FLOAT_COLUMN_NAME);
    if (index == -1) {
        index = rowWriter.addColumn(nestedSchema.column(FLOAT_COLUMN_NAME));
    }
    // The outer array
    ArrayWriter listWriter = rowWriter.column(index).array();
    // The inner array
    ArrayWriter innerWriter = listWriter.array();
    // The float values within the inner array
    ScalarWriter floatWriter = innerWriter.scalar();
    int maxElements = Math.min(colData.length, PREVIEW_ROW_LIMIT);
    int maxCols = Math.min(colData[0].length, PREVIEW_COL_LIMIT);
    for (int i = 0; i < maxElements; i++) {
        for (int k = 0; k < maxCols; k++) {
            floatWriter.setDouble(colData[i][k]);
        }
        listWriter.save();
    }
}
Also used : TupleMetadata(org.apache.drill.exec.record.metadata.TupleMetadata) SchemaBuilder(org.apache.drill.exec.record.metadata.SchemaBuilder) ArrayWriter(org.apache.drill.exec.vector.accessor.ArrayWriter) ScalarWriter(org.apache.drill.exec.vector.accessor.ScalarWriter)
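
The repeated-list pattern above handles 2D data. For a 1D float dataset only a single ArrayWriter level is needed; the sketch below shows that simpler shape and is not the exact HDF5BatchReader code (FLOAT_COLUMN_NAME is reused here only for illustration). Note that ScalarWriter exposes setDouble(), which the FLOAT4 vector narrows to a float.

private void floatVectorSketch(float[] colData, RowSetLoader rowWriter) {
    // Sketch only: one level of array, no inner list, no save() per element.
    TupleMetadata schema = new SchemaBuilder()
        .addArray(FLOAT_COLUMN_NAME, TypeProtos.MinorType.FLOAT4)
        .buildSchema();
    int index = rowWriter.tupleSchema().index(FLOAT_COLUMN_NAME);
    if (index == -1) {
        index = rowWriter.addColumn(schema.column(FLOAT_COLUMN_NAME));
    }
    ArrayWriter arrayWriter = rowWriter.column(index).array();
    ScalarWriter floatWriter = arrayWriter.scalar();
    for (float value : colData) {
        floatWriter.setDouble(value);
    }
}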

Example 29 with ScalarWriter

use of org.apache.drill.exec.vector.accessor.ScalarWriter in project drill by apache.

the class HDF5BatchReader method writeIntColumn.

/**
 * Helper function to write a 1D int column
 *
 * @param rowWriter The row to which the data will be written
 * @param name The column name
 * @param value The value to be written
 */
private void writeIntColumn(TupleWriter rowWriter, String name, int value) {
    ScalarWriter colWriter = getColWriter(rowWriter, name, TypeProtos.MinorType.INT);
    colWriter.setInt(value);
}
Also used : ScalarWriter(org.apache.drill.exec.vector.accessor.ScalarWriter)
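
If the column is created as nullable (as in the getColWriter sketch earlier), a missing attribute can be recorded with the writer's setNull() method. The variant below is hypothetical and not part of the code shown on this page:

private void writeNullableIntColumn(TupleWriter rowWriter, String name, Integer value) {
    ScalarWriter colWriter = getColWriter(rowWriter, name, TypeProtos.MinorType.INT);
    if (value == null) {
        colWriter.setNull();   // mark the value as absent for this row
    } else {
        colWriter.setInt(value);
    }
}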

Example 30 with ScalarWriter

use of org.apache.drill.exec.vector.accessor.ScalarWriter in project drill by apache.

the class HDF5BatchReader method bigIntMatrixHelper.

private void bigIntMatrixHelper(long[][] colData, int cols, int rows, RowSetLoader rowWriter) {
    // This is the case where a dataset is projected in a metadata query.
    // The result should be a list of lists.
    TupleMetadata nestedSchema = new SchemaBuilder()
        .addRepeatedList(LONG_COLUMN_NAME)
        .addArray(TypeProtos.MinorType.BIGINT)
        .resumeSchema()
        .buildSchema();
    int index = rowWriter.tupleSchema().index(LONG_COLUMN_NAME);
    if (index == -1) {
        index = rowWriter.addColumn(nestedSchema.column(LONG_COLUMN_NAME));
    }
    // The outer array
    ArrayWriter listWriter = rowWriter.column(index).array();
    // The inner array
    ArrayWriter innerWriter = listWriter.array();
    // The long values within the inner array
    ScalarWriter bigintWriter = innerWriter.scalar();
    int maxElements = Math.min(colData.length, PREVIEW_ROW_LIMIT);
    int maxCols = Math.min(colData[0].length, PREVIEW_COL_LIMIT);
    for (int i = 0; i < maxElements; i++) {
        for (int k = 0; k < maxCols; k++) {
            bigintWriter.setLong(colData[i][k]);
        }
        listWriter.save();
    }
}
Also used : TupleMetadata(org.apache.drill.exec.record.metadata.TupleMetadata) SchemaBuilder(org.apache.drill.exec.record.metadata.SchemaBuilder) ArrayWriter(org.apache.drill.exec.vector.accessor.ArrayWriter) ScalarWriter(org.apache.drill.exec.vector.accessor.ScalarWriter)
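
The three matrix helpers on this page differ only in element type and setter call. Purely as an illustration of the shared structure, and not as code from the Drill project, they could be collapsed into one generic method that takes a java.util.function.BiConsumer as the setter; the element arrays would have to be boxed (for example Long[][] instead of long[][]):

private <T> void writeMatrix(T[][] colData, RowSetLoader rowWriter, String colName,
        TypeProtos.MinorType type, BiConsumer<ScalarWriter, T> setter) {
    // Hypothetical refactoring sketch; HDF5BatchReader keeps separate per-type helpers.
    TupleMetadata nestedSchema = new SchemaBuilder()
        .addRepeatedList(colName)
        .addArray(type)
        .resumeSchema()
        .buildSchema();
    int index = rowWriter.tupleSchema().index(colName);
    if (index == -1) {
        index = rowWriter.addColumn(nestedSchema.column(colName));
    }
    ArrayWriter listWriter = rowWriter.column(index).array();
    ScalarWriter elementWriter = listWriter.array().scalar();
    int maxRows = Math.min(colData.length, PREVIEW_ROW_LIMIT);
    int maxCols = Math.min(colData[0].length, PREVIEW_COL_LIMIT);
    for (int i = 0; i < maxRows; i++) {
        for (int k = 0; k < maxCols; k++) {
            setter.accept(elementWriter, colData[i][k]);
        }
        listWriter.save();   // close one inner list (one matrix row)
    }
}

A call such as writeMatrix(boxedLongs, rowWriter, LONG_COLUMN_NAME, TypeProtos.MinorType.BIGINT, ScalarWriter::setLong) would then stand in for bigIntMatrixHelper; the method reference unboxes the Long elements.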

Aggregations

ScalarWriter (org.apache.drill.exec.vector.accessor.ScalarWriter): 120
TupleMetadata (org.apache.drill.exec.record.metadata.TupleMetadata): 69
SubOperatorTest (org.apache.drill.test.SubOperatorTest): 68
Test (org.junit.Test): 68
SchemaBuilder (org.apache.drill.exec.record.metadata.SchemaBuilder): 51
SingleRowSet (org.apache.drill.exec.physical.rowSet.RowSet.SingleRowSet): 44
ScalarReader (org.apache.drill.exec.vector.accessor.ScalarReader): 31
ArrayWriter (org.apache.drill.exec.vector.accessor.ArrayWriter): 26
RowSetLoader (org.apache.drill.exec.physical.resultSet.RowSetLoader): 25
ResultSetLoader (org.apache.drill.exec.physical.resultSet.ResultSetLoader): 24
TupleWriter (org.apache.drill.exec.vector.accessor.TupleWriter): 23
ArrayReader (org.apache.drill.exec.vector.accessor.ArrayReader): 22
RowSet (org.apache.drill.exec.physical.rowSet.RowSet): 21
ExtendableRowSet (org.apache.drill.exec.physical.rowSet.RowSet.ExtendableRowSet): 19
SchemaBuilder (org.apache.drill.test.rowSet.schema.SchemaBuilder): 18
ColumnMetadata (org.apache.drill.exec.record.metadata.ColumnMetadata): 17
TupleReader (org.apache.drill.exec.vector.accessor.TupleReader): 17
SingleRowSet (org.apache.drill.test.rowSet.RowSet.SingleRowSet): 14
RowSetReader (org.apache.drill.test.rowSet.RowSetReader): 14
ResultSetLoader (org.apache.drill.exec.physical.rowSet.ResultSetLoader): 13