Usage of org.apache.drill.exec.vector.accessor.ScalarWriter in project drill by apache.
From class HDF5BatchReader, method writeBooleanColumn.
/**
 * Writes a single boolean value into the named 1D BIT column of the
 * supplied row, creating the column writer on first use.
 *
 * @param rowWriter The row to which the data will be written
 * @param name The column name
 * @param value The value to be written
 */
private void writeBooleanColumn(TupleWriter rowWriter, String name, boolean value) {
  getColWriter(rowWriter, name, TypeProtos.MinorType.BIT).setBoolean(value);
}
Usage of org.apache.drill.exec.vector.accessor.ScalarWriter in project drill by apache.
From class HDF5BatchReader, method intMatrixHelper.
/**
 * Projects a 2D int dataset as a repeated-list (list of lists) column,
 * as produced by a metadata query. Output is truncated to the preview
 * limits in both dimensions, consistent with the other matrix helpers.
 *
 * @param colData The 2D int data read from the HDF5 dataset
 * @param cols Number of columns in the dataset
 * @param rows Number of rows in the dataset
 * @param rowWriter The loader that receives the rows
 */
private void intMatrixHelper(int[][] colData, int cols, int rows, RowSetLoader rowWriter) {
  // This is the case where a dataset is projected in a metadata query.
  // The result should be a list of lists
  TupleMetadata nestedSchema = new SchemaBuilder().addRepeatedList(INT_COLUMN_NAME).addArray(TypeProtos.MinorType.INT).resumeSchema().buildSchema();
  // Add the repeated-list column only if it is not already in the schema
  int index = rowWriter.tupleSchema().index(INT_COLUMN_NAME);
  if (index == -1) {
    index = rowWriter.addColumn(nestedSchema.column(INT_COLUMN_NAME));
  }
  // The outer array
  ArrayWriter listWriter = rowWriter.column(index).array();
  // The inner array
  ArrayWriter innerWriter = listWriter.array();
  // The ints within the inner array
  ScalarWriter intWriter = innerWriter.scalar();
  // Clamp both dimensions to the preview limits; previously only the row
  // count was clamped, so wide datasets emitted unbounded columns, unlike
  // floatMatrixHelper and bigIntMatrixHelper.
  int maxElements = Math.min(rows, PREVIEW_ROW_LIMIT);
  int maxCols = Math.min(cols, PREVIEW_COL_LIMIT);
  for (int i = 0; i < maxElements; i++) {
    for (int k = 0; k < maxCols; k++) {
      intWriter.setInt(colData[i][k]);
    }
    listWriter.save();
  }
}
Usage of org.apache.drill.exec.vector.accessor.ScalarWriter in project drill by apache.
From class HDF5BatchReader, method floatMatrixHelper.
/**
 * Projects a 2D float dataset as a repeated-list (list of lists) column,
 * as produced by a metadata query. Output is truncated to the preview
 * limits in both dimensions.
 *
 * @param colData The 2D float data read from the HDF5 dataset
 * @param cols Number of columns in the dataset
 * @param rows Number of rows in the dataset
 * @param rowWriter The loader that receives the rows
 */
private void floatMatrixHelper(float[][] colData, int cols, int rows, RowSetLoader rowWriter) {
  // This is the case where a dataset is projected in a metadata query. The result should be a list of lists
  TupleMetadata nestedSchema = new SchemaBuilder().addRepeatedList(FLOAT_COLUMN_NAME).addArray(TypeProtos.MinorType.FLOAT4).resumeSchema().buildSchema();
  // Add the repeated-list column only if it is not already in the schema
  int index = rowWriter.tupleSchema().index(FLOAT_COLUMN_NAME);
  if (index == -1) {
    index = rowWriter.addColumn(nestedSchema.column(FLOAT_COLUMN_NAME));
  }
  // The outer array
  ArrayWriter listWriter = rowWriter.column(index).array();
  // The inner array
  ArrayWriter innerWriter = listWriter.array();
  // The floats within the inner array
  ScalarWriter floatWriter = innerWriter.scalar();
  int maxElements = Math.min(colData.length, PREVIEW_ROW_LIMIT);
  // Guard against an empty dataset: indexing colData[0] unconditionally
  // threw ArrayIndexOutOfBoundsException when colData has zero rows.
  int maxCols = maxElements == 0 ? 0 : Math.min(colData[0].length, PREVIEW_COL_LIMIT);
  for (int i = 0; i < maxElements; i++) {
    for (int k = 0; k < maxCols; k++) {
      floatWriter.setDouble(colData[i][k]);
    }
    listWriter.save();
  }
}
Usage of org.apache.drill.exec.vector.accessor.ScalarWriter in project drill by apache.
From class HDF5BatchReader, method writeIntColumn.
/**
 * Writes a single int value into the named 1D INT column of the
 * supplied row, creating the column writer on first use.
 *
 * @param rowWriter The row to which the data will be written
 * @param name The column name
 * @param value The value to be written
 */
private void writeIntColumn(TupleWriter rowWriter, String name, int value) {
  getColWriter(rowWriter, name, TypeProtos.MinorType.INT).setInt(value);
}
Usage of org.apache.drill.exec.vector.accessor.ScalarWriter in project drill by apache.
From class HDF5BatchReader, method bigIntMatrixHelper.
/**
 * Projects a 2D long dataset as a repeated-list (list of lists) column,
 * as produced by a metadata query. Output is truncated to the preview
 * limits in both dimensions.
 *
 * @param colData The 2D long data read from the HDF5 dataset
 * @param cols Number of columns in the dataset
 * @param rows Number of rows in the dataset
 * @param rowWriter The loader that receives the rows
 */
private void bigIntMatrixHelper(long[][] colData, int cols, int rows, RowSetLoader rowWriter) {
  // This is the case where a dataset is projected in a metadata query. The result should be a list of lists
  TupleMetadata nestedSchema = new SchemaBuilder().addRepeatedList(LONG_COLUMN_NAME).addArray(TypeProtos.MinorType.BIGINT).resumeSchema().buildSchema();
  // Add the repeated-list column only if it is not already in the schema
  int index = rowWriter.tupleSchema().index(LONG_COLUMN_NAME);
  if (index == -1) {
    index = rowWriter.addColumn(nestedSchema.column(LONG_COLUMN_NAME));
  }
  // The outer array
  ArrayWriter listWriter = rowWriter.column(index).array();
  // The inner array
  ArrayWriter innerWriter = listWriter.array();
  // The longs within the inner array
  ScalarWriter bigintWriter = innerWriter.scalar();
  int maxElements = Math.min(colData.length, PREVIEW_ROW_LIMIT);
  // Guard against an empty dataset: indexing colData[0] unconditionally
  // threw ArrayIndexOutOfBoundsException when colData has zero rows.
  int maxCols = maxElements == 0 ? 0 : Math.min(colData[0].length, PREVIEW_COL_LIMIT);
  for (int i = 0; i < maxElements; i++) {
    for (int k = 0; k < maxCols; k++) {
      bigintWriter.setLong(colData[i][k]);
    }
    listWriter.save();
  }
}
Aggregations