Use of org.apache.drill.exec.vector.accessor.writer.AbstractObjectWriter in project drill by apache.
The class TestDummyWriter, method testDummyDict.
@Test
public void testDummyDict() {
  final String dictName = "d";
  final String dictArrayName = "da";

  // Schema: one DICT column and one repeated-DICT column.
  TupleMetadata schema = new SchemaBuilder()
      .addDict(dictName, MinorType.INT)
        .repeatedValue(MinorType.VARCHAR)
        .resumeSchema()
      .addDictArray(dictArrayName, MinorType.VARCHAR)
        .value(MinorType.INT)
        .resumeSchema()
      .buildSchema();

  final String keyFieldName = DictVector.FIELD_KEY_NAME;
  final String valueFieldName = DictVector.FIELD_VALUE_NAME;

  List<AbstractObjectWriter> writers = new ArrayList<>();

  // Dummy key and value writers for the single dict: no backing vector.
  ColumnMetadata dictMetadata = schema.metadata(dictName);
  TupleMetadata dictSchema = dictMetadata.tupleSchema();
  List<AbstractObjectWriter> dictMembers = new ArrayList<>();
  dictMembers.add(ColumnWriterFactory.buildColumnWriter(dictSchema.metadata(keyFieldName), null));
  dictMembers.add(ColumnWriterFactory.buildColumnWriter(dictSchema.metadata(valueFieldName), null));
  writers.add(ObjectDictWriter.buildDict(dictMetadata, null, dictMembers));

  // Dummy key and value writers for the dict array.
  ColumnMetadata dictArrayMetadata = schema.metadata(dictArrayName);
  TupleMetadata dictArraySchema = dictArrayMetadata.tupleSchema();
  List<AbstractObjectWriter> dictArrayMembers = new ArrayList<>();
  dictArrayMembers.add(ColumnWriterFactory.buildColumnWriter(dictArraySchema.metadata(keyFieldName), null));
  dictArrayMembers.add(ColumnWriterFactory.buildColumnWriter(dictArraySchema.metadata(valueFieldName), null));
  writers.add(ObjectDictWriter.buildDictArray(dictArrayMetadata, null, dictArrayMembers));

  AbstractTupleWriter rootWriter = new RootWriterFixture(schema, writers);

  // Lifecycle events are no-ops on dummy writers.
  rootWriter.startWrite();
  rootWriter.startRow();

  // Every dummy column reports itself as unprojected.
  DictWriter dictWriter = rootWriter.dict(dictName);
  assertFalse(dictWriter.isProjected());
  assertFalse(dictWriter.keyWriter().isProjected());
  assertFalse(dictWriter.valueWriter().array().scalar().isProjected());

  DictWriter dictArrayEntryWriter = rootWriter.array(dictArrayName).dict();
  assertFalse(dictArrayEntryWriter.isProjected());
  assertFalse(dictArrayEntryWriter.keyWriter().isProjected());
  assertFalse(dictArrayEntryWriter.valueWriter().scalar().isProjected());

  // Writes into dummy columns look like real writes but are discarded.
  rootWriter.dict(dictName).keyWriter().setInt(20);
  rootWriter.dict(0).valueWriter().array().scalar().setString("foo");

  // Same for the dict array, addressed both by name and by index.
  rootWriter.array(dictArrayName).dict().keyWriter().setString("foo");
  rootWriter.array(dictArrayName).dict().valueWriter().scalar().setInt(30);
  rootWriter.array(dictArrayName).save();
  rootWriter.array(1).dict().keyWriter().setString("bar");
  rootWriter.array(1).dict().valueWriter().scalar().setInt(40);
  rootWriter.array(1).save();

  // Remaining lifecycle events are ignored as well.
  rootWriter.restartRow();
  rootWriter.saveRow();
  rootWriter.endWrite();
}
Use of org.apache.drill.exec.vector.accessor.writer.AbstractObjectWriter in project drill by apache.
The class TestDummyWriter, method testDummyScalar.
/**
 * Verifies dummy (unprojected) writers for a scalar column and an
 * array-of-scalars column: all writes succeed but are discarded.
 */
@Test
public void testDummyScalar() {
  TupleMetadata schema = new SchemaBuilder()
      .add("a", MinorType.INT)
      .addArray("b", MinorType.VARCHAR)
      .buildSchema();

  // No vectors are supplied, so the factory must create dummy writers.
  List<AbstractObjectWriter> writers = new ArrayList<>();
  writers.add(ColumnWriterFactory.buildColumnWriter(schema.metadata("a"), null));
  writers.add(ColumnWriterFactory.buildColumnWriter(schema.metadata("b"), null));
  AbstractTupleWriter rootWriter = new RootWriterFixture(schema, writers);

  // Lifecycle events are no-ops on dummy writers.
  rootWriter.startWrite();
  rootWriter.startRow();

  // Dummy writers have no backing vector, hence report no value type.
  assertEquals(ValueType.NULL, rootWriter.scalar(0).valueType());

  // First column: an int write is accepted.
  rootWriter.scalar(0).setInt(10);

  // Dummy writers skip type checks, so a "wrong"-typed write also succeeds.
  rootWriter.scalar("a").setString("foo");

  // Even a null into a required column is accepted without checking.
  rootWriter.column(0).scalar().setNull();

  // Second column: array writes succeed, again without type enforcement.
  rootWriter.array(1).scalar().setString("bar");
  rootWriter.array(1).scalar().setString("mumble");
  rootWriter.array("b").scalar().setInt(200);

  // Remaining lifecycle events are ignored as well.
  rootWriter.restartRow();
  rootWriter.saveRow();
  rootWriter.endWrite();
}
Use of org.apache.drill.exec.vector.accessor.writer.AbstractObjectWriter in project drill by apache.
The class TestDummyWriter, method testDummyMap.
/**
 * Verifies dummy writers for a map and a map array. By (unenforced)
 * convention such maps hold only dummy members; the writers behave
 * like real ones but discard every write.
 */
@Test
public void testDummyMap() {
  TupleMetadata schema = new SchemaBuilder()
      .addMap("m1")
        .add("a", MinorType.INT)
        .addArray("b", MinorType.VARCHAR)
        .resumeSchema()
      .addMapArray("m2")
        .add("c", MinorType.INT)
        .resumeSchema()
      .buildSchema();

  List<AbstractObjectWriter> writers = new ArrayList<>();

  // Dummy writer for the single map "m1".
  TupleMetadata m1Schema = schema.metadata("m1").tupleSchema();
  List<AbstractObjectWriter> m1Members = new ArrayList<>();
  m1Members.add(ColumnWriterFactory.buildColumnWriter(m1Schema.metadata("a"), null));
  m1Members.add(ColumnWriterFactory.buildColumnWriter(m1Schema.metadata("b"), null));
  writers.add(MapWriter.buildMapWriter(schema.metadata("m1"), null, m1Members));

  // Dummy writer for the map array "m2".
  TupleMetadata m2Schema = schema.metadata("m2").tupleSchema();
  List<AbstractObjectWriter> m2Members = new ArrayList<>();
  m2Members.add(ColumnWriterFactory.buildColumnWriter(m2Schema.metadata("c"), null));
  writers.add(MapWriter.buildMapWriter(schema.metadata("m2"), null, m2Members));

  AbstractTupleWriter rootWriter = new RootWriterFixture(schema, writers);

  // Lifecycle events are no-ops on dummy writers.
  rootWriter.startWrite();
  rootWriter.startRow();

  // Every dummy column reports itself as unprojected.
  assertFalse(rootWriter.tuple("m1").isProjected());
  assertFalse(rootWriter.tuple("m1").scalar("a").isProjected());
  assertFalse(rootWriter.tuple("m1").array("b").isProjected());
  assertFalse(rootWriter.array("m2").isProjected());
  assertFalse(rootWriter.array("m2").tuple().isProjected());
  assertFalse(rootWriter.array("m2").tuple().scalar("c").isProjected());

  // Writes into the dummy map look real but are discarded.
  rootWriter.tuple("m1").scalar("a").setInt(20);
  rootWriter.tuple(0).array("b").scalar().setString("foo");

  // Same for the dummy map array, addressed by name and by index.
  rootWriter.array("m2").tuple().scalar("c").setInt(30);
  rootWriter.array("m2").save();
  rootWriter.array(1).tuple().scalar(0).setInt(40);
  rootWriter.array(1).save();

  // Remaining lifecycle events are ignored as well.
  rootWriter.restartRow();
  rootWriter.saveRow();
  rootWriter.endWrite();
}
Use of org.apache.drill.exec.vector.accessor.writer.AbstractObjectWriter in project drill by axbaretto.
The class TupleState, method buildPrimitive.
/**
 * Builds the column state for a primitive column. A projected column
 * gets a materialized vector from the vector cache; an unprojected
 * column gets no vector, which causes the factory to produce a
 * "dummy" writer. In both cases the resulting column state binds the
 * vector (possibly null) and writer together and manages the column.
 *
 * @param columnSchema schema of the new primitive column
 * @return column state for the new column
 */
@SuppressWarnings("resource")
private ColumnState buildPrimitive(ColumnMetadata columnSchema) {
  // Materialize a vector only when the column is projected; an
  // unprojected column has no backing storage.
  final ValueVector vector = columnSchema.isProjected()
      ? resultSetLoader.vectorCache().addOrGet(columnSchema.schema())
      : null;

  // Writer for the column; it is later handed to the tuple writer.
  final AbstractObjectWriter colWriter =
      ColumnWriterFactory.buildColumnWriter(columnSchema, vector);

  return columnSchema.isArray()
      ? PrimitiveColumnState.newPrimitiveArray(resultSetLoader, vector, colWriter)
      : PrimitiveColumnState.newPrimitive(resultSetLoader, vector, colWriter);
}
Use of org.apache.drill.exec.vector.accessor.writer.AbstractObjectWriter in project drill by axbaretto.
The class TupleState, method buildSchema.
/**
 * Materializes vectors and writers for an up-front tuple schema,
 * complementing the on-the-fly schema creation handled elsewhere.
 * Map columns are added with an empty schema first, then their
 * members are built recursively.
 *
 * @param schema desired tuple schema to be materialized
 */
public void buildSchema(TupleMetadata schema) {
  for (int i = 0; i < schema.size(); i++) {
    final ColumnMetadata col = schema.metadata(i);
    final AbstractObjectWriter objWriter;
    if (!col.isMap()) {
      objWriter = addColumn(col);
    } else {
      // Add the map shell first, then recurse into its members so
      // that the nested map state is populated in declaration order.
      objWriter = addColumn(col.cloneEmpty());
      final BaseMapColumnState mapColState =
          (BaseMapColumnState) columns.get(columns.size() - 1);
      mapColState.mapState().buildSchema(col.mapSchema());
    }
    writer().addColumnWriter(objWriter);
  }
}
Aggregations