
Example 6 with WritableBatch

Use of org.apache.drill.exec.record.WritableBatch in project drill by apache.

The class BroadcastSenderRootExec, method innerNext:

@Override
public boolean innerNext() {
    RecordBatch.IterOutcome out = next(incoming);
    logger.debug("Outcome of sender next {}", out);
    switch(out) {
        case OUT_OF_MEMORY:
            throw new OutOfMemoryException();
        case STOP:
        case NONE:
            // Upstream is exhausted (or the query was stopped): send an empty "last" batch
            // to every receiving minor fragment so the receivers can finish.
            for (int i = 0; i < tunnels.length; ++i) {
                FragmentWritableBatch b2 = FragmentWritableBatch.getEmptyLast(handle.getQueryId(), handle.getMajorFragmentId(), handle.getMinorFragmentId(), config.getOppositeMajorFragmentId(), receivingMinorFragments[i]);
                stats.startWait();
                try {
                    tunnels[i].sendRecordBatch(b2);
                } finally {
                    stats.stopWait();
                }
            }
            return false;
        case OK_NEW_SCHEMA:
        case OK:
            // Take ownership of the incoming buffers on this operator's allocator.
            WritableBatch writableBatch = incoming.getWritableBatch().transfer(oContext.getAllocator());
            if (tunnels.length > 1) {
                // The same batch goes out through every tunnel; retain one extra buffer
                // reference per additional receiver so the shared buffers survive every send.
                writableBatch.retainBuffers(tunnels.length - 1);
            }
            for (int i = 0; i < tunnels.length; ++i) {
                FragmentWritableBatch batch = new FragmentWritableBatch(false, handle.getQueryId(), handle.getMajorFragmentId(), handle.getMinorFragmentId(), config.getOppositeMajorFragmentId(), receivingMinorFragments[i], writableBatch);
                updateStats(batch);
                stats.startWait();
                try {
                    tunnels[i].sendRecordBatch(batch);
                } finally {
                    stats.stopWait();
                }
            }
            return ok;
        case NOT_YET:
        default:
            throw new IllegalStateException();
    }
}
Also used: FragmentWritableBatch (org.apache.drill.exec.record.FragmentWritableBatch), RecordBatch (org.apache.drill.exec.record.RecordBatch), WritableBatch (org.apache.drill.exec.record.WritableBatch), OutOfMemoryException (org.apache.drill.exec.exception.OutOfMemoryException), DrillbitEndpoint (org.apache.drill.exec.proto.CoordinationProtos.DrillbitEndpoint), MinorFragmentEndpoint (org.apache.drill.exec.physical.MinorFragmentEndpoint)
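
The buffer reference counting above is the subtle part of this sender, so here is a minimal sketch of just that step, reusing the Drill classes shown in the example. The helper name and parameter list are hypothetical, handle is assumed to be the ExecProtos.FragmentHandle that supplies the query and fragment ids used above, and the actual sendRecordBatch calls are left out.

import org.apache.drill.exec.memory.BufferAllocator;
import org.apache.drill.exec.proto.ExecProtos.FragmentHandle;
import org.apache.drill.exec.record.FragmentWritableBatch;
import org.apache.drill.exec.record.RecordBatch;
import org.apache.drill.exec.record.WritableBatch;

// Hypothetical helper: wrap one incoming batch for broadcast to several receivers.
static FragmentWritableBatch[] wrapForBroadcast(RecordBatch incoming, BufferAllocator allocator,
        FragmentHandle handle, int oppositeMajorId, int[] receivingMinorFragments) {
    // transfer() moves the buffers onto the given allocator with a single reference.
    WritableBatch batch = incoming.getWritableBatch().transfer(allocator);
    if (receivingMinorFragments.length > 1) {
        // One extra reference per additional receiver, matching the sender above.
        batch.retainBuffers(receivingMinorFragments.length - 1);
    }
    FragmentWritableBatch[] wrapped = new FragmentWritableBatch[receivingMinorFragments.length];
    for (int i = 0; i < receivingMinorFragments.length; i++) {
        wrapped[i] = new FragmentWritableBatch(false, handle.getQueryId(), handle.getMajorFragmentId(),
                handle.getMinorFragmentId(), oppositeMajorId, receivingMinorFragments[i], batch);
    }
    return wrapped;
}

Each wrapped batch would then be handed to its tunnel exactly as in the loop above; the retained references are what keep the shared buffers alive until the last send completes.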

Example 7 with WritableBatch

Use of org.apache.drill.exec.record.WritableBatch in project drill by apache.

The class TestWriteToDisk, method test:

@Test
@SuppressWarnings("static-method")
public void test() throws Exception {
    final List<ValueVector> vectorList = Lists.newArrayList();
    final DrillConfig config = DrillConfig.create();
    try (final RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
        final Drillbit bit = new Drillbit(config, serviceSet)) {
        bit.run();
        final DrillbitContext context = bit.getContext();
        final MaterializedField intField = MaterializedField.create("int", Types.required(TypeProtos.MinorType.INT));
        final MaterializedField binField = MaterializedField.create("binary", Types.required(TypeProtos.MinorType.VARBINARY));
        try (final IntVector intVector = (IntVector) TypeHelper.getNewVector(intField, context.getAllocator());
            final VarBinaryVector binVector = (VarBinaryVector) TypeHelper.getNewVector(binField, context.getAllocator())) {
            AllocationHelper.allocate(intVector, 4, 4);
            AllocationHelper.allocate(binVector, 4, 5);
            vectorList.add(intVector);
            vectorList.add(binVector);
            intVector.getMutator().setSafe(0, 0);
            binVector.getMutator().setSafe(0, "ZERO".getBytes());
            intVector.getMutator().setSafe(1, 1);
            binVector.getMutator().setSafe(1, "ONE".getBytes());
            intVector.getMutator().setSafe(2, 2);
            binVector.getMutator().setSafe(2, "TWO".getBytes());
            intVector.getMutator().setSafe(3, 3);
            binVector.getMutator().setSafe(3, "THREE".getBytes());
            intVector.getMutator().setValueCount(4);
            binVector.getMutator().setValueCount(4);
            VectorContainer container = new VectorContainer();
            container.addCollection(vectorList);
            container.setRecordCount(4);
            // Wrap the container's vectors in a WritableBatch and make it serializable.
            WritableBatch batch = WritableBatch.getBatchNoHVWrap(container.getRecordCount(), container, false);
            VectorAccessibleSerializable wrap = new VectorAccessibleSerializable(batch, context.getAllocator());
            Configuration conf = new Configuration();
            conf.set(FileSystem.FS_DEFAULT_NAME_KEY, FileSystem.DEFAULT_FS);
            final VectorAccessibleSerializable newWrap = new VectorAccessibleSerializable(context.getAllocator());
            try (final FileSystem fs = FileSystem.get(conf)) {
                final File tempDir = Files.createTempDir();
                tempDir.deleteOnExit();
                final Path path = new Path(tempDir.getAbsolutePath(), "drillSerializable");
                try (final FSDataOutputStream out = fs.create(path)) {
                    wrap.writeToStream(out);
                    out.close();
                }
                try (final FSDataInputStream in = fs.open(path)) {
                    newWrap.readFromStream(in);
                }
            }
            // The deserialized copy: iterate its vectors and print every value.
            final VectorAccessible newContainer = newWrap.get();
            for (VectorWrapper<?> w : newContainer) {
                try (ValueVector vv = w.getValueVector()) {
                    int values = vv.getAccessor().getValueCount();
                    for (int i = 0; i < values; i++) {
                        final Object o = vv.getAccessor().getObject(i);
                        if (o instanceof byte[]) {
                            System.out.println(new String((byte[]) o));
                        } else {
                            System.out.println(o);
                        }
                    }
                }
            }
        }
    }
}
Also used: DrillbitContext (org.apache.drill.exec.server.DrillbitContext), Path (org.apache.hadoop.fs.Path), SchemaPath (org.apache.drill.common.expression.SchemaPath), IntVector (org.apache.drill.exec.vector.IntVector), Configuration (org.apache.hadoop.conf.Configuration), VectorAccessible (org.apache.drill.exec.record.VectorAccessible), MaterializedField (org.apache.drill.exec.record.MaterializedField), VarBinaryVector (org.apache.drill.exec.vector.VarBinaryVector), VectorContainer (org.apache.drill.exec.record.VectorContainer), ValueVector (org.apache.drill.exec.vector.ValueVector), DrillConfig (org.apache.drill.common.config.DrillConfig), Drillbit (org.apache.drill.exec.server.Drillbit), RemoteServiceSet (org.apache.drill.exec.server.RemoteServiceSet), FileSystem (org.apache.hadoop.fs.FileSystem), WritableBatch (org.apache.drill.exec.record.WritableBatch), FSDataInputStream (org.apache.hadoop.fs.FSDataInputStream), FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream), File (java.io.File), ExecTest (org.apache.drill.exec.ExecTest), Test (org.junit.Test)
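
For reference, here is a stripped-down version of the same round trip written against an in-memory stream instead of a Hadoop path. This is only a sketch: the helper name is made up, error handling and vector population are omitted, and it assumes writeToStream/readFromStream accept any OutputStream/InputStream (the test above passes the Hadoop stream subclasses).

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import org.apache.drill.exec.cache.VectorAccessibleSerializable;
import org.apache.drill.exec.memory.BufferAllocator;
import org.apache.drill.exec.record.VectorAccessible;
import org.apache.drill.exec.record.VectorContainer;
import org.apache.drill.exec.record.WritableBatch;

// Hypothetical helper: serialize a populated container and read it back.
static VectorAccessible roundTrip(VectorContainer container, BufferAllocator allocator) throws Exception {
    // Wrap the container's vectors (same flags as the test above).
    WritableBatch batch = WritableBatch.getBatchNoHVWrap(container.getRecordCount(), container, false);
    VectorAccessibleSerializable writer = new VectorAccessibleSerializable(batch, allocator);

    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    writer.writeToStream(bytes);                     // batch definition + buffers

    VectorAccessibleSerializable reader = new VectorAccessibleSerializable(allocator);
    reader.readFromStream(new ByteArrayInputStream(bytes.toByteArray()));
    return reader.get();                             // rebuilt vectors (caller should eventually clear them)
}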

Example 8 with WritableBatch

Use of org.apache.drill.exec.record.WritableBatch in project drill by apache.

The class TestLoad, method testLoadValueVector:

@Test
public void testLoadValueVector() throws Exception {
    final BufferAllocator allocator = RootAllocatorFactory.newRoot(drillConfig);
    final ValueVector fixedV = new IntVector(MaterializedField.create("ints", Types.required(MinorType.INT)), allocator);
    final ValueVector varlenV = new VarCharVector(MaterializedField.create("chars", Types.required(MinorType.VARCHAR)), allocator);
    final ValueVector nullableVarlenV = new NullableVarCharVector(MaterializedField.create("chars", Types.optional(MinorType.VARCHAR)), allocator);
    final List<ValueVector> vectors = Lists.newArrayList(fixedV, varlenV, nullableVarlenV);
    for (final ValueVector v : vectors) {
        AllocationHelper.allocate(v, 100, 50);
        v.getMutator().generateTestData(100);
    }
    // Build a WritableBatch over the test vectors.
    final WritableBatch writableBatch = WritableBatch.getBatchNoHV(100, vectors, false);
    final RecordBatchLoader batchLoader = new RecordBatchLoader(allocator);
    // RecordBatchLoader.load expects a single contiguous buffer, so copy the batch's
    // buffers back-to-back into one DrillBuf before loading.
    final ByteBuf[] byteBufs = writableBatch.getBuffers();
    int bytes = 0;
    for (int i = 0; i < byteBufs.length; i++) {
        bytes += byteBufs[i].writerIndex();
    }
    final DrillBuf byteBuf = allocator.buffer(bytes);
    int index = 0;
    for (int i = 0; i < byteBufs.length; i++) {
        byteBufs[i].readBytes(byteBuf, index, byteBufs[i].writerIndex());
        index += byteBufs[i].writerIndex();
    }
    byteBuf.writerIndex(bytes);
    // Rebuild value vectors from the batch definition plus the combined buffer.
    batchLoader.load(writableBatch.getDef(), byteBuf);
    boolean firstColumn = true;
    int recordCount = 0;
    for (final VectorWrapper<?> v : batchLoader) {
        if (firstColumn) {
            firstColumn = false;
        } else {
            System.out.print("\t");
        }
        System.out.print(v.getField().getPath());
        System.out.print("[");
        System.out.print(v.getField().getType().getMinorType());
        System.out.print("]");
    }
    System.out.println();
    for (int r = 0; r < batchLoader.getRecordCount(); r++) {
        boolean first = true;
        recordCount++;
        for (final VectorWrapper<?> v : batchLoader) {
            if (first) {
                first = false;
            } else {
                System.out.print("\t");
            }
            final ValueVector.Accessor accessor = v.getValueVector().getAccessor();
            if (v.getField().getType().getMinorType() == TypeProtos.MinorType.VARCHAR) {
                final Object obj = accessor.getObject(r);
                if (obj != null) {
                    System.out.print(accessor.getObject(r));
                } else {
                    System.out.print("NULL");
                }
            } else {
                System.out.print(accessor.getObject(r));
            }
        }
        if (!first) {
            System.out.println();
        }
    }
    assertEquals(100, recordCount);
    batchLoader.clear();
    writableBatch.clear();
}
Also used: IntVector (org.apache.drill.exec.vector.IntVector), RecordBatchLoader (org.apache.drill.exec.record.RecordBatchLoader), NullableVarCharVector (org.apache.drill.exec.vector.NullableVarCharVector), VarCharVector (org.apache.drill.exec.vector.VarCharVector), ByteBuf (io.netty.buffer.ByteBuf), BufferAllocator (org.apache.drill.exec.memory.BufferAllocator), ValueVector (org.apache.drill.exec.vector.ValueVector), WritableBatch (org.apache.drill.exec.record.WritableBatch), DrillBuf (io.netty.buffer.DrillBuf), Test (org.junit.Test), ExecTest (org.apache.drill.exec.ExecTest)
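
The buffer consolidation is the reusable piece of this test, so here is that step on its own as a minimal sketch. It uses only calls that appear in the test above; the helper name is hypothetical, and the caller remains responsible for clearing the loader and the batch afterwards, as the test does.

import io.netty.buffer.ByteBuf;
import io.netty.buffer.DrillBuf;
import org.apache.drill.exec.memory.BufferAllocator;
import org.apache.drill.exec.record.RecordBatchLoader;
import org.apache.drill.exec.record.WritableBatch;

// Hypothetical helper: copy a WritableBatch's buffers into one DrillBuf and load it.
static RecordBatchLoader loadBatch(WritableBatch batch, BufferAllocator allocator) throws Exception {
    ByteBuf[] buffers = batch.getBuffers();
    int totalBytes = 0;
    for (ByteBuf b : buffers) {
        totalBytes += b.writerIndex();               // bytes actually written to each buffer
    }
    DrillBuf combined = allocator.buffer(totalBytes);
    int offset = 0;
    for (ByteBuf b : buffers) {
        b.readBytes(combined, offset, b.writerIndex());
        offset += b.writerIndex();
    }
    combined.writerIndex(totalBytes);

    RecordBatchLoader loader = new RecordBatchLoader(allocator);
    // The batch definition describes how the contiguous buffer splits back into vectors.
    loader.load(batch.getDef(), combined);
    return loader;
}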

Aggregations

WritableBatch (org.apache.drill.exec.record.WritableBatch) 8
ValueVector (org.apache.drill.exec.vector.ValueVector) 4
VectorContainer (org.apache.drill.exec.record.VectorContainer) 3
SchemaPath (org.apache.drill.common.expression.SchemaPath) 2
ExecTest (org.apache.drill.exec.ExecTest) 2
CachedVectorContainer (org.apache.drill.exec.cache.CachedVectorContainer) 2
VectorAccessibleSerializable (org.apache.drill.exec.cache.VectorAccessibleSerializable) 2
SortRecordBatchBuilder (org.apache.drill.exec.physical.impl.sort.SortRecordBatchBuilder) 2
Sorter (org.apache.drill.exec.physical.impl.sort.Sorter) 2
SelectionVector4 (org.apache.drill.exec.record.selection.SelectionVector4) 2
IntVector (org.apache.drill.exec.vector.IntVector) 2
Test (org.junit.Test) 2
Stopwatch (com.google.common.base.Stopwatch) 1
ByteBuf (io.netty.buffer.ByteBuf) 1
DrillBuf (io.netty.buffer.DrillBuf) 1
File (java.io.File) 1
IOException (java.io.IOException) 1
DrillConfig (org.apache.drill.common.config.DrillConfig) 1
FieldReference (org.apache.drill.common.expression.FieldReference) 1
Ordering (org.apache.drill.common.logical.data.Order.Ordering) 1