
Example 6 with DrillBuf

use of io.netty.buffer.DrillBuf in project drill by apache.

The class AsyncPageReader, method decompress: decompresses a Parquet page from the compressed DrillBuf into a newly allocated temporary DrillBuf using the column chunk's codec, advances the writer index to the uncompressed size, and records the elapsed time in the reader statistics.

private DrillBuf decompress(PageHeader pageHeader, DrillBuf compressedData) {
    DrillBuf pageDataBuf = null;
    Stopwatch timer = Stopwatch.createUnstarted();
    long timeToRead;
    int compressedSize = pageHeader.getCompressed_page_size();
    int uncompressedSize = pageHeader.getUncompressed_page_size();
    pageDataBuf = allocateTemporaryBuffer(uncompressedSize);
    try {
        timer.start();
        CompressionCodecName codecName = parentColumnReader.columnChunkMetaData.getCodec();
        ByteBuffer input = compressedData.nioBuffer(0, compressedSize);
        ByteBuffer output = pageDataBuf.nioBuffer(0, uncompressedSize);
        DecompressionHelper decompressionHelper = new DecompressionHelper(codecName);
        decompressionHelper.decompress(input, compressedSize, output, uncompressedSize);
        pageDataBuf.writerIndex(uncompressedSize);
        timeToRead = timer.elapsed(TimeUnit.NANOSECONDS);
        this.updateStats(pageHeader, "Decompress", 0, timeToRead, compressedSize, uncompressedSize);
    } catch (IOException e) {
        handleAndThrowException(e, "Error decompressing data.");
    }
    return pageDataBuf;
}
Also used: CompressionCodecName (org.apache.parquet.hadoop.metadata.CompressionCodecName), Stopwatch (com.google.common.base.Stopwatch), IOException (java.io.IOException), ByteBuffer (java.nio.ByteBuffer), DrillBuf (io.netty.buffer.DrillBuf)
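
The pattern worth noting is how a DrillBuf is handed to the codec: nioBuffer(index, length) exposes the Drill-managed memory as a java.nio.ByteBuffer, and because writes through that view do not move the DrillBuf's writer index, decompress() advances it by hand afterwards. A minimal sketch of that pattern, assuming a standalone root allocator obtained through RootAllocatorFactory.newRoot(DrillConfig.create()) and using a plain byte copy as a stand-in for the Parquet decompressor:

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

import org.apache.drill.common.config.DrillConfig;
import org.apache.drill.exec.memory.BufferAllocator;
import org.apache.drill.exec.memory.RootAllocatorFactory;

import io.netty.buffer.DrillBuf;

public class NioViewSketch {
    public static void main(String[] args) {
        // Standalone root allocator; inside Drill this would come from the fragment context.
        BufferAllocator allocator = RootAllocatorFactory.newRoot(DrillConfig.create());
        byte[] payload = "hypothetical page bytes".getBytes(StandardCharsets.UTF_8);

        DrillBuf target = allocator.buffer(payload.length);
        try {
            // Expose the DrillBuf as a ByteBuffer view, the shape codec APIs expect.
            ByteBuffer view = target.nioBuffer(0, payload.length);
            // Stand-in for decompressionHelper.decompress(input, ..., output, ...).
            view.put(payload);

            // The view does not advance the DrillBuf's writer index, so set it explicitly,
            // exactly as decompress() does with uncompressedSize.
            target.writerIndex(payload.length);
            System.out.println("readable bytes: " + target.readableBytes());
        } finally {
            target.release();   // DrillBuf is reference counted
            allocator.close();
        }
    }
}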

Example 7 with DrillBuf

use of io.netty.buffer.DrillBuf in project drill by apache.

The class MSortTemplate, method setup: wraps the incoming SV4, scans its entries to record where each sorted run (batch) starts, and allocates a DrillBuf-backed temporary SV4 to hold the merged output.

@Override
public void setup(final FragmentContext context, final BufferAllocator allocator, final SelectionVector4 vector4, final VectorContainer hyperBatch, int outputBatchSize) throws SchemaChangeException {
    // we pass in the local hyperBatch since that is where we'll be reading data.
    Preconditions.checkNotNull(vector4);
    this.vector4 = vector4.createNewWrapperCurrent();
    this.context = context;
    vector4.clear();
    doSetup(context, hyperBatch, null);
    // Populate the queue with the offset in the SV4 of each
    // batch. Note that this is expensive as it requires a scan
    // of all items to be sorted: potentially millions.
    runStarts.add(0);
    int batch = 0;
    final int totalCount = this.vector4.getTotalCount();
    for (int i = 0; i < totalCount; i++) {
        final int newBatch = this.vector4.get(i) >>> 16;
        if (newBatch == batch) {
            continue;
        } else if (newBatch == batch + 1) {
            runStarts.add(i);
            batch = newBatch;
        } else {
            throw new UnsupportedOperationException(String.format("Missing batch. batch: %d newBatch: %d", batch, newBatch));
        }
    }
    // Create a temporary SV4 to hold the merged results.
    @SuppressWarnings("resource") final DrillBuf drillBuf = allocator.buffer(4 * totalCount);
    desiredRecordBatchCount = Math.min(outputBatchSize, Character.MAX_VALUE);
    desiredRecordBatchCount = Math.min(desiredRecordBatchCount, totalCount);
    aux = new SelectionVector4(drillBuf, totalCount, desiredRecordBatchCount);
}
Also used: DrillBuf (io.netty.buffer.DrillBuf), SelectionVector4 (org.apache.drill.exec.record.selection.SelectionVector4)
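
The scan above relies on how an SV4 entry is packed: the upper 16 bits hold the batch index (hence vector4.get(i) >>> 16) and the lower 16 bits hold the record offset within that batch, which is also why the temporary buffer is sized at 4 bytes per entry and the output batch size is capped at Character.MAX_VALUE. A small, Drill-free sketch of that encoding and of the same run-detection loop, using a plain int[] as a stand-in for the SV4's DrillBuf:

import java.util.ArrayList;
import java.util.List;

public class Sv4EncodingSketch {
    // Pack a (batch, offset) pair the way an SV4 entry is laid out:
    // upper 16 bits = batch index, lower 16 bits = record offset within the batch.
    static int encode(int batchIndex, int recordOffset) {
        return (batchIndex << 16) | (recordOffset & 0xFFFF);
    }

    public static void main(String[] args) {
        int[] sv4 = {
            encode(0, 0), encode(0, 1),   // batch 0
            encode(1, 0), encode(1, 1),   // batch 1 starts at index 2
            encode(2, 0)                  // batch 2 starts at index 4
        };

        // Same run detection as setup(): a new run starts wherever the upper 16 bits step by one.
        List<Integer> runStarts = new ArrayList<>();
        runStarts.add(0);
        int batch = 0;
        for (int i = 0; i < sv4.length; i++) {
            int newBatch = sv4[i] >>> 16;
            if (newBatch == batch) {
                continue;
            } else if (newBatch == batch + 1) {
                runStarts.add(i);
                batch = newBatch;
            } else {
                throw new IllegalStateException("Missing batch: " + batch + " -> " + newBatch);
            }
        }
        System.out.println("run starts: " + runStarts);   // prints [0, 2, 4]
    }
}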

Example 8 with DrillBuf

use of io.netty.buffer.DrillBuf in project drill by apache.

The class PageReader, method readDictionaryPage: reads the dictionary page into a DrillBuf, tracks that buffer for later release, and builds the Parquet DictionaryPage used to initialize the column's dictionary.

private void readDictionaryPage(final PageHeader pageHeader, final ColumnReader<?> parentStatus) throws IOException {
    int compressedSize = pageHeader.getCompressed_page_size();
    int uncompressedSize = pageHeader.getUncompressed_page_size();
    final DrillBuf dictionaryData = readPage(pageHeader, compressedSize, uncompressedSize);
    allocatedDictionaryBuffers.add(dictionaryData);
    DictionaryPage page = new DictionaryPage(
            asBytesInput(dictionaryData, 0, uncompressedSize),
            pageHeader.uncompressed_page_size,
            pageHeader.dictionary_page_header.num_values,
            valueOf(pageHeader.dictionary_page_header.encoding.name()));
    this.dictionary = page.getEncoding().initDictionary(parentStatus.columnDescriptor, page);
}
Also used: DictionaryPage (org.apache.parquet.column.page.DictionaryPage), DrillBuf (io.netty.buffer.DrillBuf)
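
What keeps this memory-safe is the allocatedDictionaryBuffers list: the dictionary keeps referring to the DrillBuf after this method returns, so the buffer is tracked and released when the reader is torn down rather than here. A minimal sketch of that tracking pattern, with a hypothetical readPage that simply allocates a buffer in place of the real page read and an allocator created via RootAllocatorFactory.newRoot(DrillConfig.create()):

import java.util.ArrayList;
import java.util.List;

import org.apache.drill.common.config.DrillConfig;
import org.apache.drill.exec.memory.BufferAllocator;
import org.apache.drill.exec.memory.RootAllocatorFactory;

import io.netty.buffer.DrillBuf;

public class DictionaryBufferTrackingSketch {
    // Mirrors allocatedDictionaryBuffers: every dictionary buffer is remembered so it
    // can be released when the column is finished, not when the page has been read.
    private final List<DrillBuf> allocatedDictionaryBuffers = new ArrayList<>();
    private final BufferAllocator allocator;

    DictionaryBufferTrackingSketch(BufferAllocator allocator) {
        this.allocator = allocator;
    }

    // Hypothetical stand-in for reading a page: just allocates the uncompressed buffer.
    DrillBuf readPage(int uncompressedSize) {
        DrillBuf pageData = allocator.buffer(uncompressedSize);
        allocatedDictionaryBuffers.add(pageData);
        return pageData;
    }

    void clear() {
        for (DrillBuf buf : allocatedDictionaryBuffers) {
            buf.release();
        }
        allocatedDictionaryBuffers.clear();
    }

    public static void main(String[] args) {
        BufferAllocator allocator = RootAllocatorFactory.newRoot(DrillConfig.create());
        DictionaryBufferTrackingSketch reader = new DictionaryBufferTrackingSketch(allocator);
        reader.readPage(64);
        reader.readPage(128);
        reader.clear();     // all dictionary buffers go back to the allocator here
        allocator.close();  // closing with outstanding DrillBufs would be flagged as a leak
    }
}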

Example 9 with DrillBuf

use of io.netty.buffer.DrillBuf in project drill by axbaretto.

The class PrintingResultsListener, method dataArrived: loads the DrillBuf carried by the QueryDataBatch into the record batch loader, prints the batch in the configured format, then clears the loader and releases the batch.

@Override
@SuppressWarnings("resource")
public void dataArrived(QueryDataBatch result, ConnectionThrottle throttle) {
    final QueryData header = result.getHeader();
    final DrillBuf data = result.getData();
    try {
        if (data != null) {
            count.addAndGet(header.getRowCount());
            try {
                loader.load(header.getDef(), data);
            // TODO:  Clean:  DRILL-2933:  That load(...) no longer throws
            // SchemaChangeException, so check/clean catch clause below.
            } catch (SchemaChangeException e) {
                submissionFailed(UserException.systemError(e).build(logger));
            }
            try {
                switch(format) {
                    case TABLE:
                        VectorUtil.showVectorAccessibleContent(loader, columnWidth);
                        break;
                    case TSV:
                        VectorUtil.showVectorAccessibleContent(loader, "\t");
                        break;
                    case CSV:
                        VectorUtil.showVectorAccessibleContent(loader, ",");
                        break;
                    default:
                        throw new IllegalStateException(format.toString());
                }
            } finally {
                loader.clear();
            }
        }
    } finally {
        result.release();
    }
}
Also used: SchemaChangeException (org.apache.drill.exec.exception.SchemaChangeException), QueryData (org.apache.drill.exec.proto.UserBitShared.QueryData), DrillBuf (io.netty.buffer.DrillBuf)
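
The structure worth copying is the nesting of finally blocks: the loader is cleared as soon as the batch has been printed, but the DrillBuf carried by the QueryDataBatch is released only in the outermost finally, so it stays valid through both the load and the printing. A reduced sketch of that release discipline, with a bare DrillBuf from a standalone allocator (RootAllocatorFactory.newRoot(DrillConfig.create()), an assumption for a self-contained example) standing in for result.getData():

import org.apache.drill.common.config.DrillConfig;
import org.apache.drill.exec.memory.BufferAllocator;
import org.apache.drill.exec.memory.RootAllocatorFactory;

import io.netty.buffer.DrillBuf;

public class ReleaseDisciplineSketch {
    public static void main(String[] args) {
        BufferAllocator allocator = RootAllocatorFactory.newRoot(DrillConfig.create());

        // Stand-in for result.getData(): a reference-counted buffer the receiver now owns.
        DrillBuf data = allocator.buffer(256);
        try {
            // "Load" and "print" the batch; in the listener this is loader.load(...)
            // followed by VectorUtil.showVectorAccessibleContent(...).
            data.setInt(0, 42);
            System.out.println("first value: " + data.getInt(0)
                + ", refCnt before release: " + data.refCnt());
        } finally {
            data.release();     // analogous to result.release()
        }
        allocator.close();      // succeeds only because the buffer was released above
    }
}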

Example 10 with DrillBuf

use of io.netty.buffer.DrillBuf in project drill by axbaretto.

The class VectorAccessibleSerializable, method writeToStream: writes the RecordBatchDef, the optional two-byte selection vector, and each value-vector DrillBuf of the batch to the output stream.

/**
 * Serializes the VectorAccessible batch and writes it to an output stream.
 * @param output the OutputStream to write to
 * @throws IOException if writing the batch to the stream fails
 */
@SuppressWarnings("resource")
@Override
public void writeToStream(OutputStream output) throws IOException {
    Preconditions.checkNotNull(output);
    final Timer.Context timerContext = metrics.timer(WRITER_TIMER).time();
    final DrillBuf[] incomingBuffers = batch.getBuffers();
    final UserBitShared.RecordBatchDef batchDef = batch.getDef();
    try {
        /* Write the metadata to the file */
        batchDef.writeDelimitedTo(output);
        /* If we have a selection vector, dump it to file first */
        if (svMode == BatchSchema.SelectionVectorMode.TWO_BYTE) {
            recordCount = sv2.getCount();
            final int dataLength = recordCount * SelectionVector2.RECORD_SIZE;
            allocator.write(sv2.getBuffer(false), dataLength, output);
        }
        /* Dump the array of ByteBuf's associated with the value vectors */
        for (DrillBuf buf : incomingBuffers) {
            /* dump the buffer into the OutputStream */
            allocator.write(buf, output);
        }
        timeNs += timerContext.stop();
    } catch (IOException e) {
        throw new RuntimeException(e);
    } finally {
        clear();
    }
}
Also used: Timer (com.codahale.metrics.Timer), RecordBatchDef (org.apache.drill.exec.proto.UserBitShared.RecordBatchDef), IOException (java.io.IOException), UserBitShared (org.apache.drill.exec.proto.UserBitShared), DrillBuf (io.netty.buffer.DrillBuf)
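
The per-buffer loop is the core of the serializer: after the delimited RecordBatchDef and the optional two-byte selection vector, each value-vector DrillBuf is streamed out in order. A minimal sketch of dumping a DrillBuf's readable bytes to an OutputStream, using Netty's getBytes as a stand-in for the allocator.write helper above and an allocator from RootAllocatorFactory.newRoot(DrillConfig.create()):

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;

import org.apache.drill.common.config.DrillConfig;
import org.apache.drill.exec.memory.BufferAllocator;
import org.apache.drill.exec.memory.RootAllocatorFactory;

import io.netty.buffer.DrillBuf;

public class BufferDumpSketch {
    // Copy the readable region of a DrillBuf to a stream without moving its indices,
    // roughly what writing each incoming buffer in writeToStream() accomplishes.
    static void dump(DrillBuf buf, OutputStream out) throws IOException {
        buf.getBytes(buf.readerIndex(), out, buf.readableBytes());
    }

    public static void main(String[] args) throws IOException {
        BufferAllocator allocator = RootAllocatorFactory.newRoot(DrillConfig.create());
        DrillBuf buf = allocator.buffer(16);
        try {
            buf.writeLong(123L);
            buf.writeLong(456L);

            ByteArrayOutputStream out = new ByteArrayOutputStream();
            dump(buf, out);
            System.out.println("bytes written: " + out.size());   // 16
        } finally {
            buf.release();
            allocator.close();
        }
    }
}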

Aggregations

DrillBuf (io.netty.buffer.DrillBuf): 187
Test (org.junit.Test): 63
MemoryTest (org.apache.drill.categories.MemoryTest): 38
SelectionVector4 (org.apache.drill.exec.record.selection.SelectionVector4): 22
ValueVector (org.apache.drill.exec.vector.ValueVector): 18
BaseTest (org.apache.drill.test.BaseTest): 18
MaterializedField (org.apache.drill.exec.record.MaterializedField): 16
IOException (java.io.IOException): 13
VectorTest (org.apache.drill.categories.VectorTest): 13
OutOfMemoryException (org.apache.drill.exec.exception.OutOfMemoryException): 13
ExecTest (org.apache.drill.exec.ExecTest): 11
BufferAllocator (org.apache.drill.exec.memory.BufferAllocator): 11
VectorContainer (org.apache.drill.exec.record.VectorContainer): 10
Stopwatch (com.google.common.base.Stopwatch): 9
ByteBuffer (java.nio.ByteBuffer): 9
BatchSchema (org.apache.drill.exec.record.BatchSchema): 9
UnlikelyTest (org.apache.drill.categories.UnlikelyTest): 8
UserBitShared (org.apache.drill.exec.proto.UserBitShared): 8
SerializedField (org.apache.drill.exec.proto.UserBitShared.SerializedField): 8
DrillTest (org.apache.drill.test.DrillTest): 8