Search in sources:

Example 86 with DataSize

use of io.airlift.units.DataSize in project presto by prestodb.

In class StatusPrinter, the method printStageTree:

/**
 * Prints one summary row for the given stage, then recurses into its sub-stages
 * with an increased indent. Stage numbers are assigned depth-first via the
 * shared counter. Rates are reported as zero once a stage is done (elapsed
 * wall time no longer reflects the stage's own runtime).
 */
private void printStageTree(StageStats stage, String indent, AtomicInteger stageNumberCounter) {
    Duration elapsedTime = nanosSince(start);
    // Example of the rendered tree:
    // STAGE  S    ROWS  ROWS/s  BYTES  BYTES/s  QUEUED    RUN   DONE
    // 0......Q     26M   9077M  9993G    9077M   9077M  9077M  9077M
    //   2....R     17K    627M   673M     627M    627M   627M   627M
    //     3..C     999    627M   673M     627M    627M   627M   627M
    //   4....R     26M    627M   673T     627M    627M   627M   627M
    //     5..F     29T    627M   673M     627M    627M   627M   627M
    String stageId = String.valueOf(stageNumberCounter.getAndIncrement());
    // Pad the "indent + number" label with dots out to the 10-character STAGE column.
    String label = indent + stageId;
    label += Strings.repeat(".", max(0, 10 - label.length()));

    boolean done = stage.isDone();
    String bytesPerSecond = done
            ? formatDataRate(new DataSize(0, BYTE), new Duration(0, SECONDS), false)
            : formatDataRate(bytes(stage.getProcessedBytes()), elapsedTime, false);
    String rowsPerSecond = done
            ? formatCountRate(0, new Duration(0, SECONDS), false)
            : formatCountRate(stage.getProcessedRows(), elapsedTime, false);

    reprintLine(String.format("%10s%1s  %5s  %6s  %5s  %7s  %6s  %5s  %5s",
            label,
            stageStateCharacter(stage.getState()),
            formatCount(stage.getProcessedRows()),
            rowsPerSecond,
            formatDataSize(bytes(stage.getProcessedBytes()), false),
            bytesPerSecond,
            stage.getQueuedSplits(),
            stage.getRunningSplits(),
            stage.getCompletedSplits()));

    for (StageStats subStage : stage.getSubStages()) {
        printStageTree(subStage, indent + "  ", stageNumberCounter);
    }
}
Also used : StageStats(com.facebook.presto.client.StageStats) FormatUtils.formatDataSize(com.facebook.presto.cli.FormatUtils.formatDataSize) DataSize(io.airlift.units.DataSize) Duration(io.airlift.units.Duration)

Example 87 with DataSize

use of io.airlift.units.DataSize in project presto by prestodb.

In class TestMemoryPagesStore, the method setUp:

/**
 * Re-creates the fixtures before every test method: a fresh in-memory pages
 * store capped at 1 MB per node, and a page sink provider backed by it.
 */
@BeforeMethod
public void setUp() {
    MemoryConfig config = new MemoryConfig().setMaxDataPerNode(new DataSize(1, DataSize.Unit.MEGABYTE));
    pagesStore = new MemoryPagesStore(config);
    pageSinkProvider = new MemoryPageSinkProvider(pagesStore);
}
Also used : DataSize(io.airlift.units.DataSize) BeforeMethod(org.testng.annotations.BeforeMethod)

Example 88 with DataSize

use of io.airlift.units.DataSize in project presto by prestodb.

In class HiveFileFormatBenchmark, the method createTpchDataSet:

/**
 * Builds a benchmark data set by generating TPC-H rows for the given table/columns
 * and packing them into pages until at least MIN_DATA_SIZE bytes have been produced
 * (or the generator is exhausted).
 *
 * <p>DATE columns are downgraded to unbounded VARCHAR when the target file format
 * cannot encode dates, so the written data stays loadable by that format.
 *
 * @param format     target file format (drives the DATE-vs-VARCHAR decision)
 * @param tpchTable  TPC-H table to generate rows from (scale factor 10, part 1 of 1)
 * @param columns    columns to materialize, in output order
 * @return the generated column names, types, and pages
 * @throws IllegalArgumentException if a column has an unsupported base type
 */
private static <E extends TpchEntity> TestData createTpchDataSet(FileFormat format, TpchTable<E> tpchTable, List<TpchColumn<E>> columns) {
    List<String> columnNames = columns.stream().map(TpchColumn::getColumnName).collect(toList());
    List<Type> columnTypes = columns.stream().map(HiveFileFormatBenchmark::getColumnType).map(type -> format.supportsDate() || !DATE.equals(type) ? type : createUnboundedVarcharType()).collect(toList());
    PageBuilder pageBuilder = new PageBuilder(columnTypes);
    ImmutableList.Builder<Page> pages = ImmutableList.builder();
    long dataSize = 0;
    for (E row : tpchTable.createGenerator(10, 1, 1)) {
        pageBuilder.declarePosition();
        for (int i = 0; i < columns.size(); i++) {
            TpchColumn<E> column = columns.get(i);
            BlockBuilder blockBuilder = pageBuilder.getBlockBuilder(i);
            switch (column.getType().getBase()) {
                case IDENTIFIER:
                    BIGINT.writeLong(blockBuilder, column.getIdentifier(row));
                    break;
                case INTEGER:
                    INTEGER.writeLong(blockBuilder, column.getInteger(row));
                    break;
                case DATE:
                    if (format.supportsDate()) {
                        DATE.writeLong(blockBuilder, column.getDate(row));
                    }
                    else {
                        // Mirror the VARCHAR downgrade applied to columnTypes above.
                        createUnboundedVarcharType().writeString(blockBuilder, column.getString(row));
                    }
                    break;
                case DOUBLE:
                    DOUBLE.writeDouble(blockBuilder, column.getDouble(row));
                    break;
                case VARCHAR:
                    createUnboundedVarcharType().writeSlice(blockBuilder, Slices.utf8Slice(column.getString(row)));
                    break;
                default:
                    throw new IllegalArgumentException("Unsupported type " + column.getType());
            }
        }
        if (pageBuilder.isFull()) {
            Page page = pageBuilder.build();
            pages.add(page);
            pageBuilder.reset();
            dataSize += page.getSizeInBytes();
            if (dataSize >= MIN_DATA_SIZE) {
                break;
            }
        }
    }
    // Fix: flush the trailing partial page. Previously, rows still buffered when
    // the generator ran out before pageBuilder.isFull() were silently dropped.
    if (!pageBuilder.isEmpty()) {
        pages.add(pageBuilder.build());
    }
    return new TestData(columnNames, columnTypes, pages.build());
}
Also used : Page(com.facebook.presto.spi.Page) HdfsEnvironment(com.facebook.presto.hive.HdfsEnvironment) RunResult(org.openjdk.jmh.results.RunResult) LINE_ITEM(io.airlift.tpch.TpchTable.LINE_ITEM) Random(java.util.Random) Warmup(org.openjdk.jmh.annotations.Warmup) BIGINT(com.facebook.presto.spi.type.BigintType.BIGINT) OutputTimeUnit(org.openjdk.jmh.annotations.OutputTimeUnit) Slices(io.airlift.slice.Slices) HiveCompressionCodec(com.facebook.presto.hive.HiveCompressionCodec) TearDown(org.openjdk.jmh.annotations.TearDown) FileUtils.deleteRecursively(io.airlift.testing.FileUtils.deleteRecursively) HiveClientConfig(com.facebook.presto.hive.HiveClientConfig) Setup(org.openjdk.jmh.annotations.Setup) Param(org.openjdk.jmh.annotations.Param) Collection(java.util.Collection) BlockBuilder(com.facebook.presto.spi.block.BlockBuilder) UUID(java.util.UUID) String.format(java.lang.String.format) ConnectorSession(com.facebook.presto.spi.ConnectorSession) TpchTable(io.airlift.tpch.TpchTable) TpchEntity(io.airlift.tpch.TpchEntity) OptionsBuilder(org.openjdk.jmh.runner.options.OptionsBuilder) DataSize(io.airlift.units.DataSize) List(java.util.List) INTEGER(com.facebook.presto.spi.type.IntegerType.INTEGER) HiveSessionProperties(com.facebook.presto.hive.HiveSessionProperties) Options(org.openjdk.jmh.runner.options.Options) TpchColumn(io.airlift.tpch.TpchColumn) DOUBLE(com.facebook.presto.spi.type.DoubleType.DOUBLE) Measurement(org.openjdk.jmh.annotations.Measurement) ArrayType(com.facebook.presto.type.ArrayType) ORDERS(io.airlift.tpch.TpchTable.ORDERS) MEGABYTE(io.airlift.units.DataSize.Unit.MEGABYTE) MapType(com.facebook.presto.type.MapType) Scope(org.openjdk.jmh.annotations.Scope) OrderColumn(io.airlift.tpch.OrderColumn) ArrayList(java.util.ArrayList) Statistics(org.openjdk.jmh.util.Statistics) HiveTestUtils.createTestHdfsEnvironment(com.facebook.presto.hive.HiveTestUtils.createTestHdfsEnvironment) AuxCounters(org.openjdk.jmh.annotations.AuxCounters) 
ImmutableList(com.google.common.collect.ImmutableList) Type(com.facebook.presto.spi.type.Type) Runner(org.openjdk.jmh.runner.Runner) FileUtils.createTempDir(io.airlift.testing.FileUtils.createTempDir) HadoopNative(com.facebook.presto.hadoop.HadoopNative) IOException(java.io.IOException) TestingConnectorSession(com.facebook.presto.testing.TestingConnectorSession) State(org.openjdk.jmh.annotations.State) Benchmark(org.openjdk.jmh.annotations.Benchmark) File(java.io.File) TimeUnit(java.util.concurrent.TimeUnit) VarcharType.createUnboundedVarcharType(com.facebook.presto.spi.type.VarcharType.createUnboundedVarcharType) Collectors.toList(java.util.stream.Collectors.toList) ConnectorPageSource(com.facebook.presto.spi.ConnectorPageSource) PageBuilder(com.facebook.presto.spi.PageBuilder) DATE(com.facebook.presto.spi.type.DateType.DATE) Fork(org.openjdk.jmh.annotations.Fork) IntArrays(it.unimi.dsi.fastutil.ints.IntArrays) DOUBLE(com.facebook.presto.spi.type.DoubleType.DOUBLE) MEGABYTE(io.airlift.units.DataSize.Unit.MEGABYTE) DATE(com.facebook.presto.spi.type.DateType.DATE) ImmutableList(com.google.common.collect.ImmutableList) Page(com.facebook.presto.spi.Page) PageBuilder(com.facebook.presto.spi.PageBuilder) ArrayType(com.facebook.presto.type.ArrayType) MapType(com.facebook.presto.type.MapType) Type(com.facebook.presto.spi.type.Type) VarcharType.createUnboundedVarcharType(com.facebook.presto.spi.type.VarcharType.createUnboundedVarcharType) BlockBuilder(com.facebook.presto.spi.block.BlockBuilder)

Example 89 with DataSize

use of io.airlift.units.DataSize in project presto by prestodb.

In class RcFileTester, the method writeRcFileColumnOld:

/**
 * Writes the given values as a single-column RCFile using the legacy Hive
 * writer/serializer path, then returns the resulting file size.
 *
 * @param outputFile  destination file for the RCFile
 * @param format      Hive format providing the (deprecated) serializer
 * @param compression compression codec applied by the record writer
 * @param type        Presto type of the column being written
 * @param values      column values; each is preprocessed into the legacy
 *                    writable representation before serialization
 * @return the file's on-disk size, converted to the most succinct unit
 * @throws Exception if writer creation, serialization, or I/O fails
 */
private static DataSize writeRcFileColumnOld(File outputFile, Format format, Compression compression, Type type, Iterator<?> values) throws Exception {
    ObjectInspector columnObjectInspector = getJavaObjectInspector(type);
    RecordWriter recordWriter = createRcFileWriterOld(outputFile, compression, columnObjectInspector);
    // Wrap the single column in a one-field struct named "test" — the row shape
    // the Hive serializer expects.
    SettableStructObjectInspector objectInspector = createSettableStructObjectInspector("test", columnObjectInspector);
    Object row = objectInspector.create();
    List<StructField> fields = ImmutableList.copyOf(objectInspector.getAllStructFieldRefs());
    @SuppressWarnings("deprecation") Serializer serializer = format.createSerializer();
    // Table properties describing the schema; required by serializer.initialize.
    Properties tableProperties = new Properties();
    tableProperties.setProperty("columns", "test");
    tableProperties.setProperty("columns.types", objectInspector.getTypeName());
    serializer.initialize(new JobConf(false), tableProperties);
    while (values.hasNext()) {
        Object value = values.next();
        // Convert to the legacy writable form this old write path expects.
        value = preprocessWriteValueOld(type, value);
        objectInspector.setStructFieldData(row, fields.get(0), value);
        Writable record = serializer.serialize(row, objectInspector);
        recordWriter.write(record);
    }
    // close(false) = not aborting; must happen before reading the file length.
    recordWriter.close(false);
    return new DataSize(outputFile.length(), BYTE).convertToMostSuccinctDataSize();
}
Also used : SettableStructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.SettableStructObjectInspector) PrimitiveObjectInspectorFactory.javaByteObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaByteObjectInspector) PrimitiveObjectInspectorFactory.javaLongObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaLongObjectInspector) PrimitiveObjectInspectorFactory.javaTimestampObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaTimestampObjectInspector) PrimitiveObjectInspectorFactory.javaDateObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaDateObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) PrimitiveObjectInspectorFactory.javaByteArrayObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaByteArrayObjectInspector) PrimitiveObjectInspectorFactory.javaFloatObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaFloatObjectInspector) PrimitiveObjectInspectorFactory.javaDoubleObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaDoubleObjectInspector) PrimitiveObjectInspectorFactory.javaIntObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaIntObjectInspector) PrimitiveObjectInspectorFactory.javaShortObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaShortObjectInspector) ObjectInspectorFactory.getStandardStructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.getStandardStructObjectInspector) 
SettableStructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.SettableStructObjectInspector) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) PrimitiveObjectInspectorFactory.javaBooleanObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaBooleanObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector) PrimitiveObjectInspectorFactory.javaStringObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.javaStringObjectInspector) RecordWriter(org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter) StructField(org.apache.hadoop.hive.serde2.objectinspector.StructField) DataSize(io.airlift.units.DataSize) DateWritable(org.apache.hadoop.hive.serde2.io.DateWritable) Writable(org.apache.hadoop.io.Writable) IntWritable(org.apache.hadoop.io.IntWritable) BytesRefArrayWritable(org.apache.hadoop.hive.serde2.columnar.BytesRefArrayWritable) BooleanWritable(org.apache.hadoop.io.BooleanWritable) FloatWritable(org.apache.hadoop.io.FloatWritable) LongWritable(org.apache.hadoop.io.LongWritable) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) DoubleWritable(org.apache.hadoop.io.DoubleWritable) ByteWritable(org.apache.hadoop.io.ByteWritable) BytesWritable(org.apache.hadoop.io.BytesWritable) TimestampWritable(org.apache.hadoop.hive.serde2.io.TimestampWritable) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) StructObject(org.apache.hadoop.hive.serde2.StructObject) Properties(java.util.Properties) JobConf(org.apache.hadoop.mapred.JobConf) Serializer(org.apache.hadoop.hive.serde2.Serializer)

Example 90 with DataSize

use of io.airlift.units.DataSize in project presto by prestodb.

In class TestingTaskContext, the method createTaskContext:

/**
 * Creates a TaskContext for tests, backed by fresh 1 GB user ("test") and
 * system ("testSystem") memory pools and a query context capped at
 * {@code maxMemory}.
 */
public static TaskContext createTaskContext(Executor executor, Session session, DataSize maxMemory) {
    DataSize poolSize = new DataSize(1, GIGABYTE);
    MemoryPool userPool = new MemoryPool(new MemoryPoolId("test"), poolSize);
    MemoryPool systemPool = new MemoryPool(new MemoryPoolId("testSystem"), poolSize);
    QueryContext queryContext = new QueryContext(new QueryId("test_query"), maxMemory, userPool, systemPool, executor);
    return createTaskContext(queryContext, executor, session);
}
Also used : DataSize(io.airlift.units.DataSize) QueryId(com.facebook.presto.spi.QueryId) QueryContext(com.facebook.presto.memory.QueryContext) MemoryPoolId(com.facebook.presto.spi.memory.MemoryPoolId) MemoryPool(com.facebook.presto.memory.MemoryPool)

Aggregations

DataSize (io.airlift.units.DataSize)114 Test (org.testng.annotations.Test)71 Duration (io.airlift.units.Duration)36 Page (com.facebook.presto.spi.Page)23 PlanNodeId (com.facebook.presto.sql.planner.plan.PlanNodeId)19 RowPagesBuilder (com.facebook.presto.RowPagesBuilder)11 HashAggregationOperatorFactory (com.facebook.presto.operator.HashAggregationOperator.HashAggregationOperatorFactory)11 URI (java.net.URI)11 MockQueryExecution (com.facebook.presto.execution.MockQueryExecution)10 RootInternalResourceGroup (com.facebook.presto.execution.resourceGroups.InternalResourceGroup.RootInternalResourceGroup)10 TestingHttpClient (io.airlift.http.client.testing.TestingHttpClient)10 Type (com.facebook.presto.spi.type.Type)9 MaterializedResult (com.facebook.presto.testing.MaterializedResult)9 MemoryPoolId (com.facebook.presto.spi.memory.MemoryPoolId)7 QueryId (com.facebook.presto.spi.QueryId)6 BufferResult (com.facebook.presto.execution.buffer.BufferResult)5 MetadataManager (com.facebook.presto.metadata.MetadataManager)5 TopNOperatorFactory (com.facebook.presto.operator.TopNOperator.TopNOperatorFactory)5 ImmutableMap (com.google.common.collect.ImmutableMap)5 ArrayList (java.util.ArrayList)5