
Example 1 with JavaDataModel

Use of org.apache.hadoop.hive.ql.util.JavaDataModel in project hive by apache.

The class VectorAggregationBufferBatch, method compileAggregationBatchInfo:

public void compileAggregationBatchInfo(VectorAggregateExpression[] aggregators) {
    JavaDataModel model = JavaDataModel.get();
    int[] variableSizeAggregators = new int[aggregators.length];
    int indexVariableSizes = 0;
    // Fixed overhead of the batch object itself: object header, two single-slot
    // primitive fields, and one reference, rounded up to the alignment unit.
    aggregatorsFixedSize = JavaDataModel.alignUp(model.object() + model.primitive1() * 2 + model.ref(), model.memoryAlign());
    // Plus the backing Object[] that holds one aggregation buffer per aggregator.
    aggregatorsFixedSize += model.lengthForObjectArrayOfSize(aggregators.length);
    for (int i = 0; i < aggregators.length; ++i) {
        VectorAggregateExpression aggregator = aggregators[i];
        aggregatorsFixedSize += aggregator.getAggregationBufferFixedSize();
        // Record the indices of aggregators whose buffers also grow at runtime.
        if (aggregator.hasVariableSize()) {
            variableSizeAggregators[indexVariableSizes] = i;
            ++indexVariableSizes;
        }
    }
    // Keep only the slots actually filled.
    this.variableSizeAggregators = Arrays.copyOfRange(variableSizeAggregators, 0, indexVariableSizes);
}
Also used: VectorAggregateExpression (org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorAggregateExpression), JavaDataModel (org.apache.hadoop.hive.ql.util.JavaDataModel)
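
To make the sizing arithmetic concrete, here is a minimal standalone sketch, not part of the Hive source, that estimates the same kind of fixed footprint: an object header, two single-slot primitives, one reference, and a backing Object[] (the element count of 4 is an arbitrary assumption):

import org.apache.hadoop.hive.ql.util.JavaDataModel;

public class FixedSizeSketch {
    public static void main(String[] args) {
        JavaDataModel model = JavaDataModel.get();
        // Shallow footprint: header + two single-slot primitives + one reference,
        // rounded up to the platform alignment unit.
        int fixedSize = JavaDataModel.alignUp(
            model.object() + model.primitive1() * 2 + model.ref(),
            model.memoryAlign());
        // Backing Object[] for a hypothetical 4 aggregation buffers.
        fixedSize += model.lengthForObjectArrayOfSize(4);
        System.out.println("Estimated fixed size: " + fixedSize + " bytes");
    }
}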

Example 2 with JavaDataModel

Use of org.apache.hadoop.hive.ql.util.JavaDataModel in project hive by apache.

The class VectorHashKeyWrapper, method getVariableSize:

public int getVariableSize() {
    int variableSize = 0;
    // Look up the data model once; JavaDataModel.get() returns the same model
    // for the lifetime of the VM, so it need not be fetched per key.
    JavaDataModel model = JavaDataModel.get();
    for (int i = 0; i < byteLengths.length; ++i) {
        // Array header plus aligned storage for each key's byte[] payload.
        variableSize += model.lengthForByteArrayOfSize(byteLengths[i]);
    }
    return variableSize;
}
Also used: JavaDataModel (org.apache.hadoop.hive.ql.util.JavaDataModel)
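
A minimal sketch of the same per-key accounting outside Hive, assuming a few hypothetical payload lengths; lengthForByteArrayOfSize covers the array header plus aligned element storage:

import org.apache.hadoop.hive.ql.util.JavaDataModel;

public class VariableSizeSketch {
    public static void main(String[] args) {
        JavaDataModel model = JavaDataModel.get();
        // Hypothetical byte[] payload lengths for three string keys.
        int[] byteLengths = { 3, 17, 64 };
        int variableSize = 0;
        for (int length : byteLengths) {
            variableSize += model.lengthForByteArrayOfSize(length);
        }
        System.out.println("Estimated variable size: " + variableSize + " bytes");
    }
}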

Example 3 with JavaDataModel

Use of org.apache.hadoop.hive.ql.util.JavaDataModel in project hive by apache.

The class VectorHashKeyWrapperBatch, method compileKeyWrapperBatch:

/**
   * Prepares a VectorHashKeyWrapperBatch to work for a specific set of keys.
   * Computes the fast access lookup indices, preallocates all needed internal arrays.
   * This step is done only once per query, not once per batch. The information computed now
   * will be used to generate proper individual VectorHashKeyWrapper objects.
   */
public static VectorHashKeyWrapperBatch compileKeyWrapperBatch(VectorExpression[] keyExpressions) throws HiveException {
    VectorHashKeyWrapperBatch compiledKeyWrapperBatch = new VectorHashKeyWrapperBatch(keyExpressions.length);
    compiledKeyWrapperBatch.keyExpressions = keyExpressions;
    compiledKeyWrapperBatch.keysFixedSize = 0;
    // Inspect the output type of each key expression.
    for (int i = 0; i < keyExpressions.length; ++i) {
        compiledKeyWrapperBatch.addKey(keyExpressions[i].getOutputType());
    }
    compiledKeyWrapperBatch.finishAdding();
    compiledKeyWrapperBatch.vectorHashKeyWrappers = new VectorHashKeyWrapper[VectorizedRowBatch.DEFAULT_SIZE];
    for (int i = 0; i < VectorizedRowBatch.DEFAULT_SIZE; ++i) {
        compiledKeyWrapperBatch.vectorHashKeyWrappers[i] = compiledKeyWrapperBatch.allocateKeyWrapper();
    }
    JavaDataModel model = JavaDataModel.get();
    // Compute the fixed-size overhead for the keys,
    // starting with the key wrapper batch object itself.
    compiledKeyWrapperBatch.keysFixedSize += JavaDataModel.alignUp(model.object() + model.ref() * MODEL_REFERENCES_COUNT + model.primitive1(), model.memoryAlign());
    // Now add the key wrapper arrays
    compiledKeyWrapperBatch.keysFixedSize += model.lengthForLongArrayOfSize(compiledKeyWrapperBatch.longIndices.length);
    compiledKeyWrapperBatch.keysFixedSize += model.lengthForDoubleArrayOfSize(compiledKeyWrapperBatch.doubleIndices.length);
    compiledKeyWrapperBatch.keysFixedSize += model.lengthForObjectArrayOfSize(compiledKeyWrapperBatch.stringIndices.length);
    compiledKeyWrapperBatch.keysFixedSize += model.lengthForObjectArrayOfSize(compiledKeyWrapperBatch.decimalIndices.length);
    compiledKeyWrapperBatch.keysFixedSize += model.lengthForObjectArrayOfSize(compiledKeyWrapperBatch.timestampIndices.length);
    compiledKeyWrapperBatch.keysFixedSize += model.lengthForObjectArrayOfSize(compiledKeyWrapperBatch.intervalDayTimeIndices.length);
    compiledKeyWrapperBatch.keysFixedSize += model.lengthForIntArrayOfSize(compiledKeyWrapperBatch.longIndices.length) * 2;
    compiledKeyWrapperBatch.keysFixedSize += model.lengthForBooleanArrayOfSize(keyExpressions.length);
    return compiledKeyWrapperBatch;
}
Also used: JavaDataModel (org.apache.hadoop.hive.ql.util.JavaDataModel)
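
The repeated JavaDataModel.alignUp calls round each raw size up to the JVM's object alignment boundary, which memoryAlign() reports (typically 8 bytes on 64-bit VMs). A short sketch of that rounding, under the 8-byte assumption:

import org.apache.hadoop.hive.ql.util.JavaDataModel;

public class AlignUpSketch {
    public static void main(String[] args) {
        JavaDataModel model = JavaDataModel.get();
        int align = model.memoryAlign();
        // With 8-byte alignment, 13 rounds up to 16 and 16 stays at 16.
        System.out.println(JavaDataModel.alignUp(13, align));
        System.out.println(JavaDataModel.alignUp(16, align));
    }
}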

Example 4 with JavaDataModel

Use of org.apache.hadoop.hive.ql.util.JavaDataModel in project hive by apache.

The class VectorUDAFBloomFilter, method getAggregationBufferFixedSize:

@Override
public int getAggregationBufferFixedSize() {
    if (bitSetSize < 0) {
        // Not pretty, but we need a way to get the size
        try {
            Aggregation agg = (Aggregation) getNewAggregationBuffer();
            bitSetSize = agg.bf.getBitSet().length;
        } catch (Exception e) {
            throw new RuntimeException("Unexpected error while creating AggregationBuffer", e);
        }
    }
    // BloomFilter: object(BitSet: object(data: long[]), numBits: int, numHashFunctions: int)
    JavaDataModel model = JavaDataModel.get();
    int bloomFilterSize = JavaDataModel.alignUp(model.object() + model.lengthForLongArrayOfSize(bitSetSize), model.memoryAlign());
    return JavaDataModel.alignUp(model.object() + bloomFilterSize + model.primitive1() + model.primitive1(), model.memoryAlign());
}
Also used: JavaDataModel (org.apache.hadoop.hive.ql.util.JavaDataModel), IOException (java.io.IOException), HiveException (org.apache.hadoop.hive.ql.metadata.HiveException)
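
The method composes two nested footprints: the inner BitSet (object header plus its long[] backing array) and the outer BloomFilter (header, nested BitSet, and two int fields). A standalone sketch of the same composition, assuming a bitset of 16 longs:

import org.apache.hadoop.hive.ql.util.JavaDataModel;

public class BloomFilterSizeSketch {
    public static void main(String[] args) {
        JavaDataModel model = JavaDataModel.get();
        // Hypothetical number of longs in the BloomFilter's bitset.
        int bitSetSize = 16;
        // Inner object: BitSet header plus its long[] data.
        int bitSetBytes = JavaDataModel.alignUp(
            model.object() + model.lengthForLongArrayOfSize(bitSetSize),
            model.memoryAlign());
        // Outer object: BloomFilter header, nested BitSet, numBits, numHashFunctions.
        int totalBytes = JavaDataModel.alignUp(
            model.object() + bitSetBytes + model.primitive1() * 2,
            model.memoryAlign());
        System.out.println("Estimated BloomFilter size: " + totalBytes + " bytes");
    }
}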

Example 5 with JavaDataModel

Use of org.apache.hadoop.hive.ql.util.JavaDataModel in project hive by apache.

The class TestIncrementalObjectSizeEstimator, method testSimpleTypes:

@Test
public void testSimpleTypes() {
    JavaDataModel memModel = JavaDataModel.get();
    // Baseline: footprint of one boxed Integer.
    int intSize = runEstimate(new Integer(0), memModel, null);
    runEstimate(new String(""), memModel, "empty string");
    runEstimate(new String("foobarzzzzzzzzzzzzzz"), memModel, null);
    List<Object> list = new ArrayList<Object>(0);
    runEstimate(list, memModel, "empty ArrayList");
    list.add(new String("zzz"));
    runEstimate(list, memModel, "ArrayList - one string");
    list.add(new Integer(5));
    list.add(new Integer(6));
    int arrayListSize = runEstimate(list, memModel, "ArrayList - 3 elements");
    LinkedHashSet<Object> list2 = new LinkedHashSet<Object>(0);
    runEstimate(list2, memModel, "empty LinkedHashSet");
    list2.add(new String("zzzz"));
    runEstimate(list2, memModel, "LinkedHashSet - one string");
    list2.add(new Integer(7));
    list2.add(new Integer(4));
    int lhsSize = runEstimate(list2, memModel, "LinkedHashSet - 3 elements");
    Struct struct = new Struct();
    int structSize = runEstimate(struct, memModel, "Struct - empty");
    struct.i = 10;
    int structSize2 = runEstimate(struct, memModel, "Struct - one reference");
    // Setting the Integer field should add exactly one boxed int to the estimate.
    assertEquals(intSize + structSize, structSize2);
    struct.list = list;
    int structSize3 = runEstimate(struct, memModel, "Struct - with ArrayList");
    assertEquals(arrayListSize + structSize2, structSize3);
    struct.list2 = list2;
    int structSize4 = runEstimate(struct, memModel, "Struct - with LinkedHashSet");
    assertEquals(lhsSize + structSize3, structSize4);
    Struct2 struct2 = new Struct2();
    int recSize1 = runEstimate(struct2, memModel, "recursive struct - empty");
    struct2.next = new Struct2();
    struct2.top = new Struct2();
    int recSize2 = runEstimate(struct2, memModel, "recursive struct - no ring");
    // Three distinct Struct2 instances, not yet linked into a cycle.
    assertEquals(recSize1 * 3, recSize2);
    struct2.next.prev = struct2;
    int recSize3 = runEstimate(struct2, memModel, "recursive struct - ring added");
    // The back-reference closes a cycle but adds no new objects to count.
    assertEquals(recSize2, recSize3);
}
Also used: LinkedHashSet (java.util.LinkedHashSet), ArrayList (java.util.ArrayList), JavaDataModel (org.apache.hadoop.hive.ql.util.JavaDataModel), Test (org.junit.Test)
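
For reference, the intSize baseline measured at the top of the test should match the shallow footprint of one boxed Integer: an object header plus a single int slot, aligned. A direct computation with JavaDataModel (an illustration, not the test's runEstimate helper):

import org.apache.hadoop.hive.ql.util.JavaDataModel;

public class BoxedIntegerSketch {
    public static void main(String[] args) {
        JavaDataModel model = JavaDataModel.get();
        // Object header plus one int slot, rounded up to the alignment unit.
        int boxedIntSize = JavaDataModel.alignUp(
            model.object() + model.primitive1(),
            model.memoryAlign());
        System.out.println("Boxed Integer footprint: " + boxedIntSize + " bytes");
    }
}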

Aggregations

JavaDataModel (org.apache.hadoop.hive.ql.util.JavaDataModel): 5 uses
IOException (java.io.IOException): 1 use
ArrayList (java.util.ArrayList): 1 use
LinkedHashSet (java.util.LinkedHashSet): 1 use
VectorAggregateExpression (org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorAggregateExpression): 1 use
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 1 use
Test (org.junit.Test): 1 use