
Example 96 with PDataType

Use of org.apache.phoenix.schema.types.PDataType in project phoenix by apache.

The class ArrayLengthFunction, method evaluate.

@Override
public boolean evaluate(Tuple tuple, ImmutableBytesWritable ptr) {
    Expression arrayExpr = children.get(0);
    if (!arrayExpr.evaluate(tuple, ptr)) {
        return false;
    } else if (ptr.getLength() == 0) {
        return true;
    }
    PDataType baseType = PDataType.fromTypeId(children.get(0).getDataType().getSqlType() - PDataType.ARRAY_TYPE_BASE);
    int length = Math.abs(PArrayDataType.getArrayLength(ptr, baseType, arrayExpr.getMaxLength()));
    byte[] lengthBuf = new byte[PInteger.INSTANCE.getByteSize()];
    PInteger.INSTANCE.getCodec().encodeInt(length, lengthBuf, 0);
    ptr.set(lengthBuf);
    return true;
}
Also used : PDataType(org.apache.phoenix.schema.types.PDataType) Expression(org.apache.phoenix.expression.Expression)
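
For context, here is a minimal standalone sketch (illustrative class and variable names, not part of the Phoenix source above) of the base-type derivation that evaluate performs: Phoenix gives an array type a SQL type id equal to the element type id plus PDataType.ARRAY_TYPE_BASE, so subtracting the base and calling PDataType.fromTypeId recovers the element type used to decode the array length.

import org.apache.phoenix.schema.types.PDataType;
import org.apache.phoenix.schema.types.PIntegerArray;

public class ArrayBaseTypeSketch {
    public static void main(String[] args) {
        PDataType arrayType = PIntegerArray.INSTANCE;
        // Array SQL type id = element type id + ARRAY_TYPE_BASE, so subtracting
        // the base yields the element ("base") type.
        PDataType elementType = PDataType.fromTypeId(arrayType.getSqlType() - PDataType.ARRAY_TYPE_BASE);
        System.out.println(elementType.getSqlTypeName()); // expected: INTEGER
    }
}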

Example 97 with PDataType

Use of org.apache.phoenix.schema.types.PDataType in project phoenix by apache.

The class IndexMaintainer, method getEstimatedByteSize.

public int getEstimatedByteSize() {
    int size = WritableUtils.getVIntSize(nIndexSaltBuckets);
    size += WritableUtils.getVIntSize(estimatedIndexRowKeyBytes);
    size += WritableUtils.getVIntSize(indexedColumns.size());
    size += viewIndexId == null ? 0 : viewIndexId.length;
    for (ColumnReference ref : indexedColumns) {
        size += WritableUtils.getVIntSize(ref.getFamily().length);
        size += ref.getFamily().length;
        size += WritableUtils.getVIntSize(ref.getQualifier().length);
        size += ref.getQualifier().length;
    }
    for (int i = 0; i < indexedColumnTypes.size(); i++) {
        PDataType type = indexedColumnTypes.get(i);
        size += WritableUtils.getVIntSize(type.ordinal());
    }
    Set<ColumnReference> dataTableColRefs = coveredColumnsMap.keySet();
    size += WritableUtils.getVIntSize(dataTableColRefs.size());
    for (ColumnReference ref : dataTableColRefs) {
        size += WritableUtils.getVIntSize(ref.getFamilyWritable().getSize());
        size += ref.getFamily().length;
        size += WritableUtils.getVIntSize(ref.getQualifierWritable().getSize());
        size += ref.getQualifier().length;
    }
    size += indexTableName.length + WritableUtils.getVIntSize(indexTableName.length);
    size += rowKeyMetaData.getByteSize();
    size += dataEmptyKeyValueCF.length + WritableUtils.getVIntSize(dataEmptyKeyValueCF.length);
    size += emptyKeyValueCFPtr.getLength() + WritableUtils.getVIntSize(emptyKeyValueCFPtr.getLength());
    size += WritableUtils.getVIntSize(nDataCFs + 1);
    size += WritableUtils.getVIntSize(indexedExpressions.size());
    for (Expression expression : indexedExpressions) {
        size += WritableUtils.getVIntSize(ExpressionType.valueOf(expression).ordinal());
    }
    size += estimatedExpressionSize;
    return size;
}
Also used : PDataType(org.apache.phoenix.schema.types.PDataType) KeyValueColumnExpression(org.apache.phoenix.expression.KeyValueColumnExpression) SingleCellConstructorExpression(org.apache.phoenix.expression.SingleCellConstructorExpression) Expression(org.apache.phoenix.expression.Expression) SingleCellColumnExpression(org.apache.phoenix.expression.SingleCellColumnExpression) CoerceExpression(org.apache.phoenix.expression.CoerceExpression) LiteralExpression(org.apache.phoenix.expression.LiteralExpression) ColumnReference(org.apache.phoenix.hbase.index.covered.update.ColumnReference)
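
As a side note on the arithmetic above, getEstimatedByteSize relies on Hadoop's variable-length integer encoding for every length and ordinal it expects to serialize. A small standalone sketch (not from the Phoenix source) of WritableUtils.getVIntSize shows why small values are cheap:

import org.apache.hadoop.io.WritableUtils;

public class VIntSizeSketch {
    public static void main(String[] args) {
        // Values in the range [-112, 127] are encoded in a single byte.
        System.out.println(WritableUtils.getVIntSize(5)); // 1
        // Larger values need one marker byte plus the bytes for the magnitude.
        System.out.println(WritableUtils.getVIntSize(1000)); // 3
        System.out.println(WritableUtils.getVIntSize(100000)); // 4
    }
}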

Example 98 with PDataType

Use of org.apache.phoenix.schema.types.PDataType in project phoenix by apache.

The class IndexMaintainer, method buildUpdateMutation.

public Put buildUpdateMutation(KeyValueBuilder kvBuilder, ValueGetter valueGetter, ImmutableBytesWritable dataRowKeyPtr, long ts, byte[] regionStartKey, byte[] regionEndKey) throws IOException {
    byte[] indexRowKey = this.buildRowKey(valueGetter, dataRowKeyPtr, regionStartKey, regionEndKey);
    Put put = null;
    // New row being inserted: add the empty key value
    if (valueGetter == null || valueGetter.getLatestValue(dataEmptyKeyValueRef) == null) {
        put = new Put(indexRowKey);
        // add the keyvalue for the empty row
        put.add(kvBuilder.buildPut(new ImmutableBytesPtr(indexRowKey), this.getEmptyKeyValueFamily(), dataEmptyKeyValueRef.getQualifierWritable(), ts, // set the value to the empty column name
        dataEmptyKeyValueRef.getQualifierWritable()));
        put.setDurability(!indexWALDisabled ? Durability.USE_DEFAULT : Durability.SKIP_WAL);
    }
    ImmutableBytesPtr rowKey = new ImmutableBytesPtr(indexRowKey);
    if (immutableStorageScheme != ImmutableStorageScheme.ONE_CELL_PER_COLUMN) {
        // map from index column family to list of pair of index column and data column (for covered columns)
        Map<ImmutableBytesPtr, List<Pair<ColumnReference, ColumnReference>>> familyToColListMap = Maps.newHashMap();
        for (ColumnReference ref : this.getCoveredColumns()) {
            ColumnReference indexColRef = this.coveredColumnsMap.get(ref);
            ImmutableBytesPtr cf = new ImmutableBytesPtr(indexColRef.getFamily());
            if (!familyToColListMap.containsKey(cf)) {
                familyToColListMap.put(cf, Lists.<Pair<ColumnReference, ColumnReference>>newArrayList());
            }
            familyToColListMap.get(cf).add(Pair.newPair(indexColRef, ref));
        }
        // iterate over each column family and create a byte[] containing all the columns 
        for (Entry<ImmutableBytesPtr, List<Pair<ColumnReference, ColumnReference>>> entry : familyToColListMap.entrySet()) {
            byte[] columnFamily = entry.getKey().copyBytesIfNecessary();
            List<Pair<ColumnReference, ColumnReference>> colRefPairs = entry.getValue();
            int maxEncodedColumnQualifier = Integer.MIN_VALUE;
            // find the max col qualifier
            for (Pair<ColumnReference, ColumnReference> colRefPair : colRefPairs) {
                maxEncodedColumnQualifier = Math.max(maxEncodedColumnQualifier, encodingScheme.decode(colRefPair.getFirst().getQualifier()));
            }
            Expression[] colValues = EncodedColumnsUtil.createColumnExpressionArray(maxEncodedColumnQualifier);
            // set the values of the columns
            for (Pair<ColumnReference, ColumnReference> colRefPair : colRefPairs) {
                ColumnReference indexColRef = colRefPair.getFirst();
                ColumnReference dataColRef = colRefPair.getSecond();
                Expression expression = new SingleCellColumnExpression(new PDatum() {

                    @Override
                    public boolean isNullable() {
                        return false;
                    }

                    @Override
                    public SortOrder getSortOrder() {
                        return null;
                    }

                    @Override
                    public Integer getScale() {
                        return null;
                    }

                    @Override
                    public Integer getMaxLength() {
                        return null;
                    }

                    @Override
                    public PDataType getDataType() {
                        return null;
                    }
                }, dataColRef.getFamily(), dataColRef.getQualifier(), encodingScheme);
                ImmutableBytesPtr ptr = new ImmutableBytesPtr();
                expression.evaluate(new ValueGetterTuple(valueGetter), ptr);
                byte[] value = ptr.copyBytesIfNecessary();
                if (value != null) {
                    int indexArrayPos = encodingScheme.decode(indexColRef.getQualifier()) - QueryConstants.ENCODED_CQ_COUNTER_INITIAL_VALUE + 1;
                    colValues[indexArrayPos] = new LiteralExpression(value);
                }
            }
            List<Expression> children = Arrays.asList(colValues);
            // we use SingleCellConstructorExpression to serialize multiple columns into a single byte[]
            SingleCellConstructorExpression singleCellConstructorExpression = new SingleCellConstructorExpression(immutableStorageScheme, children);
            ImmutableBytesWritable ptr = new ImmutableBytesWritable();
            singleCellConstructorExpression.evaluate(new BaseTuple() {
            }, ptr);
            if (put == null) {
                put = new Put(indexRowKey);
                put.setDurability(!indexWALDisabled ? Durability.USE_DEFAULT : Durability.SKIP_WAL);
            }
            ImmutableBytesPtr colFamilyPtr = new ImmutableBytesPtr(columnFamily);
            //this is a little bit of extra work for installations that are running <0.94.14, but that should be rare and is a short-term set of wrappers - it shouldn't kill GC
            put.add(kvBuilder.buildPut(rowKey, colFamilyPtr, QueryConstants.SINGLE_KEYVALUE_COLUMN_QUALIFIER_BYTES_PTR, ts, ptr));
        }
    } else {
        for (ColumnReference ref : this.getCoveredColumns()) {
            ColumnReference indexColRef = this.coveredColumnsMap.get(ref);
            ImmutableBytesPtr cq = indexColRef.getQualifierWritable();
            ImmutableBytesPtr cf = indexColRef.getFamilyWritable();
            ImmutableBytesWritable value = valueGetter.getLatestValue(ref);
            if (value != null) {
                if (put == null) {
                    put = new Put(indexRowKey);
                    put.setDurability(!indexWALDisabled ? Durability.USE_DEFAULT : Durability.SKIP_WAL);
                }
                put.add(kvBuilder.buildPut(rowKey, cf, cq, ts, value));
            }
        }
    }
    return put;
}
Also used : BaseTuple(org.apache.phoenix.schema.tuple.BaseTuple) PDatum(org.apache.phoenix.schema.PDatum) SingleCellConstructorExpression(org.apache.phoenix.expression.SingleCellConstructorExpression) PDataType(org.apache.phoenix.schema.types.PDataType) List(java.util.List) ArrayList(java.util.ArrayList) Pair(org.apache.hadoop.hbase.util.Pair) ImmutableBytesWritable(org.apache.hadoop.hbase.io.ImmutableBytesWritable) ImmutableBytesPtr(org.apache.phoenix.hbase.index.util.ImmutableBytesPtr) LiteralExpression(org.apache.phoenix.expression.LiteralExpression) SortOrder(org.apache.phoenix.schema.SortOrder) Put(org.apache.hadoop.hbase.client.Put) SingleCellColumnExpression(org.apache.phoenix.expression.SingleCellColumnExpression) KeyValueColumnExpression(org.apache.phoenix.expression.KeyValueColumnExpression) Expression(org.apache.phoenix.expression.Expression) CoerceExpression(org.apache.phoenix.expression.CoerceExpression) ColumnReference(org.apache.phoenix.hbase.index.covered.update.ColumnReference) ValueGetterTuple(org.apache.phoenix.schema.tuple.ValueGetterTuple)
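
The first branch of buildUpdateMutation groups the covered columns by index column family before packing each family into a single cell. A minimal standalone sketch of that grouping step (illustrative names, plain strings instead of ColumnReference, not Phoenix internals):

import java.util.List;
import java.util.Map;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;

public class GroupByFamilySketch {
    public static void main(String[] args) {
        // family -> list of column qualifiers, with each list created lazily
        // the first time a family is seen, as in buildUpdateMutation
        Map<String, List<String>> familyToColumns = Maps.newHashMap();
        String[][] coveredColumns = { { "cf1", "a" }, { "cf1", "b" }, { "cf2", "c" } };
        for (String[] col : coveredColumns) {
            if (!familyToColumns.containsKey(col[0])) {
                familyToColumns.put(col[0], Lists.<String>newArrayList());
            }
            familyToColumns.get(col[0]).add(col[1]);
        }
        System.out.println(familyToColumns); // e.g. {cf1=[a, b], cf2=[c]}
    }
}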

Example 99 with PDataType

Use of org.apache.phoenix.schema.types.PDataType in project phoenix by apache.

The class IndexMaintainer, method generateIndexRowKeySchema.

// We have enough information to generate the index row key schema
private RowKeySchema generateIndexRowKeySchema() {
    int nIndexedColumns = getIndexPkColumnCount() + (isMultiTenant ? 1 : 0) + (!isLocalIndex && nIndexSaltBuckets > 0 ? 1 : 0) + (viewIndexId != null ? 1 : 0) - getNumViewConstants();
    RowKeySchema.RowKeySchemaBuilder builder = new RowKeySchema.RowKeySchemaBuilder(nIndexedColumns);
    builder.rowKeyOrderOptimizable(rowKeyOrderOptimizable);
    if (!isLocalIndex && nIndexSaltBuckets > 0) {
        builder.addField(SaltingUtil.SALTING_COLUMN, false, SortOrder.ASC);
        nIndexedColumns--;
    }
    int dataPosOffset = isDataTableSalted ? 1 : 0;
    if (viewIndexId != null) {
        nIndexedColumns--;
        builder.addField(new PDatum() {

            @Override
            public boolean isNullable() {
                return false;
            }

            @Override
            public PDataType getDataType() {
                return MetaDataUtil.getViewIndexIdDataType();
            }

            @Override
            public Integer getMaxLength() {
                return null;
            }

            @Override
            public Integer getScale() {
                return null;
            }

            @Override
            public SortOrder getSortOrder() {
                return SortOrder.getDefault();
            }
        }, false, SortOrder.getDefault());
    }
    if (isMultiTenant) {
        Field field = dataRowKeySchema.getField(dataPosOffset++);
        builder.addField(field, field.isNullable(), field.getSortOrder());
        nIndexedColumns--;
    }
    Field[] indexFields = new Field[nIndexedColumns];
    BitSet viewConstantColumnBitSet = this.rowKeyMetaData.getViewConstantColumnBitSet();
    // Add Field for all data row pk columns
    for (int i = dataPosOffset; i < dataRowKeySchema.getFieldCount(); i++) {
        // Ignore view constant columns from the data table (they're the
        // same for all rows in this index)
        if (!viewConstantColumnBitSet.get(i)) {
            int pos = rowKeyMetaData.getIndexPkPosition(i - dataPosOffset);
            indexFields[pos] = dataRowKeySchema.getField(i);
        }
    }
    BitSet descIndexColumnBitSet = rowKeyMetaData.getDescIndexColumnBitSet();
    Iterator<Expression> expressionItr = indexedExpressions.iterator();
    for (int i = 0; i < indexFields.length; i++) {
        Field indexField = indexFields[i];
        PDataType dataTypeToBe;
        SortOrder sortOrderToBe;
        boolean isNullableToBe;
        Integer maxLengthToBe;
        Integer scaleToBe;
        if (indexField == null) {
            Expression e = expressionItr.next();
            isNullableToBe = e.isNullable();
            dataTypeToBe = IndexUtil.getIndexColumnDataType(isNullableToBe, e.getDataType());
            sortOrderToBe = descIndexColumnBitSet.get(i) ? SortOrder.DESC : SortOrder.ASC;
            maxLengthToBe = e.getMaxLength();
            scaleToBe = e.getScale();
        } else {
            isNullableToBe = indexField.isNullable();
            dataTypeToBe = IndexUtil.getIndexColumnDataType(isNullableToBe, indexField.getDataType());
            sortOrderToBe = descIndexColumnBitSet.get(i) ? SortOrder.DESC : SortOrder.ASC;
            maxLengthToBe = indexField.getMaxLength();
            scaleToBe = indexField.getScale();
        }
        final PDataType dataType = dataTypeToBe;
        final SortOrder sortOrder = sortOrderToBe;
        final boolean isNullable = isNullableToBe;
        final Integer maxLength = maxLengthToBe;
        final Integer scale = scaleToBe;
        builder.addField(new PDatum() {

            @Override
            public boolean isNullable() {
                return isNullable;
            }

            @Override
            public PDataType getDataType() {
                return dataType;
            }

            @Override
            public Integer getMaxLength() {
                return maxLength;
            }

            @Override
            public Integer getScale() {
                return scale;
            }

            @Override
            public SortOrder getSortOrder() {
                return sortOrder;
            }
        }, true, sortOrder);
    }
    return builder.build();
}
Also used : BitSet(org.apache.phoenix.util.BitSet) RowKeySchema(org.apache.phoenix.schema.RowKeySchema) SortOrder(org.apache.phoenix.schema.SortOrder) PDatum(org.apache.phoenix.schema.PDatum) Field(org.apache.phoenix.schema.ValueSchema.Field) PDataType(org.apache.phoenix.schema.types.PDataType) KeyValueColumnExpression(org.apache.phoenix.expression.KeyValueColumnExpression) SingleCellConstructorExpression(org.apache.phoenix.expression.SingleCellConstructorExpression) Expression(org.apache.phoenix.expression.Expression) SingleCellColumnExpression(org.apache.phoenix.expression.SingleCellColumnExpression) CoerceExpression(org.apache.phoenix.expression.CoerceExpression) LiteralExpression(org.apache.phoenix.expression.LiteralExpression)
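
The anonymous new PDatum() blocks above carry only five pieces of column metadata. Assuming PDatum is the five-method interface those blocks implement, a small named variant (illustrative, not part of Phoenix) makes the shape explicit and could stand in when the metadata is known up front:

import org.apache.phoenix.schema.PDatum;
import org.apache.phoenix.schema.SortOrder;
import org.apache.phoenix.schema.types.PDataType;

final class SimplePDatum implements PDatum {
    private final PDataType type;
    private final boolean nullable;
    private final Integer maxLength;
    private final Integer scale;
    private final SortOrder sortOrder;

    SimplePDatum(PDataType type, boolean nullable, Integer maxLength, Integer scale, SortOrder sortOrder) {
        this.type = type;
        this.nullable = nullable;
        this.maxLength = maxLength;
        this.scale = scale;
        this.sortOrder = sortOrder;
    }

    @Override
    public boolean isNullable() {
        return nullable;
    }

    @Override
    public PDataType getDataType() {
        return type;
    }

    @Override
    public Integer getMaxLength() {
        return maxLength;
    }

    @Override
    public Integer getScale() {
        return scale;
    }

    @Override
    public SortOrder getSortOrder() {
        return sortOrder;
    }
}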

Example 100 with PDataType

Use of org.apache.phoenix.schema.types.PDataType in project phoenix by apache.

The class IndexMaintainer, method initCachedState.

/**
     * Init calculated state reading/creating
     */
private void initCachedState() {
    byte[] emptyKvQualifier = EncodedColumnsUtil.getEmptyKeyValueInfo(encodingScheme).getFirst();
    dataEmptyKeyValueRef = new ColumnReference(emptyKeyValueCFPtr.copyBytesIfNecessary(), emptyKvQualifier);
    this.allColumns = Sets.newLinkedHashSetWithExpectedSize(indexedExpressions.size() + coveredColumnsMap.size());
    // columns that are required to evaluate all expressions in indexedExpressions (not including columns in data row key)
    this.indexedColumns = Sets.newLinkedHashSetWithExpectedSize(indexedExpressions.size());
    for (Expression expression : indexedExpressions) {
        KeyValueExpressionVisitor visitor = new KeyValueExpressionVisitor() {

            @Override
            public Void visit(KeyValueColumnExpression expression) {
                if (indexedColumns.add(new ColumnReference(expression.getColumnFamily(), expression.getColumnQualifier()))) {
                    indexedColumnTypes.add(expression.getDataType());
                }
                return null;
            }
        };
        expression.accept(visitor);
    }
    allColumns.addAll(indexedColumns);
    for (ColumnReference colRef : coveredColumnsMap.keySet()) {
        if (immutableStorageScheme == ImmutableStorageScheme.ONE_CELL_PER_COLUMN) {
            allColumns.add(colRef);
        } else {
            allColumns.add(new ColumnReference(colRef.getFamily(), QueryConstants.SINGLE_KEYVALUE_COLUMN_QUALIFIER_BYTES));
        }
    }
    int dataPkOffset = (isDataTableSalted ? 1 : 0) + (isMultiTenant ? 1 : 0);
    int nIndexPkColumns = getIndexPkColumnCount();
    dataPkPosition = new int[nIndexPkColumns];
    Arrays.fill(dataPkPosition, EXPRESSION_NOT_PRESENT);
    int numViewConstantColumns = 0;
    BitSet viewConstantColumnBitSet = rowKeyMetaData.getViewConstantColumnBitSet();
    for (int i = dataPkOffset; i < dataRowKeySchema.getFieldCount(); i++) {
        if (!viewConstantColumnBitSet.get(i)) {
            int indexPkPosition = rowKeyMetaData.getIndexPkPosition(i - dataPkOffset);
            this.dataPkPosition[indexPkPosition] = i;
        } else {
            numViewConstantColumns++;
        }
    }
    // Calculate the max number of trailing nulls that we should get rid of after building the index row key.
    // We only get rid of nulls for variable length types, so we have to be careful to consider the type of the
    // index table, not the data type of the data table
    int expressionsPos = indexedExpressions.size();
    int indexPkPos = nIndexPkColumns - numViewConstantColumns - 1;
    while (indexPkPos >= 0) {
        int dataPkPos = dataPkPosition[indexPkPos];
        boolean isDataNullable;
        PDataType dataType;
        if (dataPkPos == EXPRESSION_NOT_PRESENT) {
            isDataNullable = true;
            dataType = indexedExpressions.get(--expressionsPos).getDataType();
        } else {
            Field dataField = dataRowKeySchema.getField(dataPkPos);
            dataType = dataField.getDataType();
            isDataNullable = dataField.isNullable();
        }
        PDataType indexDataType = IndexUtil.getIndexColumnDataType(isDataNullable, dataType);
        if (indexDataType.isFixedWidth()) {
            break;
        }
        indexPkPos--;
    }
    maxTrailingNulls = nIndexPkColumns - indexPkPos - 1;
}
Also used : Field(org.apache.phoenix.schema.ValueSchema.Field) PDataType(org.apache.phoenix.schema.types.PDataType) KeyValueColumnExpression(org.apache.phoenix.expression.KeyValueColumnExpression) SingleCellConstructorExpression(org.apache.phoenix.expression.SingleCellConstructorExpression) Expression(org.apache.phoenix.expression.Expression) SingleCellColumnExpression(org.apache.phoenix.expression.SingleCellColumnExpression) CoerceExpression(org.apache.phoenix.expression.CoerceExpression) LiteralExpression(org.apache.phoenix.expression.LiteralExpression) BitSet(org.apache.phoenix.util.BitSet) KeyValueExpressionVisitor(org.apache.phoenix.expression.visitor.KeyValueExpressionVisitor) ColumnReference(org.apache.phoenix.hbase.index.covered.update.ColumnReference)
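
The loop at the end of initCachedState walks the index row key from the tail until it hits a fixed-width column, because only trailing variable-width columns can have their nulls trimmed from the row key. A simplified standalone sketch of that idea (illustrative types only; the real code also folds in nullability via IndexUtil.getIndexColumnDataType and skips view constant columns):

import org.apache.phoenix.schema.types.PDataType;
import org.apache.phoenix.schema.types.PInteger;
import org.apache.phoenix.schema.types.PVarchar;

public class TrailingVarWidthSketch {
    public static void main(String[] args) {
        PDataType[] indexPkTypes = { PInteger.INSTANCE, PVarchar.INSTANCE, PVarchar.INSTANCE };
        int pos = indexPkTypes.length - 1;
        // walk backwards while the column type is variable width
        while (pos >= 0 && !indexPkTypes[pos].isFixedWidth()) {
            pos--;
        }
        int maxTrailingNulls = indexPkTypes.length - pos - 1;
        System.out.println(maxTrailingNulls); // 2: the two trailing VARCHAR columns
    }
}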

Aggregations

PDataType (org.apache.phoenix.schema.types.PDataType): 152
Expression (org.apache.phoenix.expression.Expression): 54
LiteralExpression (org.apache.phoenix.expression.LiteralExpression): 31
SortOrder (org.apache.phoenix.schema.SortOrder): 29
ImmutableBytesWritable (org.apache.hadoop.hbase.io.ImmutableBytesWritable): 21
CoerceExpression (org.apache.phoenix.expression.CoerceExpression): 18
Test (org.junit.Test): 15
PDatum (org.apache.phoenix.schema.PDatum): 12
BigDecimal (java.math.BigDecimal): 11
SQLException (java.sql.SQLException): 11
ArrayList (java.util.ArrayList): 11
List (java.util.List): 10
KeyValueColumnExpression (org.apache.phoenix.expression.KeyValueColumnExpression): 10
RowKeyColumnExpression (org.apache.phoenix.expression.RowKeyColumnExpression): 10
SingleCellConstructorExpression (org.apache.phoenix.expression.SingleCellConstructorExpression): 10
IOException (java.io.IOException): 9
PreparedStatement (java.sql.PreparedStatement): 7
Pair (org.apache.hadoop.hbase.util.Pair): 7
Date (java.sql.Date): 6
AndExpression (org.apache.phoenix.expression.AndExpression): 6