Use of org.apache.phoenix.schema.PColumn in project phoenix by apache.
The class SchemaUtil, method processSplit.
// Go through each slot in the schema and try to match it with the split byte array. If the split
// does not conform to the schema, extend its length to match the schema.
private static byte[] processSplit(byte[] split, LinkedHashSet<PColumn> pkColumns) {
    int pos = 0, offset = 0, maxOffset = split.length;
    Iterator<PColumn> iterator = pkColumns.iterator();
    while (pos < pkColumns.size()) {
        PColumn column = iterator.next();
        if (column.getDataType().isFixedWidth()) {
            // Fixed width
            int length = SchemaUtil.getFixedByteSize(column);
            if (maxOffset - offset < length) {
                // The split truncates the field. Fill in the rest of this field and any
                // fields that are missing after it.
                int fillInLength = length - (maxOffset - offset);
                fillInLength += estimatePartLength(pos + 1, iterator);
                return ByteUtil.fillKey(split, split.length + fillInLength);
            }
            // Account for this field; move to the next position.
            offset += length;
            pos++;
        } else {
            // If we are at the last slot, then we are done. Nothing needs to be filled in.
            if (pos == pkColumns.size() - 1) {
                break;
            }
            while (offset < maxOffset && split[offset] != QueryConstants.SEPARATOR_BYTE) {
                offset++;
            }
            if (offset == maxOffset) {
                // The var-length field does not end with a separator and it's not the last field.
                int fillInLength = 1; // SEPARATOR byte for the current var-length slot.
                fillInLength += estimatePartLength(pos + 1, iterator);
                return ByteUtil.fillKey(split, split.length + fillInLength);
            }
            // Move to the next position.
            offset += 1; // Skip the separator byte.
            pos++;
        }
    }
    return split;
}
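ByteUtil.fillKey is what extends a too-short split point. As a minimal sketch of the padding it is assumed to perform (zero-fill out to the target length), here is a hypothetical stand-in rather than the actual Phoenix utility:

import java.util.Arrays;

public class FillKeySketch {

    // Hypothetical stand-in for ByteUtil.fillKey, assumed here to pad the key
    // with trailing zero bytes until it reaches the requested length.
    static byte[] fillKey(byte[] key, int length) {
        return key.length >= length ? key : Arrays.copyOf(key, length);
    }

    public static void main(String[] args) {
        // Suppose the first PK slot is a fixed-width 8-byte value but the split
        // point only supplies 3 bytes: processSplit would request 5 fill bytes.
        byte[] split = { 0x01, 0x02, 0x03 };
        byte[] padded = fillKey(split, split.length + 5);
        System.out.println(padded.length); // 8
    }
}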
Use of org.apache.phoenix.schema.PColumn in project phoenix by apache.
The class ColumnExpressionTest, method testSerialization.
@Test
public void testSerialization() throws Exception {
    int maxLen = 30;
    int scale = 5;
    PName colName = PNameFactory.newName("c1");
    PColumn column = new PColumnImpl(colName, PNameFactory.newName("f1"), PDecimal.INSTANCE, maxLen, scale, true, 20, SortOrder.getDefault(), 0, null, false, null, false, false, colName.getBytes());
    ColumnExpression colExp = new KeyValueColumnExpression(column);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream dOut = new DataOutputStream(baos);
    colExp.write(dOut);
    dOut.flush();
    ColumnExpression colExp2 = new KeyValueColumnExpression();
    byte[] bytes = baos.toByteArray();
    DataInputStream dIn = new DataInputStream(new ByteArrayInputStream(bytes, 0, bytes.length));
    colExp2.readFields(dIn);
    assertEquals(maxLen, colExp2.getMaxLength().intValue());
    assertEquals(scale, colExp2.getScale().intValue());
    assertEquals(PDecimal.INSTANCE, colExp2.getDataType());
}
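The test exercises the standard write/readFields round trip: serialize into an in-memory byte array, rebuild a fresh instance from those bytes, and compare fields. A minimal, self-contained sketch of the same pattern, using a hypothetical two-field class instead of ColumnExpression:

import java.io.*;

public class RoundTripSketch {

    // Hypothetical class following the same symmetric write/readFields contract.
    static class Point {
        int x, y;

        void write(DataOutput out) throws IOException {
            out.writeInt(x);
            out.writeInt(y);
        }

        void readFields(DataInput in) throws IOException {
            x = in.readInt();
            y = in.readInt();
        }
    }

    public static void main(String[] args) throws IOException {
        Point p = new Point();
        p.x = 3;
        p.y = 7;
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        DataOutputStream dOut = new DataOutputStream(baos);
        p.write(dOut);
        dOut.flush();
        Point q = new Point();
        q.readFields(new DataInputStream(new ByteArrayInputStream(baos.toByteArray())));
        System.out.println(q.x + "," + q.y); // prints 3,7
    }
}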
Use of org.apache.phoenix.schema.PColumn in project phoenix by apache.
The class ColumnExpressionTest, method testSerializationWithNullScaleAndMaxLength.
@Test
public void testSerializationWithNullScaleAndMaxLength() throws Exception {
    PName colName = PNameFactory.newName("c1");
    PColumn column = new PColumnImpl(colName, PNameFactory.newName("f1"), PDecimal.INSTANCE, null, null, true, 20, SortOrder.getDefault(), 0, null, false, null, false, false, colName.getBytes());
    ColumnExpression colExp = new KeyValueColumnExpression(column);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream dOut = new DataOutputStream(baos);
    colExp.write(dOut);
    dOut.flush();
    ColumnExpression colExp2 = new KeyValueColumnExpression();
    byte[] bytes = baos.toByteArray();
    DataInputStream dIn = new DataInputStream(new ByteArrayInputStream(bytes, 0, bytes.length));
    colExp2.readFields(dIn);
    assertNull(colExp2.getMaxLength());
    assertNull(colExp2.getScale());
}
Use of org.apache.phoenix.schema.PColumn in project phoenix by apache.
The class ColumnExpressionTest, method testSerializationWithNullMaxLength.
@Test
public void testSerializationWithNullMaxLength() throws Exception {
    int scale = 5;
    PName colName = PNameFactory.newName("c1");
    PColumn column = new PColumnImpl(colName, PNameFactory.newName("f1"), PVarchar.INSTANCE, null, scale, true, 20, SortOrder.getDefault(), 0, null, false, null, false, false, colName.getBytes());
    ColumnExpression colExp = new KeyValueColumnExpression(column);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream dOut = new DataOutputStream(baos);
    colExp.write(dOut);
    dOut.flush();
    ColumnExpression colExp2 = new KeyValueColumnExpression();
    byte[] bytes = baos.toByteArray();
    DataInputStream dIn = new DataInputStream(new ByteArrayInputStream(bytes, 0, bytes.length));
    colExp2.readFields(dIn);
    assertNull(colExp2.getMaxLength());
    assertEquals(scale, colExp2.getScale().intValue());
    assertEquals(PVarchar.INSTANCE, colExp2.getDataType());
}
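The two null-handling tests verify that an absent maxLength or scale survives the round trip as null rather than being collapsed to a default. Phoenix's actual wire encoding is not shown in these snippets; one common way to make such optional fields null-safe is a presence flag written before the value, as in this hedged sketch (an assumption, not Phoenix's format):

import java.io.*;

public class NullableFieldSketch {

    // Assumed encoding for illustration only: a boolean presence flag,
    // followed by the value when present.
    static void writeNullableInt(DataOutput out, Integer v) throws IOException {
        out.writeBoolean(v != null);
        if (v != null) {
            out.writeInt(v);
        }
    }

    static Integer readNullableInt(DataInput in) throws IOException {
        return in.readBoolean() ? in.readInt() : null;
    }

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        DataOutputStream dOut = new DataOutputStream(baos);
        writeNullableInt(dOut, null);
        writeNullableInt(dOut, 5);
        DataInputStream dIn = new DataInputStream(new ByteArrayInputStream(baos.toByteArray()));
        System.out.println(readNullableInt(dIn)); // null
        System.out.println(readNullableInt(dIn)); // 5
    }
}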
Use of org.apache.phoenix.schema.PColumn in project phoenix by apache.
The class IndexTestUtil, method generateIndexData.
public static List<Mutation> generateIndexData(PTable indexTable, PTable dataTable, Mutation dataMutation, ImmutableBytesWritable ptr, KeyValueBuilder builder) throws SQLException {
    byte[] dataRowKey = dataMutation.getRow();
    RowKeySchema dataRowKeySchema = dataTable.getRowKeySchema();
    List<PColumn> dataPKColumns = dataTable.getPKColumns();
    int i = 0;
    int indexOffset = 0;
    Boolean hasValue;
    // Skip the salt column, if the table is salted
    int maxOffset = dataRowKey.length;
    dataRowKeySchema.iterator(dataRowKey, ptr, dataTable.getBucketNum() == null ? i : ++i);
    List<PColumn> indexPKColumns = indexTable.getPKColumns();
    List<PColumn> indexColumns = indexTable.getColumns();
    int nIndexColumns = indexPKColumns.size();
    int maxIndexValues = indexColumns.size() - nIndexColumns - indexOffset;
    BitSet indexValuesSet = new BitSet(maxIndexValues);
    byte[][] indexValues = new byte[indexColumns.size() - indexOffset][];
    while ((hasValue = dataRowKeySchema.next(ptr, i, maxOffset)) != null) {
        if (hasValue) {
            PColumn dataColumn = dataPKColumns.get(i);
            PColumn indexColumn = indexTable.getColumnForColumnName(IndexUtil.getIndexColumnName(dataColumn));
            coerceDataValueToIndexValue(dataColumn, indexColumn, ptr);
            indexValues[indexColumn.getPosition() - indexOffset] = ptr.copyBytes();
        }
        i++;
    }
    PRow row;
    long ts = MetaDataUtil.getClientTimeStamp(dataMutation);
    if (dataMutation instanceof Delete && dataMutation.getFamilyCellMap().values().isEmpty()) {
        indexTable.newKey(ptr, indexValues);
        row = indexTable.newRow(builder, ts, ptr, false);
        row.delete();
    } else {
        // If there are no column families in the table, then there is nothing to look for
        if (!dataTable.getColumnFamilies().isEmpty()) {
            for (Map.Entry<byte[], List<Cell>> entry : dataMutation.getFamilyCellMap().entrySet()) {
                PColumnFamily family = dataTable.getColumnFamily(entry.getKey());
                for (Cell kv : entry.getValue()) {
                    @SuppressWarnings("deprecation")
                    byte[] cq = kv.getQualifier();
                    byte[] emptyKVQualifier = EncodedColumnsUtil.getEmptyKeyValueInfo(dataTable).getFirst();
                    if (Bytes.compareTo(emptyKVQualifier, cq) != 0) {
                        try {
                            PColumn dataColumn = family.getPColumnForColumnQualifier(cq);
                            PColumn indexColumn = indexTable.getColumnForColumnName(IndexUtil.getIndexColumnName(family.getName().getString(), dataColumn.getName().getString()));
                            ptr.set(kv.getValueArray(), kv.getValueOffset(), kv.getValueLength());
                            coerceDataValueToIndexValue(dataColumn, indexColumn, ptr);
                            indexValues[indexPKColumns.indexOf(indexColumn) - indexOffset] = ptr.copyBytes();
                            if (!SchemaUtil.isPKColumn(indexColumn)) {
                                indexValuesSet.set(indexColumn.getPosition() - nIndexColumns - indexOffset);
                            }
                        } catch (ColumnNotFoundException e) {
                            // Ignore, as this means the data column isn't part of the index
                        }
                    }
                }
            }
        }
        indexTable.newKey(ptr, indexValues);
        row = indexTable.newRow(builder, ts, ptr, false);
        int pos = 0;
        while ((pos = indexValuesSet.nextSetBit(pos)) >= 0) {
            int index = nIndexColumns + indexOffset + pos++;
            PColumn indexColumn = indexColumns.get(index);
            row.setValue(indexColumn, indexValues[index]);
        }
    }
    return row.toRowMutations();
}
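The final loop uses the standard BitSet.nextSetBit idiom to visit only the covered (non-PK) index columns that actually received a value, skipping empty slots. A small standalone example of that idiom:

import java.util.BitSet;

public class NextSetBitSketch {

    public static void main(String[] args) {
        BitSet indexValuesSet = new BitSet();
        indexValuesSet.set(1);
        indexValuesSet.set(4);
        int pos = 0;
        // Same idiom as generateIndexData: jump from one set bit to the next.
        while ((pos = indexValuesSet.nextSetBit(pos)) >= 0) {
            System.out.println("covered column slot: " + pos);
            // Advance past the current bit, otherwise nextSetBit returns it again.
            pos++;
        }
    }
}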