Use of com.pingcap.tikv.codec.CodecDataOutput in project tispark by pingcap.
The class RowIDAllocator, method getMetaToUpdate.
private Optional<BytePairWrapper> getMetaToUpdate(ByteString key, byte[] oldVal, Snapshot snapshot) {
  // 1. encode the hash meta key
  // 2. load the meta via the hash meta key from TiKV
  // 3. update the meta's field count and write it back to TiKV
  CodecDataOutput cdo = new CodecDataOutput();
  ByteString metaKey = MetaCodec.encodeHashMetaKey(cdo, key.toByteArray());
  long fieldCount = 0;
  ByteString metaVal = snapshot.get(metaKey);

  // the stored field count is an unsigned 8-byte big-endian value
  if (!metaVal.isEmpty()) {
    try {
      fieldCount = IntegerCodec.readULong(new CodecDataInput(metaVal.toByteArray()));
    } catch (Exception ignored) {
      LOG.warn("metaDecode failed, field is ignored. " + KeyUtils.formatBytesUTF8(metaVal));
    }
  }

  // only update the meta field count when oldVal is null or empty, i.e. a new field is added
  if (oldVal == null || oldVal.length == 0) {
    fieldCount++;
    cdo.reset();
    cdo.writeLong(fieldCount);
    return Optional.of(new BytePairWrapper(metaKey.toByteArray(), cdo.toBytes()));
  }
  return Optional.empty();
}
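For reference, the meta value written here is nothing more than the field count as eight big-endian bytes, so CodecDataOutput.writeLong and IntegerCodec.readULong are symmetric. A minimal round-trip sketch; the value 3 and the omitted error handling are illustrative assumptions, not part of the snippet:

CodecDataOutput cdo = new CodecDataOutput();
cdo.writeLong(3L);                         // pretend the meta currently has 3 fields
byte[] metaVal = cdo.toBytes();            // what would be stored under the hash meta key
long decoded = IntegerCodec.readULong(new CodecDataInput(metaVal));
// decoded == 3L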
Use of com.pingcap.tikv.codec.CodecDataOutput in project tispark by pingcap.
The class PDClient, method getRegionByKey.
@Override
public TiRegion getRegionByKey(BackOffer backOffer, ByteString key) {
  CodecDataOutput cdo = new CodecDataOutput();
  BytesCodec.writeBytes(cdo, key.toByteArray());
  ByteString encodedKey = cdo.toByteString();

  Supplier<GetRegionRequest> request =
      () -> GetRegionRequest.newBuilder().setHeader(header).setRegionKey(encodedKey).build();
  PDErrorHandler<GetRegionResponse> handler =
      new PDErrorHandler<>(getRegionResponseErrorExtractor, this);

  GetRegionResponse resp = callWithRetry(backOffer, PDGrpc.getGetRegionMethod(), request, handler);
  return new TiRegion(
      resp.getRegion(), resp.getLeader(), conf.getIsolationLevel(), conf.getCommandPriority());
}
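A hypothetical caller sketch; pdClient, backOffer, and the key literal are assumptions, not from the snippet. The raw key is passed as-is, since getRegionByKey wraps it with BytesCodec.writeBytes before the RPC:

ByteString rowKey = ByteString.copyFromUtf8("some-raw-row-key");   // hypothetical raw key
TiRegion region = pdClient.getRegionByKey(backOffer, rowKey);      // region that owns the key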
Use of com.pingcap.tikv.codec.CodecDataOutput in project tispark by pingcap.
The class ProtoConverter, method visit.
@Override
protected Expr visit(Constant node, Object context) {
  Expr.Builder builder = Expr.newBuilder();
  DataType type = node.getDataType();
  if (node.getValue() == null) {
    builder.setTp(ExprType.Null);
  } else {
    // an overflowed constant cannot be pushed down to the coprocessor
    if (node.isOverflowed()) {
      throw new UnsupportedOperationException(
          "overflowed value cannot be pushed down to coprocessor");
    }
    builder.setTp(type.getProtoExprType());
    CodecDataOutput cdo = new CodecDataOutput();
    type.encode(cdo, EncodeType.PROTO, node.getValue());
    builder.setVal(cdo.toByteString());
    builder.setFieldType(toPBFieldType(getType(node)));
  }
  return builder.build();
}
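As a sketch of the non-null branch, assuming IntegerType.BIGINT is the constant's DataType (the singleton name and the literal 8L are assumptions, not from the snippet), the value is serialized with PROTO encoding and the resulting bytes become the expression's val field:

CodecDataOutput cdo = new CodecDataOutput();
IntegerType.BIGINT.encode(cdo, EncodeType.PROTO, 8L);   // assumed DataType singleton; encodes the literal 8
ByteString val = cdo.toByteString();                    // bytes set on Expr.Builder via setVal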
Use of com.pingcap.tikv.codec.CodecDataOutput in project tispark by pingcap.
The class RowIDAllocator, method updateHash.
private long updateHash(
    ByteString key, ByteString field, Function<byte[], byte[]> calculateNewVal, Snapshot snapshot) {
  // 1. encode the hash data key
  // 2. read the current value with a snapshot get
  // 3. calculate the new value via calculateNewVal
  // 4. check whether the old value equals the new value
  // 5. write the new value back to TiKV via 2PC
  // 6. encode a hash meta key
  // 7. update the hash meta field count if needed
  CodecDataOutput cdo = new CodecDataOutput();
  MetaCodec.encodeHashDataKey(cdo, key.toByteArray(), field.toByteArray());
  ByteString dataKey = cdo.toByteString();
  byte[] oldVal = snapshot.get(dataKey.toByteArray());

  byte[] newVal = calculateNewVal.apply(oldVal);
  if (Arrays.equals(newVal, oldVal)) {
    // no need to update
    return 0L;
  }

  List<BytePairWrapper> pairs = new ArrayList<>(2);
  pairs.add(new BytePairWrapper(dataKey.toByteArray(), newVal));
  getMetaToUpdate(key, oldVal, snapshot).ifPresent(pairs::add);
  set(pairs, snapshot.getTimestamp());
  return Long.parseLong(new String(newVal));
}
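Because the stored value is an ASCII decimal string (the method parses it with Long.parseLong), a caller inside RowIDAllocator could express an allocation bump as the calculateNewVal function. A hypothetical sketch; the field name and the step of 1000 are assumptions, not taken from the project:

long newEnd = updateHash(
    key,
    ByteString.copyFromUtf8("row_id"),                   // hypothetical hash field name
    oldVal -> {
      long base = (oldVal == null || oldVal.length == 0)
          ? 0L
          : Long.parseLong(new String(oldVal));
      return String.valueOf(base + 1000L).getBytes();    // assumed allocation step
    },
    snapshot);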
Use of com.pingcap.tikv.codec.CodecDataOutput in project tispark by pingcap.
The class IndexKey, method encode.
private static byte[] encode(long tableId, long indexId, Key[] dataKeys) {
  CodecDataOutput cdo = new CodecDataOutput();
  cdo.write(TBL_PREFIX);
  IntegerCodec.writeLong(cdo, tableId);
  cdo.write(IDX_PREFIX_SEP);
  IntegerCodec.writeLong(cdo, indexId);
  for (Key key : dataKeys) {
    if (key == null) {
      throw new TypeException("key cannot be null");
    }
    cdo.write(key.getBytes());
  }
  return cdo.toBytes();
}
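The resulting layout mirrors TiDB's index key format: roughly "t" + tableId + "_i" + indexId followed by the encoded column values. A minimal sketch that builds just the index prefix; the ids and the literal "t"/"_i" markers are assumptions standing in for TBL_PREFIX and IDX_PREFIX_SEP:

CodecDataOutput cdo = new CodecDataOutput();
cdo.write("t".getBytes());                  // same role as TBL_PREFIX
IntegerCodec.writeLong(cdo, 45L);           // assumed tableId
cdo.write("_i".getBytes());                 // same role as IDX_PREFIX_SEP
IntegerCodec.writeLong(cdo, 1L);            // assumed indexId
byte[] indexPrefix = cdo.toBytes();         // prefix shared by every key in this index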