Use of org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo in project hbase by apache.
In the class ServerCrashProcedure, the method deserializeStateData.
@Override
public void deserializeStateData(final InputStream stream) throws IOException {
  super.deserializeStateData(stream);
  MasterProcedureProtos.ServerCrashStateData state =
    MasterProcedureProtos.ServerCrashStateData.parseDelimitedFrom(stream);
  this.serverName = ProtobufUtil.toServerName(state.getServerName());
  this.distributedLogReplay =
    state.hasDistributedLogReplay() ? state.getDistributedLogReplay() : false;
  this.carryingMeta = state.hasCarryingMeta() ? state.getCarryingMeta() : false;
  // shouldSplitWal has a default in the pb definition, so this call always succeeds.
  this.shouldSplitWal = state.getShouldSplitWal();
  int size = state.getRegionsOnCrashedServerCount();
  if (size > 0) {
    this.regionsOnCrashedServer = new HashSet<>(size);
    for (RegionInfo ri : state.getRegionsOnCrashedServerList()) {
      this.regionsOnCrashedServer.add(HRegionInfo.convert(ri));
    }
  }
  size = state.getRegionsAssignedCount();
  if (size > 0) {
    this.regionsAssigned = new ArrayList<>(size);
    // Read from the regions-assigned list (not the regions-on-crashed-server list).
    for (RegionInfo ri : state.getRegionsAssignedList()) {
      this.regionsAssigned.add(HRegionInfo.convert(ri));
    }
  }
}
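For context, the write side mirrors this read path: the procedure's fields are packed into a ServerCrashStateData message and written length-delimited so that parseDelimitedFrom above can recover them. The following is a minimal sketch, not the HBase source; it assumes only the field and accessor names visible in the snippet above plus the pb-generated builder methods they imply.

public void serializeStateData(final OutputStream stream) throws IOException {
  super.serializeStateData(stream);
  // Build the state message from the same fields deserializeStateData restores.
  MasterProcedureProtos.ServerCrashStateData.Builder state =
    MasterProcedureProtos.ServerCrashStateData.newBuilder()
      .setServerName(ProtobufUtil.toServerName(this.serverName))
      .setDistributedLogReplay(this.distributedLogReplay)
      .setCarryingMeta(this.carryingMeta)
      .setShouldSplitWal(this.shouldSplitWal);
  if (this.regionsOnCrashedServer != null) {
    for (HRegionInfo hri : this.regionsOnCrashedServer) {
      state.addRegionsOnCrashedServer(HRegionInfo.convert(hri));
    }
  }
  if (this.regionsAssigned != null) {
    for (HRegionInfo hri : this.regionsAssigned) {
      state.addRegionsAssigned(HRegionInfo.convert(hri));
    }
  }
  // Length-delimited write, so the reader can call parseDelimitedFrom().
  state.build().writeDelimitedTo(stream);
}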
Use of org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo in project hbase by apache.
In the class TestHRegionInfo, the method testConvert.
@Test
public void testConvert() {
  final TableName tableName = TableName.valueOf("ns1:" + name.getMethodName());
  byte[] startKey = Bytes.toBytes("startKey");
  byte[] endKey = Bytes.toBytes("endKey");
  boolean split = false;
  long regionId = System.currentTimeMillis();
  int replicaId = 42;
  HRegionInfo hri = new HRegionInfo(tableName, startKey, endKey, split, regionId, replicaId);
  // Convert to pb and back again, then compare.
  HRegionInfo convertedHri = HRegionInfo.convert(HRegionInfo.convert(hri));
  assertEquals(hri, convertedHri);
  // Convert a pb RegionInfo that carries no replicaId.
  RegionInfo info = RegionInfo.newBuilder()
    .setTableName(HBaseProtos.TableName.newBuilder()
      .setQualifier(UnsafeByteOperations.unsafeWrap(tableName.getQualifier()))
      .setNamespace(UnsafeByteOperations.unsafeWrap(tableName.getNamespace()))
      .build())
    .setStartKey(UnsafeByteOperations.unsafeWrap(startKey))
    .setEndKey(UnsafeByteOperations.unsafeWrap(endKey))
    .setSplit(split)
    .setRegionId(regionId)
    .build();
  convertedHri = HRegionInfo.convert(info);
  // Expect the default replicaId (0), since none was set on the pb message.
  HRegionInfo expectedHri = new HRegionInfo(tableName, startKey, endKey, split, regionId, 0);
  assertEquals(expectedHri, convertedHri);
}
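The same round trip can be exercised at the byte level through HRegionInfo.toByteArray() and parseFrom(byte[], int, int), the pair documented in the parseFrom snippet later in this listing. A minimal sketch, assuming the usual test imports; the method name is hypothetical and not part of TestHRegionInfo.

@Test
public void testByteArrayRoundTrip() throws DeserializationException {
  HRegionInfo hri = new HRegionInfo(TableName.valueOf("ns1:roundTrip"),
    Bytes.toBytes("startKey"), Bytes.toBytes("endKey"), false, 1L, 0);
  // toByteArray() emits a pb RegionInfo prefixed with the pb magic bytes.
  byte[] serialized = hri.toByteArray();
  assertEquals(hri, HRegionInfo.parseFrom(serialized, 0, serialized.length));
}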
Use of org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo in project hbase by apache.
In the class MasterRpcServices, the method reportRegionStateTransition.
@Override
public ReportRegionStateTransitionResponse reportRegionStateTransition(RpcController c,
    ReportRegionStateTransitionRequest req) throws ServiceException {
  try {
    master.checkServiceStarted();
    RegionStateTransition rt = req.getTransition(0);
    RegionStates regionStates = master.getAssignmentManager().getRegionStates();
    for (RegionInfo ri : rt.getRegionInfoList()) {
      TableName tableName = ProtobufUtil.toTableName(ri.getTableName());
      // Until failover cleanup completes, reject transitions for user regions.
      // hbase:meta is exempt because it is assigned before the cleanup finishes.
      if (!(TableName.META_TABLE_NAME.equals(tableName)
          && regionStates.getRegionState(HRegionInfo.FIRST_META_REGIONINFO) != null)
          && !master.getAssignmentManager().isFailoverCleanupDone()) {
        throw new PleaseHoldException("Master is rebuilding user regions");
      }
    }
    ServerName sn = ProtobufUtil.toServerName(req.getServer());
    String error = master.getAssignmentManager().onRegionTransition(sn, rt);
    ReportRegionStateTransitionResponse.Builder rrtr =
      ReportRegionStateTransitionResponse.newBuilder();
    if (error != null) {
      rrtr.setErrorMessage(error);
    }
    return rrtr.build();
  } catch (IOException ioe) {
    throw new ServiceException(ioe);
  }
}
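On the reporting side, a caller can inspect the response for the error message set above. A minimal sketch; hasErrorMessage()/getErrorMessage() are the pb-generated accessors implied by setErrorMessage(), while callMasterReportRegionStateTransition, request, and LOG are hypothetical names used only for illustration.

// Illustrative handling of the response built above (names are hypothetical).
ReportRegionStateTransitionResponse response = callMasterReportRegionStateTransition(request);
if (response.hasErrorMessage()) {
  // The master rejected the transition, e.g. "Master is rebuilding user regions";
  // a reasonable reaction is to log it and retry the report later.
  LOG.warn("Master rejected region transition: " + response.getErrorMessage());
}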
Use of org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo in project hbase by apache.
In the class HRegionInfo, the method parseFrom.
/**
 * @param bytes A pb RegionInfo serialized with a pb magic prefix.
 * @param offset starting point in the byte array
 * @param len length to read in the byte array
 * @return A deserialized {@link HRegionInfo}
 * @throws DeserializationException if the bytes do not hold a pb-encoded RegionInfo
 * @see #toByteArray()
 */
public static HRegionInfo parseFrom(final byte[] bytes, int offset, int len)
    throws DeserializationException {
  if (ProtobufUtil.isPBMagicPrefix(bytes, offset, len)) {
    int pblen = ProtobufUtil.lengthOfPBMagic();
    try {
      HBaseProtos.RegionInfo.Builder builder = HBaseProtos.RegionInfo.newBuilder();
      ProtobufUtil.mergeFrom(builder, bytes, pblen + offset, len - pblen);
      HBaseProtos.RegionInfo ri = builder.build();
      return convert(ri);
    } catch (IOException e) {
      throw new DeserializationException(e);
    }
  } else {
    throw new DeserializationException("PB encoded HRegionInfo expected");
  }
}