Use of org.apache.hadoop.hbase.rest.model.RowModel in project hbase by apache.
The class RowResource, method checkAndDelete:
/**
 * Validates the input request parameters, parses columns from CellSetModel, and invokes
 * checkAndDelete on HTable.
 *
 * @param model instance of CellSetModel
 * @return Response 200 OK, 304 Not modified, 400 Bad request
 */
Response checkAndDelete(final CellSetModel model) {
  Table table = null;
  Delete delete = null;
  try {
    table = servlet.getTable(tableResource.getName());
    if (model.getRows().size() != 1) {
      servlet.getMetrics().incrementFailedDeleteRequests(1);
      return Response.status(Response.Status.BAD_REQUEST).type(MIMETYPE_TEXT)
        .entity("Bad request" + CRLF).build();
    }
    RowModel rowModel = model.getRows().get(0);
    byte[] key = rowModel.getKey();
    if (key == null) {
      key = rowspec.getRow();
    }
    if (key == null) {
      servlet.getMetrics().incrementFailedDeleteRequests(1);
      return Response.status(Response.Status.BAD_REQUEST).type(MIMETYPE_TEXT)
        .entity("Bad request: Row key found to be null." + CRLF).build();
    }
    List<CellModel> cellModels = rowModel.getCells();
    int cellModelCount = cellModels.size();
    delete = new Delete(key);
    boolean retValue;
    CellModel valueToDeleteCell = rowModel.getCells().get(cellModelCount - 1);
    byte[] valueToDeleteColumn = valueToDeleteCell.getColumn();
    if (valueToDeleteColumn == null) {
      try {
        valueToDeleteColumn = rowspec.getColumns()[0];
      } catch (final ArrayIndexOutOfBoundsException e) {
        servlet.getMetrics().incrementFailedDeleteRequests(1);
        return Response.status(Response.Status.BAD_REQUEST).type(MIMETYPE_TEXT)
          .entity("Bad request: Column not specified for check." + CRLF).build();
      }
    }
    byte[][] parts;
    // Copy all the cells to the Delete request if extra cells are sent
    if (cellModelCount > 1) {
      for (int i = 0, n = cellModelCount - 1; i < n; i++) {
        CellModel cell = cellModels.get(i);
        byte[] col = cell.getColumn();
        if (col == null) {
          servlet.getMetrics().incrementFailedDeleteRequests(1);
          return Response.status(Response.Status.BAD_REQUEST).type(MIMETYPE_TEXT)
            .entity("Bad request: Column found to be null." + CRLF).build();
        }
        parts = KeyValue.parseColumn(col);
        if (parts.length == 1) {
          // Only Column Family is specified
          delete.addFamily(parts[0], cell.getTimestamp());
        } else if (parts.length == 2) {
          delete.addColumn(parts[0], parts[1], cell.getTimestamp());
        } else {
          servlet.getMetrics().incrementFailedDeleteRequests(1);
          return Response.status(Response.Status.BAD_REQUEST).type(MIMETYPE_TEXT)
            .entity("Bad request: Column to delete incorrectly specified." + CRLF).build();
        }
      }
    }
    parts = KeyValue.parseColumn(valueToDeleteColumn);
    if (parts.length == 2) {
      if (parts[1].length != 0) {
        // delete all versions of the column if that is the only cell passed to the REST API
        if (cellModelCount == 1) {
          delete.addColumns(parts[0], parts[1]);
        }
        retValue = table.checkAndDelete(key, parts[0], parts[1], valueToDeleteCell.getValue(),
          delete);
      } else {
        // The case of empty qualifier.
        if (cellModelCount == 1) {
          delete.addColumns(parts[0], Bytes.toBytes(StringUtils.EMPTY));
        }
        retValue = table.checkAndDelete(key, parts[0], Bytes.toBytes(StringUtils.EMPTY),
          valueToDeleteCell.getValue(), delete);
      }
    } else {
      servlet.getMetrics().incrementFailedDeleteRequests(1);
      return Response.status(Response.Status.BAD_REQUEST).type(MIMETYPE_TEXT)
        .entity("Bad request: Column to check incorrectly specified." + CRLF).build();
    }
    if (LOG.isTraceEnabled()) {
      LOG.trace("CHECK-AND-DELETE " + delete.toString() + ", returns " + retValue);
    }
    if (!retValue) {
      servlet.getMetrics().incrementFailedDeleteRequests(1);
      return Response.status(Response.Status.NOT_MODIFIED).type(MIMETYPE_TEXT)
        .entity(" Delete check failed." + CRLF).build();
    }
    ResponseBuilder response = Response.ok();
    servlet.getMetrics().incrementSucessfulDeleteRequests(1);
    return response.build();
  } catch (Exception e) {
    servlet.getMetrics().incrementFailedDeleteRequests(1);
    return processException(e);
  } finally {
    if (table != null) {
      try {
        table.close();
      } catch (IOException ioe) {
        LOG.debug("Exception received while closing the table", ioe);
      }
    }
  }
}
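
For context, the payload this method expects is a CellSetModel with exactly one row: the last cell of that row supplies the column and value for the atomic check, and any cells before it name the columns to delete. Below is a minimal client-side sketch of building such a payload; the row and column names are illustrative, not taken from the source.

import org.apache.hadoop.hbase.rest.model.CellModel;
import org.apache.hadoop.hbase.rest.model.CellSetModel;
import org.apache.hadoop.hbase.rest.model.RowModel;
import org.apache.hadoop.hbase.util.Bytes;

public class CheckAndDeletePayloadSketch {
  // Hypothetical helper: builds the single-row payload checkAndDelete expects.
  static CellSetModel buildPayload() {
    RowModel row = new RowModel(Bytes.toBytes("row1"));
    // Leading cells name columns to delete; their values are not used for the check.
    row.addCell(new CellModel(Bytes.toBytes("cf:stale"), Bytes.toBytes("")));
    // The last cell carries the column/value pair the server checks atomically.
    row.addCell(new CellModel(Bytes.toBytes("cf:flag"), Bytes.toBytes("expected")));
    CellSetModel model = new CellSetModel();
    model.addRow(row);
    return model;
  }
}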
Use of org.apache.hadoop.hbase.rest.model.RowModel in project hbase by apache.
The class RowResource, method get:
@GET
@Produces({ MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF, MIMETYPE_PROTOBUF_IETF })
public Response get(@Context final UriInfo uriInfo) {
  if (LOG.isTraceEnabled()) {
    LOG.trace("GET " + uriInfo.getAbsolutePath());
  }
  servlet.getMetrics().incrementRequests(1);
  MultivaluedMap<String, String> params = uriInfo.getQueryParameters();
  try {
    ResultGenerator generator = ResultGenerator.fromRowSpec(tableResource.getName(), rowspec,
      null, !params.containsKey(NOCACHE_PARAM_NAME));
    if (!generator.hasNext()) {
      servlet.getMetrics().incrementFailedGetRequests(1);
      return Response.status(Response.Status.NOT_FOUND).type(MIMETYPE_TEXT)
        .entity("Not found" + CRLF).build();
    }
    int count = 0;
    CellSetModel model = new CellSetModel();
    Cell value = generator.next();
    byte[] rowKey = CellUtil.cloneRow(value);
    RowModel rowModel = new RowModel(rowKey);
    do {
      if (!Bytes.equals(CellUtil.cloneRow(value), rowKey)) {
        model.addRow(rowModel);
        rowKey = CellUtil.cloneRow(value);
        rowModel = new RowModel(rowKey);
      }
      rowModel.addCell(new CellModel(CellUtil.cloneFamily(value), CellUtil.cloneQualifier(value),
        value.getTimestamp(), CellUtil.cloneValue(value)));
      if (++count > rowspec.getMaxValues()) {
        break;
      }
      value = generator.next();
    } while (value != null);
    model.addRow(rowModel);
    servlet.getMetrics().incrementSucessfulGetRequests(1);
    return Response.ok(model).build();
  } catch (Exception e) {
    servlet.getMetrics().incrementFailedGetRequests(1);
    return processException(e);
  }
}
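
The do/while loop above folds consecutive cells that share a row key into one RowModel before adding it to the CellSetModel. A hypothetical consumer of the returned model (names here are illustrative) unpacks it the same way in reverse:

import org.apache.hadoop.hbase.rest.model.CellModel;
import org.apache.hadoop.hbase.rest.model.CellSetModel;
import org.apache.hadoop.hbase.rest.model.RowModel;
import org.apache.hadoop.hbase.util.Bytes;

// Sketch: print every cell of a CellSetModel, grouped by row.
static void dump(CellSetModel model) {
  for (RowModel row : model.getRows()) {
    String key = Bytes.toString(row.getKey());
    for (CellModel cell : row.getCells()) {
      System.out.println(key + "/" + Bytes.toString(cell.getColumn()) + " @ "
        + cell.getTimestamp() + " = " + Bytes.toString(cell.getValue()));
    }
  }
}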
Use of org.apache.hadoop.hbase.rest.model.RowModel in project hbase by apache.
The class ScannerInstanceResource, method get:
@GET
@Produces({ MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF, MIMETYPE_PROTOBUF_IETF })
public Response get(@Context final UriInfo uriInfo, @QueryParam("n") int maxRows,
    @QueryParam("c") final int maxValues) {
  if (LOG.isTraceEnabled()) {
    LOG.trace("GET " + uriInfo.getAbsolutePath());
  }
  servlet.getMetrics().incrementRequests(1);
  if (generator == null) {
    servlet.getMetrics().incrementFailedGetRequests(1);
    return Response.status(Response.Status.NOT_FOUND).type(MIMETYPE_TEXT)
      .entity("Not found" + CRLF).build();
  }
  CellSetModel model = new CellSetModel();
  RowModel rowModel = null;
  byte[] rowKey = null;
  int limit = batch;
  if (maxValues > 0) {
    limit = maxValues;
  }
  int count = limit;
  do {
    Cell value = null;
    try {
      value = generator.next();
    } catch (IllegalStateException e) {
      if (ScannerResource.delete(id)) {
        servlet.getMetrics().incrementSucessfulDeleteRequests(1);
      } else {
        servlet.getMetrics().incrementFailedDeleteRequests(1);
      }
      servlet.getMetrics().incrementFailedGetRequests(1);
      return Response.status(Response.Status.GONE).type(MIMETYPE_TEXT)
        .entity("Gone" + CRLF).build();
    } catch (IllegalArgumentException e) {
      Throwable t = e.getCause();
      if (t instanceof TableNotFoundException) {
        return Response.status(Response.Status.NOT_FOUND).type(MIMETYPE_TEXT)
          .entity("Not found" + CRLF).build();
      }
      throw e;
    }
    if (value == null) {
      if (LOG.isTraceEnabled()) {
        LOG.trace("generator exhausted");
      }
      // respond with 204 (No Content) if an empty cell set would be returned
      if (count == limit) {
        return Response.noContent().build();
      }
      break;
    }
    if (rowKey == null) {
      rowKey = CellUtil.cloneRow(value);
      rowModel = new RowModel(rowKey);
    }
    if (!Bytes.equals(CellUtil.cloneRow(value), rowKey)) {
      // if maxRows was given as a query param, stop before exceeding the specified number of rows
      if (maxRows > 0) {
        if (--maxRows == 0) {
          generator.putBack(value);
          break;
        }
      }
      model.addRow(rowModel);
      rowKey = CellUtil.cloneRow(value);
      rowModel = new RowModel(rowKey);
    }
    rowModel.addCell(new CellModel(CellUtil.cloneFamily(value), CellUtil.cloneQualifier(value),
      value.getTimestamp(), CellUtil.cloneValue(value)));
  } while (--count > 0);
  model.addRow(rowModel);
  ResponseBuilder response = Response.ok(model);
  response.cacheControl(cacheControl);
  servlet.getMetrics().incrementSucessfulGetRequests(1);
  return response.build();
}
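
A client drains a scanner by repeating GETs against its URI until the server answers 204 No Content (the count == limit branch above); the n and c query parameters cap rows and values per response. A rough sketch using the HBase REST client, where the scanner URI is assumed to come from a prior scanner-creation request:

import java.io.IOException;
import org.apache.hadoop.hbase.rest.client.Client;
import org.apache.hadoop.hbase.rest.client.Response;
import org.apache.hadoop.hbase.rest.model.CellSetModel;

// Sketch: fetch scanner batches of at most 100 values until exhaustion.
static void drainScanner(Client client, String scannerUri) throws IOException {
  while (true) {
    Response response = client.get(scannerUri + "?c=100", "application/x-protobuf");
    if (response.getCode() == 204) {
      break; // scanner exhausted
    }
    CellSetModel model = new CellSetModel();
    model.getObjectFromMessage(response.getBody());
    // process model ...
  }
}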
Use of org.apache.hadoop.hbase.rest.model.RowModel in project hbase by apache.
The class ProtobufStreamingUtil, method createModelFromResults:
private CellSetModel createModelFromResults(Result[] results) {
  CellSetModel cellSetModel = new CellSetModel();
  for (Result rs : results) {
    byte[] rowKey = rs.getRow();
    RowModel rModel = new RowModel(rowKey);
    List<Cell> kvs = rs.listCells();
    for (Cell kv : kvs) {
      rModel.addCell(new CellModel(CellUtil.cloneFamily(kv), CellUtil.cloneQualifier(kv),
        kv.getTimestamp(), CellUtil.cloneValue(kv)));
    }
    cellSetModel.addRow(rModel);
  }
  return cellSetModel;
}
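
A plausible caller (not shown in the source) feeds this helper batches pulled from a ResultScanner; ResultScanner.next(int) returns up to that many Results and an empty array once the scan is exhausted:

// Sketch: convert a table scan into CellSetModel batches of up to 100 rows.
// 'table' is assumed to be an open org.apache.hadoop.hbase.client.Table.
try (ResultScanner scanner = table.getScanner(new Scan())) {
  Result[] batch;
  while ((batch = scanner.next(100)).length > 0) {
    CellSetModel model = createModelFromResults(batch);
    // serialize 'model' to the response stream ...
  }
}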
Use of org.apache.hadoop.hbase.rest.model.RowModel in project hbase by apache.
The class TestScannersWithLabels, method countCellSet:
private static int countCellSet(CellSetModel model) {
  int count = 0;
  Iterator<RowModel> rows = model.getRows().iterator();
  while (rows.hasNext()) {
    RowModel row = rows.next();
    Iterator<CellModel> cells = row.getCells().iterator();
    while (cells.hasNext()) {
      cells.next();
      count++;
    }
  }
  return count;
}
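
In a test, this helper pairs naturally with a scanner response deserialized from protobuf. A short sketch, where the endpoint path and the expected count of 5 are hypothetical:

// Sketch: count the cells returned by one scanner batch.
Response response = client.get("/mytable/scanner/abc123", "application/x-protobuf");
CellSetModel cellSet = new CellSetModel();
cellSet.getObjectFromMessage(response.getBody());
assertEquals(5, countCellSet(cellSet));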