Use of org.apache.hadoop.hbase.rest.model.RowModel in project hbase by apache.
From class RowResource, method checkAndPut:
/**
* Validates the input request parameters, parses columns from CellSetModel,
* and invokes checkAndPut on HTable.
*
* @param model instance of CellSetModel
* @return Response 200 OK, 304 Not modified, 400 Bad request
*/
Response checkAndPut(final CellSetModel model) {
  Table table = null;
  try {
    table = servlet.getTable(tableResource.getName());
    if (model.getRows().size() != 1) {
      servlet.getMetrics().incrementFailedPutRequests(1);
      return Response.status(Response.Status.BAD_REQUEST).type(MIMETYPE_TEXT)
          .entity("Bad request: Number of rows specified is not 1." + CRLF).build();
    }
    RowModel rowModel = model.getRows().get(0);
    byte[] key = rowModel.getKey();
    if (key == null) {
      key = rowspec.getRow();
    }
    List<CellModel> cellModels = rowModel.getCells();
    int cellModelCount = cellModels.size();
    if (key == null || cellModelCount <= 1) {
      servlet.getMetrics().incrementFailedPutRequests(1);
      return Response.status(Response.Status.BAD_REQUEST).type(MIMETYPE_TEXT)
          .entity("Bad request: Either row key is null or no data found for columns specified." + CRLF)
          .build();
    }
    Put put = new Put(key);
    boolean retValue;
    // The last cell in the row carries the value to check against.
    CellModel valueToCheckCell = cellModels.get(cellModelCount - 1);
    byte[] valueToCheckColumn = valueToCheckCell.getColumn();
    byte[][] valueToPutParts = KeyValue.parseColumn(valueToCheckColumn);
    if (valueToPutParts.length == 2 && valueToPutParts[1].length > 0) {
      CellModel valueToPutCell = null;
      // Copy all of the incoming data cells into the Put request,
      // and track whether the check cell's column is also being put.
      for (int i = 0, n = cellModelCount - 1; i < n; i++) {
        CellModel cell = cellModels.get(i);
        byte[] col = cell.getColumn();
        if (col == null) {
          servlet.getMetrics().incrementFailedPutRequests(1);
          return Response.status(Response.Status.BAD_REQUEST).type(MIMETYPE_TEXT)
              .entity("Bad request: Column found to be null." + CRLF).build();
        }
        byte[][] parts = KeyValue.parseColumn(col);
        if (parts.length != 2) {
          return Response.status(Response.Status.BAD_REQUEST).type(MIMETYPE_TEXT)
              .entity("Bad request" + CRLF).build();
        }
        put.addImmutable(parts[0], parts[1], cell.getTimestamp(), cell.getValue());
        if (Bytes.equals(col, valueToCheckCell.getColumn())) {
          valueToPutCell = cell;
        }
      }
      if (valueToPutCell == null) {
        servlet.getMetrics().incrementFailedPutRequests(1);
        return Response.status(Response.Status.BAD_REQUEST).type(MIMETYPE_TEXT)
            .entity("Bad request: The column to put and check do not match." + CRLF).build();
      } else {
        retValue = table.checkAndPut(key, valueToPutParts[0], valueToPutParts[1],
            valueToCheckCell.getValue(), put);
      }
    } else {
      servlet.getMetrics().incrementFailedPutRequests(1);
      return Response.status(Response.Status.BAD_REQUEST).type(MIMETYPE_TEXT)
          .entity("Bad request: Column incorrectly specified." + CRLF).build();
    }
    if (LOG.isTraceEnabled()) {
      LOG.trace("CHECK-AND-PUT " + put.toString() + ", returns " + retValue);
    }
    if (!retValue) {
      servlet.getMetrics().incrementFailedPutRequests(1);
      return Response.status(Response.Status.NOT_MODIFIED).type(MIMETYPE_TEXT)
          .entity("Value not Modified" + CRLF).build();
    }
    ResponseBuilder response = Response.ok();
    servlet.getMetrics().incrementSucessfulPutRequests(1);
    return response.build();
  } catch (Exception e) {
    servlet.getMetrics().incrementFailedPutRequests(1);
    return processException(e);
  } finally {
    if (table != null) {
      try {
        table.close();
      } catch (IOException ioe) {
        LOG.debug("Exception received while closing the table", ioe);
      }
    }
  }
}
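The check-and-put contract above requires a single RowModel in which the cell carrying the value to compare is the last cell, and its column also appears among the cells being put. A minimal client-side sketch of building such a payload with RowModel and CellSetModel; the row key, column, and values are placeholder assumptions, not taken from the HBase source:

// Data cell first: what will be written if the check succeeds.
RowModel rowModel = new RowModel("testrow");
rowModel.addCell(new CellModel(Bytes.toBytes("cf:a"), Bytes.toBytes("new-value")));
// Check cell last: same column, carrying the value currently expected in the store.
rowModel.addCell(new CellModel(Bytes.toBytes("cf:a"), Bytes.toBytes("expected-value")));
CellSetModel cellSetModel = new CellSetModel();
cellSetModel.addRow(rowModel);
// cellSetModel is then marshalled (XML or protobuf) and sent with a PUT,
// as the RowResourceBase helpers further down illustrate.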
Use of org.apache.hadoop.hbase.rest.model.RowModel in project hbase by apache.
From class RowResource, method update:
Response update(final CellSetModel model, final boolean replace) {
  servlet.getMetrics().incrementRequests(1);
  if (servlet.isReadOnly()) {
    servlet.getMetrics().incrementFailedPutRequests(1);
    return Response.status(Response.Status.FORBIDDEN).type(MIMETYPE_TEXT)
        .entity("Forbidden" + CRLF).build();
  }
  // Dispatch atomic check-and-mutate requests based on the "check" query parameter.
  if (CHECK_PUT.equalsIgnoreCase(check)) {
    return checkAndPut(model);
  } else if (CHECK_DELETE.equalsIgnoreCase(check)) {
    return checkAndDelete(model);
  } else if (check != null && check.length() > 0) {
    return Response.status(Response.Status.BAD_REQUEST).type(MIMETYPE_TEXT)
        .entity("Invalid check value '" + check + "'" + CRLF).build();
  }
  Table table = null;
  try {
    List<RowModel> rows = model.getRows();
    List<Put> puts = new ArrayList<>();
    for (RowModel row : rows) {
      byte[] key = row.getKey();
      if (key == null) {
        key = rowspec.getRow();
      }
      if (key == null) {
        servlet.getMetrics().incrementFailedPutRequests(1);
        return Response.status(Response.Status.BAD_REQUEST).type(MIMETYPE_TEXT)
            .entity("Bad request: Row key not specified." + CRLF).build();
      }
      Put put = new Put(key);
      int i = 0;
      for (CellModel cell : row.getCells()) {
        byte[] col = cell.getColumn();
        if (col == null) {
          // Fall back to the columns named in the row specification.
          try {
            col = rowspec.getColumns()[i++];
          } catch (ArrayIndexOutOfBoundsException e) {
            col = null;
          }
        }
        if (col == null) {
          servlet.getMetrics().incrementFailedPutRequests(1);
          return Response.status(Response.Status.BAD_REQUEST).type(MIMETYPE_TEXT)
              .entity("Bad request: Column found to be null." + CRLF).build();
        }
        byte[][] parts = KeyValue.parseColumn(col);
        if (parts.length != 2) {
          return Response.status(Response.Status.BAD_REQUEST).type(MIMETYPE_TEXT)
              .entity("Bad request" + CRLF).build();
        }
        put.addImmutable(parts[0], parts[1], cell.getTimestamp(), cell.getValue());
      }
      puts.add(put);
      if (LOG.isTraceEnabled()) {
        LOG.trace("PUT " + put.toString());
      }
    }
    table = servlet.getTable(tableResource.getName());
    table.put(puts);
    ResponseBuilder response = Response.ok();
    servlet.getMetrics().incrementSucessfulPutRequests(1);
    return response.build();
  } catch (Exception e) {
    servlet.getMetrics().incrementFailedPutRequests(1);
    return processException(e);
  } finally {
    if (table != null) {
      try {
        table.close();
      } catch (IOException ioe) {
        LOG.debug("Exception received while closing the table", ioe);
      }
    }
  }
}
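The check field consulted above is populated from the request's check query parameter, so the same PUT payload is either applied as plain Puts (one per RowModel) or routed to checkAndPut/checkAndDelete. A hedged sketch of both calls with the bundled REST client, assuming the query values put and delete behind the CHECK_PUT and CHECK_DELETE constants, and placeholder table, row, and body names:

// Plain batch update: every RowModel in the marshalled CellSetModel becomes one Put.
Response plain = client.put("/" + TABLE + "/fakerow", Constants.MIMETYPE_XML,
    Bytes.toBytes(xmlBody));
// Atomic variant: appending ?check=put routes the same payload to checkAndPut().
Response atomic = client.put("/" + TABLE + "/fakerow?check=put", Constants.MIMETYPE_XML,
    Bytes.toBytes(xmlBody));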
Use of org.apache.hadoop.hbase.rest.model.RowModel in project hbase by apache.
From class MultiRowResource, method get:
@GET
@Produces({ MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF, MIMETYPE_PROTOBUF_IETF })
public Response get(@Context final UriInfo uriInfo) {
  MultivaluedMap<String, String> params = uriInfo.getQueryParameters();
  servlet.getMetrics().incrementRequests(1);
  try {
    CellSetModel model = new CellSetModel();
    for (String rk : params.get(ROW_KEYS_PARAM_NAME)) {
      RowSpec rowSpec = new RowSpec(rk);
      if (this.versions != null) {
        rowSpec.setMaxVersions(this.versions);
      }
      if (this.columns != null) {
        for (int i = 0; i < this.columns.length; i++) {
          rowSpec.addColumn(this.columns[i].getBytes());
        }
      }
      ResultGenerator generator = ResultGenerator.fromRowSpec(this.tableResource.getName(),
          rowSpec, null, !params.containsKey(NOCACHE_PARAM_NAME));
      Cell value = null;
      RowModel rowModel = new RowModel(rk);
      if (generator.hasNext()) {
        while ((value = generator.next()) != null) {
          rowModel.addCell(new CellModel(CellUtil.cloneFamily(value),
              CellUtil.cloneQualifier(value), value.getTimestamp(), CellUtil.cloneValue(value)));
        }
        model.addRow(rowModel);
      } else {
        if (LOG.isTraceEnabled()) {
          LOG.trace("The row : " + rk + " not found in the table.");
        }
      }
    }
    if (model.getRows().isEmpty()) {
      // If no rows found.
      servlet.getMetrics().incrementFailedGetRequests(1);
      return Response.status(Response.Status.NOT_FOUND).type(MIMETYPE_TEXT)
          .entity("No rows found." + CRLF).build();
    } else {
      servlet.getMetrics().incrementSucessfulGetRequests(1);
      return Response.ok(model).build();
    }
  } catch (IOException e) {
    servlet.getMetrics().incrementFailedGetRequests(1);
    return processException(e);
  }
}
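One RowModel is produced per row key found, and 404 is returned only when none of the requested rows exist. A hedged sketch of calling the multiget endpoint with the bundled REST client; the table name is a placeholder, and the row query-parameter name follows the ROW_KEYS_PARAM_NAME constant:

// Fetch two rows in one request; existing rows come back as RowModel entries
// inside the CellSetModel body, missing rows are simply omitted.
Response response = client.get("/" + TABLE + "/multiget?row=testrow1&row=testrow2",
    Constants.MIMETYPE_XML);
// 200: at least one row found (body is a marshalled CellSetModel); 404: none found.
int code = response.getCode();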
Use of org.apache.hadoop.hbase.rest.model.RowModel in project hbase by apache.
From class RowResourceBase, method checkAndPutValueXML:
protected static Response checkAndPutValueXML(String url, String table, String row,
    String column, String valueToCheck, String valueToPut, HashMap<String, String> otherCells)
    throws IOException, JAXBException {
  RowModel rowModel = new RowModel(row);
  rowModel.addCell(new CellModel(Bytes.toBytes(column), Bytes.toBytes(valueToPut)));
  if (otherCells != null) {
    for (Map.Entry<String, String> entry : otherCells.entrySet()) {
      rowModel.addCell(new CellModel(Bytes.toBytes(entry.getKey()),
          Bytes.toBytes(entry.getValue())));
    }
  }
  // This cell needs to be added as the last cell: it carries the value to check.
  rowModel.addCell(new CellModel(Bytes.toBytes(column), Bytes.toBytes(valueToCheck)));
  CellSetModel cellSetModel = new CellSetModel();
  cellSetModel.addRow(rowModel);
  StringWriter writer = new StringWriter();
  xmlMarshaller.marshal(cellSetModel, writer);
  Response response = client.put(url, Constants.MIMETYPE_XML, Bytes.toBytes(writer.toString()));
  Thread.yield();
  return response;
}
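In the tests this helper is normally paired with an assertion on the HTTP status code; a short usage sketch with placeholder URL, table, row, column, and values:

Response response = checkAndPutValueXML("/" + TABLE + "/testrow?check=put", TABLE,
    "testrow", "cf:a", "current-value", "new-value", null);
// 200 when the stored value matched valueToCheck and the put was applied,
// 304 Not Modified when the check failed.
assertEquals(200, response.getCode());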
Use of org.apache.hadoop.hbase.rest.model.RowModel in project hbase by apache.
From class RowResourceBase, method checkAndDeleteValuePB:
protected static Response checkAndDeleteValuePB(String url, String table, String row,
    String column, String valueToCheck, HashMap<String, String> cellsToDelete)
    throws IOException {
  RowModel rowModel = new RowModel(row);
  if (cellsToDelete != null) {
    for (Map.Entry<String, String> entry : cellsToDelete.entrySet()) {
      rowModel.addCell(new CellModel(Bytes.toBytes(entry.getKey()),
          Bytes.toBytes(entry.getValue())));
    }
  }
  // Add this at the end
  rowModel.addCell(new CellModel(Bytes.toBytes(column), Bytes.toBytes(valueToCheck)));
  CellSetModel cellSetModel = new CellSetModel();
  cellSetModel.addRow(rowModel);
  Response response = client.put(url, Constants.MIMETYPE_PROTOBUF,
      cellSetModel.createProtobufOutput());
  Thread.yield();
  return response;
}
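Usage mirrors the XML variant above; a short sketch with placeholder names, sending the protobuf payload to the check-and-delete path:

Response response = checkAndDeleteValuePB("/" + TABLE + "/testrow?check=delete", TABLE,
    "testrow", "cf:a", "expected-value", null);
// 200 when the stored value matched and the delete was applied, 304 otherwise.
assertEquals(200, response.getCode());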