Search in sources :

Example 1 with ResponseBuilder

use of org.apache.hbase.thirdparty.javax.ws.rs.core.Response.ResponseBuilder in project hbase by apache.

In the class RowResource, the method increment:

/**
 * Validates the input request parameters, parses columns from CellSetModel,
 * and invokes Increment on HTable.
 *
 * @param model instance of CellSetModel; must contain exactly one row
 * @return Response 200 OK, 304 Not modified, 400 Bad request
 */
Response increment(final CellSetModel model) {
    Table table = null;
    Increment increment = null;
    try {
        table = servlet.getTable(tableResource.getName());
        // Exactly one row must be supplied for an increment request.
        if (model.getRows().size() != 1) {
            servlet.getMetrics().incrementFailedIncrementRequests(1);
            return Response.status(Response.Status.BAD_REQUEST).type(MIMETYPE_TEXT)
                .entity("Bad request: Number of rows specified is not 1." + CRLF).build();
        }
        RowModel rowModel = model.getRows().get(0);
        byte[] key = rowModel.getKey();
        if (key == null) {
            // Fall back to the row key from the request URI when the body omits it.
            key = rowspec.getRow();
        }
        if (key == null) {
            servlet.getMetrics().incrementFailedIncrementRequests(1);
            return Response.status(Response.Status.BAD_REQUEST).type(MIMETYPE_TEXT)
                .entity("Bad request: Row key found to be null." + CRLF).build();
        }
        increment = new Increment(key);
        increment.setReturnResults(returnResult);
        int i = 0;
        for (CellModel cell : rowModel.getCells()) {
            byte[] col = cell.getColumn();
            if (col == null) {
                // Columns missing from the body are taken positionally from the URI spec.
                try {
                    col = rowspec.getColumns()[i++];
                } catch (ArrayIndexOutOfBoundsException e) {
                    col = null;
                }
            }
            if (col == null) {
                servlet.getMetrics().incrementFailedIncrementRequests(1);
                return Response.status(Response.Status.BAD_REQUEST).type(MIMETYPE_TEXT)
                    .entity("Bad request: Column found to be null." + CRLF).build();
            }
            byte[][] parts = CellUtil.parseColumn(col);
            if (parts.length != 2) {
                servlet.getMetrics().incrementFailedIncrementRequests(1);
                return Response.status(Response.Status.BAD_REQUEST).type(MIMETYPE_TEXT)
                    .entity("Bad request: Column incorrectly specified." + CRLF).build();
            }
            // The cell value carries the increment amount encoded as a string.
            increment.addColumn(parts[0], parts[1],
                Long.parseLong(Bytes.toStringBinary(cell.getValue())));
        }
        if (LOG.isDebugEnabled()) {
            LOG.debug("INCREMENT " + increment.toString());
        }
        Result result = table.increment(increment);
        if (returnResult) {
            if (result.isEmpty()) {
                servlet.getMetrics().incrementFailedIncrementRequests(1);
                return Response.status(Response.Status.NOT_MODIFIED).type(MIMETYPE_TEXT)
                    .entity("Increment return empty." + CRLF).build();
            }
            CellSetModel rModel = new CellSetModel();
            RowModel rRowModel = new RowModel(result.getRow());
            for (Cell cell : result.listCells()) {
                rRowModel.addCell(new CellModel(CellUtil.cloneFamily(cell),
                    CellUtil.cloneQualifier(cell), cell.getTimestamp(), CellUtil.cloneValue(cell)));
            }
            // BUGFIX: return the row built from the increment Result (rRowModel),
            // not the request's rowModel, which previously discarded the computed values.
            rModel.addRow(rRowModel);
            servlet.getMetrics().incrementSucessfulIncrementRequests(1);
            return Response.ok(rModel).build();
        }
        ResponseBuilder response = Response.ok();
        servlet.getMetrics().incrementSucessfulIncrementRequests(1);
        return response.build();
    } catch (Exception e) {
        servlet.getMetrics().incrementFailedIncrementRequests(1);
        return processException(e);
    } finally {
        if (table != null) {
            try {
                table.close();
            } catch (IOException ioe) {
                LOG.debug("Exception received while closing the table " + table.getName(), ioe);
            }
        }
    }
}
Also used : Table(org.apache.hadoop.hbase.client.Table) CellSetModel(org.apache.hadoop.hbase.rest.model.CellSetModel) IOException(java.io.IOException) IOException(java.io.IOException) Result(org.apache.hadoop.hbase.client.Result) Increment(org.apache.hadoop.hbase.client.Increment) RowModel(org.apache.hadoop.hbase.rest.model.RowModel) CellModel(org.apache.hadoop.hbase.rest.model.CellModel) ResponseBuilder(org.apache.hbase.thirdparty.javax.ws.rs.core.Response.ResponseBuilder) Cell(org.apache.hadoop.hbase.Cell)

Example 2 with ResponseBuilder

use of org.apache.hbase.thirdparty.javax.ws.rs.core.Response.ResponseBuilder in project hbase by apache.

In the class RowResource, the method checkAndDelete:

/**
 * Validates the input request parameters, parses columns from CellSetModel,
 * and invokes checkAndDelete on HTable. The last cell in the row model holds
 * the value to compare; any preceding cells name additional columns to delete.
 *
 * @param model instance of CellSetModel
 * @return Response 200 OK, 304 Not modified, 400 Bad request
 */
Response checkAndDelete(final CellSetModel model) {
    Table table = null;
    Delete delete = null;
    try {
        table = servlet.getTable(tableResource.getName());
        // Exactly one row must be supplied for a check-and-delete request.
        if (model.getRows().size() != 1) {
            servlet.getMetrics().incrementFailedDeleteRequests(1);
            return Response.status(Response.Status.BAD_REQUEST).type(MIMETYPE_TEXT)
                .entity("Bad request: Number of rows specified is not 1." + CRLF).build();
        }
        RowModel rowModel = model.getRows().get(0);
        byte[] key = rowModel.getKey();
        if (key == null) {
            // Fall back to the row key from the request URI when the body omits it.
            key = rowspec.getRow();
        }
        if (key == null) {
            servlet.getMetrics().incrementFailedDeleteRequests(1);
            return Response.status(Response.Status.BAD_REQUEST).type(MIMETYPE_TEXT)
                .entity("Bad request: Row key found to be null." + CRLF).build();
        }
        List<CellModel> cellModels = rowModel.getCells();
        int cellModelCount = cellModels.size();
        // BUGFIX: an empty cell list previously threw IndexOutOfBoundsException
        // below (get(-1)) and surfaced as a generic error instead of a 400.
        if (cellModelCount < 1) {
            servlet.getMetrics().incrementFailedDeleteRequests(1);
            return Response.status(Response.Status.BAD_REQUEST).type(MIMETYPE_TEXT)
                .entity("Bad request: Cell not specified for check." + CRLF).build();
        }
        delete = new Delete(key);
        boolean retValue;
        // The last cell carries the value the check must match.
        CellModel valueToDeleteCell = rowModel.getCells().get(cellModelCount - 1);
        byte[] valueToDeleteColumn = valueToDeleteCell.getColumn();
        if (valueToDeleteColumn == null) {
            try {
                valueToDeleteColumn = rowspec.getColumns()[0];
            } catch (final ArrayIndexOutOfBoundsException e) {
                servlet.getMetrics().incrementFailedDeleteRequests(1);
                return Response.status(Response.Status.BAD_REQUEST).type(MIMETYPE_TEXT)
                    .entity("Bad request: Column not specified for check." + CRLF).build();
            }
        }
        byte[][] parts;
        // Copy all the cells to the Delete request if extra cells are sent
        if (cellModelCount > 1) {
            for (int i = 0, n = cellModelCount - 1; i < n; i++) {
                CellModel cell = cellModels.get(i);
                byte[] col = cell.getColumn();
                if (col == null) {
                    // BUGFIX: was incrementFailedPutRequests — this is a delete path.
                    servlet.getMetrics().incrementFailedDeleteRequests(1);
                    return Response.status(Response.Status.BAD_REQUEST).type(MIMETYPE_TEXT)
                        .entity("Bad request: Column found to be null." + CRLF).build();
                }
                parts = CellUtil.parseColumn(col);
                if (parts.length == 1) {
                    // Only Column Family is specified
                    delete.addFamily(parts[0], cell.getTimestamp());
                } else if (parts.length == 2) {
                    delete.addColumn(parts[0], parts[1], cell.getTimestamp());
                } else {
                    servlet.getMetrics().incrementFailedDeleteRequests(1);
                    return Response.status(Response.Status.BAD_REQUEST).type(MIMETYPE_TEXT)
                        .entity("Bad request: Column to delete incorrectly specified." + CRLF).build();
                }
            }
        }
        parts = CellUtil.parseColumn(valueToDeleteColumn);
        if (parts.length == 2) {
            if (parts[1].length != 0) {
                // if that is the only cell passed to the rest api
                if (cellModelCount == 1) {
                    delete.addColumns(parts[0], parts[1]);
                }
                retValue = table.checkAndMutate(key, parts[0]).qualifier(parts[1])
                    .ifEquals(valueToDeleteCell.getValue()).thenDelete(delete);
            } else {
                // The case of empty qualifier.
                if (cellModelCount == 1) {
                    delete.addColumns(parts[0], Bytes.toBytes(StringUtils.EMPTY));
                }
                retValue = table.checkAndMutate(key, parts[0])
                    .ifEquals(valueToDeleteCell.getValue()).thenDelete(delete);
            }
        } else {
            servlet.getMetrics().incrementFailedDeleteRequests(1);
            return Response.status(Response.Status.BAD_REQUEST).type(MIMETYPE_TEXT)
                .entity("Bad request: Column to check incorrectly specified." + CRLF).build();
        }
        if (LOG.isTraceEnabled()) {
            LOG.trace("CHECK-AND-DELETE " + delete.toString() + ", returns " + retValue);
        }
        if (!retValue) {
            servlet.getMetrics().incrementFailedDeleteRequests(1);
            return Response.status(Response.Status.NOT_MODIFIED).type(MIMETYPE_TEXT)
                .entity(" Delete check failed." + CRLF).build();
        }
        ResponseBuilder response = Response.ok();
        servlet.getMetrics().incrementSucessfulDeleteRequests(1);
        return response.build();
    } catch (Exception e) {
        servlet.getMetrics().incrementFailedDeleteRequests(1);
        return processException(e);
    } finally {
        if (table != null) {
            try {
                table.close();
            } catch (IOException ioe) {
                LOG.debug("Exception received while closing the table", ioe);
            }
        }
    }
}
Also used : Delete(org.apache.hadoop.hbase.client.Delete) Table(org.apache.hadoop.hbase.client.Table) IOException(java.io.IOException) IOException(java.io.IOException) RowModel(org.apache.hadoop.hbase.rest.model.RowModel) CellModel(org.apache.hadoop.hbase.rest.model.CellModel) ResponseBuilder(org.apache.hbase.thirdparty.javax.ws.rs.core.Response.ResponseBuilder)

Example 3 with ResponseBuilder

use of org.apache.hbase.thirdparty.javax.ws.rs.core.Response.ResponseBuilder in project hbase by apache.

In the class RegionsResource, the method get:

@GET
@Produces({ MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF, MIMETYPE_PROTOBUF_IETF })
public Response get(@Context final UriInfo uriInfo) {
    if (LOG.isTraceEnabled()) {
        LOG.trace("GET " + uriInfo.getAbsolutePath());
    }
    servlet.getMetrics().incrementRequests(1);
    try {
        final TableName tableName = TableName.valueOf(tableResource.getName());
        if (!tableResource.exists()) {
            throw new TableNotFoundException(tableName);
        }
        // Fetch every region location for the table, closing the connection
        // and locator as soon as the locations have been retrieved.
        final TableInfoModel tableInfo = new TableInfoModel(tableName.getNameAsString());
        List<HRegionLocation> regionLocations;
        try (Connection connection = ConnectionFactory.createConnection(servlet.getConfiguration());
            RegionLocator locator = connection.getRegionLocator(tableName)) {
            regionLocations = locator.getAllRegionLocations();
        }
        // Translate each region location into the model form returned to the client.
        for (HRegionLocation location : regionLocations) {
            RegionInfo regionInfo = location.getRegion();
            ServerName serverName = location.getServerName();
            tableInfo.add(new TableRegionModel(tableName.getNameAsString(), regionInfo.getRegionId(),
                regionInfo.getStartKey(), regionInfo.getEndKey(), serverName.getAddress().toString()));
        }
        ResponseBuilder builder = Response.ok(tableInfo).cacheControl(cacheControl);
        servlet.getMetrics().incrementSucessfulGetRequests(1);
        return builder.build();
    } catch (TableNotFoundException e) {
        servlet.getMetrics().incrementFailedGetRequests(1);
        return Response.status(Response.Status.NOT_FOUND).type(MIMETYPE_TEXT)
            .entity("Not found" + CRLF).build();
    } catch (IOException e) {
        servlet.getMetrics().incrementFailedGetRequests(1);
        return Response.status(Response.Status.SERVICE_UNAVAILABLE).type(MIMETYPE_TEXT)
            .entity("Unavailable" + CRLF).build();
    }
}
Also used : TableInfoModel(org.apache.hadoop.hbase.rest.model.TableInfoModel) TableName(org.apache.hadoop.hbase.TableName) TableNotFoundException(org.apache.hadoop.hbase.TableNotFoundException) RegionLocator(org.apache.hadoop.hbase.client.RegionLocator) HRegionLocation(org.apache.hadoop.hbase.HRegionLocation) ServerName(org.apache.hadoop.hbase.ServerName) Connection(org.apache.hadoop.hbase.client.Connection) RegionInfo(org.apache.hadoop.hbase.client.RegionInfo) TableRegionModel(org.apache.hadoop.hbase.rest.model.TableRegionModel) IOException(java.io.IOException) ResponseBuilder(org.apache.hbase.thirdparty.javax.ws.rs.core.Response.ResponseBuilder) Produces(org.apache.hbase.thirdparty.javax.ws.rs.Produces) GET(org.apache.hbase.thirdparty.javax.ws.rs.GET)

Example 4 with ResponseBuilder

use of org.apache.hbase.thirdparty.javax.ws.rs.core.Response.ResponseBuilder in project hbase by apache.

In the class TableScanResource, the method getProtobuf:

/**
 * Streams scan results to the client as protobuf.
 *
 * @param uriInfo request URI, used only for trace logging
 * @param contentType the Accept header value, echoed back as the response content-type
 * @return Response 200 OK with a streaming protobuf body, or an error response
 */
@GET
@Produces({ Constants.MIMETYPE_PROTOBUF, Constants.MIMETYPE_PROTOBUF_IETF })
public Response getProtobuf(@Context final UriInfo uriInfo,
    @HeaderParam("Accept") final String contentType) {
    if (LOG.isTraceEnabled()) {
        LOG.trace("GET " + uriInfo.getAbsolutePath() + " as " + MIMETYPE_BINARY);
    }
    servlet.getMetrics().incrementRequests(1);
    try {
        int fetchSize = this.servlet.getConfiguration().getInt(Constants.SCAN_FETCH_SIZE, 10);
        StreamingOutput stream =
            new ProtobufStreamingOutput(this.results, contentType, userRequestedLimit, fetchSize);
        servlet.getMetrics().incrementSucessfulScanRequests(1);
        ResponseBuilder response = Response.ok(stream);
        response.header("content-type", contentType);
        return response.build();
    } catch (Exception exp) {
        servlet.getMetrics().incrementFailedScanRequests(1);
        LOG.warn(exp.toString(), exp);
        // BUGFIX: previously the Response from processException was discarded and
        // the method returned null, which is an error inside the JAX-RS container.
        // Propagate the mapped error response instead, as the sibling methods do.
        return processException(exp);
    }
}
Also used : StreamingOutput(org.apache.hbase.thirdparty.javax.ws.rs.core.StreamingOutput) ResponseBuilder(org.apache.hbase.thirdparty.javax.ws.rs.core.Response.ResponseBuilder) IOException(java.io.IOException) Produces(org.apache.hbase.thirdparty.javax.ws.rs.Produces) GET(org.apache.hbase.thirdparty.javax.ws.rs.GET)

Example 5 with ResponseBuilder

use of org.apache.hbase.thirdparty.javax.ws.rs.core.Response.ResponseBuilder in project hbase by apache.

In the class ExistsResource, the method get:

@GET
@Produces({ MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF, MIMETYPE_PROTOBUF_IETF, MIMETYPE_BINARY })
public Response get(@Context final UriInfo uriInfo) {
    // Probe for table existence: a missing table maps to 404,
    // a storage-layer failure maps to 503.
    try {
        if (!tableResource.exists()) {
            return Response.status(Response.Status.NOT_FOUND).type(MIMETYPE_TEXT)
                .entity("Not found" + CRLF).build();
        }
    } catch (IOException e) {
        return Response.status(Response.Status.SERVICE_UNAVAILABLE).type(MIMETYPE_TEXT)
            .entity("Unavailable" + CRLF).build();
    }
    // Table exists: reply 200 with the shared cache-control policy attached.
    ResponseBuilder builder = Response.ok();
    builder.cacheControl(cacheControl);
    return builder.build();
}
Also used : IOException(java.io.IOException) ResponseBuilder(org.apache.hbase.thirdparty.javax.ws.rs.core.Response.ResponseBuilder) Produces(org.apache.hbase.thirdparty.javax.ws.rs.Produces) GET(org.apache.hbase.thirdparty.javax.ws.rs.GET)

Aggregations

ResponseBuilder (org.apache.hbase.thirdparty.javax.ws.rs.core.Response.ResponseBuilder)15 IOException (java.io.IOException)12 GET (org.apache.hbase.thirdparty.javax.ws.rs.GET)11 Produces (org.apache.hbase.thirdparty.javax.ws.rs.Produces)11 CellModel (org.apache.hadoop.hbase.rest.model.CellModel)5 RowModel (org.apache.hadoop.hbase.rest.model.RowModel)5 Cell (org.apache.hadoop.hbase.Cell)4 Table (org.apache.hadoop.hbase.client.Table)4 TableNotFoundException (org.apache.hadoop.hbase.TableNotFoundException)3 ServerName (org.apache.hadoop.hbase.ServerName)2 Put (org.apache.hadoop.hbase.client.Put)2 CellSetModel (org.apache.hadoop.hbase.rest.model.CellSetModel)2 ArrayList (java.util.ArrayList)1 Map (java.util.Map)1 ClusterMetrics (org.apache.hadoop.hbase.ClusterMetrics)1 HRegionLocation (org.apache.hadoop.hbase.HRegionLocation)1 RegionMetrics (org.apache.hadoop.hbase.RegionMetrics)1 ServerMetrics (org.apache.hadoop.hbase.ServerMetrics)1 TableExistsException (org.apache.hadoop.hbase.TableExistsException)1 TableName (org.apache.hadoop.hbase.TableName)1