Example usage of org.apache.hadoop.hbase.rest.client.Response in the Apache HBase project:
the testGzipFilter method of the TestGzipFilter class.
@Test
public void testGzipFilter() throws Exception {
String path = "/" + TABLE + "/" + ROW_1 + "/" + COLUMN_1;
// Gzip-compress VALUE_1 so we can exercise the REST server's input-side filter.
ByteArrayOutputStream bos = new ByteArrayOutputStream();
// try-with-resources guarantees the compressor is closed (and flushed) even on error
try (GZIPOutputStream os = new GZIPOutputStream(bos)) {
os.write(VALUE_1);
}
byte[] value_1_gzip = bos.toByteArray();
// input side filter: PUT a gzip-encoded body and verify the server decodes it
Header[] headers = new Header[2];
headers[0] = new BasicHeader("Content-Type", Constants.MIMETYPE_BINARY);
headers[1] = new BasicHeader("Content-Encoding", "gzip");
Response response = client.put(path, headers, value_1_gzip);
assertEquals(200, response.getCode());
Table table = TEST_UTIL.getConnection().getTable(TABLE);
try {
// Read the row back through the native client; the stored cell must be the
// *uncompressed* VALUE_1, proving the filter decoded the request body.
Get get = new Get(Bytes.toBytes(ROW_1));
get.addColumn(Bytes.toBytes(CFA), Bytes.toBytes("1"));
Result result = table.get(get);
byte[] value = result.getValue(Bytes.toBytes(CFA), Bytes.toBytes("1"));
assertNotNull(value);
assertTrue(Bytes.equals(value, VALUE_1));
// output side filter: GET with Accept-Encoding: gzip and decode the response
headers[0] = new BasicHeader("Accept", Constants.MIMETYPE_BINARY);
headers[1] = new BasicHeader("Accept-Encoding", "gzip");
response = client.get(path, headers);
assertEquals(200, response.getCode());
ByteArrayInputStream bis = new ByteArrayInputStream(response.getBody());
try (GZIPInputStream is = new GZIPInputStream(bis)) {
value = new byte[VALUE_1.length];
// BUG FIX: InputStream.read may return fewer bytes than requested;
// the original ignored the return value. Loop until the full payload
// is read (or EOF) so the comparison below is meaningful.
int off = 0;
while (off < value.length) {
int n = is.read(value, off, value.length - off);
if (n < 0) {
break; // premature EOF; the length assertion below will fail loudly
}
off += n;
}
assertEquals(VALUE_1.length, off);
assertTrue(Bytes.equals(value, VALUE_1));
}
} finally {
// close the table even if an assertion above fails
table.close();
}
testScannerResultCodes();
}
Example usage of org.apache.hadoop.hbase.rest.client.Response in the Apache HBase project:
the testTableCreateAndDeletePB method of the TestSchemaResource class.
@Test
public void testTableCreateAndDeletePB() throws IOException {
// End-to-end schema lifecycle over the REST gateway using protobuf encoding:
// create table -> verify read-only rejection -> fetch schema (two pbuf MIME
// types) -> verify delete rejection (CSRF / read-only) -> delete table.
String schemaPath = "/" + TABLE2 + "/schema";
TableSchemaModel model;
Response response;
Admin admin = TEST_UTIL.getAdmin();
// precondition: the table must not exist yet
assertFalse(admin.tableExists(TableName.valueOf(TABLE2)));
// create the table
model = testTableSchemaModel.buildTestModel(TABLE2);
testTableSchemaModel.checkModel(model, TABLE2);
if (csrfEnabled) {
// test put operation is forbidden without custom header
response = client.put(schemaPath, Constants.MIMETYPE_PROTOBUF, model.createProtobufOutput());
assertEquals(400, response.getCode());
}
// with the CSRF header (extraHdr) the create must succeed with 201 Created
response = client.put(schemaPath, Constants.MIMETYPE_PROTOBUF, model.createProtobufOutput(), extraHdr);
assertEquals("put failed with csrf " + (csrfEnabled ? "enabled" : "disabled"), 201, response.getCode());
// recall the same put operation but in read-only mode
conf.set("hbase.rest.readonly", "true");
response = client.put(schemaPath, Constants.MIMETYPE_PROTOBUF, model.createProtobufOutput(), extraHdr);
assertNotNull(extraHdr);
// read-only mode must reject mutations with 403 Forbidden
assertEquals(403, response.getCode());
// retrieve the schema and validate it
response = client.get(schemaPath, Constants.MIMETYPE_PROTOBUF);
assertEquals(200, response.getCode());
assertEquals(Constants.MIMETYPE_PROTOBUF, response.getHeader("content-type"));
model = new TableSchemaModel();
model.getObjectFromMessage(response.getBody());
testTableSchemaModel.checkModel(model, TABLE2);
// retrieve the schema and validate it with alternate pbuf type
response = client.get(schemaPath, Constants.MIMETYPE_PROTOBUF_IETF);
assertEquals(200, response.getCode());
assertEquals(Constants.MIMETYPE_PROTOBUF_IETF, response.getHeader("content-type"));
model = new TableSchemaModel();
model.getObjectFromMessage(response.getBody());
testTableSchemaModel.checkModel(model, TABLE2);
if (csrfEnabled) {
// test delete schema operation is forbidden without custom header
response = client.delete(schemaPath);
assertEquals(400, response.getCode());
}
// test delete schema operation is forbidden in read-only mode
// (read-only flag set above is still in effect here)
response = client.delete(schemaPath, extraHdr);
assertEquals(403, response.getCode());
// return read-only setting back to default
conf.set("hbase.rest.readonly", "false");
// delete the table and make sure HBase concurs
response = client.delete(schemaPath, extraHdr);
assertEquals(200, response.getCode());
assertFalse(admin.tableExists(TableName.valueOf(TABLE2)));
}
Example usage of org.apache.hadoop.hbase.rest.client.Response in the Apache HBase project:
the putValuePB method of the RowResourceBase class.
/**
 * Stores a single cell via a protobuf-encoded PUT against the REST gateway.
 * Builds a one-row, one-cell CellSetModel and sends it to {@code url}.
 *
 * @param url    target resource path for the PUT
 * @param table  table name (kept for signature parity with sibling helpers)
 * @param row    row key for the new cell
 * @param column fully-qualified column name
 * @param value  cell value to store
 * @return the gateway's Response so callers can assert on the status code
 */
static Response putValuePB(String url, String table, String row, String column, String value) throws IOException {
CellSetModel payload = new CellSetModel();
RowModel targetRow = new RowModel(row);
targetRow.addCell(new CellModel(Bytes.toBytes(column), Bytes.toBytes(value)));
payload.addRow(targetRow);
Response result = client.put(url, Constants.MIMETYPE_PROTOBUF, payload.createProtobufOutput());
// give the server-side write a chance to settle before the caller asserts
Thread.yield();
return result;
}
Example usage of org.apache.hadoop.hbase.rest.client.Response in the Apache HBase project:
the checkValueXML method of the RowResourceBase class.
/**
 * Fetches {@code url} as XML and asserts the first cell of the first row
 * carries the expected column name and string value.
 *
 * @param url    resource path to GET
 * @param table  table name (unused here; kept for signature parity)
 * @param row    row key (unused here; kept for signature parity)
 * @param column expected column name of the first cell
 * @param value  expected string value of the first cell
 */
protected static void checkValueXML(String url, String table, String row, String column, String value) throws IOException, JAXBException {
Response response = getValueXML(url);
assertEquals(200, response.getCode());
assertEquals(Constants.MIMETYPE_XML, response.getHeader("content-type"));
byte[] body = response.getBody();
CellSetModel cellSet = (CellSetModel) xmlUnmarshaller.unmarshal(new ByteArrayInputStream(body));
CellModel firstCell = cellSet.getRows().get(0).getCells().get(0);
assertEquals(Bytes.toString(firstCell.getColumn()), column);
assertEquals(Bytes.toString(firstCell.getValue()), value);
}
Example usage of org.apache.hadoop.hbase.rest.client.Response in the Apache HBase project:
the checkIncrementValueXML method of the RowResourceBase class.
/**
 * Fetches the given cell as XML and asserts the first cell of the first row
 * carries the expected column name and long-encoded counter value.
 *
 * @param table  table to read from
 * @param row    row key of the incremented cell
 * @param column expected column name of the first cell
 * @param value  expected counter value, decoded via Bytes.toLong
 */
protected static void checkIncrementValueXML(String table, String row, String column, long value) throws IOException, JAXBException {
Response response1 = getValueXML(table, row, column);
assertEquals(200, response1.getCode());
assertEquals(Constants.MIMETYPE_XML, response1.getHeader("content-type"));
byte[] body = response1.getBody();
CellSetModel cellSet = (CellSetModel) xmlUnmarshaller.unmarshal(new ByteArrayInputStream(body));
CellModel firstCell = cellSet.getRows().get(0).getCells().get(0);
assertEquals(Bytes.toString(firstCell.getColumn()), column);
assertEquals(Bytes.toLong(firstCell.getValue()), value);
}
Aggregations