Use of co.cask.cdap.api.data.format.FormatSpecification in project cdap by caskdata.
From the class FileStreamAdmin, method updateConfig:
@Override
public void updateConfig(final StreamId streamId, final StreamProperties properties) throws Exception {
  Location streamLocation;
  // User should have admin access on the stream to update its configuration
  ensureAccess(streamId, Action.ADMIN);
  streamLocation = impersonator.doAs(streamId.getParent(), new Callable<Location>() {
    @Override
    public Location call() throws Exception {
      return getStreamLocation(streamId);
    }
  });
  Preconditions.checkArgument(streamLocation.isDirectory(), "Stream '%s' does not exist.", streamId);
  SecurityUtil.verifyOwnerPrincipal(streamId, properties.getOwnerPrincipal(), ownerAdmin);
  streamCoordinatorClient.updateProperties(streamId, new Callable<CoordinatorStreamProperties>() {
    @Override
    public CoordinatorStreamProperties call() throws Exception {
      StreamProperties oldProperties = updateProperties(streamId, properties);
      FormatSpecification format = properties.getFormat();
      if (format != null) {
        // If the schema has changed, we need to recreate the Hive table. Changes in the format
        // class and its settings don't require a Hive change, as they are just properties used
        // by the stream storage handler.
        Schema currSchema = oldProperties.getFormat().getSchema();
        Schema newSchema = format.getSchema();
        if (!Objects.equals(currSchema, newSchema)) {
          alterExploreStream(streamId, false, null);
          alterExploreStream(streamId, true, format);
        }
      }
      publishAudit(streamId, AuditType.UPDATE);
      return new CoordinatorStreamProperties(properties.getTTL(), properties.getFormat(),
                                             properties.getNotificationThresholdMB(), null,
                                             properties.getDescription(), properties.getOwnerPrincipal());
    }
  });
}
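For context, a caller of updateConfig builds the new StreamProperties itself. The sketch below shows what such a call might look like; the streamAdmin instance, namespace, stream name, TTL, and description are assumed for illustration and are not taken from the source. Only a schema change triggers the Hive table rebuild shown above; the charset setting just rides along as a storage-handler property.

// Hypothetical caller sketch (streamAdmin and the namespace/stream names are assumptions).
StreamId streamId = new NamespaceId("default").stream("purchases");
Schema newSchema = Schema.recordOf("event", Schema.Field.of("purchase", Schema.of(Schema.Type.STRING)));
FormatSpecification newFormat = new FormatSpecification(TextRecordFormat.class.getCanonicalName(), newSchema,
                                                        ImmutableMap.of(TextRecordFormat.CHARSET, "utf8"));
// ttl, format, notification threshold (MB), description, owner principal
StreamProperties newProperties = new StreamProperties(86400L, newFormat, 128, "purchases stream", null);
streamAdmin.updateConfig(streamId, newProperties);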
Use of co.cask.cdap.api.data.format.FormatSpecification in project cdap by caskdata.
From the class StreamHandlerTest, method testUpdateDescription:
@Test
public void testUpdateDescription() throws Exception {
  // Create a stream with some ttl and description
  String desc = "large stream";
  HttpURLConnection urlConn = openURL(createURL("streams/stream1"), HttpMethod.PUT);
  urlConn.setDoOutput(true);
  Schema schema = Schema.recordOf("event", Schema.Field.of("purchase", Schema.of(Schema.Type.STRING)));
  FormatSpecification formatSpecification = new FormatSpecification(TextRecordFormat.class.getCanonicalName(), schema,
                                                                    ImmutableMap.of(TextRecordFormat.CHARSET, "utf8"));
  StreamProperties properties = new StreamProperties(1L, formatSpecification, 128, desc, null);
  urlConn.getOutputStream().write(GSON.toJson(properties).getBytes(Charsets.UTF_8));
  Assert.assertEquals(HttpResponseStatus.OK.getCode(), urlConn.getResponseCode());
  urlConn.disconnect();
  // Check whether ttl and description got persisted
  urlConn = openURL(createStreamInfoURL("stream1"), HttpMethod.GET);
  Assert.assertEquals(HttpResponseStatus.OK.getCode(), urlConn.getResponseCode());
  StreamProperties actual = GSON.fromJson(new String(ByteStreams.toByteArray(urlConn.getInputStream()), Charsets.UTF_8),
                                          StreamProperties.class);
  urlConn.disconnect();
  Assert.assertEquals(properties, actual);
  // Update desc and ttl and check whether the changes were persisted
  StreamProperties newProps = new StreamProperties(2L, null, null, "small stream", null);
  urlConn = openURL(createPropertiesURL("stream1"), HttpMethod.PUT);
  urlConn.setDoOutput(true);
  urlConn.getOutputStream().write(GSON.toJson(newProps).getBytes(Charsets.UTF_8));
  Assert.assertEquals(HttpResponseStatus.OK.getCode(), urlConn.getResponseCode());
  urlConn.disconnect();
  urlConn = openURL(createStreamInfoURL("stream1"), HttpMethod.GET);
  Assert.assertEquals(HttpResponseStatus.OK.getCode(), urlConn.getResponseCode());
  actual = GSON.fromJson(new String(ByteStreams.toByteArray(urlConn.getInputStream()), Charsets.UTF_8),
                         StreamProperties.class);
  urlConn.disconnect();
  StreamProperties expected = new StreamProperties(newProps.getTTL(), properties.getFormat(),
                                                   properties.getNotificationThresholdMB(),
                                                   newProps.getDescription(), properties.getOwnerPrincipal());
  Assert.assertEquals(expected, actual);
}
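The expected value at the end makes the partial-update contract explicit: fields left null in the PUT body keep their previously stored values. A minimal sketch of that merge, written here only to illustrate the behavior the test asserts (the handler's actual implementation may differ):

// Illustrative only -- not the handler's actual code.
StreamProperties merge(StreamProperties stored, StreamProperties update) {
  return new StreamProperties(
      update.getTTL() == null ? stored.getTTL() : update.getTTL(),
      update.getFormat() == null ? stored.getFormat() : update.getFormat(),
      update.getNotificationThresholdMB() == null ? stored.getNotificationThresholdMB()
                                                  : update.getNotificationThresholdMB(),
      update.getDescription() == null ? stored.getDescription() : update.getDescription(),
      update.getOwnerPrincipal() == null ? stored.getOwnerPrincipal() : update.getOwnerPrincipal());
}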
Use of co.cask.cdap.api.data.format.FormatSpecification in project cdap by caskdata.
From the class StreamHandlerTest, method testPutInvalidStreamConfig:
@Test
public void testPutInvalidStreamConfig() throws Exception {
  // create the new stream
  HttpURLConnection urlConn = openURL(createURL("streams/stream_badconf"), HttpMethod.PUT);
  Assert.assertEquals(HttpResponseStatus.OK.getCode(), urlConn.getResponseCode());
  urlConn.disconnect();
  // put a config with invalid json
  urlConn = openURL(createPropertiesURL("stream_badconf"), HttpMethod.PUT);
  urlConn.setDoOutput(true);
  urlConn.getOutputStream().write("ttl:2".getBytes(Charsets.UTF_8));
  Assert.assertEquals(HttpResponseStatus.BAD_REQUEST.getCode(), urlConn.getResponseCode());
  urlConn.disconnect();
  // put a config with an invalid TTL
  urlConn = openURL(createPropertiesURL("stream_badconf"), HttpMethod.PUT);
  urlConn.setDoOutput(true);
  StreamProperties streamProperties = new StreamProperties(-1L, null, 20);
  urlConn.getOutputStream().write(GSON.toJson(streamProperties).getBytes(Charsets.UTF_8));
  Assert.assertEquals(HttpResponseStatus.BAD_REQUEST.getCode(), urlConn.getResponseCode());
  urlConn.disconnect();
  // put a config with a format without a format class
  urlConn = openURL(createPropertiesURL("stream_badconf"), HttpMethod.PUT);
  urlConn.setDoOutput(true);
  FormatSpecification formatSpec = new FormatSpecification(null, null, null);
  streamProperties = new StreamProperties(2L, formatSpec, 20);
  urlConn.getOutputStream().write(GSON.toJson(streamProperties).getBytes(Charsets.UTF_8));
  Assert.assertEquals(HttpResponseStatus.BAD_REQUEST.getCode(), urlConn.getResponseCode());
  urlConn.disconnect();
  // put a config with a format with a bad format class
  urlConn = openURL(createPropertiesURL("stream_badconf"), HttpMethod.PUT);
  urlConn.setDoOutput(true);
  formatSpec = new FormatSpecification("gibberish", null, null);
  streamProperties = new StreamProperties(2L, formatSpec, 20);
  urlConn.getOutputStream().write(GSON.toJson(streamProperties).getBytes(Charsets.UTF_8));
  Assert.assertEquals(HttpResponseStatus.BAD_REQUEST.getCode(), urlConn.getResponseCode());
  urlConn.disconnect();
  // put a config with an incompatible format and schema
  urlConn = openURL(createPropertiesURL("stream_badconf"), HttpMethod.PUT);
  urlConn.setDoOutput(true);
  Schema schema = Schema.recordOf("event", Schema.Field.of("col", Schema.of(Schema.Type.DOUBLE)));
  formatSpec = new FormatSpecification(TextRecordFormat.class.getCanonicalName(), schema, null);
  streamProperties = new StreamProperties(2L, formatSpec, 20);
  urlConn.getOutputStream().write(GSON.toJson(streamProperties).getBytes(Charsets.UTF_8));
  Assert.assertEquals(HttpResponseStatus.BAD_REQUEST.getCode(), urlConn.getResponseCode());
  urlConn.disconnect();
  // put a config with a bad threshold
  urlConn = openURL(createPropertiesURL("stream_badconf"), HttpMethod.PUT);
  urlConn.setDoOutput(true);
  streamProperties = new StreamProperties(2L, null, -20);
  urlConn.getOutputStream().write(GSON.toJson(streamProperties).getBytes(Charsets.UTF_8));
  Assert.assertEquals(HttpResponseStatus.BAD_REQUEST.getCode(), urlConn.getResponseCode());
  urlConn.disconnect();
}
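Taken together, these cases outline the validation the properties endpoint is expected to perform before accepting a config. A hedged sketch of those checks follows; the method name and messages are illustrative, and the real validation lives in the stream handler and may be structured differently:

// Illustrative validation sketch; names and messages are assumptions.
void validateProperties(StreamProperties props) {
  if (props.getTTL() != null && props.getTTL() < 0) {
    throw new IllegalArgumentException("TTL must be non-negative");
  }
  if (props.getNotificationThresholdMB() != null && props.getNotificationThresholdMB() < 0) {
    throw new IllegalArgumentException("Notification threshold must be non-negative");
  }
  FormatSpecification format = props.getFormat();
  if (format != null) {
    if (format.getName() == null) {
      throw new IllegalArgumentException("A format must specify a format name or class");
    }
    // The handler must also be able to instantiate the named format and verify that the
    // given schema is supported by it (e.g. TextRecordFormat only accepts a single string
    // column); otherwise it responds with 400 Bad Request, as the test expects.
  }
}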
Use of co.cask.cdap.api.data.format.FormatSpecification in project cdap by caskdata.
From the class StreamHandlerTest, method testStreamInfo:
@Test
public void testStreamInfo() throws Exception {
  // Now, create the new stream.
  HttpURLConnection urlConn = openURL(createURL("streams/stream_info"), HttpMethod.PUT);
  Assert.assertEquals(HttpResponseStatus.OK.getCode(), urlConn.getResponseCode());
  urlConn.disconnect();
  // put a new config
  urlConn = openURL(createPropertiesURL("stream_info"), HttpMethod.PUT);
  urlConn.setDoOutput(true);
  Schema schema = Schema.recordOf("event", Schema.Field.of("purchase", Schema.of(Schema.Type.STRING)));
  FormatSpecification formatSpecification = new FormatSpecification(TextRecordFormat.class.getCanonicalName(), schema,
                                                                    ImmutableMap.of(TextRecordFormat.CHARSET, "utf8"));
  StreamProperties streamProperties = new StreamProperties(2L, formatSpecification, 20);
  urlConn.getOutputStream().write(GSON.toJson(streamProperties).getBytes(Charsets.UTF_8));
  Assert.assertEquals(HttpResponseStatus.OK.getCode(), urlConn.getResponseCode());
  urlConn.disconnect();
  // test the config ttl by calling info
  urlConn = openURL(createStreamInfoURL("stream_info"), HttpMethod.GET);
  Assert.assertEquals(HttpResponseStatus.OK.getCode(), urlConn.getResponseCode());
  StreamProperties actual = GSON.fromJson(new String(ByteStreams.toByteArray(urlConn.getInputStream()), Charsets.UTF_8),
                                          StreamProperties.class);
  urlConn.disconnect();
  Assert.assertEquals(streamProperties, actual);
}
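Once the info call returns, the persisted FormatSpecification can be read back off the deserialized properties. A small usage sketch, with variable names chosen for illustration:

// Reading the persisted format back out of the response above.
FormatSpecification persisted = actual.getFormat();
String formatName = persisted.getName();                 // TextRecordFormat's class name
Schema persistedSchema = persisted.getSchema();          // the "event" record schema PUT above
Map<String, String> settings = persisted.getSettings();  // e.g. {charset=utf8}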
Use of co.cask.cdap.api.data.format.FormatSpecification in project cdap by caskdata.
From the class HiveExploreServiceStreamTest, method testAvroFormattedStream:
@Test
public void testAvroFormattedStream() throws Exception {
  StreamId streamId = NAMESPACE_ID.stream("avroStream");
  createStream(streamId);
  try {
    Schema schema = Schema.recordOf("purchase",
                                    Schema.Field.of("user", Schema.of(Schema.Type.STRING)),
                                    Schema.Field.of("num", Schema.of(Schema.Type.INT)),
                                    Schema.Field.of("price", Schema.of(Schema.Type.DOUBLE)));
    FormatSpecification formatSpecification = new FormatSpecification(Formats.AVRO, schema,
                                                                      Collections.<String, String>emptyMap());
    StreamProperties properties = new StreamProperties(Long.MAX_VALUE, formatSpecification, 1000);
    setStreamProperties(NAMESPACE_ID.getNamespace(), "avroStream", properties);
    // our schemas are compatible
    org.apache.avro.Schema avroSchema = new org.apache.avro.Schema.Parser().parse(schema.toString());
    sendStreamEvent(streamId, createAvroEvent(avroSchema, "userX", 5, 3.14));
    sendStreamEvent(streamId, createAvroEvent(avroSchema, "userX", 10, 2.34));
    sendStreamEvent(streamId, createAvroEvent(avroSchema, "userY", 1, 1.23));
    sendStreamEvent(streamId, createAvroEvent(avroSchema, "userZ", 50, 45.67));
    sendStreamEvent(streamId, createAvroEvent(avroSchema, "userZ", 100, 98.76));
    Double xPrice = 5 * 3.14 + 10 * 2.34;
    Double yPrice = 1.23;
    Double zPrice = 50 * 45.67 + 100 * 98.76;
    ExploreExecutionResult result = exploreClient.submit(
        NAMESPACE_ID,
        "SELECT `user`, sum(num) as total_num, sum(price * num) as total_price FROM " + getTableName(streamId) +
        " GROUP BY `user` ORDER BY total_price DESC").get();
    Assert.assertTrue(result.hasNext());
    Assert.assertEquals(Lists.newArrayList(new ColumnDesc("user", "STRING", 1, null),
                                           new ColumnDesc("total_num", "BIGINT", 2, null),
                                           new ColumnDesc("total_price", "DOUBLE", 3, null)),
                        result.getResultSchema());
    // should get 3 rows
    // first row should be for userZ
    List<Object> rowColumns = result.next().getColumns();
    // toString b/c avro returns a utf8 object for strings
    Assert.assertEquals("userZ", rowColumns.get(0).toString());
    Assert.assertEquals(150L, rowColumns.get(1));
    Assert.assertTrue(Math.abs(zPrice - (Double) rowColumns.get(2)) < 0.0000001);
    // 2nd row, should be userX
    rowColumns = result.next().getColumns();
    Assert.assertEquals("userX", rowColumns.get(0).toString());
    Assert.assertEquals(15L, rowColumns.get(1));
    Assert.assertTrue(Math.abs(xPrice - (Double) rowColumns.get(2)) < 0.0000001);
    // 3rd row, should be userY
    rowColumns = result.next().getColumns();
    Assert.assertEquals("userY", rowColumns.get(0).toString());
    Assert.assertEquals(1L, rowColumns.get(1));
    Assert.assertTrue(Math.abs(yPrice - (Double) rowColumns.get(2)) < 0.0000001);
    // shouldn't be any more results
    Assert.assertFalse(result.hasNext());
  } finally {
    dropStream(streamId);
  }
}
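The helper createAvroEvent is defined elsewhere in the test class and its body is not shown here. A plausible sketch, assuming it Avro-binary-encodes a GenericRecord matching the stream's format specification (uses org.apache.avro.generic and org.apache.avro.io, plus java.io):

// Hedged sketch only -- the real helper may differ.
private byte[] createAvroEvent(org.apache.avro.Schema schema, String user, int num, double price) throws IOException {
  GenericRecord record = new GenericRecordBuilder(schema)
      .set("user", user)
      .set("num", num)
      .set("price", price)
      .build();
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
  new GenericDatumWriter<GenericRecord>(schema).write(record, encoder);
  encoder.flush();
  return out.toByteArray();
}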