Usage example of co.cask.cdap.proto.StreamProperties in the CDAP project (caskdata): class DescribeStreamCommand, method perform.
@Override
public void perform(Arguments arguments, PrintStream output) throws Exception {
// Resolve the requested stream within the current namespace and fetch its properties.
String streamName = arguments.get(ArgumentName.STREAM.toString());
StreamId streamId = cliConfig.getCurrentNamespace().stream(streamName);
StreamProperties config = streamClient.getConfig(streamId);
// Converts a StreamProperties object into one table row of display values.
RowMaker<StreamProperties> rowMaker = new RowMaker<StreamProperties>() {

    @Override
    public List<?> makeRow(StreamProperties properties) {
        FormatSpecification spec = properties.getFormat();
        return Lists.newArrayList(properties.getTTL(), spec.getName(), spec.getSchema().toString(), properties.getNotificationThresholdMB(), properties.getDescription());
    }
};
// Render the single-row properties table to the command output.
Table table = Table.builder().setHeader("ttl", "format", "schema", "notification.threshold.mb", "description").setRows(ImmutableList.of(config), rowMaker).build();
cliConfig.getTableRenderer().render(cliConfig, output, table);
}
Usage example of co.cask.cdap.proto.StreamProperties in the CDAP project (caskdata): class GetStreamStatsCommand, method perform.
// Computes and prints per-column statistics for events in a stream over a time
// range, by running an Explore (Hive) SQL query over the stream's table and
// feeding every user-column value through the matching StatsProcessors.
@Override
public void perform(Arguments arguments, PrintStream output) throws Exception {
long currentTime = System.currentTimeMillis();
StreamId streamId = cliConfig.getCurrentNamespace().stream(arguments.get(ArgumentName.STREAM.toString()));
// Clamp the optional limit argument to the range [1, MAX_LIMIT].
Integer limitInput = arguments.getIntOptional(ArgumentName.LIMIT.toString(), DEFAULT_LIMIT);
// we know we're passing a non-null default, so limitInput should never be null.
Preconditions.checkNotNull(limitInput);
int limit = Math.max(1, Math.min(MAX_LIMIT, limitInput));
// "min"/"max" defaults give the widest possible window relative to currentTime.
long startTime = getTimestamp(arguments.getOptional(ArgumentName.START_TIME.toString(), "min"), currentTime);
long endTime = getTimestamp(arguments.getOptional(ArgumentName.END_TIME.toString(), "max"), currentTime);
// hack to validate streamId: getConfig fails if the stream does not exist
StreamProperties config = streamClient.getConfig(streamId);
// "text"-formatted streams carry no structured schema, so no stats can be computed.
if (config.getFormat().getName().equals("text")) {
output.printf("No schema found for stream '%s'", streamId.getEntityName());
output.println();
return;
}
// build processorMap: Hive column name -> StatsProcessors applicable to that field's type
Map<String, Set<StatsProcessor>> processorMap = new HashMap<>();
Schema streamSchema = config.getFormat().getSchema();
for (Schema.Field field : streamSchema.getFields()) {
Schema fieldSchema = field.getSchema();
// NOTE(review): presumably maps the CDAP field name to its Hive column naming
// convention so query results can be matched back — confirm against the helper.
String hiveColumnName = cdapSchemaColumName2HiveColumnName(streamId, field.getName());
processorMap.put(hiveColumnName, getProcessorsForType(fieldSchema.getType(), fieldSchema.getUnionSchemas()));
}
// get a list of stream events and calculates various statistics about the events;
// blocks up to 1 minute waiting for the Explore query to produce results.
String timestampCol = getTimestampHiveColumn(streamId);
ListenableFuture<ExploreExecutionResult> resultsFuture = queryClient.execute(streamId.getParent(), "SELECT * FROM " + getHiveTableName(streamId) + " WHERE " + timestampCol + " BETWEEN " + startTime + " AND " + endTime + " LIMIT " + limit);
ExploreExecutionResult results = resultsFuture.get(1, TimeUnit.MINUTES);
List<ColumnDesc> schema = results.getResultSchema();
// apply StatsProcessors to every element in every row; processors accumulate
// state internally and are read out in the report loop below
int rows = 0;
while (results.hasNext()) {
rows++;
QueryResult row = results.next();
for (int i = 0; i < row.getColumns().size(); i++) {
Object column = row.getColumns().get(i);
ColumnDesc columnDesc = schema.get(i);
String columnName = columnDesc.getName();
// Skip system columns (e.g. the timestamp column); only user fields get stats.
if (isUserHiveColumn(streamId, columnName)) {
Set<StatsProcessor> processors = processorMap.get(columnName);
if (processors != null) {
for (StatsProcessor processor : processors) {
processor.process(column);
}
}
}
}
}
// print report: one section per user column, with each processor's summary
for (ColumnDesc columnDesc : schema) {
if (isUserHiveColumn(streamId, columnDesc.getName())) {
String truncatedColumnName = getTruncatedColumnName(streamId, columnDesc.getName());
output.printf("column: %s, type: %s", truncatedColumnName, columnDesc.getType());
output.println();
Set<StatsProcessor> processors = processorMap.get(columnDesc.getName());
if (processors != null && !processors.isEmpty()) {
for (StatsProcessor processor : processors) {
processor.printReport(output);
}
output.println();
} else {
// Field type had no applicable processors (or column not in the map).
output.println("No statistics available");
output.println();
}
}
}
output.printf("Analyzed %d Stream events in the time range [%d, %d]...", rows, startTime, endTime);
output.println();
output.println();
}
Usage example of co.cask.cdap.proto.StreamProperties in the CDAP project (caskdata): class GenerateClientUsageExample, method streamClient.
// Demonstrates the StreamClient API end to end: listing, creating, configuring,
// writing to, and reading from streams.
public void streamClient() throws Exception {
// Construct the client used to interact with CDAP
StreamClient streamClient = new StreamClient(clientConfig);
// Fetch the stream list (wildcard avoids the raw-type warning; the concrete
// element type is whatever StreamClient.list declares)
List<?> streams = streamClient.list(NamespaceId.DEFAULT);
// Create a stream, using the Purchase example
StreamId streamId = NamespaceId.DEFAULT.stream("purchases");
streamClient.create(streamId);
// Fetch a stream's properties
StreamProperties config = streamClient.getConfig(streamId);
// Send events to a stream
streamClient.sendEvent(streamId, "Tom bought 5 apples for $10");
// Read all events from a stream (results in events)
List<StreamEvent> events = Lists.newArrayList();
streamClient.getEvents(streamId, 0, Long.MAX_VALUE, Integer.MAX_VALUE, events);
// Read first 5 events from a stream (results in events)
events = Lists.newArrayList();
streamClient.getEvents(streamId, 0, Long.MAX_VALUE, 5, events);
// Read 2nd and 3rd events from a stream, after first calling getEvents;
// the end timestamp is exclusive, hence the +1
long startTime = events.get(1).getTimestamp();
long endTime = events.get(2).getTimestamp() + 1;
events.clear();
streamClient.getEvents(streamId, startTime, endTime, Integer.MAX_VALUE, events);
// Write asynchronously to a stream
streamId = NamespaceId.DEFAULT.stream("testAsync");
events = Lists.newArrayList();
streamClient.create(streamId);
// Send 10 async writes
int msgCount = 10;
for (int i = 0; i < msgCount; i++) {
streamClient.asyncSendEvent(streamId, "Testing " + i);
}
// Read them back; need to read it multiple times as the writes happen asynchronously
while (events.size() != msgCount) {
events.clear();
streamClient.getEvents(streamId, 0, Long.MAX_VALUE, msgCount, events);
}
// NOTE(review): the comment below contradicts the loop — it polls until events
// are NON-empty, so it re-confirms the events exist rather than checking there
// are no more. Kept as-is to preserve the example's behavior; verify intent.
// Check that there are no more events
events.clear();
while (events.isEmpty()) {
events.clear();
streamClient.getEvents(streamId, 0, Long.MAX_VALUE, msgCount, events);
}
// End write asynchronously
}
Usage example of co.cask.cdap.proto.StreamProperties in the CDAP project (caskdata): class StreamHandlerTest, method getOwner.
// Fetches the stream-info endpoint for the given stream and returns the owner
// principal from the deserialized StreamProperties. Asserts the HTTP call succeeds.
private String getOwner(String streamName) throws IOException, URISyntaxException {
HttpURLConnection urlConn = openURL(createStreamInfoURL(streamName), HttpMethod.GET);
try {
Assert.assertEquals(HttpResponseStatus.OK.getCode(), urlConn.getResponseCode());
StreamProperties properties = GSON.fromJson(new String(ByteStreams.toByteArray(urlConn.getInputStream()), Charsets.UTF_8), StreamProperties.class);
return properties.getOwnerPrincipal();
} finally {
// Release the connection even if the assertion or JSON parsing throws.
urlConn.disconnect();
}
}
Usage example of co.cask.cdap.proto.StreamProperties in the CDAP project (caskdata): class StreamHandlerTest, method testPutStreamConfigDefaults.
// Verifies that PUTting a stream config without a schema causes the server to
// fill in the default stream format, by reading the config back and comparing.
@Test
public void testPutStreamConfigDefaults() throws Exception {
// Now, create the new stream.
HttpURLConnection urlConn = openURL(createURL("streams/stream_defaults"), HttpMethod.PUT);
try {
Assert.assertEquals(HttpResponseStatus.OK.getCode(), urlConn.getResponseCode());
} finally {
// Always release the connection, even when the assertion fails.
urlConn.disconnect();
}
// put a new config; don't give the schema to make sure a default gets used
urlConn = openURL(createPropertiesURL("stream_defaults"), HttpMethod.PUT);
try {
urlConn.setDoOutput(true);
FormatSpecification formatSpecification = new FormatSpecification(Formats.TEXT, null, null);
StreamProperties streamProperties = new StreamProperties(2L, formatSpecification, 20);
urlConn.getOutputStream().write(GSON.toJson(streamProperties).getBytes(Charsets.UTF_8));
Assert.assertEquals(HttpResponseStatus.OK.getCode(), urlConn.getResponseCode());
} finally {
urlConn.disconnect();
}
// test the config ttl by calling info
StreamProperties actual;
urlConn = openURL(createStreamInfoURL("stream_defaults"), HttpMethod.GET);
try {
Assert.assertEquals(HttpResponseStatus.OK.getCode(), urlConn.getResponseCode());
actual = GSON.fromJson(new String(ByteStreams.toByteArray(urlConn.getInputStream()), Charsets.UTF_8), StreamProperties.class);
} finally {
urlConn.disconnect();
}
// TTL and threshold should round-trip; the format should be the server default.
StreamProperties expected = new StreamProperties(2L, StreamConfig.DEFAULT_STREAM_FORMAT, 20);
Assert.assertEquals(expected, actual);
}
End of aggregated usage examples.