use of com.google.common.reflect.TypeToken in project cdap by caskdata.
the class ASMDatumCodecTest method testEnum.
@Test
public void testEnum() throws UnsupportedTypeException, IOException {
  TypeToken<TestEnum> type = new TypeToken<TestEnum>() { };
  PipedOutputStream os = new PipedOutputStream();
  PipedInputStream is = new PipedInputStream(os);

  // Encode three enum values through the generated writer.
  DatumWriter<TestEnum> writer = getWriter(type);
  BinaryEncoder encoder = new BinaryEncoder(os);
  writer.encode(TestEnum.VALUE1, encoder);
  writer.encode(TestEnum.VALUE4, encoder);
  writer.encode(TestEnum.VALUE3, encoder);

  // Decode them with the reflection-based reader and verify the order is preserved.
  ReflectionDatumReader<TestEnum> reader = new ReflectionDatumReader<>(getSchema(type), type);
  TestEnum value = reader.read(new BinaryDecoder(is), getSchema(type));
  Assert.assertEquals(TestEnum.VALUE1, value);
  value = reader.read(new BinaryDecoder(is), getSchema(type));
  Assert.assertEquals(TestEnum.VALUE4, value);
  value = reader.read(new BinaryDecoder(is), getSchema(type));
  Assert.assertEquals(TestEnum.VALUE3, value);
}
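The test only references the constants VALUE1, VALUE3 and VALUE4, so the enum it serializes must declare at least those. A minimal sketch of such an enum; the actual TestEnum in the cdap test sources may differ:

// Hypothetical stand-in for the enum the test serializes. VALUE2 is only a guess
// at the gap in the numbering; the real TestEnum may declare different constants.
public enum TestEnum {
  VALUE1, VALUE2, VALUE3, VALUE4
}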
use of com.google.common.reflect.TypeToken in project cdap by caskdata.
the class ASMDatumCodecTest method testPrimitiveArray.
@Test
public void testPrimitiveArray() throws IOException, UnsupportedTypeException {
  TypeToken<int[]> type = new TypeToken<int[]>() { };
  PipedOutputStream os = new PipedOutputStream();
  PipedInputStream is = new PipedInputStream(os);

  // Encode a primitive int[] and read it back, verifying element-by-element equality.
  int[] writeValue = { 1, 2, 3, 4, -5, -6, -7, -8 };
  DatumWriter<int[]> writer = getWriter(type);
  writer.encode(writeValue, new BinaryEncoder(os));
  ReflectionDatumReader<int[]> reader = new ReflectionDatumReader<>(getSchema(type), type);
  int[] value = reader.read(new BinaryDecoder(is), getSchema(type));
  Assert.assertArrayEquals(writeValue, value);
}
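All of these codec tests share the same single-threaded piped-stream plumbing: whatever the writer encodes sits in the pipe's internal buffer (1024 bytes by default) until the reader drains it. A minimal JDK-only sketch of that pattern, with illustrative data and class name:

import java.io.IOException;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;

public class PipedRoundTrip {
  public static void main(String[] args) throws IOException {
    PipedOutputStream os = new PipedOutputStream();
    PipedInputStream is = new PipedInputStream(os); // connects the pair

    // Single-threaded use only works while the written bytes fit in the
    // pipe's internal buffer (1024 bytes unless a larger size is requested).
    os.write(new byte[] { 1, 2, 3 });

    byte[] buf = new byte[3];
    int n = is.read(buf); // reads back the three bytes just written
    System.out.println(n + " bytes read");
  }
}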
use of com.google.common.reflect.TypeToken in project cdap by caskdata.
the class ASMDatumCodecTest method testTree.
@Test
public void testTree() throws IOException, UnsupportedTypeException {
  TypeToken<Node> type = new TypeToken<Node>() { };
  PipedOutputStream os = new PipedOutputStream();
  PipedInputStream is = new PipedInputStream(os);

  // Build a small binary tree, encode it, and verify the decoded tree is structurally equal.
  DatumWriter<Node> writer = getWriter(type);
  Node root = new Node((short) 1,
                       new Node((short) 2, null, new Node((short) 3, null, null)),
                       new Node((short) 4, new Node((short) 5, null, null), null));
  writer.encode(root, new BinaryEncoder(os));
  ReflectionDatumReader<Node> reader = new ReflectionDatumReader<>(getSchema(type), type);
  Node value = reader.read(new BinaryDecoder(is), getSchema(type));
  Assert.assertEquals(root, value);
}
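For the final assertEquals to pass, the recursive Node type must expose a (short, Node, Node) constructor and structural equals()/hashCode(). A hypothetical sketch of such a class; the real Node used by ASMDatumCodecTest may differ in detail:

// Illustrative only: the actual Node class in the cdap test sources may differ,
// but it must have this constructor shape and value-based equality.
public final class Node {
  private final short data;
  private final Node left;
  private final Node right;

  public Node(short data, Node left, Node right) {
    this.data = data;
    this.left = left;
    this.right = right;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (!(o instanceof Node)) {
      return false;
    }
    Node other = (Node) o;
    return data == other.data
      && java.util.Objects.equals(left, other.left)
      && java.util.Objects.equals(right, other.right);
  }

  @Override
  public int hashCode() {
    return java.util.Objects.hash(data, left, right);
  }
}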
use of com.google.common.reflect.TypeToken in project cdap by caskdata.
the class LogAnalysisAppTest method test.
@Test
public void test() throws Exception {
  // Deploy the app
  ApplicationManager appManager = deployApplication(LogAnalysisApp.class);

  // Send log events to the stream
  StreamManager streamManager = getStreamManager(LogAnalysisApp.LOG_STREAM);
  streamManager.send(LOG_1);
  streamManager.send(LOG_2);
  streamManager.send(LOG_3);

  // Run the Spark program
  SparkManager sparkManager =
    appManager.getSparkManager(LogAnalysisApp.ResponseCounterSpark.class.getSimpleName()).start();
  sparkManager.waitForRun(ProgramRunStatus.COMPLETED, 60, TimeUnit.SECONDS);

  // Run the MapReduce job
  MapReduceManager mapReduceManager =
    appManager.getMapReduceManager(HitCounterProgram.class.getSimpleName()).start();
  mapReduceManager.waitForRun(ProgramRunStatus.COMPLETED, 3, TimeUnit.MINUTES);

  // Start and wait for the services
  ServiceManager hitCounterServiceManager = getServiceManager(appManager, LogAnalysisApp.HIT_COUNTER_SERVICE);
  ServiceManager responseCounterServiceManager = getServiceManager(appManager, LogAnalysisApp.RESPONSE_COUNTER_SERVICE);
  ServiceManager requestCounterServiceManager = getServiceManager(appManager, LogAnalysisApp.REQUEST_COUNTER_SERVICE);

  // Query for the hit count and verify it
  URL totalHitsURL = new URL(hitCounterServiceManager.getServiceURL(15, TimeUnit.SECONDS),
                             LogAnalysisApp.HitCounterServiceHandler.HIT_COUNTER_SERVICE_PATH);
  HttpResponse response = HttpRequests.execute(
    HttpRequest.post(totalHitsURL).withBody("{\"url\":\"" + "/home.html" + "\"}").build());
  Assert.assertEquals(HttpURLConnection.HTTP_OK, response.getResponseCode());
  Assert.assertEquals(TOTAL_HITS_VALUE, response.getResponseBodyAsString());

  // Query for the total responses for a response code and verify it
  URL responseCodeURL = new URL(responseCounterServiceManager.getServiceURL(15, TimeUnit.SECONDS),
                                LogAnalysisApp.ResponseCounterHandler.RESPONSE_COUNT_PATH + "/" + RESPONSE_CODE);
  HttpRequest request = HttpRequest.get(responseCodeURL).build();
  response = HttpRequests.execute(request);
  Assert.assertEquals(TOTAL_RESPONSE_VALUE, response.getResponseBodyAsString());

  // Query for the partitions in the request-count TPFS
  URL requestCountFilesetsURL = new URL(requestCounterServiceManager.getServiceURL(15, TimeUnit.SECONDS),
                                        LogAnalysisApp.RequestCounterHandler.REQUEST_COUNTER_PARTITIONS_PATH);
  request = HttpRequest.get(requestCountFilesetsURL).build();
  response = HttpRequests.execute(request);
  TreeSet<String> partitions = GSON.fromJson(response.getResponseBodyAsString(),
                                             new TypeToken<TreeSet<String>>() { }.getType());
  Assert.assertEquals(1, partitions.size());
  String partition = partitions.iterator().next();

  // Query for the contents of the files in this partition and verify them
  URL requestFilesetContentURL = new URL(requestCounterServiceManager.getServiceURL(15, TimeUnit.SECONDS),
                                         LogAnalysisApp.RequestCounterHandler.REQUEST_FILE_CONTENT_PATH);
  response = HttpRequests.execute(
    HttpRequest.post(requestFilesetContentURL)
      .withBody("{\"" + LogAnalysisApp.RequestCounterHandler.REQUEST_FILE_PATH_HANDLER_KEY + "\":\"" + partition + "\"}")
      .build());
  Assert.assertEquals(HttpURLConnection.HTTP_OK, response.getResponseCode());
  Map<String, Integer> responseMap = GSON.fromJson(response.getResponseBodyAsString(),
                                                   new TypeToken<Map<String, Integer>>() { }.getType());
  Assert.assertEquals(TPFS_RESULT, responseMap);
}
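The two GSON.fromJson calls above work because Gson accepts any java.lang.reflect.Type, so Guava's com.google.common.reflect.TypeToken can supply the full generic type via getType(). A standalone sketch of that pattern; the JSON literal and class name here are illustrative, not output from the app:

import com.google.common.reflect.TypeToken;
import com.google.gson.Gson;

import java.lang.reflect.Type;
import java.util.Map;

public class TypeTokenGsonExample {
  public static void main(String[] args) {
    Gson gson = new Gson();
    // Guava's TypeToken captures Map<String, Integer> at compile time;
    // getType() hands the resulting java.lang.reflect.Type to Gson.
    Type mapType = new TypeToken<Map<String, Integer>>() { }.getType();
    Map<String, Integer> counts = gson.fromJson("{\"/home.html\": 2, \"/index.html\": 1}", mapType);
    System.out.println(counts.get("/home.html")); // prints 2
  }
}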
use of com.google.common.reflect.TypeToken in project cdap by caskdata.
the class ScheduleClient method nextRuntimes.
/**
 * Get the next scheduled run time of the program. A program may contain multiple schedules.
 * This method returns the next scheduled runtimes for all the schedules. This method only takes
 * into account {@link Schedule}s based on time. Schedules based on data are ignored.
 *
 * @param workflow Id of the Workflow for which to fetch next run times.
 * @return list of Scheduled runtimes for the Workflow. Empty list if there are no schedules.
 */
public List<ScheduledRuntime> nextRuntimes(WorkflowId workflow)
  throws IOException, UnauthenticatedException, NotFoundException, UnauthorizedException {
  String path = String.format("apps/%s/workflows/%s/nextruntime", workflow.getApplication(), workflow.getProgram());
  URL url = config.resolveNamespacedURLV3(workflow.getNamespaceId(), path);
  HttpResponse response = restClient.execute(HttpMethod.GET, url, config.getAccessToken(),
                                             HttpURLConnection.HTTP_NOT_FOUND);
  if (HttpURLConnection.HTTP_NOT_FOUND == response.getResponseCode()) {
    throw new NotFoundException(workflow);
  }
  ObjectResponse<List<ScheduledRuntime>> objectResponse =
    ObjectResponse.fromJsonBody(response, new TypeToken<List<ScheduledRuntime>>() { }.getType(), GSON);
  return objectResponse.getResponseObject();
}
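A hedged usage sketch of nextRuntimes, assuming the usual co.cask.cdap client and proto package locations; the namespace, application, and workflow names are placeholders, and the caller is assumed to supply an already-configured ClientConfig:

import co.cask.cdap.client.ScheduleClient;
import co.cask.cdap.client.config.ClientConfig;
import co.cask.cdap.proto.ScheduledRuntime;
import co.cask.cdap.proto.id.NamespaceId;
import co.cask.cdap.proto.id.WorkflowId;

import java.util.List;

public class NextRuntimesExample {
  // Illustrative only: "default", "SomeApp" and "SomeWorkflow" are placeholders,
  // and clientConfig is assumed to point at a running CDAP instance.
  public static void printNextRuntimes(ClientConfig clientConfig) throws Exception {
    ScheduleClient scheduleClient = new ScheduleClient(clientConfig);
    WorkflowId workflow = new NamespaceId("default").app("SomeApp").workflow("SomeWorkflow");
    List<ScheduledRuntime> nextRuns = scheduleClient.nextRuntimes(workflow);
    for (ScheduledRuntime runtime : nextRuns) {
      // Each entry carries the next trigger time of one time-based schedule.
      System.out.println(runtime.getTime());
    }
  }
}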