Use of co.cask.common.http.HttpRequest in project cdap by caskdata.
The class StreamClient, method setStreamProperties.
/**
 * Sets properties of a stream.
 *
 * @param stream ID of the stream
 * @param properties properties to set
 * @throws IOException if a network error occurred
 * @throws UnauthenticatedException if the client is not authenticated
 * @throws BadRequestException if the request was rejected by the server (for example, invalid properties)
 * @throws StreamNotFoundException if the stream was not found
 */
public void setStreamProperties(StreamId stream, StreamProperties properties)
  throws IOException, UnauthenticatedException, BadRequestException, StreamNotFoundException, UnauthorizedException {
  URL url = config.resolveNamespacedURLV3(stream.getParent(),
                                          String.format("streams/%s/properties", stream.getStream()));
  HttpRequest request = HttpRequest.put(url).withBody(GSON.toJson(properties)).build();
  HttpResponse response = restClient.execute(request, config.getAccessToken(),
                                             HttpURLConnection.HTTP_NOT_FOUND, HttpURLConnection.HTTP_BAD_REQUEST);
  if (response.getResponseCode() == HttpURLConnection.HTTP_BAD_REQUEST) {
    throw new BadRequestException("Bad request: " + response.getResponseBodyAsString());
  }
  if (response.getResponseCode() == HttpURLConnection.HTTP_NOT_FOUND) {
    throw new StreamNotFoundException(stream);
  }
}
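A minimal caller-side sketch of this method. It assumes an already-configured StreamClient named streamClient and a StreamProperties instance built elsewhere; the StreamId constructor form, namespace, and stream name are illustrative assumptions, and the remaining declared exceptions are left to propagate to the caller.

// Assumption: 'streamClient' is a configured StreamClient and 'newProperties' is a
// StreamProperties instance built elsewhere; namespace and stream name are hypothetical.
StreamId stream = new StreamId("default", "purchases");
try {
  streamClient.setStreamProperties(stream, newProperties);
} catch (StreamNotFoundException e) {
  // The stream does not exist yet; create it before setting properties.
} catch (BadRequestException e) {
  // The server rejected the supplied properties.
}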
Use of co.cask.common.http.HttpRequest in project cdap by caskdata.
The class LogAnalysisAppTest, method test.
@Test
public void test() throws Exception {
  // Deploy the App
  ApplicationManager appManager = deployApplication(LogAnalysisApp.class);
  // Send stream events to the Stream
  StreamManager streamManager = getStreamManager(LogAnalysisApp.LOG_STREAM);
  streamManager.send(LOG_1);
  streamManager.send(LOG_2);
  streamManager.send(LOG_3);
  // Run the Spark program
  SparkManager sparkManager =
    appManager.getSparkManager(LogAnalysisApp.ResponseCounterSpark.class.getSimpleName()).start();
  sparkManager.waitForRun(ProgramRunStatus.COMPLETED, 60, TimeUnit.SECONDS);
  // Run the MapReduce job
  MapReduceManager mapReduceManager =
    appManager.getMapReduceManager(HitCounterProgram.class.getSimpleName()).start();
  mapReduceManager.waitForRun(ProgramRunStatus.COMPLETED, 3, TimeUnit.MINUTES);
  // Start and wait for the services
  ServiceManager hitCounterServiceManager = getServiceManager(appManager, LogAnalysisApp.HIT_COUNTER_SERVICE);
  ServiceManager responseCounterServiceManager = getServiceManager(appManager, LogAnalysisApp.RESPONSE_COUNTER_SERVICE);
  ServiceManager requestCounterServiceManager = getServiceManager(appManager, LogAnalysisApp.REQUEST_COUNTER_SERVICE);
  // Query for hit counts and verify them
  URL totalHitsURL = new URL(hitCounterServiceManager.getServiceURL(15, TimeUnit.SECONDS),
                             LogAnalysisApp.HitCounterServiceHandler.HIT_COUNTER_SERVICE_PATH);
  HttpResponse response = HttpRequests.execute(
    HttpRequest.post(totalHitsURL).withBody("{\"url\":\"" + "/home.html" + "\"}").build());
  Assert.assertEquals(HttpURLConnection.HTTP_OK, response.getResponseCode());
  Assert.assertEquals(TOTAL_HITS_VALUE, response.getResponseBodyAsString());
  // Query for the total responses for a response code and verify it
  URL responseCodeURL = new URL(responseCounterServiceManager.getServiceURL(15, TimeUnit.SECONDS),
                                LogAnalysisApp.ResponseCounterHandler.RESPONSE_COUNT_PATH + "/" + RESPONSE_CODE);
  HttpRequest request = HttpRequest.get(responseCodeURL).build();
  response = HttpRequests.execute(request);
  Assert.assertEquals(TOTAL_RESPONSE_VALUE, response.getResponseBodyAsString());
  // Query for the partitions in the request count TPFS
  URL requestCountFilesetsURL = new URL(requestCounterServiceManager.getServiceURL(15, TimeUnit.SECONDS),
                                        LogAnalysisApp.RequestCounterHandler.REQUEST_COUNTER_PARTITIONS_PATH);
  request = HttpRequest.get(requestCountFilesetsURL).build();
  response = HttpRequests.execute(request);
  TreeSet<String> partitions = GSON.fromJson(response.getResponseBodyAsString(),
                                             new TypeToken<TreeSet<String>>() { }.getType());
  Assert.assertEquals(1, partitions.size());
  String partition = partitions.iterator().next();
  // Query for the contents of the files in this partition and verify them
  URL requestFilesetContentURL = new URL(requestCounterServiceManager.getServiceURL(15, TimeUnit.SECONDS),
                                         LogAnalysisApp.RequestCounterHandler.REQUEST_FILE_CONTENT_PATH);
  response = HttpRequests.execute(
    HttpRequest.post(requestFilesetContentURL)
      .withBody("{\"" + LogAnalysisApp.RequestCounterHandler.REQUEST_FILE_PATH_HANDLER_KEY + "\":\"" + partition + "\"}")
      .build());
  Assert.assertEquals(HttpURLConnection.HTTP_OK, response.getResponseCode());
  Map<String, Integer> responseMap = GSON.fromJson(response.getResponseBodyAsString(),
                                                   new TypeToken<Map<String, Integer>>() { }.getType());
  Assert.assertEquals(TPFS_RESULT, responseMap);
}
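The same build-execute-parse pattern recurs for every service call in this test. A condensed sketch of that pattern, where serviceURL and GSON are as in the test above, and the "status/summary" path and Map<String, Integer> response type are hypothetical:

// Build a request, execute it, check the status code, then parse the JSON body with Gson.
URL endpoint = new URL(serviceURL, "status/summary"); // hypothetical service path
HttpResponse resp = HttpRequests.execute(HttpRequest.get(endpoint).build());
Assert.assertEquals(HttpURLConnection.HTTP_OK, resp.getResponseCode());
Map<String, Integer> parsed = GSON.fromJson(resp.getResponseBodyAsString(),
                                            new TypeToken<Map<String, Integer>>() { }.getType());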
Use of co.cask.common.http.HttpRequest in project cdap by caskdata.
The class DecisionTreeRegressionAppTest, method test.
@Test
public void test() throws Exception {
  // Deploy the Application
  ApplicationManager appManager = deployApplication(DecisionTreeRegressionApp.class);
  // Start the Service
  ServiceManager serviceManager = appManager.getServiceManager(ModelDataService.SERVICE_NAME).start();
  serviceManager.waitForStatus(true, 30, 1);
  URL serviceURL = serviceManager.getServiceURL(15, TimeUnit.SECONDS);
  URL addDataURL = new URL(serviceURL, "labels");
  HttpRequest request = HttpRequest.builder(HttpMethod.PUT, addDataURL)
    .withBody(new InputSupplier<InputStream>() {
      @Override
      public InputStream getInput() throws IOException {
        return getClass().getClassLoader().getResourceAsStream("sample_libsvm_data.txt");
      }
    }).build();
  HttpResponse response = HttpRequests.execute(request);
  Assert.assertEquals(200, response.getResponseCode());
  // Start a Spark Program
  SparkManager sparkManager = appManager.getSparkManager(ModelTrainer.NAME).start();
  sparkManager.waitForRun(ProgramRunStatus.COMPLETED, 60, TimeUnit.SECONDS);
  // Check that there is a new model
  URL listModelsURL = new URL(serviceURL, "models");
  request = HttpRequest.builder(HttpMethod.GET, listModelsURL).build();
  response = HttpRequests.execute(request);
  Assert.assertEquals(200, response.getResponseCode());
  List<String> models = GSON.fromJson(response.getResponseBodyAsString(),
                                      new TypeToken<List<String>>() { }.getType());
  Assert.assertEquals(1, models.size());
  // Check that there is some model metadata
  String modelId = models.get(0);
  URL modelMetaURL = new URL(serviceURL, "models/" + modelId);
  request = HttpRequest.builder(HttpMethod.GET, modelMetaURL).build();
  response = HttpRequests.execute(request);
  Assert.assertEquals(200, response.getResponseCode());
  ModelMeta meta = GSON.fromJson(response.getResponseBodyAsString(), ModelMeta.class);
  Assert.assertNotNull(meta);
  Assert.assertEquals(0.7, meta.getTrainingPercentage(), 0.000001);
  Assert.assertEquals(692, meta.getNumFeatures());
  // Check that the corresponding model file exists
  DataSetManager<FileSet> modelFiles = getDataset(DecisionTreeRegressionApp.MODEL_DATASET);
  Assert.assertTrue(modelFiles.get().getBaseLocation().append(modelId).exists());
}
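The withBody(InputSupplier) form used above streams the request body instead of materializing it in memory. A small sketch of the same pattern uploading a local file, where the file path is hypothetical, serviceURL is the service URL obtained above, and java.io.File and java.io.FileInputStream are also imported:

// Stream a local file as the PUT body; the path is a hypothetical local copy of the data.
final File dataFile = new File("/tmp/sample_libsvm_data.txt");
HttpRequest upload = HttpRequest.builder(HttpMethod.PUT, new URL(serviceURL, "labels"))
  .withBody(new InputSupplier<InputStream>() {
    @Override
    public InputStream getInput() throws IOException {
      return new FileInputStream(dataFile);
    }
  }).build();
Assert.assertEquals(200, HttpRequests.execute(upload).getResponseCode());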
Use of co.cask.common.http.HttpRequest in project cdap by caskdata.
The class DatasetClient, method update.
/**
 * Updates the properties of a dataset.
 *
 * @param instance the dataset to update
 * @param properties properties to set
 * @throws NotFoundException if the dataset is not found
 * @throws IOException if a network error occurred
 * @throws UnauthenticatedException if the request is not authorized successfully in the gateway server
 * @throws ConflictException if the server responded with a conflict (HTTP 409)
 */
public void update(DatasetId instance, Map<String, String> properties)
  throws NotFoundException, IOException, UnauthenticatedException, ConflictException, UnauthorizedException {
  URL url = config.resolveNamespacedURLV3(instance.getParent(),
                                          String.format("data/datasets/%s/properties", instance.getDataset()));
  HttpRequest request = HttpRequest.put(url).withBody(GSON.toJson(properties)).build();
  HttpResponse response = restClient.execute(request, config.getAccessToken(),
                                             HttpURLConnection.HTTP_NOT_FOUND, HttpURLConnection.HTTP_CONFLICT);
  if (response.getResponseCode() == HttpURLConnection.HTTP_NOT_FOUND) {
    throw new NotFoundException(instance);
  } else if (response.getResponseCode() == HttpURLConnection.HTTP_CONFLICT) {
    throw new ConflictException(response.getResponseBodyAsString());
  }
}
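A minimal caller-side sketch, assuming an already-configured DatasetClient named datasetClient; the DatasetId constructor form, dataset name, and property key are illustrative assumptions, java.util.Collections is imported, and the remaining declared exceptions are left to propagate to the caller.

// Assumption: 'datasetClient' is a configured DatasetClient; names and property key are hypothetical.
DatasetId instance = new DatasetId("default", "purchaseHistory");
Map<String, String> newProperties = Collections.singletonMap("dataset.table.ttl", "86400");
try {
  datasetClient.update(instance, newProperties);
} catch (NotFoundException e) {
  // The dataset does not exist; create it instead of updating.
}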
Use of co.cask.common.http.HttpRequest in project cdap by caskdata.
The class DatasetClient, method create.
/**
 * Creates a dataset.
 *
 * @param instance ID of the dataset instance
 * @param properties properties of the dataset to create
 * @throws DatasetTypeNotFoundException if the desired dataset type was not found
 * @throws DatasetAlreadyExistsException if a dataset by the same name already exists
 * @throws IOException if a network error occurred
 * @throws UnauthenticatedException if the request is not authorized successfully in the gateway server
 */
public void create(DatasetId instance, DatasetInstanceConfiguration properties)
  throws DatasetTypeNotFoundException, DatasetAlreadyExistsException, IOException, UnauthenticatedException,
  UnauthorizedException {
  URL url = config.resolveNamespacedURLV3(instance.getParent(),
                                          String.format("data/datasets/%s", instance.getDataset()));
  HttpRequest request = HttpRequest.put(url).withBody(GSON.toJson(properties)).build();
  HttpResponse response = restClient.execute(request, config.getAccessToken(),
                                             HttpURLConnection.HTTP_NOT_FOUND, HttpURLConnection.HTTP_CONFLICT);
  if (response.getResponseCode() == HttpURLConnection.HTTP_NOT_FOUND) {
    throw new DatasetTypeNotFoundException(instance.getParent().datasetType(properties.getTypeName()));
  } else if (response.getResponseCode() == HttpURLConnection.HTTP_CONFLICT) {
    throw new DatasetAlreadyExistsException(instance);
  }
}
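A minimal caller-side sketch, assuming an already-configured DatasetClient named datasetClient; the DatasetId and two-argument DatasetInstanceConfiguration constructor forms, the dataset type name, and the dataset name are illustrative assumptions, and the remaining declared exceptions are left to propagate to the caller.

// Assumption: 'datasetClient' is a configured DatasetClient; type and dataset names are hypothetical.
DatasetId instance = new DatasetId("default", "myTable");
DatasetInstanceConfiguration creationProperties =
  new DatasetInstanceConfiguration("table", Collections.<String, String>emptyMap()); // assumed two-arg constructor
try {
  datasetClient.create(instance, creationProperties);
} catch (DatasetAlreadyExistsException e) {
  // A dataset with this name already exists; treat the create as idempotent.
}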