Use of co.cask.cdap.api.dataset.DatasetSpecification in project cdap by caskdata.
The class HiveExploreStructuredRecordTestRun, method testMissingSchemaFails:
@Test(expected = IllegalArgumentException.class)
public void testMissingSchemaFails() throws Exception {
  DatasetId instanceId = NAMESPACE_ID.dataset("badtable");
  datasetFramework.addInstance("TableWrapper", instanceId, DatasetProperties.EMPTY);
  DatasetSpecification spec = datasetFramework.getDatasetSpec(instanceId);
  try {
    exploreTableManager.enableDataset(instanceId, spec, false);
  } finally {
    datasetFramework.deleteInstance(instanceId);
  }
}
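For contrast, the same call is expected to succeed once the instance actually carries a schema. The snippet below is a minimal sketch assuming the "TableWrapper" type honors a "schema" dataset property; the property key and the record shape are illustrative assumptions, not taken from the test above.

// Sketch under assumptions: a dataset instance created with an explicit schema
// property should be accepted by enableDataset() without an exception.
Schema schema = Schema.recordOf("rec",
  Schema.Field.of("body", Schema.of(Schema.Type.STRING)));
DatasetId goodId = NAMESPACE_ID.dataset("goodtable");
datasetFramework.addInstance("TableWrapper", goodId,
  DatasetProperties.builder().add("schema", schema.toString()).build());
DatasetSpecification goodSpec = datasetFramework.getDatasetSpec(goodId);
try {
  // With a schema present, no IllegalArgumentException is expected here.
  exploreTableManager.enableDataset(goodId, goodSpec, false);
} finally {
  datasetFramework.deleteInstance(goodId);
}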
Use of co.cask.cdap.api.dataset.DatasetSpecification in project cdap by caskdata.
The class FakeDatasetDefinition, method getDataset:
@Override
public FakeDataset getDataset(DatasetContext datasetContext, DatasetSpecification spec,
                              Map<String, String> arguments, ClassLoader classLoader) throws IOException {
  DatasetSpecification kvTableSpec = spec.getSpecification("objects");
  KeyValueTable table = tableDef.getDataset(datasetContext, kvTableSpec, arguments, classLoader);
  return new FakeDataset(spec.getName(), table);
}
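The "objects" sub-specification read here has to be embedded when the instance is configured. A minimal sketch of a matching configure method is shown below, assuming the definition extends AbstractDatasetDefinition and delegates to the same tableDef; the structure is illustrative, not copied from FakeDatasetDefinition.

@Override
public DatasetSpecification configure(String instanceName, DatasetProperties properties) {
  // Sketch under assumptions: embed the KeyValueTable spec under the name "objects"
  // so that getDataset() above can resolve it with spec.getSpecification("objects").
  return DatasetSpecification.builder(instanceName, getName())
    .properties(properties.getProperties())
    .datasets(tableDef.configure("objects", properties))
    .build();
}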
Use of co.cask.cdap.api.dataset.DatasetSpecification in project cdap by caskdata.
The class StandaloneDatasetDefinition, method getDataset:
@Override
public StandaloneDataset getDataset(DatasetContext datasetContext, DatasetSpecification spec,
                                    Map<String, String> arguments, ClassLoader classLoader) throws IOException {
  DatasetSpecification kvTableSpec = spec.getSpecification("objects");
  KeyValueTable table = tableDef.getDataset(datasetContext, kvTableSpec, arguments, classLoader);
  return new StandaloneDataset(spec.getName(), table);
}
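The same delegation pattern also covers the administrative side of the definition. Below is a hedged sketch of a getAdmin implementation following the standard DatasetDefinition contract; it is an assumption for illustration, not code copied from StandaloneDatasetDefinition.

@Override
public DatasetAdmin getAdmin(DatasetContext datasetContext, DatasetSpecification spec,
                             ClassLoader classLoader) throws IOException {
  // Sketch: delegate administration (create/drop/truncate) to the embedded
  // KeyValueTable, resolved through the same "objects" sub-specification.
  return tableDef.getAdmin(datasetContext, spec.getSpecification("objects"), classLoader);
}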
Use of co.cask.cdap.api.dataset.DatasetSpecification in project cdap by caskdata.
The class ExploreExecutorHttpHandler, method updateDataset:
/**
 * Update the ad-hoc exploration (Explore) parameters of a dataset instance.
 */
@POST
@Path("datasets/{dataset}/update")
@AuditPolicy(AuditDetail.REQUEST_BODY)
public void updateDataset(HttpRequest request, HttpResponder responder,
                          @PathParam("namespace-id") String namespace,
                          @PathParam("dataset") String datasetName) throws BadRequestException {
  final DatasetId datasetId = new DatasetId(namespace, datasetName);
  try {
    UpdateExploreParameters params = readUpdateParameters(request);
    final DatasetSpecification oldSpec = params.getOldSpec();
    final DatasetSpecification datasetSpec = params.getNewSpec();
    QueryHandle handle;
    if (oldSpec.equals(datasetSpec)) {
      handle = QueryHandle.NO_OP;
    } else {
      handle = impersonator.doAs(datasetId, new Callable<QueryHandle>() {
        @Override
        public QueryHandle call() throws Exception {
          return exploreTableManager.updateDataset(datasetId, datasetSpec, oldSpec);
        }
      });
    }
    JsonObject json = new JsonObject();
    json.addProperty("handle", handle.getHandle());
    responder.sendJson(HttpResponseStatus.OK, json);
  } catch (IllegalArgumentException e) {
    responder.sendString(HttpResponseStatus.BAD_REQUEST, e.getMessage());
  } catch (ExploreException e) {
    responder.sendString(HttpResponseStatus.INTERNAL_SERVER_ERROR,
                         "Error updating explore on dataset " + datasetId);
  } catch (SQLException e) {
    responder.sendString(HttpResponseStatus.BAD_REQUEST,
                         "SQL exception while trying to update explore on dataset " + datasetId);
  } catch (UnsupportedTypeException e) {
    responder.sendString(HttpResponseStatus.BAD_REQUEST,
                         "Schema for dataset " + datasetId + " is not supported for exploration: " + e.getMessage());
  } catch (Throwable e) {
    LOG.error("Got exception:", e);
    responder.sendString(HttpResponseStatus.INTERNAL_SERVER_ERROR, e.getMessage());
  }
}
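The handler reads the old and new specifications from the request body via readUpdateParameters. The sketch below shows how a caller might assemble such a body with Gson; the field names "oldSpec" and "newSpec" are assumptions inferred from the getters above, not a documented wire format, and the full URL prefix is defined on the handler class and not shown here.

// Sketch only: oldSpec and newSpec (both DatasetSpecification) are assumed to be
// in scope on the calling side. The resulting payload is POSTed to the
// datasets/{dataset}/update endpoint shown above.
Gson gson = new Gson();
JsonObject body = new JsonObject();
body.add("oldSpec", gson.toJsonTree(oldSpec));
body.add("newSpec", gson.toJsonTree(newSpec));
String payload = gson.toJson(body);
// On success the handler responds with {"handle": "..."}; when the two specs are
// equal it short-circuits to QueryHandle.NO_OP instead of running an update.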
Use of co.cask.cdap.api.dataset.DatasetSpecification in project cdap by caskdata.
The class DatasetAdminOpHTTPHandler, method drop:
@POST
@Path("/data/datasets/{name}/admin/drop")
public void drop(HttpRequest request, HttpResponder responder,
                 @PathParam("namespace-id") String namespaceId,
                 @PathParam("name") String instanceName) throws Exception {
  propagateUserId(request);
  InternalDatasetDropParams params =
    GSON.fromJson(request.getContent().toString(Charsets.UTF_8), InternalDatasetDropParams.class);
  Preconditions.checkArgument(params.getInstanceSpec() != null, "Missing required 'instanceSpec' parameter.");
  Preconditions.checkArgument(params.getTypeMeta() != null, "Missing required 'typeMeta' parameter.");
  DatasetSpecification spec = params.getInstanceSpec();
  DatasetTypeMeta typeMeta = params.getTypeMeta();
  try {
    datasetAdminService.drop(new DatasetId(namespaceId, instanceName), typeMeta, spec);
    responder.sendJson(HttpResponseStatus.OK, spec);
  } catch (BadRequestException e) {
    responder.sendString(HttpResponseStatus.BAD_REQUEST, e.getMessage());
  }
}
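The drop call expects both the instance specification and the type metadata in its JSON body. Below is a hedged sketch of how a caller could build that body with Gson; the field names "instanceSpec" and "typeMeta" are inferred from the getters above and should be treated as assumptions.

// Sketch only: spec (DatasetSpecification) and typeMeta (DatasetTypeMeta) are
// assumed to be in scope on the calling side. The payload is POSTed to the
// /data/datasets/{name}/admin/drop endpoint shown above.
Gson gson = new Gson();
JsonObject dropBody = new JsonObject();
dropBody.add("instanceSpec", gson.toJsonTree(spec));
dropBody.add("typeMeta", gson.toJsonTree(typeMeta));
String payload = gson.toJson(dropBody);
// On success the handler echoes the dropped DatasetSpecification back as JSON (HTTP 200).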