Use of co.cask.cdap.api.data.schema.UnsupportedTypeException in project cdap by caskdata.
The class ObjectMappedTableDefinition, method validateSchema:
private void validateSchema(Schema schema) throws UnsupportedTypeException {
  Schema.Type type = schema.isNullable() ? schema.getNonNullable().getType() : schema.getType();
  if (type != Schema.Type.RECORD) {
    throw new UnsupportedTypeException("Unsupported type " + type + ". Must be a record.");
  }
  for (Schema.Field field : schema.getFields()) {
    Schema fieldSchema = field.getSchema();
    Schema.Type fieldType = fieldSchema.isNullable()
      ? fieldSchema.getNonNullable().getType() : fieldSchema.getType();
    if (!fieldType.isSimpleType()) {
      throw new UnsupportedTypeException(String.format(
        "Field %s is of unsupported type %s. Must be a simple type " +
        "(boolean, int, long, float, double, string, bytes).",
        field.getName(), fieldType.toString()));
    }
  }
}
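To make the rule concrete, here is a minimal sketch (ours, not from the CDAP sources) of schemas that would pass and fail this validation when called from within ObjectMappedTableDefinition; Schema.recordOf, Schema.Field.of, Schema.of, Schema.nullableOf and Schema.arrayOf are part of the co.cask.cdap.api.data.schema.Schema API.

import co.cask.cdap.api.data.schema.Schema;
import co.cask.cdap.api.data.schema.UnsupportedTypeException;

void demonstrateValidation() throws UnsupportedTypeException {
  // Passes: a record whose fields are all simple (possibly nullable) types.
  Schema valid = Schema.recordOf(
    "employee",
    Schema.Field.of("name", Schema.of(Schema.Type.STRING)),
    Schema.Field.of("age", Schema.nullableOf(Schema.of(Schema.Type.INT))));
  validateSchema(valid);

  // Rejected: "tags" is an array, not one of the simple types
  // listed in the error message.
  Schema invalid = Schema.recordOf(
    "employee",
    Schema.Field.of("tags", Schema.arrayOf(Schema.of(Schema.Type.STRING))));
  validateSchema(invalid);  // throws UnsupportedTypeException
}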
Use of co.cask.cdap.api.data.schema.UnsupportedTypeException in project cdap by caskdata.
The class AllProgramsApp, method configure:
@Override
public void configure() {
  setName(NAME);
  setDescription("Application which has everything");
  addStream(new Stream(STREAM_NAME, "test stream"));
  createDataset(DATASET_NAME, KeyValueTable.class,
                DatasetProperties.builder().setDescription("test dataset").build());
  createDataset(DATASET_NAME2, KeyValueTable.class);
  createDataset(DATASET_NAME3, KeyValueTable.class);
  createDataset(DATASET_NAME4, FileSet.class, FileSetProperties.builder()
    .setEnableExploreOnCreate(true)
    .setExploreFormat("text")
    .setExploreFormatProperty("delimiter", "\n")
    .setExploreSchema("record STRING")
    .setDescription("fileSet")
    .build());
  createDataset(DATASET_NAME5, PartitionedFileSet.class, PartitionedFileSetProperties.builder()
    .setEnableExploreOnCreate(true)
    .setExploreFormat("text")
    .setExploreFormatProperty("delimiter", "\n")
    .setExploreSchema("record STRING")
    .setDescription("partitionedFileSet")
    .add("partitioning.fields.", "field1")
    .add("partitioning.field.field1", "STRING")
    .build());
  createDataset(DATASET_NAME6, FileSet.class, FileSetProperties.builder()
    .setEnableExploreOnCreate(false)
    .setDescription("fileSet")
    .build());
  createDataset(DATASET_NAME7, PartitionedFileSet.class, PartitionedFileSetProperties.builder()
    .setEnableExploreOnCreate(false)
    .setDescription("partitionedFileSet")
    .add("partitioning.fields.", "field1")
    .add("partitioning.field.field1", "STRING")
    .build());
  addFlow(new NoOpFlow());
  addMapReduce(new NoOpMR());
  addMapReduce(new NoOpMR2());
  addWorkflow(new NoOpWorkflow());
  addWorker(new NoOpWorker());
  addSpark(new NoOpSpark());
  addService(new NoOpService());
  schedule(buildSchedule(SCHEDULE_NAME, ProgramType.WORKFLOW, NoOpWorkflow.NAME)
             .setDescription(SCHEDULE_DESCRIPTION)
             .triggerByTime("* * * * *"));
  try {
    createDataset(DS_WITH_SCHEMA_NAME, ObjectMappedTable.class, ObjectMappedTableProperties.builder()
      .setType(DsSchema.class)
      .setDescription("test object mapped table")
      .build());
  } catch (UnsupportedTypeException e) {
    // ignore for test
  }
}
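The DsSchema type handed to setType must itself satisfy the record-of-simple-types rule enforced by validateSchema above, since ObjectMappedTable derives its row schema from the class by reflection. A minimal sketch of such a class (the field names are illustrative, not the actual CDAP test fixture):

// Hypothetical POJO for an ObjectMappedTable: every field is a simple type,
// so a flat record schema can be derived from it by reflection.
public static class DsSchema {
  String stringField;
  int intField;
}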
Use of co.cask.cdap.api.data.schema.UnsupportedTypeException in project cdap by caskdata.
The class ExploreExecutorHttpHandler, method updateDataset:
/**
 * Update the ad-hoc exploration parameters of a dataset instance.
 */
@POST
@Path("datasets/{dataset}/update")
@AuditPolicy(AuditDetail.REQUEST_BODY)
public void updateDataset(HttpRequest request, HttpResponder responder,
                          @PathParam("namespace-id") String namespace,
                          @PathParam("dataset") String datasetName) throws BadRequestException {
  final DatasetId datasetId = new DatasetId(namespace, datasetName);
  try {
    UpdateExploreParameters params = readUpdateParameters(request);
    final DatasetSpecification oldSpec = params.getOldSpec();
    final DatasetSpecification datasetSpec = params.getNewSpec();
    QueryHandle handle;
    if (oldSpec.equals(datasetSpec)) {
      handle = QueryHandle.NO_OP;
    } else {
      handle = impersonator.doAs(datasetId, new Callable<QueryHandle>() {
        @Override
        public QueryHandle call() throws Exception {
          return exploreTableManager.updateDataset(datasetId, datasetSpec, oldSpec);
        }
      });
    }
    JsonObject json = new JsonObject();
    json.addProperty("handle", handle.getHandle());
    responder.sendJson(HttpResponseStatus.OK, json);
  } catch (IllegalArgumentException e) {
    responder.sendString(HttpResponseStatus.BAD_REQUEST, e.getMessage());
  } catch (ExploreException e) {
    responder.sendString(HttpResponseStatus.INTERNAL_SERVER_ERROR,
                         "Error updating explore on dataset " + datasetId);
  } catch (SQLException e) {
    responder.sendString(HttpResponseStatus.BAD_REQUEST,
                         "SQL exception while trying to update explore on dataset " + datasetId);
  } catch (UnsupportedTypeException e) {
    responder.sendString(HttpResponseStatus.BAD_REQUEST,
                         "Schema for dataset " + datasetId + " is not supported for exploration: " + e.getMessage());
  } catch (Throwable e) {
    LOG.error("Got exception:", e);
    responder.sendString(HttpResponseStatus.INTERNAL_SERVER_ERROR, e.getMessage());
  }
}
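For illustration, a caller could exercise this endpoint with plain java.net. The host, port, path prefix, and the JSON field names expected by readUpdateParameters are all assumptions here, not taken from the handler:

import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public class UpdateExploreClient {
  public static void main(String[] args) throws Exception {
    // Hypothetical router address and API prefix; only the trailing
    // "datasets/{dataset}/update" segment comes from the @Path annotation above.
    URL url = new URL("http://localhost:11015/v3/namespaces/default/data/explore/"
                      + "datasets/myDataset/update");
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestMethod("POST");
    conn.setDoOutput(true);
    // Assumed request body shape: the old and new dataset specifications.
    String body = "{\"oldSpec\": {}, \"newSpec\": {}}";
    try (OutputStream os = conn.getOutputStream()) {
      os.write(body.getBytes(StandardCharsets.UTF_8));
    }
    // A successful update returns 200 with {"handle": "..."} as built above.
    System.out.println(conn.getResponseCode());
  }
}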
Use of co.cask.cdap.api.data.schema.UnsupportedTypeException in project cdap by caskdata.
The class ExploreTableManager, method hiveSchemaFor:
// TODO: replace with SchemaConverter.toHiveSchema when we tackle queries on Tables.
// Unfortunately, SchemaConverter is not compatible with this method. For example:
// - a byte becomes a tinyint here, but an int there
// - SchemaConverter sorts fields alphabetically, whereas this preserves their order
// - ExploreExtensiveSchemaTableTestRun would fail because of this
private String hiveSchemaFor(Type type) throws UnsupportedTypeException {
  // This call will make sure that the type is not recursive
  try {
    new ReflectionSchemaGenerator().generate(type, false);
  } catch (Exception e) {
    throw new UnsupportedTypeException("Unable to derive schema from " + type, e);
  }
  ObjectInspector objectInspector = ObjectInspectorFactory.getReflectionObjectInspector(type);
  if (!(objectInspector instanceof StructObjectInspector)) {
    throw new UnsupportedTypeException(String.format("Type must be a RECORD, but is %s",
                                                     type.getClass().getName()));
  }
  StructObjectInspector structObjectInspector = (StructObjectInspector) objectInspector;
  StringBuilder sb = new StringBuilder();
  boolean first = true;
  for (StructField structField : structObjectInspector.getAllStructFieldRefs()) {
    if (first) {
      first = false;
    } else {
      sb.append(", ");
    }
    String typeName = structField.getFieldObjectInspector().getTypeName();
    if (shouldEscapeColumns) {
      // a literal backtick (`) is represented as a double backtick (``)
      sb.append('`').append(structField.getFieldName().replace("`", "``")).append('`');
    } else {
      sb.append(structField.getFieldName());
    }
    sb.append(" ").append(typeName);
  }
  return sb.toString();
}
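As a hedged illustration of the output (the class and the expected string are ours, not from the CDAP tests), a flat record type maps to a comma-separated list of Hive column definitions in declaration order:

// Hypothetical record type passed to hiveSchemaFor.
public class Purchase {
  String customer;
  int quantity;
  long timestamp;
}

// hiveSchemaFor(Purchase.class) would then produce, preserving field order:
//   customer string, quantity int, timestamp bigint
// with each name wrapped in backticks when shouldEscapeColumns is true.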
Use of co.cask.cdap.api.data.schema.UnsupportedTypeException in project cdap by caskdata.
The class ExploreExecutorHttpHandler, method enableDataset:
private void enableDataset(HttpResponder responder, final DatasetId datasetId,
                           final DatasetSpecification datasetSpec, final boolean truncating) {
  LOG.debug("Enabling explore for dataset instance {}", datasetId);
  try {
    QueryHandle handle = impersonator.doAs(datasetId, new Callable<QueryHandle>() {
      @Override
      public QueryHandle call() throws Exception {
        return exploreTableManager.enableDataset(datasetId, datasetSpec, truncating);
      }
    });
    JsonObject json = new JsonObject();
    json.addProperty("handle", handle.getHandle());
    responder.sendJson(HttpResponseStatus.OK, json);
  } catch (IllegalArgumentException e) {
    responder.sendString(HttpResponseStatus.BAD_REQUEST, e.getMessage());
  } catch (ExploreException e) {
    responder.sendString(HttpResponseStatus.INTERNAL_SERVER_ERROR,
                         "Error enabling explore on dataset " + datasetId);
  } catch (SQLException e) {
    responder.sendString(HttpResponseStatus.BAD_REQUEST,
                         "SQL exception while trying to enable explore on dataset " + datasetId);
  } catch (UnsupportedTypeException e) {
    responder.sendString(HttpResponseStatus.BAD_REQUEST,
                         "Schema for dataset " + datasetId + " is not supported for exploration: " + e.getMessage());
  } catch (Throwable e) {
    LOG.error("Got exception:", e);
    responder.sendString(HttpResponseStatus.INTERNAL_SERVER_ERROR, e.getMessage());
  }
}
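On the receiving end, the handle can be pulled back out of the response body with Gson, the same library the handler uses to build it. A minimal sketch, assuming responseBody holds the JSON written above:

import com.google.gson.JsonObject;
import com.google.gson.JsonParser;

// Assumed to hold the body returned by enableDataset, e.g. {"handle":"..."}.
String responseBody = "{\"handle\":\"some-query-handle\"}";
JsonObject json = new JsonParser().parse(responseBody).getAsJsonObject();
String handle = json.get("handle").getAsString();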