Use of io.cdap.cdap.api.data.schema.UnsupportedTypeException in project cdap by cdapio.
The class AvroRecordFormat, method validateSchema.
@Override
protected void validateSchema(Schema desiredSchema) throws UnsupportedTypeException {
  try {
    // rather than check for all inconsistencies, just try to read the schema string as an Avro schema.
    avroFormatSchema = new org.apache.avro.Schema.Parser().parse(desiredSchema.toString());
    formatSchema = desiredSchema;
  } catch (SchemaParseException e) {
    throw new UnsupportedTypeException("Schema is not a valid avro schema.", e);
  } catch (Exception e) {
    throw new UnsupportedTypeException("Exception parsing schema as an avro schema.", e);
  }
}
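The method validates by delegation: if Avro's parser accepts the schema string, the schema is considered valid. A minimal, self-contained sketch of that strategy using plain Avro (no CDAP classes; the record schema shown is an illustrative assumption):

import org.apache.avro.SchemaParseException;

public class AvroSchemaCheck {

  // Returns true if Avro accepts the schema string, mirroring the
  // "just try to parse it" validation above.
  static boolean parses(String schemaJson) {
    try {
      new org.apache.avro.Schema.Parser().parse(schemaJson);
      return true;
    } catch (SchemaParseException e) {
      return false;
    }
  }

  public static void main(String[] args) {
    String valid = "{\"type\":\"record\",\"name\":\"event\","
        + "\"fields\":[{\"name\":\"body\",\"type\":\"string\"}]}";
    String invalid = "{\"type\":\"no-such-type\"}";
    System.out.println(parses(valid));   // true
    System.out.println(parses(invalid)); // false
  }
}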
Use of io.cdap.cdap.api.data.schema.UnsupportedTypeException in project cdap by cdapio.
The class DelimitedStringsRecordFormat, method validateSchema.
@Override
protected void validateSchema(Schema desiredSchema) throws UnsupportedTypeException {
  // a valid schema is a record of simple types. In other words, no maps, arrays, records, unions, or enums allowed.
  // if the mapping is null, the one exception is the very last field, which is allowed to be an array of simple types.
  // These types may be nullable, which is a union of a null and non-null type.
  Iterator<Schema.Field> fields = desiredSchema.getFields().iterator();
  // check that each field is a simple field, except for the very last field, which can be an array of simple types.
  while (fields.hasNext()) {
    Schema.Field field = fields.next();
    Schema schema = field.getSchema();
    // if we're not on the very last field, the field must be a simple type or a nullable simple type.
    boolean isSimple = schema.getType().isSimpleType();
    boolean isNullableSimple = schema.isNullableSimple();
    if (!isSimple && !isNullableSimple) {
      // if this is the very last field and a string array, it is valid. otherwise it is not.
      if (fields.hasNext() || !isStringArray(schema)) {
        throw new UnsupportedTypeException("Field " + field.getName() + " is of invalid type.");
      }
    }
  }
}
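To make the rule concrete, here are schemas the loop accepts and rejects, built with the io.cdap.cdap.api.data.schema.Schema factory methods (recordOf, Field.of, nullableOf, arrayOf); the record and field names are illustrative:

import io.cdap.cdap.api.data.schema.Schema;

public class DelimitedSchemaExamples {
  public static void main(String[] args) {
    // Accepted: only simple and nullable-simple fields.
    Schema flat = Schema.recordOf("rec",
        Schema.Field.of("id", Schema.of(Schema.Type.LONG)),
        Schema.Field.of("name", Schema.nullableOf(Schema.of(Schema.Type.STRING))));

    // Accepted: a string array is allowed, but only as the very last field.
    Schema trailingArray = Schema.recordOf("rec",
        Schema.Field.of("id", Schema.of(Schema.Type.LONG)),
        Schema.Field.of("rest", Schema.arrayOf(Schema.of(Schema.Type.STRING))));

    // Rejected: the array is not the last field, so validateSchema would throw
    // UnsupportedTypeException for the "rest" field.
    Schema badArrayPosition = Schema.recordOf("rec",
        Schema.Field.of("rest", Schema.arrayOf(Schema.of(Schema.Type.STRING))),
        Schema.Field.of("id", Schema.of(Schema.Type.LONG)));
  }
}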
Use of io.cdap.cdap.api.data.schema.UnsupportedTypeException in project cdap by cdapio.
The class ExploreTableManager, method hiveSchemaFor.
// TODO: replace with SchemaConverter.toHiveSchema when we tackle queries on Tables.
// Unfortunately, SchemaConverter is not compatible with this; for example:
// - a byte becomes a tinyint here, but an int there
// - SchemaConverter sorts fields alphabetically, whereas this preserves the order
// - ExploreExtensiveSchemaTableTestRun will fail because of this
private String hiveSchemaFor(Type type) throws UnsupportedTypeException {
  // This call will make sure that the type is not recursive
  try {
    new ReflectionSchemaGenerator().generate(type, false);
  } catch (Exception e) {
    throw new UnsupportedTypeException("Unable to derive schema from " + type, e);
  }
  ObjectInspector objectInspector = ObjectInspectorFactory.getReflectionObjectInspector(type);
  if (!(objectInspector instanceof StructObjectInspector)) {
    throw new UnsupportedTypeException(String.format("Type must be a RECORD, but is %s", type.getClass().getName()));
  }
  StructObjectInspector structObjectInspector = (StructObjectInspector) objectInspector;
  StringBuilder sb = new StringBuilder();
  boolean first = true;
  for (StructField structField : structObjectInspector.getAllStructFieldRefs()) {
    if (first) {
      first = false;
    } else {
      sb.append(", ");
    }
    ObjectInspector oi = structField.getFieldObjectInspector();
    String typeName = oi.getTypeName();
    if (shouldEscapeColumns) {
      // a literal backtick (`) is represented as a double backtick (``)
      sb.append('`').append(structField.getFieldName().replace("`", "``")).append('`');
    } else {
      sb.append(structField.getFieldName());
    }
    sb.append(" ").append(typeName);
  }
  return sb.toString();
}
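For intuition, here is a hypothetical record type and the column list hiveSchemaFor would emit for it. The POJO and the resulting type names are illustrative assumptions based on how Hive's reflection ObjectInspector maps Java primitives, not output taken from the CDAP sources:

// Hypothetical POJO passed as the Type argument.
public class Purchase {
  String customer;
  int quantity;
  long price;
}

// hiveSchemaFor(Purchase.class) would then return roughly:
//   customer string, quantity int, price bigint
// and with shouldEscapeColumns enabled:
//   `customer` string, `quantity` int, `price` bigint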
Use of io.cdap.cdap.api.data.schema.UnsupportedTypeException in project cdap by cdapio.
The class ExploreExecutorHttpHandler, method updateDataset.
/**
 * Update the ad-hoc exploration parameters of a dataset instance.
 */
@POST
@Path("datasets/{dataset}/update")
@AuditPolicy(AuditDetail.REQUEST_BODY)
public void updateDataset(FullHttpRequest request, HttpResponder responder,
                          @PathParam("namespace-id") String namespace,
                          @PathParam("dataset") String datasetName) throws BadRequestException {
  final DatasetId datasetId = new DatasetId(namespace, datasetName);
  try {
    UpdateExploreParameters params = readUpdateParameters(request);
    final DatasetSpecification oldSpec = params.getOldSpec();
    final DatasetSpecification datasetSpec = params.getNewSpec();
    QueryHandle handle;
    if (oldSpec.equals(datasetSpec)) {
      handle = QueryHandle.NO_OP;
    } else {
      handle = impersonator.doAs(datasetId, new Callable<QueryHandle>() {
        @Override
        public QueryHandle call() throws Exception {
          return exploreTableManager.updateDataset(datasetId, datasetSpec, oldSpec);
        }
      });
    }
    JsonObject json = new JsonObject();
    json.addProperty("handle", handle.getHandle());
    responder.sendJson(HttpResponseStatus.OK, json.toString());
  } catch (IllegalArgumentException e) {
    responder.sendString(HttpResponseStatus.BAD_REQUEST, e.getMessage());
  } catch (ExploreException e) {
    responder.sendString(HttpResponseStatus.INTERNAL_SERVER_ERROR, "Error updating explore on dataset " + datasetId);
  } catch (SQLException e) {
    responder.sendString(HttpResponseStatus.BAD_REQUEST, "SQL exception while trying to update explore on dataset " + datasetId);
  } catch (UnsupportedTypeException e) {
    responder.sendString(HttpResponseStatus.BAD_REQUEST, "Schema for dataset " + datasetId + " is not supported for exploration: " + e.getMessage());
  } catch (Throwable e) {
    LOG.error("Got exception:", e);
    responder.sendString(HttpResponseStatus.INTERNAL_SERVER_ERROR, e.getMessage());
  }
}
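On success the handler responds with a one-field JSON object carrying the query handle. A client-side sketch of reading that body with Gson (the handle value is a placeholder, not a real handle):

import com.google.gson.JsonObject;
import com.google.gson.JsonParser;

public class HandleResponse {
  public static void main(String[] args) {
    // Body as sent by responder.sendJson above; the handle is a placeholder.
    String body = "{\"handle\":\"some-query-handle\"}";
    JsonObject json = JsonParser.parseString(body).getAsJsonObject();
    System.out.println(json.get("handle").getAsString());
  }
}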
Use of io.cdap.cdap.api.data.schema.UnsupportedTypeException in project cdap by cdapio.
The class ExploreExecutorHttpHandler, method enableDataset.
private void enableDataset(HttpResponder responder, final DatasetId datasetId,
                           final DatasetSpecification datasetSpec, final boolean truncating) {
  LOG.debug("Enabling explore for dataset instance {}", datasetId);
  try {
    QueryHandle handle = impersonator.doAs(datasetId, new Callable<QueryHandle>() {
      @Override
      public QueryHandle call() throws Exception {
        return exploreTableManager.enableDataset(datasetId, datasetSpec, truncating);
      }
    });
    JsonObject json = new JsonObject();
    json.addProperty("handle", handle.getHandle());
    responder.sendJson(HttpResponseStatus.OK, json.toString());
  } catch (IllegalArgumentException e) {
    responder.sendString(HttpResponseStatus.BAD_REQUEST, e.getMessage());
  } catch (ExploreException e) {
    responder.sendString(HttpResponseStatus.INTERNAL_SERVER_ERROR, "Error enabling explore on dataset " + datasetId);
  } catch (SQLException e) {
    responder.sendString(HttpResponseStatus.BAD_REQUEST, "SQL exception while trying to enable explore on dataset " + datasetId);
  } catch (UnsupportedTypeException e) {
    responder.sendString(HttpResponseStatus.BAD_REQUEST, "Schema for dataset " + datasetId + " is not supported for exploration: " + e.getMessage());
  } catch (Throwable e) {
    LOG.error("Got exception:", e);
    responder.sendString(HttpResponseStatus.INTERNAL_SERVER_ERROR, e.getMessage());
  }
}
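Both updateDataset and enableDataset repeat the same catch chain; condensed into one mapping for reference, this is a restatement of the handlers above, not a utility that exists in CDAP:

import io.cdap.cdap.api.data.schema.UnsupportedTypeException;
import io.netty.handler.codec.http.HttpResponseStatus;
import java.sql.SQLException;

final class ExploreStatusMapping {
  // IllegalArgumentException, SQLException, UnsupportedTypeException -> 400:
  // the caller can fix the request body or the dataset schema.
  // ExploreException and any other Throwable -> 500: logged server-side,
  // nothing actionable for the caller.
  static HttpResponseStatus statusFor(Throwable t) {
    if (t instanceof IllegalArgumentException
        || t instanceof SQLException
        || t instanceof UnsupportedTypeException) {
      return HttpResponseStatus.BAD_REQUEST;
    }
    return HttpResponseStatus.INTERNAL_SERVER_ERROR;
  }
}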