Use of io.cdap.cdap.api.data.schema.UnsupportedTypeException in project cdap by caskdata.
From the class ObjectMappedTableDefinition, method validateSchema.
/**
 * Verifies that the given schema describes a record whose fields are all simple
 * (possibly nullable) types.
 *
 * @param schema the schema to check; a nullable wrapper around a record is unwrapped first
 * @throws UnsupportedTypeException if the schema is not a record, or if any field is not a
 *   simple type (boolean, int, long, float, double, string, bytes)
 */
private void validateSchema(Schema schema) throws UnsupportedTypeException {
  // Unwrap a nullable union before looking at the top-level type.
  Schema.Type topLevelType = schema.isNullable() ? schema.getNonNullable().getType() : schema.getType();
  if (topLevelType != Schema.Type.RECORD) {
    throw new UnsupportedTypeException("Unsupported type " + topLevelType + ". Must be a record.");
  }
  for (Schema.Field recordField : schema.getFields()) {
    Schema memberSchema = recordField.getSchema();
    // Nullable fields are unions of null and the real type; inspect the non-null branch.
    Schema.Type memberType = memberSchema.isNullable()
      ? memberSchema.getNonNullable().getType()
      : memberSchema.getType();
    if (!memberType.isSimpleType()) {
      throw new UnsupportedTypeException(String.format(
        "Field %s is of unsupported type %s. Must be a simple type (boolean, int, long, float, double, string, bytes).",
        recordField.getName(), memberType.toString()));
    }
  }
}
Use of io.cdap.cdap.api.data.schema.UnsupportedTypeException in project cdap by caskdata.
From the class ObjectMappedTableDefinition, method configureSchema.
/**
 * Validates the object schema carried in the dataset properties and returns table
 * properties augmented with the full row schema (object fields plus the row key).
 *
 * @param properties dataset properties; must contain the object type, object schema,
 *   and the explore name and type of the row key
 * @return table properties containing the combined schema and row field name
 * @throws IllegalArgumentException if a required property is missing, the schema cannot
 *   be parsed, or the schema uses an unsupported type
 */
private DatasetProperties configureSchema(DatasetProperties properties) {
  Map<String, String> rawProps = properties.getProperties();
  Preconditions.checkArgument(rawProps.containsKey(ObjectMappedTableProperties.OBJECT_TYPE));
  // schema can normally be derived from the type. However, we cannot use the Type in this method because
  // this is called by the system, where the Type is often not available. for example, if somebody creates
  // an ObjectMappedTable<Purchase> where Purchase is a class internal to their app.
  // we require schema here because we want to validate it to make sure it is supported.
  Preconditions.checkArgument(rawProps.containsKey(ObjectMappedTableProperties.OBJECT_SCHEMA));
  Preconditions.checkArgument(rawProps.containsKey(ObjectMappedTableProperties.ROW_KEY_EXPLORE_NAME));
  Preconditions.checkArgument(rawProps.containsKey(ObjectMappedTableProperties.ROW_KEY_EXPLORE_TYPE));
  try {
    Schema objSchema = ObjectMappedTableProperties.getObjectSchema(rawProps);
    validateSchema(objSchema);
    String rowKeyName = ObjectMappedTableProperties.getRowKeyExploreName(rawProps);
    Schema.Type rowKeyType = ObjectMappedTableProperties.getRowKeyExploreType(rawProps);
    // Prepend the row key to the object schema so explore sees the complete row.
    Schema schemaWithKey = addKeyToSchema(objSchema, rowKeyName, rowKeyType);
    return TableProperties.builder()
      .setSchema(schemaWithKey)
      .setRowFieldName(rowKeyName)
      .addAll(properties.getProperties())
      .build();
  } catch (IOException e) {
    throw new IllegalArgumentException("Could not parse schema.", e);
  } catch (UnsupportedTypeException e) {
    throw new IllegalArgumentException("Schema is of an unsupported type.", e);
  }
}
Use of io.cdap.cdap.api.data.schema.UnsupportedTypeException in project cdap by caskdata.
From the class AvroRecordFormat, method validateSchema.
/**
 * Validates the desired schema by round-tripping it through Avro's parser: if the JSON
 * form is accepted as an Avro schema, both the Avro and CDAP schemas are retained.
 *
 * @param desiredSchema the schema requested for this format
 * @throws UnsupportedTypeException if the schema cannot be parsed as a valid Avro schema
 */
@Override
protected void validateSchema(Schema desiredSchema) throws UnsupportedTypeException {
  try {
    // rather than check for all inconsistencies, just try to read the schema string as an Avro schema.
    String schemaJson = desiredSchema.toString();
    avroFormatSchema = new org.apache.avro.Schema.Parser().parse(schemaJson);
    formatSchema = desiredSchema;
  } catch (SchemaParseException e) {
    throw new UnsupportedTypeException("Schema is not a valid avro schema.", e);
  } catch (Exception e) {
    throw new UnsupportedTypeException("Exception parsing schema as an avro schema.", e);
  }
}
Use of io.cdap.cdap.api.data.schema.UnsupportedTypeException in project cdap by caskdata.
From the class GrokRecordFormat, method validateSchema.
/**
 * Validates that the desired schema is a record of simple (possibly nullable) types.
 * Maps, records, unions, and enums are rejected; the one exception is the very last
 * field, which may be an array of strings.
 *
 * @param desiredSchema the schema requested for this format
 * @throws UnsupportedTypeException if any field violates the constraints above
 */
@Override
protected void validateSchema(Schema desiredSchema) throws UnsupportedTypeException {
  Iterator<Schema.Field> fieldIter = desiredSchema.getFields().iterator();
  while (fieldIter.hasNext()) {
    Schema.Field currentField = fieldIter.next();
    Schema fieldSchema = currentField.getSchema();
    // A field is acceptable outright if it is a simple type or a nullable simple type.
    boolean acceptable = fieldSchema.getType().isSimpleType() || fieldSchema.isNullableSimple();
    if (!acceptable) {
      // A string array is tolerated, but only in the terminal position.
      boolean lastField = !fieldIter.hasNext();
      if (!lastField || !isStringArray(fieldSchema)) {
        throw new UnsupportedTypeException("Field " + currentField.getName() + " is of invalid type.");
      }
    }
  }
}
Use of io.cdap.cdap.api.data.schema.UnsupportedTypeException in project cdap by caskdata.
the class ExploreTableManager method hiveSchemaFor.
// TODO: replace with SchemaConverter.toHiveSchema when we tackle queries on Tables.
// but unfortunately, SchemaConverter is not compatible with this, for example:
// - a byte becomes a tinyint here, but an int there
// - SchemaConverter sort fields alphabetically, whereas this preserves the order
// - ExploreExtensiveSchemaTableTestRun will fail because of this
/**
 * Builds a comma-separated Hive column definition string ("name type, ...") for the
 * given record type, preserving field order.
 *
 * @param type a Java type that must reflect to a Hive struct (i.e. a record)
 * @return the Hive column definitions derived from the type's fields
 * @throws UnsupportedTypeException if the type is recursive or is not a record
 */
private String hiveSchemaFor(Type type) throws UnsupportedTypeException {
  // This call will make sure that the type is not recursive
  try {
    new ReflectionSchemaGenerator().generate(type, false);
  } catch (Exception e) {
    throw new UnsupportedTypeException("Unable to derive schema from " + type, e);
  }
  ObjectInspector inspector = ObjectInspectorFactory.getReflectionObjectInspector(type);
  if (!(inspector instanceof StructObjectInspector)) {
    throw new UnsupportedTypeException(String.format("Type must be a RECORD, but is %s", type.getClass().getName()));
  }
  StructObjectInspector structInspector = (StructObjectInspector) inspector;
  StringBuilder columns = new StringBuilder();
  for (StructField fieldRef : structInspector.getAllStructFieldRefs()) {
    // Every iteration appends at least one character, so a non-empty buffer means
    // a previous column exists and needs a separator.
    if (columns.length() > 0) {
      columns.append(", ");
    }
    String hiveTypeName = fieldRef.getFieldObjectInspector().getTypeName();
    if (shouldEscapeColumns) {
      // a literal backtick(`) is represented as a double backtick(``)
      columns.append('`').append(fieldRef.getFieldName().replace("`", "``")).append('`');
    } else {
      columns.append(fieldRef.getFieldName());
    }
    columns.append(" ").append(hiveTypeName);
  }
  return columns.toString();
}
Aggregations