Search in sources :

Example 26 with UnsupportedTypeException

use of co.cask.cdap.api.data.schema.UnsupportedTypeException in project cdap by caskdata.

From the class ObjectMappedTableDefinition, the method validateSchema:

/**
 * Validates that the given schema is a record whose fields are all simple types.
 *
 * @param schema the schema to validate; may be a nullable wrapper around a record schema
 * @throws UnsupportedTypeException if the schema is not a record, or if any field is not a
 *         simple type (boolean, int, long, float, double, string, bytes)
 */
private void validateSchema(Schema schema) throws UnsupportedTypeException {
    // Unwrap a nullable (union-with-null) schema once, so that both the type check and the
    // field iteration operate on the actual record schema. The previous code called
    // schema.getFields() on the wrapper, which is null for a nullable record and would
    // cause a NullPointerException in the loop below.
    Schema recordSchema = schema.isNullable() ? schema.getNonNullable() : schema;
    if (recordSchema.getType() != Schema.Type.RECORD) {
        throw new UnsupportedTypeException("Unsupported type " + recordSchema.getType() + ". Must be a record.");
    }
    for (Schema.Field field : recordSchema.getFields()) {
        Schema fieldSchema = field.getSchema();
        // Fields themselves may be nullable; validate the underlying type.
        Schema.Type fieldType = fieldSchema.isNullable() ? fieldSchema.getNonNullable().getType() : fieldSchema.getType();
        if (!fieldType.isSimpleType()) {
            throw new UnsupportedTypeException(String.format(
                "Field %s is of unsupported type %s. Must be a simple type (boolean, int, long, float, double, string, bytes).",
                field.getName(), fieldType.toString()));
        }
    }
}
Also used : Schema(co.cask.cdap.api.data.schema.Schema) UnsupportedTypeException(co.cask.cdap.api.data.schema.UnsupportedTypeException)

Example 27 with UnsupportedTypeException

use of co.cask.cdap.api.data.schema.UnsupportedTypeException in project cdap by caskdata.

From the class AllProgramsApp, the method configure:

/**
 * Configures this test application: one stream, seven datasets of assorted types
 * (key-value tables, file sets, partitioned file sets), one program of every type
 * (flow, two MapReduce jobs, workflow, worker, Spark, service), a time-triggered
 * schedule for the workflow, and one explore-enabled object-mapped table.
 */
@Override
public void configure() {
    setName(NAME);
    setDescription("Application which has everything");
    addStream(new Stream(STREAM_NAME, "test stream"));
    // Key-value tables: one with an explicit description, two with default properties.
    createDataset(DATASET_NAME, KeyValueTable.class, DatasetProperties.builder().setDescription("test dataset").build());
    createDataset(DATASET_NAME2, KeyValueTable.class);
    createDataset(DATASET_NAME3, KeyValueTable.class);
    // Explore-enabled file set: newline-delimited text with a single "record STRING" column.
    createDataset(DATASET_NAME4, FileSet.class, FileSetProperties.builder().setEnableExploreOnCreate(true).setExploreFormat("text").setExploreFormatProperty("delimiter", "\n").setExploreSchema("record STRING").setDescription("fileSet").build());
    // Explore-enabled partitioned file set, partitioned on a single STRING field.
    // NOTE(review): the property key "partitioning.fields." ends with a dot — confirm that
    // trailing dot is the intended key and not a typo for "partitioning.fields".
    createDataset(DATASET_NAME5, PartitionedFileSet.class, PartitionedFileSetProperties.builder().setEnableExploreOnCreate(true).setExploreFormat("text").setExploreFormatProperty("delimiter", "\n").setExploreSchema("record STRING").setDescription("partitonedFileSet").add("partitioning.fields.", "field1").add("partitioning.field.field1", "STRING").build());
    // Same two dataset types again, but with explore disabled, to cover both paths.
    createDataset(DATASET_NAME6, FileSet.class, FileSetProperties.builder().setEnableExploreOnCreate(false).setDescription("fileSet").build());
    // NOTE(review): the description "partitonedFileSet" is misspelled; it is a runtime string
    // so it is left unchanged here — fix only together with any tests that assert on it.
    createDataset(DATASET_NAME7, PartitionedFileSet.class, PartitionedFileSetProperties.builder().setEnableExploreOnCreate(false).setDescription("partitonedFileSet").add("partitioning.fields.", "field1").add("partitioning.field.field1", "STRING").build());
    // One program of each supported program type.
    addFlow(new NoOpFlow());
    addMapReduce(new NoOpMR());
    addMapReduce(new NoOpMR2());
    addWorkflow(new NoOpWorkflow());
    addWorker(new NoOpWorker());
    addSpark(new NoOpSpark());
    addService(new NoOpService());
    // Cron trigger "* * * * *" fires the workflow every minute.
    schedule(buildSchedule(SCHEDULE_NAME, ProgramType.WORKFLOW, NoOpWorkflow.NAME).setDescription(SCHEDULE_DESCRIPTION).triggerByTime("* * * * *"));
    try {
        createDataset(DS_WITH_SCHEMA_NAME, ObjectMappedTable.class, ObjectMappedTableProperties.builder().setType(DsSchema.class).setDescription("test object mapped table").build());
    } catch (UnsupportedTypeException e) {
    // ignore for test
    }
}
Also used : UnsupportedTypeException(co.cask.cdap.api.data.schema.UnsupportedTypeException) Stream(co.cask.cdap.api.data.stream.Stream) InputStream(java.io.InputStream)

Example 28 with UnsupportedTypeException

use of co.cask.cdap.api.data.schema.UnsupportedTypeException in project cdap by caskdata.

From the class ExploreExecutorHttpHandler, the method updateDataset:

/**
 * Updates the explore (ad-hoc SQL) parameters of a dataset instance.
 * If the old and new specifications are identical, no work is submitted and a no-op
 * handle is returned; otherwise the table update runs impersonated as the dataset owner.
 * Responds with a JSON object containing the query handle, or an error status mapped
 * from the failure type.
 */
@POST
@Path("datasets/{dataset}/update")
@AuditPolicy(AuditDetail.REQUEST_BODY)
public void updateDataset(HttpRequest request, HttpResponder responder, @PathParam("namespace-id") String namespace, @PathParam("dataset") String datasetName) throws BadRequestException {
    final DatasetId datasetId = new DatasetId(namespace, datasetName);
    try {
        UpdateExploreParameters updateParams = readUpdateParameters(request);
        final DatasetSpecification previousSpec = updateParams.getOldSpec();
        final DatasetSpecification newSpec = updateParams.getNewSpec();
        QueryHandle queryHandle;
        if (previousSpec.equals(newSpec)) {
            // Nothing changed; skip submitting an update entirely.
            queryHandle = QueryHandle.NO_OP;
        } else {
            // Run the table update with the dataset owner's identity.
            queryHandle = impersonator.doAs(datasetId, new Callable<QueryHandle>() {

                @Override
                public QueryHandle call() throws Exception {
                    return exploreTableManager.updateDataset(datasetId, newSpec, previousSpec);
                }
            });
        }
        JsonObject result = new JsonObject();
        result.addProperty("handle", queryHandle.getHandle());
        responder.sendJson(HttpResponseStatus.OK, result);
    } catch (IllegalArgumentException e) {
        responder.sendString(HttpResponseStatus.BAD_REQUEST, e.getMessage());
    } catch (ExploreException e) {
        responder.sendString(HttpResponseStatus.INTERNAL_SERVER_ERROR, "Error updating explore on dataset " + datasetId);
    } catch (SQLException e) {
        responder.sendString(HttpResponseStatus.BAD_REQUEST, "SQL exception while trying to update explore on dataset " + datasetId);
    } catch (UnsupportedTypeException e) {
        responder.sendString(HttpResponseStatus.BAD_REQUEST, "Schema for dataset " + datasetId + " is not supported for exploration: " + e.getMessage());
    } catch (Throwable e) {
        LOG.error("Got exception:", e);
        responder.sendString(HttpResponseStatus.INTERNAL_SERVER_ERROR, e.getMessage());
    }
}
Also used : UpdateExploreParameters(co.cask.cdap.explore.client.UpdateExploreParameters) SQLException(java.sql.SQLException) DatasetSpecification(co.cask.cdap.api.dataset.DatasetSpecification) JsonObject(com.google.gson.JsonObject) UnsupportedTypeException(co.cask.cdap.api.data.schema.UnsupportedTypeException) QueryHandle(co.cask.cdap.proto.QueryHandle) Callable(java.util.concurrent.Callable) DatasetId(co.cask.cdap.proto.id.DatasetId) ExploreException(co.cask.cdap.explore.service.ExploreException) Path(javax.ws.rs.Path) AuditPolicy(co.cask.cdap.common.security.AuditPolicy) POST(javax.ws.rs.POST)

Example 29 with UnsupportedTypeException

use of co.cask.cdap.api.data.schema.UnsupportedTypeException in project cdap by caskdata.

From the class ExploreTableManager, the method hiveSchemaFor:

// TODO: replace with SchemaConverter.toHiveSchema when we tackle queries on Tables.
//       but unfortunately, SchemaConverter is not compatible with this, for example:
//       - a byte becomes a tinyint here, but an int there
//       - SchemaConverter sort fields alphabetically, whereas this preserves the order
//       - ExploreExtensiveSchemaTableTestRun will fail because of this
/**
 * Derives the Hive column definition string ("name type, name type, ...") for the given
 * Java type by reflecting on its structure, preserving the declared field order.
 *
 * @param type the Java type to derive a Hive schema for; must reflect to a struct (record)
 * @throws UnsupportedTypeException if the type is recursive, cannot be reflected into a
 *         schema, or does not map to a struct
 */
private String hiveSchemaFor(Type type) throws UnsupportedTypeException {
    // This call will make sure that the type is not recursive
    try {
        new ReflectionSchemaGenerator().generate(type, false);
    } catch (Exception e) {
        throw new UnsupportedTypeException("Unable to derive schema from " + type, e);
    }
    ObjectInspector objectInspector = ObjectInspectorFactory.getReflectionObjectInspector(type);
    if (!(objectInspector instanceof StructObjectInspector)) {
        throw new UnsupportedTypeException(String.format("Type must be a RECORD, but is %s", type.getClass().getName()));
    }
    StructObjectInspector structObjectInspector = (StructObjectInspector) objectInspector;
    StringBuilder sb = new StringBuilder();
    for (StructField structField : structObjectInspector.getAllStructFieldRefs()) {
        // Separator before every column except the first (replaces the manual boolean flag).
        if (sb.length() > 0) {
            sb.append(", ");
        }
        if (shouldEscapeColumns) {
            // a literal backtick(`) is represented as a double backtick(``)
            sb.append('`').append(structField.getFieldName().replace("`", "``")).append('`');
        } else {
            sb.append(structField.getFieldName());
        }
        // The Hive type name comes from the field's object inspector.
        sb.append(" ").append(structField.getFieldObjectInspector().getTypeName());
    }
    return sb.toString();
}
Also used : ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) StructField(org.apache.hadoop.hive.serde2.objectinspector.StructField) UnsupportedTypeException(co.cask.cdap.api.data.schema.UnsupportedTypeException) ReflectionSchemaGenerator(co.cask.cdap.internal.io.ReflectionSchemaGenerator) DatasetNotFoundException(co.cask.cdap.common.DatasetNotFoundException) SQLException(java.sql.SQLException) UnsupportedTypeException(co.cask.cdap.api.data.schema.UnsupportedTypeException) IOException(java.io.IOException) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector)

Example 30 with UnsupportedTypeException

use of co.cask.cdap.api.data.schema.UnsupportedTypeException in project cdap by caskdata.

From the class ExploreExecutorHttpHandler, the method enableDataset:

/**
 * Enables ad-hoc exploration of a dataset instance, running the operation impersonated
 * as the dataset owner. Responds with a JSON object containing the query handle of the
 * submitted operation, or an error status mapped from the failure type.
 */
private void enableDataset(HttpResponder responder, final DatasetId datasetId, final DatasetSpecification datasetSpec, final boolean truncating) {
    LOG.debug("Enabling explore for dataset instance {}", datasetId);
    try {
        // Submit the enable operation with the dataset owner's identity.
        QueryHandle queryHandle = impersonator.doAs(datasetId, new Callable<QueryHandle>() {

            @Override
            public QueryHandle call() throws Exception {
                return exploreTableManager.enableDataset(datasetId, datasetSpec, truncating);
            }
        });
        JsonObject result = new JsonObject();
        result.addProperty("handle", queryHandle.getHandle());
        responder.sendJson(HttpResponseStatus.OK, result);
    } catch (IllegalArgumentException e) {
        responder.sendString(HttpResponseStatus.BAD_REQUEST, e.getMessage());
    } catch (ExploreException e) {
        responder.sendString(HttpResponseStatus.INTERNAL_SERVER_ERROR, "Error enabling explore on dataset " + datasetId);
    } catch (SQLException e) {
        responder.sendString(HttpResponseStatus.BAD_REQUEST, "SQL exception while trying to enable explore on dataset " + datasetId);
    } catch (UnsupportedTypeException e) {
        responder.sendString(HttpResponseStatus.BAD_REQUEST, "Schema for dataset " + datasetId + " is not supported for exploration: " + e.getMessage());
    } catch (Throwable e) {
        LOG.error("Got exception:", e);
        responder.sendString(HttpResponseStatus.INTERNAL_SERVER_ERROR, e.getMessage());
    }
}
Also used : SQLException(java.sql.SQLException) JsonObject(com.google.gson.JsonObject) UnsupportedTypeException(co.cask.cdap.api.data.schema.UnsupportedTypeException) QueryHandle(co.cask.cdap.proto.QueryHandle) BadRequestException(co.cask.cdap.common.BadRequestException) ExploreException(co.cask.cdap.explore.service.ExploreException) SQLException(java.sql.SQLException) DatasetManagementException(co.cask.cdap.api.dataset.DatasetManagementException) JsonSyntaxException(com.google.gson.JsonSyntaxException) UnsupportedTypeException(co.cask.cdap.api.data.schema.UnsupportedTypeException) IOException(java.io.IOException) ExploreException(co.cask.cdap.explore.service.ExploreException)

Aggregations

UnsupportedTypeException (co.cask.cdap.api.data.schema.UnsupportedTypeException)30 Schema (co.cask.cdap.api.data.schema.Schema)11 Stream (co.cask.cdap.api.data.stream.Stream)10 IOException (java.io.IOException)9 FormatSpecification (co.cask.cdap.api.data.format.FormatSpecification)4 DatasetManagementException (co.cask.cdap.api.dataset.DatasetManagementException)3 BadRequestException (co.cask.cdap.common.BadRequestException)3 ExploreException (co.cask.cdap.explore.service.ExploreException)3 QueryHandle (co.cask.cdap.proto.QueryHandle)3 JsonObject (com.google.gson.JsonObject)3 SQLException (java.sql.SQLException)3 DatasetSpecification (co.cask.cdap.api.dataset.DatasetSpecification)2 PluginPropertyField (co.cask.cdap.api.plugin.PluginPropertyField)2 InvalidArtifactException (co.cask.cdap.common.InvalidArtifactException)2 AuditPolicy (co.cask.cdap.common.security.AuditPolicy)2 ContextManager (co.cask.cdap.hive.context.ContextManager)2 StreamId (co.cask.cdap.proto.id.StreamId)2 JsonSyntaxException (com.google.gson.JsonSyntaxException)2 InputStream (java.io.InputStream)2 InputStreamReader (java.io.InputStreamReader)2