Use of co.cask.cdap.api.data.schema.UnsupportedTypeException in project cdap by caskdata.
The class ExploreExecutorHttpHandler, method enableStream.
@POST
@Path("streams/{stream}/tables/{table}/enable")
@AuditPolicy(AuditDetail.REQUEST_BODY)
public void enableStream(HttpRequest request, HttpResponder responder,
                         @PathParam("namespace-id") String namespace,
                         @PathParam("stream") String streamName,
                         @PathParam("table") final String tableName) throws Exception {
  final StreamId streamId = new StreamId(namespace, streamName);
  try (Reader reader = new InputStreamReader(new ChannelBufferInputStream(request.getContent()))) {
    final FormatSpecification format = GSON.fromJson(reader, FormatSpecification.class);
    if (format == null) {
      throw new BadRequestException("Expected format in the body");
    }
    QueryHandle handle = impersonator.doAs(streamId, new Callable<QueryHandle>() {
      @Override
      public QueryHandle call() throws Exception {
        return exploreTableManager.enableStream(tableName, streamId, format);
      }
    });
    JsonObject json = new JsonObject();
    json.addProperty("handle", handle.getHandle());
    responder.sendJson(HttpResponseStatus.OK, json);
  } catch (UnsupportedTypeException e) {
    LOG.error("Exception while generating create statement for stream {}", streamName, e);
    responder.sendString(HttpResponseStatus.BAD_REQUEST, e.getMessage());
  }
}
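For reference, a minimal sketch of the request body this handler expects: a JSON-serialized FormatSpecification. The "csv" format name and the single-field schema below are illustrative assumptions, not taken from the handler; imports of FormatSpecification, Schema, Gson, and Collections are assumed.

// Build an example FormatSpecification and serialize it for the POST body.
FormatSpecification format = new FormatSpecification(
    "csv",  // format name; "csv" is an assumed example
    Schema.recordOf("event", Schema.Field.of("body", Schema.of(Schema.Type.STRING))),
    Collections.<String, String>emptyMap());  // no extra format settings
// Note: CDAP typically registers a Schema type adapter on its Gson instance;
// a plain Gson is used here only as a sketch.
String body = new Gson().toJson(format);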
Use of co.cask.cdap.api.data.schema.UnsupportedTypeException in project cdap by caskdata.
The class SparkPageRankApp, method configure.
@Override
public void configure() {
  setName("SparkPageRank");
  setDescription("Spark page rank application.");
  // Ingest data into the application via a stream
  addStream(new Stream(BACKLINK_URL_STREAM));
  // Run a Spark program on the acquired data
  addSpark(new PageRankSpark());
  // Run a MapReduce program on the data emitted by the Spark program
  addMapReduce(new RanksCounter());
  // Run the Spark program followed by the MapReduce program in a workflow
  addWorkflow(new PageRankWorkflow());
  // Service to retrieve the processed data
  addService(SERVICE_HANDLERS, new SparkPageRankServiceHandler());
  // Store input and processed data in ObjectStore datasets
  try {
    ObjectStores.createObjectStore(getConfigurer(), "ranks", Integer.class,
                                   DatasetProperties.builder().setDescription("Ranks Dataset").build());
    ObjectStores.createObjectStore(getConfigurer(), "rankscount", Integer.class,
                                   DatasetProperties.builder().setDescription("Ranks Count Dataset").build());
  } catch (UnsupportedTypeException e) {
    // This cannot happen, because Integer is a concrete class that schema generation supports.
    throw new RuntimeException(e);
  }
}
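To see why the catch block above is effectively unreachable, note that createObjectStore throws UnsupportedTypeException when a schema cannot be generated for the value type, for example an interface rather than a concrete class. A hedged sketch; the dataset name "bad" is hypothetical:

try {
  // Comparable is an interface, so schema generation is expected to fail here.
  ObjectStores.createObjectStore(getConfigurer(), "bad", Comparable.class, DatasetProperties.EMPTY);
} catch (UnsupportedTypeException e) {
  // Integer, used above, is a concrete class, so the sample app never reaches this path.
}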
Use of co.cask.cdap.api.data.schema.UnsupportedTypeException in project cdap by caskdata.
The class CombinedLogRecordFormat, method validateSchema.
@Override
protected void validateSchema(Schema desiredSchema) throws UnsupportedTypeException {
  // A valid schema is a record of simple types.
  Iterator<Schema.Field> fields = desiredSchema.getFields().iterator();
  while (fields.hasNext()) {
    Schema.Field field = fields.next();
    Schema schema = field.getSchema();
    boolean isSimple = schema.getType().isSimpleType();
    boolean isNullableSimple = schema.isNullableSimple();
    if (!isSimple && !isNullableSimple) {
      throw new UnsupportedTypeException("Field " + field.getName() + " is of invalid type.");
    }
  }
}
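A hedged illustration of what this validator accepts and rejects; the field names are made up for the example:

// Accepted: a record whose fields are all simple or nullable-simple types.
Schema valid = Schema.recordOf("access",
    Schema.Field.of("remoteHost", Schema.nullableOf(Schema.of(Schema.Type.STRING))),
    Schema.Field.of("status", Schema.of(Schema.Type.INT)));
// Rejected with UnsupportedTypeException: a map field is neither simple nor nullable-simple.
Schema invalid = Schema.recordOf("access",
    Schema.Field.of("headers",
        Schema.mapOf(Schema.of(Schema.Type.STRING), Schema.of(Schema.Type.STRING))));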
Use of co.cask.cdap.api.data.schema.UnsupportedTypeException in project cdap by caskdata.
The class GrokRecordFormat, method validateSchema.
@Override
protected void validateSchema(Schema desiredSchema) throws UnsupportedTypeException {
  // A valid schema is a record of simple types; no maps, arrays, records, unions, or enums are allowed.
  // The exception is the very last field, which is allowed to be an array of simple types.
  // These types may be nullable, which is a union of a null and a non-null type.
  Iterator<Schema.Field> fields = desiredSchema.getFields().iterator();
  // Check that each field is a simple type, except for the very last field,
  // which can be an array of simple types.
  while (fields.hasNext()) {
    Schema.Field field = fields.next();
    Schema schema = field.getSchema();
    // If we're not on the very last field, the field must be a simple type or a nullable simple type.
    boolean isSimple = schema.getType().isSimpleType();
    boolean isNullableSimple = schema.isNullableSimple();
    if (!isSimple && !isNullableSimple) {
      // If this is the very last field and a string array, it is valid; otherwise it is not.
      if (fields.hasNext() || !isStringArray(schema)) {
        throw new UnsupportedTypeException("Field " + field.getName() + " is of invalid type.");
      }
    }
  }
}
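Unlike CombinedLogRecordFormat above, this validator also permits the final field to be a string array. A hedged sketch with made-up field names:

Schema valid = Schema.recordOf("grok",
    Schema.Field.of("timestamp", Schema.of(Schema.Type.STRING)),
    // Allowed only because it is the last field and passes isStringArray.
    Schema.Field.of("remainder", Schema.arrayOf(Schema.of(Schema.Type.STRING))));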
Use of co.cask.cdap.api.data.schema.UnsupportedTypeException in project cdap by caskdata.
The class FlowletProgramRunner, method createProcessSpecification.
/**
 * Creates all {@link ProcessSpecification} for the process methods of the flowlet class.
 *
 * @param flowletContext The {@link BasicFlowletContext} of the flowlet.
 * @param flowletType Type of the flowlet class represented by {@link TypeToken}.
 * @param processMethodFactory A {@link ProcessMethodFactory} for creating {@link ProcessMethod}.
 * @param processSpecFactory A {@link ProcessSpecificationFactory} for creating {@link ProcessSpecification}.
 * @param result A {@link Collection} for storing newly created {@link ProcessSpecification}.
 * @return The same {@link Collection} as the {@code result} parameter.
 */
@SuppressWarnings("unchecked")
private <T extends Collection<ProcessSpecification<?>>> T createProcessSpecification(
    BasicFlowletContext flowletContext, TypeToken<? extends Flowlet> flowletType,
    ProcessMethodFactory processMethodFactory, ProcessSpecificationFactory processSpecFactory,
    T result) throws Exception {
  Set<FlowletMethod> seenMethods = Sets.newHashSet();
  // Walk up the hierarchy of the flowlet class to get all ProcessInput and Tick methods
  for (TypeToken<?> type : flowletType.getTypes().classes()) {
    if (type.getRawType().equals(Object.class)) {
      break;
    }
    // Extract all process and tick methods
    for (Method method : type.getRawType().getDeclaredMethods()) {
      if (method.isSynthetic() || method.isBridge()) {
        continue;
      }
      if (!seenMethods.add(FlowletMethod.create(method, flowletType.getType()))) {
        // The method was already seen on a subclass; skip the overridden version while walking
        // up the class hierarchy.
        continue;
      }
      ProcessInput processInputAnnotation = method.getAnnotation(ProcessInput.class);
      Tick tickAnnotation = method.getAnnotation(Tick.class);
      if (processInputAnnotation == null && tickAnnotation == null) {
        // Neither a process nor a tick method.
        continue;
      }
      int maxRetries = (tickAnnotation == null) ? processInputAnnotation.maxRetries() : tickAnnotation.maxRetries();
      ProcessMethod processMethod = processMethodFactory.create(method, maxRetries);
      Set<String> inputNames;
      Schema schema;
      TypeToken<?> dataType;
      ConsumerConfig consumerConfig;
      int batchSize = 1;
      if (tickAnnotation != null) {
        inputNames = ImmutableSet.of();
        consumerConfig = new ConsumerConfig(0, 0, 1, DequeueStrategy.FIFO, null);
        schema = Schema.of(Schema.Type.NULL);
        dataType = TypeToken.of(void.class);
      } else {
        inputNames = Sets.newHashSet(processInputAnnotation.value());
        if (inputNames.isEmpty()) {
          // If there is no input name, it would be ANY_INPUT
          inputNames.add(FlowletDefinition.ANY_INPUT);
        }
        // If in batch mode, generate the schema for the Iterator's parameter type
        dataType = flowletType.resolveType(method.getGenericParameterTypes()[0]);
        consumerConfig = getConsumerConfig(flowletContext, method);
        Integer processBatchSize = getBatchSize(method, flowletContext);
        if (processBatchSize != null) {
          if (dataType.getRawType().equals(Iterator.class)) {
            Preconditions.checkArgument(dataType.getType() instanceof ParameterizedType,
                                        "Only ParameterizedType is supported for batch Iterator.");
            dataType = flowletType.resolveType(((ParameterizedType) dataType.getType()).getActualTypeArguments()[0]);
          }
          batchSize = processBatchSize;
        }
        try {
          schema = schemaGenerator.generate(dataType.getType());
        } catch (UnsupportedTypeException e) {
          throw Throwables.propagate(e);
        }
      }
      ProcessSpecification processSpec = processSpecFactory.create(inputNames, schema, dataType, processMethod,
                                                                   consumerConfig, batchSize, tickAnnotation);
      // Add the processSpec if one was created
      if (processSpec != null) {
        result.add(processSpec);
      }
    }
  }
  Preconditions.checkArgument(!result.isEmpty(),
                              "No inputs found for flowlet '%s' of flow '%s' of application '%s' (%s)",
                              flowletContext.getFlowletId(), flowletContext.getFlowId(),
                              flowletContext.getApplicationId(), flowletType);
  return result;
}
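A hedged sketch of the kind of flowlet methods this reflection walks over: one batched @ProcessInput method and one @Tick method. The class and method names are hypothetical; imports from co.cask.cdap.api.annotation, co.cask.cdap.api.flow.flowlet, and java.util are assumed.

public final class ExampleFlowlet extends AbstractFlowlet {

  @Batch(100)
  @ProcessInput("events")
  public void process(Iterator<String> events) {
    // Batch mode: above, dataType resolves to String and batchSize to 100.
  }

  @Tick(delay = 1L, unit = TimeUnit.SECONDS)
  public void generate() throws Exception {
    // Tick method: no input names, schema is NULL, dataType is void.
  }
}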