use of co.cask.cdap.api.data.schema.UnsupportedTypeException in project cdap by caskdata.
the class SpamClassifier method configure.
@Override
public void configure() {
  setName("SpamClassifier");
  setDescription("A Spark Streaming Example for Kafka Message Classification");
  addStream(new Stream(STREAM));
  addSpark(new SpamClassifierProgram());
  addService(SERVICE_HANDLER, new SpamClassifierServiceHandler());
  // Store for message classification status
  try {
    ObjectStores.createObjectStore(getConfigurer(), DATASET, Double.class,
                                   DatasetProperties.builder()
                                     .setDescription("Kafka Message Spam Classification")
                                     .build());
  } catch (UnsupportedTypeException e) {
    // This cannot happen, because Double is an actual class.
    throw new RuntimeException(e);
  }
}
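The catch block above is defensive: ObjectStores.createObjectStore generates a schema for the value type by reflection, and throws UnsupportedTypeException only for types that cannot be mapped to a schema. Below is a minimal sketch of a failing call, assuming the same ObjectStores API used above; the dataset name "bad" and the choice of Runnable are hypothetical, and an interface is expected to fail because reflection finds no concrete fields to serialize.

try {
  // Hypothetical: an interface type has no fields to reflect into a schema,
  // so schema generation is expected to fail here.
  ObjectStores.createObjectStore(getConfigurer(), "bad", Runnable.class, DatasetProperties.EMPTY);
} catch (UnsupportedTypeException e) {
  // Reached for types that cannot be mapped to a schema. The examples on this
  // page use concrete classes (Double, String, Point), so they only rethrow.
}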
use of co.cask.cdap.api.data.schema.UnsupportedTypeException in project cdap by caskdata.
the class PurchaseApp method configure.
@Override
public void configure() {
  setName(APP_NAME);
  setDescription("Purchase history application");
  // Ingest data into the Application via a Stream
  addStream(new Stream("purchaseStream"));
  // Store processed data in a Dataset
  createDataset("frequentCustomers", KeyValueTable.class,
                DatasetProperties.builder().setDescription("Store frequent customers").build());
  // Store user profiles in a Dataset
  createDataset("userProfiles", KeyValueTable.class,
                DatasetProperties.builder().setDescription("Store user profiles").build());
  // Process events in real time using a Flow
  addFlow(new PurchaseFlow());
  // Specify a MapReduce to run on the acquired data
  addMapReduce(new PurchaseHistoryBuilder());
  // Run a Workflow that uses the MapReduce to run on the acquired data
  addWorkflow(new PurchaseHistoryWorkflow());
  // Retrieve the processed data using a Service
  addService(new PurchaseHistoryService());
  // Store and retrieve user profile data using a Service
  addService(UserProfileServiceHandler.SERVICE_NAME, new UserProfileServiceHandler());
  // Provide a Service to Application components
  addService(new CatalogLookupService());
  // Schedule the Workflow to run daily at 4:00 A.M.
  schedule(buildSchedule("DailySchedule", ProgramType.WORKFLOW, "PurchaseHistoryWorkflow")
             .withConcurrency(1)
             .triggerByTime("0 4 * * *"));
  // Schedule the Workflow based on the data coming in to the purchaseStream stream
  scheduleWorkflow(Schedules.builder("DataSchedule")
                     .setDescription("Schedule execution when 1 MB or more of data is ingested in the purchaseStream")
                     .setMaxConcurrentRuns(1)
                     .createDataSchedule(Schedules.Source.STREAM, "purchaseStream", 1),
                   "PurchaseHistoryWorkflow");
  createDataset("history", PurchaseHistoryStore.class, PurchaseHistoryStore.properties("History dataset"));
  try {
    createDataset("purchases", ObjectMappedTable.class,
                  ObjectMappedTableProperties.builder()
                    .setType(Purchase.class)
                    .setDescription("Store purchases")
                    .build());
  } catch (UnsupportedTypeException e) {
    // This cannot happen, because PurchaseHistory and Purchase are actual classes.
    throw new RuntimeException(e);
  }
}
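ObjectMappedTableProperties.setType(Purchase.class) works because ObjectMappedTable maps the fields of a simple class to table columns. A hypothetical sketch of the kind of POJO it expects; the field names here are illustrative, not the actual Purchase class from this example:

// Illustrative POJO: simple fields like these map directly to table columns.
public class Purchase {
  private String customer;
  private String product;
  private int quantity;
  private long purchaseTime;

  public Purchase(String customer, String product, int quantity, long purchaseTime) {
    this.customer = customer;
    this.product = product;
    this.quantity = quantity;
    this.purchaseTime = purchaseTime;
  }
}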
use of co.cask.cdap.api.data.schema.UnsupportedTypeException in project cdap by caskdata.
the class SparkKMeansApp method configure.
@Override
public void configure() {
  setName("SparkKMeans");
  setDescription("Spark KMeans app");
  // Ingest data into the Application via a Stream
  addStream(new Stream("pointsStream"));
  // Process points data in real time using a Flow
  addFlow(new PointsFlow());
  // Run a Spark program on the acquired data
  addSpark(new SparkKMeansSpecification());
  // Retrieve the processed data using a Service
  addService(new CentersService());
  // Store input and processed data in ObjectStore Datasets
  try {
    ObjectStores.createObjectStore(getConfigurer(), "points", Point.class,
                                   DatasetProperties.builder().setDescription("Store points data").build());
    ObjectStores.createObjectStore(getConfigurer(), "centers", String.class,
                                   DatasetProperties.builder().setDescription("Store centers data").build());
  } catch (UnsupportedTypeException e) {
    // This cannot happen, because Point and String are actual classes.
    throw new RuntimeException(e);
  }
}
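The CentersService added above retrieves results from the "centers" object store. A minimal sketch of what such a handler read could look like, assuming CDAP's @UseDataSet injection and the ObjectStore read(byte[]) method; the handler class name and path are hypothetical:

public class CentersServiceHandler extends AbstractHttpServiceHandler {

  // Injects the "centers" ObjectStore declared in configure() above.
  @UseDataSet("centers")
  private ObjectStore<String> centers;

  @GET
  @Path("centers/{index}")
  public void getCenter(HttpServiceRequest request, HttpServiceResponder responder,
                        @PathParam("index") String index) {
    String center = centers.read(Bytes.toBytes(index));
    if (center == null) {
      responder.sendStatus(404); // no center computed for this index yet
    } else {
      responder.sendString(center);
    }
  }
}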
use of co.cask.cdap.api.data.schema.UnsupportedTypeException in project cdap by caskdata.
the class StreamHandler method getAndValidateConfig.
/**
 * Gets stream properties from the request. If the request is invalid, a BadRequestException is thrown.
 */
private StreamProperties getAndValidateConfig(HttpRequest request) throws BadRequestException {
  Reader reader = new InputStreamReader(new ChannelBufferInputStream(request.getContent()));
  StreamProperties properties;
  try {
    properties = GSON.fromJson(reader, StreamProperties.class);
  } catch (Exception e) {
    throw new BadRequestException("Invalid stream configuration. Please check that the "
                                    + "configuration is a valid JSON Object with a valid schema. Cause: "
                                    + e.getMessage());
  }
  // Validate TTL
  Long ttl = properties.getTTL();
  if (ttl != null && ttl < 0) {
    throw new BadRequestException("Invalid TTL " + ttl + ". TTL value should be positive.");
  }
  // Validate format
  FormatSpecification formatSpec = properties.getFormat();
  if (formatSpec != null) {
    String formatName = formatSpec.getName();
    if (formatName == null) {
      throw new BadRequestException("A format name must be specified.");
    }
    try {
      // If a format is given, make sure it is a valid format:
      // check that we can instantiate the format class.
      RecordFormat<?, ?> format = RecordFormats.createInitializedFormat(formatSpec);
      // The request may contain a null schema, in which case the default schema of the format should be used.
      // Create a new specification object that is guaranteed to have a non-null schema.
      formatSpec = new FormatSpecification(formatSpec.getName(), format.getSchema(), formatSpec.getSettings());
    } catch (UnsupportedTypeException e) {
      throw new BadRequestException("Format " + formatName + " does not support the requested schema.");
    } catch (Exception e) {
      throw new BadRequestException("Invalid format, unable to instantiate format " + formatName);
    }
  }
  // Validate notification threshold
  Integer threshold = properties.getNotificationThresholdMB();
  if (threshold != null && threshold <= 0) {
    throw new BadRequestException("Invalid threshold " + threshold + ". Threshold value should be greater than zero.");
  }
  // Validate owner principal if one is provided
  if (properties.getOwnerPrincipal() != null) {
    SecurityUtil.validateKerberosPrincipal(properties.getOwnerPrincipal());
  }
  return new StreamProperties(ttl, formatSpec, threshold, properties.getDescription(), properties.getOwnerPrincipal());
}
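For illustration, here is a request body this method would accept. The property names are assumptions about how StreamProperties is serialized by GSON, not confirmed field names; note that "schema" may be null, since the code above substitutes the format's default schema in that case:

{
  "ttl": 86400000,
  "format": {
    "name": "csv",
    "schema": null,
    "settings": { "delimiter": "," }
  },
  "notification.threshold.mb": 1024,
  "description": "Purchase events"
}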
use of co.cask.cdap.api.data.schema.UnsupportedTypeException in project cdap by caskdata.
the class AvroRecordFormat method validateSchema.
@Override
protected void validateSchema(Schema desiredSchema) throws UnsupportedTypeException {
  try {
    // Rather than check for all inconsistencies, just try to read the schema string as an Avro schema.
    avroFormatSchema = new org.apache.avro.Schema.Parser().parse(desiredSchema.toString());
    formatSchema = desiredSchema;
  } catch (SchemaParseException e) {
    throw new UnsupportedTypeException("Schema is not a valid avro schema.", e);
  } catch (Exception e) {
    throw new UnsupportedTypeException("Exception parsing schema as an avro schema.", e);
  }
}
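The behavior of the SchemaParseException catch clause can be seen with Avro's parser directly. This sketch uses only the org.apache.avro.Schema.Parser API that the method above calls; the schema strings are illustrative:

// Parses fine: a record with a single string field.
new org.apache.avro.Schema.Parser().parse(
    "{\"type\":\"record\",\"name\":\"Event\",\"fields\":[{\"name\":\"body\",\"type\":\"string\"}]}");

// Throws SchemaParseException ("struct" is not an Avro type), which
// validateSchema above rethrows as UnsupportedTypeException.
new org.apache.avro.Schema.Parser().parse(
    "{\"type\":\"struct\",\"name\":\"Event\",\"fields\":[]}");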