Use of org.apache.apex.malhar.lib.db.jdbc.JdbcTransactionalStore in project apex-malhar by Apache.
The class MaxPerKeyExamples, method populateDAG.
/**
* Populate the dag using High-Level API.
* @param dag
* @param conf
*/
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  JdbcPOJOInputOperator jdbcInput = new JdbcPOJOInputOperator();
  jdbcInput.setFieldInfos(addInputFieldInfos());
  JdbcStore store = new JdbcStore();
  jdbcInput.setStore(store);

  JdbcPOJOInsertOutputOperator jdbcOutput = new JdbcPOJOInsertOutputOperator();
  jdbcOutput.setFieldInfos(addOutputFieldInfos());
  JdbcTransactionalStore outputStore = new JdbcTransactionalStore();
  jdbcOutput.setStore(outputStore);

  // Create a stream that reads from the JDBC input operator.
  ApexStream<Object> stream = StreamFactory.fromInput(jdbcInput, jdbcInput.outputPort, name("jdbcInput"))
      .window(new WindowOption.GlobalWindow(), new TriggerOption().accumulatingFiredPanes().withEarlyFiringsAtEvery(1))
      .map(new Function.MapFunction<Object, InputPojo>()
      {
        @Override
        public InputPojo f(Object input)
        {
          return (InputPojo) input;
        }
      }, name("ObjectToInputPojo"))
      .addCompositeStreams(new MaxMeanTemp())
      .map(new Function.MapFunction<OutputPojo, Object>()
      {
        @Override
        public Object f(OutputPojo input)
        {
          return (Object) input;
        }
      }, name("OutputPojoToObject"))
      .endWith(jdbcOutput, jdbcOutput.input, name("jdbcOutput"));

  stream.populateDag(dag);
}
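Neither store above is given connection settings in this snippet; in practice they are supplied through the application's properties file. For illustration only, a minimal sketch of configuring them in code, assuming the standard JdbcStore setters and placeholder HSQLDB values that are not part of the original example:

  // Placeholder connection settings; the example normally reads these from the properties file.
  store.setDatabaseDriver("org.hsqldb.jdbcDriver");   // assumed JDBC driver class
  store.setDatabaseUrl("jdbc:hsqldb:mem:exampledb");  // assumed connection URL
  store.setUserName("sa");                            // assumed credentials
  store.setPassword("");
  outputStore.setDatabaseDriver("org.hsqldb.jdbcDriver");
  outputStore.setDatabaseUrl("jdbc:hsqldb:mem:exampledb");
  // JdbcTransactionalStore additionally maintains a metadata table in the target database,
  // which it uses to record committed windows and keep the writes exactly-once.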
Use of org.apache.apex.malhar.lib.db.jdbc.JdbcTransactionalStore in project apex-malhar by Apache.
The class JdbcToJdbcApp, method populateDAG.
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  JdbcPOJOInputOperator jdbcInputOperator = dag.addOperator("JdbcInput", new JdbcPOJOInputOperator());
  JdbcStore store = new JdbcStore();
  jdbcInputOperator.setStore(store);
  jdbcInputOperator.setFieldInfos(addFieldInfos());
  /**
   * The class given below can be updated to a user-defined class that matches the
   * input table schema; the addFieldInfos() method needs to be updated accordingly.
   * Alternatively, this line can stay commented out and the tuple class can be set
   * from the properties file (a sketch of such a file follows this snippet).
   */
  // dag.setOutputPortAttribute(jdbcInputOperator.outputPort, Context.PortContext.TUPLE_CLASS, PojoEvent.class);

  JdbcPOJOInsertOutputOperator jdbcOutputOperator = dag.addOperator("JdbcOutput", new JdbcPOJOInsertOutputOperator());
  JdbcTransactionalStore outputStore = new JdbcTransactionalStore();
  jdbcOutputOperator.setStore(outputStore);
  jdbcOutputOperator.setFieldInfos(addJdbcFieldInfos());
  /**
   * Likewise, the class given below can be updated to a user-defined class that matches
   * the output table schema; the addJdbcFieldInfos() method needs to be updated
   * accordingly. Alternatively, this line can stay commented out and the tuple class
   * can be set from the properties file.
   */
  // dag.setInputPortAttribute(jdbcOutputOperator.input, Context.PortContext.TUPLE_CLASS, PojoEvent.class);

  dag.addStream("POJO's", jdbcInputOperator.outputPort, jdbcOutputOperator.input).setLocality(Locality.CONTAINER_LOCAL);
}
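As the comments above note, the tuple classes (and the JDBC connection settings) can come from the application's configuration file instead of code. A sketch of what such a properties fragment might contain, using the operator names JdbcInput/JdbcOutput and port names from the snippet above and the usual dt.operator.<name> property convention; the class name and JDBC URL are placeholders, not values from the example:

<!-- Sketch of a properties.xml fragment; the tuple class and connection URL are placeholders. -->
<configuration>
  <property>
    <name>dt.operator.JdbcInput.port.outputPort.attr.TUPLE_CLASS</name>
    <value>com.example.PojoEvent</value>
  </property>
  <property>
    <name>dt.operator.JdbcOutput.port.input.attr.TUPLE_CLASS</name>
    <value>com.example.PojoEvent</value>
  </property>
  <property>
    <name>dt.operator.JdbcInput.prop.store.databaseUrl</name>
    <value>jdbc:hsqldb:mem:exampledb</value>
  </property>
  <property>
    <name>dt.operator.JdbcOutput.prop.store.databaseUrl</name>
    <value>jdbc:hsqldb:mem:exampledb</value>
  </property>
</configuration>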
Use of org.apache.apex.malhar.lib.db.jdbc.JdbcTransactionalStore in project apex-malhar by Apache.
The class FileToJdbcCustomParser, method populateDAG.
@Override
public void populateDAG(DAG dag, Configuration configuration)
{
  // create operators
  FileReader fileReader = dag.addOperator("FileReader", FileReader.class);
  CustomParser customParser = dag.addOperator("CustomParser", CustomParser.class);
  JdbcPOJOInsertOutputOperator jdbcOutputOperator = dag.addOperator("JdbcOutput", JdbcPOJOInsertOutputOperator.class);

  // configure operators
  jdbcOutputOperator.setFieldInfos(addFieldInfos());
  JdbcTransactionalStore outputStore = new JdbcTransactionalStore();
  jdbcOutputOperator.setStore(outputStore);

  // add streams
  dag.addStream("Data", fileReader.output, customParser.input);
  dag.addStream("POJOs", customParser.output, jdbcOutputOperator.input);
}
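The addFieldInfos() helper called above maps POJO field expressions to the target table's columns, but its body is not part of this snippet. A hypothetical sketch of such a helper, assuming a JdbcFieldInfo(columnName, pojoFieldExpression, SupportType, sqlType) constructor and made-up column and field names; the exact class, package, and signature should be checked against the Malhar version in use:

import java.sql.Types;
import java.util.ArrayList;
import java.util.List;
// JdbcFieldInfo lives in the Malhar JDBC package; its exact package name depends on the Malhar version.

private List<JdbcFieldInfo> addFieldInfos()
{
  List<JdbcFieldInfo> fieldInfos = new ArrayList<>();
  // Hypothetical column-to-field mappings; replace with the actual table schema.
  fieldInfos.add(new JdbcFieldInfo("ACCOUNT_NO", "accountNumber", JdbcFieldInfo.SupportType.INTEGER, Types.INTEGER));
  fieldInfos.add(new JdbcFieldInfo("NAME", "name", JdbcFieldInfo.SupportType.STRING, Types.VARCHAR));
  fieldInfos.add(new JdbcFieldInfo("AMOUNT", "amount", JdbcFieldInfo.SupportType.INTEGER, Types.INTEGER));
  return fieldInfos;
}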
Use of org.apache.apex.malhar.lib.db.jdbc.JdbcTransactionalStore in project apex-malhar by Apache.
The class FileToJdbcCsvParser, method populateDAG.
@Override
public void populateDAG(DAG dag, Configuration configuration)
{
  // create operators
  FileReader fileReader = dag.addOperator("FileReader", FileReader.class);
  CsvParser csvParser = dag.addOperator("CsvParser", CsvParser.class);
  JdbcPOJOInsertOutputOperator jdbcOutputOperator = dag.addOperator("JdbcOutput", JdbcPOJOInsertOutputOperator.class);

  // configure operators
  String pojoSchema = SchemaUtils.jarResourceFileToString("schema.json");
  csvParser.setSchema(pojoSchema);
  jdbcOutputOperator.setFieldInfos(addFieldInfos());
  JdbcTransactionalStore outputStore = new JdbcTransactionalStore();
  jdbcOutputOperator.setStore(outputStore);

  // add streams
  dag.addStream("Bytes", fileReader.byteOutput, csvParser.in);
  dag.addStream("POJOs", csvParser.out, jdbcOutputOperator.input);
}
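The CsvParser is driven by the schema.json resource loaded above; the file itself is not shown here. A sketch of what a delimited-data schema might look like, assuming Malhar's delimited-schema JSON format and invented field names:

{
  "separator": ",",
  "quoteChar": "\"",
  "fields": [
    {"name": "accountNumber", "type": "Integer"},
    {"name": "name", "type": "String"},
    {"name": "amount", "type": "Integer"}
  ]
}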
Use of org.apache.apex.malhar.lib.db.jdbc.JdbcTransactionalStore in project apex-malhar by Apache.
The class StreamingWordExtract, method populateDAG.
/**
* Populate dag with High-Level API.
* @param dag
* @param conf
*/
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  JdbcPOJOInsertOutputOperator jdbcOutput = new JdbcPOJOInsertOutputOperator();
  jdbcOutput.setFieldInfos(addFieldInfos());
  JdbcTransactionalStore outputStore = new JdbcTransactionalStore();
  jdbcOutput.setStore(outputStore);
  jdbcOutput.setTablename("TestTable");

  // Create a stream reading from a folder.
  ApexStream<String> stream = StreamFactory.fromFolder("./src/test/resources/data");

  // Extract all the words from the input line of text.
  stream.flatMap(new ExtractWords())
      .filter(new EmptyStringFilter())
      .map(new Uppercase())
      .map(new PojoMapper())
      .endWith(jdbcOutput, jdbcOutput.input, Option.Options.name("jdbcOutput"));

  stream.populateDag(dag);
}
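ExtractWords, EmptyStringFilter, Uppercase, and PojoMapper are helper functions defined elsewhere in this example. To illustrate the shape such helpers take, here is a minimal sketch of an Uppercase-style mapper using the same Function.MapFunction interface seen in the MaxPerKeyExamples snippet above; it is a sketch, not the example's actual implementation:

// Minimal sketch of a map function for the High-Level (stream) API.
public static class Uppercase implements Function.MapFunction<String, String>
{
  @Override
  public String f(String input)
  {
    // Convert each incoming word to upper case before it is mapped to a POJO.
    return input.toUpperCase();
  }
}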