Use of org.apache.apex.malhar.hive.HiveOperator in project apex-malhar by apache: the populateDAG method of the HiveMapInsertBenchmarkingApp class.
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  // Configure the Hive store from application properties.
  HiveStore store = new HiveStore();
  store.setDatabaseUrl(conf.get("dt.application.HiveMapInsertBenchmarkingApp.operator.HiveOperator.store.dbUrl"));
  store.setConnectionProperties(conf.get("dt.application.HiveMapInsertBenchmarkingApp.operator.HiveOperator.store.connectionProperties"));
  store.setFilepath(conf.get("dt.application.HiveMapInsertBenchmarkingApp.operator.HiveOperator.store.filepath"));
  // Create the target Hive table for map data, with ':' as the map key-value separator.
  try {
    hiveInitializeMapDatabase(store, conf.get("dt.application.HiveMapInsertBenchmarkingApp.operator.HiveOperator.tablename"), ":");
  } catch (SQLException ex) {
    LOG.debug(ex.getMessage());
  }
  dag.setAttribute(DAG.STREAMING_WINDOW_SIZE_MILLIS, 1000);
  // Generate random integers and convert them into maps.
  RandomEventGenerator eventGenerator = dag.addOperator("EventGenerator", RandomEventGenerator.class);
  RandomMapOutput mapGenerator = dag.addOperator("MapGenerator", RandomMapOutput.class);
  dag.setAttribute(eventGenerator, PortContext.QUEUE_CAPACITY, 10000);
  dag.setAttribute(mapGenerator, PortContext.QUEUE_CAPACITY, 10000);
  // HiveOperator loads the rolled files into the partitioned Hive table.
  HiveOperator hiveInsert = dag.addOperator("HiveOperator", new HiveOperator());
  hiveInsert.setStore(store);
  // Roll the map tuples into files under the store's file path.
  FSRollingMapTestImpl rollingMapFsWriter = dag.addOperator("RollingFsMapWriter", new FSRollingMapTestImpl());
  rollingMapFsWriter.setFilePath(store.filepath);
  ArrayList<String> hivePartitionColumns = new ArrayList<String>();
  hivePartitionColumns.add("dt");
  hiveInsert.setHivePartitionColumns(hivePartitionColumns);
  // Wire the pipeline: random ints -> maps -> rolling HDFS files -> Hive load.
  dag.addStream("EventGenerator2Map", eventGenerator.integer_data, mapGenerator.input);
  dag.addStream("MapGenerator2HdfsOutput", mapGenerator.map_data, rollingMapFsWriter.input);
  dag.addStream("FsWriter2Hive", rollingMapFsWriter.outputPort, hiveInsert.input);
}
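
The hiveInitializeMapDatabase helper called above is defined elsewhere in the benchmark class. As a rough orientation, a minimal sketch of such a helper is given below; it assumes HiveStore exposes connect(), getConnection(), and disconnect() from its JDBC store base class, and the table name, column layout, and DDL shown are illustrative rather than taken from the benchmark source.

// Minimal sketch only: creates a partitioned Hive table whose single column is a
// map<string,int>, using the delimiter passed in (':' in the call above).
// Requires java.sql.Statement and java.sql.SQLException imports.
private void hiveInitializeMapDatabase(HiveStore hiveStore, String tablename, String delimiterMap) throws SQLException
{
  hiveStore.connect();
  Statement stmt = hiveStore.getConnection().createStatement();
  stmt.execute("CREATE TABLE IF NOT EXISTS " + tablename
      + " (col1 map<string,int>) PARTITIONED BY (dt STRING)"
      + " ROW FORMAT DELIMITED"
      + " MAP KEYS TERMINATED BY '" + delimiterMap + "'"
      + " STORED AS TEXTFILE");
  stmt.close();
  hiveStore.disconnect();
}
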
Use of org.apache.apex.malhar.hive.HiveOperator in project apex-malhar by apache: the populateDAG method of the HiveInsertBenchmarkingApp class.
@Override
public void populateDAG(DAG dag, Configuration conf)
{
  // Configure the Hive store from application properties.
  HiveStore store = new HiveStore();
  store.setDatabaseUrl(conf.get("dt.application.HiveInsertBenchmarkingApp.operator.HiveOperator.store.dbUrl"));
  store.setConnectionProperties(conf.get("dt.application.HiveInsertBenchmarkingApp.operator.HiveOperator.store.connectionProperties"));
  store.setFilepath(conf.get("dt.application.HiveInsertBenchmarkingApp.operator.HiveOperator.store.filepath"));
  // Create the target Hive table.
  try {
    hiveInitializeDatabase(store, conf.get("dt.application.HiveInsertBenchmarkingApp.operator.HiveOperator.tablename"));
  } catch (SQLException ex) {
    LOG.debug(ex.getMessage());
  }
  dag.setAttribute(DAG.STREAMING_WINDOW_SIZE_MILLIS, 1000);
  // Generate random date strings and roll them into files under the store's file path.
  RandomDateGenerator dateGenerator = dag.addOperator("DateGenerator", new RandomDateGenerator());
  FSRollingTestImpl rollingFsWriter = dag.addOperator("RollingFsWriter", new FSRollingTestImpl());
  rollingFsWriter.setFilePath(store.filepath);
  // HiveOperator loads the rolled files into the Hive table, partitioned by "dt".
  HiveOperator hiveInsert = dag.addOperator("HiveOperator", new HiveOperator());
  hiveInsert.setStore(store);
  ArrayList<String> hivePartitionColumns = new ArrayList<String>();
  hivePartitionColumns.add("dt");
  hiveInsert.setHivePartitionColumns(hivePartitionColumns);
  // Wire the pipeline: date strings -> rolling HDFS files -> Hive load.
  dag.addStream("Generator2HDFS", dateGenerator.outputString, rollingFsWriter.input);
  dag.addStream("FsWriter2Hive", rollingFsWriter.outputPort, hiveInsert.input);
}
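
Both applications read their settings through conf.get(...) with dt.application.* keys. For context, a hedged sketch of how one of these benchmarks could be launched in Apex local mode is shown below; the property keys mirror the calls above, while the JDBC URL, file path, table name, and run duration are illustrative assumptions, not values from the project.

// Illustrative sketch only: runs HiveInsertBenchmarkingApp in Apex local mode.
// The property values below are placeholders for a local test setup.
import org.apache.hadoop.conf.Configuration;
import com.datatorrent.api.LocalMode;

public class HiveInsertBenchmarkLauncher
{
  public static void main(String[] args) throws Exception
  {
    Configuration conf = new Configuration(false);
    conf.set("dt.application.HiveInsertBenchmarkingApp.operator.HiveOperator.store.dbUrl",
        "jdbc:hive2://localhost:10000/default");           // assumed HiveServer2 URL
    conf.set("dt.application.HiveInsertBenchmarkingApp.operator.HiveOperator.store.filepath",
        "/tmp/hive-benchmark");                             // assumed staging directory
    conf.set("dt.application.HiveInsertBenchmarkingApp.operator.HiveOperator.tablename",
        "temptable");                                       // assumed table name

    LocalMode lma = LocalMode.newInstance();
    lma.prepareDAG(new HiveInsertBenchmarkingApp(), conf);  // builds the DAG shown above
    LocalMode.Controller lc = lma.getController();
    lc.run(60000);                                          // run for one minute, then shut down
  }
}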