Use of com.datatorrent.api.Context.OperatorContext in project apex-malhar by Apache.
Class SpillableSetMultimapImplTest, method recoveryTestWithManagedState.
@Test
public void recoveryTestWithManagedState() {
  SpillableStateStore store = testMeta.store;
  SpillableSetMultimapImpl<String, String> map =
      new SpillableSetMultimapImpl<>(store, ID1, 0L, createStringSerde(), createStringSerde());
  store.setup(testMeta.operatorContext);
  map.setup(testMeta.operatorContext);
  long nextWindowId = 0L;
  nextWindowId = simpleMultiKeyTestHelper(store, map, "a", nextWindowId);
  long activationWindow = nextWindowId;

  // Take a checkpoint of the current state and keep a Kryo clone of the map for later recovery.
  store.beforeCheckpoint(nextWindowId);
  SpillableSetMultimapImpl<String, String> clonedMap = KryoCloneUtils.cloneObject(map);
  store.checkpointed(nextWindowId);
  store.committed(nextWindowId);

  // Mutate the original map in a window after the committed checkpoint.
  nextWindowId++;
  store.beginWindow(nextWindowId);
  map.beginWindow(nextWindowId);
  Set<String> set1 = map.get("a");
  Assert.assertEquals(11, set1.size());
  Set<String> referenceSet = Sets.newHashSet("a", "b", "c", "d", "e", "f", "g", "tt", "ab", "99", "oo");
  Assert.assertTrue(referenceSet.containsAll(set1));
  Assert.assertTrue(set1.containsAll(referenceSet));
  set1.add("111");
  Assert.assertTrue(set1.contains("111"));
  Assert.assertEquals(12, set1.size());
  map.endWindow();
  store.endWindow();
  map.teardown();
  store.teardown();

  // Restore from the clone, activating at the committed window id.
  map = clonedMap;
  store = map.getStore();
  Attribute.AttributeMap.DefaultAttributeMap attributes = new Attribute.AttributeMap.DefaultAttributeMap();
  attributes.put(DAG.APPLICATION_PATH, testMeta.applicationPath);
  attributes.put(Context.OperatorContext.ACTIVATION_WINDOW_ID, activationWindow);
  OperatorContext context = mockOperatorContext(testMeta.operatorContext.getId(), attributes);
  store.setup(context);
  map.setup(context);
  nextWindowId = activationWindow + 1;
  store.beginWindow(nextWindowId);
  map.beginWindow(nextWindowId);

  // The post-checkpoint mutation ("111") is gone; only the committed state is visible again.
  Assert.assertEquals(1, map.size());
  Assert.assertTrue(map.containsKey("a"));
  Assert.assertEquals(11, map.get("a").size());
  map.endWindow();
  store.endWindow();
  map.teardown();
  store.teardown();
}
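The recovery half of this test hinges on rebuilding an OperatorContext whose ACTIVATION_WINDOW_ID points at the committed checkpoint. Below is a minimal sketch of that pattern; it assumes the mockOperatorContext(...) helper from apex-malhar's OperatorContextTestHelper (org.apache.apex.malhar.lib.helper in recent releases, com.datatorrent.lib.helper in older ones), and the operator id and application path are hypothetical parameters.

import com.datatorrent.api.Attribute;
import com.datatorrent.api.Context;
import com.datatorrent.api.Context.OperatorContext;
import com.datatorrent.api.DAG;

import static org.apache.apex.malhar.lib.helper.OperatorContextTestHelper.mockOperatorContext;

// Sketch only: the helper's package and the id/path values are assumptions, not taken from the test.
public class RecoveryContextSketch {
  public static OperatorContext recoveryContext(int operatorId, String appPath, long committedWindowId) {
    Attribute.AttributeMap.DefaultAttributeMap attributes = new Attribute.AttributeMap.DefaultAttributeMap();
    attributes.put(DAG.APPLICATION_PATH, appPath);
    // Tells the restored operator which window it is being reactivated from.
    attributes.put(Context.OperatorContext.ACTIVATION_WINDOW_ID, committedWindowId);
    return mockOperatorContext(operatorId, attributes);
  }
}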
Use of com.datatorrent.api.Context.OperatorContext in project apex-malhar by Apache.
Class SpillableArrayListMultimapImplTest, method testLoad.
@Test
public void testLoad() {
  Random random = new Random();
  final int keySize = 1000000;
  final int valueSize = 100000000;
  final int numOfEntry = 100000;
  SpillableStateStore store = testMeta.store;
  SpillableArrayListMultimapImpl<String, String> multimap =
      new SpillableArrayListMultimapImpl<>(this.testMeta.store, ID1, 0L, new StringSerde(), new StringSerde());

  // Build a minimal OperatorContext carrying only the application path.
  Attribute.AttributeMap.DefaultAttributeMap attributes = new Attribute.AttributeMap.DefaultAttributeMap();
  attributes.put(DAG.APPLICATION_PATH, testMeta.applicationPath);
  OperatorContext context = mockOperatorContext(testMeta.operatorContext.getId(), attributes);
  store.setup(context);
  multimap.setup(context);

  // Write a large number of random entries within a single window.
  store.beginWindow(1);
  multimap.beginWindow(1);
  for (int i = 0; i < numOfEntry; ++i) {
    multimap.put(String.valueOf(random.nextInt(keySize)), String.valueOf(random.nextInt(valueSize)));
  }
  multimap.endWindow();
  store.endWindow();
}
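The load test follows the window-bracketing contract of spillable data structures: the backing store and the multimap must see matching beginWindow/endWindow calls for each streaming window. A condensed sketch of one such window follows; the package names are assumed for a recent apex-malhar release, and the store and multimap are expected to have been set up with the same OperatorContext first, as in the test above.

import java.util.Random;

import org.apache.apex.malhar.lib.state.spillable.SpillableArrayListMultimapImpl;
import org.apache.apex.malhar.lib.state.spillable.SpillableStateStore;

// Sketch only: packages are assumed; setup(...) must already have been called on both arguments.
public class SpillableLoadSketch {
  public static void loadOneWindow(SpillableStateStore store,
      SpillableArrayListMultimapImpl<String, String> multimap,
      long windowId, int numOfEntries, Random random) {
    store.beginWindow(windowId);
    multimap.beginWindow(windowId);
    for (int i = 0; i < numOfEntries; i++) {
      multimap.put(String.valueOf(random.nextInt(1000)), String.valueOf(random.nextInt(1000)));
    }
    // Close the multimap's window before the store's, mirroring the begin order in reverse.
    multimap.endWindow();
    store.endWindow();
  }
}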
Use of com.datatorrent.api.Context.OperatorContext in project apex-malhar by Apache.
Class MemsqlInputBenchmarkTest, method testMethod.
@Test
public void testMethod() throws SQLException, IOException {
  Configuration conf = new Configuration();
  InputStream inputStream = new FileInputStream("src/site/conf/dt-site-memsql.xml");
  conf.addResource(inputStream);

  MemsqlStore memsqlStore = new MemsqlStore();
  memsqlStore.setDatabaseUrl(conf.get("dt.rootDbUrl"));
  memsqlStore.setConnectionProperties(conf.get("dt.application.MemsqlInputBenchmark.operator.memsqlInputOperator.store.connectionProperties"));
  AbstractMemsqlOutputOperatorTest.memsqlInitializeDatabase(memsqlStore);

  // Seed the database through the output operator before running the input benchmark.
  MemsqlPOJOOutputOperator outputOperator = new MemsqlPOJOOutputOperator();
  outputOperator.getStore().setDatabaseUrl(conf.get("dt.application.MemsqlInputBenchmark.operator.memsqlInputOperator.store.dbUrl"));
  outputOperator.getStore().setConnectionProperties(conf.get("dt.application.MemsqlInputBenchmark.operator.memsqlInputOperator.store.connectionProperties"));
  outputOperator.setBatchSize(BATCH_SIZE);

  Random random = new Random();
  com.datatorrent.api.Attribute.AttributeMap.DefaultAttributeMap attributeMap =
      new com.datatorrent.api.Attribute.AttributeMap.DefaultAttributeMap();
  attributeMap.put(OperatorContext.PROCESSING_MODE, ProcessingMode.AT_LEAST_ONCE);
  attributeMap.put(OperatorContext.ACTIVATION_WINDOW_ID, -1L);
  attributeMap.put(DAG.APPLICATION_ID, APP_ID);
  OperatorContext context = mockOperatorContext(OPERATOR_ID, attributeMap);

  long seedSize = conf.getLong("dt.seedSize", SEED_SIZE);
  outputOperator.setup(context);
  outputOperator.beginWindow(0);
  for (long valueCounter = 0; valueCounter < seedSize; valueCounter++) {
    outputOperator.input.put(random.nextInt());
  }
  outputOperator.endWindow();
  outputOperator.teardown();

  // Launch the benchmark application in local mode for 20 seconds.
  MemsqlInputBenchmark app = new MemsqlInputBenchmark();
  LocalMode lm = LocalMode.newInstance();
  try {
    lm.prepareDAG(app, conf);
    LocalMode.Controller lc = lm.getController();
    lc.run(20000);
  } catch (Exception ex) {
    DTThrowable.rethrow(ex);
  }
  IOUtils.closeQuietly(inputStream);
}
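Stripped of the database seeding, the launch portion of this test is the usual LocalMode pattern for running an Apex application inside a unit test. A condensed sketch is below, assuming the same dt-site-memsql.xml path and the MemsqlInputBenchmark application class used above (its import is omitted here; it lives in the benchmark module).

import java.io.FileInputStream;
import java.io.InputStream;

import org.apache.hadoop.conf.Configuration;

import com.datatorrent.api.LocalMode;

// Sketch only: condensed from the test above; error handling is reduced to a throws clause.
public class LocalModeLaunchSketch {
  public static void runBenchmark() throws Exception {
    Configuration conf = new Configuration();
    try (InputStream inputStream = new FileInputStream("src/site/conf/dt-site-memsql.xml")) {
      conf.addResource(inputStream);
      LocalMode lm = LocalMode.newInstance();
      lm.prepareDAG(new MemsqlInputBenchmark(), conf);
      // Run the DAG in-process for 20 seconds, then shut it down.
      lm.getController().run(20000);
    }
  }
}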
Use of com.datatorrent.api.Context.OperatorContext in project apex-malhar by Apache.
Class AbstractMemsqlInputOperatorTest, method TestMemsqlPOJOInputOperator.
/*
 * This test can be run against a memsql installation on node17.
 */
@Test
public void TestMemsqlPOJOInputOperator() throws SQLException {
  cleanDatabase();
  populateDatabase(createStore(null, true));

  Attribute.AttributeMap attributeMap = new Attribute.AttributeMap.DefaultAttributeMap();
  attributeMap.put(Context.OperatorContext.SPIN_MILLIS, 500);
  OperatorContext context = mockOperatorContext(OPERATOR_ID, attributeMap);

  MemsqlPOJOInputOperator inputOperator = new MemsqlPOJOInputOperator();
  createStore((MemsqlStore) inputOperator.getStore(), true);
  inputOperator.setBatchSize(10);
  inputOperator.setTablename(FQ_TABLE);
  inputOperator.setPrimaryKeyColumn(INDEX_COLUMN);

  ArrayList<String> expressions = new ArrayList<String>();
  expressions.add("id");
  expressions.add("name");
  inputOperator.setExpressions(expressions);

  ArrayList<String> columns = new ArrayList<String>();
  columns.add("data_index");
  columns.add("data2");
  inputOperator.setColumns(columns);

  inputOperator.setQuery("select * from " + FQ_TABLE + ";");
  inputOperator.setOutputClass("org.apache.apex.malhar.contrib.memsql.TestInputPojo");

  CollectorTestSink<Object> sink = new CollectorTestSink<Object>();
  inputOperator.outputPort.setSink(sink);
  inputOperator.setup(context);

  // Unbounded query: all 100 seeded rows are emitted.
  inputOperator.beginWindow(0);
  inputOperator.emitTuples();
  inputOperator.endWindow();
  Assert.assertEquals("rows from db", 100, sink.collectedTuples.size());
  for (int i = 0; i < 10; i++) {
    TestInputPojo object = (TestInputPojo) sink.collectedTuples.get(i);
    Assert.assertEquals("id set in testpojo", i, object.getId());
    Assert.assertEquals("name set in testpojo", "Testname" + i, object.getName());
  }
  sink.clear();

  // Parameterized query with a start row: rows 10..99 are emitted.
  inputOperator.setQuery("select * from " + FQ_TABLE + " where " + "%p " + ">= " + "%s" + ";");
  inputOperator.setStartRow(10);
  inputOperator.setup(context);
  inputOperator.beginWindow(0);
  inputOperator.emitTuples();
  inputOperator.endWindow();
  Assert.assertEquals("rows from db", 90, sink.collectedTuples.size());
  sink.clear();

  // Parameterized query with start row and limit: only one batch of 10 rows is emitted.
  inputOperator.setQuery("select * from " + FQ_TABLE + " where " + "%p " + ">= " + "%s" + " LIMIT " + "%l" + ";");
  inputOperator.setStartRow(1);
  inputOperator.setBatchSize(10);
  inputOperator.setup(context);
  inputOperator.beginWindow(0);
  inputOperator.emitTuples();
  inputOperator.endWindow();
  Assert.assertEquals("rows from db", 10, sink.collectedTuples.size());
}
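The second and third queries use the operator's placeholder syntax. From the way the test drives it, %p appears to stand in for the primary-key column, %s for the configured start row, and %l for the batch-size limit; the substitution itself happens inside MemsqlPOJOInputOperator. A paraphrased fragment of the paged variant, with the template written as a single literal, is shown below (FQ_TABLE and INDEX_COLUMN are the test's own constants).

// Sketch only: same configuration as the last block of the test; the placeholder
// semantics are inferred from the test, and the substitution is performed by the operator.
inputOperator.setPrimaryKeyColumn(INDEX_COLUMN); // substituted for %p
inputOperator.setStartRow(1);                    // substituted for %s
inputOperator.setBatchSize(10);                  // substituted for %l
inputOperator.setQuery("select * from " + FQ_TABLE + " where %p >= %s LIMIT %l;");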
Use of com.datatorrent.api.Context.OperatorContext in project apex-malhar by Apache.
Class AbstractMemsqlOutputOperatorTest, method testMemsqlOutputOperator.
@Test
public void testMemsqlOutputOperator() throws Exception {
  cleanDatabase();
  MemsqlStore memsqlStore = createStore(null, true);

  MemsqlPOJOOutputOperator outputOperator = new MemsqlPOJOOutputOperator();
  outputOperator.setStore(memsqlStore);
  outputOperator.setBatchSize(BATCH_SIZE);
  outputOperator.setTablename(FQ_TABLE);

  ArrayList<String> columns = new ArrayList<String>();
  columns.add(DATA_COLUMN1);
  columns.add(DATA_COLUMN2);
  outputOperator.setDataColumns(columns);

  // Getter expressions used to extract the column values from the POJO.
  ArrayList<String> expressions = new ArrayList<String>();
  expressions.add("getIntVal()");
  expressions.add("getStringVal()");
  outputOperator.setExpression(expressions);

  AttributeMap.DefaultAttributeMap attributeMap = new AttributeMap.DefaultAttributeMap();
  attributeMap.put(OperatorContext.PROCESSING_MODE, ProcessingMode.AT_LEAST_ONCE);
  attributeMap.put(OperatorContext.ACTIVATION_WINDOW_ID, -1L);
  attributeMap.put(DAG.APPLICATION_ID, APP_ID);
  OperatorContext context = mockOperatorContext(OPERATOR_ID, attributeMap);
  outputOperator.setup(context);

  // Write one tuple per window.
  for (int wid = 0; wid < NUM_WINDOWS; wid++) {
    outputOperator.beginWindow(wid);
    innerObj.setIntVal(wid + 1);
    outputOperator.input.put(innerObj);
    outputOperator.endWindow();
  }
  outputOperator.teardown();

  // Verify the number of rows written to the table.
  memsqlStore.connect();
  Statement statement = memsqlStore.getConnection().createStatement();
  ResultSet resultSet = statement.executeQuery("select count(*) from " + FQ_TABLE);
  resultSet.next();
  int databaseSize = resultSet.getInt(1);
  memsqlStore.disconnect();
  Assert.assertEquals("Number of tuples in database", DATABASE_SIZE, databaseSize);
}
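The verification step is plain JDBC through the MemsqlStore. A small helper sketch of the same row count is shown below; it assumes a configured but not yet connected MemsqlStore (its import, from the memsql contrib package, is omitted) and the fully qualified table name.

import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

// Sketch only: mirrors the verification at the end of the test above;
// connect()/getConnection()/disconnect() are used exactly as there.
public class RowCountSketch {
  public static int countRows(MemsqlStore store, String table) throws SQLException {
    store.connect();
    try (Statement statement = store.getConnection().createStatement();
        ResultSet resultSet = statement.executeQuery("select count(*) from " + table)) {
      resultSet.next();
      return resultSet.getInt(1);
    } finally {
      store.disconnect();
    }
  }
}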