Use of org.apache.storm.tuple.Fields in project storm by apache.
From the class EsLookupBoltTest, method fieldsAreDeclaredThroughProvidedOutput:
@Test
public void fieldsAreDeclaredThroughProvidedOutput() throws Exception {
    // A random field name keeps the assertion independent of any real mapping.
    Fields fields = new Fields(UUID.randomUUID().toString());
    when(output.fields()).thenReturn(fields);
    OutputFieldsDeclarer declarer = mock(OutputFieldsDeclarer.class);
    bolt.declareOutputFields(declarer);
    // Capture what the bolt declared and verify it is the mocked Fields instance.
    ArgumentCaptor<Fields> declaredFields = ArgumentCaptor.forClass(Fields.class);
    verify(declarer).declare(declaredFields.capture());
    assertThat(declaredFields.getValue(), is(fields));
}
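The counterpart inside the bolt is not shown on this page. As a minimal sketch (a hypothetical bolt, not the project's actual EsLookupBolt), a bolt that declares whatever Fields it was configured with would satisfy the assertion captured above:

import org.apache.storm.topology.BasicOutputCollector;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.base.BaseBasicBolt;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Tuple;

// Hypothetical bolt for illustration only: it declares the Fields it was
// constructed with, which is exactly what the ArgumentCaptor above verifies.
public class ConfiguredFieldsBolt extends BaseBasicBolt {

    private final Fields outputFields;

    public ConfiguredFieldsBolt(Fields outputFields) {
        this.outputFields = outputFields;
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        declarer.declare(outputFields);
    }

    @Override
    public void execute(Tuple input, BasicOutputCollector collector) {
        // Lookup and emit logic omitted; only the field declaration matters here.
    }
}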
Use of org.apache.storm.tuple.Fields in project storm by apache.
From the class AvroGenericRecordBoltTest, method generateTestTuple:
private static Tuple generateTestTuple(GenericRecord record) {
    TopologyBuilder builder = new TopologyBuilder();
    // Anonymous context that declares a single output field, "record",
    // for every component and stream.
    GeneralTopologyContext topologyContext = new GeneralTopologyContext(
            builder.createTopology(), new Config(),
            new HashMap(), new HashMap(), new HashMap(), "") {
        @Override
        public Fields getComponentOutputFields(String componentId, String streamId) {
            return new Fields("record");
        }
    };
    return new TupleImpl(topologyContext, new Values(record), 1, "");
}
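For context (an illustrative sketch, not code taken from AvroGenericRecordBolt itself), a consumer of the tuple built above would read the Avro record back through the single declared field name, "record":

import org.apache.avro.generic.GenericRecord;
import org.apache.storm.tuple.Tuple;

// Illustrative helper: getValueByField returns Object, so the consumer
// casts to the type it expects under the "record" field.
final class RecordExtractor {

    static GenericRecord extract(Tuple tuple) {
        return (GenericRecord) tuple.getValueByField("record");
    }
}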
Use of org.apache.storm.tuple.Fields in project storm by apache.
From the class TestHdfsBolt, method generateTestTuple:
private Tuple generateTestTuple(Object id, Object msg, Object city, Object state) {
    TopologyBuilder builder = new TopologyBuilder();
    GeneralTopologyContext topologyContext = new GeneralTopologyContext(
            builder.createTopology(), new Config(),
            new HashMap(), new HashMap(), new HashMap(), "") {
        @Override
        public Fields getComponentOutputFields(String componentId, String streamId) {
            return new Fields("id", "msg", "city", "state");
        }
    };
    return new TupleImpl(topologyContext, new Values(id, msg, city, state), 1, "");
}
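The declared names ("id", "msg", "city", "state") allow by-name access downstream. A small hypothetical sketch, not code from TestHdfsBolt, showing how a record format might render one of these test tuples as a delimited line:

import org.apache.storm.tuple.Tuple;

// Hypothetical formatter: joins the four declared fields of the test tuple
// into a pipe-delimited line, similar in spirit to an HDFS record format.
final class PipeDelimitedFormatter {

    static String format(Tuple tuple) {
        return tuple.getValueByField("id") + "|"
                + tuple.getValueByField("msg") + "|"
                + tuple.getValueByField("city") + "|"
                + tuple.getValueByField("state");
    }
}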
Use of org.apache.storm.tuple.Fields in project storm by apache.
From the class TestSequenceFileBolt, method generateTestTuple:
private Tuple generateTestTuple(Long key, String value) {
    TopologyBuilder builder = new TopologyBuilder();
    GeneralTopologyContext topologyContext = new GeneralTopologyContext(
            builder.createTopology(), new Config(),
            new HashMap(), new HashMap(), new HashMap(), "") {
        @Override
        public Fields getComponentOutputFields(String componentId, String streamId) {
            return new Fields("key", "value");
        }
    };
    return new TupleImpl(topologyContext, new Values(key, value), 1, "");
}
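The same anonymous GeneralTopologyContext trick appears in all three generateTestTuple helpers above, differing only in the declared Fields and the packed Values. A hedged consolidation (a hypothetical test utility, not present in the project, relying on the same 4-argument TupleImpl constructor used in these snippets):

import java.util.HashMap;
import org.apache.storm.Config;
import org.apache.storm.task.GeneralTopologyContext;
import org.apache.storm.topology.TopologyBuilder;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Tuple;
import org.apache.storm.tuple.TupleImpl;
import org.apache.storm.tuple.Values;

// Hypothetical shared helper: builds a Tuple whose source component claims
// to emit the given fields, regardless of component or stream id.
public final class TestTuples {

    private TestTuples() {
    }

    public static Tuple forFields(final Fields fields, Values values) {
        TopologyBuilder builder = new TopologyBuilder();
        GeneralTopologyContext context = new GeneralTopologyContext(
                builder.createTopology(), new Config(),
                new HashMap(), new HashMap(), new HashMap(), "") {
            @Override
            public Fields getComponentOutputFields(String componentId, String streamId) {
                return fields;
            }
        };
        return new TupleImpl(context, values, 1, "");
    }
}

With such a helper, the methods above collapse to calls like TestTuples.forFields(new Fields("key", "value"), new Values(key, value)).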
Use of org.apache.storm.tuple.Fields in project storm by apache.
From the class TestHiveBolt, method testWithoutPartitions:
@Test
public void testWithoutPartitions() throws Exception {
    HiveSetupUtil.dropDB(conf, dbName1);
    HiveSetupUtil.createDbAndTable(conf, dbName1, tblName1, null, colNames, colTypes, null, dbLocation);
    DelimitedRecordHiveMapper mapper = new DelimitedRecordHiveMapper()
            .withColumnFields(new Fields(colNames));
    HiveOptions hiveOptions = new HiveOptions(metaStoreURI, dbName1, tblName1, mapper)
            .withTxnsPerBatch(2)
            .withBatchSize(2)
            .withAutoCreatePartitions(false);
    bolt = new HiveBolt(hiveOptions);
    bolt.prepare(config, null, collector);
    Integer id = 100;
    String msg = "test-123";
    String city = "sunnyvale";
    String state = "ca";
    checkRecordCountInTable(tblName1, dbName1, 0);
    Set<Tuple> tupleSet = new HashSet<Tuple>();
    for (int i = 0; i < 4; i++) {
        Tuple tuple = generateTestTuple(id, msg, city, state);
        bolt.execute(tuple);
        tupleSet.add(tuple);
    }
    for (Tuple t : tupleSet) {
        verify(collector).ack(t);
    }
    bolt.cleanup();
    checkRecordCountInTable(tblName1, dbName1, 4);
}
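colNames, colTypes, conf, and the other fixtures come from the enclosing TestHiveBolt class and are not reproduced on this page. A hedged sketch of plausible fixture values, assuming the storm-hive package layout; the actual constants in TestHiveBolt may differ:

import org.apache.storm.hive.bolt.mapper.DelimitedRecordHiveMapper;
import org.apache.storm.tuple.Fields;

// Plausible fixtures only; the real TestHiveBolt constants may differ.
final class HiveMapperFixture {

    static final String[] COL_NAMES = { "id", "msg", "city", "state" };

    // The mapper ties each Hive column to the tuple field of the same name,
    // so tuples passed to the bolt must declare exactly these field names.
    static DelimitedRecordHiveMapper mapper() {
        return new DelimitedRecordHiveMapper()
                .withColumnFields(new Fields(COL_NAMES));
    }
}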