use of org.apache.samza.system.SystemStream in project samza by apache.
the class TestClusterBasedJobCoordinator method setUp.
@Before
public void setUp() {
  configMap = new HashMap<>();
  configMap.put("job.name", "test-job");
  configMap.put("job.coordinator.system", "kafka");
  configMap.put("task.inputs", "kafka.topic1");
  configMap.put("systems.kafka.samza.factory", "org.apache.samza.system.MockSystemFactory");
  configMap.put("samza.cluster-manager.factory", "org.apache.samza.clustermanager.MockClusterResourceManagerFactory");
  configMap.put("cluster-manager.fault-domain-manager.factory", "org.apache.samza.clustermanager.MockFaultDomainManagerFactory");
  configMap.put("job.coordinator.monitor-partition-change.frequency.ms", "1");
  MockSystemFactory.MSG_QUEUES.put(new SystemStreamPartition("kafka", "topic1", new Partition(0)), new ArrayList<>());
  MockSystemFactory.MSG_QUEUES.put(new SystemStreamPartition("kafka", "__samza_coordinator_test-job_1", new Partition(0)), new ArrayList<>());
  MockCoordinatorStreamSystemFactory.enableMockConsumerCache();
  PowerMockito.mockStatic(CoordinatorStreamUtil.class);
  when(CoordinatorStreamUtil.getCoordinatorSystemFactory(anyObject())).thenReturn(new MockCoordinatorStreamSystemFactory());
  when(CoordinatorStreamUtil.getCoordinatorSystemStream(anyObject())).thenReturn(new SystemStream("kafka", "test"));
  when(CoordinatorStreamUtil.getCoordinatorStreamName(anyObject(), anyObject())).thenReturn("test");
}
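For reference, the SystemStreamPartition keys registered in MSG_QUEUES above are simply a SystemStream plus a Partition, and the mocked getCoordinatorSystemStream call returns a bare SystemStream. A minimal, self-contained sketch of those two value types follows; the class name SystemStreamSketch is illustrative, not part of Samza.
import org.apache.samza.Partition;
import org.apache.samza.system.SystemStream;
import org.apache.samza.system.SystemStreamPartition;

public class SystemStreamSketch {
  public static void main(String[] args) {
    // A SystemStream names a stream within a system, e.g. the "topic1" input registered above.
    SystemStream input = new SystemStream("kafka", "topic1");
    // A SystemStreamPartition additionally pins a partition, which is how MSG_QUEUES is keyed.
    SystemStreamPartition ssp = new SystemStreamPartition(input, new Partition(0));
    System.out.println(ssp.getSystem());                     // kafka
    System.out.println(ssp.getStream());                     // topic1
    System.out.println(ssp.getPartition().getPartitionId()); // 0
  }
}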
use of org.apache.samza.system.SystemStream in project samza by apache.
the class Log4jSystemConfig method getStreamSerdeName.
public String getStreamSerdeName(String systemName, String streamName) {
  StreamConfig streamConfig = new StreamConfig(this);
  Optional<String> option = streamConfig.getStreamMsgSerde(new SystemStream(systemName, streamName));
  return option.isPresent() ? option.get() : null;
}
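The isPresent()/get() ternary above is equivalent to option.orElse(null). A hedged, runnable sketch of the same lookup using StreamConfig directly; the config key "systems.kafka.streams.topic1.samza.msg.serde" and the serde name "json" are assumptions made for illustration, not values taken from the project.
import java.util.Collections;
import java.util.Optional;
import org.apache.samza.config.Config;
import org.apache.samza.config.MapConfig;
import org.apache.samza.config.StreamConfig;
import org.apache.samza.system.SystemStream;

public class SerdeLookupSketch {
  public static void main(String[] args) {
    // Assumed system-scoped serde key; adjust to whatever key the deployment actually uses.
    Config config = new MapConfig(
        Collections.singletonMap("systems.kafka.streams.topic1.samza.msg.serde", "json"));
    StreamConfig streamConfig = new StreamConfig(config);
    Optional<String> serde = streamConfig.getStreamMsgSerde(new SystemStream("kafka", "topic1"));
    // orElse(null) mirrors the ternary in getStreamSerdeName.
    System.out.println(serde.orElse(null));
  }
}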
use of org.apache.samza.system.SystemStream in project samza by apache.
the class TestAzureBlobAvroWriter method createOME.
private OutgoingMessageEnvelope createOME(String streamName) {
  SystemStream systemStream = new SystemStream(SYSTEM_NAME, streamName);
  SpecificRecord record = new SpecificRecordEvent();
  return new OutgoingMessageEnvelope(systemStream, record);
}
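A hedged usage sketch of the helper above. SYSTEM_NAME and SpecificRecordEvent are fixture names from the test class, so the sketch substitutes illustrative stand-ins and only exercises the OutgoingMessageEnvelope accessors.
import org.apache.samza.system.OutgoingMessageEnvelope;
import org.apache.samza.system.SystemStream;

public class CreateOmeSketch {
  public static void main(String[] args) {
    // Stand-ins for the test's SYSTEM_NAME constant and Avro record payload.
    SystemStream systemStream = new SystemStream("azureblob", "Topic1");
    Object record = "record-stand-in";
    OutgoingMessageEnvelope ome = new OutgoingMessageEnvelope(systemStream, record);
    // The envelope carries the destination SystemStream and the message payload.
    System.out.println(ome.getSystemStream().getSystem() + "." + ome.getSystemStream().getStream());
    System.out.println(ome.getMessage());
  }
}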
use of org.apache.samza.system.SystemStream in project samza by apache.
the class TestAzureBlobAvroWriter method testWriteByteArray.
@Test
public void testWriteByteArray() throws Exception {
  // azureBlobAvroWriter, ome, encodedRecord, and the mocks below are fixture fields of the test class.
  OutgoingMessageEnvelope omeEncoded = new OutgoingMessageEnvelope(new SystemStream(SYSTEM_NAME, "Topic1"), "randomString".getBytes());
  int numberOfMessages = 10;
  azureBlobAvroWriter.write(ome);
  for (int i = 0; i < numberOfMessages; ++i) {
    azureBlobAvroWriter.write(omeEncoded);
  }
  verify(mockDataFileWriter).appendEncoded(ByteBuffer.wrap(encodedRecord));
  verify(mockDataFileWriter, times(numberOfMessages)).appendEncoded(ByteBuffer.wrap((byte[]) omeEncoded.getMessage()));
  // +1 to account for the first ome, which is not encoded
  verify(mockAzureBlobOutputStream, times(numberOfMessages + 1)).incrementNumberOfRecordsInBlob();
}
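The verifications above hinge on the payload type: an envelope whose message is already a byte[] is appended as-is via appendEncoded, while the first ome carries an Avro record that the writer encodes itself. A small sketch of the two envelope shapes, with illustrative names; the writer behavior is mocked in the test and not reproduced here.
import org.apache.samza.system.OutgoingMessageEnvelope;
import org.apache.samza.system.SystemStream;

public class EnvelopePayloadSketch {
  public static void main(String[] args) {
    SystemStream stream = new SystemStream("azureblob", "Topic1");
    // Pre-encoded payload: the writer under test appends these bytes unchanged.
    OutgoingMessageEnvelope encoded = new OutgoingMessageEnvelope(stream, "randomString".getBytes());
    // Object payload (an Avro record in the real test): the writer encodes it before appending.
    OutgoingMessageEnvelope unencoded = new OutgoingMessageEnvelope(stream, new Object());
    System.out.println(encoded.getMessage() instanceof byte[]);   // true
    System.out.println(unencoded.getMessage() instanceof byte[]); // false
  }
}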
use of org.apache.samza.system.SystemStream in project samza by apache.
the class TestEventHubSystemProducer method testSendingToSpecificPartitionsWithInterceptor.
@Test
public void testSendingToSpecificPartitionsWithInterceptor() throws Exception {
  String systemName = "eventhubs";
  String streamName = "testStream";
  int numEvents = 10;
  int partitionId0 = 0;
  int partitionId1 = 1;
  Interceptor interceptor = new SwapFirstLastByteInterceptor();
  TestMetricsRegistry testMetrics = new TestMetricsRegistry();
  Map<String, Interceptor> interceptors = new HashMap<>();
  interceptors.put(streamName, interceptor);
  List<String> outgoingMessagesP0 = generateMessages(numEvents);
  List<String> outgoingMessagesP1 = generateMessages(numEvents);
  // Set configs
  Map<String, String> configMap = new HashMap<>();
  configMap.put(String.format(EventHubConfig.CONFIG_STREAM_LIST, systemName), streamName);
  configMap.put(String.format(EventHubConfig.CONFIG_STREAM_NAMESPACE, streamName), EVENTHUB_NAMESPACE);
  configMap.put(String.format(EventHubConfig.CONFIG_STREAM_SAS_KEY_NAME, streamName), EVENTHUB_KEY_NAME);
  configMap.put(String.format(EventHubConfig.CONFIG_STREAM_SAS_TOKEN, streamName), EVENTHUB_KEY);
  configMap.put(String.format(EventHubConfig.CONFIG_STREAM_ENTITYPATH, streamName), EVENTHUB_ENTITY1);
  configMap.put(String.format(EventHubConfig.CONFIG_PRODUCER_PARTITION_METHOD, systemName), PartitioningMethod.PARTITION_KEY_AS_PARTITION.toString());
  MapConfig config = new MapConfig(configMap);
  MockEventHubClientManagerFactory factory = new MockEventHubClientManagerFactory();
  EventHubSystemProducer producer = new EventHubSystemProducer(new EventHubConfig(config), systemName, factory, interceptors, testMetrics);
  SystemStream systemStream = new SystemStream(systemName, streamName);
  producer.register(SOURCE);
  producer.start();
  outgoingMessagesP0.forEach(message -> producer.send(SOURCE, new OutgoingMessageEnvelope(systemStream, partitionId0, null, message.getBytes())));
  outgoingMessagesP1.forEach(message -> producer.send(SOURCE, new OutgoingMessageEnvelope(systemStream, partitionId1, null, message.getBytes())));
  // Retrieve sent data
  List<String> receivedData0 = factory.getSentData(systemName, streamName, partitionId0).stream().map(eventData -> new String(eventData.getBytes())).collect(Collectors.toList());
  List<String> receivedData1 = factory.getSentData(systemName, streamName, partitionId1).stream().map(eventData -> new String(eventData.getBytes())).collect(Collectors.toList());
  List<String> expectedP0 = outgoingMessagesP0.stream().map(message -> new String(interceptor.intercept(message.getBytes()))).collect(Collectors.toList());
  List<String> expectedP1 = outgoingMessagesP1.stream().map(message -> new String(interceptor.intercept(message.getBytes()))).collect(Collectors.toList());
  Assert.assertTrue(expectedP0.equals(receivedData0));
  Assert.assertTrue(expectedP1.equals(receivedData1));
}
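The producer.send calls above use the four-argument OutgoingMessageEnvelope constructor, where the second argument is the partition key that the PARTITION_KEY_AS_PARTITION method maps to a physical partition. A minimal sketch of that envelope shape with illustrative values; it only shows what the envelope carries, not the Event Hubs send path.
import org.apache.samza.system.OutgoingMessageEnvelope;
import org.apache.samza.system.SystemStream;

public class PartitionKeyEnvelopeSketch {
  public static void main(String[] args) {
    SystemStream stream = new SystemStream("eventhubs", "testStream");
    // (systemStream, partitionKey, key, message): partition id 0 doubles as the partition key here.
    OutgoingMessageEnvelope envelope =
        new OutgoingMessageEnvelope(stream, 0, null, "payload".getBytes());
    System.out.println(envelope.getPartitionKey());              // 0
    System.out.println(envelope.getKey());                       // null
    System.out.println(((byte[]) envelope.getMessage()).length); // 7
  }
}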