use of org.apache.samza.job.model.ContainerModel in project samza by apache.
In class TestZkJobCoordinator, method testStartWorkWithLastActiveJobModel:
@Test
public void testStartWorkWithLastActiveJobModel() {
  final TaskName taskName = new TaskName("task1");
  final ContainerModel mockContainerModel = mock(ContainerModel.class);
  final JobCoordinatorListener mockListener = mock(JobCoordinatorListener.class);
  final JobModel mockJobModel = mock(JobModel.class);
  ZkJobCoordinator zkJobCoordinator = Mockito.spy(new ZkJobCoordinator(PROCESSOR_ID, new MapConfig(),
      new NoOpMetricsRegistry(), zkUtils, zkMetadataStore, coordinatorStreamStore));

  when(mockContainerModel.getTasks()).thenReturn(ImmutableMap.of(taskName, mock(TaskModel.class)));
  when(mockJobModel.getContainers()).thenReturn(ImmutableMap.of(PROCESSOR_ID, mockContainerModel));
  when(zkUtils.getLastActiveJobModelVersion()).thenReturn(TEST_JOB_MODEL_VERSION);
  when(zkUtils.getJobModelVersion()).thenReturn(TEST_JOB_MODEL_VERSION);
  doReturn(mockJobModel).when(zkJobCoordinator).readJobModelFromMetadataStore(TEST_JOB_MODEL_VERSION);
  zkJobCoordinator.setListener(mockListener);

  zkJobCoordinator.startWorkWithLastActiveJobModel();

  verify(mockListener, times(1)).onJobModelExpired();
  verify(zkUtils, times(1)).writeTaskLocality(eq(taskName), any());
  verify(mockListener, times(1)).onNewJobModel(PROCESSOR_ID, mockJobModel);
  assertEquals("Active job model should be updated with the new job model", mockJobModel,
      zkJobCoordinator.getActiveJobModel());
}
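The test checks that both listener callbacks fire, but not their relative order. If, as in the usual ZkJobCoordinator rebalance sequence, the old job model is expired before the new one is published, a hypothetical extra assertion (not part of the Samza test) could pin that ordering down with Mockito's InOrder API:

// Hypothetical ordering check, appended after the verifications above; assumes the
// expire-then-publish callback sequence and reuses mockListener/mockJobModel from the test.
InOrder inOrder = Mockito.inOrder(mockListener);
inOrder.verify(mockListener).onJobModelExpired();
inOrder.verify(mockListener).onNewJobModel(PROCESSOR_ID, mockJobModel);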
use of org.apache.samza.job.model.ContainerModel in project samza by apache.
In class TestEmbeddedTaggedRateLimiter, method initRateLimiter:
static void initRateLimiter(RateLimiter rateLimiter) {
  Map<TaskName, TaskModel> tasks = IntStream.range(0, NUMBER_OF_TASKS)
      .mapToObj(i -> new TaskName("task-" + i))
      .collect(Collectors.toMap(Function.identity(), x -> mock(TaskModel.class)));
  ContainerModel containerModel = mock(ContainerModel.class);
  when(containerModel.getTasks()).thenReturn(tasks);

  JobModel jobModel = mock(JobModel.class);
  Map<String, ContainerModel> containerModelMap = new HashMap<>();
  containerModelMap.put("container-1", containerModel);
  when(jobModel.getContainers()).thenReturn(containerModelMap);

  Context context = mock(Context.class);
  TaskContextImpl taskContext = mock(TaskContextImpl.class);
  when(context.getTaskContext()).thenReturn(taskContext);
  when(taskContext.getJobModel()).thenReturn(jobModel);
  when(context.getTaskContext().getTaskModel()).thenReturn(mock(TaskModel.class));

  rateLimiter.init(context);
}
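This helper builds just enough mocked Context for RateLimiter.init(Context): a JobModel holding NUMBER_OF_TASKS mocked tasks in a single container, so init can see the task layout. A hypothetical caller, not part of the Samza test, might look like the following sketch, assuming EmbeddedTaggedRateLimiter's tag-to-rate constructor and the tagged acquire overload; the tag names and rates are illustrative only:

// Hypothetical usage of the helper above (assumed constructor and acquire overload;
// tag names and per-second rates are made up for illustration).
RateLimiter rateLimiter = new EmbeddedTaggedRateLimiter(ImmutableMap.of("red", 1000, "green", 2000));
initRateLimiter(rateLimiter);
// Acquire one credit for the "red" tag; blocks once the current budget for that tag is spent.
rateLimiter.acquire(ImmutableMap.of("red", 1));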
use of org.apache.samza.job.model.ContainerModel in project samza by apache.
In class TestOperatorImplGraph, method testPartitionByChain:
@Test
public void testPartitionByChain() {
  String inputStreamId = "input";
  String inputSystem = "input-system";
  String inputPhysicalName = "input-stream";
  String outputStreamId = "output";
  String outputSystem = "output-system";
  String outputPhysicalName = "output-stream";
  String intermediateStreamId = "jobName-jobId-partition_by-p1";
  String intermediateSystem = "intermediate-system";

  HashMap<String, String> configs = new HashMap<>();
  configs.put(JobConfig.JOB_NAME, "jobName");
  configs.put(JobConfig.JOB_ID, "jobId");
  configs.put(JobConfig.JOB_DEFAULT_SYSTEM, intermediateSystem);
  StreamTestUtils.addStreamConfigs(configs, inputStreamId, inputSystem, inputPhysicalName);
  StreamTestUtils.addStreamConfigs(configs, outputStreamId, outputSystem, outputPhysicalName);
  Config config = new MapConfig(configs);
  when(this.context.getJobContext().getConfig()).thenReturn(config);

  StreamApplicationDescriptorImpl graphSpec = new StreamApplicationDescriptorImpl(appDesc -> {
    GenericSystemDescriptor isd = new GenericSystemDescriptor(inputSystem, "mockFactoryClass");
    GenericSystemDescriptor osd = new GenericSystemDescriptor(outputSystem, "mockFactoryClass");
    GenericInputDescriptor inputDescriptor = isd.getInputDescriptor(inputStreamId, mock(Serde.class));
    GenericOutputDescriptor outputDescriptor = osd.getOutputDescriptor(outputStreamId,
        KVSerde.of(mock(IntegerSerde.class), mock(StringSerde.class)));
    MessageStream<Object> inputStream = appDesc.getInputStream(inputDescriptor);
    OutputStream<KV<Integer, String>> outputStream = appDesc.getOutputStream(outputDescriptor);
    inputStream
        .partitionBy(Object::hashCode, Object::toString, KVSerde.of(mock(IntegerSerde.class), mock(StringSerde.class)), "p1")
        .sendTo(outputStream);
  }, config);

  JobModel jobModel = mock(JobModel.class);
  ContainerModel containerModel = mock(ContainerModel.class);
  TaskModel taskModel = mock(TaskModel.class);
  when(jobModel.getContainers()).thenReturn(Collections.singletonMap("0", containerModel));
  when(containerModel.getTasks()).thenReturn(Collections.singletonMap(new TaskName("task 0"), taskModel));
  when(taskModel.getSystemStreamPartitions()).thenReturn(Collections.emptySet());
  when(((TaskContextImpl) this.context.getTaskContext()).getJobModel()).thenReturn(jobModel);

  OperatorImplGraph opImplGraph = new OperatorImplGraph(graphSpec.getOperatorSpecGraph(), this.context, mock(Clock.class));

  InputOperatorImpl inputOpImpl = opImplGraph.getInputOperator(new SystemStream(inputSystem, inputPhysicalName));
  assertEquals(1, inputOpImpl.registeredOperators.size());
  OperatorImpl partitionByOpImpl = (PartitionByOperatorImpl) inputOpImpl.registeredOperators.iterator().next();
  // the partitionBy operator is terminal in this sub-DAG (no registered downstream operators),
  // but it is paired with an input operator for the intermediate (repartitioned) stream below
  assertEquals(0, partitionByOpImpl.registeredOperators.size());
  assertEquals(OpCode.PARTITION_BY, partitionByOpImpl.getOperatorSpec().getOpCode());

  InputOperatorImpl repartitionedInputOpImpl = opImplGraph.getInputOperator(new SystemStream(intermediateSystem, intermediateStreamId));
  assertEquals(1, repartitionedInputOpImpl.registeredOperators.size());
  OperatorImpl sendToOpImpl = (OutputOperatorImpl) repartitionedInputOpImpl.registeredOperators.iterator().next();
  assertEquals(0, sendToOpImpl.registeredOperators.size());
  assertEquals(OpCode.SEND_TO, sendToOpImpl.getOperatorSpec().getOpCode());
}
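The same mocked single-container JobModel wiring appears in several of these snippets: a JobModel that returns one ContainerModel, which in turn returns its TaskModels. A hypothetical helper, not present in the Samza tests, could factor that pattern out:

// Hypothetical test helper capturing the recurring mock wiring: one container, one task, no SSPs.
private static JobModel mockSingleTaskJobModel(String containerId, TaskName taskName) {
  TaskModel taskModel = mock(TaskModel.class);
  when(taskModel.getSystemStreamPartitions()).thenReturn(Collections.emptySet());
  ContainerModel containerModel = mock(ContainerModel.class);
  when(containerModel.getTasks()).thenReturn(Collections.singletonMap(taskName, taskModel));
  JobModel jobModel = mock(JobModel.class);
  when(jobModel.getContainers()).thenReturn(Collections.singletonMap(containerId, containerModel));
  return jobModel;
}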
use of org.apache.samza.job.model.ContainerModel in project samza by apache.
In class TestOperatorImplGraph, method testGetStreamToConsumerTasks:
@Test
public void testGetStreamToConsumerTasks() {
  String system = "test-system";
  String streamId0 = "test-stream-0";
  String streamId1 = "test-stream-1";

  HashMap<String, String> configs = new HashMap<>();
  configs.put(JobConfig.JOB_NAME, "test-app");
  configs.put(JobConfig.JOB_DEFAULT_SYSTEM, "test-system");
  StreamTestUtils.addStreamConfigs(configs, streamId0, system, streamId0);
  StreamTestUtils.addStreamConfigs(configs, streamId1, system, streamId1);
  Config config = new MapConfig(configs);
  when(this.context.getJobContext().getConfig()).thenReturn(config);

  SystemStreamPartition ssp0 = new SystemStreamPartition(system, streamId0, new Partition(0));
  SystemStreamPartition ssp1 = new SystemStreamPartition(system, streamId0, new Partition(1));
  SystemStreamPartition ssp2 = new SystemStreamPartition(system, streamId1, new Partition(0));

  TaskName task0 = new TaskName("Task 0");
  TaskName task1 = new TaskName("Task 1");
  Set<SystemStreamPartition> ssps = new HashSet<>();
  ssps.add(ssp0);
  ssps.add(ssp2);
  TaskModel tm0 = new TaskModel(task0, ssps, new Partition(0));
  ContainerModel cm0 = new ContainerModel("c0", Collections.singletonMap(task0, tm0));
  TaskModel tm1 = new TaskModel(task1, Collections.singleton(ssp1), new Partition(1));
  ContainerModel cm1 = new ContainerModel("c1", Collections.singletonMap(task1, tm1));
  Map<String, ContainerModel> cms = new HashMap<>();
  cms.put(cm0.getId(), cm0);
  cms.put(cm1.getId(), cm1);
  JobModel jobModel = new JobModel(config, cms);

  Multimap<SystemStream, String> streamToTasks = OperatorImplGraph.getStreamToConsumerTasks(jobModel);
  assertEquals(streamToTasks.get(ssp0.getSystemStream()).size(), 2);
  assertEquals(streamToTasks.get(ssp2.getSystemStream()).size(), 1);
}
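getStreamToConsumerTasks returns a Multimap from each SystemStream to the names of the tasks that consume it; here test-stream-0 is consumed by both tasks (ssp0 by Task 0, ssp1 by Task 1) while test-stream-1 is consumed only by Task 0. Hypothetical stronger assertions, not in the Samza test, could check the task names rather than just the counts:

// Hypothetical follow-up assertions reusing streamToTasks, ssp0, and ssp2 from the test above;
// assumes the multimap values are the task name strings.
assertEquals(ImmutableSet.of("Task 0", "Task 1"), ImmutableSet.copyOf(streamToTasks.get(ssp0.getSystemStream())));
assertEquals(ImmutableSet.of("Task 0"), ImmutableSet.copyOf(streamToTasks.get(ssp2.getSystemStream())));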
use of org.apache.samza.job.model.ContainerModel in project samza by apache.
In class ContainerStorageManager, method getChangelogSystemStreams:
/**
 * For each standby task, we remove its changelog SSPs from the changelog SSP map and add them to the task's taskSideInputSSPs.
 * The task's sideInputManager will consume and restore these as well.
 *
 * @param containerModel the container's model
 * @param changelogSystemStreams the passed-in map of store name to changelog SystemStream
 * @return a map of store name to changelog SystemStream covering the active tasks' changelogs (standby tasks'
 *         changelog SSPs have been moved to side inputs), assuming no two stores share the same changelog system-stream
 */
private Map<String, SystemStream> getChangelogSystemStreams(ContainerModel containerModel, Map<String, SystemStream> changelogSystemStreams) {
  if (MapUtils.invertMap(changelogSystemStreams).size() != changelogSystemStreams.size()) {
    throw new SamzaException("Two stores cannot have the same changelog system-stream");
  }

  Map<SystemStreamPartition, String> changelogSSPToStore = new HashMap<>();
  changelogSystemStreams.forEach((storeName, systemStream) ->
      containerModel.getTasks().forEach((taskName, taskModel) ->
          changelogSSPToStore.put(new SystemStreamPartition(systemStream, taskModel.getChangelogPartition()), storeName)));

  getTasks(containerModel, TaskMode.Standby).forEach((taskName, taskModel) -> {
    taskSideInputStoreSSPs.putIfAbsent(taskName, new HashMap<>());
    changelogSystemStreams.forEach((storeName, systemStream) -> {
      SystemStreamPartition ssp = new SystemStreamPartition(systemStream, taskModel.getChangelogPartition());
      changelogSSPToStore.remove(ssp);
      taskSideInputStoreSSPs.get(taskName).put(storeName, Collections.singleton(ssp));
    });
  });

  // changelogSystemStreams correspond only to active tasks (since those of standby-tasks moved to sideInputs above)
  return MapUtils.invertMap(changelogSSPToStore).entrySet().stream()
      .collect(Collectors.toMap(Map.Entry::getKey, x -> x.getValue().getSystemStream()));
}
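The MapUtils.invertMap size comparison is what enforces the "no two stores share a changelog" assumption: inverting a storeName-to-SystemStream map collapses any duplicated SystemStream into a single key, shrinking the map. A standalone sketch of that check, not part of ContainerStorageManager, with made-up system and stream names:

// Minimal illustration of the uniqueness guard; "kafka"/"shared-changelog" are illustrative only.
Map<String, SystemStream> changelogSystemStreams = new HashMap<>();
changelogSystemStreams.put("store-a", new SystemStream("kafka", "shared-changelog"));
changelogSystemStreams.put("store-b", new SystemStream("kafka", "shared-changelog"));
// Two keys map to equal SystemStreams, so the inverted map has only one entry.
boolean duplicateChangelog = MapUtils.invertMap(changelogSystemStreams).size() != changelogSystemStreams.size();
// duplicateChangelog == true, which is exactly the condition that triggers the SamzaException above.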