Use of org.apache.samza.checkpoint.Checkpoint in project samza by apache.
From the class TestTaskStorageCommitManager, method testPersistToFileSystemCheckpointV2Only.
@Test
public void testPersistToFileSystemCheckpointV2Only() throws IOException {
ContainerStorageManager containerStorageManager = mock(ContainerStorageManager.class);
StorageEngine mockLPStore = mock(StorageEngine.class);
StoreProperties lpStoreProps = mock(StoreProperties.class);
when(mockLPStore.getStoreProperties()).thenReturn(lpStoreProps);
when(lpStoreProps.isPersistedToDisk()).thenReturn(true);
when(lpStoreProps.isDurableStore()).thenReturn(true);
Path mockPath = mock(Path.class);
when(mockLPStore.checkpoint(any())).thenReturn(Optional.of(mockPath));
StorageEngine mockPStore = mock(StorageEngine.class);
StoreProperties pStoreProps = mock(StoreProperties.class);
when(mockPStore.getStoreProperties()).thenReturn(pStoreProps);
when(pStoreProps.isPersistedToDisk()).thenReturn(true);
when(pStoreProps.isDurableStore()).thenReturn(false);
StorageEngine mockLIStore = mock(StorageEngine.class);
StoreProperties liStoreProps = mock(StoreProperties.class);
when(mockLIStore.getStoreProperties()).thenReturn(liStoreProps);
when(liStoreProps.isPersistedToDisk()).thenReturn(false);
when(liStoreProps.isDurableStore()).thenReturn(true);
StorageEngine mockIStore = mock(StorageEngine.class);
StoreProperties iStoreProps = mock(StoreProperties.class);
when(mockIStore.getStoreProperties()).thenReturn(iStoreProps);
when(iStoreProps.isPersistedToDisk()).thenReturn(false);
when(iStoreProps.isDurableStore()).thenReturn(false);
Map<String, StorageEngine> taskStores = ImmutableMap.of(
    "loggedPersistentStore", mockLPStore,
    "persistentStore", mockPStore,
    "loggedInMemStore", mockLIStore,
    "inMemStore", mockIStore);
Partition changelogPartition = new Partition(0);
SystemStream changelogSystemStream = new SystemStream("changelogSystem", "changelogStream");
SystemStreamPartition changelogSSP = new SystemStreamPartition(changelogSystemStream, changelogPartition);
Map<String, SystemStream> storeChangelogsStreams = ImmutableMap.of(
    "loggedPersistentStore", changelogSystemStream,
    "loggedInMemStore", new SystemStream("system", "stream"));
StorageManagerUtil storageManagerUtil = mock(StorageManagerUtil.class);
File durableStoreDir = new File("durableStorePath");
when(storageManagerUtil.getTaskStoreDir(eq(durableStoreDir), eq("loggedPersistentStore"), any(), any())).thenReturn(durableStoreDir);
TaskName taskName = new TaskName("task");
TaskInstanceMetrics metrics = mock(TaskInstanceMetrics.class);
Timer checkpointTimer = mock(Timer.class);
when(metrics.storeCheckpointNs()).thenReturn(checkpointTimer);
when(containerStorageManager.getAllStores(taskName)).thenReturn(taskStores);
TaskStorageCommitManager commitManager = spy(new TaskStorageCommitManager(
    taskName, Collections.emptyMap(), containerStorageManager, storeChangelogsStreams,
    changelogPartition, null, null, ForkJoinPool.commonPool(), storageManagerUtil,
    durableStoreDir, metrics));
doNothing().when(commitManager).writeChangelogOffsetFile(any(), any(), any(), any());
when(storageManagerUtil.getStoreCheckpointDir(any(File.class), any(CheckpointId.class))).thenAnswer((Answer<String>) invocation -> {
File file = invocation.getArgumentAt(0, File.class);
CheckpointId checkpointId = invocation.getArgumentAt(1, CheckpointId.class);
return file + "-" + checkpointId;
});
CheckpointId newCheckpointId = CheckpointId.create();
Map<String, String> storeSCM = ImmutableMap.of(
    "loggedPersistentStore", "system;loggedPersistentStoreStream;1",
    "persistentStore", "system;persistentStoreStream;1",
    "loggedInMemStore", "system;loggedInMemStoreStream;1",
    "inMemStore", "system;inMemStoreStream;1");
CheckpointV2 checkpoint = new CheckpointV2(newCheckpointId, Collections.emptyMap(), Collections.singletonMap("factory", storeSCM));
commitManager.init();
// invoke persist to file system
commitManager.writeCheckpointToStoreDirectories(checkpoint);
// Validate only durable and persisted stores are persisted
verify(storageManagerUtil).getTaskStoreDir(eq(durableStoreDir), eq("loggedPersistentStore"), eq(taskName), any());
File checkpointPath = Paths.get(storageManagerUtil.getStoreCheckpointDir(durableStoreDir, newCheckpointId)).toFile();
verify(storageManagerUtil).writeCheckpointV2File(eq(checkpointPath), eq(checkpoint));
}
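For reference, the checkpoint handed to writeCheckpointToStoreDirectories above is a CheckpointV2 whose third argument nests per-store state checkpoint markers under a backend factory name. A minimal sketch of that structure, assuming the same imports as the test and using placeholder store and factory names:

// Sketch only: placeholder names, mirroring the checkpoint built in the test above.
CheckpointId checkpointId = CheckpointId.create();
// inner map: store name -> state checkpoint marker (SCM)
Map<String, String> storeToSCM = ImmutableMap.of("loggedPersistentStore", "system;loggedPersistentStoreStream;1");
// outer map: state backend factory name -> per-store SCMs
Map<String, Map<String, String>> factoryToSCMs = ImmutableMap.of("factory", storeToSCM);
CheckpointV2 checkpointV2 = new CheckpointV2(checkpointId, Collections.emptyMap(), factoryToSCMs);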
Use of org.apache.samza.checkpoint.Checkpoint in project samza by apache.
From the class TestBlobStoreBackupManager, method testUploadWithPreviousCheckpoints.
@Test
public void testUploadWithPreviousCheckpoints() throws IOException {
// Track directory for post cleanup
List<String> checkpointDirsToClean = new ArrayList<>();
// Setup: init backup manager with previous checkpoints
// indexBlobIdAndLocalRemoteSnapshotsPair = setupRemoteAndLocalSnapshots(true);
// map of store name to previous snapshot index blob id
Map<String, String> previousCheckpoints = indexBlobIdAndLocalRemoteSnapshotsPair.entrySet().stream()
    .collect(Collectors.toMap(e -> e.getValue().getLeft(), e -> e.getValue().getRight().getPrevSnapshotIndexBlobId().get()));
Checkpoint checkpoint = new CheckpointV2(checkpointId, new HashMap<>(), ImmutableMap.of(BlobStoreStateBackendFactory.class.getName(), previousCheckpoints));
when(blobStoreUtil.getStoreSnapshotIndexes(anyString(), anyString(), anyString(), any(Checkpoint.class), anySetOf(String.class))).thenCallRealMethod();
blobStoreBackupManager.init(checkpoint);
// mock: set task store dir to return corresponding test local store and create checkpoint dir
ArgumentCaptor<String> stringCaptor = ArgumentCaptor.forClass(String.class);
when(storageManagerUtil.getTaskStoreDir(any(File.class), stringCaptor.capture(), any(TaskName.class), any(TaskMode.class))).then((Answer<File>) invocation -> {
String storeName = invocation.getArgumentAt(1, String.class);
String snapshotIndexBlobId = testStoreNameAndSCMMap.get(storeName);
String storeDir = indexBlobIdAndLocalRemoteSnapshotsPair.get(snapshotIndexBlobId).getLeft();
try {
BlobStoreTestUtil.createTestCheckpointDirectory(storeDir, checkpointId.serialize());
checkpointDirsToClean.add(storeDir + "-" + checkpointId.serialize());
} catch (IOException e) {
Assert.fail("Couldn't create checkpoint directory. Test failed.");
}
return new File(storeDir);
});
ArgumentCaptor<File> storeDirCaptor = ArgumentCaptor.forClass(File.class);
when(storageManagerUtil.getStoreCheckpointDir(storeDirCaptor.capture(), eq(checkpointId))).thenAnswer(new Answer<String>() {
@Override
public String answer(InvocationOnMock invocation) throws Throwable {
File storeDir = invocation.getArgumentAt(0, File.class);
return storeDir.getAbsolutePath() + "-" + checkpointId.serialize();
}
});
// mock: mock putDir and capture DirDiff
SortedSet<DirDiff> actualDirDiffs = new TreeSet<>(Comparator.comparing(DirDiff::getDirName));
ArgumentCaptor<DirDiff> dirDiffCaptor = ArgumentCaptor.forClass(DirDiff.class);
ArgumentCaptor<SnapshotMetadata> snapshotMetadataCaptor = ArgumentCaptor.forClass(SnapshotMetadata.class);
when(blobStoreUtil.putDir(dirDiffCaptor.capture(), snapshotMetadataCaptor.capture())).then((Answer<CompletableFuture<DirIndex>>) invocation -> {
DirDiff dirDiff = invocation.getArgumentAt(0, DirDiff.class);
SnapshotMetadata snapshotMetadata = invocation.getArgumentAt(1, SnapshotMetadata.class);
actualDirDiffs.add(dirDiff);
SnapshotIndex snapshotIndex = testBlobStore.get(testStoreNameAndSCMMap.get(snapshotMetadata.getStoreName()));
return CompletableFuture.completedFuture(snapshotIndex.getDirIndex());
});
// mock: mock putSnapshotIndex and capture previous snapshot index
SortedSet<SnapshotIndex> expectedSnapshotIndexesUploaded = indexBlobIdAndLocalRemoteSnapshotsPair.values().stream()
    .map(Pair::getRight)
    .collect(Collectors.toCollection(() -> new TreeSet<>(Comparator.comparing(SnapshotIndex::getCreationTimeMillis))));
SortedSet<SnapshotIndex> actualSnapshotIndexesUploaded = new TreeSet<>(Comparator.comparing(SnapshotIndex::getCreationTimeMillis));
SortedSet<String> actualPreviousSnapshotIndexBlobIds = new TreeSet<>();
SortedSet<String> expectedPreviousSnapshotIndexBlobIds = new TreeSet<>(previousCheckpoints.values());
ArgumentCaptor<SnapshotIndex> snapshotIndexCaptor = ArgumentCaptor.forClass(SnapshotIndex.class);
when(blobStoreUtil.putSnapshotIndex(snapshotIndexCaptor.capture())).then((Answer<CompletableFuture<String>>) invocation -> {
SnapshotIndex snapshotIndex = invocation.getArgumentAt(0, SnapshotIndex.class);
actualSnapshotIndexesUploaded.add(snapshotIndex);
if (snapshotIndex.getPrevSnapshotIndexBlobId().isPresent()) {
actualPreviousSnapshotIndexBlobIds.add(snapshotIndex.getPrevSnapshotIndexBlobId().get());
}
return CompletableFuture.completedFuture("random-blob-id");
});
// execute
blobStoreBackupManager.upload(checkpointId, ImmutableMap.of());
TreeSet<DirDiff> expectedDirDiffs = indexBlobIdAndLocalRemoteSnapshotsPair.values().stream()
    .map(localRemoteSnapshotPair -> DirDiffUtil.getDirDiff(
        new File(localRemoteSnapshotPair.getLeft() + "-" + checkpointId.serialize()),
        localRemoteSnapshotPair.getRight().getDirIndex(),
        DirDiffUtil.areSameFile(false)))
    .collect(Collectors.toCollection(() -> new TreeSet<>(Comparator.comparing(DirDiff::getDirName))));
// assert - assert all DirDiffs are put to the blob store
Assert.assertEquals(actualDirDiffs, expectedDirDiffs);
// assert - assert the previous snapshot index blob ids match the previous checkpoints
Assert.assertEquals(actualPreviousSnapshotIndexBlobIds, expectedPreviousSnapshotIndexBlobIds);
// assert - assert all snapshot indexes are uploaded
Assert.assertEquals(actualSnapshotIndexesUploaded, expectedSnapshotIndexesUploaded);
// cleanup
checkpointDirsToClean.forEach(path -> {
try {
FileUtils.deleteDirectory(new File(path));
} catch (IOException exception) {
Assert.fail("Failed to cleanup temporary checkpoint dirs.");
}
});
}
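Both stubs above use Mockito Answers so the returned values are derived from the actual invocation arguments; a condensed sketch of that pattern, using the same Mockito 1.x getArgumentAt API as the test and a hypothetical standalone mock:

// Hypothetical mock illustrating the Answer-based stubbing used above.
StorageManagerUtil util = mock(StorageManagerUtil.class);
when(util.getStoreCheckpointDir(any(File.class), any(CheckpointId.class)))
    .thenAnswer((Answer<String>) invocation -> {
      File storeDir = invocation.getArgumentAt(0, File.class);
      CheckpointId id = invocation.getArgumentAt(1, CheckpointId.class);
      // checkpoint directories follow the "<storeDir>-<serialized checkpoint id>" convention
      return storeDir.getAbsolutePath() + "-" + id.serialize();
    });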
Use of org.apache.samza.checkpoint.Checkpoint in project samza by apache.
From the class TestBlobStoreBackupManager, method testInitWithInvalidCheckpoint.
@Test
public void testInitWithInvalidCheckpoint() {
// init called with null checkpoint storeStorageEngineMap
blobStoreBackupManager.init(null);
// verify delete snapshot index blob called from init 0 times because prevSnapshotMap returned from init is empty
// in case of null checkpoint.
verify(blobStoreUtil, times(0)).deleteSnapshotIndexBlob(anyString(), any(Metadata.class));
when(blobStoreUtil.getStoreSnapshotIndexes(anyString(), anyString(), anyString(), any(Checkpoint.class), anySetOf(String.class))).thenCallRealMethod();
// init called with Checkpoint V1 -> unsupported
Checkpoint checkpoint = new CheckpointV1(new HashMap<>());
try {
blobStoreBackupManager.init(checkpoint);
} catch (SamzaException exception) {
Assert.fail("Checkpoint V1 is expected to only log warning.");
}
}
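The distinction the test relies on is that CheckpointV1 only carries input offsets, while CheckpointV2 also carries per-factory state checkpoint markers; a blob store backend therefore has nothing to restore from a V1 checkpoint and only logs a warning. A small sketch contrasting the two, with hypothetical offsets and store names:

// Hypothetical values; contrasts the two checkpoint versions used in these tests.
SystemStreamPartition ssp = new SystemStreamPartition("system", "stream", new Partition(0));
// V1: input offsets only, no state checkpoint markers for any backend factory.
Checkpoint v1 = new CheckpointV1(ImmutableMap.of(ssp, "42"));
// V2: input offsets plus factory name -> (store name -> state checkpoint marker).
Checkpoint v2 = new CheckpointV2(CheckpointId.create(), ImmutableMap.of(ssp, "42"),
    ImmutableMap.of(BlobStoreStateBackendFactory.class.getName(),
        ImmutableMap.of("storeName", "snapshotIndexBlobId")));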
Use of org.apache.samza.checkpoint.Checkpoint in project samza by apache.
From the class TestBlobStoreBackupManager, method testUploadWithNoPreviousCheckpoints.
@Test
public void testUploadWithNoPreviousCheckpoints() throws IOException {
// Track directory for post cleanup
List<String> checkpointDirsToClean = new ArrayList<>();
// Setup: init local/remote snapshots and backup manager with no previous checkpoints
indexBlobIdAndLocalRemoteSnapshotsPair = setupRemoteAndLocalSnapshots(false);
Checkpoint checkpoint = new CheckpointV2(checkpointId, new HashMap<>(), ImmutableMap.of(BlobStoreStateBackendFactory.class.getName(), new HashMap<>()));
blobStoreBackupManager.init(checkpoint);
// mock: set task store dir to return corresponding test local store and create checkpoint dir
ArgumentCaptor<String> stringCaptor = ArgumentCaptor.forClass(String.class);
when(storageManagerUtil.getTaskStoreDir(any(File.class), stringCaptor.capture(), any(TaskName.class), any(TaskMode.class))).then((Answer<File>) invocation -> {
String storeName = invocation.getArgumentAt(1, String.class);
String snapshotIndexBlobId = testStoreNameAndSCMMap.get(storeName);
String storeDir = indexBlobIdAndLocalRemoteSnapshotsPair.get(snapshotIndexBlobId).getLeft();
try {
BlobStoreTestUtil.createTestCheckpointDirectory(storeDir, checkpointId.serialize());
checkpointDirsToClean.add(storeDir + "-" + checkpointId.serialize());
} catch (IOException e) {
Assert.fail("Couldn't create checkpoint directory. Test failed.");
}
return new File(storeDir);
});
ArgumentCaptor<File> storeDirCaptor = ArgumentCaptor.forClass(File.class);
when(storageManagerUtil.getStoreCheckpointDir(storeDirCaptor.capture(), eq(checkpointId))).thenAnswer(new Answer<String>() {
@Override
public String answer(InvocationOnMock invocation) throws Throwable {
File storeDir = invocation.getArgumentAt(0, File.class);
return storeDir.getAbsolutePath() + "-" + checkpointId.serialize();
}
});
SortedSet<DirDiff> actualDirDiffs = new TreeSet<>(Comparator.comparing(DirDiff::getDirName));
// mock: mock putDir and capture DirDiff
ArgumentCaptor<DirDiff> dirDiffCaptor = ArgumentCaptor.forClass(DirDiff.class);
ArgumentCaptor<SnapshotMetadata> snapshotMetadataCaptor = ArgumentCaptor.forClass(SnapshotMetadata.class);
when(blobStoreUtil.putDir(dirDiffCaptor.capture(), snapshotMetadataCaptor.capture())).then((Answer<CompletableFuture<DirIndex>>) invocation -> {
DirDiff dirDiff = invocation.getArgumentAt(0, DirDiff.class);
SnapshotMetadata snapshotMetadata = invocation.getArgumentAt(1, SnapshotMetadata.class);
actualDirDiffs.add(dirDiff);
SnapshotIndex snapshotIndex = testBlobStore.get(testStoreNameAndSCMMap.get(snapshotMetadata.getStoreName()));
return CompletableFuture.completedFuture(snapshotIndex.getDirIndex());
});
SortedSet<SnapshotIndex> expectedSnapshotIndexesUploaded = indexBlobIdAndLocalRemoteSnapshotsPair.values().stream()
    .map(Pair::getRight)
    .collect(Collectors.toCollection(() -> new TreeSet<>(Comparator.comparing(SnapshotIndex::getCreationTimeMillis))));
String expectedPreviousSnapshotIndexBlobId = "empty";
// mock: mock putSnapshotIndex and capture previous snapshot index
SortedSet<SnapshotIndex> actualSnapshotIndexesUploaded = new TreeSet<>(Comparator.comparing(SnapshotIndex::getCreationTimeMillis));
final String[] actualPreviousSnapshotIndexBlobId = { "empty" };
ArgumentCaptor<SnapshotIndex> snapshotIndexCaptor = ArgumentCaptor.forClass(SnapshotIndex.class);
when(blobStoreUtil.putSnapshotIndex(snapshotIndexCaptor.capture())).then((Answer<CompletableFuture<String>>) invocation -> {
SnapshotIndex snapshotIndex = invocation.getArgumentAt(0, SnapshotIndex.class);
actualSnapshotIndexesUploaded.add(snapshotIndex);
if (snapshotIndex.getPrevSnapshotIndexBlobId().isPresent()) {
actualPreviousSnapshotIndexBlobId[0] = "not-empty";
}
return CompletableFuture.completedFuture("random-blob-id");
});
// execute
blobStoreBackupManager.upload(checkpointId, testStoreNameAndSCMMap);
// setup expected dir diffs after execute: needs checkpoint dirs created in upload()
TreeSet<DirDiff> expectedDirDiffs = indexBlobIdAndLocalRemoteSnapshotsPair.values().stream().map(localRemoteSnapshotPair -> {
File localCheckpointDir = new File(localRemoteSnapshotPair.getLeft() + "-" + checkpointId.serialize());
DirIndex dirIndex = new DirIndex(localCheckpointDir.getName(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList());
return DirDiffUtil.getDirDiff(localCheckpointDir, dirIndex, DirDiffUtil.areSameFile(false));
}).collect(Collectors.toCollection(() -> new TreeSet<>(Comparator.comparing(DirDiff::getDirName))));
// assert - assert all DirDiffs are put to the blob store
Assert.assertEquals(actualDirDiffs, expectedDirDiffs);
// assert - assert no previous snapshot indexes were found
Assert.assertEquals(actualPreviousSnapshotIndexBlobId[0], expectedPreviousSnapshotIndexBlobId);
// assert - assert all snapshot indexes are uploaded
Assert.assertEquals(actualSnapshotIndexesUploaded, expectedSnapshotIndexesUploaded);
// cleanup
checkpointDirsToClean.forEach(path -> {
try {
FileUtils.deleteDirectory(new File(path));
} catch (IOException exception) {
Assert.fail("Failed to cleanup temporary checkpoint dirs.");
}
});
}
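With no previous checkpoints, the expected DirDiffs are computed against an empty remote DirIndex, so every file in the local checkpoint directory shows up as an addition. A minimal sketch of that comparison, using the same DirIndex and DirDiffUtil calls as the test with a hypothetical local path:

// Hypothetical local checkpoint dir; the remote DirIndex is empty since no prior snapshot exists.
File localCheckpointDir = new File("/tmp/loggedPersistentStore-" + checkpointId.serialize());
DirIndex emptyRemoteIndex = new DirIndex(localCheckpointDir.getName(),
    Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList());
DirDiff diff = DirDiffUtil.getDirDiff(localCheckpointDir, emptyRemoteIndex, DirDiffUtil.areSameFile(false));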
Use of org.apache.samza.checkpoint.Checkpoint in project samza by apache.
From the class TestTaskStorageCommitManager, method testSnapshotAndCommitAllFactories.
@Test
public void testSnapshotAndCommitAllFactories() {
CheckpointManager checkpointManager = mock(CheckpointManager.class);
TaskBackupManager taskBackupManager1 = mock(TaskBackupManager.class);
TaskBackupManager taskBackupManager2 = mock(TaskBackupManager.class);
ContainerStorageManager containerStorageManager = mock(ContainerStorageManager.class);
Checkpoint checkpoint = mock(Checkpoint.class);
TaskInstanceMetrics metrics = mock(TaskInstanceMetrics.class);
Timer checkpointTimer = mock(Timer.class);
when(metrics.storeCheckpointNs()).thenReturn(checkpointTimer);
TaskName taskName = new TaskName("task1");
Map<String, TaskBackupManager> backupManagers = ImmutableMap.of("factory1", taskBackupManager1, "factory2", taskBackupManager2);
TaskStorageCommitManager cm = new TaskStorageCommitManager(
    taskName, backupManagers, containerStorageManager, Collections.emptyMap(),
    new Partition(1), checkpointManager, new MapConfig(), ForkJoinPool.commonPool(),
    new StorageManagerUtil(), null, metrics);
when(checkpointManager.readLastCheckpoint(taskName)).thenReturn(checkpoint);
cm.init();
verify(taskBackupManager1).init(eq(checkpoint));
verify(taskBackupManager2).init(eq(checkpoint));
CheckpointId newCheckpointId = CheckpointId.create();
Map<String, String> factory1Checkpoints = ImmutableMap.of("store1", "system;stream;1", "store2", "system;stream;2");
Map<String, String> factory2Checkpoints = ImmutableMap.of("store1", "blobId1", "store2", "blobId2");
when(containerStorageManager.getAllStores(taskName)).thenReturn(Collections.emptyMap());
when(taskBackupManager1.snapshot(newCheckpointId)).thenReturn(factory1Checkpoints);
when(taskBackupManager2.snapshot(newCheckpointId)).thenReturn(factory2Checkpoints);
when(taskBackupManager1.upload(newCheckpointId, factory1Checkpoints)).thenReturn(CompletableFuture.completedFuture(factory1Checkpoints));
when(taskBackupManager2.upload(newCheckpointId, factory2Checkpoints)).thenReturn(CompletableFuture.completedFuture(factory2Checkpoints));
Map<String, Map<String, String>> snapshotSCMs = cm.snapshot(newCheckpointId);
cm.upload(newCheckpointId, snapshotSCMs);
// Test flow for snapshot
verify(taskBackupManager1).snapshot(newCheckpointId);
verify(taskBackupManager2).snapshot(newCheckpointId);
// Test flow for upload
verify(taskBackupManager1).upload(newCheckpointId, factory1Checkpoints);
verify(taskBackupManager2).upload(newCheckpointId, factory2Checkpoints);
verify(checkpointTimer).update(anyLong());
}
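Taken together, the verifications above trace the commit lifecycle that TaskStorageCommitManager drives across every configured backup factory: init with the last read checkpoint, then snapshot, then upload. A condensed sketch of that flow, assuming a fully wired commit manager cm as in the test:

// Assumes `cm` is a TaskStorageCommitManager wired with backup managers, as above.
cm.init(); // each TaskBackupManager is initialized with the last read checkpoint
CheckpointId checkpointId = CheckpointId.create();
// snapshot(): every factory returns its store name -> state checkpoint marker map
Map<String, Map<String, String>> snapshotSCMs = cm.snapshot(checkpointId);
// upload(): every factory uploads the markers it produced for this checkpoint id
cm.upload(checkpointId, snapshotSCMs);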