Use of org.smartdata.model.FileInfo in project SSM by Intel-bigdata.
From the class TestMetaStore, method testGetNonExistFile.
@Test
public void testGetNonExistFile() throws Exception {
  FileInfo info = metaStore.getFile("/non_exist_file_path");
  Assert.assertTrue(info == null);
}
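As the test shows, MetaStore#getFile returns null when no record exists for the path. A minimal sketch of how a caller might guard against that; the lookupOrThrow helper and the choice of FileNotFoundException are assumptions for illustration, not SSM code:

// Hypothetical helper: wraps metaStore.getFile and fails fast on a missing path.
// Assumes only that getFile(path) returns null when no record exists, as in the test above.
private FileInfo lookupOrThrow(MetaStore metaStore, String path) throws Exception {
  FileInfo info = metaStore.getFile(path);
  if (info == null) {
    throw new java.io.FileNotFoundException("No FileInfo stored for " + path);
  }
  return info;
}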
Use of org.smartdata.model.FileInfo in project SSM by Intel-bigdata.
From the class TestMetaStore, method testInsetFiles.
@Test
public void testInsetFiles() throws Exception {
  String pathString = "/tmp/testFile";
  long length = 123L;
  boolean isDir = false;
  int blockReplication = 1;
  long blockSize = 128 * 1024L;
  long modTime = 123123123L;
  long accessTime = 123123120L;
  String owner = "root";
  String group = "admin";
  long fileId = 312321L;
  byte storagePolicy = 0;
  byte erasureCodingPolicy = 0;
  FileInfo[] files = {
    new FileInfo(pathString, fileId, length, isDir, (short) blockReplication,
        blockSize, modTime, accessTime, (short) 1, owner, group,
        storagePolicy, erasureCodingPolicy)
  };
  metaStore.insertFiles(files);
  FileInfo dbFileInfo = metaStore.getFile("/tmp/testFile");
  Assert.assertTrue(dbFileInfo.equals(files[0]));
}
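The 13-argument FileInfo constructor above is easy to mis-order. A sketch of a small factory that names each positional argument as used in the test; newTestFile is hypothetical, the argument order is copied from the call above, and treating the ninth argument as the permission is an assumption based on getPermission() being asserted in the applier test below:

// Hypothetical factory mirroring the constructor call in testInsetFiles.
// Argument order copied from the test; labeling the (short) slot as "permission"
// is an assumption based on FileInfo.getPermission() in TestInotifyEventApplier.
private static FileInfo newTestFile(String path, long fileId, long length) {
  return new FileInfo(
      path,
      fileId,
      length,
      false,              // isDir
      (short) 1,          // block replication
      128 * 1024L,        // block size
      123123123L,         // modification time
      123123120L,         // access time
      (short) 1,          // permission (assumed)
      "root",             // owner
      "admin",            // group
      (byte) 0,           // storage policy
      (byte) 0);          // erasure coding policy
}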
Use of org.smartdata.model.FileInfo in project SSM by Intel-bigdata.
From the class TestCachedListFetcher, method testFetcher.
@Test
public void testFetcher() throws Exception {
  String pathPrefix = "/fileTest/cache/";
  String[] fids = { "5", "7", "9", "10" };
  Path dir = new Path(pathPrefix);
  dfs.mkdirs(dir);
  dfs.setStoragePolicy(dir, "HOT");
  List<FileInfo> fileInfos = new ArrayList<>();
  // Write one small file per id and cache it via CacheFileAction.
  for (int i = 0; i < fids.length; i++) {
    CacheFileAction cacheAction = new CacheFileAction();
    String path = pathPrefix + fids[i];
    FSDataOutputStream out = dfs.create(new Path(path));
    out.writeChars("testUncache");
    out.close();
    // Create cache pool
    CacheScheduler.createCachePool(dfsClient);
    fileInfos.add(createFileStatus(pathPrefix + fids[i]));
    cacheAction.setContext(smartContext);
    cacheAction.setDfsClient(dfsClient);
    Map<String, String> args = new HashMap<>();
    args.put(CacheFileAction.FILE_PATH, path);
    cacheAction.init(args);
    cacheAction.run();
    // System.out.println(cacheAction.isCached(path));
  }
  metaStore.insertFiles(fileInfos.toArray(new FileInfo[fileInfos.size()]));
  List<FileInfo> ret = metaStore.getFile();
  Assert.assertTrue(ret.size() == fids.length);
  // After the fetcher starts, it should report every cached file.
  cachedListFetcher.start();
  Thread.sleep(1000);
  List<CachedFileStatus> cachedFileStatuses = cachedListFetcher.getCachedList();
  Assert.assertTrue(cachedFileStatuses.size() == fids.length);
  // Uncache the first two files and expect the cached list to shrink accordingly.
  int unCachedSize = 2;
  for (int i = 0; i < unCachedSize; i++) {
    UncacheFileAction uncacheFileAction = new UncacheFileAction();
    String path = pathPrefix + fids[i];
    fileInfos.add(createFileStatus("fileTest/cache/" + fids[i]));
    uncacheFileAction.setContext(smartContext);
    uncacheFileAction.setDfsClient(dfsClient);
    Map<String, String> args = new HashMap<>();
    args.put(UncacheFileAction.FILE_PATH, path);
    uncacheFileAction.init(args);
    uncacheFileAction.run();
  }
  // System.out.println(uncacheFileAction.isCached(path));
  Thread.sleep(2000);
  cachedFileStatuses = cachedListFetcher.getCachedList();
  Assert.assertTrue(cachedFileStatuses.size() == fids.length - unCachedSize);
}
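The fixed Thread.sleep calls above make the test timing-sensitive. A sketch of a hypothetical polling helper that waits until the cached list reaches an expected size; awaitCachedCount is not part of SSM and relies only on CachedListFetcher#getCachedList as exercised above:

// Hypothetical test utility: polls the fetcher instead of sleeping a fixed interval.
// Only getCachedList() is assumed, as used in testFetcher.
private static boolean awaitCachedCount(CachedListFetcher fetcher, int expected,
    long timeoutMs) throws InterruptedException {
  long deadline = System.currentTimeMillis() + timeoutMs;
  while (System.currentTimeMillis() < deadline) {
    if (fetcher.getCachedList().size() == expected) {
      return true;
    }
    Thread.sleep(100);
  }
  return false;
}

With such a helper, the sleeps could become assertions like Assert.assertTrue(awaitCachedCount(cachedListFetcher, fids.length, 5000)).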
Use of org.smartdata.model.FileInfo in project SSM by Intel-bigdata.
From the class TestInotifyEventApplier, method testApplier.
@Test
public void testApplier() throws Exception {
  DFSClient client = Mockito.mock(DFSClient.class);
  FileInfo root = HadoopUtil.convertFileStatus(getDummyDirStatus("/", 1000), "/");
  metaStore.insertFile(root);
  BackUpInfo backUpInfo = new BackUpInfo(1L, "/file", "remote/dest/", 10);
  metaStore.insertBackUpInfo(backUpInfo);
  InotifyEventApplier applier = new InotifyEventApplier(metaStore, client);
  Event.CreateEvent createEvent = new Event.CreateEvent.Builder()
      .iNodeType(Event.CreateEvent.INodeType.FILE)
      .ctime(1)
      .defaultBlockSize(1024)
      .groupName("cg1")
      .overwrite(true)
      .ownerName("user1")
      .path("/file")
      .perms(new FsPermission("777"))
      .replication(3)
      .build();
  HdfsFileStatus status1 = CompatibilityHelperLoader.getHelper().createHdfsFileStatus(
      0, false, 1, 1024, 0, 0, new FsPermission("777"), "owner", "group",
      new byte[0], new byte[0], 1010, 0, null, (byte) 0);
  Mockito.when(client.getFileInfo(Matchers.startsWith("/file"))).thenReturn(status1);
  Mockito.when(client.getFileInfo(Matchers.startsWith("/dir"))).thenReturn(getDummyDirStatus("", 1010));
  applier.apply(new Event[] { createEvent });
  FileInfo result1 = metaStore.getFile().get(1);
  Assert.assertEquals(result1.getPath(), "/file");
  Assert.assertEquals(result1.getFileId(), 1010L);
  Assert.assertEquals(result1.getPermission(), 511);
  Event close = new Event.CloseEvent("/file", 1024, 0);
  applier.apply(new Event[] { close });
  FileInfo result2 = metaStore.getFile().get(1);
  Assert.assertEquals(result2.getLength(), 1024);
  Assert.assertEquals(result2.getModificationTime(), 0L);
  // Event truncate = new Event.TruncateEvent("/file", 512, 16);
  // applier.apply(new Event[] {truncate});
  // ResultSet result3 = metaStore.executeQuery("SELECT * FROM files");
  // Assert.assertEquals(result3.getLong("length"), 512);
  // Assert.assertEquals(result3.getLong("modification_time"), 16L);
  Event meta = new Event.MetadataUpdateEvent.Builder()
      .path("/file")
      .metadataType(Event.MetadataUpdateEvent.MetadataType.TIMES)
      .mtime(2)
      .atime(3)
      .replication(4)
      .ownerName("user2")
      .groupName("cg2")
      .build();
  applier.apply(new Event[] { meta });
  FileInfo result4 = metaStore.getFile().get(1);
  Assert.assertEquals(result4.getAccessTime(), 3);
  Assert.assertEquals(result4.getModificationTime(), 2);
  Event meta1 = new Event.MetadataUpdateEvent.Builder()
      .path("/file")
      .metadataType(Event.MetadataUpdateEvent.MetadataType.OWNER)
      .ownerName("user1")
      .groupName("cg1")
      .build();
  applier.apply(new Event[] { meta1 });
  result4 = metaStore.getFile().get(1);
  Assert.assertEquals(result4.getOwner(), "user1");
  Assert.assertEquals(result4.getGroup(), "cg1");
  Event.CreateEvent createEvent2 = new Event.CreateEvent.Builder()
      .iNodeType(Event.CreateEvent.INodeType.DIRECTORY)
      .ctime(1)
      .groupName("cg1")
      .overwrite(true)
      .ownerName("user1")
      .path("/dir")
      .perms(new FsPermission("777"))
      .replication(3)
      .build();
  Event.CreateEvent createEvent3 = new Event.CreateEvent.Builder()
      .iNodeType(Event.CreateEvent.INodeType.FILE)
      .ctime(1)
      .groupName("cg1")
      .overwrite(true)
      .ownerName("user1")
      .path("/dir/file")
      .perms(new FsPermission("777"))
      .replication(3)
      .build();
  Event rename = new Event.RenameEvent.Builder()
      .dstPath("/dir2")
      .srcPath("/dir")
      .timestamp(5)
      .build();
  applier.apply(new Event[] { createEvent2, createEvent3, rename });
  List<FileInfo> result5 = metaStore.getFile();
  List<String> expectedPaths = Arrays.asList("/dir2", "/dir2/file", "/file");
  List<String> actualPaths = new ArrayList<>();
  for (FileInfo s : result5) {
    actualPaths.add(s.getPath());
  }
  Collections.sort(actualPaths);
  Assert.assertTrue(actualPaths.size() == 4);
  Assert.assertTrue(actualPaths.containsAll(expectedPaths));
  Event unlink = new Event.UnlinkEvent.Builder().path("/").timestamp(6).build();
  applier.apply(new Event[] { unlink });
  Thread.sleep(1200);
  Assert.assertEquals(metaStore.getFile().size(), 0);
  System.out.println("Files in table " + metaStore.getFile().size());
  List<FileDiff> fileDiffList = metaStore.getPendingDiff();
  Assert.assertTrue(fileDiffList.size() == 4);
}
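The pattern above (apply an event array, then re-read the file table) repeats for every event type. A hypothetical convenience wrapper for that pattern, using only InotifyEventApplier#apply and MetaStore#getFile(String) as exercised in these tests; applyAndFetch is not SSM code:

// Hypothetical test helper: applies a batch of inotify events and returns the stored
// FileInfo for one path afterwards (null if the path is no longer present).
private static FileInfo applyAndFetch(InotifyEventApplier applier, MetaStore metaStore,
    String path, Event... events) throws Exception {
  applier.apply(events);
  return metaStore.getFile(path);
}

For example, the close-event check could read FileInfo result2 = applyAndFetch(applier, metaStore, "/file", close).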
Use of org.smartdata.model.FileInfo in project SSM by Intel-bigdata.
From the class TestInotifyEventApplier, method testApplierRenameEvent.
@Test
public void testApplierRenameEvent() throws Exception {
  DFSClient client = Mockito.mock(DFSClient.class);
  SmartConf conf = new SmartConf();
  NamespaceFetcher namespaceFetcher = new NamespaceFetcher(client, metaStore, null, conf);
  InotifyEventApplier applier = new InotifyEventApplier(metaStore, client, namespaceFetcher);
  FileInfo[] fileInfos = new FileInfo[] {
    HadoopUtil.convertFileStatus(getDummyFileStatus("/dirfile", 7000), "/dirfile"),
    HadoopUtil.convertFileStatus(getDummyDirStatus("/dir", 8000), "/dir"),
    HadoopUtil.convertFileStatus(getDummyFileStatus("/dir/file1", 8001), "/dir/file1"),
    HadoopUtil.convertFileStatus(getDummyFileStatus("/dir/file2", 8002), "/dir/file2"),
    HadoopUtil.convertFileStatus(getDummyDirStatus("/dir2", 8100), "/dir2"),
    HadoopUtil.convertFileStatus(getDummyFileStatus("/dir2/file1", 8101), "/dir2/file1"),
    HadoopUtil.convertFileStatus(getDummyFileStatus("/dir2/file2", 8102), "/dir2/file2"),
    HadoopUtil.convertFileStatus(getDummyDirStatus("/dir/dir", 8200), "/dir/dir"),
    HadoopUtil.convertFileStatus(getDummyFileStatus("/dir/dir/f1", 8201), "/dir/dir/f1"),
    HadoopUtil.convertFileStatus(getDummyFileStatus("/file", 2000), "/file")
  };
  metaStore.insertFiles(fileInfos);
  Mockito.when(client.getFileInfo("/dir1")).thenReturn(getDummyDirStatus("/dir1", 8000));
  Event.RenameEvent dirRenameEvent = new Event.RenameEvent.Builder()
      .srcPath("/dir").dstPath("/dir1").build();
  applier.apply(new Event[] { dirRenameEvent });
  Assert.assertTrue(metaStore.getFile("/dir") == null);
  Assert.assertTrue(metaStore.getFile("/dir/file1") == null);
  Assert.assertTrue(metaStore.getFile("/dirfile") != null);
  Assert.assertTrue(metaStore.getFile("/dir1") != null);
  Assert.assertTrue(metaStore.getFile("/dir1/file1") != null);
  Assert.assertTrue(metaStore.getFile("/dir1/dir/f1") != null);
  Assert.assertTrue(metaStore.getFile("/dir2") != null);
  Assert.assertTrue(metaStore.getFile("/dir2/file1") != null);
  Assert.assertTrue(metaStore.getFile("/file") != null);
  List<Event> events = new ArrayList<>();
  Event.RenameEvent renameEvent = new Event.RenameEvent.Builder()
      .srcPath("/file1").dstPath("/file2").build();
  events.add(renameEvent);
  applier.apply(events);
  Assert.assertTrue(metaStore.getFile("/file2") == null);
  /*
  Mockito.when(client.getFileInfo("/file2")).thenReturn(getDummyFileStatus("/file2", 2000));
  applier.apply(events);
  FileInfo info = metaStore.getFile("/file2");
  Assert.assertTrue(info != null && info.getFileId() == 2000);
  */
  events.clear();
  renameEvent = new Event.RenameEvent.Builder().srcPath("/file").dstPath("/file1").build();
  events.add(renameEvent);
  applier.apply(events);
  FileInfo info2 = metaStore.getFile("/file");
  Assert.assertTrue(info2 == null);
  FileInfo info3 = metaStore.getFile("/file1");
  Assert.assertTrue(info3 != null);
  renameEvent = new Event.RenameEvent.Builder().srcPath("/file1").dstPath("/file2").build();
  events.clear();
  events.add(renameEvent);
  applier.apply(events);
  FileInfo info4 = metaStore.getFile("/file1");
  FileInfo info5 = metaStore.getFile("/file2");
  Assert.assertTrue(info4 == null && info5 != null);
}
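Each rename above is verified by the same two lookups: the source path must disappear from the metastore and the destination must appear. A hypothetical assertion helper capturing that invariant; assertRenamed is not part of SSM and uses only MetaStore#getFile(String):

// Hypothetical assertion: after a RenameEvent is applied, the source path should be
// absent from the metastore and the destination path should be present.
private static void assertRenamed(MetaStore metaStore, String src, String dst)
    throws Exception {
  Assert.assertNull("source still present: " + src, metaStore.getFile(src));
  Assert.assertNotNull("destination missing: " + dst, metaStore.getFile(dst));
}

The final three assertions above could then be written as assertRenamed(metaStore, "/file1", "/file2").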