Use of org.smartdata.model.CompactFileState in project SSM by Intel-bigdata.
Class SmallFileScheduler, method syncMetaStore.
/**
 * Sync compact file states with the meta store.
 */
private void syncMetaStore() {
  List<CompactFileState> compactFileStates = new ArrayList<>();
  // Get compact file states from compactFileStateQueue
  for (int i = 0; i < META_STORE_INSERT_BATCH_SIZE; i++) {
    CompactFileState compactFileState = compactFileStateQueue.poll();
    if (compactFileState != null) {
      try {
        FileInfo info = metaStore.getFile(compactFileState.getPath());
        if (info != null && info.getLength() == 0) {
          LOG.debug(String.format("Ready to insert the file state of %s.",
              compactFileState.getPath()));
          compactFileStates.add(compactFileState);
        } else {
          LOG.debug(String.format("Waiting for the small file %s synced in the meta store.",
              compactFileState.getPath()));
          compactFileStateQueue.offer(compactFileState);
        }
      } catch (MetaStoreException e) {
        LOG.error("Failed to get file info.", e);
        compactFileStateQueue.offer(compactFileState);
      }
    } else {
      break;
    }
  }
  // Batch insert compact file states into the meta store
  try {
    if (!compactFileStates.isEmpty()) {
      metaStore.insertCompactFileStates(
          compactFileStates.toArray(new CompactFileState[0]));
      for (CompactFileState fileState : compactFileStates) {
        handlingSmallFileCache.remove(fileState.getPath());
      }
    }
  } catch (MetaStoreException e) {
    for (CompactFileState fileState : compactFileStates) {
      handlingSmallFileCache.remove(fileState.getPath());
    }
    LOG.error("Failed to update file state of meta store.", e);
  }
}
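For context, here is a hedged sketch of the producer side: how a compact file state might be built and offered to compactFileStateQueue once a small file has been appended into a container file, so that syncMetaStore() can later persist it. The helper name enqueueCompactFileState is hypothetical, and the FileContainerInfo(containerFilePath, offset, length) constructor is assumed from the getters used in ProtoBufferHelper below; imports are omitted as in the other snippets on this page.

// Hedged producer-side sketch (hypothetical helper). Assumes FileContainerInfo's
// constructor mirrors its getters: container file path, then offset and length of the
// region that now holds the small file's data.
private void enqueueCompactFileState(String smallFilePath, String containerFilePath,
    long offset, long length) {
  FileContainerInfo containerInfo =
      new FileContainerInfo(containerFilePath, offset, length);
  // syncMetaStore() will poll this state and batch-insert it into the meta store.
  compactFileStateQueue.offer(new CompactFileState(smallFilePath, containerInfo));
}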
Use of org.smartdata.model.CompactFileState in project SSM by Intel-bigdata.
Class SmallFileScheduler, method isSuccessfulBySpeculation.
/**
 * Speculate the action status and set the result accordingly.
 */
@Override
public boolean isSuccessfulBySpeculation(ActionInfo actionInfo) {
  try {
    boolean isSuccessful = true;
    List<FileState> fileStateList = new ArrayList<>();
    // If any small file is not compacted, return false.
    for (String path : getSmallFileList(actionInfo)) {
      FileState fileState = HadoopUtil.getFileState(dfsClient, path);
      FileState.FileType fileType = fileState.getFileType();
      if (!isExpectedFileState(fileType, actionInfo.getActionName())) {
        isSuccessful = false;
        break;
      }
      // Only add compact file states.
      if (actionInfo.getActionName().equals(COMPACT_ACTION_NAME)) {
        fileStateList.add(fileState);
      }
    }
    if (!isSuccessful) {
      return false;
    }
    if (actionInfo.getActionName().equals(UNCOMPACT_ACTION_NAME)) {
      return true;
    }
    // Recover the action result for a successful compact action.
    if (actionInfo.getActionName().equals(COMPACT_ACTION_NAME)) {
      List<CompactFileState> compactFileStates = new ArrayList<>();
      assert fileStateList.size() == getSmallFileList(actionInfo).size();
      for (FileState fileState : fileStateList) {
        compactFileStates.add((CompactFileState) fileState);
      }
      actionInfo.setResult(new Gson().toJson(compactFileStates));
    }
    return true;
  } catch (IOException e) {
    LOG.warn("Failed to get file state, suppose this action was not "
        + "successfully executed: {}", actionInfo.toString());
    return false;
  }
}
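Because the recovered result is plain Gson JSON, a consumer of ActionInfo.getResult() can map it back to compact file states. A minimal sketch follows; the helper name parseCompactResult is hypothetical, and Gson types are written fully qualified in place of imports.

// Hedged sketch: reverse the new Gson().toJson(compactFileStates) call above.
// The result is a JSON array, so deserialization needs the parameterized list type.
private static List<CompactFileState> parseCompactResult(String actionResultJson) {
  java.lang.reflect.Type listType =
      new com.google.gson.reflect.TypeToken<List<CompactFileState>>() { }.getType();
  return new com.google.gson.Gson().fromJson(actionResultJson, listType);
}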
Use of org.smartdata.model.CompactFileState in project SSM by Intel-bigdata.
Class ProtoBufferHelper, method convert.
public static FileStateProto convert(FileState fileState) {
  FileStateProto.Builder builder = FileStateProto.newBuilder();
  builder.setPath(fileState.getPath())
      .setType(fileState.getFileType().getValue())
      .setStage(fileState.getFileStage().getValue());
  if (fileState instanceof CompactFileState) {
    FileContainerInfo fileContainerInfo =
        ((CompactFileState) fileState).getFileContainerInfo();
    builder.setCompactFileState(CompactFileStateProto.newBuilder()
        .setContainerFilePath(fileContainerInfo.getContainerFilePath())
        .setOffset(fileContainerInfo.getOffset())
        .setLength(fileContainerInfo.getLength()));
  } else if (fileState instanceof CompressionFileState) {
    builder.setCompressionFileState(convert((CompressionFileState) fileState));
  }
  /* else if (fileState instanceof S3FileState) {
    builder.setS3FileState();
  } else if (fileState instanceof ) {
  }
  */
  return builder.build();
}
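A hedged forward-conversion sketch: wrap a compacted small file in a CompactFileState and check that convert() carries the container info into the proto. The CompactFileState and FileContainerInfo constructors, the generated proto getters, and a non-null file stage on CompactFileState are all assumptions based on the setters and accessors used on this page; the paths are placeholders and imports are omitted.

// Hedged sketch (assumed constructors and generated-proto getters).
public static void compactStateToProtoSketch() {
  CompactFileState state = new CompactFileState("/small/a.log",
      new FileContainerInfo("/container/container_file_0", 0L, 128L));
  FileStateProto proto = ProtoBufferHelper.convert(state);
  // The compact branch above should have populated the nested CompactFileStateProto.
  assert proto.getPath().equals("/small/a.log");
  assert proto.getCompactFileState().getContainerFilePath()
      .equals("/container/container_file_0");
  assert proto.getCompactFileState().getOffset() == 0L;
  assert proto.getCompactFileState().getLength() == 128L;
}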
Use of org.smartdata.model.CompactFileState in project SSM by Intel-bigdata.
Class ProtoBufferHelper, method convert.
public static FileState convert(FileStateProto proto) {
  FileState fileState = null;
  String path = proto.getPath();
  FileState.FileType type = FileState.FileType.fromValue(proto.getType());
  FileState.FileStage stage = FileState.FileStage.fromValue(proto.getStage());
  if (type == null) {
    return new NormalFileState(path);
  }
  switch (type) {
    case NORMAL:
      fileState = new NormalFileState(path);
      break;
    case COMPACT:
      CompactFileStateProto compactProto = proto.getCompactFileState();
      fileState = new CompactFileState(path, convert(compactProto));
      break;
    case COMPRESSION:
      CompressionFileStateProto compressionProto = proto.getCompressionFileState();
      // Convert to CompressionFileState
      fileState = convert(path, stage, compressionProto);
      break;
    case S3:
      S3FileStateProto s3Proto = proto.getS3FileState();
      // fileState = convert(path, type, stage, s3Proto);
      break;
    default:
  }
  return fileState;
}
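Putting the two convert() overloads together, a hedged round-trip sketch: a CompactFileState serialized to a proto and parsed back should preserve the path and the container file location. Constructors are assumed as above and imports are omitted.

// Hedged round-trip sketch over the two convert() overloads above.
public static void compactStateRoundTripSketch() {
  CompactFileState original = new CompactFileState("/small/b.log",
      new FileContainerInfo("/container/container_file_0", 128L, 64L));
  FileState restored = ProtoBufferHelper.convert(ProtoBufferHelper.convert(original));
  assert restored instanceof CompactFileState;
  FileContainerInfo info = ((CompactFileState) restored).getFileContainerInfo();
  assert restored.getPath().equals("/small/b.log");
  assert info.getContainerFilePath().equals("/container/container_file_0");
  assert info.getOffset() == 128L && info.getLength() == 64L;
}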
Use of org.smartdata.model.CompactFileState in project SSM by Intel-bigdata.
Class SmartFileSystem, method listCorruptFileBlocks.
@Override
public RemoteIterator<Path> listCorruptFileBlocks(Path path) throws IOException {
  RemoteIterator<Path> corruptFileBlocksIterator = super.listCorruptFileBlocks(path);
  FileState fileState = smartDFSClient.getFileState(getPathName(path));
  if (fileState instanceof CompactFileState) {
    // For a compacted small file, list corrupt blocks of its container file instead.
    corruptFileBlocksIterator = super.listCorruptFileBlocks(new Path(
        ((CompactFileState) fileState).getFileContainerInfo().getContainerFilePath()));
  }
  return corruptFileBlocksIterator;
}
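A hedged caller-side sketch: clients simply iterate the returned RemoteIterator, and when the queried path is a compacted small file the override above transparently lists corrupt blocks of its container file instead. The helper name printCorruptFiles is hypothetical; the FileSystem is assumed to be a SmartFileSystem obtained through the usual Hadoop configuration, and imports are omitted.

// Hedged caller-side sketch (hypothetical helper): print files with corrupt blocks
// under a directory via a FileSystem that is expected to be a SmartFileSystem.
public static void printCorruptFiles(FileSystem fs, Path dir) throws IOException {
  RemoteIterator<Path> corrupt = fs.listCorruptFileBlocks(dir);
  while (corrupt.hasNext()) {
    System.out.println("File with corrupt blocks: " + corrupt.next());
  }
}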