Use of org.opencastproject.fileupload.api.job.Payload in the opencast project:
class FileUploadServiceImpl, method acceptChunk.
/**
 * {@inheritDoc}
 *
 * Receives one chunk of an upload job: validates job state, ordering and size,
 * writes the chunk to a temp file, appends it to the payload file, and advances
 * or finalizes the job.
 *
 * @see org.opencastproject.fileupload.api.FileUploadService#acceptChunk(org.opencastproject.fileupload.api.job.FileUploadJob
 *      job, long chunk, InputStream content)
 */
@Override
public void acceptChunk(FileUploadJob job, long chunkNumber, InputStream content) throws FileUploadException {
  // Reject chunks for jobs that have already finished.
  if (job.getState().equals(FileUploadJob.JobState.COMPLETE)) {
    removeFromCache(job);
    throw fileUploadException(Severity.warn, "Job is already complete.");
  }
  // Is the job ready to receive data? Lock it for the duration of this chunk.
  if (isLocked(job.getId())) {
    throw fileUploadException(Severity.error, "Job is locked. Seems like a concurrent upload to this job is in progress.");
  }
  lock(job);
  // Chunks must arrive strictly in order; reject anything out of sequence.
  int supposedChunk = job.getCurrentChunk().getNumber() + 1;
  if (chunkNumber != supposedChunk) {
    removeFromCache(job);
    throw fileUploadException(Severity.error,
        format("Wrong chunk number. Awaiting #%d but #%d was offered.", supposedChunk, chunkNumber));
  }
  // Fully parameterized logging: no string concatenation when debug is disabled.
  logger.debug("Receiving chunk #{} of job {}", chunkNumber, job);
  // Write the chunk to a temporary file.
  job.getCurrentChunk().incrementNumber();
  File chunkFile;
  try {
    chunkFile = ensureExists(getChunkFile(job.getId()));
  } catch (IOException e) {
    // NOTE(review): unlike the other post-lock error paths, this one does not
    // call removeFromCache(job) before throwing — confirm whether the job is
    // intentionally left locked here.
    throw fileUploadException(Severity.error, "Cannot create chunk file", e);
  }
  OutputStream out = null;
  try {
    byte[] readBuffer = new byte[READ_BUFFER_LENGTH];
    out = new FileOutputStream(chunkFile, false);
    int bytesRead = 0;
    long bytesReadTotal = 0L;
    Chunk currentChunk = job.getCurrentChunk();
    // Copy manually (instead of using IOUtils.copy()) so we can count the
    // number of bytes and keep the chunk's received-bytes counter current.
    do {
      bytesRead = content.read(readBuffer);
      if (bytesRead > 0) {
        out.write(readBuffer, 0, bytesRead);
        bytesReadTotal += bytesRead;
        currentChunk.setRecieved(bytesReadTotal);
      }
    } while (bytesRead != -1);
    if (job.getPayload().getTotalSize() == -1 && job.getChunksTotal() == 1) {
      // Ordinary (single-chunk) upload with unknown total size from submit:
      // the payload size is simply the number of bytes just received.
      job.getPayload().setTotalSize(bytesReadTotal);
    }
  } catch (Exception e) {
    removeFromCache(job);
    throw fileUploadException(Severity.error, "Failed to store chunk data", e);
  } finally {
    IOUtils.closeQuietly(content);
    IOUtils.closeQuietly(out);
  }
  // Verify the chunk has the expected size.
  long actualSize = chunkFile.length();
  long supposedSize;
  if (chunkNumber == job.getChunksTotal() - 1) {
    supposedSize = job.getPayload().getTotalSize() % job.getChunksize();
    // Not-so-nice workaround for the rare case that the file size is an exact
    // multiple of the chunk size: the last chunk is then a full chunk, not 0.
    supposedSize = supposedSize == 0 ? job.getChunksize() : supposedSize;
  } else {
    supposedSize = job.getChunksize();
  }
  if (actualSize == supposedSize || (job.getChunksTotal() == 1 && job.getChunksize() == -1)) {
    // Append the chunk to the payload file.
    FileInputStream in = null;
    try {
      File payloadFile = getPayloadFile(job.getId());
      in = new FileInputStream(chunkFile);
      out = new FileOutputStream(payloadFile, true);
      IOUtils.copy(in, out);
      Payload payload = job.getPayload();
      payload.setCurrentSize(payload.getCurrentSize() + actualSize);
    } catch (IOException e) {
      removeFromCache(job);
      throw fileUploadException(Severity.error, "Failed to append chunk data", e);
    } finally {
      IOUtils.closeQuietly(in);
      IOUtils.closeQuietly(out);
      deleteChunkFile(job.getId());
    }
  } else {
    removeFromCache(job);
    throw fileUploadException(Severity.warn,
        format("Chunk has wrong size. Awaited: %d bytes, received: %d bytes.", supposedSize, actualSize));
  }
  // Update the job.
  if (chunkNumber == job.getChunksTotal() - 1) {
    // Upload is complete.
    finalizeJob(job);
    logger.info("Upload job completed: {}", job);
  } else {
    // Upload still incomplete; ready for the next chunk.
    job.setState(FileUploadJob.JobState.READY);
  }
  storeJob(job);
  removeFromCache(job);
}
Aggregations