Use of org.datatransferproject.types.common.ExportInformation in project data-transfer-project by google.
The class PortabilityStackInMemoryDataCopier, method updateStackAfterCopyIteration:
private void updateStackAfterCopyIteration(
    UUID jobId,
    String jobIdPrefix,
    ContainerResource exportContainerResource,
    int copyIteration,
    ContinuationData continuationData) {
  if (null != continuationData) {
    // Push any sub-resources onto the stack, in reverse order so they are later
    // popped (and processed) in their original order.
    if (continuationData.getContainerResources() != null
        && !continuationData.getContainerResources().isEmpty()) {
      List<ContainerResource> subResources = continuationData.getContainerResources();
      for (int i = subResources.size() - 1; i >= 0; i--) {
        monitor.debug(() -> jobIdPrefix
            + "Pushing to the stack a new copy iteration with a new container resource, copy iteration: "
            + copyIteration);
        exportInfoStack.push(new ExportInformation(null, subResources.get(i)));
      }
    }
    // Push the next page of items onto the stack.
    if (null != continuationData.getPaginationData()) {
      monitor.debug(() -> jobIdPrefix
          + "Pushing to the stack a new copy iteration with pagination info, copy iteration: "
          + copyIteration);
      exportInfoStack.push(
          new ExportInformation(continuationData.getPaginationData(), exportContainerResource));
    }
  }
  // Persist a snapshot of the remaining work so a restarted worker can resume from it.
  jobStore.storeJobStack(jobId, (Stack<ExportInformation>) exportInfoStack.clone());
}
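Since the stack is LIFO, the reverse-order pushes above mean sub-resources are popped in their original order, after any pagination entry. A minimal sketch of the resulting pop order (the method and variable names here are illustrative, not from the project):

private void illustratePopOrder(
    ContainerResource current, ContainerResource sub1, ContainerResource sub2, PaginationData nextPage) {
  // Mirrors the push order used by updateStackAfterCopyIteration for a
  // continuation carrying two sub-resources plus pagination data.
  Stack<ExportInformation> stack = new Stack<>();
  stack.push(new ExportInformation(null, sub2)); // sub-resources pushed in reverse order
  stack.push(new ExportInformation(null, sub1));
  stack.push(new ExportInformation(nextPage, current)); // pagination entry pushed last
  stack.pop(); // next page of the current resource is processed first
  stack.pop(); // then sub1
  stack.pop(); // then sub2
}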
Use of org.datatransferproject.types.common.ExportInformation in project data-transfer-project by google.
The class PortabilityStackInMemoryDataCopier, method copy:
/**
 * Transfers data from the given {@code exporter}, optionally starting at the point specified in
 * the provided {@code exportInformation}. Imports the data using the provided {@code importer}.
 * If more data remains to be exported, repeats the copy using the specific {@link
 * ExportInformation} to continue the process.
 *
 * @param exportAuthData The auth data for the export
 * @param importAuthData The auth data for the import
 * @param jobId The id of the job being copied
 * @param exportInfo Any pagination or resource information to use for subsequent calls.
 */
@Override
public Collection<ErrorDetail> copy(
    AuthData exportAuthData, AuthData importAuthData, UUID jobId, Optional<ExportInformation> exportInfo)
    throws IOException, CopyException {
  idempotentImportExecutor.setJobId(jobId);
  String jobIdPrefix = "Job " + jobId + ": ";
  Optional<Stack<ExportInformation>> maybeLoadedStack = jobStore.loadJobStack(jobId);
  if (maybeLoadedStack.isPresent()) {
    // Load the stack from a partially completed transfer.
    exportInfoStack = maybeLoadedStack.get();
  } else {
    // Start a new transfer.
    int initialCopyIteration = COPY_ITERATION_COUNTER.incrementAndGet();
    ExportResult<?> initialExportResult =
        copyIteration(jobId, exportAuthData, importAuthData, exportInfo, jobIdPrefix, initialCopyIteration);
    // Import and export were successful; determine what to do next.
    ContainerResource exportContainerResource =
        exportInfo.isPresent() ? exportInfo.get().getContainerResource() : null;
    updateStackAfterCopyIteration(
        jobId, jobIdPrefix, exportContainerResource, initialCopyIteration,
        initialExportResult.getContinuationData());
  }
  while (!exportInfoStack.isEmpty()) {
    int copyIteration = COPY_ITERATION_COUNTER.incrementAndGet();
    ExportInformation currentExportInfo = exportInfoStack.pop();
    ExportResult<?> exportResult =
        copyIteration(jobId, exportAuthData, importAuthData, Optional.of(currentExportInfo), jobIdPrefix, copyIteration);
    // Import and export were successful; determine what to do next.
    updateStackAfterCopyIteration(
        jobId, jobIdPrefix, currentExportInfo.getContainerResource(), copyIteration,
        exportResult.getContinuationData());
  }
  return idempotentImportExecutor.getErrors();
}
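The loadJobStack branch above is what makes this copier resumable: each iteration persists a clone of the remaining ExportInformation entries via storeJobStack, so a restarted worker picks up where the previous one stopped instead of re-exporting from the beginning. A minimal sketch of that resume decision, assuming only the JobStore methods already used above (the helper name is illustrative):

private Stack<ExportInformation> loadOrStartStack(UUID jobId) throws IOException {
  // If a checkpointed stack exists, resume from it; otherwise begin a new transfer
  // with an empty stack that the first copy iteration will populate.
  Optional<Stack<ExportInformation>> maybeLoadedStack = jobStore.loadJobStack(jobId);
  return maybeLoadedStack.orElseGet(Stack::new);
}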
Use of org.datatransferproject.types.common.ExportInformation in project data-transfer-project by google.
The class PortabilityInMemoryDataCopierTest, method continuationDataWithMultipleSubResources:
@Test
public void continuationDataWithMultipleSubResources() throws CopyException, IOException {
  // Arrange
  ContainerResource subResource1 = Mockito.mock(ContainerResource.class);
  ContainerResource subResource2 = Mockito.mock(ContainerResource.class);
  ExportInformation subResource1ExportInfo = new ExportInformation(null, subResource1);
  ExportInformation subResource2ExportInfo = new ExportInformation(null, subResource2);
  Mockito.when(continuationData.getContainerResources())
      .thenReturn(Arrays.asList(subResource1, subResource2));
  Mockito.when(initialExportResult.getContinuationData()).thenReturn(continuationData);
  Mockito.doReturn(initialExportResult)
      .when(inMemoryDataCopier)
      .copyIteration(jobId, exportAuthData, importAuthData, Optional.of(exportInfo), jobIdPrefix, 1);

  // Act
  inMemoryDataCopier.copy(exportAuthData, importAuthData, jobId, Optional.of(exportInfo));

  // Assert
  InOrder orderVerifier = Mockito.inOrder(inMemoryDataCopier);
  orderVerifier.verify(inMemoryDataCopier)
      .copyIteration(jobId, exportAuthData, importAuthData, Optional.of(exportInfo), jobIdPrefix, 1);
  orderVerifier.verify(inMemoryDataCopier)
      .copyIteration(jobId, exportAuthData, importAuthData, Optional.of(subResource1ExportInfo), jobIdPrefix, 2);
  orderVerifier.verify(inMemoryDataCopier)
      .copyIteration(jobId, exportAuthData, importAuthData, Optional.of(subResource2ExportInfo), jobIdPrefix, 3);
}
Use of org.datatransferproject.types.common.ExportInformation in project data-transfer-project by google.
The class PortabilityInMemoryDataCopierTest, method continuationDataWithPaginationDataAndMultipleSubResources:
@Test
public void continuationDataWithPaginationDataAndMultipleSubResources() throws CopyException, IOException {
  // Arrange
  PaginationData paginationData = Mockito.mock(PaginationData.class);
  ContainerResource subResource1 = Mockito.mock(ContainerResource.class);
  ContainerResource subResource2 = Mockito.mock(ContainerResource.class);
  ExportInformation paginationExportInfo = new ExportInformation(paginationData, null);
  ExportInformation subResource1ExportInfo = new ExportInformation(null, subResource1);
  ExportInformation subResource2ExportInfo = new ExportInformation(null, subResource2);
  Mockito.when(continuationData.getPaginationData()).thenReturn(paginationData);
  Mockito.when(continuationData.getContainerResources())
      .thenReturn(Arrays.asList(subResource1, subResource2));
  Mockito.when(initialExportResult.getContinuationData()).thenReturn(continuationData);
  Mockito.doReturn(initialExportResult)
      .when(inMemoryDataCopier)
      .copyIteration(jobId, exportAuthData, importAuthData, Optional.of(exportInfo), jobIdPrefix, 1);

  // Act
  inMemoryDataCopier.copy(exportAuthData, importAuthData, jobId, Optional.of(exportInfo));

  // Assert
  InOrder orderVerifier = Mockito.inOrder(inMemoryDataCopier);
  orderVerifier.verify(inMemoryDataCopier)
      .copyIteration(jobId, exportAuthData, importAuthData, Optional.of(exportInfo), jobIdPrefix, 1);
  orderVerifier.verify(inMemoryDataCopier)
      .copyIteration(jobId, exportAuthData, importAuthData, Optional.of(paginationExportInfo), jobIdPrefix, 2);
  orderVerifier.verify(inMemoryDataCopier)
      .copyIteration(jobId, exportAuthData, importAuthData, Optional.of(subResource1ExportInfo), jobIdPrefix, 3);
  orderVerifier.verify(inMemoryDataCopier)
      .copyIteration(jobId, exportAuthData, importAuthData, Optional.of(subResource2ExportInfo), jobIdPrefix, 4);
}
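Note that both tests verify copyIteration against ExportInformation instances constructed fresh inside the test, so the InOrder checks only pass if ExportInformation compares by value rather than by reference. A minimal sketch of that assumption (illustrative, not a test from the project; it assumes JUnit's Assert is available):

@Test
public void exportInformationComparesByValue() {
  ContainerResource subResource = Mockito.mock(ContainerResource.class);
  // Two instances built from the same pagination data and container resource are
  // expected to be equal, which is what lets Mockito match the arguments above.
  Assert.assertEquals(
      new ExportInformation(null, subResource), new ExportInformation(null, subResource));
}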
Use of org.datatransferproject.types.common.ExportInformation in project data-transfer-project by google.
The class JobProcessor, method processJob:
/** Process our job, whose metadata is available via {@link JobMetadata}. */
void processJob() {
  boolean success = false;
  UUID jobId = JobMetadata.getJobId();
  monitor.debug(() -> format("Begin processing jobId: %s", jobId), EventCode.WORKER_JOB_STARTED);
  Collection<ErrorDetail> errors = null;
  try {
    markJobStarted(jobId);
    hooks.jobStarted(jobId);
    PortabilityJob job = store.findJob(jobId);
    JobAuthorization jobAuthorization = job.jobAuthorization();
    monitor.debug(
        () -> format(
            "Starting copy job, id: %s, source: %s, destination: %s",
            jobId, job.exportService(), job.importService()));
    String scheme = jobAuthorization.encryptionScheme();
    AuthDataDecryptService decryptService = getAuthDecryptService(scheme);
    if (decryptService == null) {
      monitor.severe(
          () -> format("No auth decrypter found for scheme %s while processing job: %s", scheme, jobId));
      return;
    }
    String encrypted = jobAuthorization.encryptedAuthData();
    byte[] encodedPrivateKey = JobMetadata.getPrivateKey();
    AuthDataPair pair = decryptService.decrypt(encrypted, encodedPrivateKey);
    AuthData exportAuthData = objectMapper.readValue(pair.getExportAuthData(), AuthData.class);
    AuthData importAuthData = objectMapper.readValue(pair.getImportAuthData(), AuthData.class);
    String exportInfoStr = job.exportInformation();
    Optional<ExportInformation> exportInfo = Optional.empty();
    if (!Strings.isNullOrEmpty(exportInfoStr)) {
      exportInfo = Optional.of(objectMapper.readValue(exportInfoStr, ExportInformation.class));
    }
    // Copy the data
    dtpInternalMetricRecorder.startedJob(
        JobMetadata.getDataType(), JobMetadata.getExportService(), JobMetadata.getImportService());
    JobMetadata.getStopWatch().start();
    errors = copier.copy(exportAuthData, importAuthData, jobId, exportInfo);
    final int numErrors = errors.size();
    monitor.debug(() -> format("Finished copy for jobId: %s with %d error(s).", jobId, numErrors));
    success = errors.isEmpty();
  } catch (CopyExceptionWithFailureReason e) {
    String failureReason = e.getFailureReason();
    if (failureReason.contains(FailureReasons.DESTINATION_FULL.toString())) {
      monitor.info(
          () -> "The remaining storage in the user's account is not enough to perform this operation.", e);
    } else if (failureReason.contains(FailureReasons.INVALID_TOKEN.toString())
        || failureReason.contains(FailureReasons.SESSION_INVALIDATED.toString())
        || failureReason.contains(FailureReasons.UNCONFIRMED_USER.toString())
        || failureReason.contains(FailureReasons.USER_CHECKPOINTED.toString())) {
      monitor.info(() -> "Got token error", e);
    } else {
      monitor.severe(
          () -> format("Error with failure code '%s' while processing jobId: %s", failureReason, jobId),
          e,
          EventCode.WORKER_JOB_ERRORED);
    }
    addFailureReasonToJob(jobId, failureReason);
  } catch (IOException | CopyException | RuntimeException e) {
    monitor.severe(() -> "Error processing jobId: " + jobId, e, EventCode.WORKER_JOB_ERRORED);
  } finally {
    monitor.debug(() -> "Finished processing jobId: " + jobId, EventCode.WORKER_JOB_FINISHED);
    addErrorsAndMarkJobFinished(jobId, success, errors);
    hooks.jobFinished(jobId, success);
    dtpInternalMetricRecorder.finishedJob(
        JobMetadata.getDataType(),
        JobMetadata.getExportService(),
        JobMetadata.getImportService(),
        success,
        JobMetadata.getStopWatch().elapsed());
    monitor.flushLogs();
    JobMetadata.reset();
  }
}
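For reference, the exportInformation handling above amounts to round-tripping an ExportInformation through Jackson: the job stores it as a JSON string and processJob reads it back. A minimal sketch of that round trip, assuming ExportInformation (de)serializes with the same plain ObjectMapper that the readValue call above already relies on (the helper name is illustrative):

private ExportInformation roundTripExportInformation(ObjectMapper objectMapper, ExportInformation original)
    throws IOException {
  // Serialize to the same kind of JSON string a PortabilityJob carries in its
  // exportInformation field, then read it back as processJob does.
  String exportInfoStr = objectMapper.writeValueAsString(original);
  return objectMapper.readValue(exportInfoStr, ExportInformation.class);
}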