Use of org.datatransferproject.types.transfer.errors.ErrorDetail in project data-transfer-project by Google.
The class LocalJobStore, method addErrorsToJob.
@Override
public void addErrorsToJob(UUID jobId, Collection<ErrorDetail> errors) throws IOException {
  // Currently a no-op beyond logging, since nothing in DTP reads the errors yet.
  if (errors != null && !errors.isEmpty()) {
    for (ErrorDetail error : errors) {
      String errorString = OBJECT_MAPPER.writeValueAsString(error);
      monitor.info(() -> "Added error: " + errorString);
    }
  }
}
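For context, a caller might build the ErrorDetail values this method logs roughly as follows. This is a minimal sketch: the builder methods (setId, setTitle, setException) follow the usual shape of DTP's ErrorDetail, while ErrorReportingExample, recordFailure, and the jobStore field are hypothetical names invented for illustration.

import com.google.common.base.Throwables;
import java.io.IOException;
import java.util.List;
import java.util.UUID;
import org.datatransferproject.types.transfer.errors.ErrorDetail;

class ErrorReportingExample {
  private final LocalJobStore jobStore;  // hypothetical: any JobStore would work the same way

  ErrorReportingExample(LocalJobStore jobStore) {
    this.jobStore = jobStore;
  }

  // Record a single failed item against a job so it shows up in the monitor log above.
  void recordFailure(UUID jobId, String itemId, String itemTitle, Exception cause)
      throws IOException {
    ErrorDetail detail =
        ErrorDetail.builder()
            .setId(itemId)                                        // id of the failed item
            .setTitle(itemTitle)                                  // human-readable item name
            .setException(Throwables.getStackTraceAsString(cause))
            .build();
    jobStore.addErrorsToJob(jobId, List.of(detail));
  }
}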
Use of org.datatransferproject.types.transfer.errors.ErrorDetail in project data-transfer-project by Google.
The class GoogleCloudIdempotentImportExecutor, method getErrorDetailsForJob.
private Map<String, ErrorDetail> getErrorDetailsForJob(UUID jobId) {
  Map<String, ErrorDetail> datastoreKnownErrors = new HashMap<>();
  Query<Entity> query =
      Query.newEntityQueryBuilder()
          .setKind(IDEMPONTENT_ERRORS_KIND)
          .setFilter(CompositeFilter.and(PropertyFilter.eq(JOB_ID_FIELD, String.valueOf(jobId))))
          .build();
  QueryResults<Entity> results = datastore.run(query);
  while (results.hasNext()) {
    Entity result = results.next();
    try {
      ErrorDetail error =
          objectMapper.readerFor(ErrorDetail.class).readValue(result.getString(ERROR_FIELD));
      datastoreKnownErrors.put(result.getString(IDEMPOTENT_ID_FIELD), error);
    } catch (IOException e) {
      monitor.severe(() -> jobIdPrefix + "Unable to parse ErrorDetail: " + e);
      throw new IllegalStateException(e);
    }
  }
  return datastoreKnownErrors;
}
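Reading these entities back implies a matching write path. The sketch below shows one plausible way such an entity could be created: the kind and field constants mirror the ones referenced in the query above, but the createErrorEntity helper and its key scheme are assumptions for illustration, not DTP's actual code (which may, for instance, exclude the JSON blob from indexing).

import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.cloud.datastore.Datastore;
import com.google.cloud.datastore.Entity;
import com.google.cloud.datastore.Key;
import java.io.IOException;
import java.util.UUID;
import org.datatransferproject.types.transfer.errors.ErrorDetail;

// Hypothetical write side of the error store that getErrorDetailsForJob reads.
Entity createErrorEntity(
    Datastore datastore, ObjectMapper objectMapper, UUID jobId, String idempotentId,
    ErrorDetail error) throws IOException {
  Key key =
      datastore.newKeyFactory()
          .setKind(IDEMPONTENT_ERRORS_KIND)       // same kind the query above filters on
          .newKey(jobId + "-" + idempotentId);    // key scheme is an illustrative assumption
  return Entity.newBuilder(key)
      .set(JOB_ID_FIELD, String.valueOf(jobId))
      .set(IDEMPOTENT_ID_FIELD, idempotentId)
      // Serialized via Jackson, the same way it is parsed back with readerFor above.
      .set(ERROR_FIELD, objectMapper.writeValueAsString(error))
      .build();
}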
Use of org.datatransferproject.types.transfer.errors.ErrorDetail in project data-transfer-project by Google.
The class CallableImporter, method call.
@Override
public ImportResult call() throws Exception {
  boolean success = false;
  Stopwatch stopwatch = Stopwatch.createStarted();
  try {
    idempotentImportExecutor.resetRecentErrors();
    ImportResult result =
        importerProvider.get().importItem(jobId, idempotentImportExecutor, authData, data);
    Collection<ErrorDetail> errors = idempotentImportExecutor.getRecentErrors();
    success = result.getType() == ImportResult.ResultType.OK && errors.isEmpty();
    if (!success) {
      throw new IOException(
          "Problem with importer, forcing a retry, first error: "
              + (errors.iterator().hasNext() ? errors.iterator().next().exception() : "none"));
    }
    result = result.copyWithCounts(data.getCounts());
    return result;
  } finally {
    metricRecorder.importPageAttemptFinished(
        JobMetadata.getDataType(), JobMetadata.getImportService(), success, stopwatch.elapsed());
  }
}
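The key design choice here is that any non-OK result is surfaced as a thrown IOException, so a generic retry layer can treat a partial import like any other transient fault. A minimal sketch of such a wrapper follows; DTP's actual retry strategy is configurable, so callWithRetries, maxAttempts, and the linear backoff are illustrative only.

import java.util.concurrent.Callable;

// Re-invokes the callable when it throws, which is how a non-OK ImportResult
// from CallableImporter would get retried.
static <T> T callWithRetries(Callable<T> callable, int maxAttempts, long backoffMillis)
    throws Exception {
  Exception last = null;
  for (int attempt = 1; attempt <= maxAttempts; attempt++) {
    try {
      return callable.call();
    } catch (Exception e) {
      last = e;
      if (attempt < maxAttempts) {
        Thread.sleep(backoffMillis * attempt);  // linear backoff for illustration
      }
    }
  }
  throw last;
}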
Use of org.datatransferproject.types.transfer.errors.ErrorDetail in project data-transfer-project by Google.
The class JobProcessor, method processJob.
/**
 * Process our job, whose metadata is available via {@link JobMetadata}.
 */
void processJob() {
  boolean success = false;
  UUID jobId = JobMetadata.getJobId();
  monitor.debug(() -> format("Begin processing jobId: %s", jobId), EventCode.WORKER_JOB_STARTED);
  Collection<ErrorDetail> errors = null;
  try {
    markJobStarted(jobId);
    hooks.jobStarted(jobId);
    PortabilityJob job = store.findJob(jobId);
    JobAuthorization jobAuthorization = job.jobAuthorization();
    monitor.debug(
        () -> format(
            "Starting copy job, id: %s, source: %s, destination: %s",
            jobId, job.exportService(), job.importService()));
    String scheme = jobAuthorization.encryptionScheme();
    AuthDataDecryptService decryptService = getAuthDecryptService(scheme);
    if (decryptService == null) {
      monitor.severe(
          () -> format(
              "No auth decrypter found for scheme %s while processing job: %s", scheme, jobId));
      return;
    }
    String encrypted = jobAuthorization.encryptedAuthData();
    byte[] encodedPrivateKey = JobMetadata.getPrivateKey();
    AuthDataPair pair = decryptService.decrypt(encrypted, encodedPrivateKey);
    AuthData exportAuthData = objectMapper.readValue(pair.getExportAuthData(), AuthData.class);
    AuthData importAuthData = objectMapper.readValue(pair.getImportAuthData(), AuthData.class);
    String exportInfoStr = job.exportInformation();
    Optional<ExportInformation> exportInfo = Optional.empty();
    if (!Strings.isNullOrEmpty(exportInfoStr)) {
      exportInfo = Optional.of(objectMapper.readValue(exportInfoStr, ExportInformation.class));
    }
    // Copy the data
    dtpInternalMetricRecorder.startedJob(
        JobMetadata.getDataType(), JobMetadata.getExportService(), JobMetadata.getImportService());
    JobMetadata.getStopWatch().start();
    errors = copier.copy(exportAuthData, importAuthData, jobId, exportInfo);
    final int numErrors = errors.size();
    monitor.debug(() -> format("Finished copy for jobId: %s with %d error(s).", jobId, numErrors));
    success = errors.isEmpty();
  } catch (CopyExceptionWithFailureReason e) {
    String failureReason = e.getFailureReason();
    if (failureReason.contains(FailureReasons.DESTINATION_FULL.toString())) {
      monitor.info(
          () -> "The remaining storage in the user's account is not enough to perform this operation.",
          e);
    } else if (failureReason.contains(FailureReasons.INVALID_TOKEN.toString())
        || failureReason.contains(FailureReasons.SESSION_INVALIDATED.toString())
        || failureReason.contains(FailureReasons.UNCONFIRMED_USER.toString())
        || failureReason.contains(FailureReasons.USER_CHECKPOINTED.toString())) {
      monitor.info(() -> "Got token error", e);
    } else {
      monitor.severe(
          () -> format(
              "Error with failure code '%s' while processing jobId: %s", failureReason, jobId),
          e,
          EventCode.WORKER_JOB_ERRORED);
    }
    addFailureReasonToJob(jobId, failureReason);
  } catch (IOException | CopyException | RuntimeException e) {
    monitor.severe(() -> "Error processing jobId: " + jobId, e, EventCode.WORKER_JOB_ERRORED);
  } finally {
    monitor.debug(() -> "Finished processing jobId: " + jobId, EventCode.WORKER_JOB_FINISHED);
    addErrorsAndMarkJobFinished(jobId, success, errors);
    hooks.jobFinished(jobId, success);
    dtpInternalMetricRecorder.finishedJob(
        JobMetadata.getDataType(),
        JobMetadata.getExportService(),
        JobMetadata.getImportService(),
        success,
        JobMetadata.getStopWatch().elapsed());
    monitor.flushLogs();
    JobMetadata.reset();
  }
}
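In the finally block, addErrorsAndMarkJobFinished is where the collected ErrorDetail values reach the JobStore's addErrorsToJob (the first snippet on this page). A plausible shape for that helper is sketched below; it reuses the same store and monitor fields as processJob, but markJobAsFinished is only an assumed counterpart to the markJobStarted call above, not a confirmed DTP method.

// Hedged sketch: persist the errors, then record the job's terminal state.
private void addErrorsAndMarkJobFinished(UUID jobId, boolean success, Collection<ErrorDetail> errors) {
  try {
    store.addErrorsToJob(jobId, errors);  // see LocalJobStore.addErrorsToJob above
  } catch (IOException | RuntimeException e) {
    success = false;
    monitor.severe(() -> "Problem adding errors to JobStore", e);
  }
  markJobAsFinished(jobId, success);      // assumed counterpart of markJobStarted
}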
Use of org.datatransferproject.types.transfer.errors.ErrorDetail in project data-transfer-project by Google.
The class GooglePhotosImporterTest, method importTwoPhotosWithFailure.
@Test
public void importTwoPhotosWithFailure() throws Exception {
  PhotoModel photoModel1 =
      new PhotoModel(
          PHOTO_TITLE, IMG_URI, PHOTO_DESCRIPTION, JPEG_MEDIA_TYPE, "oldPhotoID1", OLD_ALBUM_ID,
          false);
  PhotoModel photoModel2 =
      new PhotoModel(
          PHOTO_TITLE, IMG_URI, PHOTO_DESCRIPTION, JPEG_MEDIA_TYPE, "oldPhotoID2", OLD_ALBUM_ID,
          false);
  Mockito.when(googlePhotosInterface.uploadPhotoContent(any())).thenReturn("token1", "token2");
  BatchMediaItemResponse batchMediaItemResponse =
      new BatchMediaItemResponse(
          new NewMediaItemResult[] {
            buildMediaItemResult("token1", Code.OK_VALUE),
            buildMediaItemResult("token2", Code.UNAUTHENTICATED_VALUE)
          });
  Mockito.when(googlePhotosInterface.createPhotos(any(NewMediaItemUpload.class)))
      .thenReturn(batchMediaItemResponse);
  long length =
      googlePhotosImporter.importPhotoBatch(
          UUID.randomUUID(),
          Mockito.mock(TokensAndUrlAuthData.class),
          Lists.newArrayList(photoModel1, photoModel2),
          executor,
          NEW_ALBUM_ID);
  // Only the first photo (32 bytes) is imported; the UNAUTHENTICATED one contributes nothing.
  assertEquals(32L, length);
  assertTrue(executor.isKeyCached(String.format("%s-%s", OLD_ALBUM_ID, "oldPhotoID1")));
  String failedDataId = String.format("%s-%s", OLD_ALBUM_ID, "oldPhotoID2");
  assertFalse(executor.isKeyCached(failedDataId));
  ErrorDetail errorDetail = executor.getErrors().iterator().next();
  assertEquals(failedDataId, errorDetail.id());
  assertThat(errorDetail.exception(), CoreMatchers.containsString("Media item could not be created."));
}
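The assertions rely on the IdempotentImportExecutor contract: successes are cached by key, failures are recorded as ErrorDetail values retrievable afterwards. A toy illustration of that contract is sketched below; this is not DTP's actual InMemoryIdempotentImportExecutor, and the ToyIdempotentExecutor class and its execute method are simplified stand-ins.

import com.google.common.base.Throwables;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.Callable;
import org.datatransferproject.types.transfer.errors.ErrorDetail;

// Toy executor illustrating the caching/error contract the test asserts on.
class ToyIdempotentExecutor {
  private final Map<String, Object> cache = new HashMap<>();
  private final Map<String, ErrorDetail> errors = new HashMap<>();

  @SuppressWarnings("unchecked")
  <T> T execute(String id, String title, Callable<T> callable) {
    if (cache.containsKey(id)) {
      return (T) cache.get(id);  // already imported: skip the work
    }
    try {
      T result = callable.call();
      cache.put(id, result);     // isKeyCached(id) now returns true
      errors.remove(id);
      return result;
    } catch (Exception e) {
      errors.put(
          id,
          ErrorDetail.builder()
              .setId(id)
              .setTitle(title)
              .setException(Throwables.getStackTraceAsString(e))
              .build());
      return null;
    }
  }

  boolean isKeyCached(String id) {
    return cache.containsKey(id);
  }

  Collection<ErrorDetail> getErrors() {
    return errors.values();
  }
}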