Example usage of org.datatransferproject.types.transfer.models.blob.BlobbyStorageContainerResource in the data-transfer-project repository by Google: the DriveExporter class, export method.
@Override
public ExportResult<BlobbyStorageContainerResource> export(
    UUID jobId,
    TokensAndUrlAuthData authData,
    Optional<ExportInformation> optionalExportInformation)
    throws Exception {
  // Exports one "page" of a single Drive folder: sub-folders become child container
  // resources, regular files are streamed into the job store, and Google-native
  // documents are converted to a downloadable mime type first.
  Drive driveInterface = getDriveInterface(authData);

  // Use the typed request object instead of a raw List: the raw type defeated
  // compile-time checking of setPageToken/setFields/setQ below.
  Drive.Files.List driveListOperation = driveInterface.files().list();

  // If the folder id isn't specified then start from the Drive root.
  String parentId = "root";
  if (optionalExportInformation.isPresent()) {
    ExportInformation exportInformation = optionalExportInformation.get();
    if (exportInformation.getPaginationData() != null) {
      // Resume a previous listing of the same folder.
      StringPaginationToken paginationToken =
          (StringPaginationToken) exportInformation.getPaginationData();
      driveListOperation.setPageToken(paginationToken.getToken());
    }
    if (exportInformation.getContainerResource() != null) {
      BlobbyStorageContainerResource parent =
          (BlobbyStorageContainerResource) exportInformation.getContainerResource();
      parentId = parent.getId();
    }
  }
  driveListOperation
      .setFields("files(id, name, modifiedTime, mimeType)")
      .setQ(String.format(DRIVE_QUERY_FORMAT, parentId));

  List<DigitalDocumentWrapper> files = new ArrayList<>();
  List<BlobbyStorageContainerResource> folders = new ArrayList<>();
  FileList fileList = driveListOperation.execute();
  for (File file : fileList.getFiles()) {
    if (FOLDER_MIME_TYPE.equals(file.getMimeType())) {
      // Folders are not downloaded; they become child containers to recurse into.
      folders.add(new BlobbyStorageContainerResource(file.getName(), file.getId(), null, null));
    } else if (FUSION_TABLE_MIME_TYPE.equals(file.getMimeType())) {
      monitor.info(() -> "Exporting of fusion tables is not yet supported: " + file);
    } else if (MAP_MIME_TYPE.equals(file.getMimeType())) {
      monitor.info(() -> "Exporting of maps is not yet supported: " + file);
    } else {
      try {
        InputStream inputStream;
        String newMimeType = file.getMimeType();
        if (EXPORT_FORMATS.containsKey(file.getMimeType())) {
          // Google-native formats (Docs, Sheets, ...) can't be downloaded directly;
          // export them converted to the mapped downloadable mime type.
          newMimeType = EXPORT_FORMATS.get(file.getMimeType());
          inputStream =
              driveInterface.files().export(file.getId(), newMimeType).executeMedia().getContent();
        } else {
          // Binary content is downloaded as-is.
          inputStream =
              driveInterface.files().get(file.getId()).setAlt("media").executeMedia().getContent();
        }
        // NOTE(review): the stream is handed over un-closed here; presumably
        // jobStore.create consumes and closes it — confirm to rule out a leak.
        jobStore.create(jobId, file.getId(), inputStream);
        files.add(
            new DigitalDocumentWrapper(
                new DtpDigitalDocument(
                    file.getName(), file.getModifiedTime().toStringRfc3339(), newMimeType),
                file.getMimeType(),
                file.getId()));
      } catch (Exception e) {
        // Deliberate best-effort: log the failure and keep exporting remaining files.
        monitor.severe(() -> "Error exporting " + file, e);
      }
    }
    monitor.info(() -> "Exported " + file);
  }

  // CONTINUE while Drive reports more pages for this folder; END otherwise.
  ResultType resultType = isDone(fileList) ? ResultType.END : ResultType.CONTINUE;
  BlobbyStorageContainerResource result =
      new BlobbyStorageContainerResource(null, parentId, files, folders);
  StringPaginationToken paginationToken = null;
  if (!Strings.isNullOrEmpty(fileList.getNextPageToken())) {
    paginationToken = new StringPaginationToken(fileList.getNextPageToken());
  }
  ContinuationData continuationData = new ContinuationData(paginationToken);
  // Each sub-folder is queued as its own container to be exported separately.
  folders.forEach(continuationData::addContainerResource);
  return new ExportResult<>(resultType, result, continuationData);
}
Example usage of org.datatransferproject.types.transfer.models.blob.BlobbyStorageContainerResource in the data-transfer-project repository by Google: the DriveImporter class, importItem method.
@Override
public ImportResult importItem(
    UUID jobId,
    IdempotentImportExecutor idempotentExecutor,
    TokensAndUrlAuthData authData,
    BlobbyStorageContainerResource data)
    throws Exception {
  // Imports one container (folder) worth of content into Drive: creates the
  // container's child folders, then uploads its files into the resolved parent.
  String parentId;
  Drive driveInterface = getDriveInterface(authData);
  // Resolve the Drive folder this container's contents should land in.
  if (Strings.isNullOrEmpty(data.getId()) || "root".equals(data.getId())) {
    // Root level: create (or idempotently reuse) the top-level migration folder.
    parentId =
        idempotentExecutor.executeOrThrowException(
            ROOT_FOLDER_ID,
            data.getName(),
            () -> importSingleFolder(driveInterface, "MigratedContent", null));
  } else {
    // Fixed: look up the Drive folder created for THIS container — it was cached
    // under the container's exported id when its parent was imported. The previous
    // getCachedValue(ROOT_FOLDER_ID) returned the migration root for every
    // container, flattening the whole folder hierarchy into one folder.
    parentId = idempotentExecutor.getCachedValue(data.getId());
  }
  // Create this container's sub-folders, keyed by their exported id so the
  // resulting Drive ids can be looked up when those containers are imported.
  if (data.getFolders() != null && !data.getFolders().isEmpty()) {
    for (BlobbyStorageContainerResource folder : data.getFolders()) {
      idempotentExecutor.executeAndSwallowIOExceptions(
          folder.getId(),
          folder.getName(),
          () -> importSingleFolder(driveInterface, folder.getName(), parentId));
    }
  }
  // Upload this container's files into the resolved parent folder.
  if (data.getFiles() != null && !data.getFiles().isEmpty()) {
    for (DigitalDocumentWrapper file : data.getFiles()) {
      // NOTE(review): hashCode() as the idempotency key is fragile if the wrapper
      // lacks a stable hashCode — a content-derived id would be safer; confirm.
      idempotentExecutor.executeAndSwallowIOExceptions(
          Integer.toString(file.hashCode()),
          file.getDtpDigitalDocument().getName(),
          () -> importSingleFile(jobId, driveInterface, file, parentId));
    }
  }
  return ImportResult.OK;
}
Aggregations