Use of org.datatransferproject.types.common.models.photos.PhotoAlbum in project data-transfer-project by google.
The class ImgurPhotosExporter, method requestAlbums.
/**
 * Exports albums.
 *
 * @param authData authentication information
 * @param paginationData pagination information to use for subsequent calls
 */
private ExportResult<PhotosContainerResource> requestAlbums(
    TokensAndUrlAuthData authData, PaginationData paginationData) throws IOException {
  ImmutableList.Builder<PhotoAlbum> albumBuilder = ImmutableList.builder();
  List<IdOnlyContainerResource> albumIds = new ArrayList<>();
  int page = paginationData == null ? 0 : ((IntPaginationToken) paginationData).getStart();
  String url = format(ALBUMS_URL_TEMPLATE, page);
  List<Map<String, Object>> items = requestData(authData, url);
  // The request result does not indicate whether this is the last page
  boolean hasMore = (items != null && items.size() != 0);
  for (Map<String, Object> item : items) {
    albumBuilder.add(
        new PhotoAlbum(
            (String) item.get("id"), (String) item.get("title"), (String) item.get("description")));
    // Save the album id so the export can be re-invoked to fetch all photos in this album
    albumIds.add(new IdOnlyContainerResource((String) item.get("id")));
  }
  if (page == 0) {
    // For checking non-album photos; their export should be performed after all the others.
    // The album itself will be created later.
    albumIds.add(new IdOnlyContainerResource(DEFAULT_ALBUM_ID));
  }
  PaginationData newPage = null;
  if (hasMore) {
    newPage = new IntPaginationToken(page + 1);
    int start = ((IntPaginationToken) newPage).getStart();
    monitor.info(() -> format("albums size: %s, newPage: %s", items.size(), start));
  }
  PhotosContainerResource photosContainerResource =
      new PhotosContainerResource(albumBuilder.build(), null);
  ContinuationData continuationData = new ContinuationData(newPage);
  albumIds.forEach(continuationData::addContainerResource);
  ExportResult.ResultType resultType = ExportResult.ResultType.CONTINUE;
  if (newPage == null) {
    resultType = ExportResult.ResultType.END;
  }
  return new ExportResult<>(resultType, photosContainerResource, continuationData);
}
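The IntPaginationToken carried in the ContinuationData is what drives repeated calls: the caller feeds the token from one result into the next request until the result type is END. The loop below is a minimal sketch of that contract, not the project's actual job runner; it assumes ExportResult exposes its ResultType via getType() and that the caller can invoke requestAlbums directly.

  // Hypothetical driver loop (sketch only): page through albums until the exporter signals END.
  PaginationData pageData = null;
  ExportResult<PhotosContainerResource> result;
  do {
    result = requestAlbums(authData, pageData);
    ContinuationData continuation = result.getContinuationData();
    pageData = (continuation == null) ? null : continuation.getPaginationData();
  } while (result.getType() == ExportResult.ResultType.CONTINUE && pageData != null);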
Use of org.datatransferproject.types.common.models.photos.PhotoAlbum in project data-transfer-project by google.
The class InstagramPhotoExporter, method exportPhotos.
private ExportResult<PhotosContainerResource> exportPhotos(
    TokensAndUrlAuthData authData, Optional<PaginationData> pageData) {
  Preconditions.checkNotNull(authData);
  MediaResponse response;
  try {
    response = makeRequest(MEDIA_URL, MediaResponse.class, authData);
  } catch (IOException e) {
    return new ExportResult<>(e);
  }
  List<PhotoModel> photos = new ArrayList<>();
  // TODO: check out paging.
  for (MediaFeedData photo : response.getData()) {
    // TODO json mapping is broken.
    String photoId = photo.getId();
    String url = photo.getImages().getStandardResolution().getUrl();
    String text = (photo.getCaption() != null) ? photo.getCaption().getText() : null;
    photos.add(
        new PhotoModel("Instagram photo: " + photoId, url, text, null, photoId, FAKE_ALBUM_ID, false));
  }
  List<PhotoAlbum> albums = new ArrayList<>();
  if (!photos.isEmpty() && !pageData.isPresent()) {
    albums.add(
        new PhotoAlbum(FAKE_ALBUM_ID, "Imported Instagram Photos", "Photos imported from instagram"));
  }
  return new ExportResult<>(ResultType.END, new PhotosContainerResource(albums, photos));
}
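The positional PhotoModel constructor call above is hard to read on its own. The labels below are inferred from how the arguments are used across the examples on this page; the parameter names are illustrative, not taken from PhotoModel's source.

  // Same call as above with each argument labeled (names inferred from usage, not from the class):
  photos.add(new PhotoModel(
      "Instagram photo: " + photoId, // title
      url,                           // fetchable URL of the image
      text,                          // description (caption text, may be null)
      null,                          // media type (not known at this point)
      photoId,                       // data id
      FAKE_ALBUM_ID,                 // album id: every photo is placed in the synthetic album
      false));                       // whether the bytes are already in the job's temp store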
Use of org.datatransferproject.types.common.models.photos.PhotoAlbum in project data-transfer-project by google.
The class MicrosoftPhotosExporterTest, method exportAlbumAndPhotoWithNextPage.
@Test
public void exportAlbumAndPhotoWithNextPage() throws IOException {
  // Setup
  MicrosoftDriveItem folderItem = setUpSingleAlbum();
  MicrosoftDriveItem photoItem = setUpSinglePhoto(IMAGE_URI, PHOTO_ID);
  when(driveItemsResponse.getDriveItems()).thenReturn(new MicrosoftDriveItem[] {folderItem, photoItem});
  when(driveItemsResponse.getNextPageLink()).thenReturn(DRIVE_PAGE_URL);

  // Run
  ExportResult<PhotosContainerResource> result =
      microsoftPhotosExporter.exportOneDrivePhotos(null, Optional.empty(), Optional.empty(), uuid);

  // Verify method calls
  verify(photosInterface).getDriveItemsFromSpecialFolder(MicrosoftSpecialFolder.FolderType.photos);
  verify(driveItemsResponse).getDriveItems();

  // Verify the pagination token is set
  ContinuationData continuationData = result.getContinuationData();
  StringPaginationToken paginationToken = (StringPaginationToken) continuationData.getPaginationData();
  assertThat(paginationToken.getToken()).isEqualTo(DRIVE_TOKEN_PREFIX + DRIVE_PAGE_URL);

  // Verify one album is ready for import
  Collection<PhotoAlbum> actualAlbums = result.getExportedData().getAlbums();
  assertThat(actualAlbums.stream().map(PhotoAlbum::getId).collect(Collectors.toList()))
      .containsExactly(FOLDER_ID);

  // Verify one photo is present (in the root of the Photos special folder)
  Collection<PhotoModel> actualPhotos = result.getExportedData().getPhotos();
  assertThat(actualPhotos.stream().map(PhotoModel::getFetchableUrl).collect(Collectors.toList()))
      .containsExactly(IMAGE_URI);
  assertThat(actualPhotos.stream().map(PhotoModel::getAlbumId).collect(Collectors.toList()))
      .containsExactly((String) null);
  assertThat(actualPhotos.stream().map(PhotoModel::getTitle).collect(Collectors.toList()))
      .containsExactly(FILENAME);

  // Verify there is one container ready for sub-processing
  List<ContainerResource> actualResources = continuationData.getContainerResources();
  assertThat(actualResources.stream().map(a -> ((IdOnlyContainerResource) a).getId()).collect(Collectors.toList()))
      .containsExactly(FOLDER_ID);
}
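The DRIVE_TOKEN_PREFIX assertion reflects how the exporter encodes continuation state: the OneDrive next-page link is wrapped in a StringPaginationToken behind a prefix so a later call can tell a drive page apart from other tokens. The round trip below is an illustrative sketch built from the constants in this test, not the exporter's exact code.

  // Encode: prefix the next-page link before handing it back as the pagination token.
  StringPaginationToken token = new StringPaginationToken(DRIVE_TOKEN_PREFIX + DRIVE_PAGE_URL);
  // Decode (illustrative): strip the prefix to recover the URL for the next drive-items request.
  String raw = token.getToken();
  if (raw.startsWith(DRIVE_TOKEN_PREFIX)) {
    String nextPageUrl = raw.substring(DRIVE_TOKEN_PREFIX.length());
    // ... fetch the next page of drive items from nextPageUrl ...
  }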
Use of org.datatransferproject.types.common.models.photos.PhotoAlbum in project data-transfer-project by google.
The class MicrosoftPhotosImporterTest, method testImportItemAllSuccess.
@Test
public void testImportItemAllSuccess() throws Exception {
  List<PhotoAlbum> albums = ImmutableList.of(new PhotoAlbum("id1", "albumb1", "This is a fake albumb"));
  List<PhotoModel> photos = ImmutableList.of(
      new PhotoModel("Pic1", "http://fake.com/1.jpg", "A pic", "image/jpg", "p1", "id1", true),
      new PhotoModel("Pic2", "https://fake.com/2.png", "fine art", "image/png", "p2", "id1", true));
  when(jobStore.getStream(uuid, "http://fake.com/1.jpg"))
      .thenReturn(new InputStreamWrapper(new ByteArrayInputStream(new byte[CHUNK_SIZE])));
  when(jobStore.getStream(uuid, "https://fake.com/2.png"))
      .thenReturn(new InputStreamWrapper(new ByteArrayInputStream(new byte[CHUNK_SIZE])));
  PhotosContainerResource data = new PhotosContainerResource(albums, photos);

  // Stub the album (folder) creation request
  Call call = mock(Call.class);
  doReturn(call).when(client).newCall(argThat((Request r) ->
      r.url().toString().equals("https://www.baseurl.com/v1.0/me/drive/special/photos/children")));
  Response response = mock(Response.class);
  ResponseBody body = mock(ResponseBody.class);
  when(body.bytes()).thenReturn(ResponseBody.create(MediaType.parse("application/json"), "{\"id\": \"id1\"}").bytes());
  when(body.string()).thenReturn(ResponseBody.create(MediaType.parse("application/json"), "{\"id\": \"id1\"}").string());
  when(response.code()).thenReturn(200);
  when(response.body()).thenReturn(body);
  when(call.execute()).thenReturn(response);

  // Stub the upload-session creation request
  Call call2 = mock(Call.class);
  doReturn(call2).when(client).newCall(argThat((Request r) -> r.url().toString().contains("createUploadSession")));
  Response response2 = mock(Response.class);
  ResponseBody body2 = mock(ResponseBody.class);
  when(body2.bytes()).thenReturn(ResponseBody.create(MediaType.parse("application/json"), "{\"uploadUrl\": \"https://scalia.com/link\"}").bytes());
  when(body2.string()).thenReturn(ResponseBody.create(MediaType.parse("application/json"), "{\"uploadUrl\": \"https://scalia.com/link\"}").string());
  when(response2.code()).thenReturn(200);
  when(response2.body()).thenReturn(body2);
  when(call2.execute()).thenReturn(response2);

  // Stub the upload to the returned upload URL
  Call call3 = mock(Call.class);
  doReturn(call3).when(client).newCall(argThat((Request r) -> r.url().toString().contains("scalia.com/link")));
  Response response3 = mock(Response.class);
  ResponseBody body3 = mock(ResponseBody.class);
  when(body3.bytes()).thenReturn(ResponseBody.create(MediaType.parse("application/json"), "{\"id\": \"rand1\"}").bytes());
  when(body3.string()).thenReturn(ResponseBody.create(MediaType.parse("application/json"), "{\"id\": \"rand1\"}").string());
  when(response3.code()).thenReturn(200);
  when(response3.body()).thenReturn(body3);
  when(call3.execute()).thenReturn(response3);

  ImportResult result = importer.importItem(uuid, executor, authData, data);
  verify(client, times(5)).newCall(any());
  assertThat(result).isEqualTo(ImportResult.OK);
}
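The three Call stubs above repeat the same OkHttp mocking pattern. A hypothetical helper like the one below (not part of the test) shows the shape of each stub in one place; each doReturn(...) line would then pass jsonCall(...) for its matched URL.

  // Hypothetical helper: a mocked Call whose Response answers HTTP 200 with the given JSON body.
  private static Call jsonCall(String json) throws IOException {
    ResponseBody body = mock(ResponseBody.class);
    when(body.string()).thenReturn(json);
    when(body.bytes()).thenReturn(json.getBytes(StandardCharsets.UTF_8));
    Response response = mock(Response.class);
    when(response.code()).thenReturn(200);
    when(response.body()).thenReturn(body);
    Call call = mock(Call.class);
    when(call.execute()).thenReturn(response);
    return call;
  }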
Use of org.datatransferproject.types.common.models.photos.PhotoAlbum in project data-transfer-project by google.
The class KoofrPhotosImporterTest, method testImportItemFromJobStoreUserTimeZoneCalledOnce.
@Test
public void testImportItemFromJobStoreUserTimeZoneCalledOnce() throws Exception {
  ByteArrayInputStream inputStream = new ByteArrayInputStream(new byte[] {0, 1, 2, 3, 4});
  when(jobStore.getStream(any(), any())).thenReturn(new InputStreamWrapper(inputStream, 5L));
  UUID jobId = UUID.randomUUID();
  PortabilityJob job = mock(PortabilityJob.class);
  when(job.userTimeZone()).thenReturn(TimeZone.getTimeZone("Europe/Rome"));
  when(jobStore.findJob(jobId)).thenReturn(job);

  Collection<PhotoAlbum> albums = ImmutableList.of(new PhotoAlbum("id1", "Album 1", "This is a fake album"));
  DateFormat format = new SimpleDateFormat("yyyy:MM:dd HH:mm:ss");
  format.setTimeZone(TimeZone.getTimeZone("Europe/Kiev"));
  Collection<PhotoModel> photos1 = ImmutableList.of(
      new PhotoModel("pic1.jpg", "http://fake.com/1.jpg", "A pic", "image/jpeg", "p1", "id1", true, format.parse("2021:02:16 11:55:00")));
  Collection<PhotoModel> photos2 = ImmutableList.of(
      new PhotoModel("pic2.jpg", "http://fake.com/2.jpg", "A pic", "image/jpeg", "p2", "id1", true, format.parse("2021:02:17 11:55:00")));
  PhotosContainerResource resource1 = spy(new PhotosContainerResource(albums, photos1));
  PhotosContainerResource resource2 = spy(new PhotosContainerResource(albums, photos2));

  importer.importItem(jobId, executor, authData, resource1);
  importer.importItem(jobId, executor, authData, resource2);

  InOrder clientInOrder = Mockito.inOrder(client);
  String[] titles = {"2021-02-16 10.55.00 pic1.jpg", "2021-02-17 10.55.00 pic2.jpg"};
  for (String title : titles) {
    clientInOrder.verify(client).uploadFile(any(), eq(title), any(), any(), any(), any());
  }
  verify(jobStore, atMostOnce()).findJob(jobId);
}
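The expected titles encode the time-zone conversion under test: the timestamps are parsed in Europe/Kiev (UTC+2 in February) and rendered in the job's Europe/Rome zone (UTC+1), so 11:55 becomes 10:55. The standalone sketch below reproduces that arithmetic with plain JDK classes; it illustrates the expected values rather than the importer's implementation.

  // Sketch: reproduce the expected title prefix for the first photo.
  DateFormat kiev = new SimpleDateFormat("yyyy:MM:dd HH:mm:ss");
  kiev.setTimeZone(TimeZone.getTimeZone("Europe/Kiev"));
  Date taken = kiev.parse("2021:02:16 11:55:00");
  DateFormat rome = new SimpleDateFormat("yyyy-MM-dd HH.mm.ss");
  rome.setTimeZone(TimeZone.getTimeZone("Europe/Rome"));
  System.out.println(rome.format(taken) + " pic1.jpg"); // prints "2021-02-16 10.55.00 pic1.jpg"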