Use of io.dockstore.webservice.core.Checksum in project dockstore by dockstore.
In the class LanguageHandlerInterface, the method getImagesFromDockerHub:
default Set<Image> getImagesFromDockerHub(final String repo, final String tagName) {
    Set<Image> dockerHubImages = new HashSet<>();
    Map<String, String> errorMap = new HashMap<>();
    Optional<String> response;
    boolean versionFound = false;
    String repoUrl = DOCKERHUB_URL + "repositories/" + repo + "/tags?name=" + tagName;
    DockerHubTag dockerHubTag = new DockerHubTag();
    do {
        try {
            URL url = new URL(repoUrl);
            response = Optional.of(IOUtils.toString(url, StandardCharsets.UTF_8));
        } catch (IOException ex) {
            LOG.error("Unable to get DockerHub response for " + repo, ex);
            response = Optional.empty();
        }
        if (response.isPresent()) {
            final String json = response.get();
            errorMap = (Map<String, String>) GSON.fromJson(json, errorMap.getClass());
            if (errorMap.get("message") != null) {
                LOG.error("Error response from DockerHub: " + errorMap.get("message"));
                return dockerHubImages;
            }
            // DockerHub seems to give empty results if something is not found, other fields are marked as null
            dockerHubTag = GSON.fromJson(json, DockerHubTag.class);
            List<Results> results = Arrays.asList(dockerHubTag.getResults());
            if (results.isEmpty()) {
                LOG.error("Could not find any results for " + repo);
                break;
            }
            for (Results r : results) {
                if (r.getName().equals(tagName)) {
                    List<DockerHubImage> images = Arrays.asList(r.getImages());
                    // For every version, DockerHub can provide multiple images, one for each os/architecture
                    images.stream().forEach(dockerHubImage -> {
                        final String manifestDigest = dockerHubImage.getDigest();
                        Checksum checksum = new Checksum(manifestDigest.split(":")[0], manifestDigest.split(":")[1]);
                        List<Checksum> checksums = Collections.singletonList(checksum);
                        // Docker Hub appears to return null for all the "last_pushed" properties of their images.
                        // Using the result's "last_updated" as a workaround
                        Image archImage = new Image(checksums, repo, tagName, r.getImageID(), Registry.DOCKER_HUB, dockerHubImage.getSize(), r.getLastUpdated());
                        String osInfo = formatDockerHubInfo(dockerHubImage.getOs(), dockerHubImage.getOsVersion());
                        String archInfo = formatDockerHubInfo(dockerHubImage.getArchitecture(), dockerHubImage.getVariant());
                        archImage.setOs(osInfo);
                        archImage.setArchitecture(archInfo);
                        dockerHubImages.add(archImage);
                    });
                    versionFound = true;
                    break;
                }
            }
            if (!versionFound) {
                repoUrl = dockerHubTag.getNext();
            }
        }
    } while (response.isPresent() && !versionFound && dockerHubTag.getNext() != null);
    return dockerHubImages;
}
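The only place this method touches Checksum is the digest handling inside the lambda. Below is a minimal, hypothetical sketch of just that step, assuming the standard registry digest format "<algorithm>:<hex>" and the two-argument Checksum(type, value) constructor seen above; the class and method names are illustrative and not part of Dockstore.

import io.dockstore.webservice.core.Checksum;

public final class DigestParsingSketch {
    private DigestParsingSketch() {
    }

    // Splits a manifest digest such as "sha256:8f3d..." into its algorithm and hex value.
    public static Checksum toChecksum(final String manifestDigest) {
        final String[] parts = manifestDigest.split(":");
        return new Checksum(parts[0], parts[1]);
    }
}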
Use of io.dockstore.webservice.core.Checksum in project dockstore by dockstore.
In the class AbstractImageRegistry, the method getTagsDockerHub:
public List<Tag> getTagsDockerHub(Tool tool) {
    final String repo = tool.getNamespace() + '/' + tool.getName();
    LOG.info(" ======================= Getting tags for: {}================================", tool.getPath());
    final List<Tag> tags = new ArrayList<>();
    Optional<String> dockerHubResponse = getDockerHubToolAsString(tool);
    Map<String, String> error = new HashMap<>();
    DockerHubTag dockerHubTag;
    if (dockerHubResponse.isPresent()) {
        Gson gson = new Gson();
        final String errorJSON = dockerHubResponse.get();
        final String manifestJSON = dockerHubResponse.get();
        error = (Map<String, String>) gson.fromJson(errorJSON, error.getClass());
        if (error.get("message") != null) {
            LOG.info("Error response from DockerHub: " + error.get("message"));
            return new ArrayList<>();
        }
        dockerHubTag = gson.fromJson(manifestJSON, DockerHubTag.class);
        Results[] results = dockerHubTag.getResults();
        try {
            for (Results r : results) {
                final Tag tag = new Tag();
                tag.setName(r.getName());
                DockerHubImage[] dockerHubImages = r.getImages();
                List<Checksum> checksums = new ArrayList<>();
                // For every version, DockerHub can provide multiple images, one for each architecture
                for (DockerHubImage i : dockerHubImages) {
                    final String manifestDigest = i.getDigest();
                    checksums.add(new Checksum(manifestDigest.split(":")[0], manifestDigest.split(":")[1]));
                    Image image = new Image(checksums, repo, tag.getName(), r.getImageID(), Registry.DOCKER_HUB, i.getSize(), i.getLastPushed());
                    image.setArchitecture(i.getArchitecture());
                    tag.getImages().add(image);
                }
                tags.add(tag);
            }
        } catch (IndexOutOfBoundsException ex) {
LOG.info("Unable to grab image and checksum information for" + tool.getNamespace() + '/' + tool.getName());
        }
        return tags;
    } else {
        LOG.info("Could not get response from DockerHub");
        return new ArrayList<>();
    }
}
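Both Docker Hub methods above parse the same JSON body twice: first into a plain map to check for an error "message" field, then into a DockerHubTag when no error is reported. The sketch below illustrates that error-check pattern with Gson, using a TypeToken instead of the unchecked cast; it is an illustration under those assumptions, not code from the project.

import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import java.util.Map;

public final class DockerHubErrorCheckSketch {
    private static final Gson GSON = new Gson();

    private DockerHubErrorCheckSketch() {
    }

    // Returns Docker Hub's error message, or null when the response is a normal tag listing.
    public static String errorMessage(final String json) {
        final Map<String, Object> body = GSON.fromJson(json, new TypeToken<Map<String, Object>>() { }.getType());
        final Object message = body == null ? null : body.get("message");
        return message == null ? null : message.toString();
    }
}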
Use of io.dockstore.webservice.core.Checksum in project dockstore by dockstore.
In the class AbstractImageRegistry, the method updateTags:
/**
 * Updates/Adds/Deletes tags for a specific tool
 *
 * @param newTags
 * @param tool
 * @param sourceCodeRepoInterface
 * @param tagDAO
 * @param fileDAO
 * @param toolDAO
 * @param fileFormatDAO
 * @param eventDAO
 * @param user
 */
@SuppressWarnings("checkstyle:ParameterNumber")
private void updateTags(List<Tag> newTags, @NotNull Tool tool, SourceCodeRepoInterface sourceCodeRepoInterface, final TagDAO tagDAO, final FileDAO fileDAO, final ToolDAO toolDAO, final FileFormatDAO fileFormatDAO, final EventDAO eventDAO, final User user) {
    // Get all existing tags
    List<Tag> existingTags = new ArrayList<>(tool.getWorkflowVersions());
    if (tool.getMode() != ToolMode.MANUAL_IMAGE_PATH || (tool.getRegistry().equals(Registry.QUAY_IO.getDockerPath()) && existingTags.isEmpty())) {
        if (newTags == null) {
            LOG.info(tool.getToolPath() + " : Tags for tool {} did not get updated because new tags were not found", tool.getPath());
            return;
        }
        List<Tag> toDelete = new ArrayList<>(0);
        for (Iterator<Tag> iterator = existingTags.iterator(); iterator.hasNext(); ) {
            Tag oldTag = iterator.next();
            boolean exists = false;
            for (Tag newTag : newTags) {
                if (newTag.getName().equals(oldTag.getName())) {
                    exists = true;
                    break;
                }
            }
            if (!exists) {
                toDelete.add(oldTag);
                iterator.remove();
            }
        }
        for (Tag newTag : newTags) {
            boolean exists = false;
            // Find if user already has the tag
            for (Tag oldTag : existingTags) {
                if (newTag.getName().equals(oldTag.getName())) {
                    exists = true;
                    updateImageInformation(tool, newTag, oldTag);
                    oldTag.update(newTag);
                    // Update tag with default paths if dirty bit not set
                    if (!oldTag.isDirtyBit()) {
                        // Has not been modified => set paths
                        oldTag.setCwlPath(tool.getDefaultCwlPath());
                        oldTag.setWdlPath(tool.getDefaultWdlPath());
                        oldTag.setDockerfilePath(tool.getDefaultDockerfilePath());
                        // TODO: keep an eye on this, this used to always create new test params no matter what
                        if (tool.getDefaultTestCwlParameterFile() != null && oldTag.getSourceFiles().stream().noneMatch(file -> file.getPath().equals(tool.getDefaultTestCwlParameterFile()))) {
                            oldTag.getSourceFiles().add(createSourceFile(tool.getDefaultTestCwlParameterFile(), DescriptorLanguage.FileType.CWL_TEST_JSON));
                        }
                        if (tool.getDefaultTestWdlParameterFile() != null && oldTag.getSourceFiles().stream().noneMatch(file -> file.getPath().equals(tool.getDefaultTestWdlParameterFile()))) {
                            oldTag.getSourceFiles().add(createSourceFile(tool.getDefaultTestWdlParameterFile(), DescriptorLanguage.FileType.WDL_TEST_JSON));
                        }
                    }
                    break;
                }
            }
            // Tag does not already exist
            if (!exists) {
                // this could result in the same tag being added to multiple containers with the same path, need to clone
                Tag clonedTag = new Tag();
                clonedTag.clone(newTag);
                clonedTag.getImages().addAll(newTag.getImages());
                if (tool.getDefaultTestCwlParameterFile() != null) {
                    clonedTag.getSourceFiles().add(createSourceFile(tool.getDefaultTestCwlParameterFile(), DescriptorLanguage.FileType.CWL_TEST_JSON));
                }
                if (tool.getDefaultTestWdlParameterFile() != null) {
                    clonedTag.getSourceFiles().add(createSourceFile(tool.getDefaultTestWdlParameterFile(), DescriptorLanguage.FileType.WDL_TEST_JSON));
                }
                existingTags.add(clonedTag);
            }
        }
        boolean allAutomated = true;
        for (Tag tag : existingTags) {
            // create and add a tag if it does not already exist
            if (!tool.getWorkflowVersions().contains(tag)) {
                LOG.info(tool.getToolPath() + " : Updating tag {}", tag.getName());
                tag.setParent(tool);
                long id = tagDAO.create(tag);
                tag = tagDAO.findById(id);
                eventDAO.createAddTagToEntryEvent(user, tool, tag);
                tool.addWorkflowVersion(tag);
                if (!tag.isAutomated()) {
                    allAutomated = false;
                }
            }
        }
        // delete tool if it has no users
        deleteToolWithNoUsers(tool, toDelete);
        if (tool.getMode() != ToolMode.MANUAL_IMAGE_PATH) {
            if (allAutomated) {
                tool.setMode(ToolMode.AUTO_DETECT_QUAY_TAGS_AUTOMATED_BUILDS);
            } else {
                tool.setMode(ToolMode.AUTO_DETECT_QUAY_TAGS_WITH_MIXED);
            }
        }
    }
    // For tools from dockerhub, grab/update the image and checksum information
    if (tool.getRegistry().equals(Registry.DOCKER_HUB.getDockerPath()) || tool.getRegistry().equals(Registry.GITLAB.getDockerPath())) {
        updateNonQuayImageInformation(newTags, tool, existingTags);
    }
    // Now grab default/main tag to grab general information (defaults to github/bitbucket "main branch")
    if (sourceCodeRepoInterface != null) {
        // Grab files for each version/tag and check if valid
        Set<Tag> tags = tool.getWorkflowVersions();
        for (Tag tag : tags) {
            // check to see whether the commit id has changed
            // TODO: calls validation eventually, may simplify if we take into account metadata parsing below
            updateFiles(tool, tag, fileDAO, sourceCodeRepoInterface, sourceCodeRepoInterface.gitUsername);
            // Grab and parse files to get tool information
            // Add for new descriptor types
        }
        if (tool.getDefaultCwlPath() != null) {
            LOG.info(tool.getToolPath() + " " + sourceCodeRepoInterface.gitUsername + " : Parsing CWL...");
            sourceCodeRepoInterface.updateEntryMetadata(tool, DescriptorLanguage.CWL);
        }
        if (tool.getDefaultWdlPath() != null) {
            LOG.info(tool.getToolPath() + " " + sourceCodeRepoInterface.gitUsername + " : Parsing WDL...");
            sourceCodeRepoInterface.updateEntryMetadata(tool, DescriptorLanguage.WDL);
        }
    }
    FileFormatHelper.updateFileFormats(tool, tool.getWorkflowVersions(), fileFormatDAO, true);
    // ensure updated tags are saved to the database, not sure why this is necessary. See GeneralIT#testImageIDUpdateDuringRefresh
    tool.getWorkflowVersions().forEach(tagDAO::create);
    toolDAO.create(tool);
}
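The first half of updateTags is a name-based reconciliation between the tags already stored for the tool and the freshly fetched ones: existing tags with no matching new name are deleted, and new names with no existing match are cloned in. Stripped of the Dockstore entities, the decision reduces to the simplified, hypothetical sketch below (plain strings stand in for Tag names).

import java.util.ArrayList;
import java.util.List;

public final class TagReconciliationSketch {
    private TagReconciliationSketch() {
    }

    // Existing names with no matching new name are scheduled for deletion.
    public static List<String> namesToDelete(final List<String> existingNames, final List<String> newNames) {
        final List<String> toDelete = new ArrayList<>();
        for (final String existing : existingNames) {
            if (!newNames.contains(existing)) {
                toDelete.add(existing);
            }
        }
        return toDelete;
    }

    // New names with no matching existing name are added.
    public static List<String> namesToAdd(final List<String> existingNames, final List<String> newNames) {
        final List<String> toAdd = new ArrayList<>();
        for (final String newName : newNames) {
            if (!existingNames.contains(newName)) {
                toAdd.add(newName);
            }
        }
        return toAdd;
    }
}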
Use of io.dockstore.webservice.core.Checksum in project dockstore by dockstore.
In the class WorkflowResource, the method updateWorkflowVersion:
@PUT
@Timed
@UnitOfWork
@Path("/{workflowId}/workflowVersions")
@Operation(operationId = "updateWorkflowVersion", description = "Update the workflow versions linked to a workflow.", security = @SecurityRequirement(name = OPENAPI_JWT_SECURITY_DEFINITION_NAME))
@ApiOperation(value = "Update the workflow versions linked to a workflow.", authorizations = { @Authorization(value = JWT_SECURITY_DEFINITION_NAME) }, notes = "Updates workflow path, reference, and hidden attributes.", response = WorkflowVersion.class, responseContainer = "List")
public Set<WorkflowVersion> updateWorkflowVersion(@ApiParam(hidden = true) @Parameter(hidden = true, name = "user") @Auth User user, @ApiParam(value = "Workflow to modify.", required = true) @PathParam("workflowId") Long workflowId, @ApiParam(value = "List of modified workflow versions", required = true) List<WorkflowVersion> workflowVersions) {
    Workflow w = workflowDAO.findById(workflowId);
    checkEntry(w);
    checkCanWriteWorkflow(user, w);
    // create a map for quick lookup
    Map<Long, WorkflowVersion> mapOfExistingWorkflowVersions = new HashMap<>();
    for (WorkflowVersion version : w.getWorkflowVersions()) {
        mapOfExistingWorkflowVersions.put(version.getId(), version);
    }
    for (WorkflowVersion version : workflowVersions) {
        if (mapOfExistingWorkflowVersions.containsKey(version.getId())) {
            if (w.getActualDefaultVersion() != null && w.getActualDefaultVersion().getId() == version.getId() && version.isHidden()) {
                throw new CustomWebApplicationException("You cannot hide the default version.", HttpStatus.SC_BAD_REQUEST);
            }
            // remove existing copy and add the new one
            WorkflowVersion existingTag = mapOfExistingWorkflowVersions.get(version.getId());
            existingTag.setSynced(false);
            // If path changed then update dirty bit to true
            if (!existingTag.getWorkflowPath().equals(version.getWorkflowPath())) {
                String newExtension = FilenameUtils.getExtension(version.getWorkflowPath());
                String correctExtension = FilenameUtils.getExtension(w.getDefaultWorkflowPath());
                if (!Objects.equals(newExtension, correctExtension)) {
                    throw new CustomWebApplicationException("Please ensure that the workflow path uses the file extension " + correctExtension, HttpStatus.SC_BAD_REQUEST);
                }
                existingTag.setDirtyBit(true);
            }
            boolean wasFrozen = existingTag.isFrozen();
            existingTag.updateByUser(version);
            boolean nowFrozen = existingTag.isFrozen();
            // If version is snapshotted on this update, grab and store image information. Also store dag and tool table json if not available.
            if (!wasFrozen && nowFrozen) {
                Optional<String> toolsJSONTable;
                LanguageHandlerInterface lInterface = LanguageHandlerFactory.getInterface(w.getFileType());
                // Store tool table json
                if (existingTag.getToolTableJson() == null) {
                    toolsJSONTable = lInterface.getContent(w.getWorkflowPath(), getMainDescriptorFile(existingTag).getContent(), extractDescriptorAndSecondaryFiles(existingTag), LanguageHandlerInterface.Type.TOOLS, toolDAO);
                    existingTag.setToolTableJson(toolsJSONTable.get());
                } else {
                    toolsJSONTable = Optional.of(existingTag.getToolTableJson());
                }
                if (toolsJSONTable.isPresent()) {
                    Set<Image> images = lInterface.getImagesFromRegistry(toolsJSONTable.get());
                    existingTag.getImages().addAll(images);
                }
                // Grab checksum for file descriptors if not already available.
                for (SourceFile sourceFile : existingTag.getSourceFiles()) {
                    Optional<String> sha = FileFormatHelper.calcSHA1(sourceFile.getContent());
                    if (sha.isPresent()) {
                        List<Checksum> checksums = new ArrayList<>();
                        checksums.add(new Checksum(SHA_TYPE_FOR_SOURCEFILES, sha.get()));
                        if (sourceFile.getChecksums() == null) {
                            sourceFile.setChecksums(checksums);
                        } else if (sourceFile.getChecksums().isEmpty()) {
                            sourceFile.getChecksums().addAll(checksums);
                        }
                    }
                }
                // store dag
                if (existingTag.getDagJson() == null) {
                    String dagJson = lInterface.getCleanDAG(w.getWorkflowPath(), getMainDescriptorFile(existingTag).getContent(), extractDescriptorAndSecondaryFiles(existingTag), LanguageHandlerInterface.Type.DAG, toolDAO);
                    existingTag.setDagJson(dagJson);
                }
            }
        }
    }
    Workflow result = workflowDAO.findById(workflowId);
    checkEntry(result);
    PublicStateManager.getInstance().handleIndexUpdate(result, StateManagerMode.UPDATE);
    return result.getWorkflowVersions();
}
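The snapshot branch above attaches a checksum to every source file that does not already carry one. A condensed sketch of just that step is shown below; it assumes only what is visible in the method (FileFormatHelper.calcSHA1 returning an Optional<String>, the two-argument Checksum constructor, the SourceFile getters and setters used above, and SourceFile living alongside Checksum in io.dockstore.webservice.core), and the helper class name is hypothetical.

import io.dockstore.webservice.core.Checksum;
import io.dockstore.webservice.core.SourceFile;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;

public final class SourceFileChecksumSketch {
    private SourceFileChecksumSketch() {
    }

    // Mirrors the snapshot logic above: attach the checksum only when the file has none yet.
    // In the resource class, checksumType is SHA_TYPE_FOR_SOURCEFILES and sha comes from
    // FileFormatHelper.calcSHA1(sourceFile.getContent()).
    public static void addChecksumIfMissing(final SourceFile sourceFile, final Optional<String> sha, final String checksumType) {
        if (!sha.isPresent()) {
            return;
        }
        final List<Checksum> checksums = new ArrayList<>();
        checksums.add(new Checksum(checksumType, sha.get()));
        if (sourceFile.getChecksums() == null) {
            sourceFile.setChecksums(checksums);
        } else if (sourceFile.getChecksums().isEmpty()) {
            sourceFile.getChecksums().addAll(checksums);
        }
    }
}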
Use of io.dockstore.webservice.core.Checksum in project dockstore by dockstore.
In the class AbstractWorkflowResource, the method updateDBVersionSourceFilesWithRemoteVersionSourceFiles:
/**
 * Updates the sourcefiles in the database to match the sourcefiles on the remote
 * @param existingVersion
 * @param remoteVersion
 * @return WorkflowVersion with updated sourcefiles
 */
private WorkflowVersion updateDBVersionSourceFilesWithRemoteVersionSourceFiles(WorkflowVersion existingVersion, WorkflowVersion remoteVersion) {
    // Update source files for each version
    Map<String, SourceFile> existingFileMap = new HashMap<>();
    existingVersion.getSourceFiles().forEach(file -> existingFileMap.put(file.getType().toString() + file.getAbsolutePath(), file));
    for (SourceFile file : remoteVersion.getSourceFiles()) {
        String fileKey = file.getType().toString() + file.getAbsolutePath();
        SourceFile existingFile = existingFileMap.get(fileKey);
        if (existingFileMap.containsKey(fileKey)) {
            List<Checksum> checksums = new ArrayList<>();
            Optional<String> sha = FileFormatHelper.calcSHA1(file.getContent());
            if (sha.isPresent()) {
                checksums.add(new Checksum(SHA_TYPE_FOR_SOURCEFILES, sha.get()));
                if (existingFile.getChecksums() == null) {
                    existingFile.setChecksums(checksums);
                } else {
                    existingFile.getChecksums().clear();
                    existingFileMap.get(fileKey).getChecksums().addAll(checksums);
                }
            }
            existingFile.setContent(file.getContent());
        } else {
            final long fileID = fileDAO.create(file);
            final SourceFile fileFromDB = fileDAO.findById(fileID);
            Optional<String> sha = FileFormatHelper.calcSHA1(file.getContent());
            if (sha.isPresent()) {
                fileFromDB.getChecksums().add(new Checksum(SHA_TYPE_FOR_SOURCEFILES, sha.get()));
            }
            existingVersion.getSourceFiles().add(fileFromDB);
        }
    }
    // Remove existing files that are no longer present on remote
    for (Map.Entry<String, SourceFile> entry : existingFileMap.entrySet()) {
        boolean toDelete = true;
        for (SourceFile file : remoteVersion.getSourceFiles()) {
            if (entry.getKey().equals(file.getType().toString() + file.getAbsolutePath())) {
                toDelete = false;
            }
        }
        if (toDelete) {
            existingVersion.getSourceFiles().remove(entry.getValue());
        }
    }
    // Update the validations
    for (Validation versionValidation : remoteVersion.getValidations()) {
        existingVersion.addOrUpdateValidation(versionValidation);
    }
    return existingVersion;
}
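The matching between database and remote files hinges on the lookup key built at the top of the method: the file type concatenated with the absolute path, so the same path used with different file types is treated as two distinct entries. A hypothetical one-method helper makes that convention explicit; SourceFile is assumed to live in io.dockstore.webservice.core alongside Checksum.

import io.dockstore.webservice.core.SourceFile;

public final class SourceFileKeySketch {
    private SourceFileKeySketch() {
    }

    // Same key expression as in updateDBVersionSourceFilesWithRemoteVersionSourceFiles above.
    public static String key(final SourceFile file) {
        return file.getType().toString() + file.getAbsolutePath();
    }
}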