Use of com.google.common.hash.HashCode in project gradle by gradle.
The class TaskExecutionServices, method createCompileClasspathSnapshotter.
CompileClasspathSnapshotter createCompileClasspathSnapshotter(FileHasher hasher, StringInterner stringInterner, FileSystem fileSystem, DirectoryFileTreeFactory directoryFileTreeFactory, TaskHistoryStore store, FileSystemMirror fileSystemMirror) {
    // Persistent HashCode -> HashCode cache used by the caching classpath entry hasher below.
    PersistentIndexedCache<HashCode, HashCode> signatureCache = store.createCache("jvmClassSignatures", HashCode.class, new HashCodeSerializer(), 400000, true);
    // Hash only the ABI of classpath entries and cache the results in signatureCache.
    ClasspathEntryHasher classpathEntryHasher = new CachingClasspathEntryHasher(new DefaultClasspathEntryHasher(new AbiExtractingClasspathContentHasher(new DefaultClasspathContentHasher())), signatureCache);
    return new DefaultCompileClasspathSnapshotter(hasher, stringInterner, fileSystem, directoryFileTreeFactory, fileSystemMirror, classpathEntryHasher);
}
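For comparison, here is a minimal standalone sketch of the same HashCode-keyed caching idea, using only Guava and a plain HashMap in place of Gradle's internal PersistentIndexedCache; the SignatureCacheSketch class and its extractAbi helper are illustrative, not part of Gradle.

import com.google.common.hash.HashCode;
import com.google.common.hash.Hashing;
import java.util.HashMap;
import java.util.Map;

class SignatureCacheSketch {
    // Stand-in for Gradle's PersistentIndexedCache<HashCode, HashCode>.
    private final Map<HashCode, HashCode> signatureCache = new HashMap<>();

    // Returns a cached "signature" hash for the given class file bytes,
    // computing and storing it on a cache miss.
    HashCode signatureOf(byte[] classFileBytes) {
        HashCode contentHash = Hashing.sha256().hashBytes(classFileBytes);
        return signatureCache.computeIfAbsent(contentHash,
                key -> Hashing.sha256().hashBytes(extractAbi(classFileBytes)));
    }

    // Placeholder for ABI extraction; Gradle's real implementation parses the class file.
    private byte[] extractAbi(byte[] classFileBytes) {
        return classFileBytes;
    }
}

HashCode implements equals() and hashCode(), so it works directly as a map key, which is what makes this cache layout convenient.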
Use of com.google.common.hash.HashCode in project bazel by bazelbuild.
The class FileUtils, method getDirectoryNameForJar.
/**
 * Chooses a directory name, based on a JAR file name, considering exploded-aar and classes.jar.
 */
@NonNull
public static String getDirectoryNameForJar(@NonNull File inputFile) {
    // add a hash of the original file path.
    HashFunction hashFunction = Hashing.sha1();
    HashCode hashCode = hashFunction.hashString(inputFile.getAbsolutePath(), Charsets.UTF_16LE);
    String name = Files.getNameWithoutExtension(inputFile.getName());
    if (name.equals("classes") && inputFile.getAbsolutePath().contains("exploded-aar")) {
        // This naming scheme is coming from DependencyManager#computeArtifactPath.
        File versionDir = inputFile.getParentFile().getParentFile();
        File artifactDir = versionDir.getParentFile();
        File groupDir = artifactDir.getParentFile();
        name = Joiner.on('-').join(groupDir.getName(), artifactDir.getName(), versionDir.getName());
    }
    name = name + "_" + hashCode.toString();
    return name;
}
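The core pattern is "base name + underscore + hash of the absolute path", which keeps names readable while avoiding collisions between identically named JARs in different locations. A minimal standalone sketch of just that pattern, with the special exploded-aar handling omitted and an illustrative example path:

import com.google.common.base.Charsets;
import com.google.common.hash.HashCode;
import com.google.common.hash.Hashing;
import com.google.common.io.Files;
import java.io.File;

class JarDirNameSketch {
    // Derives a stable directory name from a JAR path: name + "_" + SHA-1 of the absolute path.
    static String directoryNameFor(File jar) {
        HashCode pathHash = Hashing.sha1().hashString(jar.getAbsolutePath(), Charsets.UTF_16LE);
        return Files.getNameWithoutExtension(jar.getName()) + "_" + pathHash;
    }

    public static void main(String[] args) {
        // Prints something like "mylib-1.0_<40-char hex sha1>" (path is illustrative).
        System.out.println(directoryNameFor(new File("/repo/com/example/mylib-1.0.jar")));
    }
}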
Use of com.google.common.hash.HashCode in project bazel by bazelbuild.
The class WorkerSpawnStrategy, method actuallyExec.
private void actuallyExec(Spawn spawn, ActionExecutionContext actionExecutionContext, AtomicReference<Class<? extends SpawnActionContext>> writeOutputFiles) throws ExecException, InterruptedException {
    Executor executor = actionExecutionContext.getExecutor();
    EventHandler eventHandler = executor.getEventHandler();
    if (executor.reportsSubcommands()) {
        executor.reportSubcommand(spawn);
    }
    // We assume that the spawn to be executed always gets at least one @flagfile.txt or
    // --flagfile=flagfile.txt argument, which contains the flags related to the work itself (as
    // opposed to start-up options for the executed tool). Thus, we can extract those elements from
    // its args and put them into the WorkRequest instead.
    List<String> flagfiles = new ArrayList<>();
    List<String> startupArgs = new ArrayList<>();
    for (String arg : spawn.getArguments()) {
        if (FLAG_FILE_PATTERN.matcher(arg).matches()) {
            flagfiles.add(arg);
        } else {
            startupArgs.add(arg);
        }
    }
    if (flagfiles.isEmpty()) {
        throw new UserExecException(String.format(ERROR_MESSAGE_PREFIX + REASON_NO_FLAGFILE, spawn.getMnemonic()));
    }
    if (Iterables.isEmpty(spawn.getToolFiles())) {
        throw new UserExecException(String.format(ERROR_MESSAGE_PREFIX + REASON_NO_TOOLS, spawn.getMnemonic()));
    }
    FileOutErr outErr = actionExecutionContext.getFileOutErr();
    ImmutableList<String> args = ImmutableList.<String>builder()
        .addAll(startupArgs)
        .add("--persistent_worker")
        .addAll(MoreObjects.firstNonNull(extraFlags.get(spawn.getMnemonic()), ImmutableList.<String>of()))
        .build();
    ImmutableMap<String, String> env = spawn.getEnvironment();
    try {
        ActionInputFileCache inputFileCache = actionExecutionContext.getActionInputFileCache();
        HashCode workerFilesHash = WorkerFilesHash.getWorkerFilesHash(spawn.getToolFiles(), actionExecutionContext);
        Map<PathFragment, Path> inputFiles = new SpawnHelpers(execRoot).getMounts(spawn, actionExecutionContext);
        Set<PathFragment> outputFiles = SandboxHelpers.getOutputFiles(spawn);
        WorkerKey key = new WorkerKey(args, env, execRoot, spawn.getMnemonic(), workerFilesHash, inputFiles, outputFiles, writeOutputFiles != null);
        WorkRequest.Builder requestBuilder = WorkRequest.newBuilder();
        for (String flagfile : flagfiles) {
            expandArgument(requestBuilder, flagfile);
        }
        List<ActionInput> inputs = ActionInputHelper.expandArtifacts(spawn.getInputFiles(), actionExecutionContext.getArtifactExpander());
        for (ActionInput input : inputs) {
            byte[] digestBytes = inputFileCache.getDigest(input);
            ByteString digest;
            if (digestBytes == null) {
                digest = ByteString.EMPTY;
            } else {
                digest = ByteString.copyFromUtf8(HashCode.fromBytes(digestBytes).toString());
            }
            requestBuilder.addInputsBuilder().setPath(input.getExecPathString()).setDigest(digest).build();
        }
        WorkResponse response = execInWorker(eventHandler, key, requestBuilder.build(), maxRetries, writeOutputFiles);
        outErr.getErrorStream().write(response.getOutputBytes().toByteArray());
        if (response.getExitCode() != 0) {
            throw new UserExecException(String.format("Worker process sent response with exit code: %d.", response.getExitCode()));
        }
    } catch (IOException e) {
        String message = CommandFailureUtils.describeCommandFailure(verboseFailures, spawn.getArguments(), env, execRoot.getPathString());
        throw new UserExecException(message, e);
    }
}
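The HashCode-specific part of this method is the conversion of raw digest bytes into lowercase hex via HashCode.fromBytes(...).toString() before they go into the WorkRequest. A minimal sketch of just that conversion, with Bazel's ActionInputFileCache and ByteString replaced by plain Java types for illustration:

import com.google.common.hash.HashCode;
import com.google.common.hash.Hashing;
import java.nio.charset.StandardCharsets;

class DigestToHexSketch {
    // Converts raw digest bytes into the lowercase hex form used in the request above;
    // returns an empty string when no digest is available (the real code uses ByteString.EMPTY).
    static String hexDigest(byte[] digestBytes) {
        return digestBytes == null ? "" : HashCode.fromBytes(digestBytes).toString();
    }

    public static void main(String[] args) {
        byte[] digest = Hashing.sha256().hashString("some input file contents", StandardCharsets.UTF_8).asBytes();
        System.out.println(hexDigest(digest)); // 64 lowercase hex characters
    }
}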
Use of com.google.common.hash.HashCode in project che by eclipse.
The class ETagResponseFilter, method doFilter.
/**
 * Filter the given container response
 *
 * @param containerResponse
 *         the response to use
 */
public void doFilter(GenericContainerResponse containerResponse) {
    // get entity of the response
    Object entity = containerResponse.getEntity();
    // no entity, skip
    if (entity == null) {
        return;
    }
    // Only handle JSON content
    if (!MediaType.APPLICATION_JSON_TYPE.equals(containerResponse.getContentType())) {
        return;
    }
    // Get the request
    ApplicationContext applicationContext = ApplicationContext.getCurrent();
    Request request = applicationContext.getRequest();
    // manage only GET requests
    if (!HttpMethod.GET.equals(request.getMethod())) {
        return;
    }
    // calculate hash with MD5
    HashFunction hashFunction = Hashing.md5();
    Hasher hasher = hashFunction.newHasher();
    boolean hashingSuccess = true;
    // Manage a list
    if (entity instanceof List) {
        List<?> entities = (List) entity;
        for (Object simpleEntity : entities) {
            hashingSuccess = addHash(simpleEntity, hasher);
            if (!hashingSuccess) {
                break;
            }
        }
    } else {
        hashingSuccess = addHash(entity, hasher);
    }
    // if we're able to handle the hash
    if (hashingSuccess) {
        // get result of the hash
        HashCode hashCode = hasher.hash();
        // Create the entity tag
        EntityTag entityTag = new EntityTag(hashCode.toString());
        // Check the etag
        Response.ResponseBuilder builder = request.evaluatePreconditions(entityTag);
        // not modified ?
        if (builder != null) {
            containerResponse.setResponse(builder.tag(entityTag).build());
        } else {
            // it has been changed, so send response with new ETag and entity
            Response.ResponseBuilder responseBuilder = Response.fromResponse(containerResponse.getResponse()).tag(entityTag);
            containerResponse.setResponse(responseBuilder.build());
        }
    }
}
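The ETag value itself is just the hex string of an incremental MD5 hash over the response entities. A minimal standalone sketch of that part, with the JAX-RS wiring and the project's addHash helper replaced by pre-serialized strings for illustration:

import com.google.common.hash.HashCode;
import com.google.common.hash.Hasher;
import com.google.common.hash.Hashing;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.List;

class ETagSketch {
    // Builds an ETag value by feeding each entity's serialized form into a single
    // incremental Hasher, mirroring the one-hasher-over-a-list pattern above.
    static String etagFor(List<String> serializedEntities) {
        Hasher hasher = Hashing.md5().newHasher();
        for (String entity : serializedEntities) {
            hasher.putString(entity, StandardCharsets.UTF_8);
        }
        HashCode hashCode = hasher.hash();   // hash() may only be called once per Hasher
        return hashCode.toString();          // 32 lowercase hex characters
    }

    public static void main(String[] args) {
        System.out.println(etagFor(Arrays.asList("{\"id\":1}", "{\"id\":2}")));
    }
}

Note that a Hasher is single-use: a new one must be created for each response, which is why the filter builds one per doFilter call.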
Use of com.google.common.hash.HashCode in project che by eclipse.
The class PBKDF2PasswordEncryptor, method test.
@Override
public boolean test(String password, String encryptedPassword) {
    requireNonNull(password, "Required non-null password");
    requireNonNull(encryptedPassword, "Required non-null encrypted password");
    final Matcher matcher = PWD_REGEX.matcher(encryptedPassword);
    if (!matcher.matches()) {
        return false;
    }
    // retrieve salt, password hash and iterations count from the encrypted password
    final Integer iterations = tryParse(matcher.group("iterations"));
    final String salt = matcher.group("saltHash");
    final String pwdHash = matcher.group("pwdHash");
    // compute the password's hash and test whether it matches the given hash
    final HashCode hash = computeHash(password.toCharArray(), HashCode.fromString(salt).asBytes(), iterations);
    return hash.toString().equals(pwdHash);
}
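Here HashCode serves as the bridge between hex strings and raw bytes: the stored salt is decoded with HashCode.fromString(...).asBytes(), and the freshly derived hash is compared as hex. A minimal sketch of that round trip using the JDK's PBKDF2 support in place of the project's computeHash; the algorithm name and key length are assumptions, not che's actual parameters:

import com.google.common.hash.HashCode;
import java.security.GeneralSecurityException;
import java.security.spec.KeySpec;
import javax.crypto.SecretKeyFactory;
import javax.crypto.spec.PBEKeySpec;

class Pbkdf2CheckSketch {
    // Derives a PBKDF2 hash from the password and hex-encoded salt, then compares it,
    // as lowercase hex, against the stored hash (illustrative parameters).
    static boolean matches(char[] password, String saltHex, int iterations, String storedPwdHashHex)
            throws GeneralSecurityException {
        byte[] salt = HashCode.fromString(saltHex).asBytes();
        KeySpec spec = new PBEKeySpec(password, salt, iterations, 256);
        byte[] derived = SecretKeyFactory.getInstance("PBKDF2WithHmacSHA256").generateSecret(spec).getEncoded();
        return HashCode.fromBytes(derived).toString().equals(storedPwdHashHex);
    }
}

In production code the final comparison should be constant-time (for example MessageDigest.isEqual on the raw bytes) to avoid leaking timing information.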