Use of com.android.builder.files.FileCacheByPath in project atlas by alibaba.
The class AtlasMergeJavaResourcesTransform, method transform.
@Override
public void transform(TransformInvocation invocation) throws IOException {
waitableExecutor.execute(new Callable<Void>() {
@Override
public Void call() throws Exception {
cacheDir = new File(intermediateDir, "zip-cache");
FileUtils.mkdirs(cacheDir);
FileCacheByPath zipCache = new FileCacheByPath(cacheDir);
TransformOutputProvider outputProvider = invocation.getOutputProvider();
checkNotNull(outputProvider, "Missing output object for transform " + getName());
ParsedPackagingOptions packagingOptions = new ParsedPackagingOptions(AtlasMergeJavaResourcesTransform.this.packagingOptions);
boolean full = false;
IncrementalFileMergerState state = loadMergeState();
if (state == null || !invocation.isIncremental()) {
/*
* This is a full build.
*/
state = new IncrementalFileMergerState();
outputProvider.deleteAll();
full = true;
}
List<Runnable> cacheUpdates = new ArrayList<>();
Map<IncrementalFileMergerInput, QualifiedContent> contentMap = new HashMap<>();
List<IncrementalFileMergerInput> inputs = new ArrayList<>(AtlasIncrementalFileMergeTransformUtils.toInput(invocation, zipCache, cacheUpdates, full, contentMap, null, appVariantOutputContext.getVariantContext().getVariantName()));
/*
* In an ideal world, we could just send the inputs to the file merger. However, in the
* real world we live in, things are more complicated :)
*
* We need to:
*
* 1. Bring inputs that refer to the project scope before the other inputs.
* 2. Prefix libraries that come from directories with "lib/".
* 3. Filter all inputs to remove anything accepted by neither acceptedPathsPredicate
* nor packagingOptions.
*/
// Sort inputs to move project scopes to the start.
inputs.sort((i0, i1) -> {
int v0 = contentMap.get(i0).getScopes().contains(QualifiedContent.Scope.PROJECT) ? 0 : 1;
int v1 = contentMap.get(i1).getScopes().contains(QualifiedContent.Scope.PROJECT) ? 0 : 1;
return v0 - v1;
});
// Prefix libraries with "lib/" if we're doing libraries.
assert mergedType.size() == 1;
QualifiedContent.ContentType mergedType = AtlasMergeJavaResourcesTransform.this.mergedType.iterator().next();
if (mergedType == ExtendedContentType.NATIVE_LIBS) {
inputs = inputs.stream().map(i -> {
QualifiedContent qc = contentMap.get(i);
if (qc.getFile().isDirectory()) {
i = new RenameIncrementalFileMergerInput(i, s -> "lib/" + s, s -> s.substring("lib/".length()));
contentMap.put(i, qc);
}
return i;
}).collect(Collectors.toList());
}
// Filter inputs.
Predicate<String> inputFilter = acceptedPathsPredicate.and(path -> packagingOptions.getAction(path) != PackagingFileAction.EXCLUDE);
inputs = inputs.stream().map(i -> {
IncrementalFileMergerInput i2 = new FilterIncrementalFileMergerInput(i, inputFilter);
contentMap.put(i2, contentMap.get(i));
return i2;
}).collect(Collectors.toList());
/*
* Create the algorithm used by the merge transform. This algorithm decides on which
* algorithm to delegate to depending on the packaging option of the path. By default it
* requires just one file (no merging).
*/
StreamMergeAlgorithm mergeTransformAlgorithm = StreamMergeAlgorithms.select(path -> {
PackagingFileAction packagingAction = packagingOptions.getAction(path);
switch(packagingAction) {
case EXCLUDE:
// Should have been excluded from the input.
throw new AssertionError();
case PICK_FIRST:
return StreamMergeAlgorithms.pickFirst();
case MERGE:
return StreamMergeAlgorithms.concat();
case NONE:
return StreamMergeAlgorithms.acceptOnlyOne();
default:
throw new AssertionError();
}
});
/*
* Create an output that uses the algorithm. This is not the final output because,
* unfortunately, we still have the complexity of the project scope overriding other scopes
* to solve.
*
* When resources inside a jar file are extracted to a directory, the results may not be
* expected on Windows if the file names end with "." (bug 65337573), or if there is an
* uppercase/lowercase conflict. To work around this issue, we copy these resources to a
* jar file.
*/
IncrementalFileMergerOutput baseOutput;
if (mergedType == QualifiedContent.DefaultContentType.RESOURCES) {
outputLocation = outputProvider.getContentLocation("resources", getOutputTypes(), getScopes(), Format.JAR);
baseOutput = IncrementalFileMergerOutputs.fromAlgorithmAndWriter(mergeTransformAlgorithm, MergeOutputWriters.toZip(outputLocation));
AtlasBuildContext.atlasMainDexHelperMap.get(appVariantOutputContext.getVariantContext().getVariantName()).addMainJavaRes(outputLocation);
} else {
outputLocation = outputProvider.getContentLocation("resources", getOutputTypes(), getScopes(), Format.DIRECTORY);
baseOutput = IncrementalFileMergerOutputs.fromAlgorithmAndWriter(mergeTransformAlgorithm, MergeOutputWriters.toDirectory(outputLocation));
}
/*
* We need a custom output to handle the case in which the same path appears in multiple
* inputs and the action is NONE, but only one input is actually PROJECT. In this specific
* case we will ignore all other inputs.
*/
Set<IncrementalFileMergerInput> projectInputs = contentMap.keySet().stream().filter(i -> contentMap.get(i).getScopes().contains(QualifiedContent.Scope.PROJECT)).collect(Collectors.toSet());
IncrementalFileMergerOutput output = new DelegateIncrementalFileMergerOutput(baseOutput) {
@Override
public void create(@NonNull String path, @NonNull List<IncrementalFileMergerInput> inputs) {
super.create(path, filter(path, inputs));
}
@Override
public void update(@NonNull String path, @NonNull List<String> prevInputNames, @NonNull List<IncrementalFileMergerInput> inputs) {
super.update(path, prevInputNames, filter(path, inputs));
}
@Override
public void remove(@NonNull String path) {
super.remove(path);
}
@NonNull
private ImmutableList<IncrementalFileMergerInput> filter(@NonNull String path, @NonNull List<IncrementalFileMergerInput> inputs) {
PackagingFileAction packagingAction = packagingOptions.getAction(path);
if (packagingAction == PackagingFileAction.NONE && inputs.stream().anyMatch(projectInputs::contains)) {
inputs = inputs.stream().filter(projectInputs::contains).collect(ImmutableCollectors.toImmutableList());
}
return ImmutableList.copyOf(inputs);
}
};
state = IncrementalFileMerger.merge(ImmutableList.copyOf(inputs), output, state);
saveMergeState(state);
cacheUpdates.forEach(Runnable::run);
return null;
}
});
for (AwbTransform awbTransform : appVariantOutputContext.getAwbTransformMap().values()) {
File awbCacheDir = new File(intermediateDir, "awb-zip-cache" + File.separator + awbTransform.getAwbBundle().getName());
waitableExecutor.execute(new Callable<Void>() {
@Override
public Void call() throws Exception {
FileUtils.mkdirs(awbCacheDir);
FileCacheByPath zipCache = new FileCacheByPath(awbCacheDir);
ParsedPackagingOptions packagingOptions = new ParsedPackagingOptions(AtlasMergeJavaResourcesTransform.this.packagingOptions);
boolean full = false;
IncrementalFileMergerState state = loadAwbMergeState(awbTransform.getAwbBundle());
if (state == null || !invocation.isIncremental()) {
/*
* This is a full build.
*/
state = new IncrementalFileMergerState();
if (appVariantOutputContext.getAwbJniFolder(awbTransform.getAwbBundle()).exists() && mergedType.contains(ExtendedContentType.NATIVE_LIBS)) {
FileUtils.deleteDirectoryContents(appVariantOutputContext.getAwbJniFolder(awbTransform.getAwbBundle()));
}
if (appVariantOutputContext.getAwbJavaResFolder(awbTransform.getAwbBundle()).exists() && mergedType.contains(QualifiedContent.DefaultContentType.RESOURCES)) {
FileUtils.deleteDirectoryContents(appVariantOutputContext.getAwbJavaResFolder(awbTransform.getAwbBundle()));
}
full = true;
}
List<Runnable> cacheUpdates = new ArrayList<>();
Map<IncrementalFileMergerInput, QualifiedContent> contentMap = new HashMap<>();
List<IncrementalFileMergerInput> inputs = new ArrayList<>(AtlasIncrementalFileMergeTransformUtils.toInput(invocation, zipCache, cacheUpdates, full, contentMap, awbTransform, appVariantOutputContext.getVariantContext().getVariantName()));
/*
* In an ideal world, we could just send the inputs to the file merger. However, in the
* real world we live in, things are more complicated :)
*
* We need to:
*
* 1. Bring inputs that refer to the project scope before the other inputs.
* 2. Prefix libraries that come from directories with "lib/".
* 3. Filter all inputs to remove anything accepted by neither acceptedPathsPredicate
* nor packagingOptions.
*/
// Sort inputs to move project scopes to the start.
inputs.sort((i0, i1) -> {
int v0 = contentMap.get(i0).getScopes().contains(QualifiedContent.Scope.PROJECT) ? 0 : 1;
int v1 = contentMap.get(i1).getScopes().contains(QualifiedContent.Scope.PROJECT) ? 0 : 1;
return v0 - v1;
});
// Prefix libraries with "lib/" if we're doing libraries.
assert mergedType.size() == 1;
QualifiedContent.ContentType mergedType = AtlasMergeJavaResourcesTransform.this.mergedType.iterator().next();
if (mergedType == ExtendedContentType.NATIVE_LIBS) {
inputs = inputs.stream().map(i -> {
QualifiedContent qc = contentMap.get(i);
if (qc.getFile().isDirectory()) {
i = new RenameIncrementalFileMergerInput(i, s -> "lib/" + s, s -> s.substring("lib/".length()));
contentMap.put(i, qc);
}
return i;
}).collect(Collectors.toList());
}
// Filter inputs.
Predicate<String> inputFilter = acceptedPathsPredicate.and(path -> packagingOptions.getAction(path) != PackagingFileAction.EXCLUDE);
inputs = inputs.stream().map(i -> {
IncrementalFileMergerInput i2 = new FilterIncrementalFileMergerInput(i, inputFilter);
contentMap.put(i2, contentMap.get(i));
return i2;
}).collect(Collectors.toList());
/*
* Create the algorithm used by the merge transform. This algorithm decides on which
* algorithm to delegate to depending on the packaging option of the path. By default it
* requires just one file (no merging).
*/
StreamMergeAlgorithm mergeTransformAlgorithm = StreamMergeAlgorithms.select(path -> {
PackagingFileAction packagingAction = packagingOptions.getAction(path);
switch(packagingAction) {
case EXCLUDE:
// Should have been excluded from the input.
throw new AssertionError();
case PICK_FIRST:
return StreamMergeAlgorithms.pickFirst();
case MERGE:
return StreamMergeAlgorithms.concat();
case NONE:
return StreamMergeAlgorithms.acceptOnlyOne();
default:
throw new AssertionError();
}
});
/*
* Create an output that uses the algorithm. This is not the final output because,
* unfortunately, we still have the complexity of the project scope overriding other scopes
* to solve.
*
* When resources inside a jar file are extracted to a directory, the results may not be
* expected on Windows if the file names end with "." (bug 65337573), or if there is an
* uppercase/lowercase conflict. To work around this issue, we copy these resources to a
* jar file.
*/
IncrementalFileMergerOutput baseOutput;
if (mergedType == QualifiedContent.DefaultContentType.RESOURCES) {
File outputLocation = new File(appVariantOutputContext.getAwbJavaResFolder(awbTransform.getAwbBundle()), "res.jar");
if (!appVariantOutputContext.getAwbJavaResFolder(awbTransform.getAwbBundle()).exists()) {
appVariantOutputContext.getAwbJavaResFolder(awbTransform.getAwbBundle()).mkdirs();
}
createEmptyZipFile(outputLocation);
baseOutput = IncrementalFileMergerOutputs.fromAlgorithmAndWriter(mergeTransformAlgorithm, MergeOutputWriters.toZip(outputLocation));
} else {
File outputLocation = appVariantOutputContext.getAwbJniFolder(awbTransform.getAwbBundle());
baseOutput = IncrementalFileMergerOutputs.fromAlgorithmAndWriter(mergeTransformAlgorithm, MergeOutputWriters.toDirectory(outputLocation));
}
/*
* We need a custom output to handle the case in which the same path appears in multiple
* inputs and the action is NONE, but only one input is actually PROJECT. In this specific
* case we will ignore all other inputs.
*/
Set<IncrementalFileMergerInput> projectInputs = contentMap.keySet().stream().filter(i -> contentMap.get(i).getScopes().contains(QualifiedContent.Scope.PROJECT)).collect(Collectors.toSet());
IncrementalFileMergerOutput output = new DelegateIncrementalFileMergerOutput(baseOutput) {
@Override
public void create(@NonNull String path, @NonNull List<IncrementalFileMergerInput> inputs) {
super.create(path, filter(path, inputs));
}
@Override
public void update(@NonNull String path, @NonNull List<String> prevInputNames, @NonNull List<IncrementalFileMergerInput> inputs) {
super.update(path, prevInputNames, filter(path, inputs));
}
@Override
public void remove(@NonNull String path) {
super.remove(path);
}
@NonNull
private ImmutableList<IncrementalFileMergerInput> filter(@NonNull String path, @NonNull List<IncrementalFileMergerInput> inputs) {
PackagingFileAction packagingAction = packagingOptions.getAction(path);
if (packagingAction == PackagingFileAction.NONE && inputs.stream().anyMatch(projectInputs::contains)) {
inputs = inputs.stream().filter(projectInputs::contains).collect(ImmutableCollectors.toImmutableList());
}
return ImmutableList.copyOf(inputs);
}
};
state = IncrementalFileMerger.merge(ImmutableList.copyOf(inputs), output, state);
saveAwbMergeState(state, awbTransform.getAwbBundle());
cacheUpdates.forEach(Runnable::run);
return null;
}
});
}
try {
waitableExecutor.waitForTasksWithQuickFail(false);
} catch (InterruptedException e) {
e.printStackTrace();
}
appVariantOutputContext.getAwbTransformMap().values().stream().forEach(awbTransform -> {
if (awbTransform.getAwbBundle().isMBundle) {
if (mergedType.contains(ExtendedContentType.NATIVE_LIBS)) {
File bundleOutputLocation = appVariantOutputContext.getAwbJniFolder(awbTransform.getAwbBundle());
if (bundleOutputLocation.exists()) {
try {
org.apache.commons.io.FileUtils.copyDirectory(bundleOutputLocation, outputLocation);
org.apache.commons.io.FileUtils.deleteDirectory(bundleOutputLocation);
} catch (IOException e) {
e.printStackTrace();
}
}
} else {
File bundleOutputLocation = new File(appVariantOutputContext.getAwbJavaResFolder(awbTransform.getAwbBundle()), "res.jar");
File tempDir = new File(outputLocation.getParentFile(), "unzip");
try {
if (bundleOutputLocation.exists() && ZipUtils.isZipFile(bundleOutputLocation)) {
BetterZip.unzipDirectory(bundleOutputLocation, tempDir);
}
} catch (IOException e) {
e.printStackTrace();
}
}
}
});
if (!mergedType.contains(ExtendedContentType.NATIVE_LIBS)) {
File tempDir = new File(outputLocation.getParentFile(), "unzip");
if (outputLocation != null && outputLocation.exists() && ZipUtils.isZipFile(outputLocation)) {
BetterZip.unzipDirectory(outputLocation, tempDir);
}
if (tempDir.exists() && tempDir.listFiles() != null) {
FileUtils.deleteIfExists(outputLocation);
BetterZip.zipDirectory(tempDir, outputLocation);
}
}
paths.parallelStream().forEach(s -> processAtlasNativeSo(s));
}
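What the transform above actually gets out of FileCacheByPath is an ordering guarantee: the cache lives under the transform's intermediate directory, the input-scanning utilities record their cache mutations as deferred Runnables in cacheUpdates, and those Runnables run only after the merge has finished and its state has been persisted. A minimal sketch of that ordering, assuming FileUtils here is com.android.utils.FileUtils and with scanInputs and mergeAndSave as hypothetical stand-ins for AtlasIncrementalFileMergeTransformUtils.toInput and the merge/saveMergeState pair:

import com.android.builder.files.FileCacheByPath;
import com.android.utils.FileUtils;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

class ZipCacheOrderingSketch {
    void mergeWithCache(File intermediateDir) throws IOException {
        // The zip cache is rooted inside the transform's own intermediate dir.
        File cacheDir = new File(intermediateDir, "zip-cache");
        FileUtils.mkdirs(cacheDir);
        FileCacheByPath zipCache = new FileCacheByPath(cacheDir);

        // Input-scanning code appends Runnables here instead of touching the
        // cache immediately.
        List<Runnable> cacheUpdates = new ArrayList<>();

        scanInputs(zipCache, cacheUpdates); // hypothetical: fills cacheUpdates
        mergeAndSave();                     // hypothetical: merge + save state

        // Only now is the cache brought in line with the inputs that were read,
        // so a failed or aborted merge leaves it describing the previous build.
        cacheUpdates.forEach(Runnable::run);
    }

    private void scanInputs(FileCacheByPath cache, List<Runnable> updates) throws IOException {
        // placeholder for AtlasIncrementalFileMergeTransformUtils.toInput(...)
    }

    private void mergeAndSave() throws IOException {
        // placeholder for IncrementalFileMerger.merge(...) + saveMergeState(state)
    }
}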
Use of com.android.builder.files.FileCacheByPath in project atlas by alibaba.
The class AwbApkPackageTask, method splitIncrementalAction.
private File splitIncrementalAction(ApkData apkData, @Nullable File processedResources, Map<File, FileStatus> changedInputs) throws IOException {
Set<File> androidResources = getAndroidResources(apkData, processedResources);
File incrementalDirForSplit = new File(getIncrementalFolder(awbBundle), apkData.getFullName());
File cacheByPathDir = new File(incrementalDirForSplit, ZIP_DIFF_CACHE_DIR);
if (!cacheByPathDir.exists()) {
FileUtils.mkdirs(cacheByPathDir);
}
FileCacheByPath cacheByPath = new FileCacheByPath(cacheByPathDir);
KnownFilesSaveData saveData = KnownFilesSaveData.make(incrementalDirForSplit);
final Set<File> assetsFiles = assets.getFiles();
Set<Runnable> cacheUpdates = new HashSet<>();
ImmutableMap<RelativeFile, FileStatus> changedDexFiles = KnownFilesSaveData.getChangedInputs(changedInputs, saveData, KnownFilesSaveData.InputSet.DEX, dexFolders.getFiles(), cacheByPath, cacheUpdates);
ImmutableMap<RelativeFile, FileStatus> changedJavaResources;
try {
changedJavaResources = KnownFilesSaveData.getChangedInputs(changedInputs, saveData, KnownFilesSaveData.InputSet.JAVA_RESOURCE, javaResouresFiles.getFiles(), cacheByPath, cacheUpdates);
} catch (Zip64NotSupportedException e) {
// copy all changedInputs into a smaller jar and rerun.
ImmutableMap.Builder<File, FileStatus> copiedInputs = ImmutableMap.builder();
for (Map.Entry<File, FileStatus> fileFileStatusEntry : changedInputs.entrySet()) {
copiedInputs.put(copyJavaResourcesOnly(getIncrementalFolder(awbBundle), fileFileStatusEntry.getKey()), fileFileStatusEntry.getValue());
}
changedJavaResources = KnownFilesSaveData.getChangedInputs(copiedInputs.build(), saveData, KnownFilesSaveData.InputSet.JAVA_RESOURCE, javaResouresFiles.getFiles(), cacheByPath, cacheUpdates);
}
ImmutableMap<RelativeFile, FileStatus> changedAssets = KnownFilesSaveData.getChangedInputs(changedInputs, saveData, KnownFilesSaveData.InputSet.ASSET, assetsFiles, cacheByPath, cacheUpdates);
ImmutableMap<RelativeFile, FileStatus> changedAndroidResources = KnownFilesSaveData.getChangedInputs(changedInputs, saveData, KnownFilesSaveData.InputSet.ANDROID_RESOURCE, androidResources, cacheByPath, cacheUpdates);
ImmutableMap<RelativeFile, FileStatus> changedNLibs = KnownFilesSaveData.getChangedInputs(changedInputs, saveData, KnownFilesSaveData.InputSet.NATIVE_RESOURCE, jniFolders.getFiles(), cacheByPath, cacheUpdates);
File outputFile = getOutputFile(awbBundle);
Collection<BuildOutput> manifestOutputs = BuildOutputs.load(TaskOutputHolder.TaskOutputType.MERGED_MANIFESTS, awbManifestFolder.getSingleFile().getParentFile());
doTask(apkData, incrementalDirForSplit, outputFile, cacheByPath, manifestOutputs, changedDexFiles, changedJavaResources, changedAssets, changedAndroidResources, changedNLibs);
/*
* Update the cache
*/
cacheUpdates.forEach(Runnable::run);
/*
* Update the set of known files kept in the save data.
*/
ImmutableMap<RelativeFile, FileStatus> allDex = IncrementalRelativeFileSets.fromZipsAndDirectories(dexFolders);
ImmutableMap<RelativeFile, FileStatus> allJavaResources = IncrementalRelativeFileSets.fromZipsAndDirectories(javaResouresFiles);
ImmutableMap<RelativeFile, FileStatus> allAssets = IncrementalRelativeFileSets.fromZipsAndDirectories(assetsFiles);
ImmutableMap<RelativeFile, FileStatus> allAndroidResources = IncrementalRelativeFileSets.fromZipsAndDirectories(androidResources);
ImmutableMap<RelativeFile, FileStatus> allJniResources = IncrementalRelativeFileSets.fromZipsAndDirectories(jniFolders);
saveData.setInputSet(allDex.keySet(), KnownFilesSaveData.InputSet.DEX);
saveData.setInputSet(allJavaResources.keySet(), KnownFilesSaveData.InputSet.JAVA_RESOURCE);
saveData.setInputSet(allAssets.keySet(), KnownFilesSaveData.InputSet.ASSET);
saveData.setInputSet(allAndroidResources.keySet(), KnownFilesSaveData.InputSet.ANDROID_RESOURCE);
saveData.setInputSet(allJniResources.keySet(), KnownFilesSaveData.InputSet.NATIVE_RESOURCE);
saveData.saveCurrentData();
return outputFile;
}
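The try/catch around the JAVA_RESOURCE diff is the one non-obvious step in this method: when a resource archive triggers Zip64NotSupportedException, each changed input is first repacked through copyJavaResourcesOnly into a smaller jar and the diff is recomputed against those copies. A stripped-down, self-contained sketch of that retry shape in plain Java, with computeChanges and shrink as hypothetical stand-ins for KnownFilesSaveData.getChangedInputs and copyJavaResourcesOnly, and String standing in for FileStatus:

import java.io.File;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

class Zip64FallbackSketch {

    // Stand-in for the Zip64 error raised by the real zip differ.
    static class Zip64NotSupported extends IOException {}

    Map<File, String> changedJavaResources(Map<File, String> changedInputs) throws IOException {
        try {
            // First attempt: diff the original archives directly.
            return computeChanges(changedInputs);
        } catch (Zip64NotSupported e) {
            // Fallback: repack each changed input into a smaller jar the
            // differ can handle, then diff the copies instead.
            Map<File, String> copies = new HashMap<>();
            for (Map.Entry<File, String> entry : changedInputs.entrySet()) {
                copies.put(shrink(entry.getKey()), entry.getValue());
            }
            return computeChanges(copies);
        }
    }

    private Map<File, String> computeChanges(Map<File, String> inputs) throws IOException {
        return inputs; // placeholder for KnownFilesSaveData.getChangedInputs(...)
    }

    private File shrink(File jar) {
        return jar; // placeholder for copyJavaResourcesOnly(...)
    }
}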
Use of com.android.builder.files.FileCacheByPath in project atlas by alibaba.
The class AwbApkPackageTask, method splitFullAction.
public File splitFullAction(@NonNull ApkData apkData, @Nullable File processedResources) throws IOException {
File incrementalDirForSplit = new File(getIncrementalFolder(awbBundle), apkData.getFullName());
/*
* Clear the intermediate build directory. We don't know if anything is in there and
* since this is a full build, we don't want to get any interference from previous state.
*/
if (incrementalDirForSplit.exists()) {
FileUtils.deleteDirectoryContents(incrementalDirForSplit);
} else {
FileUtils.mkdirs(incrementalDirForSplit);
}
File cacheByPathDir = new File(incrementalDirForSplit, ZIP_DIFF_CACHE_DIR);
FileUtils.mkdirs(cacheByPathDir);
FileCacheByPath cacheByPath = new FileCacheByPath(cacheByPathDir);
/*
* Clear the cache to make sure we do not do an incremental build.
*/
cacheByPath.clear();
Set<File> androidResources = getAndroidResources(apkData, processedResources);
appVariantOutputContext.getVariantContext().getProject().getLogger().warn(awbBundle.getName() + " androidResources File:" + androidResources.iterator().next().getAbsolutePath());
FileUtils.mkdirs(outputDirectory);
File outputFile = getOutputFile(awbBundle);
/*
* Additionally, delete the previous package if it exists.
*/
FileUtils.deleteIfExists(outputFile);
ImmutableMap<RelativeFile, FileStatus> updatedDex = IncrementalRelativeFileSets.fromZipsAndDirectories(dexFolders);
ImmutableMap<RelativeFile, FileStatus> updatedJavaResources = getJavaResourcesChanges();
ImmutableMap<RelativeFile, FileStatus> updatedAssets = IncrementalRelativeFileSets.fromZipsAndDirectories(assets.getFiles());
ImmutableMap<RelativeFile, FileStatus> updatedAndroidResources = IncrementalRelativeFileSets.fromZipsAndDirectories(androidResources);
ImmutableMap<RelativeFile, FileStatus> updatedJniResources = IncrementalRelativeFileSets.fromZipsAndDirectories(jniFolders);
Collection<BuildOutput> manifestOutputs = BuildOutputs.load(TaskOutputHolder.TaskOutputType.MERGED_MANIFESTS, awbManifestFolder.getSingleFile().getParentFile());
doTask(apkData, incrementalDirForSplit, outputFile, cacheByPath, manifestOutputs, updatedDex, updatedJavaResources, updatedAssets, updatedAndroidResources, updatedJniResources);
/*
* Update the known files.
*/
KnownFilesSaveData saveData = KnownFilesSaveData.make(incrementalDirForSplit);
saveData.setInputSet(updatedDex.keySet(), KnownFilesSaveData.InputSet.DEX);
saveData.setInputSet(updatedJavaResources.keySet(), KnownFilesSaveData.InputSet.JAVA_RESOURCE);
saveData.setInputSet(updatedAssets.keySet(), KnownFilesSaveData.InputSet.ASSET);
saveData.setInputSet(updatedAndroidResources.keySet(), KnownFilesSaveData.InputSet.ANDROID_RESOURCE);
saveData.setInputSet(updatedJniResources.keySet(), KnownFilesSaveData.InputSet.NATIVE_RESOURCE);
saveData.saveCurrentData();
String outputFileName = outputFile.getName();
File file = getAwbPackageOutputFile(appVariantOutputContext.getVariantContext(), outputFileName);
FileUtils.copyFileToDirectory(outputFile, file.getParentFile());
return new File(file.getParentFile(), outputFileName);
}
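The first half of splitFullAction is a reset: the incremental directory is emptied, the FileCacheByPath is cleared so no zip snapshot from an earlier run can be mistaken for current input, and any stale output package is deleted before packaging starts. A condensed sketch of that reset, assuming FileUtils here is com.android.utils.FileUtils; the "zip-cache" subdirectory name is illustrative rather than the task's actual ZIP_DIFF_CACHE_DIR value:

import com.android.builder.files.FileCacheByPath;
import com.android.utils.FileUtils;
import java.io.File;
import java.io.IOException;

class FullBuildResetSketch {
    void resetForFullBuild(File incrementalDir, File outputFile) throws IOException {
        // A full build must not inherit anything from previous state.
        if (incrementalDir.exists()) {
            FileUtils.deleteDirectoryContents(incrementalDir);
        } else {
            FileUtils.mkdirs(incrementalDir);
        }

        // Re-create the zip-diff cache directory and wipe the cache itself.
        File cacheDir = new File(incrementalDir, "zip-cache");
        FileUtils.mkdirs(cacheDir);
        FileCacheByPath cacheByPath = new FileCacheByPath(cacheDir);
        cacheByPath.clear();

        // And make sure no package produced by an earlier run survives.
        FileUtils.deleteIfExists(outputFile);
    }
}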