Use of com.android.build.gradle.internal.api.AwbTransform in project atlas by alibaba:
class AtlasMergeJavaResourcesTransform, method transform.
/**
 * Incrementally merges java resources (or native libraries, depending on the transform's
 * {@code mergedType}) for the main APK and for every AWB (atlas bundle), then post-processes
 * bundle outputs.
 *
 * <p>The main-bundle merge and each AWB merge are submitted to {@code waitableExecutor} and run
 * concurrently; the method blocks on {@code waitForTasksWithQuickFail} before the post-processing
 * phase that folds mbundle outputs back into the main output location.
 *
 * <p>NOTE(review): the field {@code outputLocation} is assigned inside the first async task and
 * read after the executor is drained — correctness relies on the wait completing first.
 *
 * @param invocation transform invocation carrying inputs, the output provider and the
 *                   incremental flag
 * @throws IOException if the final zip/unzip post-processing fails
 */
@Override
public void transform(TransformInvocation invocation) throws IOException {
    // Task 1: merge the main bundle's java resources / native libs.
    waitableExecutor.execute(new Callable<Void>() {
        @Override
        public Void call() throws Exception {
            // Cache of extracted zip entries, keyed by path, reused across incremental runs.
            cacheDir = new File(intermediateDir, "zip-cache");
            FileUtils.mkdirs(cacheDir);
            FileCacheByPath zipCache = new FileCacheByPath(cacheDir);
            TransformOutputProvider outputProvider = invocation.getOutputProvider();
            checkNotNull(outputProvider, "Missing output object for transform " + getName());
            ParsedPackagingOptions packagingOptions = new ParsedPackagingOptions(AtlasMergeJavaResourcesTransform.this.packagingOptions);
            boolean full = false;
            // Restore the merge state of the previous run; absence forces a full build.
            IncrementalFileMergerState state = loadMergeState();
            if (state == null || !invocation.isIncremental()) {
                /*
                 * This is a full build.
                 */
                state = new IncrementalFileMergerState();
                outputProvider.deleteAll();
                full = true;
            }
            // Deferred zip-cache updates; only executed after a successful merge (see below).
            List<Runnable> cacheUpdates = new ArrayList<>();
            Map<IncrementalFileMergerInput, QualifiedContent> contentMap = new HashMap<>();
            List<IncrementalFileMergerInput> inputs = new ArrayList<>(AtlasIncrementalFileMergeTransformUtils.toInput(invocation, zipCache, cacheUpdates, full, contentMap, null, appVariantOutputContext.getVariantContext().getVariantName()));
            /*
             * In an ideal world, we could just send the inputs to the file merger. However, in the
             * real world we live in, things are more complicated :)
             *
             * We need to:
             *
             * 1. We need to bring inputs that refer to the project scope before the other inputs.
             * 2. Prefix libraries that come from directories with "lib/".
             * 3. Filter all inputs to remove anything not accepted by acceptedPathsPredicate neither
             * by packagingOptions.
             */
            // Sort inputs to move project scopes to the start.
            inputs.sort((i0, i1) -> {
                int v0 = contentMap.get(i0).getScopes().contains(QualifiedContent.Scope.PROJECT) ? 0 : 1;
                int v1 = contentMap.get(i1).getScopes().contains(QualifiedContent.Scope.PROJECT) ? 0 : 1;
                return v0 - v1;
            });
            // Prefix libraries with "lib/" if we're doing libraries.
            // NOTE: "mergedType" on the next line still refers to the outer field (a Set);
            // the local declared below shadows it only from its declaration onward.
            assert mergedType.size() == 1;
            QualifiedContent.ContentType mergedType = AtlasMergeJavaResourcesTransform.this.mergedType.iterator().next();
            if (mergedType == ExtendedContentType.NATIVE_LIBS) {
                inputs = inputs.stream().map(i -> {
                    QualifiedContent qc = contentMap.get(i);
                    if (qc.getFile().isDirectory()) {
                        i = new RenameIncrementalFileMergerInput(i, s -> "lib/" + s, s -> s.substring("lib/".length()));
                        contentMap.put(i, qc);
                    }
                    return i;
                }).collect(Collectors.toList());
            }
            // Filter inputs.
            Predicate<String> inputFilter = acceptedPathsPredicate.and(path -> packagingOptions.getAction(path) != PackagingFileAction.EXCLUDE);
            inputs = inputs.stream().map(i -> {
                IncrementalFileMergerInput i2 = new FilterIncrementalFileMergerInput(i, inputFilter);
                contentMap.put(i2, contentMap.get(i));
                return i2;
            }).collect(Collectors.toList());
            /*
             * Create the algorithm used by the merge transform. This algorithm decides on which
             * algorithm to delegate to depending on the packaging option of the path. By default it
             * requires just one file (no merging).
             */
            StreamMergeAlgorithm mergeTransformAlgorithm = StreamMergeAlgorithms.select(path -> {
                PackagingFileAction packagingAction = packagingOptions.getAction(path);
                switch(packagingAction) {
                    case EXCLUDE:
                        // Should have been excluded from the input.
                        throw new AssertionError();
                    case PICK_FIRST:
                        return StreamMergeAlgorithms.pickFirst();
                    case MERGE:
                        return StreamMergeAlgorithms.concat();
                    case NONE:
                        return StreamMergeAlgorithms.acceptOnlyOne();
                    default:
                        throw new AssertionError();
                }
            });
            /*
             * Create an output that uses the algorithm. This is not the final output because,
             * unfortunately, we still have the complexity of the project scope overriding other scopes
             * to solve.
             *
             * When resources inside a jar file are extracted to a directory, the results may not be
             * expected on Windows if the file names end with "." (bug 65337573), or if there is an
             * uppercase/lowercase conflict. To work around this issue, we copy these resources to a
             * jar file.
             */
            IncrementalFileMergerOutput baseOutput;
            if (mergedType == QualifiedContent.DefaultContentType.RESOURCES) {
                // Resources go into a jar; also registered with the main-dex helper so later
                // packaging steps pick the merged java resources up.
                outputLocation = outputProvider.getContentLocation("resources", getOutputTypes(), getScopes(), Format.JAR);
                baseOutput = IncrementalFileMergerOutputs.fromAlgorithmAndWriter(mergeTransformAlgorithm, MergeOutputWriters.toZip(outputLocation));
                AtlasBuildContext.atlasMainDexHelperMap.get(appVariantOutputContext.getVariantContext().getVariantName()).addMainJavaRes(outputLocation);
            } else {
                // Native libs go into a directory.
                outputLocation = outputProvider.getContentLocation("resources", getOutputTypes(), getScopes(), Format.DIRECTORY);
                baseOutput = IncrementalFileMergerOutputs.fromAlgorithmAndWriter(mergeTransformAlgorithm, MergeOutputWriters.toDirectory(outputLocation));
            }
            /*
             * We need a custom output to handle the case in which the same path appears in multiple
             * inputs and the action is NONE, but only one input is actually PROJECT. In this specific
             * case we will ignore all other inputs.
             */
            Set<IncrementalFileMergerInput> projectInputs = contentMap.keySet().stream().filter(i -> contentMap.get(i).getScopes().contains(QualifiedContent.Scope.PROJECT)).collect(Collectors.toSet());
            IncrementalFileMergerOutput output = new DelegateIncrementalFileMergerOutput(baseOutput) {
                @Override
                public void create(@NonNull String path, @NonNull List<IncrementalFileMergerInput> inputs) {
                    super.create(path, filter(path, inputs));
                }
                @Override
                public void update(@NonNull String path, @NonNull List<String> prevInputNames, @NonNull List<IncrementalFileMergerInput> inputs) {
                    super.update(path, prevInputNames, filter(path, inputs));
                }
                @Override
                public void remove(@NonNull String path) {
                    super.remove(path);
                }
                // Drops non-project inputs for NONE-action paths when a project input exists.
                @NonNull
                private ImmutableList<IncrementalFileMergerInput> filter(@NonNull String path, @NonNull List<IncrementalFileMergerInput> inputs) {
                    PackagingFileAction packagingAction = packagingOptions.getAction(path);
                    if (packagingAction == PackagingFileAction.NONE && inputs.stream().anyMatch(projectInputs::contains)) {
                        inputs = inputs.stream().filter(projectInputs::contains).collect(ImmutableCollectors.toImmutableList());
                    }
                    return ImmutableList.copyOf(inputs);
                }
            };
            state = IncrementalFileMerger.merge(ImmutableList.copyOf(inputs), output, state);
            saveMergeState(state);
            // Only commit the zip-cache updates once the merge has fully succeeded.
            cacheUpdates.forEach(Runnable::run);
            return null;
        }
    });
    // Task 2..n: one merge task per AWB bundle, each with its own zip cache and merge state.
    for (AwbTransform awbTransform : appVariantOutputContext.getAwbTransformMap().values()) {
        File awbCacheDir = new File(intermediateDir, "awb-zip-cache" + File.separator + awbTransform.getAwbBundle().getName());
        waitableExecutor.execute(new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                FileUtils.mkdirs(awbCacheDir);
                FileCacheByPath zipCache = new FileCacheByPath(awbCacheDir);
                ParsedPackagingOptions packagingOptions = new ParsedPackagingOptions(AtlasMergeJavaResourcesTransform.this.packagingOptions);
                boolean full = false;
                IncrementalFileMergerState state = loadAwbMergeState(awbTransform.getAwbBundle());
                if (state == null || !invocation.isIncremental()) {
                    /*
                     * This is a full build.
                     */
                    state = new IncrementalFileMergerState();
                    // Clear stale per-bundle outputs for whichever content type we merge.
                    if (appVariantOutputContext.getAwbJniFolder(awbTransform.getAwbBundle()).exists() && mergedType.contains(ExtendedContentType.NATIVE_LIBS)) {
                        FileUtils.deleteDirectoryContents(appVariantOutputContext.getAwbJniFolder(awbTransform.getAwbBundle()));
                    }
                    if (appVariantOutputContext.getAwbJavaResFolder(awbTransform.getAwbBundle()).exists() && mergedType.contains(QualifiedContent.DefaultContentType.RESOURCES)) {
                        FileUtils.deleteDirectoryContents(appVariantOutputContext.getAwbJavaResFolder(awbTransform.getAwbBundle()));
                    }
                    full = true;
                }
                List<Runnable> cacheUpdates = new ArrayList<>();
                Map<IncrementalFileMergerInput, QualifiedContent> contentMap = new HashMap<>();
                // Same as the main-bundle task, but scoped to this AWB via the awbTransform arg.
                List<IncrementalFileMergerInput> inputs = new ArrayList<>(AtlasIncrementalFileMergeTransformUtils.toInput(invocation, zipCache, cacheUpdates, full, contentMap, awbTransform, appVariantOutputContext.getVariantContext().getVariantName()));
                /*
                 * In an ideal world, we could just send the inputs to the file merger. However, in the
                 * real world we live in, things are more complicated :)
                 *
                 * We need to:
                 *
                 * 1. We need to bring inputs that refer to the project scope before the other inputs.
                 * 2. Prefix libraries that come from directories with "lib/".
                 * 3. Filter all inputs to remove anything not accepted by acceptedPathsPredicate neither
                 * by packagingOptions.
                 */
                // Sort inputs to move project scopes to the start.
                inputs.sort((i0, i1) -> {
                    int v0 = contentMap.get(i0).getScopes().contains(QualifiedContent.Scope.PROJECT) ? 0 : 1;
                    int v1 = contentMap.get(i1).getScopes().contains(QualifiedContent.Scope.PROJECT) ? 0 : 1;
                    return v0 - v1;
                });
                // Prefix libraries with "lib/" if we're doing libraries.
                // NOTE: as above, the assert reads the outer Set field before the local shadows it.
                assert mergedType.size() == 1;
                QualifiedContent.ContentType mergedType = AtlasMergeJavaResourcesTransform.this.mergedType.iterator().next();
                if (mergedType == ExtendedContentType.NATIVE_LIBS) {
                    inputs = inputs.stream().map(i -> {
                        QualifiedContent qc = contentMap.get(i);
                        if (qc.getFile().isDirectory()) {
                            i = new RenameIncrementalFileMergerInput(i, s -> "lib/" + s, s -> s.substring("lib/".length()));
                            contentMap.put(i, qc);
                        }
                        return i;
                    }).collect(Collectors.toList());
                }
                // Filter inputs.
                Predicate<String> inputFilter = acceptedPathsPredicate.and(path -> packagingOptions.getAction(path) != PackagingFileAction.EXCLUDE);
                inputs = inputs.stream().map(i -> {
                    IncrementalFileMergerInput i2 = new FilterIncrementalFileMergerInput(i, inputFilter);
                    contentMap.put(i2, contentMap.get(i));
                    return i2;
                }).collect(Collectors.toList());
                /*
                 * Create the algorithm used by the merge transform. This algorithm decides on which
                 * algorithm to delegate to depending on the packaging option of the path. By default it
                 * requires just one file (no merging).
                 */
                StreamMergeAlgorithm mergeTransformAlgorithm = StreamMergeAlgorithms.select(path -> {
                    PackagingFileAction packagingAction = packagingOptions.getAction(path);
                    switch(packagingAction) {
                        case EXCLUDE:
                            // Should have been excluded from the input.
                            throw new AssertionError();
                        case PICK_FIRST:
                            return StreamMergeAlgorithms.pickFirst();
                        case MERGE:
                            return StreamMergeAlgorithms.concat();
                        case NONE:
                            return StreamMergeAlgorithms.acceptOnlyOne();
                        default:
                            throw new AssertionError();
                    }
                });
                /*
                 * Create an output that uses the algorithm. This is not the final output because,
                 * unfortunately, we still have the complexity of the project scope overriding other scopes
                 * to solve.
                 *
                 * When resources inside a jar file are extracted to a directory, the results may not be
                 * expected on Windows if the file names end with "." (bug 65337573), or if there is an
                 * uppercase/lowercase conflict. To work around this issue, we copy these resources to a
                 * jar file.
                 */
                IncrementalFileMergerOutput baseOutput;
                if (mergedType == QualifiedContent.DefaultContentType.RESOURCES) {
                    // Per-bundle resources are written into <awb-java-res-folder>/res.jar.
                    File outputLocation = new File(appVariantOutputContext.getAwbJavaResFolder(awbTransform.getAwbBundle()), "res.jar");
                    if (!appVariantOutputContext.getAwbJavaResFolder(awbTransform.getAwbBundle()).exists()) {
                        appVariantOutputContext.getAwbJavaResFolder(awbTransform.getAwbBundle()).mkdirs();
                    }
                    // Seed an empty zip so the writer can open it even when nothing is merged.
                    createEmptyZipFile(outputLocation);
                    baseOutput = IncrementalFileMergerOutputs.fromAlgorithmAndWriter(mergeTransformAlgorithm, MergeOutputWriters.toZip(outputLocation));
                } else {
                    File outputLocation = appVariantOutputContext.getAwbJniFolder(awbTransform.getAwbBundle());
                    baseOutput = IncrementalFileMergerOutputs.fromAlgorithmAndWriter(mergeTransformAlgorithm, MergeOutputWriters.toDirectory(outputLocation));
                }
                /*
                 * We need a custom output to handle the case in which the same path appears in multiple
                 * inputs and the action is NONE, but only one input is actually PROJECT. In this specific
                 * case we will ignore all other inputs.
                 */
                Set<IncrementalFileMergerInput> projectInputs = contentMap.keySet().stream().filter(i -> contentMap.get(i).getScopes().contains(QualifiedContent.Scope.PROJECT)).collect(Collectors.toSet());
                IncrementalFileMergerOutput output = new DelegateIncrementalFileMergerOutput(baseOutput) {
                    @Override
                    public void create(@NonNull String path, @NonNull List<IncrementalFileMergerInput> inputs) {
                        super.create(path, filter(path, inputs));
                    }
                    @Override
                    public void update(@NonNull String path, @NonNull List<String> prevInputNames, @NonNull List<IncrementalFileMergerInput> inputs) {
                        super.update(path, prevInputNames, filter(path, inputs));
                    }
                    @Override
                    public void remove(@NonNull String path) {
                        super.remove(path);
                    }
                    // Drops non-project inputs for NONE-action paths when a project input exists.
                    @NonNull
                    private ImmutableList<IncrementalFileMergerInput> filter(@NonNull String path, @NonNull List<IncrementalFileMergerInput> inputs) {
                        PackagingFileAction packagingAction = packagingOptions.getAction(path);
                        if (packagingAction == PackagingFileAction.NONE && inputs.stream().anyMatch(projectInputs::contains)) {
                            inputs = inputs.stream().filter(projectInputs::contains).collect(ImmutableCollectors.toImmutableList());
                        }
                        return ImmutableList.copyOf(inputs);
                    }
                };
                state = IncrementalFileMerger.merge(ImmutableList.copyOf(inputs), output, state);
                saveAwbMergeState(state, awbTransform.getAwbBundle());
                cacheUpdates.forEach(Runnable::run);
                return null;
            }
        });
    }
    // Block until every merge task has finished (quick-fail propagates task failures).
    try {
        waitableExecutor.waitForTasksWithQuickFail(false);
    } catch (InterruptedException e) {
        // NOTE(review): interruption is swallowed without re-interrupting the thread;
        // consider Thread.currentThread().interrupt() instead of just logging.
        e.printStackTrace();
    }
    // Fold mbundle outputs back into the main output: native libs are copied into the main
    // output directory; java-res jars are unpacked next to the main output for re-zipping below.
    appVariantOutputContext.getAwbTransformMap().values().stream().forEach(awbTransform -> {
        if (awbTransform.getAwbBundle().isMBundle) {
            if (mergedType.contains(ExtendedContentType.NATIVE_LIBS)) {
                File bundleOutputLocation = appVariantOutputContext.getAwbJniFolder(awbTransform.getAwbBundle());
                if (bundleOutputLocation.exists()) {
                    try {
                        org.apache.commons.io.FileUtils.copyDirectory(bundleOutputLocation, outputLocation);
                        org.apache.commons.io.FileUtils.deleteDirectory(bundleOutputLocation);
                    } catch (IOException e) {
                        // NOTE(review): best-effort copy; failure is only logged.
                        e.printStackTrace();
                    }
                }
            } else {
                File bundleOutputLocation = new File(appVariantOutputContext.getAwbJavaResFolder(awbTransform.getAwbBundle()), "res.jar");
                File tempDir = new File(outputLocation.getParentFile(), "unzip");
                try {
                    if (bundleOutputLocation.exists() && ZipUtils.isZipFile(bundleOutputLocation)) {
                        BetterZip.unzipDirectory(bundleOutputLocation, tempDir);
                    }
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
    });
    // For java resources: merge the main jar with the unpacked mbundle resources and re-zip.
    if (!mergedType.contains(ExtendedContentType.NATIVE_LIBS)) {
        File tempDir = new File(outputLocation.getParentFile(), "unzip");
        if (outputLocation != null && outputLocation.exists() && ZipUtils.isZipFile(outputLocation)) {
            BetterZip.unzipDirectory(outputLocation, tempDir);
        }
        if (tempDir.exists() && tempDir.listFiles() != null) {
            FileUtils.deleteIfExists(outputLocation);
            BetterZip.zipDirectory(tempDir, outputLocation);
        }
    }
    // presumably "paths" holds native-so paths collected during the merge — TODO confirm.
    paths.parallelStream().forEach(s -> processAtlasNativeSo(s));
}
Use of com.android.build.gradle.internal.api.AwbTransform in project atlas by alibaba:
class AtlasProguardTransform, method doMainBundleProguard.
/**
 * Runs proguard over the main bundle: collects the main-dex jar/directory inputs, wires in the
 * generated bundle keep rules and default proguard files, executes {@link BundleProguarder},
 * and finally copies the jars excluded from obfuscation to the transform output.
 *
 * @param invocation transform invocation supplying inputs and the output provider
 * @throws Exception if keep-rule generation, proguard execution or file copying fails
 */
private void doMainBundleProguard(TransformInvocation invocation) throws Exception {
    // Apply bundle input: generate the keep rules that protect bundle entry points.
    Profiler.enter("bundleKeep");
    File bundleKeep = AtlasProguardHelper.generateBundleKeepCfg(appVariantContext);
    Profiler.release();
    Input input = new Input();
    // Synthetic bundle that stands in for the main bundle's android library dependencies.
    AwbBundle awbBundle = new AwbBundle();
    awbBundle.getAndroidLibraries().addAll(AtlasBuildContext.androidDependencyTrees.get(appVariantContext.getVariantName()).getMainBundle().getAndroidLibraries());
    AwbTransform awbTransform = new AwbTransform(awbBundle);
    input.getAwbBundles().add(awbTransform);
    List<File> unProguardJars = new ArrayList<>();
    // Enter the input
    for (TransformInput transformInput : invocation.getInputs()) {
        for (JarInput jarInput : transformInput.getJarInputs()) {
            File file = jarInput.getFile();
            // "combined-rmerge" jars are kept out of proguard and copied through verbatim.
            if (file.getName().startsWith("combined-rmerge")) {
                unProguardJars.add(file);
            } else if (AtlasBuildContext.atlasMainDexHelperMap.get(appVariantContext.getVariantName()).inMainDex(jarInput)) {
                awbTransform.getInputLibraries().add(file);
            }
        }
        for (DirectoryInput directoryInput : transformInput.getDirectoryInputs()) {
            if (AtlasBuildContext.atlasMainDexHelperMap.get(appVariantContext.getVariantName()).getInputDirs().contains(directoryInput.getFile())) {
                awbTransform.getInputLibraries().add(directoryInput.getFile());
            }
        }
    }
    // Input libraries: boot classpath plus the jars we deliberately skip obfuscating.
    input.getLibraries().addAll(appVariantContext.getScope().getGlobalScope().getAndroidBuilder().getBootClasspath(true));
    input.getLibraries().addAll(unProguardJars);
    // The default proguard configuration
    input.getDefaultProguardFiles().addAll(defaultProguardFiles);
    // bundle keeps
    input.getParentKeeps().add(bundleKeep);
    // NOTE(review): outFile is deleted but never referenced afterwards — TODO confirm whether
    // the deletion is only meant to clear a stale jar at this content location.
    File outFile = invocation.getOutputProvider().getContentLocation("main", getOutputTypes(), getScopes(), Format.JAR);
    outFile.delete();
    input.proguardOutputDir = invocation.getOutputProvider().getContentLocation("main", getOutputTypes(), getScopes(), Format.DIRECTORY);
    // Mirror the legacy transform's proguard report locations via reflection.
    input.printMapping = (File) ReflectUtils.getField(oldTransform, "printMapping");
    input.dump = (File) ReflectUtils.getField(oldTransform, "dump");
    input.printSeeds = (File) ReflectUtils.getField(oldTransform, "printSeeds");
    input.printUsage = (File) ReflectUtils.getField(oldTransform, "printUsage");
    input.printConfiguration = new File(appVariantContext.getProject().getBuildDir(), "outputs/proguard.cfg");
    Profiler.enter("executeproguard");
    BundleProguarder.execute(appVariantContext, input);
    transformInput(input);
    Profiler.release();
    // Copy the non-obfuscated jars straight into the transform output.
    for (File jar : unProguardJars) {
        File to = invocation.getOutputProvider().getContentLocation(FileNameUtils.getUniqueJarName(jar), getOutputTypes(), getScopes(), Format.JAR);
        FileUtils.copyFile(jar, to);
    }
    // Publish the proguarded classes as a stream so the next transform can consume them.
    OriginalStream originalStream = OriginalStream.builder(appVariantContext.getProject(), "proguard-classes").addContentTypes(com.android.build.gradle.internal.pipeline.TransformManager.CONTENT_CLASS).addScope(QualifiedContent.Scope.PROJECT).setFileCollection(appVariantContext.getProject().files(AtlasBuildContext.atlasMainDexHelperMap.get(appVariantContext.getVariantName()).getAllMainDexJars())).build();
    if (nextTransformTask != null) {
        Collection consumedInputStreams = (Collection) ReflectUtils.getField(nextTransformTask, "consumedInputStreams");
        if (appVariantContext.getAtlasExtension().getTBuildConfig().isFastProguard() && consumedInputStreams != null) {
            consumedInputStreams.add(originalStream);
        }
    }
}
Use of com.android.build.gradle.internal.api.AwbTransform in project atlas by alibaba:
class ResourcesShrinker, method splitAction.
/**
 * Shrinks the resources of one APK split: analyzes resource usage against the main-dex and
 * AWB classes, rewrites the uncompressed {@code .ap_} stripping unused {@code res/} entries,
 * and does the same for each AWB bundle's resource package in parallel.
 *
 * <p>Fixes over the previous revision: the "Removed n%" statistic was computed with a
 * pointless narrowing {@code (int)} cast inside long arithmetic (overflow risk for very large
 * resource packages) and divided by the original size without a zero guard; the duplicated
 * stats-reporting code is now shared in {@link #logShrinkStats}.
 *
 * @param apkData                 the split being processed
 * @param uncompressedResourceFile the unshrunk {@code .ap_}; when {@code null} nothing is done
 * @param invocation              transform invocation (unused directly, kept for signature
 *                                compatibility)
 * @param splitList               split metadata (unused directly, kept for signature
 *                                compatibility)
 * @return the stripped main-split resource file, or {@code null} when there was no input
 */
@Nullable
public File splitAction(@NonNull ApkData apkData, @Nullable File uncompressedResourceFile, TransformInvocation invocation, SplitList splitList) {
    if (uncompressedResourceFile == null) {
        return null;
    }
    // Classes to scan for resource references: main-dex dirs/jars plus every AWB's inputs.
    List<File> classes = new ArrayList<>();
    classes.addAll(AtlasBuildContext.atlasMainDexHelperMap.get(variantContext.getVariantName()).getInputDirs());
    classes.addAll(AtlasBuildContext.atlasMainDexHelperMap.get(variantContext.getVariantName()).getAllMainDexJars());
    AppVariantOutputContext appVariantOutputContext = variantContext.getAppVariantOutputContext(apkData);
    for (AwbTransform awbTransform : appVariantOutputContext.getAwbTransformMap().values()) {
        classes.addAll(awbTransform.getInputLibraries());
        if (awbTransform.getInputDirs() != null && awbTransform.getInputDirs().size() > 0) {
            classes.addAll(awbTransform.getInputDirs());
        }
    }
    WaitableExecutor executor = WaitableExecutor.useGlobalSharedThreadPool();
    Collection<BuildOutput> mergedManifests = BuildOutputs.load(ResourcesShrinker.this.mergedManifests);
    BuildOutput mergedManifest = OutputScope.getOutput(mergedManifests, TaskOutputHolder.TaskOutputType.MERGED_MANIFESTS, apkData);
    File mappingFile = mappingFileSrc != null ? mappingFileSrc.getSingleFile() : null;
    // Main split shrink runs asynchronously; the result file is joined at the end.
    ForkJoinTask<File> task = executor.execute(() -> {
        File reportFile = null;
        if (mappingFile != null) {
            File logDir = mappingFile.getParentFile();
            if (logDir != null) {
                reportFile = new File(logDir, "resources.txt");
            }
        }
        File compressedResourceFile = new File(compressedResources, "resources-" + apkData.getBaseName() + "-stripped.ap_");
        FileUtils.mkdirs(compressedResourceFile.getParentFile());
        if (mergedManifest == null) {
            // Without a merged manifest we cannot analyze usage; pass the file through untouched.
            try {
                FileUtils.copyFile(uncompressedResourceFile, compressedResourceFile);
            } catch (IOException e) {
                logger.error("Failed to copy uncompressed resource file :", e);
                throw new RuntimeException("Failed to copy uncompressed resource file", e);
            }
            return compressedResourceFile;
        }
        // Analyze resources and usages and strip out unused
        ResourceUsageAnalyzer analyzer = new ResourceUsageAnalyzer(sourceDir, classes, mergedManifest.getOutputFile(), mappingFile, resourceDir.getSingleFile(), reportFile);
        try {
            analyzer.setVerbose(logger.isEnabled(LogLevel.INFO));
            analyzer.setDebug(logger.isEnabled(LogLevel.DEBUG));
            analyzer.analyze();
            // Just rewrite the .ap_ file to strip out the res/ files for unused resources
            analyzer.rewriteResourceZip(uncompressedResourceFile, compressedResourceFile);
            if (analyzer.getUnusedResourceCount() > 0) {
                logShrinkStats("Removed unused resources", uncompressedResourceFile.length(), compressedResourceFile.length());
            }
        } catch (Exception e) {
            // Shrinking is an optimization; a failure must not break the build.
            logger.quiet("Failed to shrink resources: ignoring", e);
        } finally {
            analyzer.dispose();
        }
        return compressedResourceFile;
    });
    // One shrink task per AWB bundle, sharing the executor with the main-split task above.
    for (AwbTransform awbTransform : appVariantOutputContext.getAwbTransformMap().values()) {
        AwbBundle awbBundle = awbTransform.getAwbBundle();
        File compressedBundleResourceFile = appVariantOutputContext.getAwbCompressResourcePackageOutputFile(awbBundle);
        File unCompressedBundleResourceFile = appVariantOutputContext.getAwbProcessResourcePackageOutputFile(awbBundle);
        File awbResDir = appVariantOutputContext.getAwbMergedResourceDir(variantContext.getVariantConfiguration(), awbBundle);
        File reportFile = new File(uncompressedResourceFile.getParentFile(), "resources.txt");
        File bundleSourceDir = appVariantOutputContext.getAwbRClassSourceOutputDir(variantContext.getVariantConfiguration(), awbBundle);
        executor.execute(() -> {
            ResourceUsageAnalyzer analyzer = new ResourceUsageAnalyzer(bundleSourceDir, classes, mergedManifest.getOutputFile(), mappingFile, awbResDir, reportFile);
            try {
                analyzer.setVerbose(logger.isEnabled(LogLevel.INFO));
                analyzer.setDebug(logger.isEnabled(LogLevel.DEBUG));
                analyzer.analyze();
                // Just rewrite the .ap_ file to strip out the res/ files for unused resources
                analyzer.rewriteResourceZip(unCompressedBundleResourceFile, compressedBundleResourceFile);
                if (analyzer.getUnusedResourceCount() > 0) {
                    logShrinkStats("Removed awb bundle" + awbBundle.getName() + " unused resources", unCompressedBundleResourceFile.length(), compressedBundleResourceFile.length());
                }
            } catch (Exception e) {
                logger.quiet("Failed to shrink resources: ignoring", e);
            } finally {
                analyzer.dispose();
            }
            return compressedBundleResourceFile;
        });
    }
    try {
        List<WaitableExecutor.TaskResult<File>> taskResults = executor.waitForAllTasks();
        taskResults.forEach(taskResult -> {
            if (taskResult.getException() != null) {
                throw new BuildException(taskResult.getException().getMessage(), taskResult.getException());
            }
        });
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
        throw new RuntimeException(e);
    }
    return task.join();
}

/**
 * Logs a one-shot summary of the size reduction achieved by resource shrinking.
 * The first invocation also prints the hint explaining how to disable shrinking.
 *
 * @param prefix leading message, e.g. "Removed unused resources"
 * @param before size in bytes of the package before stripping
 * @param after  size in bytes of the package after stripping
 */
private void logShrinkStats(String prefix, long before, long after) {
    StringBuilder sb = new StringBuilder(200);
    sb.append(prefix);
    // This is a bit misleading until we can strip out all resource types:
    // int total = analyzer.getTotalResourceCount()
    // sb.append("(" + unused + "/" + total + ")")
    // Pure long arithmetic (no narrowing cast) and a zero guard for empty inputs.
    long percent = before > 0 ? (before - after) * 100 / before : 0;
    sb.append(": Binary resource data reduced from ").append(toKbString(before)).append("KB to ").append(toKbString(after)).append("KB: Removed ").append(percent).append("%");
    if (!ourWarned) {
        ourWarned = true;
        String name = variantData.getVariantConfiguration().getBuildType().getName();
        sb.append("\n").append("Note: If necessary, you can disable resource shrinking by adding\n").append("android {\n").append(" buildTypes {\n").append(" ").append(name).append(" {\n").append(" shrinkResources false\n").append(" }\n").append(" }\n").append("}");
    }
    System.out.println(sb.toString());
}
Use of com.android.build.gradle.internal.api.AwbTransform in project atlas by alibaba:
class AwbDexsMerger, method merge.
/**
 * Merges the dex files of every AWB bundle, after seeding the dex counter with the number
 * of dex files already present in the main dex output.
 *
 * @param transformInvocation invocation whose output provider locates the main dex directory
 */
@Override
public void merge(TransformInvocation transformInvocation) {
    mainDexOut = getDexOutputLocation(transformInvocation.getOutputProvider(), "main", TransformManager.SCOPE_FULL_PROJECT);
    // Nothing to merge unless the main dex output directory is already in place.
    if (!(mainDexOut.exists() && mainDexOut.isDirectory())) {
        return;
    }
    // Start the counter at the number of .dex files the main output already contains.
    int existingDexCount = org.apache.commons.io.FileUtils.listFiles(mainDexOut, new String[] { "dex" }, true).size();
    atomicInteger.set(existingDexCount);
    for (AwbTransform bundleTransform : variantOutputContext.getAwbTransformMap().values()) {
        merge(bundleTransform.getAwbBundle());
    }
}
Use of com.android.build.gradle.internal.api.AwbTransform in project atlas by alibaba:
class DexByteCodeConverterHook, method convertByteCode.
// @Override
// public void runDexer(DexProcessBuilder builder, DexOptions dexOptions, ProcessOutputHandler processOutputHandler) throws ProcessException, IOException, InterruptedException {
// builder.addInputs(inputFile);
// super.runDexer(builder,dexOptions,processOutputHandler);
//
//
// }
/**
 * Converts class files to dex for the main bundle and, in parallel, for every AWB bundle.
 *
 * <p>Per-AWB dexing is submitted to {@code waitableExecutor}; the main bundle is dexed on the
 * calling thread, with a file-cache fast path keyed on input hashes. In the multidex case each
 * input is dexed into its own folder (in parallel) and the resulting archives are merged with
 * {@link AtlasDexArchiveMerger}; mbundle dexes are then appended or merged in.
 *
 * @param invocation-level parameters follow the superclass contract:
 * @param inputs               class inputs for D8 main-dex creation
 * @param outDexFolder         destination folder for the final main dex files
 * @param multidex             whether legacy multidex is enabled
 * @param mainDexList          optional main-dex-list file (may be generated here if missing)
 * @param dexOptions           dx/d8 options (jumbo mode, additional parameters, ...)
 * @param processOutputHandler receives tool output
 * @param minSdkVersion        min SDK used as dexing API level
 */
@Override
public void convertByteCode(Collection<File> inputs, File outDexFolder, boolean multidex, final File mainDexList, DexOptions dexOptions, ProcessOutputHandler processOutputHandler, int minSdkVersion) throws IOException, InterruptedException, ProcessException {
    logger.warning("outDexFolder:" + outDexFolder.getAbsolutePath());
    FileUtils.forceMkdir(outDexFolder);
    // outDexFolder.mkdirs();
    AtlasDependencyTree atlasDependencyTree = AtlasBuildContext.androidDependencyTrees.get(variantContext.getVariantName());
    if (null != atlasDependencyTree) {
        ProcessOutputHandler outputHandler = new ParsingProcessOutputHandler(new ToolOutputParser(new DexParser(), Message.Kind.ERROR, LoggerWrapper.getLogger(DexByteCodeConverterHook.class)), new ToolOutputParser(new DexParser(), LoggerWrapper.getLogger(DexByteCodeConverterHook.class)), AtlasBuildContext.androidBuilderMap.get(variantContext.getProject()).getErrorReporter());
        // Dex each AWB bundle asynchronously; failures surface via waitForTasksWithQuickFail below.
        for (final AwbBundle awbBundle : atlasDependencyTree.getAwbBundles()) {
            waitableExecutor.execute((Callable<Void>) () -> {
                try {
                    long start = System.currentTimeMillis();
                    // create dex
                    File dexOutputFile = ((AppVariantContext) variantContext).getAwbDexOutput(awbBundle.getName());
                    if (dexOutputFile.exists()) {
                        FileUtils.cleanDirectory(dexOutputFile);
                    } else {
                        FileUtils.forceMkdir(dexOutputFile);
                    }
                    // if some of our .jar input files exist, just reset the inputDir to null
                    AwbTransform awbTransform = variantOutputContext.getAwbTransformMap().get(awbBundle.getName());
                    List<File> inputFiles = new ArrayList<File>();
                    inputFiles.addAll(awbTransform.getInputFiles());
                    inputFiles.addAll(awbTransform.getInputLibraries());
                    if (null != awbTransform.getInputDirs()) {
                        inputFiles.addAll(awbTransform.getInputDirs());
                    }
                    // Dispatch on the configured dexer: classic dx vs. D8.
                    if (variantContext.getScope().getDexer() == DexerTool.DX) {
                        AtlasBuildContext.androidBuilderMap.get(variantContext.getProject()).convertByteCode(inputFiles, dexOutputFile, false, null, dexOptions, outputHandler, true);
                    } else if (variantContext.getScope().getDexer() == DexerTool.D8) {
                        new AtlasD8Creator(inputFiles, ((AppVariantContext) variantContext).getAwbDexAchiveOutput(awbBundle), multidex, mainDexList, dexOptions, minSdkVersion, fileCache, processOutputHandler, variantContext, variantOutputContext).create(awbBundle);
                    }
                    // mbundles are remembered so their dexes can be folded into the main output later.
                    if (awbBundle.isMBundle) {
                        mBundleSets.add(awbBundle);
                    }
                } catch (Exception e) {
                    throw new ProcessException(awbBundle.getName(), e);
                }
                return null;
            });
        }
    }
    File tempDexFolder = null;
    // Main-dex inputs: all main-dex jars plus class directories for this variant.
    inputFile.addAll(AtlasBuildContext.atlasMainDexHelperMap.get(variantContext.getVariantName()).getAllMainDexJars());
    inputFile.addAll(AtlasBuildContext.atlasMainDexHelperMap.get(variantContext.getVariantName()).getInputDirs());
    logger.warning("maindex inputFile size :" + inputFile.size());
    // D8 path: delegate main-dex creation entirely to AtlasD8Creator and return early.
    if (variantContext.getScope().getDexer() == DexerTool.D8) {
        AtlasD8Creator atlasD8Creator = new AtlasD8Creator(inputs, ((AppVariantContext) variantContext).getMainDexAchive(), multidex, mainDexList, dexOptions, minSdkVersion, fileCache, processOutputHandler, variantContext, variantOutputContext);
        atlasD8Creator.setMainDexOut(outDexFolder);
        atlasD8Creator.create(new AwbBundle());
        return;
    }
    initDexExecutorService(dexOptions);
    if (!multidex) {
        // Single-dex path: try the file cache keyed on an MD5 of the sorted input list.
        if (fileCache != null && globalCacheBuilder == null) {
            globalCacheBuilder = new FileCache.Inputs.Builder(FileCache.Command.PREDEX_LIBRARY).putBoolean("multidex", false).putLong("minisdk", minSdkVersion).putString("dexoptions", dexOptions.getAdditionalParameters().toString()).putBoolean("jumbomode", dexOptions.getJumboMode()).putString("type", type);
            inputFile = new ArrayList<>(inputFile);
            Collections.sort((List<File>) inputFile);
            FileCache.Inputs inputsKey = globalCacheBuilder.putString("md5", MD5Util.getFileMd5(inputFile)).build();
            try {
                fileCache.createFile(outDexFolder, inputsKey, () -> {
                    logger.warning("dex inputFile missCache: " + inputFile.toString());
                    outDexFolder.mkdirs();
                    DexByteCodeConverterHook.super.convertByteCode(inputFile, outDexFolder, multidex, mainDexList, dexOptions, processOutputHandler, minSdkVersion);
                });
            } catch (ExecutionException e) {
                // NOTE(review): the failure is recorded but only the stack trace is printed here;
                // failures list is checked later in the multidex branch only.
                e.printStackTrace();
                failures.add(e);
            }
        } else {
            DexByteCodeConverterHook.super.convertByteCode(inputFile, outDexFolder, multidex, mainDexList, dexOptions, processOutputHandler, minSdkVersion);
        }
        // try {
        // for (Future future : futureList) {
        // future.get();
        // }
        // } catch (Exception e) {
        // throw new ProcessException(e);
        // }
    } else {
        // Legacy multidex path: dex each input separately, then merge the archives.
        if (mainDexList != null && !mainDexList.exists()) {
            generateMainDexList(mainDexList);
        }
        tempDexFolder = variantOutputContext.getMainDexOutDir();
        if (tempDexFolder.exists()) {
            FileUtils.cleanDirectory(tempDexFolder);
        }
        File finalTempDexFolder = tempDexFolder;
        if (fileCache != null && globalCacheBuilder == null) {
            if (mainDexList != null) {
                globalCacheBuilder = new FileCache.Inputs.Builder(FileCache.Command.PREDEX_LIBRARY).putBoolean("multidex", true).putFile("multidexlist", mainDexList, FileCache.FileProperties.HASH).putLong("minisdk", minSdkVersion).putString("dexoptions", dexOptions.getAdditionalParameters().toString()).putBoolean("jumbomode", dexOptions.getJumboMode()).putString("type", type);
            } else {
                globalCacheBuilder = new FileCache.Inputs.Builder(FileCache.Command.PREDEX_LIBRARY).putBoolean("multidex", true).putLong("minisdk", minSdkVersion).putString("dexoptions", dexOptions.getAdditionalParameters().toString()).putBoolean("jumbomode", dexOptions.getJumboMode()).putString("type", type);
            }
        }
        // A single input cannot be parallelized; split it first.
        if (inputFile.size() == 1) {
            splitFile();
        }
        // Dex every input into its own subfolder; cache hits skip the dexer entirely.
        inputFile.parallelStream().forEach(file -> {
            File outPutFolder = new File(finalTempDexFolder, FilenameUtils.getBaseName(file.getName()));
            if (globalCacheBuilder != null && file.isFile()) {
                FileCache.Inputs.Builder builder = copyOf(globalCacheBuilder);
                FileCache.Inputs cacheInputs = null;
                if (file.isFile()) {
                    cacheInputs = builder.putFile("hash", file, FileCache.FileProperties.HASH).build();
                } else {
                    // NOTE(review): relies on listFiles returning a List for the cast below —
                    // TODO confirm against the FileUtils implementation in use.
                    Collection<File> files = FileUtils.listFiles(file, new String[] { "class" }, true);
                    Collections.sort((List<File>) files);
                    cacheInputs = builder.putString("hash", MD5Util.getFileMd5(files)).build();
                }
                try {
                    fileCache.createFile(outPutFolder, cacheInputs, () -> {
                        logger.warning("maindex inputFile missCache:" + file.getAbsolutePath());
                        outPutFolder.mkdirs();
                        DexByteCodeConverterHook.super.convertByteCode(Arrays.asList(file), outPutFolder, true, mainDexList, dexOptions, processOutputHandler, minSdkVersion);
                    });
                } catch (Exception e) {
                    failures.add(e);
                    e.printStackTrace();
                }
            } else {
                logger.warning("maindex inputFile:" + file.getAbsolutePath());
                outPutFolder.mkdirs();
                try {
                    DexByteCodeConverterHook.super.convertByteCode(Arrays.asList(file), outPutFolder, true, mainDexList, dexOptions, processOutputHandler, minSdkVersion);
                } catch (Exception e) {
                    e.printStackTrace();
                    failures.add(e);
                }
            }
        });
        // Surface the first parallel-dexing failure, if any.
        if (failures.size() > 0) {
            throw new ProcessException(failures.get(0));
        }
        // for (Future future : futureList) {
        // try {
        // future.get();
        // } catch (ExecutionException e) {
        // throw new ProcessException(e);
        // }
        // }
        //
        // inputFile.stream().parallel().forEach(new Consumer<File>() {
        // @Override
        // public void accept(File file) {
        // fileCache.createFile()fileCacheMap.get(file)
        // }
        // });
        Collection<File> dexFiles = FileUtils.listFiles(tempDexFolder, new String[] { "dex" }, true);
        if (dexFiles != null) {
            logger.warning("maindex outDexFiles size:" + dexFiles.size());
            dexPaths = dexFiles.stream().map(file -> file.toPath()).collect(Collectors.toList());
        }
        mainforkJoinPool = new ForkJoinPool();
        atlasDexArchiveMerger = new AtlasDexArchiveMerger(mainforkJoinPool);
        // When bundle dexes are NOT merged in, merge the main dex archives immediately;
        // otherwise the merge is deferred until the mbundle dex paths are collected below.
        if (!variantContext.getAtlasExtension().getTBuildConfig().getMergeBundlesDex()) {
            try {
                atlasDexArchiveMerger.mergeDexArchives(dexPaths, outDexFolder.toPath(), mainDexList == null ? null : mainDexList.toPath(), DexingType.LEGACY_MULTIDEX);
            } catch (DexArchiveMergerException e) {
                throw new ProcessException(e);
            }
        }
    }
    // Wait for all AWB dexing tasks started at the top of the method.
    waitableExecutor.waitForTasksWithQuickFail(true);
    atomicInteger.set(FileUtils.listFiles(outDexFolder, new String[] { "dex" }, true).size());
    logger.warning("maindex final dexs size:" + atomicInteger.get());
    // Fold each mbundle's classes.dex into the main output: either moved in as classesN.dex,
    // or queued for the combined merge below.
    for (AwbBundle bundle : mBundleSets) {
        File awbDex = new File(((AppVariantContext) variantContext).getAwbDexOutput(bundle.getName()), "classes.dex");
        if (awbDex.exists() && !variantContext.getAtlasExtension().getTBuildConfig().getMergeBundlesDex()) {
            FileUtils.moveFile(awbDex, new File(outDexFolder, "classes" + atomicInteger.incrementAndGet() + ".dex"));
        } else if (awbDex.exists() && variantContext.getAtlasExtension().getTBuildConfig().getMergeBundlesDex()) {
            dexPaths.add(awbDex.toPath());
        } else {
            logger.warning(awbDex.getAbsoluteFile() + " is not exist!");
        }
    }
    if (variantContext.getAtlasExtension().getTBuildConfig().getMergeBundlesDex()) {
        try {
            atlasDexArchiveMerger.mergeDexArchives(dexPaths, outDexFolder.toPath(), null, DexingType.LEGACY_MULTIDEX);
        } catch (DexArchiveMergerException e) {
            // NOTE(review): a merge failure here is only printed, not propagated — the build
            // would continue with incomplete dex output; consider rethrowing as ProcessException.
            e.printStackTrace();
        } finally {
        }
    }
    if (tempDexFolder != null && tempDexFolder.exists()) {
        FileUtils.deleteDirectory(tempDexFolder);
    }
}
Aggregations