Use of com.facebook.buck.util.concurrent.WeightedListeningExecutorService in project buck by facebook.
The class JavaBuildGraphProcessor, method run().
/**
* Creates the appropriate target graph and other resources needed for the {@link Processor} and
* runs it. This method will take responsibility for cleaning up the executor service after it
* runs.
*/
static void run(
    final CommandRunnerParams params,
    final AbstractCommand command,
    final Processor processor)
    throws ExitCodeException, InterruptedException, IOException {
  final ConcurrencyLimit concurrencyLimit = command.getConcurrencyLimit(params.getBuckConfig());
  try (CommandThreadManager pool =
      new CommandThreadManager(command.getClass().getName(), concurrencyLimit)) {
    Cell cell = params.getCell();
    WeightedListeningExecutorService executorService = pool.getExecutor();
    // Ideally, we should be able to construct the TargetGraph quickly, assuming most of it is
    // already in memory courtesy of buckd. As a further optimization, we could pass an option to
    // buck.py telling it to skip reading the BUCK.autodeps files when parsing the BUCK files,
    // because we never need to consider the existing auto-generated deps when creating the new
    // auto-generated deps. If we did that, we would have to keep the nodes for that version of
    // the graph separate from the ones that are actually used for building.
    TargetGraph graph;
    try {
      graph =
          params
              .getParser()
              .buildTargetGraphForTargetNodeSpecs(
                  params.getBuckEventBus(),
                  cell,
                  command.getEnableParserProfiling(),
                  executorService,
                  ImmutableList.of(
                      TargetNodePredicateSpec.of(
                          x -> true,
                          BuildFileSpec.fromRecursivePath(Paths.get(""), cell.getRoot()))),
                  /* ignoreBuckAutodepsFiles */ true)
              .getTargetGraph();
    } catch (BuildTargetException | BuildFileParseException e) {
      params
          .getBuckEventBus()
          .post(ConsoleEvent.severe(MoreExceptions.getHumanReadableOrLocalizedMessage(e)));
      throw new ExitCodeException(1);
    }
    BuildRuleResolver buildRuleResolver =
        new BuildRuleResolver(graph, new DefaultTargetNodeToBuildRuleTransformer());
    CachingBuildEngineBuckConfig cachingBuildEngineBuckConfig =
        params.getBuckConfig().getView(CachingBuildEngineBuckConfig.class);
    LocalCachingBuildEngineDelegate cachingBuildEngineDelegate =
        new LocalCachingBuildEngineDelegate(params.getFileHashCache());

    // Create a BuildEngine because we store symbol information as build artifacts.
    BuildEngine buildEngine =
        new CachingBuildEngine(
            cachingBuildEngineDelegate,
            executorService,
            executorService,
            new DefaultStepRunner(),
            CachingBuildEngine.BuildMode.SHALLOW,
            cachingBuildEngineBuckConfig.getBuildDepFiles(),
            cachingBuildEngineBuckConfig.getBuildMaxDepFileCacheEntries(),
            cachingBuildEngineBuckConfig.getBuildArtifactCacheSizeLimit(),
            params.getObjectMapper(),
            buildRuleResolver,
            cachingBuildEngineBuckConfig.getResourceAwareSchedulingInfo(),
            new RuleKeyFactoryManager(
                params.getBuckConfig().getKeySeed(),
                fs -> cachingBuildEngineDelegate.getFileHashCache(),
                buildRuleResolver,
                cachingBuildEngineBuckConfig.getBuildInputRuleKeyFileSizeLimit(),
                new DefaultRuleKeyCache<>()));
    BuckEventBus eventBus = params.getBuckEventBus();
    ExecutionContext executionContext =
        ExecutionContext.builder()
            .setConsole(params.getConsole())
            .setConcurrencyLimit(concurrencyLimit)
            .setBuckEventBus(eventBus)
            .setEnvironment(/* environment */ ImmutableMap.of())
            .setExecutors(
                ImmutableMap.<ExecutorPool, ListeningExecutorService>of(
                    ExecutorPool.CPU, executorService))
            .setJavaPackageFinder(params.getJavaPackageFinder())
            .setObjectMapper(params.getObjectMapper())
            .setPlatform(params.getPlatform())
            .setCellPathResolver(params.getCell().getCellPathResolver())
            .build();
    SourcePathResolver pathResolver =
        new SourcePathResolver(new SourcePathRuleFinder(buildRuleResolver));
    BuildEngineBuildContext buildContext =
        BuildEngineBuildContext.builder()
            .setBuildContext(
                BuildContext.builder()
                    .setActionGraph(new ActionGraph(ImmutableList.of()))
                    .setSourcePathResolver(pathResolver)
                    .setJavaPackageFinder(executionContext.getJavaPackageFinder())
                    .setEventBus(eventBus)
                    .build())
            .setClock(params.getClock())
            .setArtifactCache(params.getArtifactCacheFactory().newInstance())
            .setBuildId(eventBus.getBuildId())
            .setObjectMapper(params.getObjectMapper())
            .setEnvironment(executionContext.getEnvironment())
            .setKeepGoing(false)
            .build();
    // Traverse the TargetGraph to find all of the auto-generated dependencies.
    JavaDepsFinder javaDepsFinder =
        JavaDepsFinder.createJavaDepsFinder(
            params.getBuckConfig(),
            params.getCell().getCellPathResolver(),
            params.getObjectMapper(),
            buildContext,
            executionContext,
            buildEngine);
    processor.process(graph, javaDepsFinder, executorService);
  }
}
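For context, the executor lifecycle in run() is the standard Buck command pattern: a CommandThreadManager is opened in a try-with-resources block, its WeightedListeningExecutorService is handed to the parser and the build engine, and the pool is torn down when the block exits. The stripped-down sketch below shows just that lifecycle; runWithSharedPool and the work callback are hypothetical names introduced for illustration, while the rest is the Buck API already used in the example above.

// A minimal sketch of the executor lifecycle in run(), assuming the Buck types shown above.
// runWithSharedPool and the work callback are hypothetical; they stand in for the parsing
// and build-engine code that consumes the shared pool.
static void runWithSharedPool(
    CommandRunnerParams params,
    AbstractCommand command,
    java.util.function.Consumer<WeightedListeningExecutorService> work)
    throws InterruptedException, IOException {
  ConcurrencyLimit concurrencyLimit = command.getConcurrencyLimit(params.getBuckConfig());
  // CommandThreadManager owns the thread pools; try-with-resources guarantees they are
  // shut down when the command finishes, which is the cleanup that run() promises to do.
  try (CommandThreadManager pool =
      new CommandThreadManager(command.getClass().getName(), concurrencyLimit)) {
    WeightedListeningExecutorService executorService = pool.getExecutor();
    // In run(), this same pool backs both target graph parsing and the CachingBuildEngine.
    work.accept(executorService);
  }
}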
Use of com.facebook.buck.util.concurrent.WeightedListeningExecutorService in project buck by facebook.
The class CacheCommand, method runWithoutHelp().
@Override
public int runWithoutHelp(CommandRunnerParams params) throws IOException, InterruptedException {
  params.getBuckEventBus().post(ConsoleEvent.fine("cache command start"));

  if (isNoCache()) {
    params.getBuckEventBus().post(ConsoleEvent.severe("Caching is disabled."));
    return 1;
  }

  List<String> arguments = getArguments();
  if (arguments.isEmpty()) {
    params.getBuckEventBus().post(ConsoleEvent.severe("No cache keys specified."));
    return 1;
  }

  if (outputDir != null) {
    outputPath = Optional.of(Paths.get(outputDir));
    Files.createDirectories(outputPath.get());
  }

  ArtifactCache cache = params.getArtifactCacheFactory().newInstance();
  List<RuleKey> ruleKeys = new ArrayList<>();
  for (String hash : arguments) {
    ruleKeys.add(new RuleKey(hash));
  }

  Path tmpDir = Files.createTempDirectory("buck-cache-command");
  BuildEvent.Started started = BuildEvent.started(getArguments(), false);

  List<ArtifactRunner> results = null;
  try (CommandThreadManager pool =
      new CommandThreadManager("Build", getConcurrencyLimit(params.getBuckConfig()))) {
    WeightedListeningExecutorService executor = pool.getExecutor();

    fakeOutParseEvents(params.getBuckEventBus());

    // Post the build started event, setting it to the Parser recorded start time if appropriate.
    if (params.getParser().getParseStartTime().isPresent()) {
      params.getBuckEventBus().post(started, params.getParser().getParseStartTime().get());
    } else {
      params.getBuckEventBus().post(started);
    }

    // Fetch all artifacts.
    List<ListenableFuture<ArtifactRunner>> futures = new ArrayList<>();
    for (RuleKey ruleKey : ruleKeys) {
      futures.add(executor.submit(new ArtifactRunner(ruleKey, tmpDir, cache)));
    }

    // Wait for all executions to complete or fail.
    try {
      results = Futures.allAsList(futures).get();
    } catch (ExecutionException ex) {
      params.getConsole().printBuildFailure("Failed");
      ex.printStackTrace(params.getConsole().getStdErr());
      // Bail out here; otherwise results stays null and dereferencing it below would throw.
      return 1;
    }
  }
  int totalRuns = results.size();
  String resultString = "";
  int goodRuns = 0;

  for (ArtifactRunner r : results) {
    if (r.completed) {
      goodRuns++;
    }
    resultString += r.resultString;

    if (!outputPath.isPresent()) {
      // legacy output
      if (r.completed) {
        params
            .getConsole()
            .printSuccess(
                String.format(
                    "Successfully downloaded artifact with id %s at %s .", r.ruleKey, r.artifact));
      } else {
        params
            .getConsole()
            .printErrorText(
                String.format("Failed to retrieve an artifact with id %s.", r.ruleKey));
      }
    }
  }

  int exitCode = (totalRuns == goodRuns) ? 0 : 1;
  params.getBuckEventBus().post(BuildEvent.finished(started, exitCode));

  if (outputPath.isPresent()) {
    if (totalRuns == goodRuns) {
      params.getConsole().printSuccess("Successfully downloaded all artifacts.");
    } else {
      params.getConsole().printErrorText(
          String.format("Downloaded %d of %d artifacts", goodRuns, totalRuns));
    }
    params.getConsole().getStdOut().println(resultString);
  }

  return exitCode;
}
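The fetch loop in runWithoutHelp() is a generic fan-out/fan-in over the weighted executor: each RuleKey becomes a Callable submitted to the pool, and Guava's Futures.allAsList gathers every result before the command continues. The sketch below isolates that pattern under the same assumptions as the example; fetchArtifact is a hypothetical helper standing in for the work ArtifactRunner performs.

// Fan-out/fan-in over a WeightedListeningExecutorService, mirroring the fetch loop above.
// fetchArtifact is a hypothetical helper; everything else follows the API used in the example.
static List<String> fetchAll(
    WeightedListeningExecutorService executor,
    List<RuleKey> ruleKeys,
    Path tmpDir,
    ArtifactCache cache) throws InterruptedException {
  List<ListenableFuture<String>> futures = new ArrayList<>();
  for (RuleKey ruleKey : ruleKeys) {
    // submit() returns a ListenableFuture because the service extends Guava's
    // ListeningExecutorService; the pool decides when each task actually runs.
    Callable<String> task = () -> fetchArtifact(ruleKey, tmpDir, cache);
    futures.add(executor.submit(task));
  }
  try {
    // Block until every fetch finishes; allAsList fails if any single task throws.
    return Futures.allAsList(futures).get();
  } catch (ExecutionException e) {
    // A task threw: report the failure rather than returning partial results.
    e.printStackTrace();
    return ImmutableList.of();
  }
}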