use of com.google.common.base.Joiner in project intellij-community by JetBrains.
the class PyTestCase method compareStringSets.
/**
 * Compares string sets, sorting them and displaying them one per line to make comparison easier.
 *
 * @param message  message to display in case of error
 * @param actual   actual set
 * @param expected expected set
 */
protected static void compareStringSets(@NotNull final String message, @NotNull final Set<String> actual, @NotNull final Set<String> expected) {
    final Joiner joiner = Joiner.on("\n");
    Assert.assertEquals(message, joiner.join(new TreeSet<>(actual)), joiner.join(new TreeSet<>(expected)));
}
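For reference, a minimal standalone sketch of the same pattern (the set contents are invented): routing the set through a TreeSet makes the joined output deterministic, so a failing assertEquals shows a clean line-by-line diff.

import com.google.common.base.Joiner;

import java.util.Arrays;
import java.util.Set;
import java.util.TreeSet;

public class SortedJoinDemo {

    public static void main(String[] args) {
        // Hypothetical unordered set; TreeSet sorts it before joining.
        Set<String> actual = new TreeSet<>(Arrays.asList("beta", "alpha", "gamma"));
        // Prints "alpha", "beta", "gamma" on separate lines.
        System.out.println(Joiner.on("\n").join(actual));
    }
}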
use of com.google.common.base.Joiner in project smali by JesusFreke.
the class ClassPathResolver method loadLocalOrDeviceBootClassPathEntry.
private void loadLocalOrDeviceBootClassPathEntry(@Nonnull String entry) throws IOException, NoDexException, NotFoundException {
    // first, see if the entry is a valid local path
    if (loadLocalClassPathEntry(entry)) {
        return;
    }
    // It's not a local path, so let's try to resolve it as a device path, relative to one of the provided
    // directories
    List<String> pathComponents = splitDevicePath(entry);
    Joiner pathJoiner = Joiner.on(File.pathSeparatorChar);
    for (String directory : classPathDirs) {
        File directoryFile = new File(directory);
        if (!directoryFile.exists()) {
            continue;
        }
        for (int i = 0; i < pathComponents.size(); i++) {
            String partialPath = pathJoiner.join(pathComponents.subList(i, pathComponents.size()));
            File entryFile = new File(directoryFile, partialPath);
            if (entryFile.exists() && entryFile.isFile()) {
                loadEntry(entryFile, true);
                return;
            }
        }
    }
    throw new NotFoundException("Could not find classpath entry %s", entry);
}
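A self-contained sketch of the suffix-matching idea in that loop (the component values and the '/' separator are illustrative, not taken from smali):

import com.google.common.base.Joiner;

import java.util.Arrays;
import java.util.List;

public class SuffixJoinDemo {

    public static void main(String[] args) {
        // Hypothetical device path already split into components.
        List<String> components = Arrays.asList("system", "framework", "framework.jar");
        Joiner pathJoiner = Joiner.on('/');
        // Tries progressively shorter suffixes, mirroring the resolver loop:
        // system/framework/framework.jar, framework/framework.jar, framework.jar
        for (int i = 0; i < components.size(); i++) {
            System.out.println(pathJoiner.join(components.subList(i, components.size())));
        }
    }
}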
use of com.google.common.base.Joiner in project gerrit by GerritCodeReview.
the class ChangeUpdate method applyImpl.
@Override
protected CommitBuilder applyImpl(RevWalk rw, ObjectInserter ins, ObjectId curr) throws OrmException, IOException {
    checkState(deleteCommentRewriter == null, "cannot update and rewrite ref in one BatchUpdate");
    CommitBuilder cb = new CommitBuilder();
    int ps = psId != null ? psId.get() : getChange().currentPatchSetId().get();
    StringBuilder msg = new StringBuilder();
    if (commitSubject != null) {
        msg.append(commitSubject);
    } else {
        msg.append("Update patch set ").append(ps);
    }
    msg.append("\n\n");
    if (changeMessage != null) {
        msg.append(changeMessage);
        msg.append("\n\n");
    }
    addPatchSetFooter(msg, ps);
    if (currentPatchSet) {
        addFooter(msg, FOOTER_CURRENT, Boolean.TRUE);
    }
    if (psDescription != null) {
        addFooter(msg, FOOTER_PATCH_SET_DESCRIPTION, psDescription);
    }
    if (changeId != null) {
        addFooter(msg, FOOTER_CHANGE_ID, changeId);
    }
    if (subject != null) {
        addFooter(msg, FOOTER_SUBJECT, subject);
    }
    if (branch != null) {
        addFooter(msg, FOOTER_BRANCH, branch);
    }
    if (status != null) {
        addFooter(msg, FOOTER_STATUS, status.name().toLowerCase());
    }
    if (topic != null) {
        addFooter(msg, FOOTER_TOPIC, topic);
    }
    if (commit != null) {
        addFooter(msg, FOOTER_COMMIT, commit);
    }
    if (assignee != null) {
        if (assignee.isPresent()) {
            addFooter(msg, FOOTER_ASSIGNEE);
            addIdent(msg, assignee.get()).append('\n');
        } else {
            addFooter(msg, FOOTER_ASSIGNEE).append('\n');
        }
    }
    Joiner comma = Joiner.on(',');
    if (hashtags != null) {
        addFooter(msg, FOOTER_HASHTAGS, comma.join(hashtags));
    }
    if (tag != null) {
        addFooter(msg, FOOTER_TAG, tag);
    }
    if (groups != null) {
        addFooter(msg, FOOTER_GROUPS, comma.join(groups));
    }
    for (Map.Entry<Account.Id, ReviewerStateInternal> e : reviewers.entrySet()) {
        addFooter(msg, e.getValue().getFooterKey());
        addIdent(msg, e.getKey()).append('\n');
    }
    for (Map.Entry<Address, ReviewerStateInternal> e : reviewersByEmail.entrySet()) {
        addFooter(msg, e.getValue().getByEmailFooterKey(), e.getKey().toString());
    }
    for (Table.Cell<String, Account.Id, Optional<Short>> c : approvals.cellSet()) {
        addFooter(msg, FOOTER_LABEL);
        // Label names/values are safe to append without sanitizing.
        if (!c.getValue().isPresent()) {
            msg.append('-').append(c.getRowKey());
        } else {
            msg.append(LabelVote.create(c.getRowKey(), c.getValue().get()).formatWithEquals());
        }
        Account.Id id = c.getColumnKey();
        if (!id.equals(getAccountId())) {
            addIdent(msg.append(' '), id);
        }
        msg.append('\n');
    }
    if (submissionId != null) {
        addFooter(msg, FOOTER_SUBMISSION_ID, submissionId);
    }
    if (submitRecords != null) {
        for (SubmitRecord rec : submitRecords) {
            addFooter(msg, FOOTER_SUBMITTED_WITH).append(rec.status);
            if (rec.errorMessage != null) {
                msg.append(' ').append(sanitizeFooter(rec.errorMessage));
            }
            msg.append('\n');
            if (rec.labels != null) {
                for (SubmitRecord.Label label : rec.labels) {
                    // Label names/values are safe to append without sanitizing.
                    addFooter(msg, FOOTER_SUBMITTED_WITH).append(label.status).append(": ").append(label.label);
                    if (label.appliedBy != null) {
                        msg.append(": ");
                        addIdent(msg, label.appliedBy);
                    }
                    msg.append('\n');
                }
            }
        }
    }
    if (!Objects.equals(accountId, realAccountId)) {
        addFooter(msg, FOOTER_REAL_USER);
        addIdent(msg, realAccountId).append('\n');
    }
    if (readOnlyUntil != null) {
        addFooter(msg, FOOTER_READ_ONLY_UNTIL, ChangeNoteUtil.formatTime(serverIdent, readOnlyUntil));
    }
    if (isPrivate != null) {
        addFooter(msg, FOOTER_PRIVATE, isPrivate);
    }
    if (workInProgress != null) {
        addFooter(msg, FOOTER_WORK_IN_PROGRESS, workInProgress);
    }
    cb.setMessage(msg.toString());
    try {
        ObjectId treeId = storeRevisionNotes(rw, ins, curr);
        if (treeId != null) {
            cb.setTreeId(treeId);
        }
    } catch (ConfigInvalidException e) {
        throw new OrmException(e);
    }
    return cb;
}
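The only Joiner usage in this long method is the single comma joiner reused for the Hashtags and Groups footers. A minimal sketch of that piece in isolation (the footer key and values are invented):

import com.google.common.base.Joiner;

import java.util.Arrays;
import java.util.List;

public class FooterJoinDemo {

    public static void main(String[] args) {
        // Joiner instances are immutable and thread-safe, so one can be shared and reused.
        Joiner comma = Joiner.on(',');
        List<String> hashtags = Arrays.asList("feature", "ui", "blocking");
        // Prints "Hashtags: feature,ui,blocking"
        System.out.println("Hashtags: " + comma.join(hashtags));
    }
}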
use of com.google.common.base.Joiner in project GeoGig by boundlessgeo.
the class Config method buildValueString.
/**
* Builds a single string out of all of the string parameters after the first one.
*
* @return the concatenated value string
*/
private String buildValueString() {
    if (nameValuePair.isEmpty())
        return null;
    ArrayList<String> arrayCopy = new ArrayList<String>(nameValuePair);
    // Remove name
    arrayCopy.remove(0);
    if (arrayCopy.isEmpty())
        return null;
    Joiner stringJoiner = Joiner.on(" ");
    return stringJoiner.join(arrayCopy);
}
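A runnable sketch of the same space-separated join (the parameter list is hypothetical):

import com.google.common.base.Joiner;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class ConfigValueDemo {

    public static void main(String[] args) {
        // Hypothetical "name value value..." parameter list, as buildValueString expects.
        List<String> nameValuePair = new ArrayList<>(Arrays.asList("user.name", "John", "Doe"));
        List<String> arrayCopy = new ArrayList<>(nameValuePair);
        arrayCopy.remove(0); // drop the name, keep the value parts
        // Prints "John Doe"
        System.out.println(Joiner.on(" ").join(arrayCopy));
    }
}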
use of com.google.common.base.Joiner in project cdap by caskdata.
the class SparkRuntimeService method startUp.
@Override
protected void startUp() throws Exception {
    // Additional Spark job initialization at run-time.
    // This context is for calling initialize and onFinish on the Spark program.
    // Field injection for the Spark program has to be done here instead of in SparkProgramRunner
    // for the @UseDataSet injection, since the dataset cache being used in Spark is a MultiThreadDatasetCache.
    // The AbstractExecutionThreadService guarantees that startUp(), run() and shutDown() all happen in the same thread.
    Reflections.visit(spark, spark.getClass(), new PropertyFieldSetter(runtimeContext.getSparkSpecification().getProperties()), new DataSetFieldSetter(runtimeContext.getDatasetCache()), new MetricsFieldSetter(runtimeContext));
    // Creates a temporary directory locally for storing all generated files.
    File tempDir = DirUtils.createTempDir(new File(cConf.get(Constants.CFG_LOCAL_DATA_DIR), cConf.get(Constants.AppFabric.TEMP_DIR)).getAbsoluteFile());
    tempDir.mkdirs();
    this.cleanupTask = createCleanupTask(tempDir, System.getProperties());
    try {
        initialize();
        SparkRuntimeContextConfig contextConfig = new SparkRuntimeContextConfig(runtimeContext.getConfiguration());
        final File jobJar = generateJobJar(tempDir, contextConfig.isLocal(), cConf);
        final List<LocalizeResource> localizeResources = new ArrayList<>();
        String metricsConfPath;
        String classpath = "";
        if (contextConfig.isLocal()) {
            // In local mode, always copy (or link if local) user requested resources
            copyUserResources(context.getLocalizeResources(), tempDir);
            File metricsConf = SparkMetricsSink.writeConfig(new File(tempDir, CDAP_METRICS_PROPERTIES));
            metricsConfPath = metricsConf.getAbsolutePath();
        } else {
            // Localize all user requested files in distributed mode
            distributedUserResources(context.getLocalizeResources(), localizeResources);
            // Localize the program jar and the expanded program jar
            File programJar = Locations.linkOrCopy(runtimeContext.getProgram().getJarLocation(), new File(tempDir, SparkRuntimeContextProvider.PROGRAM_JAR_NAME));
            File expandedProgramJar = Locations.linkOrCopy(runtimeContext.getProgram().getJarLocation(), new File(tempDir, SparkRuntimeContextProvider.PROGRAM_JAR_EXPANDED_NAME));
            // Localize both the unexpanded and the expanded program jar
            localizeResources.add(new LocalizeResource(programJar));
            localizeResources.add(new LocalizeResource(expandedProgramJar, true));
            // Localize plugins
            if (pluginArchive != null) {
                localizeResources.add(new LocalizeResource(pluginArchive, true));
            }
            // Create and localize the launcher jar, which sets up services and the classloader for Spark containers
            localizeResources.add(new LocalizeResource(createLauncherJar(tempDir)));
            // Create the metrics conf file in the current directory, since the same value for the
            // "spark.metrics.conf" config needs to be used for both driver and executor processes.
            // Also localize the metrics conf file to the executor nodes.
            File metricsConf = SparkMetricsSink.writeConfig(new File(CDAP_METRICS_PROPERTIES));
            metricsConfPath = metricsConf.getName();
            localizeResources.add(new LocalizeResource(metricsConf));
            // Localize the cConf file
            localizeResources.add(new LocalizeResource(saveCConf(cConf, tempDir)));
            // Preserve and localize runtime information in the hConf
            Configuration hConf = contextConfig.set(runtimeContext, pluginArchive).getConfiguration();
            localizeResources.add(new LocalizeResource(saveHConf(hConf, tempDir)));
            // Joiner for creating the classpath for Spark containers
            Joiner joiner = Joiner.on(File.pathSeparator).skipNulls();
            // Localize the spark.jar archive, which contains all CDAP and dependency jars
            File sparkJar = new File(tempDir, CDAP_SPARK_JAR);
            classpath = joiner.join(Iterables.transform(buildDependencyJar(sparkJar), new Function<String, String>() {

                @Override
                public String apply(String name) {
                    return Paths.get("$PWD", CDAP_SPARK_JAR, name).toString();
                }
            }));
            localizeResources.add(new LocalizeResource(sparkJar, true));
            // Localize logback if there is one. It is placed at the beginning of the classpath.
            File logbackJar = ProgramRunners.createLogbackJar(new File(tempDir, "logback.xml.jar"));
            if (logbackJar != null) {
                localizeResources.add(new LocalizeResource(logbackJar));
                classpath = joiner.join(Paths.get("$PWD", logbackJar.getName()), classpath);
            }
            // Localize extra jars and append them to the end of the classpath
            List<String> extraJars = new ArrayList<>();
            for (URI jarURI : CConfigurationUtil.getExtraJars(cConf)) {
                extraJars.add(Paths.get("$PWD", LocalizationUtils.getLocalizedName(jarURI)).toString());
                localizeResources.add(new LocalizeResource(jarURI, false));
            }
            classpath = joiner.join(classpath, joiner.join(extraJars));
        }
        final Map<String, String> configs = createSubmitConfigs(tempDir, metricsConfPath, classpath, context.getLocalizeResources(), contextConfig.isLocal());
        submitSpark = new Callable<ListenableFuture<RunId>>() {

            @Override
            public ListenableFuture<RunId> call() throws Exception {
                // This happens when stop() was called while starting
                if (!isRunning()) {
                    return immediateCancelledFuture();
                }
                return sparkSubmitter.submit(runtimeContext, configs, localizeResources, jobJar, runtimeContext.getRunId());
            }
        };
    } catch (LinkageError e) {
        // This can happen if the user program is missing dependencies (CDAP-2543)
        throw new Exception(e.getMessage(), e);
    } catch (Throwable t) {
        cleanupTask.run();
        throw t;
    }
}
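The Joiner here is notable for skipNulls(), which lets optional classpath entries (such as a logback jar that may not exist) be dropped without special casing. A standalone sketch with invented paths:

import com.google.common.base.Joiner;

import java.io.File;
import java.util.Arrays;
import java.util.List;

public class ClasspathJoinDemo {

    public static void main(String[] args) {
        // skipNulls() silently drops null entries from the join.
        Joiner joiner = Joiner.on(File.pathSeparator).skipNulls();
        String logbackJar = null; // hypothetical: no logback jar was localized
        List<String> entries = Arrays.asList("$PWD/cdap-spark.jar/guava.jar", logbackJar, "$PWD/extra.jar");
        // On Unix prints "$PWD/cdap-spark.jar/guava.jar:$PWD/extra.jar"
        System.out.println(joiner.join(entries));
    }
}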