Use of com.google.common.collect.ImmutableSet in project intellij-community by JetBrains: class GithubTagListProvider, method toGithubTagList.
/**
 * Converts a JSON payload of GitHub tags into a set of {@link GithubTagInfo}.
 * Each array entry must be a JSON object; entries lacking either a "name" or a
 * "zipball_url" property are silently skipped rather than treated as errors.
 *
 * @param jsonElement the parsed response, expected to be a {@link JsonArray} of objects
 * @return an immutable set of the tags that carried both required properties
 * @throws GeneratorException if the element is not an array, or an array child is not an object
 */
@NotNull
private static ImmutableSet<GithubTagInfo> toGithubTagList(@NotNull JsonElement jsonElement) throws GeneratorException {
  // Guard clause keeps the happy path unindented.
  if (!(jsonElement instanceof JsonArray)) {
    // Message fixed: original read "is expected be instance of".
    throw new GeneratorException("jsonElement is expected to be instance of " + JsonArray.class.getName());
  }
  ImmutableSet.Builder<GithubTagInfo> tags = ImmutableSet.builder();
  for (JsonElement element : (JsonArray) jsonElement) {
    if (!(element instanceof JsonObject)) {
      throw new GeneratorException("Unexpected child element " + element.getClass().getName());
    }
    JsonObject obj = (JsonObject) element;
    JsonElement nameElement = obj.get("name");
    JsonElement zipballElement = obj.get("zipball_url");
    String name = nameElement != null ? nameElement.getAsString() : null;
    String zipball = zipballElement != null ? zipballElement.getAsString() : null;
    // Only complete entries become tags; partial entries are ignored by design.
    if (name != null && zipball != null) {
      tags.add(new GithubTagInfo(name, zipball));
    }
  }
  return tags.build();
}
Use of com.google.common.collect.ImmutableSet in project cdap by caskdata: class ArtifactStoreTest, method testConcurrentSnapshotWrite.
/**
 * Verifies that concurrent writes of the same SNAPSHOT artifact resolve consistently:
 * every writer races through a barrier, exactly one writer's metadata wins, and the
 * stored artifact reflects that single winner (one plugin class, named after the winner).
 */
@Category(SlowTests.class)
@Test
public void testConcurrentSnapshotWrite() throws Exception {
  // write parent
  Id.Artifact parentArtifactId = Id.Artifact.from(Id.Namespace.DEFAULT, "parent", "1.0.0");
  ArtifactMeta parentMeta = new ArtifactMeta(ArtifactClasses.builder().build());
  writeArtifact(parentArtifactId, parentMeta, "content");
  final ArtifactRange parentArtifacts = new ArtifactRange(NamespaceId.DEFAULT.getNamespace(), "parent", new ArtifactVersion("1.0.0"), new ArtifactVersion("2.0.0"));
  // start up a bunch of threads that will try and write the same artifact at the same time
  // only one of them should be able to write it
  int numThreads = 20;
  final Id.Artifact artifactId = Id.Artifact.from(Id.Namespace.DEFAULT, "abc", "1.0.0-SNAPSHOT");
  // use a barrier so they all try and write at the same time
  final CyclicBarrier barrier = new CyclicBarrier(numThreads);
  final CountDownLatch latch = new CountDownLatch(numThreads);
  ExecutorService executorService = Executors.newFixedThreadPool(numThreads);
  try {
    for (int i = 0; i < numThreads; i++) {
      final String writer = String.valueOf(i);
      executorService.execute(new Runnable() {
        @Override
        public void run() {
          try {
            barrier.await();
            ArtifactMeta meta = new ArtifactMeta(ArtifactClasses.builder().addPlugin(new PluginClass("plugin-type", "plugin" + writer, "", "classname", "cfg", ImmutableMap.<String, PluginPropertyField>of())).build(), ImmutableSet.of(parentArtifacts));
            writeArtifact(artifactId, meta, writer);
          } catch (InterruptedException | BrokenBarrierException | ArtifactAlreadyExistsException | IOException e) {
            // something went wrong, fail the test
            throw new RuntimeException(e);
          } catch (WriteConflictException e) {
            // these are ok though unexpected (means couldn't write after a bunch of retries too)
          } finally {
            latch.countDown();
          }
        }
      });
    }
    // wait for all writers to finish
    latch.await();
  } finally {
    // Fix: the pool was previously never shut down, leaking 20 threads per test run.
    executorService.shutdownNow();
  }
  // figure out which was the last writer by reading our data. all the writers should have been able to write,
  // and they should have all overwritten each other in a consistent manner
  ArtifactDetail detail = artifactStore.getArtifact(artifactId);
  // figure out the winning writer from the plugin name, which is 'plugin<writer>'
  String pluginName = detail.getMeta().getClasses().getPlugins().iterator().next().getName();
  String winnerWriter = pluginName.substring("plugin".length());
  ArtifactMeta expectedMeta = new ArtifactMeta(ArtifactClasses.builder().addPlugin(new PluginClass("plugin-type", "plugin" + winnerWriter, "", "classname", "cfg", ImmutableMap.<String, PluginPropertyField>of())).build(), ImmutableSet.of(parentArtifacts));
  assertEqual(artifactId, expectedMeta, winnerWriter, detail);
  // check only 1 plugin remains and that its the correct one
  Map<ArtifactDescriptor, Set<PluginClass>> pluginMap = artifactStore.getPluginClasses(NamespaceId.DEFAULT, parentArtifactId, "plugin-type");
  Map<ArtifactDescriptor, Set<PluginClass>> expected = Maps.newHashMap();
  expected.put(detail.getDescriptor(), ImmutableSet.<PluginClass>of(new PluginClass("plugin-type", "plugin" + winnerWriter, "", "classname", "cfg", ImmutableMap.<String, PluginPropertyField>of())));
  Assert.assertEquals(expected, pluginMap);
}
Use of com.google.common.collect.ImmutableSet in project GeoGig by boundlessgeo: class HttpMappedRemoteRepo, method listRefs.
/**
 * List the mapped versions of the remote's {@link Ref refs}. For example, if the remote ref
 * points to commit A, the returned ref will point to the commit that A is mapped to.
 *
 * @param getHeads whether to return refs in the {@code refs/heads} namespace
 * @param getTags whether to return refs in the {@code refs/tags} namespace
 * @return an immutable set of refs from the remote
 */
@Override
public ImmutableSet<Ref> listRefs(boolean getHeads, boolean getTags) {
    HttpURLConnection connection = null;
    ImmutableSet.Builder<Ref> builder = new ImmutableSet.Builder<Ref>();
    try {
        String expanded = repositoryURL.toString() + "/repo/manifest";
        connection = (HttpURLConnection) new URL(expanded).openConnection();
        connection.setRequestMethod("GET");
        connection.setUseCaches(false);
        // Fix: removed setDoOutput(true) — it declares intent to send a request body,
        // which a GET never does here; opening the output stream would have silently
        // turned the request into a POST.
        // Get Response
        InputStream is = connection.getInputStream();
        BufferedReader rd = new BufferedReader(new InputStreamReader(is));
        String line;
        try {
            while ((line = rd.readLine()) != null) {
                if ((getHeads && line.startsWith("refs/heads")) || (getTags && line.startsWith("refs/tags"))) {
                    Ref remoteRef = HttpUtils.parseRef(line);
                    Ref newRef = remoteRef;
                    // Symbolic refs are returned as-is; concrete refs known to the local
                    // graph database are rewritten to their mapped commit, if one exists.
                    if (!(newRef instanceof SymRef) && localRepository.graphDatabase().exists(remoteRef.getObjectId())) {
                        ObjectId mappedCommit = localRepository.graphDatabase().getMapping(remoteRef.getObjectId());
                        if (mappedCommit != null) {
                            newRef = new Ref(remoteRef.getName(), mappedCommit);
                        }
                    }
                    builder.add(newRef);
                }
            }
        } finally {
            // Closing the reader also closes the underlying connection stream.
            rd.close();
        }
    } catch (Exception e) {
        throw Throwables.propagate(e);
    } finally {
        HttpUtils.consumeErrStreamAndCloseConnection(connection);
    }
    return builder.build();
}
Use of com.google.common.collect.ImmutableSet in project GeoGig by boundlessgeo: class HttpRemoteRepo, method listRefs.
/**
 * Lists the remote's {@link Ref refs} by fetching the remote manifest over HTTP
 * and parsing each line that falls in a requested namespace.
 *
 * @param getHeads whether to return refs in the {@code refs/heads} namespace
 * @param getTags whether to return refs in the {@code refs/tags} namespace
 * @return an immutable set of refs from the remote
 */
@Override
public ImmutableSet<Ref> listRefs(final boolean getHeads, final boolean getTags) {
    ImmutableSet.Builder<Ref> refs = new ImmutableSet.Builder<Ref>();
    HttpURLConnection connection = null;
    try {
        final String manifestUrl = repositoryURL.toString() + "/repo/manifest";
        connection = HttpUtils.connect(manifestUrl);
        // Read the manifest line by line; each matching line is one ref.
        InputStream responseStream = HttpUtils.getResponseStream(connection);
        BufferedReader reader = new BufferedReader(new InputStreamReader(responseStream));
        try {
            for (String refLine = reader.readLine(); refLine != null; refLine = reader.readLine()) {
                boolean wantedHead = getHeads && refLine.startsWith("refs/heads");
                boolean wantedTag = getTags && refLine.startsWith("refs/tags");
                if (wantedHead || wantedTag) {
                    refs.add(HttpUtils.parseRef(refLine));
                }
            }
        } finally {
            reader.close();
        }
    } catch (Exception e) {
        throw Throwables.propagate(e);
    } finally {
        HttpUtils.consumeErrStreamAndCloseConnection(connection);
    }
    return refs.build();
}
Use of com.google.common.collect.ImmutableSet in project GeoGig by boundlessgeo: class LocalMappedRemoteRepo, method listRefs.
/**
 * Lists the remote's {@link Ref refs}, restricted to the requested namespaces,
 * translating each ref to its locally mapped commit where a mapping exists.
 *
 * @param getHeads whether to return refs in the {@code refs/heads} namespace
 * @param getTags whether to return refs in the {@code refs/tags} namespace
 * @return an immutable set of refs from the remote
 */
@Override
public ImmutableSet<Ref> listRefs(final boolean getHeads, final boolean getTags) {
    // Select only refs from the namespaces the caller asked for.
    Predicate<Ref> namespaceFilter = new Predicate<Ref>() {
        @Override
        public boolean apply(Ref input) {
            String refName = input.getName();
            if (getHeads && refName.startsWith(Ref.HEADS_PREFIX)) {
                return true;
            }
            return getTags && refName.startsWith(Ref.TAGS_PREFIX);
        }
    };
    ImmutableSet<Ref> remoteRefs = remoteGeoGig.command(ForEachRef.class).setFilter(namespaceFilter).call();
    // Translate the refs to their mapped values.
    ImmutableSet.Builder<Ref> mapped = new ImmutableSet.Builder<Ref>();
    for (Ref remoteRef : remoteRefs) {
        Ref result = remoteRef;
        boolean mappable = !(remoteRef instanceof SymRef)
                && localRepository.graphDatabase().exists(remoteRef.getObjectId());
        if (mappable) {
            ObjectId mappedCommit = localRepository.graphDatabase().getMapping(remoteRef.getObjectId());
            if (mappedCommit != null) {
                result = new Ref(remoteRef.getName(), mappedCommit);
            }
        }
        mapped.add(result);
    }
    return mapped.build();
}
Aggregations