use of org.apache.beam.vendor.calcite.v1_28_0.com.google.common.collect.ImmutableSet in project cdap by caskdata.
the class SmartWorkflow method configure.
@Override
protected void configure() {
  setName(NAME);
  setDescription(DESCRIPTION);
  // set the pipeline spec as a property in case somebody like the UI wants to read it
  Map<String, String> properties = new HashMap<>();
  properties.put(Constants.PIPELINE_SPEC_KEY, GSON.toJson(spec));
  setProperties(properties);
  stageSpecs = new HashMap<>();
  useSpark = engine == Engine.SPARK;
  for (StageSpec stageSpec : spec.getStages()) {
    stageSpecs.put(stageSpec.getName(), stageSpec);
    String pluginType = stageSpec.getPlugin().getType();
    if (SparkCompute.PLUGIN_TYPE.equals(pluginType) || SparkSink.PLUGIN_TYPE.equals(pluginType)) {
      useSpark = true;
    }
  }
  PipelinePlanner planner;
  Set<String> actionTypes = ImmutableSet.of(Action.PLUGIN_TYPE, Constants.SPARK_PROGRAM_PLUGIN_TYPE);
  Set<String> multiPortTypes = ImmutableSet.of(SplitterTransform.PLUGIN_TYPE);
  if (useSpark) {
    // if the pipeline uses spark, we don't need to break the pipeline up into phases;
    // we can just have a single phase.
    planner = new PipelinePlanner(supportedPluginTypes, ImmutableSet.<String>of(), ImmutableSet.<String>of(),
                                  actionTypes, multiPortTypes);
  } else {
    planner = new PipelinePlanner(supportedPluginTypes,
                                  ImmutableSet.of(BatchAggregator.PLUGIN_TYPE, BatchJoiner.PLUGIN_TYPE),
                                  ImmutableSet.of(SparkCompute.PLUGIN_TYPE, SparkSink.PLUGIN_TYPE),
                                  actionTypes, multiPortTypes);
  }
  plan = planner.plan(spec);
  WorkflowProgramAdder programAdder = new TrunkProgramAdder(getConfigurer());
  // single phase, just add the program directly
  if (plan.getPhases().size() == 1) {
    addProgram(plan.getPhases().keySet().iterator().next(), programAdder);
    return;
  }
  // Dag classes don't allow a 'dag' without connections
  if (plan.getPhaseConnections().isEmpty()) {
    WorkflowProgramAdder fork = programAdder.fork();
    for (String phaseName : plan.getPhases().keySet()) {
      addProgram(phaseName, fork);
    }
    fork.join();
    return;
  }
  dag = new ControlDag(plan.getPhaseConnections());
  boolean dummyNodeAdded = false;
  Map<String, ConditionBranches> conditionBranches = plan.getConditionPhaseBranches();
  if (conditionBranches.isEmpty()) {
    // after flattening, there is guaranteed to be just one source
    dag.flatten();
  } else if (!conditionBranches.keySet().containsAll(dag.getSources())) {
    // Continue only if the condition node is not the source of the dag; otherwise the dag is
    // already in the required form
    Set<String> conditions = conditionBranches.keySet();
    // flatten only the part of the dag starting from sources and ending in conditions/sinks.
    Set<String> dagNodes = dag.accessibleFrom(dag.getSources(), Sets.union(dag.getSinks(), conditions));
    Set<String> dagNodesWithoutCondition = Sets.difference(dagNodes, conditions);
    Set<Connection> connections = new HashSet<>();
    Deque<String> bfs = new LinkedList<>();
    Set<String> sinks = new HashSet<>();
    // If it's a single phase without conditions, there is no need to flatten
    if (dagNodesWithoutCondition.size() > 1) {
      Dag subDag;
      try {
        subDag = dag.createSubDag(dagNodesWithoutCondition);
      } catch (IllegalArgumentException | DisjointConnectionsException e) {
        // DisjointConnectionsException is thrown when dagNodesWithoutCondition forms disconnected islands;
        // IllegalArgumentException is thrown when the connections are empty.
        // In both cases we need to add a dummy node to create a connected Dag
        String dummyNode = "dummy";
        dummyNodeAdded = true;
        Set<Connection> subDagConnections = new HashSet<>();
        for (String source : dag.getSources()) {
          subDagConnections.add(new Connection(dummyNode, source));
        }
        Deque<String> subDagBFS = new LinkedList<>();
        subDagBFS.addAll(dag.getSources());
        while (subDagBFS.peek() != null) {
          String node = subDagBFS.poll();
          for (String output : dag.getNodeOutputs(node)) {
            if (dagNodesWithoutCondition.contains(output)) {
              subDagConnections.add(new Connection(node, output));
              subDagBFS.add(output);
            }
          }
        }
        subDag = new Dag(subDagConnections);
      }
      ControlDag cdag = new ControlDag(subDag);
      cdag.flatten();
      // Add all connections from cdag
      bfs.addAll(cdag.getSources());
      while (bfs.peek() != null) {
        String node = bfs.poll();
        for (String output : cdag.getNodeOutputs(node)) {
          connections.add(new Connection(node, output));
          bfs.add(output);
        }
      }
      sinks.addAll(cdag.getSinks());
    } else {
      sinks.addAll(dagNodesWithoutCondition);
    }
    // Add back the existing condition nodes and corresponding conditions
    Set<String> conditionsFromDag = Sets.intersection(dagNodes, conditions);
    for (String condition : conditionsFromDag) {
      connections.add(new Connection(sinks.iterator().next(), condition));
    }
    bfs.addAll(Sets.intersection(dagNodes, conditions));
    while (bfs.peek() != null) {
      String node = bfs.poll();
      ConditionBranches branches = conditionBranches.get(node);
      if (branches == null) {
        // not a condition node, so add its outputs
        for (String output : dag.getNodeOutputs(node)) {
          connections.add(new Connection(node, output));
          bfs.add(output);
        }
      } else {
        // condition node
        for (Boolean condition : Arrays.asList(true, false)) {
          String phase = condition ? branches.getTrueOutput() : branches.getFalseOutput();
          if (phase == null) {
            continue;
          }
          connections.add(new Connection(node, phase, condition));
          bfs.add(phase);
        }
      }
    }
    dag = new ControlDag(connections);
  }
  if (dummyNodeAdded) {
    WorkflowProgramAdder fork = programAdder.fork();
    String dummyNode = dag.getSources().iterator().next();
    for (String output : dag.getNodeOutputs(dummyNode)) {
      // need to make sure we don't call also() if this is the final branch
      if (!addBranchPrograms(output, fork)) {
        fork = fork.also();
      }
    }
  } else {
    String start = dag.getSources().iterator().next();
    addPrograms(start, programAdder);
  }
}
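The configure() method above leans on two ImmutableSet idioms: fixed membership built with ImmutableSet.of(...), and explicitly typed empty sets (ImmutableSet.<String>of()) passed where a whole plugin-type category is empty. Here is a minimal, self-contained sketch of the same configuration pattern; it uses plain Guava's ImmutableSet (the vendored copy under org.apache.beam.vendor.calcite.v1_28_0 exposes the same API), and the Planner class and plugin-type strings are hypothetical stand-ins rather than CDAP's real types.

import com.google.common.collect.ImmutableSet;

import java.util.Set;

public class PlannerConfigSketch {

  // Hypothetical stand-in for PipelinePlanner: it only records which plugin types
  // force a phase break and which must run in an isolated phase.
  static final class Planner {
    final Set<String> reduceTypes;
    final Set<String> isolationTypes;

    Planner(Set<String> reduceTypes, Set<String> isolationTypes) {
      this.reduceTypes = reduceTypes;
      this.isolationTypes = isolationTypes;
    }
  }

  public static void main(String[] args) {
    boolean useSpark = true;

    // Fixed membership: ImmutableSet.of(...) returns a compact, unmodifiable set.
    Set<String> reduceTypes = ImmutableSet.of("batchaggregator", "batchjoiner");
    Set<String> isolationTypes = ImmutableSet.of("sparkcompute", "sparksink");

    // For an empty category, ImmutableSet.<String>of() returns a shared empty
    // singleton; the explicit type witness mirrors the style used above.
    Planner planner = useSpark
      ? new Planner(ImmutableSet.<String>of(), ImmutableSet.<String>of())
      : new Planner(reduceTypes, isolationTypes);

    System.out.println("reduce types: " + planner.reduceTypes);
  }
}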
use of org.apache.beam.vendor.calcite.v1_28_0.com.google.common.collect.ImmutableSet in project cdap by caskdata.
the class ArtifactStoreTest method testConcurrentSnapshotWrite.
@Category(SlowTests.class)
@Test
public void testConcurrentSnapshotWrite() throws Exception {
  // write parent
  Id.Artifact parentArtifactId = Id.Artifact.from(Id.Namespace.DEFAULT, "parent", "1.0.0");
  ArtifactMeta parentMeta = new ArtifactMeta(ArtifactClasses.builder().build());
  writeArtifact(parentArtifactId, parentMeta, "content");
  final ArtifactRange parentArtifacts = new ArtifactRange(NamespaceId.DEFAULT.getNamespace(), "parent",
                                                          new ArtifactVersion("1.0.0"), new ArtifactVersion("2.0.0"));
  // start up a bunch of threads that will try and write the same artifact at the same time;
  // only one of them should be able to write it
  int numThreads = 20;
  final Id.Artifact artifactId = Id.Artifact.from(Id.Namespace.DEFAULT, "abc", "1.0.0-SNAPSHOT");
  // use a barrier so they all try and write at the same time
  final CyclicBarrier barrier = new CyclicBarrier(numThreads);
  final CountDownLatch latch = new CountDownLatch(numThreads);
  ExecutorService executorService = Executors.newFixedThreadPool(numThreads);
  for (int i = 0; i < numThreads; i++) {
    final String writer = String.valueOf(i);
    executorService.execute(new Runnable() {
      @Override
      public void run() {
        try {
          barrier.await();
          ArtifactMeta meta = new ArtifactMeta(
            ArtifactClasses.builder()
              .addPlugin(new PluginClass("plugin-type", "plugin" + writer, "", "classname", "cfg",
                                         ImmutableMap.<String, PluginPropertyField>of()))
              .build(),
            ImmutableSet.of(parentArtifacts));
          writeArtifact(artifactId, meta, writer);
        } catch (InterruptedException | BrokenBarrierException | ArtifactAlreadyExistsException | IOException e) {
          // something went wrong, fail the test
          throw new RuntimeException(e);
        } catch (WriteConflictException e) {
          // a write conflict is ok, though unexpected (it means a writer couldn't write even after a bunch of retries)
        } finally {
          latch.countDown();
        }
      }
    });
  }
  // wait for all writers to finish
  latch.await();
  // figure out which was the last writer by reading our data. all the writers should have been able to write,
  // and they should have all overwritten each other in a consistent manner
  ArtifactDetail detail = artifactStore.getArtifact(artifactId);
  // figure out the winning writer from the plugin name, which is 'plugin<writer>'
  String pluginName = detail.getMeta().getClasses().getPlugins().iterator().next().getName();
  String winnerWriter = pluginName.substring("plugin".length());
  ArtifactMeta expectedMeta = new ArtifactMeta(
    ArtifactClasses.builder()
      .addPlugin(new PluginClass("plugin-type", "plugin" + winnerWriter, "", "classname", "cfg",
                                 ImmutableMap.<String, PluginPropertyField>of()))
      .build(),
    ImmutableSet.of(parentArtifacts));
  assertEqual(artifactId, expectedMeta, winnerWriter, detail);
  // check that only 1 plugin remains and that it's the correct one
  Map<ArtifactDescriptor, Set<PluginClass>> pluginMap =
    artifactStore.getPluginClasses(NamespaceId.DEFAULT, parentArtifactId, "plugin-type");
  Map<ArtifactDescriptor, Set<PluginClass>> expected = Maps.newHashMap();
  expected.put(detail.getDescriptor(),
               ImmutableSet.<PluginClass>of(new PluginClass("plugin-type", "plugin" + winnerWriter, "", "classname",
                                                            "cfg", ImmutableMap.<String, PluginPropertyField>of())));
  Assert.assertEquals(expected, pluginMap);
}
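The coordination pattern in this test is worth isolating: a CyclicBarrier releases all writers at once to maximize contention, and a CountDownLatch lets the test wait for every writer to finish before reading back the winner. The following stripped-down sketch shows just that pattern; the artifact store is replaced by a hypothetical AtomicReference acting as a last-writer-wins cell, so none of the CDAP classes are involved.

import java.util.concurrent.BrokenBarrierException;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicReference;

public class BarrierLatchSketch {
  public static void main(String[] args) throws InterruptedException {
    int numThreads = 20;
    CyclicBarrier barrier = new CyclicBarrier(numThreads);        // releases all writers together
    CountDownLatch latch = new CountDownLatch(numThreads);        // signals when every writer is done
    AtomicReference<String> lastWriter = new AtomicReference<>(); // stand-in for the contended store
    ExecutorService pool = Executors.newFixedThreadPool(numThreads);

    for (int i = 0; i < numThreads; i++) {
      String writer = String.valueOf(i);
      pool.execute(() -> {
        try {
          barrier.await();        // every thread blocks here until all have arrived
          lastWriter.set(writer); // the "snapshot write" performed under contention
        } catch (InterruptedException | BrokenBarrierException e) {
          throw new RuntimeException(e);
        } finally {
          latch.countDown();
        }
      });
    }
    latch.await(); // wait for all writers before reading the result
    pool.shutdown();
    // Exactly one writer's value survives; which one is nondeterministic.
    System.out.println("winner: " + lastWriter.get());
  }
}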
use of org.apache.beam.vendor.calcite.v1_28_0.com.google.common.collect.ImmutableSet in project SpongeCommon by SpongePowered.
the class ServerPlayerMixin method impl$handleClientSettings.
@SuppressWarnings({ "ConstantConditions", "UnstableApiUsage" })
@Inject(method = "updateOptions", at = @At("HEAD"))
private void impl$handleClientSettings(final ServerboundClientInformationPacket packet, final CallbackInfo ci) {
    if (!ShouldFire.PLAYER_CHANGE_CLIENT_SETTINGS_EVENT) {
        return;
    }
    final ServerboundClientInformationPacketAccessor $packet = (ServerboundClientInformationPacketAccessor) packet;
    final Locale newLocale = LocaleCache.getLocale($packet.accessor$language());
    final ImmutableSet<SkinPart> skinParts = Sponge.game().registry(RegistryTypes.SKIN_PART)
        .stream()
        .map(part -> (SpongeSkinPart) part)
        .filter(part -> part.test(packet.getModelCustomisation()))
        .collect(ImmutableSet.toImmutableSet());
    final int viewDistance = $packet.accessor$viewDistance();
    // Post before the player values are updated
    try (final CauseStackManager.StackFrame frame = PhaseTracker.getCauseStackManager().pushCauseFrame()) {
        final ChatVisibility visibility = (ChatVisibility) (Object) packet.getChatVisibility();
        final PlayerChangeClientSettingsEvent event = SpongeEventFactory.createPlayerChangeClientSettingsEvent(
            frame.currentCause(), visibility, skinParts, newLocale, (ServerPlayer) this,
            packet.getChatColors(), viewDistance);
        SpongeCommon.post(event);
    }
}
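The skin parts above are gathered with ImmutableSet.toImmutableSet(), Guava's Collector for terminating a stream in an immutable set. A minimal sketch of that collector on plain data follows; the bitmask predicate only imitates the part.test(...) call and is not Sponge's actual skin-part logic.

import com.google.common.collect.ImmutableSet;

import java.util.stream.IntStream;

public class ToImmutableSetSketch {
    public static void main(String[] args) {
        int mask = 0b0000101; // hypothetical client bitmask: parts 0 and 2 enabled

        // Filter the stream by a predicate, then collect the survivors into an ImmutableSet.
        ImmutableSet<Integer> enabledParts = IntStream.range(0, 7)
            .boxed()
            .filter(part -> (mask & (1 << part)) != 0)
            .collect(ImmutableSet.toImmutableSet());

        System.out.println(enabledParts); // prints [0, 2]
    }
}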
use of org.apache.beam.vendor.calcite.v1_28_0.com.google.common.collect.ImmutableSet in project gradle by gradle.
the class DefaultTypeMetadataStore method createTypeMetadata.
private <T> TypeMetadata createTypeMetadata(Class<T> type) {
    Class<?> publicType = GeneratedSubclasses.unpack(type);
    ReplayingTypeValidationContext validationContext = new ReplayingTypeValidationContext();
    TypeAnnotationMetadata annotationMetadata = typeAnnotationMetadataStore.getTypeAnnotationMetadata(publicType);
    annotationMetadata.visitValidationFailures(validationContext);
    for (TypeAnnotationHandler annotationHandler : typeAnnotationHandlers) {
        if (annotationMetadata.isAnnotationPresent(annotationHandler.getAnnotationType())) {
            annotationHandler.validateTypeMetadata(publicType, validationContext);
        }
    }
    ImmutableSet.Builder<PropertyMetadata> effectiveProperties =
        ImmutableSet.builderWithExpectedSize(annotationMetadata.getPropertiesAnnotationMetadata().size());
    for (PropertyAnnotationMetadata propertyAnnotationMetadata : annotationMetadata.getPropertiesAnnotationMetadata()) {
        Map<AnnotationCategory, Annotation> propertyAnnotations = propertyAnnotationMetadata.getAnnotations();
        Annotation typeAnnotation = propertyAnnotations.get(TYPE);
        Annotation normalizationAnnotation = propertyAnnotations.get(NORMALIZATION);
        Class<? extends Annotation> propertyType = determinePropertyType(typeAnnotation, normalizationAnnotation);
        if (propertyType == null) {
            validationContext.visitPropertyProblem(problem ->
                problem.withId(ValidationProblemId.MISSING_ANNOTATION)
                    .forProperty(propertyAnnotationMetadata.getPropertyName())
                    .reportAs(ERROR)
                    .withDescription(() -> "is missing " + displayName)
                    .happensBecause("A property without annotation isn't considered during up-to-date checking")
                    .addPossibleSolution(() -> "Add " + displayName)
                    .addPossibleSolution("Mark it as @Internal")
                    .documentedAt("validation_problems", "missing_annotation"));
            continue;
        }
        PropertyAnnotationHandler annotationHandler = propertyAnnotationHandlers.get(propertyType);
        if (annotationHandler == null) {
            validationContext.visitPropertyProblem(problem ->
                problem.withId(ValidationProblemId.ANNOTATION_INVALID_IN_CONTEXT)
                    .forProperty(propertyAnnotationMetadata.getPropertyName())
                    .reportAs(ERROR)
                    .withDescription(() -> String.format("is annotated with invalid property type @%s", propertyType.getSimpleName()))
                    .happensBecause(() -> "The '@" + propertyType.getSimpleName() + "' annotation cannot be used in this context")
                    .addPossibleSolution("Remove the property")
                    .addPossibleSolution(() -> "Use a different annotation, e.g one of " + toListOfAnnotations(propertyAnnotationHandlers.keySet()))
                    .documentedAt("validation_problems", "annotation_invalid_in_context"));
            continue;
        }
        ImmutableSet<? extends AnnotationCategory> allowedModifiersForPropertyType = annotationHandler.getAllowedModifiers();
        for (Map.Entry<AnnotationCategory, Annotation> entry : propertyAnnotations.entrySet()) {
            AnnotationCategory annotationCategory = entry.getKey();
            if (annotationCategory == TYPE) {
                continue;
            }
            Class<? extends Annotation> annotationType = entry.getValue().annotationType();
            if (!allowedModifiersForPropertyType.contains(annotationCategory)) {
                validationContext.visitPropertyProblem(problem ->
                    problem.withId(ValidationProblemId.INCOMPATIBLE_ANNOTATIONS)
                        .forProperty(propertyAnnotationMetadata.getPropertyName())
                        .reportAs(ERROR)
                        .withDescription(() -> "is annotated with @" + annotationType.getSimpleName() + " but that is not allowed for '" + propertyType.getSimpleName() + "' properties")
                        .happensBecause(() -> "This modifier is used in conjunction with a property of type '" + propertyType.getSimpleName() + "' but this doesn't have semantics")
                        .withLongDescription(() -> "The list of allowed modifiers for '" + propertyType.getSimpleName() + "' is " + toListOfAnnotations(allowedPropertyModifiers))
                        .addPossibleSolution(() -> "Remove the '@" + annotationType.getSimpleName() + "' annotation")
                        .documentedAt("validation_problems", "incompatible_annotations"));
            } else if (!allowedPropertyModifiers.contains(annotationType)) {
                validationContext.visitPropertyProblem(problem ->
                    problem.withId(ValidationProblemId.ANNOTATION_INVALID_IN_CONTEXT)
                        .forProperty(propertyAnnotationMetadata.getPropertyName())
                        .reportAs(ERROR)
                        .withDescription(() -> String.format("is annotated with invalid modifier @%s", annotationType.getSimpleName()))
                        .happensBecause(() -> "The '@" + annotationType.getSimpleName() + "' annotation cannot be used in this context")
                        .addPossibleSolution("Remove the annotation")
                        .addPossibleSolution(() -> "Use a different annotation, e.g one of " + toListOfAnnotations(allowedPropertyModifiers))
                        .documentedAt("validation_problems", "annotation_invalid_in_context"));
            }
        }
        PropertyMetadata property = new DefaultPropertyMetadata(propertyType, propertyAnnotationMetadata);
        annotationHandler.validatePropertyMetadata(property, validationContext);
        if (annotationHandler.isPropertyRelevant()) {
            effectiveProperties.add(property);
        }
    }
    return new DefaultTypeMetadata(effectiveProperties.build(), validationContext, propertyAnnotationHandlers);
}
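Surviving properties are accumulated through ImmutableSet.builderWithExpectedSize(n), which presizes the builder when an upper bound on the result is known; properties that fail validation are skipped with continue, so ending up below the expected size is fine. A small sketch of that build-with-filter pattern, where a hypothetical name check stands in for the annotation validation above:

import com.google.common.collect.ImmutableSet;

import java.util.List;

public class BuilderSketch {
    public static void main(String[] args) {
        List<String> candidates = List.of("input", "output", "internalState", "cacheDir");

        // Presize the builder with the candidate count; skipped entries just
        // leave the final set smaller than the expected size, which is allowed.
        ImmutableSet.Builder<String> effective =
            ImmutableSet.builderWithExpectedSize(candidates.size());
        for (String property : candidates) {
            if (!property.startsWith("internal")) { // stand-in for the validation above
                effective.add(property);
            }
        }
        System.out.println(effective.build()); // prints [input, output, cacheDir]
    }
}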
use of org.apache.beam.vendor.calcite.v1_28_0.com.google.common.collect.ImmutableSet in project gradle by gradle.
the class AbstractFileWatcherUpdater method update.
private void update(SnapshotHierarchy root) {
    FileHierarchySet oldWatchedFiles = watchedFiles;
    watchedFiles = resolveWatchedFiles(watchableHierarchies, root);
    if (!watchedFiles.equals(oldWatchedFiles)) {
        updateWatchesOnChangedWatchedFiles(watchedFiles);
    }
    // Probe every hierarchy that is watched, even ones nested inside others
    ImmutableSet<File> oldProbedHierarchies = probedHierarchies;
    probedHierarchies = watchableHierarchies.stream()
        .filter(watchedFiles::contains)
        .collect(ImmutableSet.toImmutableSet());
    if (oldProbedHierarchies.equals(probedHierarchies)) {
        return;
    }
    oldProbedHierarchies.stream()
        .filter(oldProbedHierarchy -> !probedHierarchies.contains(oldProbedHierarchy))
        .forEach(probedHierarchy -> {
            File probeDirectory = probeRegistry.getProbeDirectory(probedHierarchy);
            probeRegistry.disarmWatchProbe(probedHierarchy);
            stopWatchingProbeDirectory(probeDirectory);
        });
    probedHierarchies.stream()
        .filter(newProbedHierarchy -> !oldProbedHierarchies.contains(newProbedHierarchy))
        .forEach(probedHierarchy -> {
            File probeDirectory = probeRegistry.getProbeDirectory(probedHierarchy);
            // Make sure the directory exists; this can be necessary when
            // included builds are evaluated with configuration cache
            //noinspection ResultOfMethodCallIgnored
            probeDirectory.mkdirs();
            startWatchingProbeDirectory(probeDirectory);
            probeRegistry.armWatchProbe(probedHierarchy);
        });
}
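The probe bookkeeping above is a two-way set diff: stream the old set and drop everything still present in the new set to find removals, then do the reverse for additions, returning early when membership is unchanged. A compact sketch of that diff over ImmutableSets, with println calls standing in for the arm/disarm probe actions:

import com.google.common.collect.ImmutableSet;

public class SetDiffSketch {
    public static void main(String[] args) {
        ImmutableSet<String> oldProbed = ImmutableSet.of("/build/a", "/build/b");
        ImmutableSet<String> newProbed = ImmutableSet.of("/build/b", "/build/c");

        if (oldProbed.equals(newProbed)) {
            return; // nothing to do when membership is unchanged
        }
        // Only in the old set: tear down (the "disarm watch probe" branch above).
        oldProbed.stream()
            .filter(hierarchy -> !newProbed.contains(hierarchy))
            .forEach(hierarchy -> System.out.println("stop watching " + hierarchy));
        // Only in the new set: set up (the "arm watch probe" branch above).
        newProbed.stream()
            .filter(hierarchy -> !oldProbed.contains(hierarchy))
            .forEach(hierarchy -> System.out.println("start watching " + hierarchy));
    }
}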