use of com.google.common.collect.ImmutableSet in project buck by facebook.
the class ParserTest method resolveTargetSpecsPreservesOrder.
@Test
public void resolveTargetSpecsPreservesOrder() throws Exception {
  BuildTarget foo = BuildTargetFactory.newInstance(filesystem, "//foo:foo");
  Path buckFile = cellRoot.resolve("foo/BUCK");
  Files.createDirectories(buckFile.getParent());
  Files.write(buckFile, "genrule(name='foo', out='foo', cmd='foo')".getBytes(UTF_8));
  BuildTarget bar = BuildTargetFactory.newInstance(filesystem, "//bar:bar");
  buckFile = cellRoot.resolve("bar/BUCK");
  Files.createDirectories(buckFile.getParent());
  Files.write(buckFile, "genrule(name='bar', out='bar', cmd='bar')".getBytes(UTF_8));
  ImmutableList<ImmutableSet<BuildTarget>> targets = parser.resolveTargetSpecs(
      eventBus, cell, false, executorService,
      ImmutableList.of(
          TargetNodePredicateSpec.of(x -> true, BuildFileSpec.fromRecursivePath(Paths.get("bar"), cell.getRoot())),
          TargetNodePredicateSpec.of(x -> true, BuildFileSpec.fromRecursivePath(Paths.get("foo"), cell.getRoot()))),
      SpeculativeParsing.of(true),
      ParserConfig.ApplyDefaultFlavorsMode.ENABLED);
  assertThat(targets, equalTo(ImmutableList.of(ImmutableSet.of(bar), ImmutableSet.of(foo))));
  targets = parser.resolveTargetSpecs(
      eventBus, cell, false, executorService,
      ImmutableList.of(
          TargetNodePredicateSpec.of(x -> true, BuildFileSpec.fromRecursivePath(Paths.get("foo"), cell.getRoot())),
          TargetNodePredicateSpec.of(x -> true, BuildFileSpec.fromRecursivePath(Paths.get("bar"), cell.getRoot()))),
      SpeculativeParsing.of(true),
      ParserConfig.ApplyDefaultFlavorsMode.ENABLED);
  assertThat(targets, equalTo(ImmutableList.of(ImmutableSet.of(foo), ImmutableSet.of(bar))));
}
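The assertions here lean on a documented Guava guarantee: ImmutableList and ImmutableSet both preserve the order in which elements were supplied, so equality checks are sensitive to the order of the incoming specs. A minimal standalone sketch of that property (the class name is illustrative):

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;

public class OrderDemo {
  public static void main(String[] args) {
    // ImmutableSet iterates in insertion order, unlike HashSet.
    System.out.println(ImmutableSet.of("bar", "foo")); // [bar, foo]
    // equals() on ImmutableList is order-sensitive, which is exactly what
    // the assertThat(..., equalTo(...)) checks above rely on.
    ImmutableList<ImmutableSet<String>> a = ImmutableList.of(ImmutableSet.of("bar"), ImmutableSet.of("foo"));
    ImmutableList<ImmutableSet<String>> b = ImmutableList.of(ImmutableSet.of("foo"), ImmutableSet.of("bar"));
    System.out.println(a.equals(b)); // false: same elements, different order
  }
}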
use of com.google.common.collect.ImmutableSet in project buck by facebook.
the class ParsePipelineTest method exceptionOnSwappedRawNodesInGetTargetNode.
@Test
public void exceptionOnSwappedRawNodesInGetTargetNode() throws Exception {
  // Swap the raw nodes cached for the root build file into the cache slot for
  // a/BUCK, then verify that the resulting mismatch is detected on lookup.
  try (Fixture fixture = createSynchronousExecutionFixture("pipeline_test")) {
    Cell cell = fixture.getCell();
    Path rootBuildFilePath = cell.getFilesystem().resolve("BUCK");
    Path aBuildFilePath = cell.getFilesystem().resolve("a/BUCK");
    fixture.getTargetNodeParsePipeline().getAllNodes(cell, rootBuildFilePath);
    Optional<ImmutableSet<Map<String, Object>>> rootRawNodes =
        fixture.getRawNodeParsePipelineCache().lookupComputedNode(cell, rootBuildFilePath);
    fixture.getRawNodeParsePipelineCache()
        .putComputedNodeIfNotPresent(cell, aBuildFilePath, rootRawNodes.get());
    expectedException.expect(IllegalStateException.class);
    expectedException.expectMessage(
        "Raw data claims to come from [], but we tried rooting it at [a].");
    fixture.getTargetNodeParsePipeline()
        .getNode(cell, BuildTargetFactory.newInstance(cell.getFilesystem(), "//a:lib"));
  }
}
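The expectedException calls assume JUnit 4's ExpectedException rule is declared as a @Rule field on the test class; expect(...) and expectMessage(...) arm the rule before the statement that should throw, and expectMessage matches a substring. A hedged sketch of that wiring (class and message are illustrative):

import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;

public class ExpectedExceptionDemo {
  // The test above presumes a field like this on the test class.
  @Rule public ExpectedException expectedException = ExpectedException.none();

  @Test
  public void failsWithConfiguredMessage() {
    expectedException.expect(IllegalStateException.class);
    expectedException.expectMessage("boom"); // substring match
    throw new IllegalStateException("boom goes the pipeline");
  }
}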
use of com.google.common.collect.ImmutableSet in project druid by druid-io.
the class WhiteListBasedConverter method readMap.
private ImmutableSortedMap<String, ImmutableSet<String>> readMap(final String mapPath) {
  String fileContent;
  String actualPath = mapPath;
  try {
    if (Strings.isNullOrEmpty(mapPath)) {
      URL resource = this.getClass().getClassLoader().getResource("defaultWhiteListMap.json");
      actualPath = resource.getFile();
      LOGGER.info("using default whiteList map located at [%s]", actualPath);
      fileContent = Resources.toString(resource, Charset.defaultCharset());
    } else {
      fileContent = Files.asCharSource(new File(mapPath), Charset.forName("UTF-8")).read();
    }
    return mapper.reader(
        new TypeReference<ImmutableSortedMap<String, ImmutableSet<String>>>() {})
        .readValue(fileContent);
  } catch (IOException e) {
    throw new ISE(e, "Got an exception while parsing file [%s]", actualPath);
  }
}
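Deserializing straight into Guava types such as ImmutableSortedMap and ImmutableSet only works when the ObjectMapper knows about them, which for Jackson normally means registering the GuavaModule from the jackson-datatype-guava artifact. A minimal sketch under that assumption (the JSON literal is illustrative, not Druid's actual default whitelist):

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.datatype.guava.GuavaModule;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedMap;

public class WhiteListParseDemo {
  public static void main(String[] args) throws Exception {
    // Without the GuavaModule, Jackson cannot construct Guava's immutable types.
    ObjectMapper mapper = new ObjectMapper().registerModule(new GuavaModule());
    String json = "{\"jvm\": [\"gc\", \"mem\"], \"query\": [\"time\"]}";
    ImmutableSortedMap<String, ImmutableSet<String>> whiteList = mapper.readValue(
        json, new TypeReference<ImmutableSortedMap<String, ImmutableSet<String>>>() {});
    System.out.println(whiteList); // {jvm=[gc, mem], query=[time]}
  }
}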
use of com.google.common.collect.ImmutableSet in project druid by druid-io.
the class IndexerSQLMetadataStorageCoordinator method announceHistoricalSegments.
/**
 * {@inheritDoc}
 */
@Override
public SegmentPublishResult announceHistoricalSegments(
    final Set<DataSegment> segments,
    final DataSourceMetadata startMetadata,
    final DataSourceMetadata endMetadata) throws IOException {
  if (segments.isEmpty()) {
    throw new IllegalArgumentException("segment set must not be empty");
  }
  final String dataSource = segments.iterator().next().getDataSource();
  for (DataSegment segment : segments) {
    if (!dataSource.equals(segment.getDataSource())) {
      throw new IllegalArgumentException("segments must all be from the same dataSource");
    }
  }
  if ((startMetadata == null && endMetadata != null) || (startMetadata != null && endMetadata == null)) {
    throw new IllegalArgumentException("start/end metadata pair must be either null or non-null");
  }
  // Find which segments are used (i.e. not overshadowed).
  final Set<DataSegment> usedSegments = Sets.newHashSet();
  for (TimelineObjectHolder<String, DataSegment> holder :
      VersionedIntervalTimeline.forSegments(segments).lookup(JodaUtils.ETERNITY)) {
    for (PartitionChunk<DataSegment> chunk : holder.getObject()) {
      usedSegments.add(chunk.getObject());
    }
  }
  final AtomicBoolean txnFailure = new AtomicBoolean(false);
  try {
    return connector.retryTransaction(
        new TransactionCallback<SegmentPublishResult>() {
          @Override
          public SegmentPublishResult inTransaction(
              final Handle handle, final TransactionStatus transactionStatus) throws Exception {
            final Set<DataSegment> inserted = Sets.newHashSet();
            if (startMetadata != null) {
              final DataSourceMetadataUpdateResult result =
                  updateDataSourceMetadataWithHandle(handle, dataSource, startMetadata, endMetadata);
              if (result != DataSourceMetadataUpdateResult.SUCCESS) {
                transactionStatus.setRollbackOnly();
                txnFailure.set(true);
                if (result == DataSourceMetadataUpdateResult.FAILURE) {
                  throw new RuntimeException("Aborting transaction!");
                } else if (result == DataSourceMetadataUpdateResult.TRY_AGAIN) {
                  throw new RetryTransactionException("Aborting transaction!");
                }
              }
            }
            for (final DataSegment segment : segments) {
              if (announceHistoricalSegment(handle, segment, usedSegments.contains(segment))) {
                inserted.add(segment);
              }
            }
            return new SegmentPublishResult(ImmutableSet.copyOf(inserted), true);
          }
        },
        3,
        SQLMetadataConnector.DEFAULT_MAX_TRIES);
  } catch (CallbackFailedException e) {
    if (txnFailure.get()) {
      return new SegmentPublishResult(ImmutableSet.<DataSegment>of(), false);
    } else {
      throw e;
    }
  }
}
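Note the txnFailure flag: the transaction body can only signal a deliberate abort by throwing, so the catch block needs a side channel to tell an intentional rollback (return a failed SegmentPublishResult) apart from an infrastructural error (rethrow). A stripped-down sketch of that pattern; runInTransaction and CallbackFailedException here are hypothetical stand-ins for the real JDBI machinery:

import java.util.concurrent.Callable;
import java.util.concurrent.atomic.AtomicBoolean;

public class DeliberateRollbackDemo {
  // Hypothetical stand-in for the wrapper the transaction runner throws.
  static class CallbackFailedException extends RuntimeException {
    CallbackFailedException(Throwable cause) { super(cause); }
  }

  // Hypothetical runner; a real one would also commit or roll back.
  static <T> T runInTransaction(Callable<T> body) {
    try {
      return body.call();
    } catch (Exception e) {
      throw new CallbackFailedException(e);
    }
  }

  static boolean publish(boolean metadataUpdateSucceeds) {
    final AtomicBoolean deliberateAbort = new AtomicBoolean(false);
    try {
      return runInTransaction(() -> {
        if (!metadataUpdateSucceeds) {
          deliberateAbort.set(true); // side channel out of the transaction body
          throw new RuntimeException("Aborting transaction!");
        }
        return true;
      });
    } catch (CallbackFailedException e) {
      if (deliberateAbort.get()) {
        return false; // intentional abort: report failure instead of rethrowing
      }
      throw e; // infrastructural error: propagate to the caller
    }
  }

  public static void main(String[] args) {
    System.out.println(publish(true));  // true
    System.out.println(publish(false)); // false
  }
}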
use of com.google.common.collect.ImmutableSet in project core-java by SpineEventEngine.
the class IoUtil method loadAllProperties.
/**
 * Loads all data from {@code .properties} file(s) into memory.
 *
 * <p>Logs an {@link IOException} if one occurs.
 *
 * @param propsFilePath the path of the {@code .properties} file to load
 */
public static ImmutableSet<Properties> loadAllProperties(String propsFilePath) {
  checkNotNull(propsFilePath);
  final ImmutableSet.Builder<Properties> result = ImmutableSet.builder();
  final Enumeration<URL> resources = getResources(propsFilePath);
  if (resources == null) {
    return result.build();
  }
  while (resources.hasMoreElements()) {
    final URL resourceUrl = resources.nextElement();
    final Properties properties = loadPropertiesFile(resourceUrl);
    result.add(properties);
  }
  return result.build();
}
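Because getResources(...) can match the same relative path in several JARs on the classpath, the method deliberately returns one Properties object per match instead of merging them. A hedged usage sketch (the resource name and property key are made up for illustration):

import com.google.common.collect.ImmutableSet;
import java.util.Properties;

public class LoadAllPropertiesDemo {
  public static void main(String[] args) {
    // "config/app.properties" is a hypothetical resource name.
    ImmutableSet<Properties> allProps = IoUtil.loadAllProperties("config/app.properties");
    for (Properties props : allProps) {
      // One Properties instance per matching classpath resource, e.g. the
      // same relative path shipped inside two different JARs.
      System.out.println(props.getProperty("version", "<unset>"));
    }
  }
}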