Use of java.util.Comparator in project druid by druid-io.
From the class DoubleLeastPostAggregatorTest, method testComparator.
@Test
public void testComparator() {
    final String aggName = "rows";
    DoubleLeastPostAggregator leastPostAggregator;
    CountAggregator agg = new CountAggregator();
    Map<String, Object> metricValues = new HashMap<String, Object>();
    metricValues.put(aggName, agg.get());
    List<PostAggregator> postAggregatorList = Lists.newArrayList(
        new ConstantPostAggregator("roku", 2D),
        new FieldAccessPostAggregator("rows", aggName)
    );
    leastPostAggregator = new DoubleLeastPostAggregator("least", postAggregatorList);
    Comparator comp = leastPostAggregator.getComparator();
    Object before = leastPostAggregator.compute(metricValues);
    agg.aggregate();
    agg.aggregate();
    agg.aggregate();
    metricValues.put(aggName, agg.get());
    Object after = leastPostAggregator.compute(metricValues);
    Assert.assertEquals(-1, comp.compare(before, after));
    Assert.assertEquals(0, comp.compare(before, before));
    Assert.assertEquals(0, comp.compare(after, after));
    Assert.assertEquals(1, comp.compare(after, before));
}
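The assertions above only depend on the comparator ordering the computed values numerically: the "least" value is 0.0 before any rows are counted and 2.0 after three aggregations, since the constant 2D caps it. A minimal, self-contained sketch of that ordering contract with plain JDK types; the Comparator.naturalOrder() used here is a stand-in for illustration, not necessarily the exact comparator Druid returns:

import java.util.Comparator;

public class LeastComparatorSketch {
    public static void main(String[] args) {
        // Numeric ordering on boxed doubles, which is all the test assertions require
        // of leastPostAggregator.getComparator().
        Comparator<Double> comp = Comparator.naturalOrder();

        Double before = 0.0; // least of (constant 2.0, row count 0)
        Double after = 2.0;  // least of (constant 2.0, row count 3)

        System.out.println(comp.compare(before, after) < 0);   // true: before orders first
        System.out.println(comp.compare(before, before) == 0); // true: equal values
        System.out.println(comp.compare(after, before) > 0);   // true: after orders last
    }
}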
Use of java.util.Comparator in project druid by druid-io.
From the class DataSegment, method bucketMonthComparator.
public static Comparator<DataSegment> bucketMonthComparator() {
    return new Comparator<DataSegment>() {
        @Override
        public int compare(DataSegment lhs, DataSegment rhs) {
            int retVal;
            DateTime lhsMonth = Granularities.MONTH.bucketStart(lhs.getInterval().getStart());
            DateTime rhsMonth = Granularities.MONTH.bucketStart(rhs.getInterval().getStart());
            retVal = lhsMonth.compareTo(rhsMonth);
            if (retVal != 0) {
                return retVal;
            }
            return lhs.compareTo(rhs);
        }
    };
}
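The same two-level ordering, month bucket first and then the segment's own natural order, can also be expressed with Java 8 comparator chaining. A hedged sketch, not the method Druid ships: it assumes the same Granularities helper and that the compareTo call above comes from DataSegment implementing Comparable<DataSegment>:

// Sketch only: an equivalent ordering written with Comparator.comparing / thenComparing.
public static Comparator<DataSegment> bucketMonthComparatorChained() {
    return Comparator
        .comparing((DataSegment segment) ->
            Granularities.MONTH.bucketStart(segment.getInterval().getStart()))
        .thenComparing(Comparator.<DataSegment>naturalOrder());
}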
Use of java.util.Comparator in project druid by druid-io.
From the class HadoopConverterJobTest, method testSimpleJob.
@Test
public void testSimpleJob() throws IOException, InterruptedException {
    final SQLMetadataSegmentManager manager = new SQLMetadataSegmentManager(
        HadoopDruidConverterConfig.jsonMapper,
        new Supplier<MetadataSegmentManagerConfig>() {
            @Override
            public MetadataSegmentManagerConfig get() {
                return new MetadataSegmentManagerConfig();
            }
        },
        metadataStorageTablesConfigSupplier,
        connector
    );
    final List<DataSegment> oldSegments = getDataSegments(manager);
    final File tmpDir = temporaryFolder.newFolder();
    final HadoopConverterJob converterJob = new HadoopConverterJob(
        new HadoopDruidConverterConfig(
            DATASOURCE,
            interval,
            new IndexSpec(
                new RoaringBitmapSerdeFactory(null),
                CompressedObjectStrategy.CompressionStrategy.UNCOMPRESSED,
                CompressedObjectStrategy.CompressionStrategy.UNCOMPRESSED,
                CompressionFactory.LongEncodingStrategy.LONGS
            ),
            oldSegments,
            true,
            tmpDir.toURI(),
            ImmutableMap.<String, String>of(),
            null,
            tmpSegmentDir.toURI().toString()
        )
    );
    final List<DataSegment> segments = Lists.newArrayList(converterJob.run());
    Assert.assertNotNull("bad result", segments);
    Assert.assertEquals("wrong segment count", 4, segments.size());
    Assert.assertTrue(converterJob.getLoadedBytes() > 0);
    Assert.assertTrue(converterJob.getWrittenBytes() > 0);
    Assert.assertTrue(converterJob.getWrittenBytes() > converterJob.getLoadedBytes());
    Assert.assertEquals(oldSegments.size(), segments.size());
    final DataSegment segment = segments.get(0);
    Assert.assertTrue(interval.contains(segment.getInterval()));
    Assert.assertTrue(segment.getVersion().endsWith("_converted"));
    Assert.assertTrue(segment.getLoadSpec().get("path").toString().contains("_converted"));
    for (File file : tmpDir.listFiles()) {
        Assert.assertFalse(file.isDirectory());
        Assert.assertTrue(file.isFile());
    }
    final Comparator<DataSegment> segmentComparator = new Comparator<DataSegment>() {
        @Override
        public int compare(DataSegment o1, DataSegment o2) {
            return o1.getIdentifier().compareTo(o2.getIdentifier());
        }
    };
    Collections.sort(oldSegments, segmentComparator);
    Collections.sort(segments, segmentComparator);
    for (int i = 0; i < oldSegments.size(); ++i) {
        final DataSegment oldSegment = oldSegments.get(i);
        final DataSegment newSegment = segments.get(i);
        Assert.assertEquals(oldSegment.getDataSource(), newSegment.getDataSource());
        Assert.assertEquals(oldSegment.getInterval(), newSegment.getInterval());
        Assert.assertEquals(Sets.<String>newHashSet(oldSegment.getMetrics()), Sets.<String>newHashSet(newSegment.getMetrics()));
        Assert.assertEquals(Sets.<String>newHashSet(oldSegment.getDimensions()), Sets.<String>newHashSet(newSegment.getDimensions()));
        Assert.assertEquals(oldSegment.getVersion() + "_converted", newSegment.getVersion());
        Assert.assertTrue(oldSegment.getSize() < newSegment.getSize());
        Assert.assertEquals(oldSegment.getBinaryVersion(), newSegment.getBinaryVersion());
    }
}
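The anonymous segmentComparator above orders segments purely by their identifier strings. A hedged Java 8 equivalent of just that comparator, assuming getIdentifier() returns a String as its use with compareTo above suggests:

// Sketch: the same identifier-based ordering without the anonymous class.
final Comparator<DataSegment> segmentComparator =
    Comparator.comparing(DataSegment::getIdentifier);

// The two lists can then be sorted in place exactly as in the test:
oldSegments.sort(segmentComparator);
segments.sort(segmentComparator);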
Use of java.util.Comparator in project buck by facebook.
From the class JavaDepsFinder, method findDepsForBuildFiles.
private DepsForBuildFiles findDepsForBuildFiles(
    final TargetGraph graph,
    final DependencyInfo dependencyInfo,
    final Console console) {
    // For the rules that expect to have their deps generated, look through all of their required
    // symbols and try to find the build rule that provides each symbol. Store these build rules in
    // the depsForBuildFiles data structure.
    //
    // Currently, we process each rule with autodeps=True on a single thread. See the class overview
    // for DepsForBuildFiles about what it would take to do this work in a multi-threaded way.
    DepsForBuildFiles depsForBuildFiles = new DepsForBuildFiles();
    for (final TargetNode<?, ?> rule : dependencyInfo.rulesWithAutodeps) {
        final Set<BuildTarget> providedDeps = dependencyInfo.rulesWithAutodepsToProvidedDeps.get(rule);
        final Predicate<TargetNode<?, ?>> isVisibleDepNotAlreadyInProvidedDeps =
            provider -> provider.isVisibleTo(graph, rule)
                && !providedDeps.contains(provider.getBuildTarget());
        final boolean isJavaTestRule = rule.getDescription() instanceof JavaTestDescription;
        for (DependencyType type : DependencyType.values()) {
            HashMultimap<TargetNode<?, ?>, String> ruleToSymbolsMap;
            switch (type) {
                case DEPS:
                    ruleToSymbolsMap = dependencyInfo.ruleToRequiredSymbols;
                    break;
                case EXPORTED_DEPS:
                    ruleToSymbolsMap = dependencyInfo.ruleToExportedSymbols;
                    break;
                default:
                    throw new IllegalStateException("Unrecognized type: " + type);
            }
            final DependencyType typeOfDepToAdd;
            if (isJavaTestRule) {
                // java_test rules do not honor exported_deps: add all dependencies to the ordinary deps.
                typeOfDepToAdd = DependencyType.DEPS;
            } else {
                typeOfDepToAdd = type;
            }
            for (String requiredSymbol : ruleToSymbolsMap.get(rule)) {
                BuildTarget provider = findProviderForSymbolFromBuckConfig(requiredSymbol);
                if (provider != null) {
                    depsForBuildFiles.addDep(rule.getBuildTarget(), provider, typeOfDepToAdd);
                    continue;
                }
                Set<TargetNode<?, ?>> providers = dependencyInfo.symbolToProviders.get(requiredSymbol);
                SortedSet<TargetNode<?, ?>> candidateProviders = providers.stream()
                    .filter(isVisibleDepNotAlreadyInProvidedDeps)
                    .collect(MoreCollectors.toImmutableSortedSet(Comparator.<TargetNode<?, ?>>naturalOrder()));
                int numCandidates = candidateProviders.size();
                if (numCandidates == 1) {
                    depsForBuildFiles.addDep(
                        rule.getBuildTarget(),
                        Iterables.getOnlyElement(candidateProviders).getBuildTarget(),
                        typeOfDepToAdd);
                } else if (numCandidates > 1) {
                    // Warn the user that there is an ambiguity. This could be very common with macros that
                    // generate multiple versions of a java_library() with the same sources.
                    // If numProviders is 0, then hopefully the dep is provided by something the user
                    // hardcoded in the BUCK file.
                    console.printErrorText(String.format(
                        "WARNING: Multiple providers for %s: %s. "
                            + "Consider adding entry to .buckconfig to eliminate ambiguity:\n"
                            + "[autodeps]\n"
                            + "java-package-mappings = %s => %s",
                        requiredSymbol,
                        Joiner.on(", ").join(candidateProviders),
                        requiredSymbol,
                        Iterables.getFirst(candidateProviders, null)));
                } else {
                    // If there aren't any candidates, then see if there is a visible rule that can provide
                    // the symbol via its exported_deps. We make this a secondary check because we prefer to
                    // depend on the rule that defines the symbol directly rather than one of possibly many
                    // rules that provides it via its exported_deps.
                    ImmutableSortedSet<TargetNode<?, ?>> newCandidates = providers.stream()
                        .flatMap(candidate -> dependencyInfo.ruleToRulesThatExportIt.get(candidate).stream())
                        .filter(ruleThatExportsCandidate -> ruleThatExportsCandidate.isVisibleTo(graph, rule))
                        .collect(MoreCollectors.toImmutableSortedSet(Comparator.<TargetNode<?, ?>>naturalOrder()));
                    int numNewCandidates = newCandidates.size();
                    if (numNewCandidates == 1) {
                        depsForBuildFiles.addDep(
                            rule.getBuildTarget(),
                            Iterables.getOnlyElement(newCandidates).getBuildTarget(),
                            typeOfDepToAdd);
                    } else if (numNewCandidates > 1) {
                        console.printErrorText(String.format(
                            "WARNING: No providers found for '%s' for build rule %s, "
                                + "but there are multiple rules that export a rule to provide %s: %s",
                            requiredSymbol,
                            rule.getBuildTarget(),
                            requiredSymbol,
                            Joiner.on(", ").join(newCandidates)));
                    }
                    // In the case that numNewCandidates is 0, we assume that the user is taking
                    // responsibility for declaring a provider for the symbol by hardcoding it in the deps.
                }
            }
        }
    }
    return depsForBuildFiles;
}
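Both stream collections above pass Comparator.<TargetNode<?, ?>>naturalOrder() so that candidate providers come back in a deterministic sorted order. MoreCollectors.toImmutableSortedSet is a Buck-specific helper, so the self-contained sketch below substitutes a plain JDK TreeSet collector to show the same filter-then-sort-by-natural-order pattern; the build target strings are made up for illustration:

import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.stream.Collectors;

public class NaturalOrderCollectSketch {
    public static void main(String[] args) {
        // Hypothetical provider names standing in for TargetNode instances.
        List<String> providers = Arrays.asList("//lib:b", "//lib:a", "//lib:a", "//lib:c");

        // Filter, then collect into a set sorted by the elements' natural order,
        // mirroring how candidateProviders is built above.
        SortedSet<String> candidates = providers.stream()
            .filter(name -> !name.endsWith(":c"))
            .collect(Collectors.toCollection(
                () -> new TreeSet<String>(Comparator.<String>naturalOrder())));

        System.out.println(candidates); // [//lib:a, //lib:b]
    }
}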
Use of java.util.Comparator in project OpenRefine by OpenRefine.
From the class SortingRecordVisitor, method end.
@Override
public void end(Project project) {
    _visitor.start(project);
    Collections.sort(_records, new Comparator<Record>() {
        Project project;

        Comparator<Record> init(Project project) {
            this.project = project;
            return this;
        }

        @Override
        public int compare(Record o1, Record o2) {
            return SortingRecordVisitor.this.compare(project, o1, o1.recordIndex, o2, o2.recordIndex);
        }
    }.init(project));
    for (Record record : _records) {
        _visitor.visit(project, record);
    }
    _visitor.end(project);
}
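The init(project) idiom copies the method parameter into a field of the anonymous comparator, which pre-Java-8 code needed when the parameter was not declared final. A hedged sketch of the same sort on Java 8+, where the effectively final parameter can be captured directly by a lambda, assuming the same _records list and outer compare(...) used above:

// Sketch: capture the effectively final 'project' parameter in a lambda,
// making the init(...) field-copy trick unnecessary.
Collections.sort(_records, (o1, o2) ->
    SortingRecordVisitor.this.compare(project, o1, o1.recordIndex, o2, o2.recordIndex));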