Use of java.util.Comparator in project camel by apache.
The class GenericFileDefaultSorter, method sortByFileLanguage.
/**
 * Returns a new comparator that sorts by the given file language expression.
 *
 * @param context    the camel context
 * @param expression the file language expression
 * @param reverse    true to reverse the order
 * @param ignoreCase whether to ignore case when comparing strings
 * @param nested     nested comparator for sub-group sorting, may be null
 * @return the comparator
 */
public static Comparator<Exchange> sortByFileLanguage(final CamelContext context, final String expression,
        final boolean reverse, final boolean ignoreCase, final Comparator<Exchange> nested) {
    // the expression should be enclosed by ${ }
    String text = expression;
    if (!expression.startsWith("${")) {
        text = "${" + text;
    }
    if (!expression.endsWith("}")) {
        text = text + "}";
    }
    Language language = context.resolveLanguage("file");
    final Expression exp = language.createExpression(text);
    return new Comparator<Exchange>() {
        public int compare(Exchange o1, Exchange o2) {
            Object result1 = exp.evaluate(o1, Object.class);
            Object result2 = exp.evaluate(o2, Object.class);
            int answer = ObjectHelper.compare(result1, result2, ignoreCase);
            // if equal then sub sort by nested comparator
            if (answer == 0 && nested != null) {
                answer = nested.compare(o1, o2);
            }
            return reverse ? -1 * answer : answer;
        }

        public String toString() {
            return expression + (nested != null ? ";" + nested.toString() : "");
        }
    };
}
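The factory above is Camel-specific, but the comparator it builds follows a reusable shape: extract a key, compare, fall back to a nested comparator on ties, and reverse the final answer if requested. A minimal standalone sketch of that shape, with hypothetical names (KeyComparators, byKey) and no Camel dependencies:

import java.util.Comparator;
import java.util.function.Function;

public final class KeyComparators {
    // builds a comparator in the same shape as sortByFileLanguage:
    // compare by an extracted key, tie-break with a nested comparator, optionally reverse
    public static <T, K extends Comparable<K>> Comparator<T> byKey(
            final Function<T, K> keyExtractor, final boolean reverse, final Comparator<T> nested) {
        return new Comparator<T>() {
            public int compare(T o1, T o2) {
                int answer = keyExtractor.apply(o1).compareTo(keyExtractor.apply(o2));
                // if equal then sub sort by the nested comparator
                if (answer == 0 && nested != null) {
                    answer = nested.compare(o1, o2);
                }
                return reverse ? -answer : answer;
            }
        };
    }
}

Chaining two calls, for example byKey(File::getName, false, byKey(File::length, true, null)), mirrors the nested sub-group sorting the Camel comparator supports.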
Use of java.util.Comparator in project groovy by apache.
The class TupleConstructorASTTransformation, method createConstructor.
public static void createConstructor(AbstractASTTransformation xform, ClassNode cNode, boolean includeFields,
        boolean includeProperties, boolean includeSuperFields, boolean includeSuperProperties, boolean callSuper,
        boolean force, List<String> excludes, final List<String> includes, boolean useSetters, boolean defaults,
        boolean allNames, SourceUnit sourceUnit, ClosureExpression pre, ClosureExpression post) {
    // no processing if existing constructors found
    if (!cNode.getDeclaredConstructors().isEmpty() && !force)
        return;
    List<FieldNode> superList = new ArrayList<FieldNode>();
    if (includeSuperProperties) {
        superList.addAll(getSuperPropertyFields(cNode.getSuperClass()));
    }
    if (includeSuperFields) {
        superList.addAll(getSuperNonPropertyFields(cNode.getSuperClass()));
    }
    List<FieldNode> list = new ArrayList<FieldNode>();
    if (includeProperties) {
        list.addAll(getInstancePropertyFields(cNode));
    }
    if (includeFields) {
        list.addAll(getInstanceNonPropertyFields(cNode));
    }
    final List<Parameter> params = new ArrayList<Parameter>();
    final List<Expression> superParams = new ArrayList<Expression>();
    final BlockStatement preBody = new BlockStatement();
    boolean superInPre = false;
    if (pre != null) {
        superInPre = copyStatementsWithSuperAdjustment(pre, preBody);
        if (superInPre && callSuper) {
            xform.addError("Error during " + MY_TYPE_NAME + " processing, can't have a super call in 'pre' "
                    + "closure and also 'callSuper' enabled", cNode);
        }
    }
    final BlockStatement body = new BlockStatement();
    for (FieldNode fNode : superList) {
        String name = fNode.getName();
        if (shouldSkipUndefinedAware(name, excludes, includes, allNames))
            continue;
        params.add(createParam(fNode, name, defaults, xform));
        boolean hasSetter = cNode.getProperty(name) != null && !fNode.isFinal();
        if (callSuper) {
            superParams.add(varX(name));
        } else if (!superInPre) {
            if (useSetters && hasSetter) {
                body.addStatement(stmt(callThisX(getSetterName(name), varX(name))));
            } else {
                body.addStatement(assignS(propX(varX("this"), name), varX(name)));
            }
        }
    }
    if (callSuper) {
        body.addStatement(stmt(ctorX(ClassNode.SUPER, args(superParams))));
    }
    if (!preBody.isEmpty()) {
        body.addStatements(preBody.getStatements());
    }
    for (FieldNode fNode : list) {
        String name = fNode.getName();
        if (shouldSkipUndefinedAware(name, excludes, includes, allNames))
            continue;
        Parameter nextParam = createParam(fNode, name, defaults, xform);
        params.add(nextParam);
        boolean hasSetter = cNode.getProperty(name) != null && !fNode.isFinal();
        if (useSetters && hasSetter) {
            body.addStatement(stmt(callThisX(getSetterName(name), varX(nextParam))));
        } else {
            body.addStatement(assignS(propX(varX("this"), name), varX(nextParam)));
        }
    }
    if (post != null) {
        body.addStatement(post.getCode());
    }
    if (includes != null) {
        Comparator<Parameter> includeComparator = new Comparator<Parameter>() {
            public int compare(Parameter p1, Parameter p2) {
                return Integer.compare(includes.indexOf(p1.getName()), includes.indexOf(p2.getName()));
            }
        };
        Collections.sort(params, includeComparator);
    }
    cNode.addConstructor(new ConstructorNode(ACC_PUBLIC, params.toArray(new Parameter[params.size()]),
            ClassNode.EMPTY_ARRAY, body));
    if (sourceUnit != null && !body.isEmpty()) {
        VariableScopeVisitor scopeVisitor = new VariableScopeVisitor(sourceUnit);
        scopeVisitor.visitClass(cNode);
    }
    // add a map constructor variant when the tuple constructor's first parameter
    // is a Map, or if there is only one Map property (for backwards compatibility)
    if (!params.isEmpty() && defaults) {
        ClassNode firstParam = params.get(0).getType();
        if (params.size() > 1 || firstParam.equals(ClassHelper.OBJECT_TYPE)) {
            String message = "The class " + cNode.getName() + " was incorrectly initialized via the map constructor with null.";
            if (firstParam.equals(ClassHelper.MAP_TYPE)) {
                addMapConstructors(cNode, true, message);
            } else {
                ClassNode candidate = HMAP_TYPE;
                while (candidate != null) {
                    if (candidate.equals(firstParam)) {
                        addMapConstructors(cNode, true, message);
                        break;
                    }
                    candidate = candidate.getSuperClass();
                }
            }
        }
    }
}
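The includes comparator above exists so that the generated constructor's parameters follow the user-specified includes order rather than declaration order. The underlying trick, ordering elements by their index in a reference list, works on its own; a minimal sketch with plain strings (hypothetical class name):

import java.util.Arrays;
import java.util.Comparator;
import java.util.List;

public final class IncludeOrder {
    public static void main(String[] args) {
        final List<String> includes = Arrays.asList("id", "name", "age");
        List<String> params = Arrays.asList("age", "id", "name");
        // order elements by their position in the includes list;
        // indexOf returns -1 for names not listed, which sorts them first
        params.sort(Comparator.comparingInt(includes::indexOf));
        System.out.println(params); // [id, name, age]
    }
}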
Use of java.util.Comparator in project flink by apache.
The class PartitionITCase, method testRangePartitionerWithKeySelectorOnSequenceNestedDataWithOrders.
@Test
public void testRangePartitionerWithKeySelectorOnSequenceNestedDataWithOrders() throws Exception {
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    final DataSet<Tuple2<ComparablePojo, Long>> dataSet = env.generateSequence(0, 10000)
            .map(new MapFunction<Long, Tuple2<ComparablePojo, Long>>() {
                @Override
                public Tuple2<ComparablePojo, Long> map(Long value) throws Exception {
                    return new Tuple2<>(new ComparablePojo(value / 5000, value % 5000), value);
                }
            });
    final List<Tuple2<ComparablePojo, ComparablePojo>> collected = dataSet
            .partitionByRange(new KeySelector<Tuple2<ComparablePojo, Long>, ComparablePojo>() {
                @Override
                public ComparablePojo getKey(Tuple2<ComparablePojo, Long> value) throws Exception {
                    return value.f0;
                }
            })
            .withOrders(Order.ASCENDING)
            .mapPartition(new MinMaxSelector<>(new ComparablePojoComparator()))
            .mapPartition(new ExtractComparablePojo())
            .collect();
    final Comparator<Tuple2<ComparablePojo, ComparablePojo>> pojoComparator = new Comparator<Tuple2<ComparablePojo, ComparablePojo>>() {
        @Override
        public int compare(Tuple2<ComparablePojo, ComparablePojo> o1, Tuple2<ComparablePojo, ComparablePojo> o2) {
            // order the (min, max) pairs by their min element; comparing o1's min against
            // o2's max would violate the Comparator contract (compare(a, a) != 0)
            return o1.f0.compareTo(o2.f0);
        }
    };
    Collections.sort(collected, pojoComparator);
    ComparablePojo previousMax = null;
    for (Tuple2<ComparablePojo, ComparablePojo> element : collected) {
        assertTrue("Min element in each partition should be smaller than max.", element.f0.compareTo(element.f1) <= 0);
        if (previousMax == null) {
            previousMax = element.f1;
        } else {
            assertTrue("Partitions overlap. Previous max should be smaller than current min.", previousMax.compareTo(element.f0) < 0);
            if (previousMax.first.equals(element.f0.first)) {
                assertEquals("Ordering on the second field should be continuous.", previousMax.second - 1, element.f0.second.longValue());
            }
            previousMax = element.f1;
        }
    }
}
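Stripped of the Flink types, the verification sorts the collected (min, max) pairs and then walks them, checking that each partition's range is well formed and disjoint from its predecessor. A minimal sketch of that invariant check with plain longs (hypothetical Range and RangeCheck names):

import java.util.Arrays;
import java.util.Comparator;
import java.util.List;

public final class RangeCheck {
    static final class Range {
        final long min, max;
        Range(long min, long max) { this.min = min; this.max = max; }
    }

    public static void main(String[] args) {
        List<Range> partitions = Arrays.asList(new Range(5000, 9999), new Range(0, 4999));
        // sort partitions by their min element, then verify the invariants pairwise
        partitions.sort(Comparator.comparingLong(r -> r.min));
        Range previous = null;
        for (Range r : partitions) {
            if (r.min > r.max) throw new AssertionError("min element should not exceed max");
            if (previous != null && previous.max >= r.min) throw new AssertionError("partitions overlap");
            previous = r;
        }
        System.out.println("partitions are disjoint and well formed");
    }
}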
Use of java.util.Comparator in project hive by apache.
The class TestDummyTxnManager, method testDedupLockObjects.
@Test
public void testDedupLockObjects() {
    List<HiveLockObj> lockObjs = new ArrayList<HiveLockObj>();
    String path1 = "path1";
    String path2 = "path2";
    HiveLockObjectData lockData1 = new HiveLockObjectData("query1", "1", "IMPLICIT", "drop table table1");
    HiveLockObjectData lockData2 = new HiveLockObjectData("query1", "1", "IMPLICIT", "drop table table1");
    // Start with the following locks:
    // [path1, shared]
    // [path1, exclusive]
    // [path2, shared]
    // [path2, shared]
    // [path2, shared]
    lockObjs.add(new HiveLockObj(new HiveLockObject(path1, lockData1), HiveLockMode.SHARED));
    String name1 = lockObjs.get(lockObjs.size() - 1).getName();
    lockObjs.add(new HiveLockObj(new HiveLockObject(path1, lockData1), HiveLockMode.EXCLUSIVE));
    lockObjs.add(new HiveLockObj(new HiveLockObject(path2, lockData2), HiveLockMode.SHARED));
    String name2 = lockObjs.get(lockObjs.size() - 1).getName();
    lockObjs.add(new HiveLockObj(new HiveLockObject(path2, lockData2), HiveLockMode.SHARED));
    lockObjs.add(new HiveLockObj(new HiveLockObject(path2, lockData2), HiveLockMode.SHARED));
    DummyTxnManager.dedupLockObjects(lockObjs);
    // After dedup we should be left with 2 locks:
    // [path1, exclusive]
    // [path2, shared]
    Assert.assertEquals("Locks should be deduped", 2, lockObjs.size());
    Comparator<HiveLockObj> cmp = new Comparator<HiveLockObj>() {
        @Override
        public int compare(HiveLockObj lock1, HiveLockObj lock2) {
            return lock1.getName().compareTo(lock2.getName());
        }
    };
    Collections.sort(lockObjs, cmp);
    HiveLockObj lockObj = lockObjs.get(0);
    Assert.assertEquals(name1, lockObj.getName());
    Assert.assertEquals(HiveLockMode.EXCLUSIVE, lockObj.getMode());
    lockObj = lockObjs.get(1);
    Assert.assertEquals(name2, lockObj.getName());
    Assert.assertEquals(HiveLockMode.SHARED, lockObj.getMode());
}
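As the assertions show, the deduplication keeps a single lock per path and keeps the stronger mode when the same path appears with both SHARED and EXCLUSIVE. A minimal sketch of that merge with a hypothetical Lock class instead of Hive's lock types:

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public final class LockDedup {
    enum Mode { SHARED, EXCLUSIVE }

    static final class Lock {
        final String path;
        final Mode mode;
        Lock(String path, Mode mode) { this.path = path; this.mode = mode; }
    }

    // keep one lock per path; EXCLUSIVE wins over SHARED when both appear
    static List<Lock> dedup(List<Lock> locks) {
        Map<String, Lock> byPath = new LinkedHashMap<>();
        for (Lock lock : locks) {
            byPath.merge(lock.path, lock, (kept, next) -> next.mode == Mode.EXCLUSIVE ? next : kept);
        }
        return new ArrayList<>(byPath.values());
    }

    public static void main(String[] args) {
        List<Lock> locks = List.of(
                new Lock("path1", Mode.SHARED), new Lock("path1", Mode.EXCLUSIVE),
                new Lock("path2", Mode.SHARED), new Lock("path2", Mode.SHARED));
        for (Lock lock : dedup(locks)) {
            System.out.println(lock.path + " -> " + lock.mode); // path1 -> EXCLUSIVE, path2 -> SHARED
        }
    }
}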
Use of java.util.Comparator in project storm by apache.
The class DirectoryCleaner, method deleteOldestWhileTooLarge.
/**
 * If the total size of the files exceeds either the per-worker quota or the global quota,
 * Logviewer deletes the oldest inactive log files, either within a single worker directory
 * or across all worker directories. The parameter for_per_dir switches between the two
 * deletion modes.
 *
 * @param dirs the list of directories to be scanned for deletion
 * @param quota the per-dir quota, or the total quota for all directories
 * @param for_per_dir if true, deletion happens for a single dir; otherwise, for all directories globally
 * @param active_dirs used only for global deletion; active logs in these dirs are skipped
 * @return number of files deleted
 */
public int deleteOldestWhileTooLarge(List<File> dirs, long quota, boolean for_per_dir, Set<String> active_dirs) throws IOException {
    // max number of files to delete in each round
    final int PQ_SIZE = 1024;
    // max rounds of scanning the dirs
    final int MAX_ROUNDS = 512;
    long totalSize = 0;
    int deletedFiles = 0;
    for (File dir : dirs) {
        try (DirectoryStream<Path> stream = getStreamForDirectory(dir)) {
            for (Path path : stream) {
                File file = path.toFile();
                totalSize += file.length();
            }
        }
    }
    long toDeleteSize = totalSize - quota;
    if (toDeleteSize <= 0) {
        return deletedFiles;
    }
    Comparator<File> comparator = new Comparator<File>() {
        public int compare(File f1, File f2) {
            // newer files compare as smaller, so the newest file sits at the root of the heap
            return Long.compare(f2.lastModified(), f1.lastModified());
        }
    };
    // the oldest PQ_SIZE files in these directories will be placed in the PQ, with the newest at the root
    PriorityQueue<File> pq = new PriorityQueue<File>(PQ_SIZE, comparator);
    int round = 0;
    while (toDeleteSize > 0) {
        LOG.debug("To delete size is {}, start a new round of deletion, round: {}", toDeleteSize, round);
        for (File dir : dirs) {
            try (DirectoryStream<Path> stream = getStreamForDirectory(dir)) {
                for (Path path : stream) {
                    File file = path.toFile();
                    if (for_per_dir) {
                        if (ACTIVE_LOG_PATTERN.matcher(file.getName()).matches()) {
                            // skip active log files
                            continue;
                        }
                    } else {
                        // for global cleanup
                        if (active_dirs.contains(dir.getCanonicalPath())) {
                            // for an active worker's dir, skip only its active log files
                            if (ACTIVE_LOG_PATTERN.matcher(file.getName()).matches()) {
                                continue;
                            }
                        } else {
                            if (META_LOG_PATTERN.matcher(file.getName()).matches()) {
                                // skip yaml and pid files
                                continue;
                            }
                        }
                    }
                    if (pq.size() < PQ_SIZE) {
                        pq.offer(file);
                    } else if (file.lastModified() < pq.peek().lastModified()) {
                        // this file is older than the newest file kept so far: evict the newest, keep this one
                        pq.poll();
                        pq.offer(file);
                    }
                }
            }
        }
        // reverse the order of the elements in the PQ to delete files from oldest to newest
        Stack<File> stack = new Stack<File>();
        while (!pq.isEmpty()) {
            stack.push(pq.poll());
        }
        while (!stack.isEmpty() && toDeleteSize > 0) {
            File file = stack.pop();
            toDeleteSize -= file.length();
            LOG.info("Delete file: {}, size: {}, lastModified: {}", file.getCanonicalPath(), file.length(), file.lastModified());
            file.delete();
            deletedFiles++;
        }
        pq.clear();
        round++;
        if (round >= MAX_ROUNDS) {
            if (for_per_dir) {
                LOG.warn("Reached MAX_ROUNDS: {} during per-dir deletion; you may have too many files in "
                        + "a single directory: {}. The remaining files will be deleted in the next interval.",
                        MAX_ROUNDS, dirs.get(0).getCanonicalPath());
            } else {
                LOG.warn("Reached MAX_ROUNDS: {} during global deletion; you may have too many files. "
                        + "The remaining files will be deleted in the next interval.", MAX_ROUNDS);
            }
            break;
        }
    }
    return deletedFiles;
}
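The bounded PriorityQueue here is a standard top-K selection: rather than sorting every file by age, the code keeps a heap of at most PQ_SIZE candidates ordered newest-first and evicts the newest whenever an older file shows up, leaving the K oldest files. A minimal sketch of that selection step on plain timestamps (hypothetical TopK and kOldest names):

import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.PriorityQueue;

public final class TopK {
    // return the k smallest (oldest) timestamps without sorting the whole input
    static List<Long> kOldest(List<Long> timestamps, int k) {
        // max-heap: the largest (newest) kept timestamp sits at the root
        PriorityQueue<Long> pq = new PriorityQueue<>(k, Comparator.reverseOrder());
        for (long ts : timestamps) {
            if (pq.size() < k) {
                pq.offer(ts);
            } else if (ts < pq.peek()) {
                pq.poll(); // evict the newest kept entry to make room for an older one
                pq.offer(ts);
            }
        }
        List<Long> result = new ArrayList<>(pq);
        result.sort(null); // natural order: oldest first
        return result;
    }

    public static void main(String[] args) {
        System.out.println(kOldest(List.of(50L, 10L, 40L, 30L, 20L), 3)); // [10, 20, 30]
    }
}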