Use of java.util.TreeMap in project camel by Apache.
The class MailBinding, method extractHeadersFromMail.
protected Map<String, Object> extractHeadersFromMail(Message mailMessage, Exchange exchange) throws MessagingException, IOException {
    Map<String, Object> answer = new TreeMap<String, Object>(String.CASE_INSENSITIVE_ORDER);
    Enumeration<?> names = mailMessage.getAllHeaders();
    while (names.hasMoreElements()) {
        Header header = (Header) names.nextElement();
        String value = header.getValue();
        if (headerFilterStrategy != null && !headerFilterStrategy.applyFilterToExternalHeaders(header.getName(), value, exchange)) {
            CollectionHelper.appendValue(answer, header.getName(), value);
        }
    }
    // if the message is a multipart message, do not set the content type to multipart/*
    if (((MailEndpoint) exchange.getFromEndpoint()).getConfiguration().isMapMailMessage()) {
        Object content = mailMessage.getContent();
        if (content instanceof MimeMultipart) {
            MimeMultipart multipart = (MimeMultipart) content;
            int size = multipart.getCount();
            for (int i = 0; i < size; i++) {
                BodyPart part = multipart.getBodyPart(i);
                content = part.getContent();
                // in case of nested multiparts iterate into them
                while (content instanceof MimeMultipart) {
                    if (multipart.getCount() < 1) {
                        break;
                    }
                    part = ((MimeMultipart) content).getBodyPart(0);
                    content = part.getContent();
                }
                if (part.getContentType().toLowerCase().startsWith("text")) {
                    answer.put(Exchange.CONTENT_TYPE, part.getContentType());
                    break;
                }
            }
        }
    }
    return answer;
}
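The detail worth noting is the TreeMap built with String.CASE_INSENSITIVE_ORDER, which gives mail headers case-insensitive lookup while keeping them sorted. A minimal standalone sketch of that pattern; the class name and sample headers are illustrative, not taken from the Camel source:

import java.util.Map;
import java.util.TreeMap;

public class CaseInsensitiveHeaders {
    public static void main(String[] args) {
        // A TreeMap ordered by a case-insensitive comparator: "Content-Type"
        // and "content-type" resolve to the same entry.
        Map<String, Object> headers = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
        headers.put("Content-Type", "text/plain");
        headers.put("Subject", "hello");

        System.out.println(headers.get("content-type"));   // text/plain
        System.out.println(headers.containsKey("SUBJECT")); // true
    }
}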
Use of java.util.TreeMap in project flink by Apache.
The class FlinkRelDecorrelator, method decorrelateRel.
/**
* Rewrite LogicalJoin.
*
* @param rel LogicalJoin
*/
public Frame decorrelateRel(LogicalJoin rel) {
    //
    // Rewrite logic:
    //
    // 1. rewrite join condition.
    // 2. map output positions and produce cor vars if any.
    //
    final RelNode oldLeft = rel.getInput(0);
    final RelNode oldRight = rel.getInput(1);
    final Frame leftFrame = getInvoke(oldLeft, rel);
    final Frame rightFrame = getInvoke(oldRight, rel);
    if (leftFrame == null || rightFrame == null) {
        // If any input has not been rewritten, do not rewrite this rel.
        return null;
    }
    final RelNode newJoin = LogicalJoin.create(leftFrame.r, rightFrame.r, decorrelateExpr(rel.getCondition()), ImmutableSet.<CorrelationId>of(), rel.getJoinType());
    // Create the mapping between the output of the old correlation rel
    // and the new join rel
    Map<Integer, Integer> mapOldToNewOutputPos = Maps.newHashMap();
    int oldLeftFieldCount = oldLeft.getRowType().getFieldCount();
    int newLeftFieldCount = leftFrame.r.getRowType().getFieldCount();
    int oldRightFieldCount = oldRight.getRowType().getFieldCount();
    assert rel.getRowType().getFieldCount() == oldLeftFieldCount + oldRightFieldCount;
    // Left input positions are not changed.
    mapOldToNewOutputPos.putAll(leftFrame.oldToNewOutputPos);
    // Right input positions are shifted by newLeftFieldCount.
    for (int i = 0; i < oldRightFieldCount; i++) {
        mapOldToNewOutputPos.put(i + oldLeftFieldCount, rightFrame.oldToNewOutputPos.get(i) + newLeftFieldCount);
    }
    final SortedMap<Correlation, Integer> mapCorVarToOutputPos = new TreeMap<>(leftFrame.corVarOutputPos);
    // Right input positions are shifted by newLeftFieldCount.
    for (Map.Entry<Correlation, Integer> entry : rightFrame.corVarOutputPos.entrySet()) {
        mapCorVarToOutputPos.put(entry.getKey(), entry.getValue() + newLeftFieldCount);
    }
    return register(rel, newJoin, mapOldToNewOutputPos, mapCorVarToOutputPos);
}
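The TreeMap copy constructor is what merges the correlated-variable positions here: the left frame's sorted map is copied as-is, then the right frame's entries are re-added with their positions shifted past the new left fields. A small self-contained sketch of that copy-then-shift merge; the String keys stand in for Correlation objects and the values are made up:

import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;

public class ShiftedMerge {
    public static void main(String[] args) {
        SortedMap<String, Integer> leftPositions = new TreeMap<>();
        leftPositions.put("$cor0.a", 1);

        SortedMap<String, Integer> rightPositions = new TreeMap<>();
        rightPositions.put("$cor0.b", 0);

        int newLeftFieldCount = 3;

        // Copy the left map, then add the right entries shifted past the left fields.
        SortedMap<String, Integer> merged = new TreeMap<>(leftPositions);
        for (Map.Entry<String, Integer> entry : rightPositions.entrySet()) {
            merged.put(entry.getKey(), entry.getValue() + newLeftFieldCount);
        }
        System.out.println(merged); // {$cor0.a=1, $cor0.b=3}
    }
}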
Use of java.util.TreeMap in project flink by Apache.
The class FlinkRelDecorrelator, method decorrelateRel.
/**
* Rewrites a {@link LogicalAggregate}.
*
* @param rel Aggregate to rewrite
*/
public Frame decorrelateRel(LogicalAggregate rel) {
    if (rel.getGroupType() != Aggregate.Group.SIMPLE) {
        throw new AssertionError(Bug.CALCITE_461_FIXED);
    }
    // Aggregate itself should not reference cor vars.
    assert !cm.mapRefRelToCorVar.containsKey(rel);
    final RelNode oldInput = rel.getInput();
    final Frame frame = getInvoke(oldInput, rel);
    if (frame == null) {
        // If input has not been rewritten, do not rewrite this rel.
        return null;
    }
    final RelNode newInput = frame.r;
    // map from newInput positions to output positions of the new Project created below
    Map<Integer, Integer> mapNewInputToProjOutputPos = Maps.newHashMap();
    final int oldGroupKeyCount = rel.getGroupSet().cardinality();
    // Project projects the original expressions,
    // plus any correlated variables the input wants to pass along.
    final List<Pair<RexNode, String>> projects = Lists.newArrayList();
    List<RelDataTypeField> newInputOutput = newInput.getRowType().getFieldList();
    int newPos = 0;
    // oldInput has the original group by keys in the front.
    final NavigableMap<Integer, RexLiteral> omittedConstants = new TreeMap<>();
    for (int i = 0; i < oldGroupKeyCount; i++) {
        final RexLiteral constant = projectedLiteral(newInput, i);
        if (constant != null) {
            // Exclude constants. Aggregate({true}) occurs because Aggregate({})
            // would generate 1 row even when applied to an empty table.
            omittedConstants.put(i, constant);
            continue;
        }
        int newInputPos = frame.oldToNewOutputPos.get(i);
        projects.add(RexInputRef.of2(newInputPos, newInputOutput));
        mapNewInputToProjOutputPos.put(newInputPos, newPos);
        newPos++;
    }
    final SortedMap<Correlation, Integer> mapCorVarToOutputPos = new TreeMap<>();
    if (!frame.corVarOutputPos.isEmpty()) {
        // If the input produces correlated variables, append them to the
        // projection, starting at position oldGroupKeyCount.
        for (Map.Entry<Correlation, Integer> entry : frame.corVarOutputPos.entrySet()) {
            projects.add(RexInputRef.of2(entry.getValue(), newInputOutput));
            mapCorVarToOutputPos.put(entry.getKey(), newPos);
            mapNewInputToProjOutputPos.put(entry.getValue(), newPos);
            newPos++;
        }
    }
    // add the remaining fields
    final int newGroupKeyCount = newPos;
    for (int i = 0; i < newInputOutput.size(); i++) {
        if (!mapNewInputToProjOutputPos.containsKey(i)) {
            projects.add(RexInputRef.of2(i, newInputOutput));
            mapNewInputToProjOutputPos.put(i, newPos);
            newPos++;
        }
    }
    assert newPos == newInputOutput.size();
    // This Project will be what the old input maps to,
    // replacing any previous mapping from old input.
    RelNode newProject = RelOptUtil.createProject(newInput, projects, false);
    // update mappings:
    //   oldInput ----> newInput
    //
    //                  newProject
    //                      |
    //   oldInput ----> newInput
    //
    // is transformed to
    //
    //   oldInput ----> newProject
    //                      |
    //                  newInput
    Map<Integer, Integer> combinedMap = Maps.newHashMap();
    for (Integer oldInputPos : frame.oldToNewOutputPos.keySet()) {
        combinedMap.put(oldInputPos, mapNewInputToProjOutputPos.get(frame.oldToNewOutputPos.get(oldInputPos)));
    }
    register(oldInput, newProject, combinedMap, mapCorVarToOutputPos);
    // now it's time to rewrite the Aggregate
    final ImmutableBitSet newGroupSet = ImmutableBitSet.range(newGroupKeyCount);
    List<AggregateCall> newAggCalls = Lists.newArrayList();
    List<AggregateCall> oldAggCalls = rel.getAggCallList();
    int oldInputOutputFieldCount = rel.getGroupSet().cardinality();
    int newInputOutputFieldCount = newGroupSet.cardinality();
    int i = -1;
    for (AggregateCall oldAggCall : oldAggCalls) {
        ++i;
        List<Integer> oldAggArgs = oldAggCall.getArgList();
        List<Integer> aggArgs = Lists.newArrayList();
        // The Aggregate does not change input ordering, so combinedMap can be
        // used to derive the new position for each argument.
        for (int oldPos : oldAggArgs) {
            aggArgs.add(combinedMap.get(oldPos));
        }
        final int filterArg = oldAggCall.filterArg < 0 ? oldAggCall.filterArg : combinedMap.get(oldAggCall.filterArg);
        newAggCalls.add(oldAggCall.adaptTo(newProject, aggArgs, filterArg, oldGroupKeyCount, newGroupKeyCount));
        // The old to new output position mapping will be the same as that
        // of newProject, plus any aggregates that the oldAgg produces.
        combinedMap.put(oldInputOutputFieldCount + i, newInputOutputFieldCount + i);
    }
    relBuilder.push(LogicalAggregate.create(newProject, false, newGroupSet, null, newAggCalls));
    if (!omittedConstants.isEmpty()) {
        final List<RexNode> postProjects = new ArrayList<>(relBuilder.fields());
        for (Map.Entry<Integer, RexLiteral> entry : omittedConstants.descendingMap().entrySet()) {
            postProjects.add(entry.getKey() + frame.corVarOutputPos.size(), entry.getValue());
        }
        relBuilder.project(postProjects);
    }
    // Aggregate does not change input ordering, so the correlated variables
    // are located at the same position as in the input newProject.
    return register(rel, relBuilder.build(), combinedMap, mapCorVarToOutputPos);
}
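The NavigableMap earns its keep in the constant handling above: omitted group-key constants are recorded by position in a TreeMap and later spliced back into the projection via descendingMap(), so each insertion happens at an index that is not disturbed by the insertions still to come. A self-contained sketch of why the descending iteration order matters when splicing entries back into a list; the strings and positions are illustrative, not taken from the Flink code:

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.NavigableMap;
import java.util.TreeMap;

public class DescendingReinsert {
    public static void main(String[] args) {
        // Fields left after the constants were dropped.
        List<String> fields = new ArrayList<>(List.of("b", "d"));

        // Key = index in the constant-free list before which to re-insert.
        NavigableMap<Integer, String> omittedConstants = new TreeMap<>();
        omittedConstants.put(0, "CONST_A");
        omittedConstants.put(1, "CONST_C");

        // Insert from the highest index down: each insertion only shifts
        // elements to its right, so the indices still to be used stay valid.
        for (Map.Entry<Integer, String> entry : omittedConstants.descendingMap().entrySet()) {
            fields.add(entry.getKey(), entry.getValue());
        }

        System.out.println(fields); // [CONST_A, b, CONST_C, d]
    }
}

Inserting in ascending order instead would shift the later target indices and misplace the constants.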
Use of java.util.TreeMap in project groovy by Apache.
The class BindPath, method updateLocalSyntheticProperties.
public synchronized void updateLocalSyntheticProperties(Map<String, TriggerBinding> synthetics) {
    localSynthetics = null;
    String endName = "#" + propertyName;
    for (Map.Entry<String, TriggerBinding> syntheticEntry : synthetics.entrySet()) {
        if (syntheticEntry.getKey().endsWith(endName)) {
            if (localSynthetics == null) {
                localSynthetics = new TreeMap();
            }
            localSynthetics.put(syntheticEntry.getKey(), syntheticEntry.getValue());
        }
    }
}
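Here the TreeMap is created lazily and only holds the synthetic trigger bindings whose keys end with "#" + propertyName, keeping them sorted by key. A minimal standalone sketch of that filter-into-a-sorted-map pattern; plain String values stand in for TriggerBinding and the sample keys are made up:

import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;

public class SuffixFilter {
    public static void main(String[] args) {
        Map<String, String> synthetics = Map.of(
                "javax.swing.JTextField#text", "textBinding",
                "javax.swing.JSlider#value", "valueBinding",
                "javax.swing.JTextArea#text", "textAreaBinding");

        String propertyName = "text";
        String endName = "#" + propertyName;

        // Lazily created, so callers with no matching synthetics pay nothing.
        SortedMap<String, String> localSynthetics = null;
        for (Map.Entry<String, String> entry : synthetics.entrySet()) {
            if (entry.getKey().endsWith(endName)) {
                if (localSynthetics == null) {
                    localSynthetics = new TreeMap<>();
                }
                localSynthetics.put(entry.getKey(), entry.getValue());
            }
        }

        // Matching entries come back in sorted key order.
        System.out.println(localSynthetics);
    }
}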
Use of java.util.TreeMap in project hadoop by Apache.
The class TestDFSUpgradeFromImage, method verifyDir.
private void verifyDir(DistributedFileSystem dfs, Path dir, CRC32 overallChecksum) throws IOException {
    FileStatus[] fileArr = dfs.listStatus(dir);
    TreeMap<Path, Boolean> fileMap = new TreeMap<Path, Boolean>();
    for (FileStatus file : fileArr) {
        fileMap.put(file.getPath(), Boolean.valueOf(file.isDirectory()));
    }
    for (Iterator<Path> it = fileMap.keySet().iterator(); it.hasNext(); ) {
        Path path = it.next();
        boolean isDir = fileMap.get(path);
        String pathName = path.toUri().getPath();
        overallChecksum.update(pathName.getBytes());
        if (isDir) {
            verifyDir(dfs, path, overallChecksum);
        } else {
            // this is not a directory. Checksum the file data.
            CRC32 fileCRC = new CRC32();
            FSInputStream in = dfsOpenFileWithRetries(dfs, pathName);
            byte[] buf = new byte[4096];
            int nRead = 0;
            while ((nRead = in.read(buf, 0, buf.length)) > 0) {
                fileCRC.update(buf, 0, nRead);
            }
            verifyChecksum(pathName, fileCRC.getValue());
        }
    }
}
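What the TreeMap buys in this test is a deterministic traversal: listStatus may return children in any order, but iterating the TreeMap's key set visits the paths in sorted order, so the rolling CRC32 over path names and file data is reproducible across runs. A small sketch of that property with plain strings in place of Path objects; the file names are made up:

import java.util.Map;
import java.util.TreeMap;
import java.util.zip.CRC32;

public class DeterministicChecksum {
    public static void main(String[] args) {
        // Entries inserted in arbitrary (listing) order ...
        Map<String, Boolean> fileMap = new TreeMap<>();
        fileMap.put("/data/b.txt", false);
        fileMap.put("/data/a.txt", false);
        fileMap.put("/data/sub", true);

        // ... but iterated in sorted key order, so the rolling checksum is
        // the same no matter how the directory listing was ordered.
        CRC32 overallChecksum = new CRC32();
        for (String pathName : fileMap.keySet()) {
            overallChecksum.update(pathName.getBytes());
        }
        System.out.println(Long.toHexString(overallChecksum.getValue()));
    }
}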