Use of org.apache.commons.lang3.tuple.Pair in project PneumaticCraft by MineMaarten.
Class TileEntityPressureTube, method updateEntity.
@Override
public void updateEntity() {
    super.updateEntity();
    // Tick every installed tube module.
    for (TubeModule module : modules) {
        if (module != null) {
            module.shouldDrop = true;
            module.update();
        }
    }
    List<Pair<ForgeDirection, IAirHandler>> teList = getConnectedPneumatics();
    boolean hasModules = false;
    for (TubeModule module : modules) {
        if (module != null) {
            hasModules = true;
            break;
        }
    }
    // A bare tube (no modules) with exactly one non-special connection leaks air from the
    // opposite, unconnected side; this only runs on the server.
    if (!hasModules && teList.size() - specialConnectedHandlers.size() == 1 && !worldObj.isRemote) {
        for (Pair<ForgeDirection, IAirHandler> entry : teList) {
            if (entry.getKey() != ForgeDirection.UNKNOWN && modules[entry.getKey().getOpposite().ordinal()] == null && isConnectedTo(entry.getKey().getOpposite()))
                airLeak(entry.getKey().getOpposite());
        }
    }
}
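In the snippet above, getConnectedPneumatics() returns each connected side paired with its air handler, and the loop reads the side back through Pair's Map.Entry view (getKey()). Below is a minimal, self-contained sketch of that (direction, handler) pairing pattern; Direction and AirHandler are hypothetical stand-ins for the Forge/PneumaticCraft types ForgeDirection and IAirHandler, not the real API.

import java.util.ArrayList;
import java.util.List;

import org.apache.commons.lang3.tuple.Pair;

public class TubeNeighborsSketch {

    // Hypothetical stand-in for ForgeDirection.
    enum Direction { NORTH, SOUTH, EAST, WEST, UP, DOWN }

    // Hypothetical stand-in for IAirHandler.
    interface AirHandler {
        void leakAir();
    }

    public static void main(String[] args) {
        List<Pair<Direction, AirHandler>> neighbors = new ArrayList<>();
        AirHandler northHandler = () -> System.out.println("leaking air on the north side");
        neighbors.add(Pair.of(Direction.NORTH, northHandler));

        // Pair implements Map.Entry, so getKey()/getValue() mirror getLeft()/getRight().
        for (Pair<Direction, AirHandler> entry : neighbors) {
            System.out.println("connected on side " + entry.getKey());
            entry.getValue().leakAir();
        }
    }
}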
Use of org.apache.commons.lang3.tuple.Pair in project DataX by alibaba.
Class AdsInsertUtil, method getColumnMetaData.
public static Map<String, Pair<Integer, String>> getColumnMetaData(TableInfo tableInfo, List<String> userColumns) {
    Map<String, Pair<Integer, String>> columnMetaData = new HashMap<String, Pair<Integer, String>>();
    List<ColumnInfo> columnInfoList = tableInfo.getColumns();
    for (String column : userColumns) {
        // Strip the ADS quote characters from a quoted column name.
        if (column.startsWith(Constant.ADS_QUOTE_CHARACTER) && column.endsWith(Constant.ADS_QUOTE_CHARACTER)) {
            column = column.substring(1, column.length() - 1);
        }
        for (ColumnInfo columnInfo : columnInfoList) {
            if (column.equalsIgnoreCase(columnInfo.getName())) {
                // Record the column's (java.sql.Types code, database type name) pair.
                Pair<Integer, String> eachPair = new ImmutablePair<Integer, String>(columnInfo.getDataType().sqlType, columnInfo.getDataType().name);
                columnMetaData.put(columnInfo.getName(), eachPair);
            }
        }
    }
    return columnMetaData;
}
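The returned map keys each column name to a pair whose left element is the java.sql.Types code and whose right element is the database's own type name. A small illustrative consumer is shown below; the column names and types are invented for the example.

import java.sql.Types;
import java.util.HashMap;
import java.util.Map;

import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.Pair;

public class ColumnMetaSketch {

    public static void main(String[] args) {
        // Same shape as the map built by getColumnMetaData: name -> (sqlType, typeName).
        Map<String, Pair<Integer, String>> columnMetaData = new HashMap<>();
        columnMetaData.put("id", new ImmutablePair<>(Types.BIGINT, "bigint"));
        columnMetaData.put("name", new ImmutablePair<>(Types.VARCHAR, "varchar"));

        for (Map.Entry<String, Pair<Integer, String>> e : columnMetaData.entrySet()) {
            Pair<Integer, String> meta = e.getValue();
            System.out.printf("column=%s sqlType=%d typeName=%s%n",
                    e.getKey(), meta.getLeft(), meta.getRight());
        }
    }
}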
Use of org.apache.commons.lang3.tuple.Pair in project DataX by alibaba.
Class SingleTableSplitUtil, method genSplitSqlForOracle.
/**
 * Supports splitting on both Number and String primary keys.
 */
public static List<String> genSplitSqlForOracle(String splitPK, String table, String where, Configuration configuration, int adviceNum) {
    if (adviceNum < 1) {
        // "The number of splits must not be less than 1; here adviceNum=[%s]."
        throw new IllegalArgumentException(String.format("切分份数不能小于1. 此处:adviceNum=[%s].", adviceNum));
    } else if (adviceNum == 1) {
        return null;
    }
    String whereSql = String.format("%s IS NOT NULL", splitPK);
    if (StringUtils.isNotBlank(where)) {
        whereSql = String.format(" WHERE (%s) AND (%s) ", whereSql, where);
    } else {
        whereSql = String.format(" WHERE (%s) ", whereSql);
    }
    Double percentage = configuration.getDouble(Key.SAMPLE_PERCENTAGE, 0.1);
    String sampleSqlTemplate = "SELECT * FROM ( SELECT %s FROM %s SAMPLE (%s) %s ORDER BY DBMS_RANDOM.VALUE) WHERE ROWNUM <= %s ORDER by %s ASC";
    String splitSql = String.format(sampleSqlTemplate, splitPK, table, percentage, whereSql, adviceNum, splitPK);
    int fetchSize = configuration.getInt(Constant.FETCH_SIZE, 32);
    String jdbcURL = configuration.getString(Key.JDBC_URL);
    String username = configuration.getString(Key.USERNAME);
    String password = configuration.getString(Key.PASSWORD);
    Connection conn = DBUtil.getConnection(DATABASE_TYPE, jdbcURL, username, password);
    LOG.info("split pk [sql={}] is running... ", splitSql);
    ResultSet rs = null;
    List<Pair<Object, Integer>> splitedRange = new ArrayList<Pair<Object, Integer>>();
    try {
        try {
            rs = DBUtil.query(conn, splitSql, fetchSize);
        } catch (Exception e) {
            throw RdbmsException.asQueryException(DATABASE_TYPE, e, splitSql, table, username);
        }
        if (configuration != null) {
            configuration.set(Constant.PK_TYPE, Constant.PK_TYPE_MONTECARLO);
        }
        ResultSetMetaData rsMetaData = rs.getMetaData();
        // Collect each sampled split point together with its JDBC column type.
        while (DBUtil.asyncResultSetNext(rs)) {
            ImmutablePair<Object, Integer> eachPoint = new ImmutablePair<Object, Integer>(rs.getObject(1), rsMetaData.getColumnType(1));
            splitedRange.add(eachPoint);
        }
    } catch (DataXException e) {
        throw e;
    } catch (Exception e) {
        // "An error occurred while DataX tried to split the table; please check and adjust your configuration."
        throw DataXException.asDataXException(DBUtilErrorCode.ILLEGAL_SPLIT_PK, "DataX尝试切分表发生错误. 请检查您的配置并作出修改.", e);
    } finally {
        DBUtil.closeDBResources(rs, null, null);
    }
    LOG.debug(JSON.toJSONString(splitedRange));
    List<String> rangeSql = new ArrayList<String>();
    int splitedRangeSize = splitedRange.size();
    // demo: Parameter rangeResult can not be null and its length can not be < 2. detail: rangeResult=[24999930].
    if (splitedRangeSize >= 2) {
        // warn: Oracle NUMBER is treated as a long type here
        if (isLongType(splitedRange.get(0).getRight())) {
            BigInteger[] integerPoints = new BigInteger[splitedRange.size()];
            for (int i = 0; i < splitedRangeSize; i++) {
                integerPoints[i] = new BigInteger(splitedRange.get(i).getLeft().toString());
            }
            rangeSql.addAll(RdbmsRangeSplitWrap.wrapRange(integerPoints, splitPK));
            // it's ok if splitedRangeSize is 1
            rangeSql.add(RdbmsRangeSplitWrap.wrapFirstLastPoint(integerPoints[0], integerPoints[splitedRangeSize - 1], splitPK));
        } else if (isStringType(splitedRange.get(0).getRight())) {
            // warn: treated as string type
            String[] stringPoints = new String[splitedRange.size()];
            for (int i = 0; i < splitedRangeSize; i++) {
                stringPoints[i] = new String(splitedRange.get(i).getLeft().toString());
            }
            rangeSql.addAll(RdbmsRangeSplitWrap.wrapRange(stringPoints, splitPK, "'", DATABASE_TYPE));
            // it's ok if splitedRangeSize is 1
            rangeSql.add(RdbmsRangeSplitWrap.wrapFirstLastPoint(stringPoints[0], stringPoints[splitedRangeSize - 1], splitPK, "'", DATABASE_TYPE));
        } else {
            // "The configured split key (splitPk) is invalid because its type is not supported. DataX supports
            //  a single split key of integer or string type; try another split key or contact your DBA."
            throw DataXException.asDataXException(DBUtilErrorCode.ILLEGAL_SPLIT_PK, "您配置的DataX切分主键(splitPk)有误. 因为您配置的切分主键(splitPk) 类型 DataX 不支持. DataX 仅支持切分主键为一个,并且类型为整数或者字符串类型. 请尝试使用其他的切分主键或者联系 DBA 进行处理.");
        }
    }
    return rangeSql;
}
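The sampled points collected above are handed to RdbmsRangeSplitWrap, a DataX-internal helper, to turn adjacent split points into WHERE-clause ranges. The sketch below only illustrates that idea with a hypothetical wrapRange; it is not the DataX implementation, and the sample values are made up.

import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class RangeSplitSketch {

    // Sort the sampled split points and emit one predicate per adjacent pair.
    static List<String> wrapRange(BigInteger[] points, String splitPk) {
        BigInteger[] sorted = points.clone();
        Arrays.sort(sorted);
        List<String> ranges = new ArrayList<>();
        for (int i = 0; i < sorted.length - 1; i++) {
            ranges.add(String.format("(%s >= %s AND %s < %s)",
                    splitPk, sorted[i], splitPk, sorted[i + 1]));
        }
        return ranges;
    }

    public static void main(String[] args) {
        BigInteger[] sampled = {
                BigInteger.valueOf(100), BigInteger.valueOf(42), BigInteger.valueOf(7)
        };
        wrapRange(sampled, "id").forEach(System.out::println);
    }
}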
Use of org.apache.commons.lang3.tuple.Pair in project pinot by linkedin.
Class StarTreeJsonNode, method build.
private int build(StarTreeIndexNodeInterf indexNode, StarTreeJsonNode json) {
    Iterator<? extends StarTreeIndexNodeInterf> childrenIterator = indexNode.getChildrenIterator();
    if (!childrenIterator.hasNext()) {
        return 0;
    }
    int childDimensionId = indexNode.getChildDimensionName();
    String childDimensionName = dimensionNameToIndexMap.inverse().get(childDimensionId);
    Dictionary dictionary = dictionaries.get(childDimensionName);
    int totalChildNodes = indexNode.getNumChildren();
    // Order (dimension value, subtree size) pairs by descending subtree size.
    Comparator<Pair<String, Integer>> comparator = new Comparator<Pair<String, Integer>>() {

        @Override
        public int compare(Pair<String, Integer> o1, Pair<String, Integer> o2) {
            return -1 * Integer.compare(o1.getRight(), o2.getRight());
        }
    };
    MinMaxPriorityQueue<Pair<String, Integer>> queue = MinMaxPriorityQueue.orderedBy(comparator).maximumSize(MAX_CHILDREN).create();
    StarTreeJsonNode allNode = null;
    while (childrenIterator.hasNext()) {
        StarTreeIndexNodeInterf childIndexNode = childrenIterator.next();
        int childDimensionValueId = childIndexNode.getDimensionValue();
        String childDimensionValue = "ALL";
        if (childDimensionValueId != StarTreeIndexNodeInterf.ALL) {
            childDimensionValue = dictionary.get(childDimensionValueId).toString();
        }
        StarTreeJsonNode childJson = new StarTreeJsonNode(childDimensionValue);
        totalChildNodes += build(childIndexNode, childJson);
        if (childDimensionValueId != StarTreeIndexNodeInterf.ALL) {
            json.addChild(childJson);
            queue.add(ImmutablePair.of(childDimensionValue, totalChildNodes));
        } else {
            allNode = childJson;
        }
    }
    // Put the ALL node at the end.
    if (allNode != null) {
        json.addChild(allNode);
    }
    // If the tree is too wide, keep only the top-K children tracked in the queue (plus ALL).
    if (totalChildNodes > MAX_CHILDREN) {
        Iterator<Pair<String, Integer>> qIterator = queue.iterator();
        Set<String> topKDimensions = new HashSet<>();
        topKDimensions.add("ALL");
        while (qIterator.hasNext()) {
            topKDimensions.add(qIterator.next().getKey());
        }
        Iterator<StarTreeJsonNode> iterator = json.getChildren().iterator();
        while (iterator.hasNext()) {
            StarTreeJsonNode next = iterator.next();
            if (!topKDimensions.contains(next.getName())) {
                iterator.remove();
            }
        }
    }
    return totalChildNodes;
}
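The MinMaxPriorityQueue capped at MAX_CHILDREN is what keeps only the children with the largest subtree sizes; the rest are pruned afterwards. A self-contained sketch of that bounded top-K pattern follows (it needs Guava, which the original code already uses); the dimension values and counts are invented for the example.

import java.util.Comparator;

import com.google.common.collect.MinMaxPriorityQueue;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.Pair;

public class TopKChildrenSketch {

    private static final int MAX_CHILDREN = 2;

    public static void main(String[] args) {
        // Largest counts first, mirroring the comparator in build().
        Comparator<Pair<String, Integer>> byCountDesc =
                (o1, o2) -> -1 * Integer.compare(o1.getRight(), o2.getRight());

        MinMaxPriorityQueue<Pair<String, Integer>> queue =
                MinMaxPriorityQueue.orderedBy(byCountDesc).maximumSize(MAX_CHILDREN).create();

        queue.add(ImmutablePair.of("us", 120));
        queue.add(ImmutablePair.of("uk", 45));
        queue.add(ImmutablePair.of("de", 80)); // exceeds maximumSize, so the smallest count ("uk") is evicted

        // Only the entries with the largest counts remain; iteration order is unspecified.
        for (Pair<String, Integer> child : queue) {
            System.out.println(child.getKey() + " -> " + child.getRight());
        }
    }
}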
Use of org.apache.commons.lang3.tuple.Pair in project presto by prestodb.
Class AccumuloClient, method autoGenerateMapping.
/**
 * Auto-generates the mapping of Presto column name to Accumulo family/qualifier, respecting the locality groups (if any).
 *
 * @param columns Presto columns for the table
 * @param groups Mapping of locality groups to a set of Presto columns, or null if none
 * @return Column mappings
 */
private static Map<String, Pair<String, String>> autoGenerateMapping(List<ColumnMetadata> columns, Optional<Map<String, Set<String>>> groups) {
    Map<String, Pair<String, String>> mapping = new HashMap<>();
    for (ColumnMetadata column : columns) {
        Optional<String> family = getColumnLocalityGroup(column.getName(), groups);
        mapping.put(column.getName(), Pair.of(family.orElse(column.getName()), column.getName()));
    }
    return mapping;
}
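Each value in the returned map is a (column family, column qualifier) pair for Accumulo; when a column belongs to no locality group, the family falls back to the column name itself. A small illustrative reader is below; the column names are examples only.

import java.util.LinkedHashMap;
import java.util.Map;

import org.apache.commons.lang3.tuple.Pair;

public class ColumnMappingSketch {

    public static void main(String[] args) {
        // Same shape as autoGenerateMapping's result: Presto column -> (family, qualifier).
        Map<String, Pair<String, String>> mapping = new LinkedHashMap<>();
        mapping.put("age", Pair.of("metadata", "age")); // column assigned to a locality group
        mapping.put("name", Pair.of("name", "name"));   // no locality group: family defaults to the column name

        mapping.forEach((prestoColumn, familyQualifier) ->
                System.out.printf("%s -> %s:%s%n",
                        prestoColumn, familyQualifier.getLeft(), familyQualifier.getRight()));
    }
}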