Use of org.apache.phoenix.hive.ql.index.IndexSearchCondition in project phoenix by apache.
The getSplits method of the PhoenixInputFormat class.
@Override
public InputSplit[] getSplits(JobConf jobConf, int numSplits) throws IOException {
    String tableName = jobConf.get(PhoenixStorageHandlerConstants.PHOENIX_TABLE_NAME);
    String query;
    String executionEngine = jobConf.get(HiveConf.ConfVars.HIVE_EXECUTION_ENGINE.varname,
            HiveConf.ConfVars.HIVE_EXECUTION_ENGINE.getDefaultValue());

    if (LOG.isDebugEnabled()) {
        LOG.debug("Target table name at split phase : " + tableName + " with whereCondition : "
                + jobConf.get(TableScanDesc.FILTER_TEXT_CONF_STR) + " and "
                + HiveConf.ConfVars.HIVE_EXECUTION_ENGINE.varname + " : " + executionEngine);
    }

    if (PhoenixStorageHandlerConstants.MR.equals(executionEngine)) {
        // MR engine: deserialize the Hive filter expression and decompose it into
        // IndexSearchConditions so the predicate can be pushed down to Phoenix.
        List<IndexSearchCondition> conditionList = null;
        String filterExprSerialized = jobConf.get(TableScanDesc.FILTER_EXPR_CONF_STR);
        if (filterExprSerialized != null) {
            ExprNodeGenericFuncDesc filterExpr =
                    Utilities.deserializeExpression(filterExprSerialized);
            PhoenixPredicateDecomposer predicateDecomposer = PhoenixPredicateDecomposer.create(
                    Arrays.asList(jobConf.get(serdeConstants.LIST_COLUMNS).split(",")));
            predicateDecomposer.decomposePredicate(filterExpr);
            if (predicateDecomposer.isCalledPPD()) {
                conditionList = predicateDecomposer.getSearchConditionList();
            }
        }
        query = PhoenixQueryBuilder.getInstance().buildQuery(jobConf, tableName,
                PhoenixStorageHandlerUtil.getReadColumnNames(jobConf), conditionList);
    } else if (PhoenixStorageHandlerConstants.TEZ.equals(executionEngine)) {
        // Tez engine: the filter arrives as plain text, so the query is built from the
        // where clause string and the column type map instead of decomposed conditions.
        Map<String, TypeInfo> columnTypeMap = PhoenixStorageHandlerUtil.createColumnTypeMap(jobConf);
        if (LOG.isDebugEnabled()) {
            LOG.debug("Column type map for TEZ : " + columnTypeMap);
        }
        String whereClause = jobConf.get(TableScanDesc.FILTER_TEXT_CONF_STR);
        query = PhoenixQueryBuilder.getInstance().buildQuery(jobConf, tableName,
                PhoenixStorageHandlerUtil.getReadColumnNames(jobConf), whereClause, columnTypeMap);
    } else {
        throw new IOException(executionEngine + " execution engine is not supported yet.");
    }

    // Build the Phoenix query plan for the generated query and turn its key ranges into splits.
    final QueryPlan queryPlan = getQueryPlan(jobConf, query);
    final List<KeyRange> allSplits = queryPlan.getSplits();
    final List<InputSplit> splits = generateSplits(jobConf, queryPlan, allSplits, query);
    return splits.toArray(new InputSplit[splits.size()]);
}
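
For context, here is a minimal sketch of the MR-path predicate decomposition in isolation, using the same calls that appear above (PhoenixPredicateDecomposer.create, decomposePredicate, isCalledPPD, getSearchConditionList). The column list, the method name decompose, and the import path assumed for PhoenixPredicateDecomposer are illustrative assumptions, not the handler's actual wiring; the filter expression would normally come from deserializing TableScanDesc.FILTER_EXPR_CONF_STR as in getSplits above.

    import java.util.Arrays;
    import java.util.List;

    import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
    import org.apache.phoenix.hive.ql.index.IndexSearchCondition;
    // Assumed package for the decomposer; adjust to the version of phoenix-hive in use.
    import org.apache.phoenix.hive.ppd.PhoenixPredicateDecomposer;

    public class PredicateDecompositionSketch {

        // Sketch only: 'filterExpr' stands in for the expression deserialized from
        // TableScanDesc.FILTER_EXPR_CONF_STR in getSplits above.
        static List<IndexSearchCondition> decompose(ExprNodeGenericFuncDesc filterExpr) {
            // Hypothetical column list; the real code reads serdeConstants.LIST_COLUMNS
            // from the JobConf and splits it on commas.
            List<String> columns = Arrays.asList("ID", "NAME", "CREATED_DATE");

            PhoenixPredicateDecomposer decomposer = PhoenixPredicateDecomposer.create(columns);
            decomposer.decomposePredicate(filterExpr);

            // Only when predicate push-down actually happened does the decomposer expose
            // the IndexSearchConditions that PhoenixQueryBuilder turns into a WHERE clause.
            return decomposer.isCalledPPD() ? decomposer.getSearchConditionList() : null;
        }
    }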