Use of io.prestosql.sql.planner.PlanFragment in project hetu-core by openlookeng.
In class TestUtil, method createExchangePlanFragment:
private static PlanFragment createExchangePlanFragment(RowExpression expr) {
    Symbol testSymbol = new Symbol("a");
    Map<Symbol, ColumnHandle> scanAssignments = ImmutableMap.<Symbol, ColumnHandle>builder().put(testSymbol, new TestingMetadata.TestingColumnHandle("a")).build();
    Map<Symbol, ColumnHandle> assignments = Maps.filterKeys(scanAssignments, Predicates.in(ImmutableList.of(testSymbol)));
    // leaf of the test plan: a table scan using the default reuse-exchange strategy
    TableScanNode tableScanNode = new TableScanNode(new PlanNodeId(UUID.randomUUID().toString()), makeTableHandle(TupleDomain.none()),
            ImmutableList.copyOf(assignments.keySet()), assignments, TupleDomain.none(), Optional.empty(),
            ReuseExchangeOperator.STRATEGY.REUSE_STRATEGY_DEFAULT, new UUID(0, 0), 0, false);
    PlanBuilder planBuilder = new PlanBuilder(new PlanNodeIdAllocator(), dummyMetadata());
    FilterNode filterNode = planBuilder.filter(expr, tableScanNode);
    PlanNode planNode = new LimitNode(new PlanNodeId("limit"), filterNode, 1, false);
    // every output symbol of the test fragment is typed as VARCHAR
    ImmutableMap.Builder<Symbol, Type> types = ImmutableMap.builder();
    for (Symbol symbol : planNode.getOutputSymbols()) {
        types.put(symbol, VARCHAR);
    }
    // single source-distributed fragment with an unpartitioned (single) output
    return new PlanFragment(new PlanFragmentId("limit_fragment_id"), planNode, types.build(), SOURCE_DISTRIBUTION,
            ImmutableList.of(planNode.getId()),
            new PartitioningScheme(Partitioning.create(SINGLE_DISTRIBUTION, ImmutableList.of()), planNode.getOutputSymbols()),
            ungroupedExecution(), StatsAndCosts.empty(), Optional.empty(), Optional.empty(), Optional.empty());
}
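A minimal usage sketch, assuming a trivially true filter built from ConstantExpression and TestNG assertions; the test method and its assertions are illustrative and not part of TestUtil, but the expected values follow directly from the constants used in the helper above.
// Illustrative only: exercise the helper with a constant TRUE filter
// (ConstantExpression and BooleanType are assumed to be on the classpath).
@Test
public void testCreateExchangePlanFragment() {
    RowExpression trueFilter = new ConstantExpression(true, BooleanType.BOOLEAN);
    PlanFragment fragment = createExchangePlanFragment(trueFilter);
    // values fixed by createExchangePlanFragment
    assertEquals(fragment.getId(), new PlanFragmentId("limit_fragment_id"));
    assertEquals(fragment.getPartitioning(), SOURCE_DISTRIBUTION);
    assertTrue(fragment.getRoot() instanceof LimitNode);
}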
Use of io.prestosql.sql.planner.PlanFragment in project hetu-core by openlookeng.
In class PlanPrinter, method formatFragment:
private static String formatFragment(Function<TableScanNode, TableInfo> tableInfoSupplier, ValuePrinter valuePrinter, PlanFragment fragment,
        Optional<StageInfo> stageInfo, Optional<Map<PlanNodeId, PlanNodeStats>> planNodeStats, boolean verbose, List<PlanFragment> allFragments, Metadata metadata) {
    StringBuilder builder = new StringBuilder();
    builder.append(format("Fragment %s [%s]\n", fragment.getId(), fragment.getPartitioning()));
    if (stageInfo.isPresent()) {
        StageStats stageStats = stageInfo.get().getStageStats();
        // mean and population standard deviation of processed input positions across the stage's tasks
        double avgPositionsPerTask = stageInfo.get().getTasks().stream()
                .mapToLong(task -> task.getStats().getProcessedInputPositions())
                .average()
                .orElse(Double.NaN);
        double squaredDifferences = stageInfo.get().getTasks().stream()
                .mapToDouble(task -> Math.pow(task.getStats().getProcessedInputPositions() - avgPositionsPerTask, 2))
                .sum();
        double sdAmongTasks = Math.sqrt(squaredDifferences / stageInfo.get().getTasks().size());
        builder.append(indentString(1))
                .append(format("CPU: %s, Scheduled: %s, Input: %s (%s); per task: avg.: %s std.dev.: %s, Output: %s (%s)\n",
                        stageStats.getTotalCpuTime().convertToMostSuccinctTimeUnit(),
                        stageStats.getTotalScheduledTime().convertToMostSuccinctTimeUnit(),
                        formatPositions(stageStats.getProcessedInputPositions()),
                        stageStats.getProcessedInputDataSize(),
                        formatDouble(avgPositionsPerTask),
                        formatDouble(sdAmongTasks),
                        formatPositions(stageStats.getOutputPositions()),
                        stageStats.getOutputDataSize()));
    }
    PartitioningScheme partitioningScheme = fragment.getPartitioningScheme();
    builder.append(indentString(1)).append(format("Output layout: [%s]\n", Joiner.on(", ").join(partitioningScheme.getOutputLayout())));
    boolean replicateNullsAndAny = partitioningScheme.isReplicateNullsAndAny();
    // render each partitioning argument either as a typed constant or as a column reference
    List<String> arguments = partitioningScheme.getPartitioning().getArguments().stream()
            .map(argument -> {
                if (argument.isConstant()) {
                    NullableValue constant = argument.getConstant();
                    String printableValue = valuePrinter.castToVarchar(constant.getType(), constant.getValue());
                    return constant.getType().getDisplayName() + "(" + printableValue + ")";
                }
                return argument.getColumn().toString();
            })
            .collect(toImmutableList());
    builder.append(indentString(1));
    if (replicateNullsAndAny) {
        builder.append(format("Output partitioning: %s (replicate nulls and any) [%s]%s\n",
                partitioningScheme.getPartitioning().getHandle(), Joiner.on(", ").join(arguments), formatHash(partitioningScheme.getHashColumn())));
    } else {
        builder.append(format("Output partitioning: %s [%s]%s\n",
                partitioningScheme.getPartitioning().getHandle(), Joiner.on(", ").join(arguments), formatHash(partitioningScheme.getHashColumn())));
    }
    builder.append(indentString(1)).append(format("Stage Execution Strategy: %s\n", fragment.getStageExecutionDescriptor().getStageExecutionStrategy()));
    // symbol types are collected from all fragments so that cross-fragment symbol references resolve
    TypeProvider typeProvider = TypeProvider.copyOf(allFragments.stream()
            .flatMap(f -> f.getSymbols().entrySet().stream())
            .distinct()
            .collect(toImmutableMap(Map.Entry::getKey, Map.Entry::getValue)));
    builder.append(new PlanPrinter(fragment.getRoot(), typeProvider, Optional.of(fragment.getStageExecutionDescriptor()), tableInfoSupplier,
            valuePrinter, fragment.getStatsAndCosts(), planNodeStats, metadata).toText(verbose, 1)).append("\n");
    return builder.toString();
}
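The per-task numbers printed above are a mean and a population standard deviation over each task's processed input positions. A small standalone sketch of the same computation (the helper name is illustrative, not part of PlanPrinter):
// Illustrative: same math as the stageInfo block above, over a list of per-task input positions.
private static double stdDevAmongTasks(List<Long> processedInputPositionsPerTask) {
    double avg = processedInputPositionsPerTask.stream()
            .mapToLong(Long::longValue)
            .average()
            .orElse(Double.NaN);
    double squaredDifferences = processedInputPositionsPerTask.stream()
            .mapToDouble(positions -> Math.pow(positions - avg, 2))
            .sum();
    // population (not sample) standard deviation, matching the divisor used above
    return Math.sqrt(squaredDifferences / processedInputPositionsPerTask.size());
}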
Use of io.prestosql.sql.planner.PlanFragment in project hetu-core by openlookeng.
In class PlanPrinter, method textDistributedPlan (SubPlan overload):
public static String textDistributedPlan(SubPlan plan, Metadata metadata, Session session, boolean verbose) {
    TableInfoSupplier supplier = new TableInfoSupplier(metadata, session);
    ValuePrinter printer = new ValuePrinter(metadata, session);
    StringBuilder builder = new StringBuilder();
    for (PlanFragment fragment : plan.getAllFragments()) {
        builder.append(formatFragment(supplier, printer, fragment, Optional.empty(), Optional.empty(), verbose, plan.getAllFragments(), metadata));
    }
    return builder.toString();
}
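A hedged sketch of a possible call site, assuming a SubPlan, Metadata, Session and an io.airlift.log.Logger are already in scope; the helper method itself is illustrative and not taken from hetu-core.
// Illustrative helper: render an already fragmented plan for debug logging.
private static void logDistributedPlan(Logger log, SubPlan subPlan, Metadata metadata, Session session) {
    // verbose = false keeps the rendering compact
    log.debug("Distributed plan for query %s:\n%s", session.getQueryId(), PlanPrinter.textDistributedPlan(subPlan, metadata, session, false));
}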
Use of io.prestosql.sql.planner.PlanFragment in project hetu-core by openlookeng.
In class PlanPrinter, method textDistributedPlan (StageInfo overload):
public static String textDistributedPlan(StageInfo outputStageInfo, ValuePrinter valuePrinter, boolean verbose, Metadata metadata) {
    Map<PlanNodeId, TableInfo> tableInfos = getAllStages(Optional.of(outputStageInfo)).stream()
            .map(StageInfo::getTables)
            .map(Map::entrySet)
            .flatMap(Collection::stream)
            .collect(toImmutableMap(Entry::getKey, Entry::getValue));
    StringBuilder builder = new StringBuilder();
    List<StageInfo> allStages = getAllStages(Optional.of(outputStageInfo));
    List<PlanFragment> allFragments = allStages.stream()
            .map(StageInfo::getPlan)
            .collect(toImmutableList());
    // operator statistics aggregated across all stages, keyed by plan node id
    Map<PlanNodeId, PlanNodeStats> aggregatedStats = aggregateStageStats(allStages);
    for (StageInfo stageInfo : allStages) {
        builder.append(formatFragment(tableScanNode -> tableInfos.get(tableScanNode.getId()), valuePrinter, stageInfo.getPlan(),
                Optional.of(stageInfo), Optional.of(aggregatedStats), verbose, allFragments, metadata));
    }
    return builder.toString();
}
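A hedged sketch of how the StageInfo overload might be driven for a finished query, assuming a QueryInfo, Metadata and Session are available; the variable names below are illustrative.
// Illustrative only: render the executed plan with per-stage statistics (verbose output).
StageInfo outputStage = queryInfo.getOutputStage()
        .orElseThrow(() -> new IllegalStateException("query has no output stage"));
ValuePrinter valuePrinter = new ValuePrinter(metadata, session);
String analyzedPlan = PlanPrinter.textDistributedPlan(outputStage, valuePrinter, true, metadata);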
Use of io.prestosql.sql.planner.PlanFragment in project hetu-core by openlookeng.
In class SqlQueryExecution, method handleCrossRegionDynamicFilter:
private void handleCrossRegionDynamicFilter(PlanRoot plan) {
    if (!isCrossRegionDynamicFilterEnabled(getSession()) || plan == null) {
        return;
    }
    StateStore stateStore = stateStoreProvider.getStateStore();
    if (stateStore == null) {
        return;
    }
    String queryId = getSession().getQueryId().getId();
    log.debug("queryId=%s begin to find columnToSymbolMapping.", queryId);
    PlanNode outputNode = plan.getRoot().getFragment().getRoot();
    Map<String, Set<String>> columnToSymbolMapping = new HashMap<>();
    if (outputNode instanceof OutputNode) {
        List<String> queryColumnNames = ((OutputNode) outputNode).getColumnNames();
        List<Symbol> outputSymbols = outputNode.getOutputSymbols();
        // seed the mapping with each output symbol mapped to itself
        Map<String, Set<String>> tmpMapping = new HashMap<>(outputSymbols.size());
        for (Symbol symbol : outputNode.getOutputSymbols()) {
            Set<String> sets = new HashSet<>();
            sets.add(symbol.getName());
            tmpMapping.put(symbol.getName(), sets);
        }
        // walk every fragment except the root output fragment and expand the mapping with the symbols it derives from
        for (PlanFragment fragment : plan.getRoot().getAllFragments()) {
            if ("0".equals(fragment.getId().toString())) {
                continue;
            }
            PlanNode sourceNode = fragment.getRoot();
            findMappingFromPlan(tmpMapping, sourceNode);
        }
        // key the final mapping by the query's output column names
        for (int i = 0; i < outputSymbols.size(); i++) {
            columnToSymbolMapping.put(queryColumnNames.get(i), tmpMapping.get(outputSymbols.get(i).getName()));
        }
    }
    // save mapping into stateStore
    StateMap<String, Object> mappingStateMap = (StateMap<String, Object>) stateStore.getOrCreateStateCollection(CROSS_REGION_DYNAMIC_FILTERS, StateCollection.Type.MAP);
    mappingStateMap.put(queryId + QUERY_COLUMN_NAME_TO_SYMBOL_MAPPING, columnToSymbolMapping);
    log.debug("queryId=%s, added columnToSymbolMapping into hazelcast successfully.", queryId + QUERY_COLUMN_NAME_TO_SYMBOL_MAPPING);
}
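A hedged sketch of the consumer side, assuming the same state-store constants are in scope and that StateMap exposes a get(key) accessor; the reader method below is illustrative rather than taken from SqlQueryExecution.
// Illustrative reader: fetch the mapping that handleCrossRegionDynamicFilter stored for a query.
@SuppressWarnings("unchecked")
private static Map<String, Set<String>> loadColumnToSymbolMapping(StateStore stateStore, String queryId) {
    StateMap<String, Object> mappingStateMap = (StateMap<String, Object>) stateStore.getOrCreateStateCollection(CROSS_REGION_DYNAMIC_FILTERS, StateCollection.Type.MAP);
    // the producing query may not have registered its mapping yet, so the result can be null
    return (Map<String, Set<String>>) mappingStateMap.get(queryId + QUERY_COLUMN_NAME_TO_SYMBOL_MAPPING);
}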