Use of org.apache.drill.exec.server.options.OptionManager in project drill by apache.
The class HiveAbstractReader, method setup.
@Override
public void setup(OperatorContext context, OutputMutator output) throws ExecutionSetupException {
  // initializes "reader"
  final Callable<Void> readerInitializer = new Callable<Void>() {
    @Override
    public Void call() throws Exception {
      init();
      return null;
    }
  };
  final ListenableFuture<Void> result = context.runCallableAs(proxyUgi, readerInitializer);
  try {
    result.get();
  } catch (InterruptedException e) {
    result.cancel(true);
    // Preserve evidence that the interruption occurred so that code higher up on the call stack can learn of the
    // interruption and respond to it if it wants to.
    Thread.currentThread().interrupt();
  } catch (ExecutionException e) {
    throw ExecutionSetupException.fromThrowable(e.getMessage(), e);
  }
  try {
    final OptionManager options = fragmentContext.getOptions();
    for (int i = 0; i < selectedColumnNames.size(); i++) {
      MajorType type = HiveUtilities.getMajorTypeFromHiveTypeInfo(selectedColumnTypes.get(i), options);
      MaterializedField field = MaterializedField.create(selectedColumnNames.get(i), type);
      Class<? extends ValueVector> vvClass = TypeHelper.getValueVectorClass(type.getMinorType(), type.getMode());
      vectors.add(output.addField(field, vvClass));
    }
    for (int i = 0; i < selectedPartitionNames.size(); i++) {
      MajorType type = HiveUtilities.getMajorTypeFromHiveTypeInfo(selectedPartitionTypes.get(i), options);
      MaterializedField field = MaterializedField.create(selectedPartitionNames.get(i), type);
      Class<? extends ValueVector> vvClass = TypeHelper.getValueVectorClass(field.getType().getMinorType(), field.getDataMode());
      pVectors.add(output.addField(field, vvClass));
    }
  } catch (SchemaChangeException e) {
    throw new ExecutionSetupException(e);
  }
}
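In the loops above, the OptionManager is consulted only by HiveUtilities.getMajorTypeFromHiveTypeInfo when each Hive type is mapped to a Drill MajorType; everything after that is ordinary vector plumbing. Below is a minimal sketch of that plumbing for a hypothetical required INT column, with a hand-built MajorType standing in for the Hive/OptionManager lookup (the class, method, and column names are illustrative, not taken from the snippet above).
import org.apache.drill.common.types.TypeProtos.MajorType;
import org.apache.drill.common.types.TypeProtos.MinorType;
import org.apache.drill.common.types.Types;
import org.apache.drill.exec.expr.TypeHelper;
import org.apache.drill.exec.record.MaterializedField;
import org.apache.drill.exec.vector.ValueVector;

public class VectorClassSketch {
  public static void describeIntColumn(String columnName) {
    MajorType intType = Types.required(MinorType.INT);                       // REQUIRED INT, no Hive lookup needed
    MaterializedField field = MaterializedField.create(columnName, intType); // schema entry for the column
    Class<? extends ValueVector> vvClass =
        TypeHelper.getValueVectorClass(intType.getMinorType(), intType.getMode());
    System.out.println(field + " -> " + vvClass.getSimpleName());            // e.g. IntVector
    // In the reader above, output.addField(field, vvClass) would then create and register the vector.
  }
}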
Use of org.apache.drill.exec.server.options.OptionManager in project drill by apache.
The class MemoryAllocationUtilities, method setupSortMemoryAllocations.
/**
 * Helper method to set up the memory allocations for external sorts. Since it is
 * used in multiple places, it lives in this class rather than in Foreman.
 * @param plan the physical plan whose external sorts receive memory limits
 * @param queryContext the query context supplying options and configuration
 */
public static void setupSortMemoryAllocations(final PhysicalPlan plan, final QueryContext queryContext) {
  if (plan.getProperties().hasResourcePlan) {
    return;
  }
  // look for external sorts
  final List<ExternalSort> sortList = new LinkedList<>();
  for (final PhysicalOperator op : plan.getSortedOperators()) {
    if (op instanceof ExternalSort) {
      sortList.add((ExternalSort) op);
    }
  }
  // if there are any sorts, compute the maximum allocation, and set it on them
  if (sortList.size() > 0) {
    final OptionManager optionManager = queryContext.getOptions();
    final long maxWidthPerNode = optionManager.getOption(ExecConstants.MAX_WIDTH_PER_NODE_KEY).num_val;
    long maxAllocPerNode = Math.min(DrillConfig.getMaxDirectMemory(), queryContext.getConfig().getLong(RootAllocatorFactory.TOP_LEVEL_MAX_ALLOC));
    maxAllocPerNode = Math.min(maxAllocPerNode, optionManager.getOption(ExecConstants.MAX_QUERY_MEMORY_PER_NODE_KEY).num_val);
    final long maxSortAlloc = maxAllocPerNode / (sortList.size() * maxWidthPerNode);
    logger.debug("Max sort alloc: {}", maxSortAlloc);
    for (final ExternalSort externalSort : sortList) {
      // Ensure that the sort receives the minimum memory needed to make progress.
      // Without this, the math might work out to allocate too little memory.
      long alloc = Math.max(maxSortAlloc, externalSort.getInitialAllocation());
      externalSort.setMaxAllocation(alloc);
    }
  }
  plan.getProperties().hasResourcePlan = true;
}
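To make the division above concrete, here is a purely hypothetical plugging-in of numbers; the values are invented for illustration and are not Drill defaults.
// Hypothetical inputs, chosen only to illustrate the formula above:
long maxAllocPerNode = 8L * 1024 * 1024 * 1024;  // 8 GB usable per node after the min() clamps
int  sortCount       = 2;                        // two ExternalSort operators in the plan
long maxWidthPerNode = 4;                        // at most 4 fragments per node
long maxSortAlloc    = maxAllocPerNode / (sortCount * maxWidthPerNode);  // 1 GB per sort instance
// Each sort then gets max(maxSortAlloc, its initial allocation), so a sort whose
// initial allocation already exceeds 1 GB keeps the larger figure.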
Use of org.apache.drill.exec.server.options.OptionManager in project drill by apache.
The class QueryTestUtil, method setupScalarReplacementOption.
/**
 * Set up the options to test the scalar replacement retry option (see
 * ClassTransformer.java). Scalar replacement rewrites bytecode to replace
 * value holders (essentially boxed values) with their member variables as
 * locals. There is still one pattern that doesn't work, and occasionally new
 * ones are introduced. This can be used in tests that exercise failing patterns.
 *
 * <p>This also flushes the compiled code cache.
 *
 * @param drillbit the drillbit
 * @param srOption the scalar replacement option value to use
 * @return the original scalar replacement option setting (so it can be restored)
 */
@SuppressWarnings("resource")
public static OptionValue setupScalarReplacementOption(final Drillbit drillbit, final ClassTransformer.ScalarReplacementOption srOption) {
  // set the system option
  final DrillbitContext drillbitContext = drillbit.getContext();
  final OptionManager optionManager = drillbitContext.getOptionManager();
  final OptionValue originalOptionValue = optionManager.getOption(ClassTransformer.SCALAR_REPLACEMENT_OPTION);
  final OptionValue newOptionValue = OptionValue.createString(OptionValue.OptionType.SYSTEM, ClassTransformer.SCALAR_REPLACEMENT_OPTION, srOption.name().toLowerCase());
  optionManager.setOption(newOptionValue);
  // flush the code cache
  drillbitContext.getCompiler().flushCache();
  return originalOptionValue;
}
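A hedged sketch of how a test might pair this with restoreScalarReplacementOption (shown next): the drillbit variable and the TRY enum constant are assumptions made for illustration, not taken from the snippet above.
// Assumed: a running Drillbit named `drillbit` and an enum constant
// ClassTransformer.ScalarReplacementOption.TRY (both illustrative assumptions).
final OptionValue original =
    QueryTestUtil.setupScalarReplacementOption(drillbit, ClassTransformer.ScalarReplacementOption.TRY);
try {
  // run the query that exercises the scalar replacement pattern under test
} finally {
  QueryTestUtil.restoreScalarReplacementOption(drillbit, original);
}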
Use of org.apache.drill.exec.server.options.OptionManager in project drill by apache.
The class QueryTestUtil, method restoreScalarReplacementOption.
/**
 * Restore the original scalar replacement option returned from
 * setupScalarReplacementOption().
 *
 * <p>This also flushes the compiled code cache.
 *
 * @param drillbit the drillbit
 * @param srOption the original scalar replacement option value to restore
 */
public static void restoreScalarReplacementOption(final Drillbit drillbit, final OptionValue srOption) {
  @SuppressWarnings("resource") final DrillbitContext drillbitContext = drillbit.getContext();
  @SuppressWarnings("resource") final OptionManager optionManager = drillbitContext.getOptionManager();
  optionManager.setOption(srOption);
  // flush the code cache
  drillbitContext.getCompiler().flushCache();
}
Use of org.apache.drill.exec.server.options.OptionManager in project drill by axbaretto.
The class HiveRecordReader, method setup.
@Override
public void setup(OperatorContext context, OutputMutator output) throws ExecutionSetupException {
  // initializes "reader"
  final Callable<Void> readerInitializer = new Callable<Void>() {
    @Override
    public Void call() throws Exception {
      init();
      return null;
    }
  };
  final ListenableFuture<Void> result = context.runCallableAs(proxyUgi, readerInitializer);
  try {
    result.get();
  } catch (InterruptedException e) {
    result.cancel(true);
    // Preserve evidence that the interruption occurred so that code higher up on the call stack can learn of the
    // interruption and respond to it if it wants to.
    Thread.currentThread().interrupt();
  } catch (ExecutionException e) {
    throw ExecutionSetupException.fromThrowable(e.getMessage(), e);
  }
  try {
    final OptionManager options = fragmentContext.getOptions();
    for (int i = 0; i < selectedColumnNames.size(); i++) {
      MajorType type = HiveUtilities.getMajorTypeFromHiveTypeInfo(selectedColumnTypes.get(i), options);
      MaterializedField field = MaterializedField.create(selectedColumnNames.get(i), type);
      Class<? extends ValueVector> vvClass = TypeHelper.getValueVectorClass(type.getMinorType(), type.getMode());
      vectors.add(output.addField(field, vvClass));
    }
    for (int i = 0; i < selectedPartitionNames.size(); i++) {
      MajorType type = HiveUtilities.getMajorTypeFromHiveTypeInfo(selectedPartitionTypes.get(i), options);
      MaterializedField field = MaterializedField.create(selectedPartitionNames.get(i), type);
      Class<? extends ValueVector> vvClass = TypeHelper.getValueVectorClass(field.getType().getMinorType(), field.getDataMode());
      pVectors.add(output.addField(field, vvClass));
    }
  } catch (SchemaChangeException e) {
    throw new ExecutionSetupException(e);
  }
}