Use of io.trino.FeaturesConfig in project trino by trinodb.
Class TestFeaturesConfig, method testExplicitPropertyMappings.
@Test
public void testExplicitPropertyMappings() {
    Map<String, String> properties = ImmutableMap.<String, String>builder()
            .put("grouped-execution-enabled", "true")
            .put("dynamic-schedule-for-grouped-execution", "true")
            .put("concurrent-lifespans-per-task", "1")
            .put("redistribute-writes", "false")
            .put("scale-writers", "true")
            .put("writer-min-size", "42GB")
            .put("regex-library", "RE2J")
            .put("re2j.dfa-states-limit", "42")
            .put("re2j.dfa-retries", "42")
            .put("spill-enabled", "true")
            .put("aggregation-operator-unspill-memory-limit", "100MB")
            .put("spiller-spill-path", "/tmp/custom/spill/path1,/tmp/custom/spill/path2")
            .put("spiller-threads", "42")
            .put("spiller-max-used-space-threshold", "0.8")
            .put("memory-revoking-threshold", "0.2")
            .put("memory-revoking-target", "0.8")
            .put("exchange.compression-enabled", "true")
            .put("exchange.data-integrity-verification", "RETRY")
            .put("deprecated.legacy-row-to-json-cast", "true")
            .put("parse-decimal-literals-as-double", "true")
            .put("pages-index.eager-compaction-enabled", "true")
            .put("filter-and-project-min-output-page-size", "1MB")
            .put("filter-and-project-min-output-page-row-count", "2048")
            .put("max-recursion-depth", "8")
            .put("analyzer.max-grouping-sets", "2047")
            .put("experimental.late-materialization.enabled", "true")
            .put("deprecated.omit-datetime-type-precision", "true")
            .put("deprecated.legacy-catalog-roles", "true")
            .put("incremental-hash-array-load-factor.enabled", "false")
            .put("hide-inaccessible-columns", "true")
            .put("legacy.allow-set-view-authorization", "true")
            .buildOrThrow();

    FeaturesConfig expected = new FeaturesConfig()
            .setGroupedExecutionEnabled(true)
            .setDynamicScheduleForGroupedExecutionEnabled(true)
            .setConcurrentLifespansPerTask(1)
            .setRedistributeWrites(false)
            .setScaleWriters(true)
            .setWriterMinSize(DataSize.of(42, GIGABYTE))
            .setRegexLibrary(RE2J)
            .setRe2JDfaStatesLimit(42)
            .setRe2JDfaRetries(42)
            .setSpillEnabled(true)
            .setAggregationOperatorUnspillMemoryLimit(DataSize.valueOf("100MB"))
            .setSpillerSpillPaths("/tmp/custom/spill/path1,/tmp/custom/spill/path2")
            .setSpillerThreads(42)
            .setSpillMaxUsedSpaceThreshold(0.8)
            .setMemoryRevokingThreshold(0.2)
            .setMemoryRevokingTarget(0.8)
            .setExchangeCompressionEnabled(true)
            .setExchangeDataIntegrityVerification(DataIntegrityVerification.RETRY)
            .setLegacyRowToJsonCast(true)
            .setParseDecimalLiteralsAsDouble(true)
            .setPagesIndexEagerCompactionEnabled(true)
            .setFilterAndProjectMinOutputPageSize(DataSize.of(1, MEGABYTE))
            .setFilterAndProjectMinOutputPageRowCount(2048)
            .setMaxRecursionDepth(8)
            .setMaxGroupingSets(2047)
            .setLateMaterializationEnabled(true)
            .setOmitDateTimeTypePrecision(true)
            .setLegacyCatalogRoles(true)
            .setIncrementalHashArrayLoadFactorEnabled(false)
            .setHideInaccessibleColumns(true)
            .setAllowSetViewAuthorization(true);

    assertFullMapping(properties, expected);
}
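Airlift-style config tests normally pair assertFullMapping with a companion defaults check. The sketch below shows the shape of that test, assuming the static imports io.airlift.configuration.testing.ConfigAssertions.assertRecordedDefaults and recordDefaults plus RegexLibrary.JONI; the recorded values are illustrative assumptions, and the real test records every FeaturesConfig setter with its actual default.

@Test
public void testDefaults() {
    assertRecordedDefaults(recordDefaults(FeaturesConfig.class)
            // Illustrative, assumed defaults; the real test covers every FeaturesConfig setter.
            .setSpillEnabled(false)
            .setRedistributeWrites(true)
            .setRegexLibrary(JONI));
}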
Use of io.trino.FeaturesConfig in project trino by trinodb.
Class TestGenericPartitioningSpiller, method setUp.
@BeforeClass
public void setUp() throws Exception {
    tempDirectory = createTempDirectory(getClass().getSimpleName());
    FeaturesConfig featuresConfig = new FeaturesConfig();
    featuresConfig.setSpillerSpillPaths(tempDirectory.toString());
    featuresConfig.setSpillerThreads(8);
    featuresConfig.setSpillMaxUsedSpaceThreshold(1.0);
    SingleStreamSpillerFactory singleStreamSpillerFactory = new FileSingleStreamSpillerFactory(
            new TestingBlockEncodingSerde(),
            new SpillerStats(),
            featuresConfig,
            new NodeSpillConfig());
    factory = new GenericPartitioningSpillerFactory(singleStreamSpillerFactory);
    scheduledExecutor = newSingleThreadScheduledExecutor();
}
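A matching cleanup is usually needed so spill files and the scheduler thread do not leak between test classes. This is only a sketch: it assumes TestNG's @AfterClass and the statically imported Guava helpers com.google.common.io.MoreFiles.deleteRecursively and RecursiveDeleteOption.ALLOW_INSECURE; the actual tearDown in TestGenericPartitioningSpiller may differ.

@AfterClass(alwaysRun = true)
public void tearDown() throws Exception {
    // Stop the scheduler used by the tests, then remove the temporary spill directory from setUp.
    scheduledExecutor.shutdownNow();
    deleteRecursively(tempDirectory, ALLOW_INSECURE);
}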
Use of io.trino.FeaturesConfig in project trino by trinodb.
Class TestAddExchangesPlans, method createLocalQueryRunner.
@Override
protected LocalQueryRunner createLocalQueryRunner() {
    Session session = testSessionBuilder()
            .setCatalog("tpch")
            .setSchema("tiny")
            .build();
    FeaturesConfig featuresConfig = new FeaturesConfig()
            .setSpillerSpillPaths("/tmp/test_spill_path");
    LocalQueryRunner queryRunner = LocalQueryRunner.builder(session)
            .withFeaturesConfig(featuresConfig)
            .build();
    queryRunner.createCatalog("tpch", new TpchConnectorFactory(1), ImmutableMap.of());
    return queryRunner;
}
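The FeaturesConfig handed to the builder can carry more than the spill path. A hedged variant, reusing only setters already shown in the first snippet and an already-built session (the chosen values are illustrative, not taken from TestAddExchangesPlans):

FeaturesConfig featuresConfig = new FeaturesConfig()
        .setSpillEnabled(true)                        // illustrative value
        .setSpillerSpillPaths("/tmp/test_spill_path")
        .setSpillerThreads(4);                        // illustrative value
LocalQueryRunner queryRunner = LocalQueryRunner.builder(session)
        .withFeaturesConfig(featuresConfig)
        .build();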
Use of io.trino.FeaturesConfig in project trino by trinodb.
Class TestGlobalFunctionCatalog, method testConflictingScalarAggregation.
@Test
public void testConflictingScalarAggregation() {
    FunctionBundle functions = extractFunctions(ScalarSum.class);
    TypeOperators typeOperators = new TypeOperators();
    GlobalFunctionCatalog globalFunctionCatalog = new GlobalFunctionCatalog();
    globalFunctionCatalog.addFunctions(SystemFunctionBundle.create(
            new FeaturesConfig(),
            typeOperators,
            new BlockTypeOperators(typeOperators),
            NodeVersion.UNKNOWN));
    assertThatThrownBy(() -> globalFunctionCatalog.addFunctions(functions))
            .isInstanceOf(IllegalStateException.class)
            .hasMessage("'sum' is both an aggregation and a scalar function");
}
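For context, ScalarSum is a test-only scalar function whose name collides with the built-in sum aggregation already registered by SystemFunctionBundle. A minimal sketch of such a stub, assuming Trino's io.trino.spi.function annotations; the real fixture in TestGlobalFunctionCatalog may differ:

public static final class ScalarSum {
    private ScalarSum() {}

    // A scalar named "sum" clashes with the built-in sum aggregation when registered.
    @ScalarFunction("sum")
    @SqlType(StandardTypes.BIGINT)
    public static long sum(@SqlType(StandardTypes.BIGINT) long a, @SqlType(StandardTypes.BIGINT) long b) {
        return a + b;
    }
}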
Use of io.trino.FeaturesConfig in project trino by trinodb.
Class FunctionManager, method createTestingFunctionManager.
public static FunctionManager createTestingFunctionManager() {
    TypeOperators typeOperators = new TypeOperators();
    GlobalFunctionCatalog functionCatalog = new GlobalFunctionCatalog();
    functionCatalog.addFunctions(SystemFunctionBundle.create(
            new FeaturesConfig(),
            typeOperators,
            new BlockTypeOperators(typeOperators),
            UNKNOWN));
    functionCatalog.addFunctions(new InternalFunctionBundle(
            new LiteralFunction(new InternalBlockEncodingSerde(new BlockEncodingManager(), TESTING_TYPE_MANAGER))));
    return new FunctionManager(functionCatalog);
}
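The last two snippets repeat the same bootstrap: a GlobalFunctionCatalog seeded with the system function bundle built from a default FeaturesConfig. A small helper along these lines would factor that out; the helper name is hypothetical and not part of Trino's API.

private static GlobalFunctionCatalog createCatalogWithSystemFunctions() {
    TypeOperators typeOperators = new TypeOperators();
    GlobalFunctionCatalog catalog = new GlobalFunctionCatalog();
    // Register the built-in functions, driven by a default FeaturesConfig.
    catalog.addFunctions(SystemFunctionBundle.create(
            new FeaturesConfig(),
            typeOperators,
            new BlockTypeOperators(typeOperators),
            NodeVersion.UNKNOWN));
    return catalog;
}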