Example use of io.deephaven.io.logger.Logger in the deephaven-core project (by deephaven):
class QueryTableTreeTest, method testConcurrentInstantiationOfSort.
/**
 * Verifies that sort + treeTable pipelines instantiated concurrently with an update cycle
 * observe consistent data: constructions that race the cycle see pre-update ("original")
 * values via prev, and after the cycle completes everything converges to the updated data.
 *
 * @throws ExecutionException if a pool task fails
 * @throws InterruptedException if interrupted while waiting on a pool task
 */
public void testConcurrentInstantiationOfSort() throws ExecutionException, InterruptedException {
    // Disable memoization so every sort/tree call builds a fresh result; otherwise cached
    // tables would hide the concurrent-instantiation code paths under test.
    final boolean oldMemoize = QueryTable.setMemoizeResults(false);
    try {
        final QueryTable source = TstUtils.testRefreshingTable(RowSetFactory.flat(10).toTracking(),
                col("Sentinel", 1, 2, 3, 4, 5, 6, 7, 8, 9, 10),
                col("Parent", NULL_INT, NULL_INT, 1, 1, 2, 3, 5, 5, 3, 2),
                col("Extra", "a", "b", "c", "d", "e", "f", "g", "h", "i", "j"));
        // source2 is what source will look like after the update applied mid-cycle below
        // (Sentinel 1 removed, 2's Extra becomes "bb", Sentinels 11/12 added).
        final QueryTable source2 = TstUtils.testRefreshingTable(RowSetFactory.flat(11).toTracking(),
                col("Sentinel", 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12),
                col("Parent", NULL_INT, 1, 1, 2, 3, 5, 5, 3, 2, NULL_INT, 11),
                col("Extra", "bb", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l"));

        final java.util.function.Function<Table, Table> doSort = t -> t.sortDescending("Extra");
        final java.util.function.Function<Table, Table> doTree = t -> t.treeTable("Sentinel", "Parent");
        final java.util.function.Function<Table, Table> doSortAndTree = doSort.andThen(doTree);

        // Expected results, computed under the exclusive lock (no concurrency involved):
        // - expect: live source, pre-update
        // - expectOriginal: static snapshot of the pre-update source
        // - expect2: static snapshot equivalent to the post-update source
        final Table expect = UpdateGraphProcessor.DEFAULT.exclusiveLock()
                .computeLocked(() -> doSortAndTree.apply(source));
        final Table expectOriginal = UpdateGraphProcessor.DEFAULT.exclusiveLock()
                .computeLocked(() -> doSortAndTree.apply(makeStatic(source)));
        final Table expect2 = UpdateGraphProcessor.DEFAULT.exclusiveLock()
                .computeLocked(() -> doSortAndTree.apply(makeStatic(source2)));

        final String hierarchicalColumnName = getHierarchicalColumnName(expect);

        final Table sorted0 = doSort.apply(source);
        final Table sorted0Original = doSort.apply(makeStatic(source));
        final Table sorted2 = doSort.apply(makeStatic(source2));

        // Open an update cycle; everything created from here until completeCycleForUnitTests
        // races the in-flight cycle.
        UpdateGraphProcessor.DEFAULT.startCycleForUnitTests();

        // Instantiated before any changes are applied this cycle: should match pre-update data.
        final Table treed1 = pool.submit(() -> doSortAndTree.apply(source)).get();
        final Table sorted1 = pool.submit(() -> doSort.apply(source)).get();

        doCompareWithChildrenForTrees("testConcurrentInstantiation", treed1, expect, 0, 10,
                hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY);

        // Apply the update (but do not deliver notifications yet).
        TstUtils.removeRows(source, i(0));
        TstUtils.addToTable(source, i(1, 11, 12),
                c("Sentinel", 2, 11, 12),
                c("Parent", NULL_INT, NULL_INT, 11),
                c("Extra", "bb", "k", "l"));

        // Instantiated after the data change but before notifications: still pre-update view.
        final Table treed2a = pool.submit(() -> doSortAndTree.apply(source)).get();
        final Table treed2b = pool.submit(() -> doTree.apply(sorted0)).get();
        final Table treed2c = pool.submit(() -> doTree.apply(sorted1)).get();

        doCompareWithChildrenForTrees("testConcurrentInstantiation", treed1, expectOriginal, true, false,
                0, 10, hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY);
        doCompareWithChildrenForTrees("testConcurrentInstantiation", treed2a, expectOriginal, true, false,
                0, 10, hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY);
        doCompareWithChildrenForTrees("testConcurrentInstantiation", treed2b, expectOriginal, true, false,
                0, 10, hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY);
        doCompareWithChildrenForTrees("testConcurrentInstantiation", treed2c, expectOriginal, true, false,
                0, 10, hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY);

        // Deliver the update notifications and flush them through the graph.
        source.notifyListeners(i(11, 12), i(0), i(1));
        UpdateGraphProcessor.DEFAULT.flushAllNormalNotificationsForUnitTests();

        // everything should have current values now
        doCompareWithChildrenForTrees("testConcurrentInstantiation", treed1, expect2, false, false,
                0, 4, hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY);
        doCompareWithChildrenForTrees("testConcurrentInstantiation", treed2a, expect2, false, false,
                0, 4, hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY);
        doCompareWithChildrenForTrees("testConcurrentInstantiation", treed2b, expect2, false, false,
                0, 4, hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY);
        doCompareWithChildrenForTrees("testConcurrentInstantiation", treed2c, expect2, false, false,
                0, 4, hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY);

        // but still have a previous value for things that are old
        doCompareWithChildrenForTrees("testConcurrentInstantiation", treed1, expectOriginal, true, false,
                0, 4, hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY);
        doCompareWithChildrenForTrees("testConcurrentInstantiation", treed2a, expectOriginal, true, false,
                0, 4, hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY);
        doCompareWithChildrenForTrees("testConcurrentInstantiation", treed2b, expectOriginal, true, false,
                0, 4, hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY);
        doCompareWithChildrenForTrees("testConcurrentInstantiation", treed2c, expectOriginal, true, false,
                0, 4, hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY);

        // we now initialize things after the notification is complete
        final Table treed3a = pool.submit(() -> doSortAndTree.apply(source)).get();
        final Table treed3b = pool.submit(() -> doTree.apply(sorted0)).get();
        final Table treed3c = pool.submit(() -> doTree.apply(sorted1)).get();

        System.out.println("Tree3a");
        dumpRollup(treed3a, hierarchicalColumnName);
        System.out.println("Tree3b");
        dumpRollup(treed3b, hierarchicalColumnName);
        System.out.println("Tree3c");
        dumpRollup(treed3c, hierarchicalColumnName);

        // everything should have current values now
        doCompareWithChildrenForTrees("testConcurrentInstantiation", treed3a, expect2, false, false,
                0, 4, hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY);
        doCompareWithChildrenForTrees("testConcurrentInstantiation", treed3b, expect2, false, false,
                0, 4, hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY);
        doCompareWithChildrenForTrees("testConcurrentInstantiation", treed3c, expect2, false, false,
                0, 4, hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY);

        // Note that previous is not defined to be the starting value, now that redirectToGet has been
        // discontinued.
        assertTableEquals(sorted0Original, prevTable(sorted0));
        assertTableEquals(sorted0Original, prevTable(sorted1));
        assertTableEquals(sorted2, sorted0);
        assertTableEquals(sorted2, sorted1);

        UpdateGraphProcessor.DEFAULT.completeCycleForUnitTests();

        // After the cycle completes, every tree — regardless of when it was instantiated —
        // must agree with the post-update expectation.
        doCompareWithChildrenForTrees("testConcurrentInstantiation", expect2, treed1, 0, 4,
                hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY);
        doCompareWithChildrenForTrees("testConcurrentInstantiation", expect2, treed2a, 0, 4,
                hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY);
        doCompareWithChildrenForTrees("testConcurrentInstantiation", expect2, treed2b, 0, 4,
                hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY);
        doCompareWithChildrenForTrees("testConcurrentInstantiation", expect2, treed2c, 0, 4,
                hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY);
        doCompareWithChildrenForTrees("testConcurrentInstantiation", expect2, treed3a, 0, 4,
                hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY);
        doCompareWithChildrenForTrees("testConcurrentInstantiation", expect2, treed3b, 0, 4,
                hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY);
        doCompareWithChildrenForTrees("testConcurrentInstantiation", expect2, treed3c, 0, 4,
                hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY);
    } finally {
        // Always restore the global memoization setting, even on failure.
        QueryTable.setMemoizeResults(oldMemoize);
    }
}
Example use of io.deephaven.io.logger.Logger in the deephaven-core project (by deephaven):
class QueryTableAjTest, method testAjRandomStaticOverflow.
@SuppressWarnings("SameParameterValue")
private void testAjRandomStaticOverflow(int seed, int leftSize, int rightSize) {
final Logger log = new StreamLoggerImpl();
final Random random = new Random(seed);
final QueryTable leftTable = getTable(false, leftSize, random, initColumnInfos(new String[] { "Bucket", "LeftStamp", "LeftSentinel" }, new TstUtils.StringGenerator(leftSize / 10), new TstUtils.IntGenerator(0, 100000), new TstUtils.IntGenerator(10_000_000, 10_010_000)));
final QueryTable rightTable = getTable(false, rightSize, random, initColumnInfos(new String[] { "Bucket", "RightStamp", "RightSentinel" }, new TstUtils.StringGenerator(rightSize / 10), new TstUtils.SortedIntGenerator(0, 100000), new TstUtils.IntGenerator(20_000_000, 20_010_000)));
final Table result = AsOfJoinHelper.asOfJoin(QueryTableJoinTest.SMALL_LEFT_CONTROL, leftTable, (QueryTable) rightTable.reverse(), MatchPairFactory.getExpressions("Bucket", "LeftStamp=RightStamp"), MatchPairFactory.getExpressions("RightStamp", "RightSentinel"), SortingOrder.Descending, true);
checkAjResults(result.partitionBy("Bucket"), leftTable.partitionBy("Bucket"), rightTable.partitionBy("Bucket"), true, true);
}
Example use of io.deephaven.io.logger.Logger in the deephaven-core project (by deephaven):
class QueryTableAjTest, method testAjRandomLeftIncrementalRightStaticOverflow.
/**
 * As-of-join test with a refreshing (incremental) left table joined against a static right
 * table, using a small join control to force hash-table overflow. Runs a few incremental
 * steps and validates each via the {@code EvalNugget} framework.
 */
@Test
public void testAjRandomLeftIncrementalRightStaticOverflow() {
    // NOTE(review): removed an unused local Logger that was constructed but never referenced.
    final JoinIncrement joinIncrement = base.leftStepShift;
    final int seed = 0;
    final Random random = new Random(seed);
    final int maxSteps = 3;

    final ColumnInfo[] leftColumnInfo;
    // leftSize/rightSize drive the per-step increment magnitude; the initial tables are built
    // with 100000 rows (intentionally larger, to force overflow — the sizes are independent).
    final int leftSize = 32000;
    final int rightSize = 32000;
    final QueryTable leftTable = getTable(true, 100000, random,
            leftColumnInfo = initColumnInfos(new String[] { "Bucket", "LeftStamp", "LeftSentinel" },
                    new TstUtils.StringGenerator(leftSize),
                    new TstUtils.IntGenerator(0, 100000),
                    new TstUtils.IntGenerator(10_000_000, 10_010_000)));
    final ColumnInfo[] rightColumnInfo;
    // NOTE(review): the right table's StringGenerator reuses leftSize; presumably intentional
    // (same bucket key space on both sides) — confirm before changing.
    final QueryTable rightTable = getTable(false, 100000, random,
            rightColumnInfo = initColumnInfos(new String[] { "Bucket", "RightStamp", "RightSentinel" },
                    new TstUtils.StringGenerator(leftSize),
                    new TstUtils.SortedIntGenerator(0, 100000),
                    new TstUtils.IntGenerator(20_000_000, 20_010_000)));

    // Each step re-evaluates the join and compares incremental vs. from-scratch results.
    final EvalNuggetInterface[] en = new EvalNuggetInterface[] { new EvalNugget() {
        @Override
        protected Table e() {
            return AsOfJoinHelper.asOfJoin(QueryTableJoinTest.SMALL_RIGHT_CONTROL, leftTable,
                    rightTable,
                    MatchPairFactory.getExpressions("Bucket", "LeftStamp=RightStamp"),
                    MatchPairFactory.getExpressions("RightSentinel"),
                    SortingOrder.Ascending, false);
        }
    } };

    for (int step = 0; step < maxSteps; step++) {
        System.out.println("Step = " + step + ", leftSize=" + leftSize + ", rightSize=" + rightSize
                + ", seed = " + seed + ", joinIncrement=" + joinIncrement);
        if (RefreshingTableTestCase.printTableUpdates) {
            System.out.println("Left Table:" + leftTable.size());
            TableTools.showWithRowSet(leftTable, 100);
            System.out.println("Right Table:" + rightTable.size());
            TableTools.showWithRowSet(rightTable, 100);
        }
        joinIncrement.step(leftSize, rightSize, leftTable, rightTable, leftColumnInfo,
                rightColumnInfo, en, random);
    }
}
Example use of io.deephaven.io.logger.Logger in the deephaven-core project (by deephaven):
class QueryTableNaturalJoinTest, method testNaturalJoinIncremental.
/**
 * Incremental natural-join harness: builds randomized left/right tables (optionally static on
 * either side), evaluates three natural-join variants via {@code EvalNugget}, then drives
 * {@code numSteps} incremental update steps, re-validating after each.
 *
 * @param leftStatic if true, the left table does not refresh
 * @param rightStatic if true, the right table does not refresh
 * @param leftSize initial left row count (step size is ceil(sqrt(leftSize)))
 * @param rightSize initial right row count (step size is ceil(sqrt(rightSize)))
 * @param joinIncrement strategy used to mutate the tables each step
 * @param seed RNG seed for reproducibility
 * @param numSteps in: max step count; mutated in place as the loop counter so callers can see
 *        how far the test progressed on failure
 * @param control join control (e.g. to force small tables / overflow)
 */
private static void testNaturalJoinIncremental(boolean leftStatic, boolean rightStatic,
        int leftSize, int rightSize, JoinIncrement joinIncrement, long seed,
        MutableInt numSteps, JoinControl control) {
    final Random random = new Random(seed);
    final int maxSteps = numSteps.intValue();
    // NOTE(review): removed an unused local Logger that was constructed but never referenced.

    final TstUtils.ColumnInfo[] rightColumnInfo;
    // Right keys are mostly unique (90%), with a small pool of duplicates mixed in so the join
    // exercises duplicate-right-key handling. Static tables get a smaller key space.
    final TstUtils.UniqueIntGenerator rightIntGenerator =
            new TstUtils.UniqueIntGenerator(1, rightSize * (rightStatic ? 2 : 4));
    final TstUtils.UniqueIntGenerator rightInt2Generator =
            new TstUtils.UniqueIntGenerator(1, rightSize * (rightStatic ? 2 : 4));
    final TstUtils.IntGenerator duplicateGenerator = new TstUtils.IntGenerator(100000, 100010);
    final List<TstUtils.Generator<Integer, Integer>> generatorList =
            Arrays.asList(rightIntGenerator, duplicateGenerator);
    final TstUtils.Generator<Integer, Integer> compositeGenerator =
            new TstUtils.CompositeGenerator<>(generatorList, 0.9);

    final QueryTable rightTable = getTable(!rightStatic, rightSize, random,
            rightColumnInfo = initColumnInfos(new String[] { "I1", "C1", "C2" },
                    compositeGenerator,
                    new SetGenerator<>("a", "b"),
                    rightInt2Generator));

    final ColumnInfo[] leftColumnInfo;
    // Left keys draw from the right's unique keys 75% of the time so most rows find a match.
    final QueryTable leftTable = getTable(!leftStatic, leftSize, random,
            leftColumnInfo = initColumnInfos(new String[] { "I1", "C1", "C2" },
                    new FromUniqueIntGenerator(rightIntGenerator, new IntGenerator(20, 10000), 0.75),
                    new SetGenerator<>("a", "b", "c"),
                    new FromUniqueIntGenerator(rightInt2Generator, new IntGenerator(20, 10000), 0.75)));

    // Three variants: single-key join, two-key join, and a three-key existence check.
    final EvalNugget[] en = new EvalNugget[] { new EvalNugget() {
        public Table e() {
            return NaturalJoinHelper.naturalJoin(leftTable, rightTable,
                    MatchPairFactory.getExpressions("I1"),
                    MatchPairFactory.getExpressions("LI1=I1", "LC1=C1", "LC2=C2"), false, control);
        }
    }, new EvalNugget() {
        public Table e() {
            return NaturalJoinHelper.naturalJoin(leftTable, rightTable,
                    MatchPairFactory.getExpressions("C1", "I1"),
                    MatchPairFactory.getExpressions("LC2=C2"), false, control);
        }
    }, new EvalNugget() {
        public Table e() {
            return NaturalJoinHelper.naturalJoin(leftTable,
                    (QueryTable) rightTable.update("Exists=true"),
                    MatchPairFactory.getExpressions("C1", "C2", "I1"),
                    MatchPairFactory.getExpressions("Exists"), false, control);
        }
    } };

    if (printTableUpdates) {
        for (int ii = 0; ii < en.length; ++ii) {
            en[ii].showResult("Original " + ii, en[ii].originalValue);
        }
    }

    final int leftStepSize = (int) Math.ceil(Math.sqrt(leftSize));
    final int rightStepSize = (int) Math.ceil(Math.sqrt(rightSize));
    // numSteps doubles as the loop counter (mutated in place) so a failure message can report
    // the step that broke.
    for (numSteps.setValue(0); numSteps.intValue() < maxSteps; numSteps.increment()) {
        if (printTableUpdates) {
            System.out.println("Step = " + numSteps.intValue() + ", leftSize=" + leftSize
                    + ", rightSize=" + rightSize + ", seed = " + seed
                    + ", joinIncrement=" + joinIncrement);
            System.out.println("Left Table:" + leftTable.size());
            TableTools.showWithRowSet(leftTable, 100);
            System.out.println("Right Table:" + rightTable.size());
            TableTools.showWithRowSet(rightTable, 100);
        }
        joinIncrement.step(leftStepSize, rightStepSize, leftTable, rightTable, leftColumnInfo,
                rightColumnInfo, en, random);
    }
}
Example use of io.deephaven.io.logger.Logger in the deephaven-core project (by deephaven):
class LoggerFactorySingleCache, method create.
// Returns the single cached Logger, creating it on first use. The name parameter is ignored:
// every caller shares one instance. Uses double-checked locking so the common (already
// initialized) path avoids synchronization.
// NOTE(review): for this pattern to be safe under the Java Memory Model, the INSTANCE field
// (declared elsewhere in this class) must be volatile — confirm at the field declaration.
@Override
public final Logger create(String name) {
Logger local;
// First (unsynchronized) read; if already initialized, return without locking.
if ((local = INSTANCE) == null) {
synchronized (this) {
// Re-check under the lock: another thread may have initialized INSTANCE
// between our first read and acquiring the monitor.
if ((local = INSTANCE) == null) {
local = createInternal();
INSTANCE = local;
}
}
}
// Return the local copy, not INSTANCE, to avoid a second (racy) field read.
return local;
}
Aggregations