Example usage of org.elasticsearch.common.unit.ByteSizeValue in the project "crate" by crate:
class SortingProjectorTest, method testUsedMemoryIsAccountedFor.
@Test
public void testUsedMemoryIsAccountedFor() throws Exception {
    // A tiny 30-byte limit guarantees that accounting for the projected rows trips the breaker.
    MemoryCircuitBreaker breaker = new MemoryCircuitBreaker(
        new ByteSizeValue(30, ByteSizeUnit.BYTES),
        1,
        LogManager.getLogger(SortingProjectorTest.class));
    RowCellsAccountingWithEstimators accounting = new RowCellsAccountingWithEstimators(
        List.of(DataTypes.LONG, DataTypes.BOOLEAN),
        ConcurrentRamAccounting.forCircuitBreaker("testContext", breaker),
        0);
    Projector projector = createProjector(accounting, 1, 0);
    consumer.accept(projector.apply(TestingBatchIterators.range(1, 11)), null);
    // The breaker trip surfaces when the consumer's result is retrieved.
    expectedException.expect(CircuitBreakingException.class);
    consumer.getResult();
}
Example usage of org.elasticsearch.common.unit.ByteSizeValue in the project "crate" by crate:
class SortingTopNProjectorTest, method testUsedMemoryIsAccountedFor.
@Test
public void testUsedMemoryIsAccountedFor() throws Exception {
    // A tiny 30-byte limit guarantees that accounting for the projected rows trips the breaker.
    MemoryCircuitBreaker breaker = new MemoryCircuitBreaker(
        new ByteSizeValue(30, ByteSizeUnit.BYTES),
        1,
        LogManager.getLogger(SortingTopNProjectorTest.class));
    RowCellsAccountingWithEstimators accounting = new RowCellsAccountingWithEstimators(
        List.of(DataTypes.LONG, DataTypes.BOOLEAN),
        ConcurrentRamAccounting.forCircuitBreaker("testContext", breaker),
        0);
    Projector projector = getProjector(accounting, 1, 100_000, TopN.NO_OFFSET, FIRST_CELL_ORDERING);
    consumer.accept(projector.apply(TestingBatchIterators.range(1, 11)), null);
    // The breaker trip surfaces when the consumer's result is retrieved.
    expectedException.expect(CircuitBreakingException.class);
    consumer.getResult();
}
Example usage of org.elasticsearch.common.unit.ByteSizeValue in the project "crate" by crate:
class DiskThresholdSettings, method doValidateAsBytes.
private static void doValidateAsBytes(final String low, final String high, final String flood) {
    // When all three watermarks are absolute byte values, enforce the ordering
    // low >= high >= flood (as resolved by thresholdBytesFromWatermark).
    final long lowBytes = thresholdBytesFromWatermark(
        low, CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey(), false).getBytes();
    final long highBytes = thresholdBytesFromWatermark(
        high, CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), false).getBytes();
    final long floodBytes = thresholdBytesFromWatermark(
        flood, CLUSTER_ROUTING_ALLOCATION_DISK_FLOOD_STAGE_WATERMARK_SETTING.getKey(), false).getBytes();
    if (lowBytes < highBytes) {
        throw new IllegalArgumentException(
            "low disk watermark [" + low + "] less than high disk watermark [" + high + "]");
    }
    if (highBytes < floodBytes) {
        throw new IllegalArgumentException(
            "high disk watermark [" + high + "] less than flood stage disk watermark [" + flood + "]");
    }
}
Example usage of org.elasticsearch.common.unit.ByteSizeValue in the project "crate" by crate:
class RamAccountingPageIteratorTest, method testCircuitBreaking.
@Test
public void testCircuitBreaking() throws Exception {
    // The 197-byte limit is below the 288 bytes accounted for the two merged test rows,
    // so merging must raise a CircuitBreakingException.
    // NOTE: the breaker is constructed inside the supplier on purpose, so each supplier
    // invocation gets a fresh breaker instance.
    PagingIterator<Integer, Row> iterator = PagingIterator.create(
        2,
        true,
        null,
        () -> new RowAccountingWithEstimators(
            List.of(DataTypes.STRING, DataTypes.STRING, DataTypes.STRING),
            ConcurrentRamAccounting.forCircuitBreaker(
                "test",
                new MemoryCircuitBreaker(
                    new ByteSizeValue(197, ByteSizeUnit.BYTES),
                    1,
                    LogManager.getLogger(RowAccountingWithEstimatorsTest.class)))));
    expectedException.expect(CircuitBreakingException.class);
    expectedException.expectMessage(
        "Data too large, data for field [test] would be [288/288b], which is larger than the limit of [197/197b]");
    iterator.merge(Arrays.asList(
        new KeyIterable<>(0, Collections.singletonList(TEST_ROWS[0])),
        new KeyIterable<>(1, Collections.singletonList(TEST_ROWS[1]))));
}
Example usage of org.elasticsearch.common.unit.ByteSizeValue in the project "crate" by crate:
class NodeFetchResponseTest, method testResponseCircuitBreaker.
@Test
public void testResponseCircuitBreaker() throws Exception {
    // Serialize a response, then deserialize it under a 2-byte breaker limit:
    // reading back must trip the breaker.
    NodeFetchResponse original = new NodeFetchResponse(fetched);
    BytesStreamOutput out = new BytesStreamOutput();
    original.writeTo(out);
    StreamInput in = out.bytes().streamInput();
    expectedException.expect(CircuitBreakingException.class);
    new NodeFetchResponse(
        in,
        streamers,
        ConcurrentRamAccounting.forCircuitBreaker(
            "test",
            new MemoryCircuitBreaker(
                new ByteSizeValue(2, ByteSizeUnit.BYTES),
                1.0,
                LogManager.getLogger(NodeFetchResponseTest.class))));
}
Aggregations