use of org.apache.druid.java.util.common.HumanReadableBytes in project druid by druid-io.
the class OssInputSourceTest method testCreateSplitsWithSplitHintSpecRespectingHint.
@Test
public void testCreateSplitsWithSplitHintSpecRespectingHint() {
    EasyMock.reset(OSSCLIENT);
    expectListObjects(PREFIXES.get(0), ImmutableList.of(EXPECTED_URIS.get(0)), CONTENT);
    expectListObjects(PREFIXES.get(1), ImmutableList.of(EXPECTED_URIS.get(1)), CONTENT);
    EasyMock.replay(OSSCLIENT);
    OssInputSource inputSource = new OssInputSource(OSSCLIENT, INPUT_DATA_CONFIG, null, PREFIXES, null, null);
    Stream<InputSplit<List<CloudObjectLocation>>> splits = inputSource.createSplits(
        new JsonInputFormat(JSONPathSpec.DEFAULT, null, null),
        new MaxSizeSplitHintSpec(new HumanReadableBytes(CONTENT.length * 3L), null)
    );
    Assert.assertEquals(
        ImmutableList.of(EXPECTED_URIS.stream().map(CloudObjectLocation::new).collect(Collectors.toList())),
        splits.map(InputSplit::get).collect(Collectors.toList())
    );
    EasyMock.verify(OSSCLIENT);
}
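Both this OSS test and the Google Cloud Storage test below pass a byte limit of CONTENT.length * 3L, so the two listed objects fit into a single split, which is why the assertion expects one combined list of locations. For reference, a minimal stand-alone sketch of the two HumanReadableBytes constructors used across these examples (the getBytes() accessor and the class name HumanReadableBytesSketch are assumptions for illustration, not taken from the tests):

import org.apache.druid.java.util.common.HumanReadableBytes;

public class HumanReadableBytesSketch {
    public static void main(String[] args) {
        // Exact byte count, as used in the serde tests further down.
        HumanReadableBytes fromLong = new HumanReadableBytes(1024L);
        // Human-readable string, as used in the "500M" split test further down.
        HumanReadableBytes fromString = new HumanReadableBytes("500M");
        System.out.println(fromLong.getBytes());   // assumed accessor: resolved size in bytes (1024)
        System.out.println(fromString.getBytes()); // resolved size of "500M" in bytes
    }
}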
use of org.apache.druid.java.util.common.HumanReadableBytes in project druid by druid-io.
the class GoogleCloudStorageInputSourceTest method testCreateSplitsWithSplitHintSpecRespectingHint.
@Test
public void testCreateSplitsWithSplitHintSpecRespectingHint() throws IOException {
    EasyMock.reset(STORAGE);
    EasyMock.reset(INPUT_DATA_CONFIG);
    addExpectedPrefixObjects(PREFIXES.get(0), ImmutableList.of(EXPECTED_URIS.get(0)));
    addExpectedPrefixObjects(PREFIXES.get(1), ImmutableList.of(EXPECTED_URIS.get(1)));
    EasyMock.expect(INPUT_DATA_CONFIG.getMaxListingLength()).andReturn(MAX_LISTING_LENGTH);
    EasyMock.replay(STORAGE);
    EasyMock.replay(INPUT_DATA_CONFIG);
    GoogleCloudStorageInputSource inputSource =
        new GoogleCloudStorageInputSource(STORAGE, INPUT_DATA_CONFIG, null, PREFIXES, null);
    Stream<InputSplit<List<CloudObjectLocation>>> splits = inputSource.createSplits(
        new JsonInputFormat(JSONPathSpec.DEFAULT, null, null),
        new MaxSizeSplitHintSpec(new HumanReadableBytes(CONTENT.length * 3L), null)
    );
    Assert.assertEquals(
        ImmutableList.of(EXPECTED_URIS.stream().map(CloudObjectLocation::new).collect(Collectors.toList())),
        splits.map(InputSplit::get).collect(Collectors.toList())
    );
}
use of org.apache.druid.java.util.common.HumanReadableBytes in project druid by druid-io.
the class MaxSizeSplitHintSpecTest method testSplitSmallInputsWithMaxNumFilesEachSplitShouldHaveLessFilesAssigned.
@Test
public void testSplitSmallInputsWithMaxNumFilesEachSplitShouldHaveLessFilesAssigned() {
    final int eachInputSize = 3;
    final MaxSizeSplitHintSpec splitHintSpec = new MaxSizeSplitHintSpec(new HumanReadableBytes("500M"), 3);
    final Function<Integer, InputFileAttribute> inputAttributeExtractor = InputFileAttribute::new;
    final List<List<Integer>> splits = Lists.newArrayList(
        splitHintSpec.split(IntStream.generate(() -> eachInputSize).limit(10).iterator(), inputAttributeExtractor)
    );
    Assert.assertEquals(4, splits.size());
    Assert.assertEquals(3, splits.get(0).size());
    Assert.assertEquals(3, splits.get(1).size());
    Assert.assertEquals(3, splits.get(2).size());
    Assert.assertEquals(1, splits.get(3).size());
}
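In this test the 500M byte limit never binds, so maxNumFiles = 3 is what caps each split: ten 3-byte inputs are grouped greedily into splits of 3, 3, 3, and 1 files. For contrast, a small self-contained sketch (illustrative sizes and the class name MaxSizeByBytesSketch are not from the Druid test suite; expected output is an assumption about the greedy grouping) where the byte limit is the constraint instead:

import com.google.common.collect.Lists;
import org.apache.druid.data.input.InputFileAttribute;
import org.apache.druid.data.input.MaxSizeSplitHintSpec;
import org.apache.druid.java.util.common.HumanReadableBytes;
import java.util.List;
import java.util.stream.IntStream;

public class MaxSizeByBytesSketch {
    public static void main(String[] args) {
        // 7-byte limit, default maxNumFiles: two 3-byte inputs fit in a split, a third would exceed it.
        MaxSizeSplitHintSpec hint = new MaxSizeSplitHintSpec(new HumanReadableBytes(7L), null);
        List<List<Integer>> splits = Lists.newArrayList(
            hint.split(IntStream.generate(() -> 3).limit(3).iterator(), InputFileAttribute::new)
        );
        System.out.println(splits); // expected greedy grouping: [[3, 3], [3]]
    }
}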
use of org.apache.druid.java.util.common.HumanReadableBytes in project druid by druid-io.
the class MaxSizeSplitHintSpecTest method testSerde.
@Test
public void testSerde() throws IOException {
    final ObjectMapper mapper = new ObjectMapper();
    final MaxSizeSplitHintSpec original = new MaxSizeSplitHintSpec(new HumanReadableBytes(1024L), 20_000);
    final byte[] bytes = mapper.writeValueAsBytes(original);
    final MaxSizeSplitHintSpec fromJson = (MaxSizeSplitHintSpec) mapper.readValue(bytes, SplitHintSpec.class);
    Assert.assertEquals(original, fromJson);
}
use of org.apache.druid.java.util.common.HumanReadableBytes in project druid by druid-io.
the class MaxSizeSplitHintSpecTest method testReadFromJson.
@Test
public void testReadFromJson() throws JsonProcessingException {
    final ObjectMapper mapper = new ObjectMapper();
    final String json = "{" + " \"type\":\"maxSize\"," + " \"maxSplitSize\":1024," + " \"maxNumFiles\":20000" + "}\n";
    final MaxSizeSplitHintSpec fromJson = (MaxSizeSplitHintSpec) mapper.readValue(json, SplitHintSpec.class);
    Assert.assertEquals(new MaxSizeSplitHintSpec(new HumanReadableBytes(1024L), 20_000), fromJson);
}
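Because maxSplitSize is backed by HumanReadableBytes, the same field should also accept a human-readable string in JSON. The following is a hedged variant of the test body above (the string form and the "1KiB" == 1024 bytes equivalence are assumptions about HumanReadableBytes parsing, not something this test asserts):

    // Hypothetical continuation of testReadFromJson, reusing its mapper.
    final String humanReadableJson = "{ \"type\":\"maxSize\", \"maxSplitSize\":\"1KiB\", \"maxNumFiles\":20000 }";
    final MaxSizeSplitHintSpec fromString = (MaxSizeSplitHintSpec) mapper.readValue(humanReadableJson, SplitHintSpec.class);
    Assert.assertEquals(new MaxSizeSplitHintSpec(new HumanReadableBytes(1024L), 20_000), fromString);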