Use of org.apache.druid.indexer.partitions.DynamicPartitionsSpec in project druid by druid-io: class ITSqlInputSourceTest, method testIndexData.
@Test(dataProvider = "resources")
public void testIndexData(List<String> sqlQueries) throws Exception
{
  final String indexDatasource = "wikipedia_index_test_" + UUID.randomUUID();
  try (final Closeable ignored1 = unloader(indexDatasource + config.getExtraDatasourceNameSuffix())) {
    final Function<String, String> sqlInputSourcePropsTransform = spec -> {
      try {
        spec = StringUtils.replace(spec, "%%PARTITIONS_SPEC%%", jsonMapper.writeValueAsString(new DynamicPartitionsSpec(null, null)));
        return StringUtils.replace(spec, "%%SQL_QUERY%%", jsonMapper.writeValueAsString(sqlQueries));
      }
      catch (Exception e) {
        throw new RuntimeException(e);
      }
    };
    doIndexTest(indexDatasource, INDEX_TASK, sqlInputSourcePropsTransform, INDEX_QUERIES_RESOURCE, false, true, true, new Pair<>(false, false));
  }
}
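Every transform on this page expands %%PARTITIONS_SPEC%% to the JSON form of new DynamicPartitionsSpec(null, null). A minimal sketch of that serialization, assuming Druid's DefaultObjectMapper; the exact field set may vary by version:

import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.druid.indexer.partitions.DynamicPartitionsSpec;
import org.apache.druid.jackson.DefaultObjectMapper;

public class PartitionsSpecJsonSketch
{
  public static void main(String[] args) throws Exception
  {
    final ObjectMapper jsonMapper = new DefaultObjectMapper();
    // With both arguments null the spec falls back to its defaults, so the
    // output is roughly: {"type":"dynamic","maxRowsPerSegment":5000000,"maxTotalRows":null}
    System.out.println(jsonMapper.writeValueAsString(new DynamicPartitionsSpec(null, null)));
  }
}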
Use of org.apache.druid.indexer.partitions.DynamicPartitionsSpec in project druid by druid-io: class AbstractS3InputSourceParallelIndexTest, method doTest.
void doTest(Pair<String, List> s3InputSource, Pair<Boolean, Boolean> segmentAvailabilityConfirmationPair) throws Exception
{
  final String indexDatasource = "wikipedia_index_test_" + UUID.randomUUID();
  try (final Closeable ignored1 = unloader(indexDatasource + config.getExtraDatasourceNameSuffix())) {
    final Function<String, String> s3PropsTransform = spec -> {
      try {
        String inputSourceValue = jsonMapper.writeValueAsString(s3InputSource.rhs);
        inputSourceValue = StringUtils.replace(inputSourceValue, "%%BUCKET%%", config.getCloudBucket());
        inputSourceValue = StringUtils.replace(inputSourceValue, "%%PATH%%", config.getCloudPath());
        spec = StringUtils.replace(spec, "%%INPUT_FORMAT_TYPE%%", InputFormatDetails.JSON.getInputFormatType());
        spec = StringUtils.replace(spec, "%%PARTITIONS_SPEC%%", jsonMapper.writeValueAsString(new DynamicPartitionsSpec(null, null)));
        spec = StringUtils.replace(spec, "%%INPUT_SOURCE_TYPE%%", "s3");
        spec = StringUtils.replace(spec, "%%INPUT_SOURCE_PROPERTY_KEY%%", s3InputSource.lhs);
        return StringUtils.replace(spec, "%%INPUT_SOURCE_PROPERTY_VALUE%%", inputSourceValue);
      }
      catch (Exception e) {
        throw new RuntimeException(e);
      }
    };
    doIndexTest(indexDatasource, INDEX_TASK, s3PropsTransform, INDEX_QUERIES_RESOURCE, false, true, true, segmentAvailabilityConfirmationPair);
  }
}
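A hypothetical caller, sketched to show how a concrete S3 test might drive doTest; the property key and templated URI below are illustrative, not copied from the real ITS3* subclasses:

@Test
public void testS3IndexDataWithUris() throws Exception
{
  // lhs names the s3 input source property ("uris", "prefixes", or "objects");
  // rhs holds the templated values that %%BUCKET%% / %%PATH%% are substituted into.
  doTest(
      new Pair<>("uris", ImmutableList.of("s3://%%BUCKET%%/%%PATH%%wikipedia_index_data1.json")),
      new Pair<>(false, false)
  );
}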
Use of org.apache.druid.indexer.partitions.DynamicPartitionsSpec in project druid by druid-io: class ITCombiningInputSourceParallelIndexTest, method testIndexData.
@Test
public void testIndexData() throws Exception
{
  Map<String, Object> inputFormatMap = new ImmutableMap.Builder<String, Object>().put("type", "json").build();
  try (final Closeable ignored1 = unloader(INDEX_DATASOURCE + config.getExtraDatasourceNameSuffix());
       final Closeable ignored2 = unloader(COMBINING_INDEX_DATASOURCE + config.getExtraDatasourceNameSuffix())) {
    final Function<String, String> combiningInputSourceSpecTransform = spec -> {
      try {
        spec = StringUtils.replace(spec, "%%PARTITIONS_SPEC%%", jsonMapper.writeValueAsString(new DynamicPartitionsSpec(null, null)));
        spec = StringUtils.replace(spec, "%%INPUT_SOURCE_FILTER%%", "wikipedia_index_data*");
        spec = StringUtils.replace(spec, "%%INPUT_SOURCE_BASE_DIR%%", "/resources/data/batch_index/json");
        spec = StringUtils.replace(spec, "%%INPUT_FORMAT%%", jsonMapper.writeValueAsString(inputFormatMap));
        spec = StringUtils.replace(spec, "%%APPEND_TO_EXISTING%%", jsonMapper.writeValueAsString(false));
        spec = StringUtils.replace(spec, "%%DROP_EXISTING%%", jsonMapper.writeValueAsString(false));
        spec = StringUtils.replace(spec, "%%FORCE_GUARANTEED_ROLLUP%%", jsonMapper.writeValueAsString(false));
        spec = StringUtils.replace(spec, "%%COMBINING_DATASOURCE%%", INDEX_DATASOURCE + config.getExtraDatasourceNameSuffix());
        return spec;
      }
      catch (Exception e) {
        throw new RuntimeException(e);
      }
    };
    doIndexTest(INDEX_DATASOURCE, INDEX_TASK, combiningInputSourceSpecTransform, INDEX_QUERIES_RESOURCE, false, true, true, new Pair<>(false, false));
    doIndexTest(COMBINING_INDEX_DATASOURCE, COMBINING_INDEX_TASK, combiningInputSourceSpecTransform, COMBINING_QUERIES_RESOURCE, false, true, true, new Pair<>(false, false));
  }
}
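For orientation, a sketch of the shape the combining template plausibly resolves to: a local delegate over the batch JSON files plus a druid delegate over the datasource ingested by the first doIndexTest call. The field values below are assumptions, not the literal test resource:

Map<String, Object> combiningInputSource = ImmutableMap.of(
    "type", "combining",
    "delegates", ImmutableList.of(
        // Matches %%INPUT_SOURCE_BASE_DIR%% and %%INPUT_SOURCE_FILTER%% above.
        ImmutableMap.of("type", "local", "baseDir", "/resources/data/batch_index/json", "filter", "wikipedia_index_data*"),
        // Re-reads the datasource substituted for %%COMBINING_DATASOURCE%%;
        // the interval here is a hypothetical placeholder.
        ImmutableMap.of("type", "druid", "dataSource", "wikipedia_index_test", "interval", "2013-08-31/2013-09-02")
    )
);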
Use of org.apache.druid.indexer.partitions.DynamicPartitionsSpec in project druid by druid-io: class AbstractAzureInputSourceParallelIndexTest, method doTest.
void doTest(Pair<String, List> azureInputSource, Pair<Boolean, Boolean> segmentAvailabilityConfirmationPair) throws Exception
{
  final String indexDatasource = "wikipedia_index_test_" + UUID.randomUUID();
  try (final Closeable ignored1 = unloader(indexDatasource + config.getExtraDatasourceNameSuffix())) {
    // Identical to the S3 transform except that the input source type token is "azure".
    final Function<String, String> azurePropsTransform = spec -> {
      try {
        String inputSourceValue = jsonMapper.writeValueAsString(azureInputSource.rhs);
        inputSourceValue = StringUtils.replace(inputSourceValue, "%%BUCKET%%", config.getCloudBucket());
        inputSourceValue = StringUtils.replace(inputSourceValue, "%%PATH%%", config.getCloudPath());
        spec = StringUtils.replace(spec, "%%INPUT_FORMAT_TYPE%%", InputFormatDetails.JSON.getInputFormatType());
        spec = StringUtils.replace(spec, "%%PARTITIONS_SPEC%%", jsonMapper.writeValueAsString(new DynamicPartitionsSpec(null, null)));
        spec = StringUtils.replace(spec, "%%INPUT_SOURCE_TYPE%%", "azure");
        spec = StringUtils.replace(spec, "%%INPUT_SOURCE_PROPERTY_KEY%%", azureInputSource.lhs);
        return StringUtils.replace(spec, "%%INPUT_SOURCE_PROPERTY_VALUE%%", inputSourceValue);
      }
      catch (Exception e) {
        throw new RuntimeException(e);
      }
    };
    doIndexTest(indexDatasource, INDEX_TASK, azurePropsTransform, INDEX_QUERIES_RESOURCE, false, true, true, segmentAvailabilityConfirmationPair);
  }
}
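To make the bucket/path substitution concrete, a worked example under assumed config values; everything here is hypothetical:

// Suppose config.getCloudBucket() returns "example-bucket", config.getCloudPath()
// returns "batch/", and azureInputSource.rhs serializes to a templated URI list.
String inputSourceValue = "[\"azure://%%BUCKET%%/%%PATH%%wikipedia_index_data1.json\"]";
inputSourceValue = StringUtils.replace(inputSourceValue, "%%BUCKET%%", "example-bucket");
inputSourceValue = StringUtils.replace(inputSourceValue, "%%PATH%%", "batch/");
// inputSourceValue is now: ["azure://example-bucket/batch/wikipedia_index_data1.json"]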
Use of org.apache.druid.indexer.partitions.DynamicPartitionsSpec in project druid by druid-io: class AbstractOssInputSourceParallelIndexTest, method doTest.
void doTest(Pair<String, List> inputSource, Pair<Boolean, Boolean> segmentAvailabilityConfirmationPair) throws Exception
{
  final String indexDatasource = "wikipedia_index_test_" + UUID.randomUUID();
  try (final Closeable ignored1 = unloader(indexDatasource + config.getExtraDatasourceNameSuffix())) {
    // Identical to the S3 and Azure transforms except that the input source type token is "oss".
    final Function<String, String> propsTransform = spec -> {
      try {
        String inputSourceValue = jsonMapper.writeValueAsString(inputSource.rhs);
        inputSourceValue = StringUtils.replace(inputSourceValue, "%%BUCKET%%", config.getCloudBucket());
        inputSourceValue = StringUtils.replace(inputSourceValue, "%%PATH%%", config.getCloudPath());
        spec = StringUtils.replace(spec, "%%INPUT_FORMAT_TYPE%%", InputFormatDetails.JSON.getInputFormatType());
        spec = StringUtils.replace(spec, "%%PARTITIONS_SPEC%%", jsonMapper.writeValueAsString(new DynamicPartitionsSpec(null, null)));
        spec = StringUtils.replace(spec, "%%INPUT_SOURCE_TYPE%%", "oss");
        spec = StringUtils.replace(spec, "%%INPUT_SOURCE_PROPERTY_KEY%%", inputSource.lhs);
        return StringUtils.replace(spec, "%%INPUT_SOURCE_PROPERTY_VALUE%%", inputSourceValue);
      }
      catch (Exception e) {
        throw new RuntimeException(e);
      }
    };
    doIndexTest(indexDatasource, INDEX_TASK, propsTransform, INDEX_QUERIES_RESOURCE, false, true, true, segmentAvailabilityConfirmationPair);
  }
}