Use of org.elasticsearch.index.Index in project elasticsearch by elastic.
In the class DiskThresholdDeciderUnitTests, method testSizeShrinkIndex:
public void testSizeShrinkIndex() {
    ImmutableOpenMap.Builder<String, Long> shardSizes = ImmutableOpenMap.builder();
    shardSizes.put("[test][0][p]", 10L);
    shardSizes.put("[test][1][p]", 100L);
    shardSizes.put("[test][2][p]", 500L);
    shardSizes.put("[test][3][p]", 500L);
    ClusterInfo info = new DevNullClusterInfo(ImmutableOpenMap.of(), ImmutableOpenMap.of(), shardSizes.build());

    // "test" is the 4-shard source index; "target" and "target2" are shrink targets with 1 and 2 shards.
    MetaData.Builder metaBuilder = MetaData.builder();
    metaBuilder.put(IndexMetaData.builder("test")
        .settings(settings(Version.CURRENT).put("index.uuid", "1234"))
        .numberOfShards(4).numberOfReplicas(0));
    metaBuilder.put(IndexMetaData.builder("target")
        .settings(settings(Version.CURRENT).put("index.uuid", "5678")
            .put("index.shrink.source.name", "test")
            .put("index.shrink.source.uuid", "1234"))
        .numberOfShards(1).numberOfReplicas(0));
    metaBuilder.put(IndexMetaData.builder("target2")
        .settings(settings(Version.CURRENT).put("index.uuid", "9101112")
            .put("index.shrink.source.name", "test")
            .put("index.shrink.source.uuid", "1234"))
        .numberOfShards(2).numberOfReplicas(0));
    MetaData metaData = metaBuilder.build();

    RoutingTable.Builder routingTableBuilder = RoutingTable.builder();
    routingTableBuilder.addAsNew(metaData.index("test"));
    routingTableBuilder.addAsNew(metaData.index("target"));
    routingTableBuilder.addAsNew(metaData.index("target2"));
    ClusterState clusterState = ClusterState.builder(org.elasticsearch.cluster.ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY))
        .metaData(metaData)
        .routingTable(routingTableBuilder.build())
        .build();

    AllocationService allocationService = createAllocationService();
    clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder().add(newNode("node1"))).build();
    clusterState = allocationService.reroute(clusterState, "foo");
    clusterState = allocationService.applyStartedShards(clusterState,
        clusterState.getRoutingTable().index("test").shardsWithState(ShardRoutingState.UNASSIGNED));
    RoutingAllocation allocation = new RoutingAllocation(null, clusterState.getRoutingNodes(), clusterState, info, 0, false);

    // Shards of the source index report their own sizes from the ClusterInfo.
    final Index index = new Index("test", "1234");
    ShardRouting test_0 = ShardRouting.newUnassigned(new ShardId(index, 0), true, LocalShardsRecoverySource.INSTANCE,
        new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
    test_0 = ShardRoutingHelper.initialize(test_0, "node1");
    test_0 = ShardRoutingHelper.moveToStarted(test_0);

    ShardRouting test_1 = ShardRouting.newUnassigned(new ShardId(index, 1), true, LocalShardsRecoverySource.INSTANCE,
        new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
    test_1 = ShardRoutingHelper.initialize(test_1, "node2");
    test_1 = ShardRoutingHelper.moveToStarted(test_1);

    ShardRouting test_2 = ShardRouting.newUnassigned(new ShardId(index, 2), true, LocalShardsRecoverySource.INSTANCE,
        new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
    test_2 = ShardRoutingHelper.initialize(test_2, "node1");

    ShardRouting test_3 = ShardRouting.newUnassigned(new ShardId(index, 3), true, LocalShardsRecoverySource.INSTANCE,
        new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
    test_3 = ShardRoutingHelper.initialize(test_3, "node1");

    assertEquals(500L, DiskThresholdDecider.getExpectedShardSize(test_3, allocation, 0));
    assertEquals(500L, DiskThresholdDecider.getExpectedShardSize(test_2, allocation, 0));
    assertEquals(100L, DiskThresholdDecider.getExpectedShardSize(test_1, allocation, 0));
    assertEquals(10L, DiskThresholdDecider.getExpectedShardSize(test_0, allocation, 0));

    // Shards of a shrink target are expected to be as large as the source shards they absorb.
    ShardRouting target = ShardRouting.newUnassigned(new ShardId(new Index("target", "5678"), 0), true,
        LocalShardsRecoverySource.INSTANCE, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
    assertEquals(1110L, DiskThresholdDecider.getExpectedShardSize(target, allocation, 0));

    ShardRouting target2 = ShardRouting.newUnassigned(new ShardId(new Index("target2", "9101112"), 0), true,
        LocalShardsRecoverySource.INSTANCE, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
    assertEquals(110L, DiskThresholdDecider.getExpectedShardSize(target2, allocation, 0));

    target2 = ShardRouting.newUnassigned(new ShardId(new Index("target2", "9101112"), 1), true,
        LocalShardsRecoverySource.INSTANCE, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
    assertEquals(1000L, DiskThresholdDecider.getExpectedShardSize(target2, allocation, 0));
}
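The shrink-target assertions above follow from summing the sizes of the source shards that each target shard absorbs: "target" (one shard) covers all of [test][0..3] (10 + 100 + 500 + 500 = 1110), while "target2" (two shards) splits them into contiguous groups (10 + 100 = 110 and 500 + 500 = 1000). A minimal standalone sketch of that arithmetic, not the Elasticsearch API, with the contiguous grouping inferred from the expected values in the test:

// Standalone illustration only: each shrink-target shard is assumed to absorb a
// contiguous block of source shards, matching the expected values asserted above.
public class ShrinkSizeSketch {
    public static void main(String[] args) {
        long[] sourceSizes = {10L, 100L, 500L, 500L}; // sizes of [test][0]..[test][3]
        printExpectedSizes(sourceSizes, 1);           // "target":  1110
        printExpectedSizes(sourceSizes, 2);           // "target2": 110, 1000
    }

    static void printExpectedSizes(long[] sourceSizes, int targetShards) {
        int factor = sourceSizes.length / targetShards; // source shards per target shard
        for (int t = 0; t < targetShards; t++) {
            long expected = 0;
            for (int s = t * factor; s < (t + 1) * factor; s++) {
                expected += sourceSizes[s];
            }
            System.out.println(targetShards + " target shard(s): shard " + t + " -> " + expected);
        }
    }
}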
Use of org.elasticsearch.index.Index in project elasticsearch by elastic.
In the class IcuTokenizerFactoryTests, method createTestAnalysis:
private static TestAnalysis createTestAnalysis() throws IOException {
    // Copy the ICU break-iterator rule files (.rbbi) into the temporary node's config directory.
    InputStream keywords = IcuTokenizerFactoryTests.class.getResourceAsStream("KeywordTokenizer.rbbi");
    InputStream latin = IcuTokenizerFactoryTests.class.getResourceAsStream("Latin-dont-break-on-hyphens.rbbi");
    Path home = createTempDir();
    Path config = home.resolve("config");
    Files.createDirectory(config);
    Files.copy(keywords, config.resolve("KeywordTokenizer.rbbi"));
    Files.copy(latin, config.resolve("Latin-dont-break-on-hyphens.rbbi"));

    // Load the index analysis settings from the bundled JSON resource.
    String json = "/org/elasticsearch/index/analysis/icu_analysis.json";
    Settings settings = Settings.builder()
        .loadFromStream(json, IcuTokenizerFactoryTests.class.getResourceAsStream(json))
        .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
        .build();
    Settings nodeSettings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), home).build();
    return createTestAnalysis(new Index("test", "_na_"), nodeSettings, settings, new AnalysisICUPlugin());
}
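The icu_analysis.json resource loaded above presumably wires the copied .rbbi files into icu_tokenizer definitions. As an illustration only (the tokenizer name and the exact key layout here are assumptions, not taken from the repository), an equivalent inline settings sketch using the ICU plugin's rule_files option might look like:

// Hypothetical inline equivalent of part of icu_analysis.json; the "latin_rules" name is made up.
Settings icuTokenizerSettings = Settings.builder()
    .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
    .put("index.analysis.tokenizer.latin_rules.type", "icu_tokenizer")
    // assumed format: comma-separated "script:rulefile" pairs resolved against the config directory
    .put("index.analysis.tokenizer.latin_rules.rule_files", "Latn:Latin-dont-break-on-hyphens.rbbi")
    .build();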
Use of org.elasticsearch.index.Index in project elasticsearch by elastic.
In the class SimpleIcuAnalysisTests, method testDefaultsIcuAnalysis:
public void testDefaultsIcuAnalysis() throws IOException {
    TestAnalysis analysis = createTestAnalysis(new Index("test", "_na_"), Settings.EMPTY, new AnalysisICUPlugin());

    TokenizerFactory tokenizerFactory = analysis.tokenizer.get("icu_tokenizer");
    assertThat(tokenizerFactory, instanceOf(IcuTokenizerFactory.class));

    TokenFilterFactory filterFactory = analysis.tokenFilter.get("icu_normalizer");
    assertThat(filterFactory, instanceOf(IcuNormalizerTokenFilterFactory.class));

    filterFactory = analysis.tokenFilter.get("icu_folding");
    assertThat(filterFactory, instanceOf(IcuFoldingTokenFilterFactory.class));

    filterFactory = analysis.tokenFilter.get("icu_collation");
    assertThat(filterFactory, instanceOf(IcuCollationTokenFilterFactory.class));

    filterFactory = analysis.tokenFilter.get("icu_transform");
    assertThat(filterFactory, instanceOf(IcuTransformTokenFilterFactory.class));

    CharFilterFactory charFilterFactory = analysis.charFilter.get("icu_normalizer");
    assertThat(charFilterFactory, instanceOf(IcuNormalizerCharFilterFactory.class));
}
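The test only checks that the default ICU components resolve to the expected factory classes. A factory obtained this way can also be exercised directly; a minimal sketch (the sample input and surrounding boilerplate are illustrative, not part of the test):

// Assumes the Lucene imports org.apache.lucene.analysis.Tokenizer,
// org.apache.lucene.analysis.tokenattributes.CharTermAttribute and java.io.StringReader.
Tokenizer tokenizer = tokenizerFactory.create();
tokenizer.setReader(new StringReader("Elasticsearch is built on Lucene"));
CharTermAttribute term = tokenizer.addAttribute(CharTermAttribute.class);
tokenizer.reset();
while (tokenizer.incrementToken()) {
    System.out.println(term.toString()); // one line per token produced by the ICU tokenizer
}
tokenizer.end();
tokenizer.close();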
Use of org.elasticsearch.index.Index in project elasticsearch by elastic.
In the class SimpleIcuCollationTokenFilterTests, method testSecondaryStrength:
/*
 * Test secondary strength; for English, case is not significant.
 */
public void testSecondaryStrength() throws IOException {
    Settings settings = Settings.builder()
        .put("index.analysis.filter.myCollator.type", "icu_collation")
        .put("index.analysis.filter.myCollator.language", "en")
        .put("index.analysis.filter.myCollator.strength", "secondary")
        .put("index.analysis.filter.myCollator.decomposition", "no")
        .build();
    TestAnalysis analysis = createTestAnalysis(new Index("test", "_na_"), settings, new AnalysisICUPlugin());
    TokenFilterFactory filterFactory = analysis.tokenFilter.get("myCollator");
    assertCollatesToSame(filterFactory, "TESTING", "testing");
}
Use of org.elasticsearch.index.Index in project elasticsearch by elastic.
In the class SimpleIcuCollationTokenFilterTests, method testIgnoreWhitespace:
/*
 * Setting alternate=shifted and variableTop to shift whitespace, but not
 * punctuation or symbols, to quaternary level.
 */
public void testIgnoreWhitespace() throws IOException {
    Settings settings = Settings.builder()
        .put("index.analysis.filter.myCollator.type", "icu_collation")
        .put("index.analysis.filter.myCollator.language", "en")
        .put("index.analysis.filter.myCollator.strength", "primary")
        .put("index.analysis.filter.myCollator.alternate", "shifted")
        .put("index.analysis.filter.myCollator.variableTop", " ")
        .build();
    TestAnalysis analysis = createTestAnalysis(new Index("test", "_na_"), settings, new AnalysisICUPlugin());
    TokenFilterFactory filterFactory = analysis.tokenFilter.get("myCollator");
    assertCollatesToSame(filterFactory, "foo bar", "foobar");
    // now assert that punctuation still matters: foo-bar < foo bar
    assertCollation(filterFactory, "foo-bar", "foo bar", -1);
}
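The helpers assertCollatesToSame and assertCollation are defined elsewhere in the test class and are not shown here. A hedged sketch of the kind of logic they rely on: run each input through the collation filter on top of a keyword tokenizer and capture the single encoded term that comes out (the helper below is illustrative, not the actual implementation):

// Illustrative helper, assuming org.apache.lucene.analysis.core.KeywordTokenizer and
// org.apache.lucene.analysis.tokenattributes.CharTermAttribute are available.
private static String collationTerm(TokenFilterFactory factory, String input) throws IOException {
    Tokenizer tokenizer = new KeywordTokenizer();
    tokenizer.setReader(new StringReader(input));
    TokenStream stream = factory.create(tokenizer);
    CharTermAttribute term = stream.addAttribute(CharTermAttribute.class);
    stream.reset();
    assertTrue(stream.incrementToken());
    String encodedKey = term.toString(); // the collation filter replaces the token with its collation key
    stream.end();
    stream.close();
    return encodedKey;
}

"Collates to same" then means the encoded keys of the two inputs are equal, while an ordering assertion like assertCollation(filterFactory, "foo-bar", "foo bar", -1) compares them with String.compareTo and checks the sign against the expected collation order.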