Example 31 with SimpleEntry

use of java.util.AbstractMap.SimpleEntry in project leopard by tanhaichao.

the class RedisUtil method toEntryList.

/**
 * Converts a Redis sorted-set result into a list of entries.
 *
 * @param set the sorted set (element/score tuples) returned by Redis
 * @return a list of (element, score) entries, or null if the set is null or empty
 */
public static List<Entry<String, Double>> toEntryList(Set<Tuple> set) {
    if (set == null || set.isEmpty()) {
        return null;
    }
    List<Entry<String, Double>> result = new ArrayList<Entry<String, Double>>();
    for (Tuple tuple : set) {
        String element = tuple.getElement();
        Double score = tuple.getScore();
        Entry<String, Double> entry = new SimpleEntry<String, Double>(element, score);
        result.add(entry);
    }
    return result;
}
Also used : Entry(java.util.Map.Entry) SimpleEntry(java.util.AbstractMap.SimpleEntry) ArrayList(java.util.ArrayList) Tuple(redis.clients.jedis.Tuple)
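
Since every example on this page revolves around the same class, here is a minimal, self-contained sketch of java.util.AbstractMap.SimpleEntry itself (a generic illustration, not code from the leopard project): it is a standalone, mutable Map.Entry implementation that needs no backing map, which is exactly why toEntryList above can use it to pair each Redis element with its score.

import java.util.AbstractMap.SimpleEntry;
import java.util.ArrayList;
import java.util.List;
import java.util.Map.Entry;

public class SimpleEntryDemo {
    public static void main(String[] args) {
        // SimpleEntry is a plain Map.Entry implementation: no backing map required.
        List<Entry<String, Double>> scores = new ArrayList<>();
        scores.add(new SimpleEntry<>("member-a", 10.0));
        scores.add(new SimpleEntry<>("member-b", 20.0));
        for (Entry<String, Double> entry : scores) {
            // getKey()/getValue() behave exactly like entries of a real map;
            // setValue() is also supported because SimpleEntry is mutable.
            System.out.println(entry.getKey() + " -> " + entry.getValue());
        }
    }
}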

Example 32 with SimpleEntry

use of java.util.AbstractMap.SimpleEntry in project pravega by pravega.

the class StreamMetadataStoreTest method sizeTest.

@Test
public void sizeTest() throws Exception {
    final String scope = "ScopeSize";
    final String stream = "StreamSize";
    final ScalingPolicy policy = ScalingPolicy.fixed(2);
    final RetentionPolicy retentionPolicy = RetentionPolicy.builder().retentionType(RetentionPolicy.RetentionType.SIZE).retentionParam(100L).build();
    final StreamConfiguration configuration = StreamConfiguration.builder().scope(scope).streamName(stream).scalingPolicy(policy).retentionPolicy(retentionPolicy).build();
    long start = System.currentTimeMillis();
    store.createScope(scope).get();
    store.createStream(scope, stream, configuration, start, null, executor).get();
    store.setState(scope, stream, State.ACTIVE, null, executor).get();
    store.addUpdateStreamForAutoStreamCut(scope, stream, retentionPolicy, null, executor).get();
    List<String> streams = store.getStreamsForBucket(0, executor).get();
    assertTrue(streams.contains(String.format("%s/%s", scope, stream)));
    // region Size Computation on stream cuts on epoch 0
    Map<Integer, Long> map1 = new HashMap<>();
    map1.put(0, 10L);
    map1.put(1, 10L);
    Long size = store.getSizeTillStreamCut(scope, stream, map1, null, executor).join();
    assertTrue(size == 20L);
    long recordingTime = System.currentTimeMillis();
    StreamCutRecord streamCut1 = new StreamCutRecord(recordingTime, size, map1);
    store.addStreamCutToRetentionSet(scope, stream, streamCut1, null, executor).get();
    Map<Integer, Long> map2 = new HashMap<>();
    map2.put(0, 20L);
    map2.put(1, 20L);
    size = store.getSizeTillStreamCut(scope, stream, map2, null, executor).join();
    assertTrue(size == 40L);
    StreamCutRecord streamCut2 = new StreamCutRecord(recordingTime + 10, size, map2);
    store.addStreamCutToRetentionSet(scope, stream, streamCut2, null, executor).get();
    Map<Integer, Long> map3 = new HashMap<>();
    map3.put(0, 30L);
    map3.put(1, 30L);
    size = store.getSizeTillStreamCut(scope, stream, map3, null, executor).join();
    assertTrue(size == 60L);
    StreamCutRecord streamCut3 = new StreamCutRecord(recordingTime + 20, 60L, map3);
    store.addStreamCutToRetentionSet(scope, stream, streamCut3, null, executor).get();
    // endregion
    // region Size Computation on multiple epochs
    long scaleTs = System.currentTimeMillis();
    SimpleEntry<Double, Double> segment2 = new SimpleEntry<>(0.0, 0.5);
    SimpleEntry<Double, Double> segment3 = new SimpleEntry<>(0.5, 1.0);
    List<Integer> scale1SealedSegments = Lists.newArrayList(0, 1);
    StartScaleResponse response = store.startScale(scope, stream, scale1SealedSegments, Arrays.asList(segment2, segment3), scaleTs, false, null, executor).join();
    final List<Segment> scale1SegmentsCreated = response.getSegmentsCreated();
    store.setState(scope, stream, State.SCALING, null, executor).get();
    store.scaleNewSegmentsCreated(scope, stream, scale1SealedSegments, scale1SegmentsCreated, response.getActiveEpoch(), scaleTs, null, executor).join();
    store.scaleSegmentsSealed(scope, stream, scale1SealedSegments.stream().collect(Collectors.toMap(x -> x, x -> 40L)), scale1SegmentsCreated, response.getActiveEpoch(), scaleTs, null, executor).join();
    // complex stream cut - across two epochs
    Map<Integer, Long> map4 = new HashMap<>();
    map4.put(0, 40L);
    map4.put(3, 10L);
    size = store.getSizeTillStreamCut(scope, stream, map4, null, executor).join();
    assertTrue(size == 90L);
    StreamCutRecord streamCut4 = new StreamCutRecord(recordingTime + 30, size, map4);
    store.addStreamCutToRetentionSet(scope, stream, streamCut4, null, executor).get();
    // simple stream cut on epoch 2
    Map<Integer, Long> map5 = new HashMap<>();
    map5.put(2, 10L);
    map5.put(3, 10L);
    size = store.getSizeTillStreamCut(scope, stream, map5, null, executor).join();
    assertTrue(size == 100L);
    StreamCutRecord streamCut5 = new StreamCutRecord(recordingTime + 30, size, map5);
    store.addStreamCutToRetentionSet(scope, stream, streamCut5, null, executor).get();
// endregion
}
Also used : Arrays(java.util.Arrays) AssertExtensions(io.pravega.test.common.AssertExtensions) RetentionPolicy(io.pravega.client.stream.RetentionPolicy) Exceptions(io.pravega.common.Exceptions) HashMap(java.util.HashMap) StreamConfiguration(io.pravega.client.stream.StreamConfiguration) AtomicReference(java.util.concurrent.atomic.AtomicReference) Lists(com.google.common.collect.Lists) After(org.junit.After) Duration(java.time.Duration) Map(java.util.Map) Timeout(org.junit.rules.Timeout) ScheduledExecutorService(java.util.concurrent.ScheduledExecutorService) SimpleEntry(java.util.AbstractMap.SimpleEntry) TaskExceptions(io.pravega.controller.server.eventProcessor.requesthandlers.TaskExceptions) BucketChangeListener(io.pravega.controller.server.retention.BucketChangeListener) DeleteScopeStatus(io.pravega.controller.stream.api.grpc.v1.Controller.DeleteScopeStatus) Before(org.junit.Before) Assert.assertNotNull(org.junit.Assert.assertNotNull) StreamTruncationRecord(io.pravega.controller.store.stream.tables.StreamTruncationRecord) Assert.assertTrue(org.junit.Assert.assertTrue) Test(org.junit.Test) IOException(java.io.IOException) CompletionException(java.util.concurrent.CompletionException) UUID(java.util.UUID) State(io.pravega.controller.store.stream.tables.State) Collectors(java.util.stream.Collectors) TxnResource(io.pravega.controller.store.task.TxnResource) Executors(java.util.concurrent.Executors) Assert.assertNotEquals(org.junit.Assert.assertNotEquals) ExecutionException(java.util.concurrent.ExecutionException) TimeUnit(java.util.concurrent.TimeUnit) List(java.util.List) Rule(org.junit.Rule) Assert.assertNull(org.junit.Assert.assertNull) Assert.assertFalse(org.junit.Assert.assertFalse) Optional(java.util.Optional) Assert(org.junit.Assert) Collections(java.util.Collections) ScalingPolicy(io.pravega.client.stream.ScalingPolicy) Futures(io.pravega.common.concurrent.Futures) Assert.assertEquals(org.junit.Assert.assertEquals)
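
The expected values in sizeTest follow a simple accounting rule: the size up to a stream cut is the sum of the cut's per-segment offsets plus the full size of every predecessor segment that lies entirely behind the cut. The snippet below only spells out that arithmetic for map4 and map5 (an assumed reading of the assertions, not Pravega's getSizeTillStreamCut implementation); recall that scaleSegmentsSealed recorded a final size of 40 for each of segments 0 and 1.

// Assumed accounting behind the assertions above, not Pravega code.
long sealedSegment0 = 40L; // final size of segment 0, recorded by scaleSegmentsSealed
long sealedSegment1 = 40L; // final size of segment 1, recorded by scaleSegmentsSealed

// map4 = {0: 40, 3: 10}: segment 0 contributes its cut offset (40), segment 1 lies
// entirely behind the cut (40), and segment 3 contributes 10 -> 90.
long sizeAtCut4 = 40L + sealedSegment1 + 10L;

// map5 = {2: 10, 3: 10}: both pre-scale segments lie behind the cut (40 + 40),
// and the new segments contribute 10 each -> 100.
long sizeAtCut5 = sealedSegment0 + sealedSegment1 + 10L + 10L;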

Example 33 with SimpleEntry

use of java.util.AbstractMap.SimpleEntry in project pravega by pravega.

the class StreamMetadataStoreTest method scaleTest.

@Test
public void scaleTest() throws Exception {
    final String scope = "ScopeScale";
    final String stream = "StreamScale";
    final ScalingPolicy policy = ScalingPolicy.fixed(2);
    final StreamConfiguration configuration = StreamConfiguration.builder().scope(scope).streamName(stream).scalingPolicy(policy).build();
    long start = System.currentTimeMillis();
    store.createScope(scope).get();
    store.createStream(scope, stream, configuration, start, null, executor).get();
    store.setState(scope, stream, State.ACTIVE, null, executor).get();
    // region idempotent
    long scaleTs = System.currentTimeMillis();
    SimpleEntry<Double, Double> segment1 = new SimpleEntry<>(0.5, 0.75);
    SimpleEntry<Double, Double> segment2 = new SimpleEntry<>(0.75, 1.0);
    List<Integer> scale1SealedSegments = Collections.singletonList(1);
    // runOnlyIfStarted = true must fail because no scale operation has been started yet
    AssertExtensions.assertThrows("", () -> store.startScale(scope, stream, scale1SealedSegments, Arrays.asList(segment1, segment2), scaleTs, true, null, executor).join(), e -> Exceptions.unwrap(e) instanceof TaskExceptions.StartException);
    // 1. start scale
    StartScaleResponse response = store.startScale(scope, stream, scale1SealedSegments, Arrays.asList(segment1, segment2), scaleTs, false, null, executor).join();
    final List<Segment> scale1SegmentsCreated = response.getSegmentsCreated();
    final int scale1ActiveEpoch = response.getActiveEpoch();
    // rerun start scale
    response = store.startScale(scope, stream, scale1SealedSegments, Arrays.asList(segment1, segment2), scaleTs, false, null, executor).join();
    assertEquals(response.getSegmentsCreated(), scale1SegmentsCreated);
    store.setState(scope, stream, State.SCALING, null, executor).get();
    // 2. scale new segments created
    store.scaleNewSegmentsCreated(scope, stream, scale1SealedSegments, scale1SegmentsCreated, response.getActiveEpoch(), scaleTs, null, executor).join();
    // rerun start scale and new segments created
    response = store.startScale(scope, stream, scale1SealedSegments, Arrays.asList(segment1, segment2), scaleTs, false, null, executor).join();
    assertEquals(response.getSegmentsCreated(), scale1SegmentsCreated);
    store.scaleNewSegmentsCreated(scope, stream, scale1SealedSegments, scale1SegmentsCreated, response.getActiveEpoch(), scaleTs, null, executor).join();
    // 3. scale segments sealed -- this will complete scale
    store.scaleSegmentsSealed(scope, stream, scale1SealedSegments.stream().collect(Collectors.toMap(x -> x, x -> 0L)), scale1SegmentsCreated, response.getActiveEpoch(), scaleTs, null, executor).join();
    // rerun scaleNewSegmentsCreated -- illegal state exception
    AssertExtensions.assertThrows("", () -> store.scaleNewSegmentsCreated(scope, stream, scale1SealedSegments, scale1SegmentsCreated, scale1ActiveEpoch, scaleTs, null, executor).join(), e -> Exceptions.unwrap(e) instanceof StoreException.IllegalStateException);
    // rerun scaleSegmentsSealed -- illegal state exception
    AssertExtensions.assertThrows("", () -> store.scaleSegmentsSealed(scope, stream, scale1SealedSegments.stream().collect(Collectors.toMap(x -> x, x -> 0L)), scale1SegmentsCreated, scale1ActiveEpoch, scaleTs, null, executor).join(), e -> Exceptions.unwrap(e) instanceof StoreException.IllegalStateException);
    // rerun start scale -- should fail with precondition failure
    AssertExtensions.assertThrows("", () -> store.startScale(scope, stream, scale1SealedSegments, Arrays.asList(segment1, segment2), scaleTs, false, null, executor).join(), e -> Exceptions.unwrap(e) instanceof ScaleOperationExceptions.ScalePreConditionFailureException);
    // endregion
    // 2 different conflicting scale operations
    // region run concurrent conflicting scale
    SimpleEntry<Double, Double> segment3 = new SimpleEntry<>(0.0, 0.5);
    SimpleEntry<Double, Double> segment4 = new SimpleEntry<>(0.5, 0.75);
    SimpleEntry<Double, Double> segment5 = new SimpleEntry<>(0.75, 1.0);
    List<Integer> scale2SealedSegments = Arrays.asList(0, 2, 3);
    long scaleTs2 = System.currentTimeMillis();
    response = store.startScale(scope, stream, scale2SealedSegments, Arrays.asList(segment3, segment4, segment5), scaleTs2, false, null, executor).get();
    final List<Segment> scale2SegmentsCreated = response.getSegmentsCreated();
    final int scale2ActiveEpoch = response.getActiveEpoch();
    store.setState(scope, stream, State.SCALING, null, executor).get();
    // rerun of scale 1 -- should fail with precondition failure
    AssertExtensions.assertThrows("", () -> store.startScale(scope, stream, scale1SealedSegments, Arrays.asList(segment1, segment2), scaleTs, false, null, executor).join(), e -> Exceptions.unwrap(e) instanceof ScaleOperationExceptions.ScaleConflictException);
    // rerun of scale 1's new segments created method
    AssertExtensions.assertThrows("", () -> store.scaleNewSegmentsCreated(scope, stream, scale1SealedSegments, scale1SegmentsCreated, scale1ActiveEpoch, scaleTs, null, executor).join(), e -> Exceptions.unwrap(e) instanceof ScaleOperationExceptions.ScaleConditionInvalidException);
    store.scaleNewSegmentsCreated(scope, stream, scale2SealedSegments, scale2SegmentsCreated, scale2ActiveEpoch, scaleTs2, null, executor).get();
    // rerun of scale 1's new segments created method
    AssertExtensions.assertThrows("", () -> store.scaleNewSegmentsCreated(scope, stream, scale1SealedSegments, scale1SegmentsCreated, scale1ActiveEpoch, scaleTs, null, executor).join(), e -> Exceptions.unwrap(e) instanceof ScaleOperationExceptions.ScaleConditionInvalidException);
    store.scaleSegmentsSealed(scope, stream, scale1SealedSegments.stream().collect(Collectors.toMap(x -> x, x -> 0L)), scale1SegmentsCreated, scale2ActiveEpoch, scaleTs2, null, executor).get();
    store.setState(scope, stream, State.SCALING, null, executor).get();
    // rerun of scale 1's new segments created method
    AssertExtensions.assertThrows("", () -> store.scaleNewSegmentsCreated(scope, stream, scale1SealedSegments, scale1SegmentsCreated, scale1ActiveEpoch, scaleTs, null, executor).join(), e -> Exceptions.unwrap(e) instanceof ScaleOperationExceptions.ScaleConditionInvalidException);
    store.setState(scope, stream, State.ACTIVE, null, executor).get();
// endregion
}
Also used : Arrays(java.util.Arrays) AssertExtensions(io.pravega.test.common.AssertExtensions) RetentionPolicy(io.pravega.client.stream.RetentionPolicy) Exceptions(io.pravega.common.Exceptions) HashMap(java.util.HashMap) StreamConfiguration(io.pravega.client.stream.StreamConfiguration) AtomicReference(java.util.concurrent.atomic.AtomicReference) Lists(com.google.common.collect.Lists) After(org.junit.After) Duration(java.time.Duration) Map(java.util.Map) Timeout(org.junit.rules.Timeout) ScheduledExecutorService(java.util.concurrent.ScheduledExecutorService) SimpleEntry(java.util.AbstractMap.SimpleEntry) TaskExceptions(io.pravega.controller.server.eventProcessor.requesthandlers.TaskExceptions) BucketChangeListener(io.pravega.controller.server.retention.BucketChangeListener) DeleteScopeStatus(io.pravega.controller.stream.api.grpc.v1.Controller.DeleteScopeStatus) Before(org.junit.Before) Assert.assertNotNull(org.junit.Assert.assertNotNull) StreamTruncationRecord(io.pravega.controller.store.stream.tables.StreamTruncationRecord) Assert.assertTrue(org.junit.Assert.assertTrue) Test(org.junit.Test) IOException(java.io.IOException) CompletionException(java.util.concurrent.CompletionException) UUID(java.util.UUID) State(io.pravega.controller.store.stream.tables.State) Collectors(java.util.stream.Collectors) TxnResource(io.pravega.controller.store.task.TxnResource) Executors(java.util.concurrent.Executors) Assert.assertNotEquals(org.junit.Assert.assertNotEquals) ExecutionException(java.util.concurrent.ExecutionException) TimeUnit(java.util.concurrent.TimeUnit) List(java.util.List) Rule(org.junit.Rule) Assert.assertNull(org.junit.Assert.assertNull) Assert.assertFalse(org.junit.Assert.assertFalse) Optional(java.util.Optional) Assert(org.junit.Assert) Collections(java.util.Collections) ScalingPolicy(io.pravega.client.stream.ScalingPolicy) Futures(io.pravega.common.concurrent.Futures) Assert.assertEquals(org.junit.Assert.assertEquals)
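
Stripped of the reruns and failure injections, the scale workflow this test exercises is a three-phase sequence. The recap below simply reorders calls already shown above (it reuses the test's store, executor and segment variables, so treat it as a reading aid rather than standalone code):

// 1. Propose the scale: which segments to seal and which key ranges replace them.
StartScaleResponse response = store.startScale(scope, stream, scale1SealedSegments,
        Arrays.asList(segment1, segment2), scaleTs, false, null, executor).join();
// 2. Move the stream to SCALING and record the newly created segments.
store.setState(scope, stream, State.SCALING, null, executor).join();
store.scaleNewSegmentsCreated(scope, stream, scale1SealedSegments, response.getSegmentsCreated(),
        response.getActiveEpoch(), scaleTs, null, executor).join();
// 3. Report the final sizes of the sealed segments; this completes the scale.
store.scaleSegmentsSealed(scope, stream,
        scale1SealedSegments.stream().collect(Collectors.toMap(x -> x, x -> 0L)),
        response.getSegmentsCreated(), response.getActiveEpoch(), scaleTs, null, executor).join();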

Example 34 with SimpleEntry

use of java.util.AbstractMap.SimpleEntry in project pravega by pravega.

the class ZKStreamMetadataStoreTest method testSplitsMerges.

@Test
public void testSplitsMerges() throws Exception {
    String scope = "testScopeScale";
    String stream = "testStreamScale";
    ScalingPolicy policy = ScalingPolicy.fixed(2);
    StreamConfiguration configuration = StreamConfiguration.builder().scope(scope).streamName(stream).scalingPolicy(policy).build();
    store.createScope(scope).get();
    store.createStream(scope, stream, configuration, System.currentTimeMillis(), null, executor).get();
    store.setState(scope, stream, State.ACTIVE, null, executor).get();
    // Case: Initial state, splits = 0, merges = 0
    // time t0, total segments 2, S0 {0.0 - 0.5} S1 {0.5 - 1.0}
    List<ScaleMetadata> scaleRecords = store.getScaleMetadata(scope, stream, null, executor).get();
    assertTrue(scaleRecords.size() == 1);
    SimpleEntry<Long, Long> simpleEntrySplitsMerges = store.findNumSplitsMerges(scope, stream, executor).get();
    assertEquals("Number of splits ", new Long(0), simpleEntrySplitsMerges.getKey());
    assertEquals("Number of merges", new Long(0), simpleEntrySplitsMerges.getValue());
    // Case: Only splits, S0 split into S2, S3, S4 and S1 split into S5, S6,
    // total splits = 2, total merges = 0
    // time t1, total segments 5, S2 {0.0, 0.2}, S3 {0.2, 0.4}, S4 {0.4, 0.5}, S5 {0.5, 0.7}, S6 {0.7, 1.0}
    SimpleEntry<Double, Double> segment2 = new SimpleEntry<>(0.0, 0.2);
    SimpleEntry<Double, Double> segment3 = new SimpleEntry<>(0.2, 0.4);
    SimpleEntry<Double, Double> segment4 = new SimpleEntry<>(0.4, 0.5);
    SimpleEntry<Double, Double> segment5 = new SimpleEntry<>(0.5, 0.7);
    SimpleEntry<Double, Double> segment6 = new SimpleEntry<>(0.7, 1.0);
    List<SimpleEntry<Double, Double>> newRanges1 = Arrays.asList(segment2, segment3, segment4, segment5, segment6);
    scale(scope, stream, scaleRecords.get(0).getSegments(), newRanges1);
    scaleRecords = store.getScaleMetadata(scope, stream, null, executor).get();
    assertTrue(scaleRecords.size() == 2);
    SimpleEntry<Long, Long> simpleEntrySplitsMerges1 = store.findNumSplitsMerges(scope, stream, executor).get();
    assertEquals("Number of splits ", new Long(2), simpleEntrySplitsMerges1.getKey());
    assertEquals("Number of merges", new Long(0), simpleEntrySplitsMerges1.getValue());
    // Case: Splits and merges both, S2 and S3 merged to S7,  S4 and S5 merged to S8,  S6 split to S9 and S10
    // total splits = 3, total merges = 2
    // time t2, total segments 4, S7 {0.0, 0.4}, S8 {0.4, 0.7}, S9 {0.7, 0.8}, S10 {0.8, 1.0}
    SimpleEntry<Double, Double> segment7 = new SimpleEntry<>(0.0, 0.4);
    SimpleEntry<Double, Double> segment8 = new SimpleEntry<>(0.4, 0.7);
    SimpleEntry<Double, Double> segment9 = new SimpleEntry<>(0.7, 0.8);
    SimpleEntry<Double, Double> segment10 = new SimpleEntry<>(0.8, 1.0);
    List<SimpleEntry<Double, Double>> newRanges2 = Arrays.asList(segment7, segment8, segment9, segment10);
    scale(scope, stream, scaleRecords.get(0).getSegments(), newRanges2);
    scaleRecords = store.getScaleMetadata(scope, stream, null, executor).get();
    SimpleEntry<Long, Long> simpleEntrySplitsMerges2 = store.findNumSplitsMerges(scope, stream, executor).get();
    assertEquals("Number of splits ", new Long(3), simpleEntrySplitsMerges2.getKey());
    assertEquals("Number of merges", new Long(2), simpleEntrySplitsMerges2.getValue());
    // Case: Only merges , S7 and S8 merged to S11,  S9 and S10 merged to S12
    // total splits = 3, total merges = 4
    // time t3, total segments 2, S11 {0.0, 0.7}, S12 {0.7, 1.0}
    SimpleEntry<Double, Double> segment11 = new SimpleEntry<>(0.0, 0.7);
    SimpleEntry<Double, Double> segment12 = new SimpleEntry<>(0.7, 1.0);
    List<SimpleEntry<Double, Double>> newRanges3 = Arrays.asList(segment11, segment12);
    scale(scope, stream, scaleRecords.get(0).getSegments(), newRanges3);
    SimpleEntry<Long, Long> simpleEntrySplitsMerges3 = store.findNumSplitsMerges(scope, stream, executor).get();
    assertEquals("Number of splits ", new Long(3), simpleEntrySplitsMerges3.getKey());
    assertEquals("Number of merges", new Long(4), simpleEntrySplitsMerges3.getValue());
}
Also used : ScalingPolicy(io.pravega.client.stream.ScalingPolicy) SimpleEntry(java.util.AbstractMap.SimpleEntry) StreamConfiguration(io.pravega.client.stream.StreamConfiguration) Test(org.junit.Test)
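
The counts asserted in testSplitsMerges are consistent with a simple range-based rule: an old segment counts as one split when its key range is covered by two or more new segments, and a new segment counts as one merge when it covers two or more old segments, accumulated across scale epochs. The sketch below implements that rule for the t0 -> t1 transition from the test; it is an assumption about the semantics behind findNumSplitsMerges, not the store's actual implementation.

import java.util.AbstractMap.SimpleEntry;
import java.util.Arrays;
import java.util.List;

public class SplitMergeCount {

    // An old segment is "split" if >= 2 new ranges overlap it; a new segment is a
    // "merge" if it overlaps >= 2 old ranges. Assumed semantics, not Pravega code.
    static long[] countSplitsMerges(List<SimpleEntry<Double, Double>> oldRanges,
                                    List<SimpleEntry<Double, Double>> newRanges) {
        long splits = 0;
        long merges = 0;
        for (SimpleEntry<Double, Double> oldRange : oldRanges) {
            if (newRanges.stream().filter(n -> overlaps(oldRange, n)).count() >= 2) {
                splits++;
            }
        }
        for (SimpleEntry<Double, Double> newRange : newRanges) {
            if (oldRanges.stream().filter(o -> overlaps(o, newRange)).count() >= 2) {
                merges++;
            }
        }
        return new long[] { splits, merges };
    }

    private static boolean overlaps(SimpleEntry<Double, Double> a, SimpleEntry<Double, Double> b) {
        return a.getKey() < b.getValue() && b.getKey() < a.getValue();
    }

    public static void main(String[] args) {
        // t0 -> t1 from the test: S0 {0.0, 0.5} and S1 {0.5, 1.0} scaled into five segments.
        List<SimpleEntry<Double, Double>> epoch0 = Arrays.asList(
                new SimpleEntry<>(0.0, 0.5), new SimpleEntry<>(0.5, 1.0));
        List<SimpleEntry<Double, Double>> epoch1 = Arrays.asList(
                new SimpleEntry<>(0.0, 0.2), new SimpleEntry<>(0.2, 0.4), new SimpleEntry<>(0.4, 0.5),
                new SimpleEntry<>(0.5, 0.7), new SimpleEntry<>(0.7, 1.0));
        long[] counts = countSplitsMerges(epoch0, epoch1);
        System.out.println("splits=" + counts[0] + ", merges=" + counts[1]); // splits=2, merges=0
    }
}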

Example 35 with SimpleEntry

use of java.util.AbstractMap.SimpleEntry in project hazelcast by hazelcast.

the class ClientReplicatedMapTest method testEntrySet.

private void testEntrySet(Config config) throws Exception {
    HazelcastInstance instance1 = hazelcastFactory.newHazelcastInstance(config);
    HazelcastInstance instance2 = hazelcastFactory.newHazelcastClient();
    final ReplicatedMap<Integer, Integer> map1 = instance1.getReplicatedMap("default");
    final ReplicatedMap<Integer, Integer> map2 = instance2.getReplicatedMap("default");
    final SimpleEntry<Integer, Integer>[] testValues = buildTestValues();
    int half = testValues.length / 2;
    for (int i = 0; i < testValues.length; i++) {
        ReplicatedMap<Integer, Integer> map = i < half ? map1 : map2;
        SimpleEntry<Integer, Integer> entry = testValues[i];
        map.put(entry.getKey(), entry.getValue());
    }
    Set<Entry<Integer, Integer>> entrySet1 = new HashSet<Entry<Integer, Integer>>(map1.entrySet());
    Set<Entry<Integer, Integer>> entrySet2 = new HashSet<Entry<Integer, Integer>>(map2.entrySet());
    for (Entry<Integer, Integer> entry : entrySet2) {
        Integer value = findValue(entry.getKey(), testValues);
        assertEquals(value, entry.getValue());
    }
    for (Entry<Integer, Integer> entry : entrySet1) {
        Integer value = findValue(entry.getKey(), testValues);
        assertEquals(value, entry.getValue());
    }
}
Also used : SimpleEntry(java.util.AbstractMap.SimpleEntry) Entry(java.util.Map.Entry) HazelcastInstance(com.hazelcast.core.HazelcastInstance) HashSet(java.util.HashSet)
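
This excerpt omits two helpers, buildTestValues() and findValue(...). A plausible shape for them, offered purely as an assumption so the example reads end to end (the real Hazelcast test code may differ), would be:

// Hypothetical helpers for the test above; the actual Hazelcast test may differ.
// Requires: import java.util.Random;
@SuppressWarnings("unchecked")
private SimpleEntry<Integer, Integer>[] buildTestValues() {
    Random random = new Random();
    SimpleEntry<Integer, Integer>[] testValues = new SimpleEntry[100];
    for (int i = 0; i < testValues.length; i++) {
        testValues[i] = new SimpleEntry<Integer, Integer>(random.nextInt(), random.nextInt());
    }
    return testValues;
}

private Integer findValue(int key, SimpleEntry<Integer, Integer>[] values) {
    for (SimpleEntry<Integer, Integer> entry : values) {
        if (entry.getKey() == key) {
            return entry.getValue();
        }
    }
    return null;
}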

Aggregations

SimpleEntry (java.util.AbstractMap.SimpleEntry) 59
Test (org.junit.Test) 32
ArrayList (java.util.ArrayList) 23
HashMap (java.util.HashMap) 19
Map (java.util.Map) 13
Entry (java.util.Map.Entry) 12
CucumberFeature (cucumber.runtime.model.CucumberFeature) 10
Result (gherkin.formatter.model.Result) 10
List (java.util.List) 10
HashSet (java.util.HashSet) 9
Metacard (ddf.catalog.data.Metacard) 7
Serializable (java.io.Serializable) 7
HazelcastInstance (com.hazelcast.core.HazelcastInstance) 6
ScalingPolicy (io.pravega.client.stream.ScalingPolicy) 6
StreamConfiguration (io.pravega.client.stream.StreamConfiguration) 6
UpdateRequest (ddf.catalog.operation.UpdateRequest) 5
Collections (java.util.Collections) 5
Set (java.util.Set) 5
Before (org.junit.Before) 5
ZookeeperConfigurationProvider (com.kixeye.chassis.bootstrap.configuration.zookeeper.ZookeeperConfigurationProvider) 4