Search in sources:

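Every example on this page reduces to the same core call: build a RoutingStrategy from a StoreDefinition and its Cluster with RoutingStrategyFactory.updateRoutingStrategy(...), then use routeRequest(...) to find the nodes that own a given key. The minimal sketch below isolates that pattern; the class name RoutingSketch and the replicasFor helper are illustrative only and not part of the Voldemort API.

import java.util.List;

import voldemort.cluster.Cluster;
import voldemort.cluster.Node;
import voldemort.routing.RoutingStrategy;
import voldemort.routing.RoutingStrategyFactory;
import voldemort.store.StoreDefinition;
import voldemort.utils.ByteArray;

public class RoutingSketch {

    /**
     * Returns the nodes responsible for a key under the store's routing strategy.
     * How storeDef and cluster are obtained varies per example below (test
     * utilities, admin client metadata, or cluster.xml).
     */
    public static List<Node> replicasFor(StoreDefinition storeDef, Cluster cluster, ByteArray key) {
        // updateRoutingStrategy builds the strategy named by the store definition
        // (e.g. consistent hashing) over the given cluster topology
        RoutingStrategy strategy = new RoutingStrategyFactory().updateRoutingStrategy(storeDef, cluster);
        // routeRequest maps the raw key bytes to the replica nodes that own them
        return strategy.routeRequest(key.get());
    }
}

Examples 2, 4, and 5 use exactly this pair of calls to decide whether a key belongs to a particular node.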
Example 1 with RoutingStrategyFactory

Use of voldemort.routing.RoutingStrategyFactory in the voldemort/voldemort project.

From class HadoopStoreBuilderCollisionTest, method testCollisionWithParams.

@SuppressWarnings({ "unchecked" })
public void testCollisionWithParams(int totalElements, int maxCollisions) throws Exception {
    assertEquals(0, totalElements % maxCollisions);
    // create test data
    Map<String, String> values = new HashMap<String, String>();
    List<String> valuesLeft = Lists.newArrayList();
    File testDir = TestUtils.createTempDir();
    File tempDir = new File(testDir, "temp");
    File outputDir = new File(testDir, "output");
    File storeDir = TestUtils.createTempDir(testDir);
    for (int i = 0; i < totalElements; i++) {
        values.put(Integer.toString(i), Integer.toString(i));
        valuesLeft.add(Integer.toString(i));
    }
    String storeName = "test";
    SerializerDefinition serDef = new SerializerDefinition("string");
    Cluster cluster = ServerTestUtils.getLocalCluster(1);
    Serializer<Object> serializer = (Serializer<Object>) new DefaultSerializerFactory().getSerializer(serDef);
    // write test data to text file
    File inputFile = File.createTempFile("input", ".txt", testDir);
    inputFile.deleteOnExit();
    StringBuilder contents = new StringBuilder();
    byte[] currentMd5 = TestUtils.randomBytes(2 * ByteUtils.SIZE_OF_INT);
    int entryId = 0;
    for (Map.Entry<String, String> entry : values.entrySet()) {
        if (entryId % maxCollisions == 0) {
            currentMd5 = TestUtils.randomBytes(2 * ByteUtils.SIZE_OF_INT);
        }
        contents.append(entry.getKey() + "\t" + entry.getValue() + "\n");
        byte[] oldMd5 = ByteUtils.copy(ByteUtils.md5(serializer.toBytes(entry.getKey())), 0, 2 * ByteUtils.SIZE_OF_INT);
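        // oldMd5ToNewMd5 is a field of the test class (not shown in this snippet): it records,
        // for each key's real md5 prefix, the shared prefix assigned to its collision group,
        // so that CollidingTextStoreMapper can force every maxCollisions consecutive keys to collide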
        oldMd5ToNewMd5.put(new ByteArray(oldMd5), currentMd5);
        entryId++;
    }
    FileUtils.writeStringToFile(inputFile, contents.toString());
    StoreDefinition def = new StoreDefinitionBuilder().setName(storeName).setType(ReadOnlyStorageConfiguration.TYPE_NAME).setKeySerializer(serDef).setValueSerializer(serDef).setRoutingPolicy(RoutingTier.CLIENT).setRoutingStrategyType(RoutingStrategyType.CONSISTENT_STRATEGY).setReplicationFactor(1).setPreferredReads(1).setRequiredReads(1).setPreferredWrites(1).setRequiredWrites(1).build();
    HadoopStoreBuilder builder = new HadoopStoreBuilder("testCollisionWithParams", new Props(), new JobConf(), CollidingTextStoreMapper.class, TextInputFormat.class, cluster, def, new Path(tempDir.getAbsolutePath()), new Path(outputDir.getAbsolutePath()), new Path(inputFile.getAbsolutePath()), CheckSumType.MD5, true, false, 1024 * 1024 * 1024, false, null, false);
    builder.build();
    File nodeFile = new File(outputDir, "node-0");
    File versionDir = new File(storeDir, "version-0");
    HdfsFetcher fetcher = new HdfsFetcher();
    fetcher.fetch(nodeFile.getAbsolutePath(), versionDir.getAbsolutePath());
    // Test that we work in the normal collision scenario: open the store
    ReadOnlyStorageEngine engine = new ReadOnlyStorageEngine(storeName, new CustomBinarySearchStrategy(), new RoutingStrategyFactory().updateRoutingStrategy(def, cluster), 0, storeDir, 1);
    Store<Object, Object, Object> store = SerializingStore.wrap(engine, serializer, serializer, serializer);
    // check values
    for (Map.Entry<String, String> entry : values.entrySet()) {
        List<Versioned<Object>> found = store.get(entry.getKey(), null);
        Assert.assertEquals("Incorrect number of results", 1, found.size());
        Assert.assertEquals(entry.getValue(), found.get(0).getValue());
    }
    // also check the iterator - first key iterator...
    List<String> valuesLeft2 = Lists.newArrayList(valuesLeft);
    ClosableIterator<ByteArray> keyIterator = engine.keys();
    int numElements = 0;
    while (keyIterator.hasNext()) {
        Object object = serializer.toObject(keyIterator.next().get());
        assertEquals(valuesLeft.remove(object), true);
        Assert.assertTrue(values.containsKey(object));
        numElements++;
    }
    Assert.assertEquals(numElements, values.size());
    Assert.assertEquals(valuesLeft.size(), 0);
    // ... and entry iterator
    ClosableIterator<Pair<ByteArray, Versioned<byte[]>>> entryIterator = engine.entries();
    numElements = 0;
    while (entryIterator.hasNext()) {
        Pair<ByteArray, Versioned<byte[]>> entry = entryIterator.next();
        assertEquals(valuesLeft2.remove(serializer.toObject(entry.getFirst().get())), true);
        Assert.assertEquals(values.get(serializer.toObject(entry.getFirst().get())), serializer.toObject(entry.getSecond().getValue()));
        numElements++;
    }
    Assert.assertEquals(numElements, values.size());
    Assert.assertEquals(valuesLeft2.size(), 0);
}
Also used : Versioned(voldemort.versioning.Versioned) HashMap(java.util.HashMap) RoutingStrategyFactory(voldemort.routing.RoutingStrategyFactory) Props(voldemort.utils.Props) StoreDefinition(voldemort.store.StoreDefinition) ByteArray(voldemort.utils.ByteArray) JobConf(org.apache.hadoop.mapred.JobConf) Serializer(voldemort.serialization.Serializer) Pair(voldemort.utils.Pair) StoreDefinitionBuilder(voldemort.store.StoreDefinitionBuilder) Path(org.apache.hadoop.fs.Path) Cluster(voldemort.cluster.Cluster) ReadOnlyStorageEngine(voldemort.store.readonly.ReadOnlyStorageEngine) DefaultSerializerFactory(voldemort.serialization.DefaultSerializerFactory) HdfsFetcher(voldemort.store.readonly.fetcher.HdfsFetcher) File(java.io.File) HashMap(java.util.HashMap) Map(java.util.Map) SerializerDefinition(voldemort.serialization.SerializerDefinition)

Example 2 with RoutingStrategyFactory

Use of voldemort.routing.RoutingStrategyFactory in the voldemort/voldemort project.

From class RequestFileFilter, method main.

/**
     * Filter requests specified in a file, generating a new file containing
     * only requests destined for a specific node.
     * 
     * @param args See usage for more information
     * @throws Exception In case of I/O or Voldemort-specific errors
     */
public static void main(String[] args) throws Exception {
    OptionParser parser = new OptionParser();
    parser.accepts("help", "print usage information");
    parser.accepts("node", "[REQUIRED] node id").withRequiredArg().ofType(Integer.class).describedAs("node id");
    parser.accepts("store-name", "[REQUIRED] store name").withRequiredArg().describedAs("store name");
    parser.accepts("url", "[REQUIRED] bootstrap URL").withRequiredArg().describedAs("bootstrap-url");
    parser.accepts("input", "[REQUIRED] input request file").withRequiredArg().describedAs("input-file");
    parser.accepts("output", "[REQUIRED] output file").withRequiredArg().describedAs("output-file");
    parser.accepts("string-keys");
    OptionSet options = parser.parse(args);
    if (options.has("help")) {
        parser.printHelpOn(System.out);
        System.exit(0);
    }
    Set<String> missing = CmdUtils.missing(options, "node", "store-name", "url", "input", "output");
    if (missing.size() > 0) {
        System.err.println("Missing required arguments: " + Joiner.on(", ").join(missing));
        parser.printHelpOn(System.err);
        System.exit(1);
    }
    int nodeId = (Integer) options.valueOf("node");
    String storeName = (String) options.valueOf("store-name");
    String bootstrapURL = (String) options.valueOf("url");
    String inputFile = (String) options.valueOf("input");
    String outputFile = (String) options.valueOf("output");
    boolean stringKeys = options.has("string-keys");
    AdminClient adminClient = new AdminClient(bootstrapURL);
    List<StoreDefinition> storeDefinitionList = adminClient.metadataMgmtOps.getRemoteStoreDefList(nodeId).getValue();
    StoreDefinition storeDefinition = null;
    for (StoreDefinition def : storeDefinitionList) {
        if (storeName.equals(def.getName())) {
            storeDefinition = def;
        }
    }
    if (storeDefinition == null) {
        Utils.croak("No store found with name\"" + storeName + "\"");
    }
    Cluster cluster = adminClient.metadataMgmtOps.getRemoteCluster(nodeId).getValue();
    Node node = null;
    try {
        node = cluster.getNodeById(nodeId);
    } catch (VoldemortException e) {
        Utils.croak("Can't find a node with id " + nodeId);
    }
    RoutingStrategy routingStrategy = new RoutingStrategyFactory().updateRoutingStrategy(storeDefinition, cluster);
    try {
        new RequestFileFilter(storeDefinition, routingStrategy, inputFile, outputFile, node, stringKeys).filter();
    } catch (FileNotFoundException e) {
        Utils.croak(e.getMessage());
    }
}
Also used : RoutingStrategyFactory(voldemort.routing.RoutingStrategyFactory) Node(voldemort.cluster.Node) FileNotFoundException(java.io.FileNotFoundException) Cluster(voldemort.cluster.Cluster) OptionParser(joptsimple.OptionParser) VoldemortException(voldemort.VoldemortException) StoreDefinition(voldemort.store.StoreDefinition) RoutingStrategy(voldemort.routing.RoutingStrategy) OptionSet(joptsimple.OptionSet) AdminClient(voldemort.client.protocol.admin.AdminClient)
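The filter() body invoked above is not shown on this page; the per-node membership check it depends on has the same shape as the one used in Examples 4 and 5: route the key and test whether the target node is among its replicas. A minimal sketch of that check (the class and method names are illustrative; Node.getId() is assumed to return the node's integer id):

import java.util.List;

import voldemort.cluster.Node;
import voldemort.routing.RoutingStrategy;
import voldemort.utils.ByteArray;

public class NodeMembershipSketch {

    /** True if the given key is routed to the node with id nodeId. */
    public static boolean belongsToNode(RoutingStrategy strategy, ByteArray key, int nodeId) {
        // routeRequest returns the replica set for the key; keep the key only if
        // the target node id appears among those replicas
        List<Node> replicas = strategy.routeRequest(key.get());
        for (Node node : replicas) {
            if (node.getId() == nodeId) {
                return true;
            }
        }
        return false;
    }
}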

Example 3 with RoutingStrategyFactory

Use of voldemort.routing.RoutingStrategyFactory in the voldemort/voldemort project.

From class ReadOnlyStorePerformanceTest, method main.

public static void main(String[] args) throws FileNotFoundException, IOException {
    OptionParser parser = new OptionParser();
    parser.accepts("help", "print usage information");
    parser.accepts("threads", "number of threads").withRequiredArg().ofType(Integer.class);
    parser.accepts("requests", "[REQUIRED] number of requests").withRequiredArg().ofType(Integer.class);
    parser.accepts("store-dir", "[REQUIRED] store directory").withRequiredArg().describedAs("directory");
    parser.accepts("cluster-xml", "Path to cluster.xml").withRequiredArg().describedAs("path");
    parser.accepts("node-id", "Id of node").withRequiredArg().ofType(Integer.class).describedAs("node-id");
    parser.accepts("search-strategy", "class of the search strategy to use").withRequiredArg().describedAs("class_name");
    parser.accepts("build", "If present, first build the data");
    parser.accepts("num-values", "The number of values in the store").withRequiredArg().describedAs("count").ofType(Integer.class);
    parser.accepts("num-chunks", "The number of chunks per partition").withRequiredArg().describedAs("chunks").ofType(Integer.class);
    parser.accepts("internal-sort-size", "The number of items to sort in memory at a time").withRequiredArg().describedAs("size").ofType(Integer.class);
    parser.accepts("value-size", "The size of the values in the store").withRequiredArg().describedAs("size").ofType(Integer.class);
    parser.accepts("working-dir", "The directory in which to store temporary data").withRequiredArg().describedAs("dir");
    parser.accepts("gzip", "Compress the intermediate temp files used in building the store");
    parser.accepts("request-file", "file get request ids from").withRequiredArg();
    parser.accepts("version", "Version of read-only store [" + ReadOnlyStorageFormat.READONLY_V0 + "," + ReadOnlyStorageFormat.READONLY_V1 + "," + ReadOnlyStorageFormat.READONLY_V2 + " (default)]").withRequiredArg().describedAs("version");
    parser.accepts("test-gz", "Path to gzip containing data. Works with --build only").withRequiredArg().describedAs("path");
    OptionSet options = parser.parse(args);
    if (options.has("help")) {
        parser.printHelpOn(System.out);
        System.exit(0);
    }
    CmdUtils.croakIfMissing(parser, options, "requests", "store-dir");
    final int numThreads = CmdUtils.valueOf(options, "threads", 10);
    final int numRequests = (Integer) options.valueOf("requests");
    final int internalSortSize = CmdUtils.valueOf(options, "internal-sort-size", 500000);
    int numValues = numRequests;
    final String inputFile = (String) options.valueOf("request-file");
    final String searcherClass = CmdUtils.valueOf(options, "search-strategy", BinarySearchStrategy.class.getName()).trim();
    final boolean gzipIntermediate = options.has("gzip");
    final SearchStrategy searcher = (SearchStrategy) ReflectUtils.callConstructor(ReflectUtils.loadClass(searcherClass));
    final File workingDir = new File(CmdUtils.valueOf(options, "working-dir", System.getProperty("java.io.tmpdir")));
    String storeDir = (String) options.valueOf("store-dir");
    ReadOnlyStorageFormat format = ReadOnlyStorageFormat.fromCode(CmdUtils.valueOf(options, "version", ReadOnlyStorageFormat.READONLY_V2.toString()));
    Cluster cluster = null;
    int nodeId = 0;
    SerializerDefinition sdef = new SerializerDefinition("json", "'string'");
    StoreDefinition storeDef = new StoreDefinitionBuilder().setName("test").setKeySerializer(sdef).setValueSerializer(sdef).setRequiredReads(1).setReplicationFactor(1).setRequiredWrites(1).setType("read-only").setRoutingStrategyType(RoutingStrategyType.CONSISTENT_STRATEGY).setRoutingPolicy(RoutingTier.CLIENT).build();
    if (options.has("build")) {
        CmdUtils.croakIfMissing(parser, options, "num-values", "value-size");
        numValues = (Integer) options.valueOf("num-values");
        int numChunks = 1;
        if (options.has("num-chunks"))
            numChunks = (Integer) options.valueOf("num-chunks");
        int valueSize = (Integer) options.valueOf("value-size");
        // generate test data
        File temp = null;
        if (options.has("test-gz")) {
            temp = new File((String) options.valueOf("test-gz"));
        } else {
            temp = File.createTempFile("json-data", ".txt.gz", workingDir);
            temp.deleteOnExit();
            System.out.println("Generating test data in " + temp);
            OutputStream outputStream = new GZIPOutputStream(new FileOutputStream(temp));
            Writer writer = new BufferedWriter(new OutputStreamWriter(outputStream), 10 * 1024 * 1024);
            String value = TestUtils.randomLetters(valueSize);
            for (int i = 0; i < numValues; i++) {
                writer.write("\"");
                writer.write(Integer.toString(i));
                writer.write("\" \"");
                writer.write(value);
                writer.write("\"");
                writer.write("\n");
            }
            writer.close();
            writer = null;
        }
        System.out.println("Building store.");
        InputStream inputStream = new GZIPInputStream(new FileInputStream(temp));
        Reader r = new BufferedReader(new InputStreamReader(inputStream), 1 * 1024 * 1024);
        File output = TestUtils.createTempDir(workingDir);
        File tempDir = TestUtils.createTempDir(workingDir);
        cluster = ServerTestUtils.getLocalCluster(1);
        nodeId = 0;
        JsonStoreBuilder builder = new JsonStoreBuilder(new JsonReader(r), cluster, storeDef, new ConsistentRoutingStrategy(cluster, 1), output, tempDir, internalSortSize, 2, numChunks, 64 * 1024, gzipIntermediate);
        builder.build(format);
        // copy to store dir
        File dir = new File(storeDir);
        Utils.rm(dir);
        dir.mkdirs();
        System.out.println("Moving store data from " + output + " to " + dir);
        boolean copyWorked = new File(output, "node-0").renameTo(new File(dir, "version-0"));
        if (!copyWorked)
            Utils.croak("Copy of data from " + output + " to " + dir + " failed.");
    } else {
        CmdUtils.croakIfMissing(parser, options, "cluster-xml", "node-id");
        String clusterXmlPath = (String) options.valueOf("cluster-xml");
        nodeId = (Integer) options.valueOf("node-id");
        File clusterXml = new File(clusterXmlPath);
        if (!clusterXml.exists()) {
            Utils.croak("Cluster.xml does not exist");
        }
        cluster = new ClusterMapper().readCluster(clusterXml);
    }
    final Store<ByteArray, byte[], byte[]> store = new ReadOnlyStorageEngine("test", searcher, new RoutingStrategyFactory().updateRoutingStrategy(storeDef, cluster), nodeId, new File(storeDir), 0);
    final AtomicInteger obsoletes = new AtomicInteger(0);
    final AtomicInteger nullResults = new AtomicInteger(0);
    final AtomicInteger totalResults = new AtomicInteger(0);
    final BlockingQueue<String> requestIds = new ArrayBlockingQueue<String>(20000);
    final Executor executor = Executors.newFixedThreadPool(1);
    // if they have given us a file make a request generator that reads from
    // it, otherwise just generate random values
    final int numVals = numValues;
    Runnable requestGenerator;
    if (inputFile == null) {
        requestGenerator = new Runnable() {

            public void run() {
                System.out.println("Generating random requests.");
                Random random = new Random();
                try {
                    while (true) requestIds.put(Integer.toString(random.nextInt(numRequests) % numVals));
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
            }
        };
    } else {
        requestGenerator = new Runnable() {

            public void run() {
                try {
                    System.out.println("Using request file to generate requests.");
                    BufferedReader reader = new BufferedReader(new FileReader(inputFile), 1000000);
                    while (true) {
                        String line = reader.readLine();
                        if (line == null)
                            return;
                        requestIds.put(line.trim());
                    }
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        };
    }
    executor.execute(requestGenerator);
    final Serializer<Object> keySerializer = new JsonTypeSerializer(JsonTypeDefinition.fromJson("'string'"), true);
    final AtomicInteger current = new AtomicInteger();
    final int progressIncrement = numRequests / 5;
    PerformanceTest readWriteTest = new PerformanceTest() {

        @Override
        public void doOperation(int index) throws Exception {
            try {
                totalResults.incrementAndGet();
                int curr = current.getAndIncrement();
                List<Versioned<byte[]>> results = store.get(new ByteArray(keySerializer.toBytes(requestIds.take())), null);
                if (curr % progressIncrement == 0)
                    System.out.println(curr);
                if (results.size() == 0)
                    nullResults.incrementAndGet();
            } catch (ObsoleteVersionException e) {
                obsoletes.incrementAndGet();
            }
        }
    };
    System.out.println("Running test...");
    readWriteTest.run(numRequests, numThreads);
    System.out.println("Random Access Read Only store Results:");
    System.out.println("Null reads ratio:" + (nullResults.doubleValue()) / totalResults.doubleValue());
    readWriteTest.printStats();
    System.exit(0);
}
Also used : BinarySearchStrategy(voldemort.store.readonly.BinarySearchStrategy) SearchStrategy(voldemort.store.readonly.SearchStrategy) GZIPOutputStream(java.util.zip.GZIPOutputStream) OutputStream(java.io.OutputStream) FileOutputStream(java.io.FileOutputStream) ArrayBlockingQueue(java.util.concurrent.ArrayBlockingQueue) Random(java.util.Random) GZIPOutputStream(java.util.zip.GZIPOutputStream) ByteArray(voldemort.utils.ByteArray) StoreDefinitionBuilder(voldemort.store.StoreDefinitionBuilder) ReadOnlyStorageEngine(voldemort.store.readonly.ReadOnlyStorageEngine) ReadOnlyStorageFormat(voldemort.store.readonly.ReadOnlyStorageFormat) ClusterMapper(voldemort.xml.ClusterMapper) FileInputStream(java.io.FileInputStream) JsonStoreBuilder(voldemort.store.readonly.JsonStoreBuilder) ObsoleteVersionException(voldemort.versioning.ObsoleteVersionException) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) FileOutputStream(java.io.FileOutputStream) File(java.io.File) JsonTypeSerializer(voldemort.serialization.json.JsonTypeSerializer) Versioned(voldemort.versioning.Versioned) RoutingStrategyFactory(voldemort.routing.RoutingStrategyFactory) Reader(java.io.Reader) JsonReader(voldemort.serialization.json.JsonReader) InputStreamReader(java.io.InputStreamReader) BufferedReader(java.io.BufferedReader) FileReader(java.io.FileReader) OptionParser(joptsimple.OptionParser) BufferedWriter(java.io.BufferedWriter) GZIPInputStream(java.util.zip.GZIPInputStream) Executor(java.util.concurrent.Executor) StoreDefinition(voldemort.store.StoreDefinition) JsonReader(voldemort.serialization.json.JsonReader) ConsistentRoutingStrategy(voldemort.routing.ConsistentRoutingStrategy) FileReader(java.io.FileReader) InputStreamReader(java.io.InputStreamReader) GZIPInputStream(java.util.zip.GZIPInputStream) FileInputStream(java.io.FileInputStream) InputStream(java.io.InputStream) Cluster(voldemort.cluster.Cluster) FileNotFoundException(java.io.FileNotFoundException) ObsoleteVersionException(voldemort.versioning.ObsoleteVersionException) IOException(java.io.IOException) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) BufferedReader(java.io.BufferedReader) OutputStreamWriter(java.io.OutputStreamWriter) OptionSet(joptsimple.OptionSet) SerializerDefinition(voldemort.serialization.SerializerDefinition) Writer(java.io.Writer) OutputStreamWriter(java.io.OutputStreamWriter) BufferedWriter(java.io.BufferedWriter)

Example 4 with RoutingStrategyFactory

Use of voldemort.routing.RoutingStrategyFactory in the voldemort/voldemort project.

From class AbstractAdminServiceFilterTest, method testDeleteStreamWithFilter.

@Test
public void testDeleteStreamWithFilter() {
    // user store should be present
    Store<ByteArray, byte[], byte[]> store = getStore(0, testStoreName);
    assertNotSame("Store '" + testStoreName + "' should not be null", null, store);
    Set<Pair<ByteArray, Versioned<byte[]>>> entrySet = createEntries();
    VoldemortFilter filter = new VoldemortFilterImpl();
    RoutingStrategy strategy = new RoutingStrategyFactory().updateRoutingStrategy(getStoreDef(), getCluster());
    for (Pair<ByteArray, Versioned<byte[]>> pair : entrySet) {
        if (Utils.nodeListToNodeIdList(strategy.routeRequest(pair.getFirst().get())).contains(0))
            store.put(pair.getFirst(), pair.getSecond(), null);
    }
    // make delete stream call with filter
    getAdminClient().storeMntOps.deletePartitions(0, testStoreName, Lists.newArrayList(0, 1), filter);
    // assert none of the filtered entries are returned.
    for (Pair<ByteArray, Versioned<byte[]>> entry : entrySet) {
        if (Utils.nodeListToNodeIdList(strategy.routeRequest(entry.getFirst().get())).contains(0)) {
            if (filter.accept(entry.getFirst(), entry.getSecond())) {
                assertEquals("All entries should be deleted except the filtered ones.", 0, store.get(entry.getFirst(), null).size());
            } else {
                assertNotSame("filtered entry should be still present.", 0, store.get(entry.getFirst(), null).size());
                assertEquals("values should match", new String(entry.getSecond().getValue()), new String(store.get(entry.getFirst(), null).get(0).getValue()));
            }
        }
    }
}
Also used : Versioned(voldemort.versioning.Versioned) RoutingStrategyFactory(voldemort.routing.RoutingStrategyFactory) RoutingStrategy(voldemort.routing.RoutingStrategy) ByteArray(voldemort.utils.ByteArray) VoldemortFilter(voldemort.client.protocol.VoldemortFilter) Pair(voldemort.utils.Pair) Test(org.junit.Test)

Example 5 with RoutingStrategyFactory

Use of voldemort.routing.RoutingStrategyFactory in the voldemort/voldemort project.

From class AbstractAdminServiceFilterTest, method testFetchAsStreamWithFilter.

@Test
public void testFetchAsStreamWithFilter() {
    // user store should be present
    Store<ByteArray, byte[], byte[]> store = getStore(0, testStoreName);
    assertNotSame("Store '" + testStoreName + "' should not be null", null, store);
    VoldemortFilter filter = new VoldemortFilterImpl();
    int shouldFilterCount = 0;
    RoutingStrategy strategy = new RoutingStrategyFactory().updateRoutingStrategy(getStoreDef(), getCluster());
    for (Pair<ByteArray, Versioned<byte[]>> pair : createEntries()) {
        if (Utils.nodeListToNodeIdList(strategy.routeRequest(pair.getFirst().get())).contains(0)) {
            store.put(pair.getFirst(), pair.getSecond(), null);
            if (!filter.accept(pair.getFirst(), pair.getSecond())) {
                shouldFilterCount++;
            }
        }
    }
    // make fetch stream call with filter
    Iterator<Pair<ByteArray, Versioned<byte[]>>> entryIterator = getAdminClient().bulkFetchOps.fetchEntries(0, testStoreName, getCluster().getNodeById(0).getPartitionIds(), filter, false);
    // assert none of the filtered entries are returned.
    while (entryIterator.hasNext()) {
        Pair<ByteArray, Versioned<byte[]>> entry = entryIterator.next();
        if (!filter.accept(entry.getFirst(), entry.getSecond())) {
            fail();
        }
    }
}
Also used : Versioned(voldemort.versioning.Versioned) RoutingStrategyFactory(voldemort.routing.RoutingStrategyFactory) RoutingStrategy(voldemort.routing.RoutingStrategy) ByteArray(voldemort.utils.ByteArray) VoldemortFilter(voldemort.client.protocol.VoldemortFilter) Pair(voldemort.utils.Pair) Test(org.junit.Test)

Aggregations

RoutingStrategyFactory (voldemort.routing.RoutingStrategyFactory): 32
RoutingStrategy (voldemort.routing.RoutingStrategy): 23
ByteArray (voldemort.utils.ByteArray): 20
StoreDefinition (voldemort.store.StoreDefinition): 19
Node (voldemort.cluster.Node): 15
Cluster (voldemort.cluster.Cluster): 14
File (java.io.File): 9
Test (org.junit.Test): 9
VoldemortException (voldemort.VoldemortException): 9
Versioned (voldemort.versioning.Versioned): 9
ArrayList (java.util.ArrayList): 8
HashMap (java.util.HashMap): 8
Pair (voldemort.utils.Pair): 7
StoreDefinitionsMapper (voldemort.xml.StoreDefinitionsMapper): 6
IOException (java.io.IOException): 5
SerializerDefinition (voldemort.serialization.SerializerDefinition): 5
VoldemortServer (voldemort.server.VoldemortServer): 5
AdminClient (voldemort.client.protocol.admin.AdminClient): 4
DefaultSerializerFactory (voldemort.serialization.DefaultSerializerFactory): 4
Serializer (voldemort.serialization.Serializer): 4