Use of voldemort.serialization.Serializer in project voldemort by voldemort.
From the class MockStoreClientFactory, the method getRawStore:
private <K1, V1, T1> Store<K1, V1, T1> getRawStore(String storeName) {
List<StoreDefinition> storeDefs = storeMapper.readStoreList(new StringReader(storesXml));
StoreDefinition storeDef = null;
for (StoreDefinition d : storeDefs) {
    if (d.getName().equals(storeName)) {
        storeDef = d;
    }
}
if (storeDef == null)
throw new BootstrapFailureException("Unknown store '" + storeName + "'.");
DefaultSerializerFactory serializerFactory = new DefaultSerializerFactory();
Serializer<K1> keySerializer = (Serializer<K1>) serializerFactory.getSerializer(storeDef.getKeySerializer());
Serializer<V1> valueSerializer = (Serializer<V1>) serializerFactory.getSerializer(storeDef.getValueSerializer());
Serializer<T1> transformsSerializer = null;
if (storeDef.isView())
transformsSerializer = (Serializer<T1>) serializerFactory.getSerializer(storeDef.getTransformsSerializer());
// Add inconsistency resolving decorator, using their inconsistency
// resolver (if they gave us one)
InconsistencyResolver<Versioned<V1>> secondaryResolver = new TimeBasedInconsistencyResolver<V1>();
StorageEngine engine;
if (storeDef.isView())
engine = new InMemoryStorageEngine(storeDef.getViewTargetStoreName());
else
engine = new InMemoryStorageEngine(storeDef.getName());
if (storeDef.isView()) {
// instantiate view
String targetName = storeDef.getViewTargetStoreName();
StoreDefinition targetDef = StoreUtils.getStoreDef(storeDefs, targetName);
engine = new ViewStorageEngine(storeName,
        engine,
        this.viewValueSerializer != null ? this.viewValueSerializer
                : serializerFactory.getSerializer(storeDef.getValueSerializer()),
        this.transformsSerializer != null ? this.transformsSerializer
                : serializerFactory.getSerializer(storeDef.getTransformsSerializer()),
        this.keySerializer != null ? this.keySerializer
                : serializerFactory.getSerializer(targetDef.getKeySerializer()),
        this.valueSerializer != null ? this.valueSerializer
                : serializerFactory.getSerializer(targetDef.getValueSerializer()),
        null,
        ViewStorageConfiguration.loadTransformation(storeDef.getValueTransformation()));
}
Store store = new VersionIncrementingStore(engine, nodeId, time);
store = new SerializingStore(store,
        this.keySerializer != null ? this.keySerializer : keySerializer,
        this.valueSerializer != null ? this.valueSerializer : valueSerializer,
        this.transformsSerializer != null ? this.transformsSerializer : transformsSerializer);
Store<K1, V1, T1> consistentStore = new InconsistencyResolvingStore<K1, V1, T1>(store,
        new ChainedResolver<Versioned<V1>>(new VectorClockInconsistencyResolver(),
                secondaryResolver));
return consistentStore;
}
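The recurring pattern here — read a SerializerDefinition off the StoreDefinition, then ask DefaultSerializerFactory for a matching Serializer — is what turns a byte-oriented engine into a typed store. A minimal sketch of that round trip, not from the source; the "string" schema and sample value are illustrative:

import voldemort.serialization.DefaultSerializerFactory;
import voldemort.serialization.Serializer;
import voldemort.serialization.SerializerDefinition;

static void serializerRoundTripSketch() {
    // Build a definition by hand instead of reading it from stores.xml.
    SerializerDefinition serDef = new SerializerDefinition("string");
    @SuppressWarnings("unchecked")
    Serializer<Object> serializer =
            (Serializer<Object>) new DefaultSerializerFactory().getSerializer(serDef);
    byte[] bytes = serializer.toBytes("hello");       // object -> wire bytes
    Object roundTripped = serializer.toObject(bytes); // back to "hello"
}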
Use of voldemort.serialization.Serializer in project voldemort by voldemort.
From the class VoldemortAdminTool, the method executeFetchKeys:
private static void executeFetchKeys(Integer nodeId, AdminClient adminClient, List<Integer> partitionIdList, String outputDir, List<String> storeNames, boolean useAscii, boolean fetchOrphaned) throws IOException {
List<StoreDefinition> storeDefinitionList = getStoreDefinitions(adminClient, nodeId);
Map<String, StoreDefinition> storeDefinitionMap = Maps.newHashMap();
for (StoreDefinition storeDefinition : storeDefinitionList) {
storeDefinitionMap.put(storeDefinition.getName(), storeDefinition);
}
File directory = null;
if (outputDir != null) {
directory = new File(outputDir);
if (!(directory.exists() || directory.mkdir())) {
Utils.croak("Can't find or create directory " + outputDir);
}
}
List<String> stores = storeNames;
if (stores == null) {
stores = Lists.newArrayList();
stores.addAll(storeDefinitionMap.keySet());
} else {
// add system stores to the map so they can be fetched when
// specified explicitly
storeDefinitionMap.putAll(getSystemStoreDefs());
}
// Pick up all the partitions
if (partitionIdList == null) {
partitionIdList = Lists.newArrayList();
for (Node node : adminClient.getAdminClientCluster().getNodes()) {
partitionIdList.addAll(node.getPartitionIds());
}
}
StoreDefinition storeDefinition = null;
for (String store : stores) {
storeDefinition = storeDefinitionMap.get(store);
if (null == storeDefinition) {
System.out.println("No store found under the name \'" + store + "\'");
continue;
}
Iterator<ByteArray> keyIteratorRef = null;
if (fetchOrphaned) {
System.out.println("Fetching orphaned keys of " + store);
keyIteratorRef = adminClient.bulkFetchOps.fetchOrphanedKeys(nodeId, store);
} else {
System.out.println("Fetching keys in partitions " + Joiner.on(", ").join(partitionIdList) + " of " + store);
keyIteratorRef = adminClient.bulkFetchOps.fetchKeys(nodeId, store, partitionIdList, null, false);
}
File outputFile = null;
if (directory != null) {
outputFile = new File(directory, store + ".keys");
}
final Iterator<ByteArray> keyIterator = keyIteratorRef;
if (useAscii) {
final SerializerDefinition serializerDef = storeDefinition.getKeySerializer();
final SerializerFactory serializerFactory = new DefaultSerializerFactory();
@SuppressWarnings("unchecked")
final Serializer<Object> serializer =
        (Serializer<Object>) serializerFactory.getSerializer(serializerDef);
final CompressionStrategy keysCompressionStrategy;
if (serializerDef != null && serializerDef.hasCompression()) {
keysCompressionStrategy = new CompressionStrategyFactory().get(serializerDef.getCompression());
} else {
keysCompressionStrategy = null;
}
writeAscii(outputFile, new Writable() {
@Override
public void writeTo(BufferedWriter out) throws IOException {
while (keyIterator.hasNext()) {
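// Decode each key: decompress if the store compresses keys, then
// deserialize; Avro records print as text, other types as JSON.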
final JsonGenerator generator = new JsonFactory(new ObjectMapper()).createJsonGenerator(out);
byte[] keyBytes = keyIterator.next().get();
Object keyObject = serializer.toObject((null == keysCompressionStrategy)
        ? keyBytes
        : keysCompressionStrategy.inflate(keyBytes));
if (keyObject instanceof GenericRecord) {
out.write(keyObject.toString());
} else {
generator.writeObject(keyObject);
}
out.write('\n');
}
}
});
} else {
writeBinary(outputFile, new Printable() {
@Override
public void printTo(DataOutputStream out) throws IOException {
while (keyIterator.hasNext()) {
byte[] keyBytes = keyIterator.next().get();
out.writeChars(ByteUtils.toHexString(keyBytes) + "\n");
}
}
});
}
if (outputFile != null)
System.out.println("Fetched keys from " + store + " to " + outputFile);
}
}
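For context, a hedged sketch of calling the same bulk-fetch API outside the tool. The AdminClient constructor shown is an assumption (signatures vary across Voldemort releases); the fetchKeys arguments mirror the call above:

import java.util.Arrays;
import java.util.Iterator;
import voldemort.client.protocol.admin.AdminClient;
import voldemort.utils.ByteArray;
import voldemort.utils.ByteUtils;

static void fetchKeysSketch() {
    // Assumed bootstrap URL, node id, store name and partitions.
    AdminClient adminClient = new AdminClient("tcp://localhost:6666");
    Iterator<ByteArray> keys = adminClient.bulkFetchOps.fetchKeys(
            0,                   // node id
            "test",              // store name
            Arrays.asList(0, 1), // partition ids
            null,                // no filter
            false);              // do not restrict to master partitions
    while (keys.hasNext()) {
        System.out.println(ByteUtils.toHexString(keys.next().get()));
    }
}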
Use of voldemort.serialization.Serializer in project voldemort by voldemort.
From the class VoldemortAdminTool, the method executeQueryKey:
private static void executeQueryKey(final Integer nodeId, AdminClient adminClient, List<String> storeNames, String keyString, String keyFormat) throws IOException {
// decide queryingNode(s) for Key
List<Integer> queryingNodes = new ArrayList<Integer>();
if (nodeId < 0) {
// means all nodes
for (Node node : adminClient.getAdminClientCluster().getNodes()) {
queryingNodes.add(node.getId());
}
} else {
queryingNodes.add(nodeId);
}
// get basic info
List<StoreDefinition> storeDefinitionList = getStoreDefinitions(adminClient, nodeId);
Map<String, StoreDefinition> storeDefinitions = new HashMap<String, StoreDefinition>();
for (StoreDefinition storeDef : storeDefinitionList) {
storeDefinitions.put(storeDef.getName(), storeDef);
}
BufferedWriter out = new BufferedWriter(new OutputStreamWriter(System.out));
// iterate through stores
for (final String storeName : storeNames) {
// store definition
StoreDefinition storeDefinition = storeDefinitions.get(storeName);
if (storeDefinition == null) {
throw new StoreNotFoundException("Store " + storeName + " not found");
}
out.write("STORE_NAME: " + storeDefinition.getName() + "\n");
// k-v serializer
final SerializerDefinition keySerializerDef = storeDefinition.getKeySerializer();
final SerializerDefinition valueSerializerDef = storeDefinition.getValueSerializer();
SerializerFactory serializerFactory = new DefaultSerializerFactory();
@SuppressWarnings("unchecked")
final Serializer<Object> keySerializer =
        (Serializer<Object>) serializerFactory.getSerializer(keySerializerDef);
@SuppressWarnings("unchecked")
final Serializer<Object> valueSerializer =
        (Serializer<Object>) serializerFactory.getSerializer(valueSerializerDef);
// compression strategy
final CompressionStrategy keyCompressionStrategy;
final CompressionStrategy valueCompressionStrategy;
if (keySerializerDef != null && keySerializerDef.hasCompression()) {
keyCompressionStrategy = new CompressionStrategyFactory().get(keySerializerDef.getCompression());
} else {
keyCompressionStrategy = null;
}
if (valueSerializerDef != null && valueSerializerDef.hasCompression()) {
valueCompressionStrategy = new CompressionStrategyFactory().get(valueSerializerDef.getCompression());
} else {
valueCompressionStrategy = null;
}
if (keyCompressionStrategy == null) {
out.write("KEY_COMPRESSION_STRATEGY: None\n");
} else {
out.write("KEY_COMPRESSION_STRATEGY: " + keyCompressionStrategy.getType() + "\n");
}
out.write("KEY_SERIALIZER_NAME: " + keySerializerDef.getName() + "\n");
for (Map.Entry<Integer, String> entry : keySerializerDef.getAllSchemaInfoVersions().entrySet()) {
out.write(String.format("KEY_SCHEMA VERSION=%d\n", entry.getKey()));
out.write("====================================\n");
out.write(entry.getValue());
out.write("\n====================================\n");
}
out.write("\n");
if (valueCompressionStrategy == null) {
out.write("VALUE_COMPRESSION_STRATEGY: None\n");
} else {
out.write("VALUE_COMPRESSION_STRATEGY: " + valueCompressionStrategy.getType() + "\n");
}
out.write("VALUE_SERIALIZER_NAME: " + valueSerializerDef.getName() + "\n");
for (Map.Entry<Integer, String> entry : valueSerializerDef.getAllSchemaInfoVersions().entrySet()) {
out.write(String.format("VALUE_SCHEMA VERSION=%d\n", entry.getKey()));
out.write("====================================\n");
out.write(entry.getValue());
out.write("\n====================================\n");
}
out.write("\n");
// although the streamingOps support multiple keys, we only query
// one key here
ByteArray key;
try {
if (keyFormat.equals("readable")) {
Object keyObject;
String keySerializerName = keySerializerDef.getName();
if (isAvroSchema(keySerializerName)) {
Schema keySchema = Schema.parse(keySerializerDef.getCurrentSchemaInfo());
JsonDecoder decoder = new JsonDecoder(keySchema, keyString);
GenericDatumReader<Object> datumReader = new GenericDatumReader<Object>(keySchema);
keyObject = datumReader.read(null, decoder);
} else if (keySerializerName.equals(DefaultSerializerFactory.JSON_SERIALIZER_TYPE_NAME)) {
JsonReader jsonReader = new JsonReader(new StringReader(keyString));
keyObject = jsonReader.read();
} else {
keyObject = keyString;
}
key = new ByteArray(keySerializer.toBytes(keyObject));
} else {
key = new ByteArray(ByteUtils.fromHexString(keyString));
}
} catch (SerializationException se) {
System.err.println("Error serializing key " + keyString);
System.err.println("If this is a JSON key, you need to include escaped quotation marks in the command line if it is a string");
se.printStackTrace();
return;
} catch (DecoderException de) {
System.err.println("Error decoding key " + keyString);
de.printStackTrace();
return;
} catch (IOException io) {
System.err.println("Error parsing avro string " + keyString);
io.printStackTrace();
return;
}
boolean printedKey = false;
// A Map<> could have been used instead of List<Entry<>> if
// Versioned supported correct hash codes. Read the comment in
// Versioned about the issue
List<Entry<List<Versioned<byte[]>>, List<Integer>>> nodeValues =
        new ArrayList<Entry<List<Versioned<byte[]>>, List<Integer>>>();
for (final Integer queryNodeId : queryingNodes) {
Iterator<QueryKeyResult> iterator = adminClient.streamingOps.queryKeys(queryNodeId,
        storeName, Arrays.asList(key).iterator());
final StringWriter stringWriter = new StringWriter();
QueryKeyResult queryKeyResult = iterator.next();
if (!printedKey) {
// de-serialize and write key
byte[] keyBytes = queryKeyResult.getKey().get();
writeVoldKeyOrValueInternal(keyBytes, keySerializer, keyCompressionStrategy, "KEY", out);
printedKey = true;
}
// iterate through, de-serialize and write values
if (queryKeyResult.hasValues() && queryKeyResult.getValues().size() > 0) {
int elementId = -1;
for (int i = 0; i < nodeValues.size(); i++) {
if (Objects.equal(nodeValues.get(i).getKey(), queryKeyResult.getValues())) {
elementId = i;
break;
}
}
if (elementId == -1) {
ArrayList<Integer> nodes = new ArrayList<Integer>();
nodes.add(queryNodeId);
nodeValues.add(new AbstractMap.SimpleEntry<List<Versioned<byte[]>>, List<Integer>>(
        queryKeyResult.getValues(), nodes));
} else {
nodeValues.get(elementId).getValue().add(queryNodeId);
}
out.write(String.format("\nQueried node %d on store %s\n", queryNodeId, storeName));
int versionCount = 0;
for (Versioned<byte[]> versioned : queryKeyResult.getValues()) {
    // label each version when the key has more than one
    if (queryKeyResult.getValues().size() > 1) {
        out.write("VALUE " + versionCount + "\n");
    }
    // write version
    VectorClock version = (VectorClock) versioned.getVersion();
    out.write("VECTOR_CLOCK_BYTE: " + ByteUtils.toHexString(version.toBytes()) + "\n");
    out.write("VECTOR_CLOCK_TEXT: " + version.toString() + '['
            + new Date(version.getTimestamp()).toString() + "]\n");
    // write value
    byte[] valueBytes = versioned.getValue();
    writeVoldKeyOrValueInternal(valueBytes, valueSerializer, valueCompressionStrategy, "VALUE", out);
    versionCount++;
}
} else if (queryKeyResult.hasException()) {
    boolean isInvalidMetadataException = queryKeyResult.getException() instanceof InvalidMetadataException;
    // Print the exception, unless it is an InvalidMetadataException and more
    // than one node is being queried (expected on nodes not hosting the key).
if (!isInvalidMetadataException || queryingNodes.size() == 1) {
out.write(String.format("\nNode %d on store %s returned exception\n", queryNodeId, storeName));
out.write(queryKeyResult.getException().toString());
out.write("\n====================================\n");
}
} else {
if (queryingNodes.size() == 1) {
out.write(String.format("\nNode %d on store %s returned NULL\n", queryNodeId, storeName));
out.write("\n====================================\n");
}
}
out.flush();
}
out.write("\n====================================\n");
for (Map.Entry<List<Versioned<byte[]>>, List<Integer>> nodeValue : nodeValues) {
out.write("Nodes with same Value " + Arrays.toString(nodeValue.getValue().toArray()));
out.write("\n====================================\n");
}
if (nodeValues.size() > 1) {
out.write("\n*** Multiple (" + nodeValues.size() + ") versions of key/value exist for the key ***\n");
}
out.flush();
}
}
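The "readable" key path above decodes a JSON-encoded key against the store's Avro schema. A standalone sketch of that decode step, using a made-up schema and key string; the JsonDecoder constructor matches the older Avro API this code relies on:

import org.apache.avro.Schema;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.io.JsonDecoder;

static Object decodeReadableKeySketch() throws java.io.IOException {
    // Hypothetical schema and key string, for illustration only.
    Schema keySchema = Schema.parse("{\"type\": \"string\"}");
    JsonDecoder decoder = new JsonDecoder(keySchema, "\"member:42\"");
    // The decoded object can then be passed to keySerializer.toBytes(...).
    return new GenericDatumReader<Object>(keySchema).read(null, decoder);
}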
Use of voldemort.serialization.Serializer in project voldemort by voldemort.
From the class HadoopStoreBuilderTest, the method testHadoopBuild:
@Test
public void testHadoopBuild() throws Exception {
// create test data
Map<String, String> values = new HashMap<String, String>();
File testDir = TestUtils.createTempDir();
File tempDir = new File(testDir, "temp"), tempDir2 = new File(testDir, "temp2");
File outputDir = new File(testDir, "output"), outputDir2 = new File(testDir, "output2");
File storeDir = TestUtils.createTempDir(testDir);
for (int i = 0; i < 200; i++) {
    values.put(Integer.toString(i), Integer.toBinaryString(i));
}
// write test data to text file
File inputFile = File.createTempFile("input", ".txt", testDir);
inputFile.deleteOnExit();
StringBuilder contents = new StringBuilder();
for (Map.Entry<String, String> entry : values.entrySet()) {
    contents.append(entry.getKey() + "\t" + entry.getValue() + "\n");
}
FileUtils.writeStringToFile(inputFile, contents.toString());
String storeName = "test";
SerializerDefinition serDef = new SerializerDefinition("string");
Cluster cluster = ServerTestUtils.getLocalCluster(1);
// Test backwards compatibility
StoreDefinition def = new StoreDefinitionBuilder().setName(storeName)
        .setType(ReadOnlyStorageConfiguration.TYPE_NAME)
        .setKeySerializer(serDef)
        .setValueSerializer(serDef)
        .setRoutingPolicy(RoutingTier.CLIENT)
        .setRoutingStrategyType(RoutingStrategyType.CONSISTENT_STRATEGY)
        .setReplicationFactor(1)
        .setPreferredReads(1)
        .setRequiredReads(1)
        .setPreferredWrites(1)
        .setRequiredWrites(1)
        .build();
HadoopStoreBuilder builder = new HadoopStoreBuilder("testHadoopBuild",
        new Props(), new JobConf(), TextStoreMapper.class, TextInputFormat.class,
        cluster, def, new Path(tempDir2.getAbsolutePath()),
        new Path(outputDir2.getAbsolutePath()), new Path(inputFile.getAbsolutePath()),
        CheckSumType.MD5, saveKeys, false, 64 * 1024, false, null, false);
builder.build();
builder = new HadoopStoreBuilder("testHadoopBuild",
        new Props(), new JobConf(), TextStoreMapper.class, TextInputFormat.class,
        cluster, def, new Path(tempDir.getAbsolutePath()),
        new Path(outputDir.getAbsolutePath()), new Path(inputFile.getAbsolutePath()),
        CheckSumType.MD5, saveKeys, false, 64 * 1024, false, null, false);
builder.build();
// Check if checkSum is generated in outputDir
File nodeFile = new File(outputDir, "node-0");
// Check if metadata file exists
File metadataFile = new File(nodeFile, ".metadata");
Assert.assertTrue("Metadata file should exist!", metadataFile.exists());
ReadOnlyStorageMetadata metadata = new ReadOnlyStorageMetadata(metadataFile);
if (saveKeys)
    Assert.assertEquals("In saveKeys mode, the metadata format should be READONLY_V2!",
            metadata.get(ReadOnlyStorageMetadata.FORMAT),
            ReadOnlyStorageFormat.READONLY_V2.getCode());
else
    Assert.assertEquals("In legacy mode (saveKeys==false), the metadata format should be READONLY_V1!",
            metadata.get(ReadOnlyStorageMetadata.FORMAT),
            ReadOnlyStorageFormat.READONLY_V1.getCode());
Assert.assertEquals("Checksum type should be MD5!",
        metadata.get(ReadOnlyStorageMetadata.CHECKSUM_TYPE),
        CheckSum.toString(CheckSumType.MD5));
// Check contents of checkSum file
byte[] md5 = Hex.decodeHex(((String) metadata.get(ReadOnlyStorageMetadata.CHECKSUM)).toCharArray());
byte[] checkSumBytes = CheckSumTests.calculateCheckSum(nodeFile.listFiles(), CheckSumType.MD5);
Assert.assertEquals("Checksum is not as expected!", 0, ByteUtils.compare(checkSumBytes, md5));
// check if fetching works
HdfsFetcher fetcher = new HdfsFetcher();
// Fetch to version directory
File versionDir = new File(storeDir, "version-0");
fetcher.fetch(nodeFile.getAbsolutePath(), versionDir.getAbsolutePath());
Assert.assertTrue("Version directory should exist!", versionDir.exists());
// open store
@SuppressWarnings("unchecked")
Serializer<Object> serializer =
        (Serializer<Object>) new DefaultSerializerFactory().getSerializer(serDef);
ReadOnlyStorageEngine engine = new ReadOnlyStorageEngine(storeName,
        searchStrategy,
        new RoutingStrategyFactory().updateRoutingStrategy(def, cluster),
        0, storeDir, 1);
Store<Object, Object, Object> store = SerializingStore.wrap(engine, serializer, serializer, serializer);
// check values
for (Map.Entry<String, String> entry : values.entrySet()) {
String key = entry.getKey();
try {
List<Versioned<Object>> found = store.get(key, null);
Assert.assertEquals("Incorrect number of results", 1, found.size());
Assert.assertEquals(entry.getValue(), found.get(0).getValue());
} catch (VoldemortException e) {
throw new VoldemortException("Got an exception while trying to get key '" + key + "'.", e);
}
}
// also check the iterator - first key iterator...
try {
ClosableIterator<ByteArray> keyIterator = engine.keys();
if (!saveKeys) {
fail("Should have thrown an exception since this RO format does not support iterators");
}
int numElements = 0;
while (keyIterator.hasNext()) {
Assert.assertTrue(values.containsKey(serializer.toObject(keyIterator.next().get())));
numElements++;
}
Assert.assertEquals(numElements, values.size());
} catch (UnsupportedOperationException e) {
if (saveKeys) {
fail("Should not have thrown an exception since this RO format does support iterators");
}
}
// ... and entry iterator
try {
ClosableIterator<Pair<ByteArray, Versioned<byte[]>>> entryIterator = engine.entries();
if (!saveKeys) {
fail("Should have thrown an exception since this RO format does not support iterators");
}
int numElements = 0;
while (entryIterator.hasNext()) {
Pair<ByteArray, Versioned<byte[]>> entry = entryIterator.next();
Assert.assertEquals(values.get(serializer.toObject(entry.getFirst().get())), serializer.toObject(entry.getSecond().getValue()));
numElements++;
}
Assert.assertEquals(numElements, values.size());
} catch (UnsupportedOperationException e) {
if (saveKeys) {
fail("Should not have thrown an exception since this RO format does support iterators");
}
}
}
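The saveKeys and searchStrategy fields referenced in the test are not declared in this snippet; they come from the enclosing parameterized test class. A plausible, hypothetical setup consistent with that usage:

import java.util.Arrays;
import java.util.Collection;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import voldemort.store.readonly.BinarySearchStrategy;
import voldemort.store.readonly.SearchStrategy;

@RunWith(Parameterized.class)
public class HadoopStoreBuilderTest {

    private final boolean saveKeys;
    private final SearchStrategy searchStrategy;

    public HadoopStoreBuilderTest(boolean saveKeys, SearchStrategy searchStrategy) {
        this.saveKeys = saveKeys;
        this.searchStrategy = searchStrategy;
    }

    @Parameters
    public static Collection<Object[]> configs() {
        // One legacy (READONLY_V1) run and one saveKeys (READONLY_V2) run.
        return Arrays.asList(new Object[][] {
                { false, new BinarySearchStrategy() },
                { true, new BinarySearchStrategy() } });
    }
}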
Use of voldemort.serialization.Serializer in project voldemort by voldemort.
From the class VoldemortAdminTool, the method executeFetchEntries:
private static void executeFetchEntries(Integer nodeId, AdminClient adminClient, List<Integer> partitionIdList, String outputDir, List<String> storeNames, boolean useAscii, boolean fetchOrphaned) throws IOException {
List<StoreDefinition> storeDefinitionList = getStoreDefinitions(adminClient, nodeId);
HashMap<String, StoreDefinition> storeDefinitionMap = Maps.newHashMap();
for (StoreDefinition storeDefinition : storeDefinitionList) {
storeDefinitionMap.put(storeDefinition.getName(), storeDefinition);
}
File directory = null;
if (outputDir != null) {
directory = new File(outputDir);
if (!(directory.exists() || directory.mkdir())) {
Utils.croak("Can't find or create directory " + outputDir);
}
}
List<String> stores = storeNames;
if (stores == null) {
// when no stores are specified, all user-defined stores are fetched,
// but not system stores.
stores = Lists.newArrayList();
stores.addAll(storeDefinitionMap.keySet());
} else {
// add system stores to the map so they can be fetched when
// specified explicitly
storeDefinitionMap.putAll(getSystemStoreDefs());
}
// Pick up all the partitions
if (partitionIdList == null) {
partitionIdList = Lists.newArrayList();
for (Node node : adminClient.getAdminClientCluster().getNodes()) {
partitionIdList.addAll(node.getPartitionIds());
}
}
StoreDefinition storeDefinition = null;
for (String store : stores) {
storeDefinition = storeDefinitionMap.get(store);
if (null == storeDefinition) {
System.out.println("No store found under the name \'" + store + "\'");
continue;
}
Iterator<Pair<ByteArray, Versioned<byte[]>>> entriesIteratorRef = null;
if (fetchOrphaned) {
System.out.println("Fetching orphaned entries of " + store);
entriesIteratorRef = adminClient.bulkFetchOps.fetchOrphanedEntries(nodeId, store);
} else {
System.out.println("Fetching entries in partitions " + Joiner.on(", ").join(partitionIdList) + " of " + store);
entriesIteratorRef = adminClient.bulkFetchOps.fetchEntries(nodeId, store, partitionIdList, null, false);
}
final Iterator<Pair<ByteArray, Versioned<byte[]>>> entriesIterator = entriesIteratorRef;
File outputFile = null;
if (directory != null) {
outputFile = new File(directory, store + ".entries");
}
if (useAscii) {
// k-v serializer
SerializerDefinition keySerializerDef = storeDefinition.getKeySerializer();
SerializerDefinition valueSerializerDef = storeDefinition.getValueSerializer();
SerializerFactory serializerFactory = new DefaultSerializerFactory();
@SuppressWarnings("unchecked")
final Serializer<Object> keySerializer =
        (Serializer<Object>) serializerFactory.getSerializer(keySerializerDef);
@SuppressWarnings("unchecked")
final Serializer<Object> valueSerializer =
        (Serializer<Object>) serializerFactory.getSerializer(valueSerializerDef);
// compression strategy
final CompressionStrategy keyCompressionStrategy;
final CompressionStrategy valueCompressionStrategy;
if (keySerializerDef != null && keySerializerDef.hasCompression()) {
keyCompressionStrategy = new CompressionStrategyFactory().get(keySerializerDef.getCompression());
} else {
keyCompressionStrategy = null;
}
if (valueSerializerDef != null && valueSerializerDef.hasCompression()) {
valueCompressionStrategy = new CompressionStrategyFactory().get(valueSerializerDef.getCompression());
} else {
valueCompressionStrategy = null;
}
writeAscii(outputFile, new Writable() {
@Override
public void writeTo(BufferedWriter out) throws IOException {
while (entriesIterator.hasNext()) {
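// Emit one line per entry: key, then its vector clock, then the value;
// Avro records print as text, other types as JSON.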
final JsonGenerator generator = new JsonFactory(new ObjectMapper()).createJsonGenerator(out);
Pair<ByteArray, Versioned<byte[]>> kvPair = entriesIterator.next();
byte[] keyBytes = kvPair.getFirst().get();
byte[] valueBytes = kvPair.getSecond().getValue();
VectorClock version = (VectorClock) kvPair.getSecond().getVersion();
Object keyObject = keySerializer.toObject((null == keyCompressionStrategy)
        ? keyBytes : keyCompressionStrategy.inflate(keyBytes));
Object valueObject = valueSerializer.toObject((null == valueCompressionStrategy)
        ? valueBytes : valueCompressionStrategy.inflate(valueBytes));
if (keyObject instanceof GenericRecord) {
out.write(keyObject.toString());
} else {
generator.writeObject(keyObject);
}
out.write(' ' + version.toString() + ' ');
if (valueObject instanceof GenericRecord) {
out.write(valueObject.toString());
} else {
generator.writeObject(valueObject);
}
out.write('\n');
}
}
});
} else {
writeBinary(outputFile, new Printable() {
@Override
public void printTo(DataOutputStream out) throws IOException {
while (entriesIterator.hasNext()) {
Pair<ByteArray, Versioned<byte[]>> kvPair = entriesIterator.next();
byte[] keyBytes = kvPair.getFirst().get();
VectorClock clock = ((VectorClock) kvPair.getSecond().getVersion());
byte[] valueBytes = kvPair.getSecond().getValue();
out.writeChars(ByteUtils.toHexString(keyBytes));
out.writeChars(",");
out.writeChars(clock.toString());
out.writeChars(",");
out.writeChars(ByteUtils.toHexString(valueBytes));
out.writeChars("\n");
}
}
});
}
if (outputFile != null)
System.out.println("Fetched entries from " + store + " to " + outputFile);
}
}
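Both fetch methods resolve an optional CompressionStrategy from the serializer definition before deserializing. A condensed sketch of that shared pattern (the method name here is hypothetical):

import java.io.IOException;
import voldemort.serialization.SerializerDefinition;
import voldemort.store.StoreDefinition;
import voldemort.store.compress.CompressionStrategy;
import voldemort.store.compress.CompressionStrategyFactory;

static byte[] maybeInflateSketch(StoreDefinition storeDefinition, byte[] rawBytes)
        throws IOException {
    // Resolve the value-side compression strategy, if the schema declares one.
    SerializerDefinition valueDef = storeDefinition.getValueSerializer();
    CompressionStrategy strategy = (valueDef != null && valueDef.hasCompression())
            ? new CompressionStrategyFactory().get(valueDef.getCompression())
            : null;
    // Inflate before deserializing; pass bytes through untouched otherwise.
    return (strategy == null) ? rawBytes : strategy.inflate(rawBytes);
}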