Example usage of voldemort.versioning.VectorClock in the voldemort project: class R2Store, method parseGetAllResults.
/**
 * Parses the multipart body of a REST GET-ALL response into a map from each
 * requested key to the list of versioned values stored for it.
 *
 * <p>The entity is a "multipart/mixed" document with one outer part per key.
 * The key itself is carried base64-encoded in the part's "Content-Location"
 * header. Each outer part nests another multipart with one body part per
 * versioned value; every value part carries its serialized vector clock in
 * the {@code RestMessageHeaders.X_VOLD_VECTOR_CLOCK} header and its payload
 * size in {@code RestMessageHeaders.CONTENT_LENGTH}.
 *
 * @param entity raw response body received from the server
 * @return map of key to all of its versioned values; empty when the entity
 *         contains no parts
 * @throws VoldemortException if the multipart structure or the embedded JSON
 *         vector clocks cannot be parsed, or on any I/O failure
 */
private Map<ByteArray, List<Versioned<byte[]>>> parseGetAllResults(ByteString entity) {
    Map<ByteArray, List<Versioned<byte[]>>> results = new HashMap<ByteArray, List<Versioned<byte[]>>>();
    try {
        // Copy the response buffer into a plain byte array for javax.mail.
        byte[] bytes = new byte[entity.length()];
        entity.copyBytes(bytes, 0);
        // Get the outer multipart object
        ByteArrayDataSource ds = new ByteArrayDataSource(bytes, "multipart/mixed");
        MimeMultipart mp = new MimeMultipart(ds);
        for (int i = 0; i < mp.getCount(); i++) {
            // Get an individual part. This contains all the versioned
            // values for a particular key referenced by content-location
            MimeBodyPart part = (MimeBodyPart) mp.getBodyPart(i);
            // Get the key: Content-Location is "/<store>/<base64-key>", so the
            // key is the third path segment.
            String contentLocation = part.getHeader("Content-Location")[0];
            String base64Key = contentLocation.split("/")[2];
            ByteArray key = new ByteArray(RestUtils.decodeVoldemortKey(base64Key));
            if (logger.isDebugEnabled()) {
                logger.debug("Content-Location : " + contentLocation);
                logger.debug("Base 64 key : " + base64Key);
            }
            // Create an array list for holding all the (versioned values)
            List<Versioned<byte[]>> valueResultList = new ArrayList<Versioned<byte[]>>();
            /*
             * Get the nested Multi-part object. This contains one part for
             * each unique versioned value.
             *
             * getContent() can corrupt the embedded binary data (for example
             * 0x8c may be converted to 0xc2, 0x8c), hence use getInputStream.
             * See: http://stackoverflow.com/questions/23023583/
             *      mimebodypart-getcontent-corrupts-binary-data
             *
             * getInputStream() : Return a decoded input stream for this
             * Message's content.
             *
             * getRawInputStream() : Return an InputStream to the raw data
             * with any Content-Transfer-Encoding intact. Useful if the
             * "Content-Transfer-Encoding" header is incorrect or corrupt,
             * in which case the application may use this method and attempt
             * to decode the raw data itself.
             */
            ByteArrayDataSource nestedDS = new ByteArrayDataSource(part.getInputStream(), "multipart/mixed");
            MimeMultipart valueParts = new MimeMultipart(nestedDS);
            for (int valueId = 0; valueId < valueParts.getCount(); valueId++) {
                MimeBodyPart valuePart = (MimeBodyPart) valueParts.getBodyPart(valueId);
                String serializedVC = valuePart.getHeader(RestMessageHeaders.X_VOLD_VECTOR_CLOCK)[0];
                int contentLength = Integer.parseInt(valuePart.getHeader(RestMessageHeaders.CONTENT_LENGTH)[0]);
                if (logger.isDebugEnabled()) {
                    logger.debug("Received serialized Vector Clock : " + serializedVC);
                }
                VectorClockWrapper vcWrapper = mapper.readValue(serializedVC, VectorClockWrapper.class);
                // Get the value bytes.
                // BUG FIX: InputStream.read(byte[]) may return fewer bytes
                // than requested; loop until the declared content length has
                // been fully read so values are never silently truncated.
                InputStream input = valuePart.getInputStream();
                byte[] bodyPartBytes = new byte[contentLength];
                int offset = 0;
                while (offset < contentLength) {
                    int read = input.read(bodyPartBytes, offset, contentLength - offset);
                    if (read == -1) {
                        throw new IOException("Unexpected end of stream after reading " + offset
                                              + " of " + contentLength + " value bytes");
                    }
                    offset += read;
                }
                VectorClock clock = new VectorClock(vcWrapper.getVersions(), vcWrapper.getTimestamp());
                valueResultList.add(new Versioned<byte[]>(bodyPartBytes, clock));
            }
            results.put(key, valueResultList);
        }
    } catch (MessagingException e) {
        throw new VoldemortException("Messaging exception while trying to parse GET response " + e.getMessage(), e);
    } catch (JsonParseException e) {
        throw new VoldemortException("JSON parsing exception while trying to parse GET response " + e.getMessage(), e);
    } catch (JsonMappingException e) {
        throw new VoldemortException("JSON mapping exception while trying to parse GET response " + e.getMessage(), e);
    } catch (IOException e) {
        throw new VoldemortException("IO exception while trying to parse GET response " + e.getMessage(), e);
    }
    return results;
}
Example usage of voldemort.versioning.VectorClock in the voldemort project: class VoldemortAdminTool, method readEntriesBinary.
/**
 * Returns a lazy iterator over the (key, versioned value) entries stored in
 * the binary dump file "{storeName}.entries" under {@code inputDir}.
 *
 * <p>Each record in the file is three length-prefixed byte arrays, in order:
 * key bytes, serialized vector-clock bytes, value bytes. The underlying
 * stream is closed automatically when a clean EOF is reached or when a read
 * error occurs.
 *
 * @param inputDir  directory containing the dump file
 * @param storeName store whose "{storeName}.entries" file is read
 * @return iterator producing one Pair per record until end of file
 * @throws FileNotFoundException if the dump file does not exist
 * @throws IOException on failure opening the file
 */
private static Iterator<Pair<ByteArray, Versioned<byte[]>>> readEntriesBinary(File inputDir, String storeName) throws IOException {
    File inputFile = new File(inputDir, storeName + ".entries");
    if (!inputFile.exists()) {
        throw new FileNotFoundException("File " + inputFile.getAbsolutePath() + " does not exist!");
    }
    final DataInputStream dis = new DataInputStream(new BufferedInputStream(new FileInputStream(inputFile)));
    return new AbstractIterator<Pair<ByteArray, Versioned<byte[]>>>() {

        @Override
        protected Pair<ByteArray, Versioned<byte[]>> computeNext() {
            try {
                // Record layout: [int length][key][int length][version][int length][value]
                int length = dis.readInt();
                byte[] keyBytes = new byte[length];
                ByteUtils.read(dis, keyBytes);
                length = dis.readInt();
                byte[] versionBytes = new byte[length];
                ByteUtils.read(dis, versionBytes);
                length = dis.readInt();
                byte[] valueBytes = new byte[length];
                ByteUtils.read(dis, valueBytes);
                ByteArray key = new ByteArray(keyBytes);
                VectorClock version = new VectorClock(versionBytes);
                Versioned<byte[]> value = new Versioned<byte[]>(valueBytes, version);
                return new Pair<ByteArray, Versioned<byte[]>>(key, value);
            } catch (EOFException e) {
                // EOF at a record boundary is the normal termination signal.
                closeQuietly(dis);
                return endOfData();
            } catch (IOException e) {
                closeQuietly(dis);
                throw new VoldemortException("Error reading from input file ", e);
            }
        }
    };
}

/**
 * Closes the stream, reporting (but never propagating) any close failure so
 * the caller's primary outcome — end-of-data or the original read error —
 * is not masked.
 */
private static void closeQuietly(DataInputStream dis) {
    try {
        dis.close();
    } catch (IOException ie) {
        ie.printStackTrace();
    }
}
Example usage of voldemort.versioning.VectorClock in the voldemort project: class VoldemortAdminTool, method executeFetchEntries.
/**
 * Fetches entries (or orphaned entries) from the given node for each named
 * store and writes them either as ASCII (deserialized JSON plus vector
 * clock) or as hex-encoded binary.
 *
 * @param nodeId          node to fetch from
 * @param adminClient     admin client used for store metadata and bulk fetch
 * @param partitionIdList partitions to fetch; when null, all partitions in
 *                        the cluster are used
 * @param outputDir       directory for per-store output files; when null,
 *                        output goes to stdout via writeAscii/writeBinary
 * @param storeNames      stores to fetch; when null, all user-defined stores
 *                        are fetched (system stores only when named explicitly)
 * @param useAscii        true for human-readable output, false for binary/hex
 * @param fetchOrphaned   true to fetch orphaned entries instead of
 *                        partition-scoped entries
 * @throws IOException on failure writing output
 */
private static void executeFetchEntries(Integer nodeId, AdminClient adminClient, List<Integer> partitionIdList, String outputDir, List<String> storeNames, boolean useAscii, boolean fetchOrphaned) throws IOException {
    List<StoreDefinition> storeDefinitionList = getStoreDefinitions(adminClient, nodeId);
    HashMap<String, StoreDefinition> storeDefinitionMap = Maps.newHashMap();
    for (StoreDefinition storeDefinition : storeDefinitionList) {
        storeDefinitionMap.put(storeDefinition.getName(), storeDefinition);
    }
    File directory = null;
    if (outputDir != null) {
        directory = new File(outputDir);
        if (!(directory.exists() || directory.mkdir())) {
            Utils.croak("Can't find or create directory " + outputDir);
        }
    }
    List<String> stores = storeNames;
    if (stores == null) {
        // when no stores specified, all user defined store will be fetched,
        // but not system stores.
        stores = Lists.newArrayList();
        stores.addAll(storeDefinitionMap.keySet());
    } else {
        // add system stores to the map so they can be fetched when
        // specified explicitly
        storeDefinitionMap.putAll(getSystemStoreDefs());
    }
    // Pick up all the partitions
    if (partitionIdList == null) {
        partitionIdList = Lists.newArrayList();
        for (Node node : adminClient.getAdminClientCluster().getNodes()) {
            partitionIdList.addAll(node.getPartitionIds());
        }
    }
    StoreDefinition storeDefinition = null;
    for (String store : stores) {
        storeDefinition = storeDefinitionMap.get(store);
        if (null == storeDefinition) {
            System.out.println("No store found under the name \'" + store + "\'");
            continue;
        }
        Iterator<Pair<ByteArray, Versioned<byte[]>>> entriesIteratorRef = null;
        if (fetchOrphaned) {
            System.out.println("Fetching orphaned entries of " + store);
            entriesIteratorRef = adminClient.bulkFetchOps.fetchOrphanedEntries(nodeId, store);
        } else {
            System.out.println("Fetching entries in partitions " + Joiner.on(", ").join(partitionIdList) + " of " + store);
            entriesIteratorRef = adminClient.bulkFetchOps.fetchEntries(nodeId, store, partitionIdList, null, false);
        }
        final Iterator<Pair<ByteArray, Versioned<byte[]>>> entriesIterator = entriesIteratorRef;
        File outputFile = null;
        if (directory != null) {
            outputFile = new File(directory, store + ".entries");
        }
        if (useAscii) {
            // k-v serializer
            SerializerDefinition keySerializerDef = storeDefinition.getKeySerializer();
            SerializerDefinition valueSerializerDef = storeDefinition.getValueSerializer();
            SerializerFactory serializerFactory = new DefaultSerializerFactory();
            @SuppressWarnings("unchecked") final Serializer<Object> keySerializer = (Serializer<Object>) serializerFactory.getSerializer(keySerializerDef);
            @SuppressWarnings("unchecked") final Serializer<Object> valueSerializer = (Serializer<Object>) serializerFactory.getSerializer(valueSerializerDef);
            // compression strategy
            final CompressionStrategy keyCompressionStrategy;
            final CompressionStrategy valueCompressionStrategy;
            if (keySerializerDef != null && keySerializerDef.hasCompression()) {
                keyCompressionStrategy = new CompressionStrategyFactory().get(keySerializerDef.getCompression());
            } else {
                keyCompressionStrategy = null;
            }
            if (valueSerializerDef != null && valueSerializerDef.hasCompression()) {
                valueCompressionStrategy = new CompressionStrategyFactory().get(valueSerializerDef.getCompression());
            } else {
                valueCompressionStrategy = null;
            }
            writeAscii(outputFile, new Writable() {

                @Override
                public void writeTo(BufferedWriter out) throws IOException {
                    // PERF FIX: create the generator once per output file,
                    // not once per entry — it is loop-invariant.
                    final JsonGenerator generator = new JsonFactory(new ObjectMapper()).createJsonGenerator(out);
                    while (entriesIterator.hasNext()) {
                        Pair<ByteArray, Versioned<byte[]>> kvPair = entriesIterator.next();
                        byte[] keyBytes = kvPair.getFirst().get();
                        byte[] valueBytes = kvPair.getSecond().getValue();
                        VectorClock version = (VectorClock) kvPair.getSecond().getVersion();
                        // Inflate before deserializing when the store uses compression.
                        Object keyObject = keySerializer.toObject((null == keyCompressionStrategy) ? keyBytes : keyCompressionStrategy.inflate(keyBytes));
                        Object valueObject = valueSerializer.toObject((null == valueCompressionStrategy) ? valueBytes : valueCompressionStrategy.inflate(valueBytes));
                        // Avro GenericRecords render themselves; everything
                        // else is emitted through the JSON generator.
                        if (keyObject instanceof GenericRecord) {
                            out.write(keyObject.toString());
                        } else {
                            generator.writeObject(keyObject);
                        }
                        out.write(' ' + version.toString() + ' ');
                        if (valueObject instanceof GenericRecord) {
                            out.write(valueObject.toString());
                        } else {
                            generator.writeObject(valueObject);
                        }
                        out.write('\n');
                    }
                }
            });
        } else {
            writeBinary(outputFile, new Printable() {

                @Override
                public void printTo(DataOutputStream out) throws IOException {
                    // Binary mode: hex-encoded key,clock,value per line.
                    while (entriesIterator.hasNext()) {
                        Pair<ByteArray, Versioned<byte[]>> kvPair = entriesIterator.next();
                        byte[] keyBytes = kvPair.getFirst().get();
                        VectorClock clock = ((VectorClock) kvPair.getSecond().getVersion());
                        byte[] valueBytes = kvPair.getSecond().getValue();
                        out.writeChars(ByteUtils.toHexString(keyBytes));
                        out.writeChars(",");
                        out.writeChars(clock.toString());
                        out.writeChars(",");
                        out.writeChars(ByteUtils.toHexString(valueBytes));
                        out.writeChars("\n");
                    }
                }
            });
        }
        if (outputFile != null)
            System.out.println("Fetched keys from " + store + " to " + outputFile);
    }
}
Example usage of voldemort.versioning.VectorClock in the voldemort project: class RestClientTest, method testPutIfNotObsolete.
@Override
@Test
public void testPutIfNotObsolete() {
    // Build a clock with one increment, keeping a snapshot of the
    // pre-increment... actually a copy of the incremented clock, so we can
    // later replay a stale write attempt with the very same version.
    VectorClock currentClock = new VectorClock();
    currentClock.incrementVersion(this.nodeId, System.currentTimeMillis());
    VectorClock staleClock = currentClock.clone();
    // First write with the fresh clock must be accepted.
    client.putIfNotObsolete("k", new Versioned<String>("v", currentClock));
    assertEquals("PUT of non-obsolete version should succeed.", "v", client.getValue("k"));
    // Re-submitting the same (now obsolete) clock must be rejected and
    // leave the stored value untouched.
    assertFalse(client.putIfNotObsolete("k", new Versioned<String>("v2", staleClock)));
    assertEquals("Failed PUT should not change the value stored.", "v", client.getValue("k"));
}
Example usage of voldemort.versioning.VectorClock in the voldemort project: class RestClientTest, method testPutVersioned.
@Override
@Test
public void testPutVersioned() {
    // Start from a clock incremented once on this node, and keep a copy of
    // that clock around to attempt an obsolete write later.
    VectorClock writeClock = new VectorClock();
    writeClock.incrementVersion(this.nodeId, System.currentTimeMillis());
    VectorClock savedClock = writeClock.clone();
    client.put("k", new Versioned<String>("v", writeClock));
    Versioned<String> fetched = client.get("k");
    assertEquals("GET should return the version set by PUT.", "v", fetched.getValue());
    // The server is expected to bump the clock once more on PUT; compare
    // entry lists rather than full clocks to ignore timestamp differences.
    VectorClock expected = savedClock.clone();
    expected.incrementVersion(this.nodeId, System.currentTimeMillis());
    assertEquals("The version should be incremented after a put.", expected.getEntries(), ((VectorClock) fetched.getVersion()).getEntries());
    // Writing with the saved (obsolete) clock must be rejected.
    try {
        client.put("k", new Versioned<String>("v", savedClock));
        fail("Put of obsolete version should throw exception.");
    } catch (ObsoleteVersionException e) {
        // this is good
    }
    // PUT of a concurrent version should succeed
    client.put("k", new Versioned<String>("v2", new VectorClock().incremented(nodeId + 1, time.getMilliseconds())));
    assertEquals("GET should return the new value set by PUT.", "v2", client.getValue("k"));
    assertEquals("GET should return the new version set by PUT.", expected.incremented(nodeId + 1, time.getMilliseconds()), client.get("k").getVersion());
}
End of aggregated VectorClock usage examples.