Use of voldemort.serialization.SerializationException in project voldemort by voldemort.
The class ThriftSerializer, method toObject.
public T toObject(byte[] bytes) {
    MemoryBuffer buffer = new MemoryBuffer();
    try {
        buffer.write(bytes);
    } catch (TTransportException e) {
        throw new SerializationException(e);
    }
    TProtocol protocol = createThriftProtocol(buffer);
    T msg = null;
    try {
        msg = messageClass.newInstance();
        msg.read(protocol);
    } catch (InstantiationException e) {
        throw new SerializationException(e);
    } catch (IllegalAccessException e) {
        throw new SerializationException(e);
    } catch (TException e) {
        throw new SerializationException(e);
    }
    return msg;
}
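Because toObject throws SerializationException without declaring it, the exception propagates unchecked to the caller, which can catch it to separate undecodable data from transport failures. A minimal, hypothetical caller-side sketch (the SafeDecode class and its generic helper are illustrative, not part of Voldemort):

import voldemort.serialization.SerializationException;
import voldemort.serialization.Serializer;

// Sketch only: T stands in for a Thrift-generated class and the Serializer<T>
// (for example a ThriftSerializer) is assumed to have been created elsewhere.
public class SafeDecode {

    public static <T> T decodeOrNull(Serializer<T> serializer, byte[] rawBytes) {
        try {
            return serializer.toObject(rawBytes);
        } catch (SerializationException e) {
            // Bytes could not be parsed as the expected type
            // (corrupt value, mismatched schema, or truncated buffer).
            System.err.println("Skipping undecodable record: " + e.getMessage());
            return null;
        }
    }
}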
Use of voldemort.serialization.SerializationException in project voldemort by voldemort.
The class ThriftSerializer, method toBytes.
public byte[] toBytes(T object) {
    MemoryBuffer buffer = new MemoryBuffer();
    TProtocol protocol = createThriftProtocol(buffer);
    try {
        object.write(protocol);
    } catch (TException e) {
        throw new SerializationException(e);
    }
    return buffer.toByteArray();
}
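toBytes and toObject are mirror images: one streams the TBase message through a TProtocol into an in-memory transport, the other replays stored bytes through the same protocol into a freshly instantiated message, and both rethrow Thrift's checked exceptions as SerializationException. A rough standalone sketch of that pattern, written against stock Thrift classes (TMemoryBuffer, TBinaryProtocol) rather than Voldemort's internal MemoryBuffer and createThriftProtocol, could look like this:

import java.util.Arrays;

import org.apache.thrift.TBase;
import org.apache.thrift.TException;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.transport.TMemoryBuffer;

import voldemort.serialization.SerializationException;

// Sketch only: mirrors ThriftSerializer's wrap-checked-exceptions pattern with
// plain libthrift classes; MSG must be a Thrift-generated struct.
public class ThriftRoundTripSketch {

    public static <MSG extends TBase<?, ?>> byte[] toBytes(MSG message) {
        TMemoryBuffer buffer = new TMemoryBuffer(1024);
        try {
            message.write(new TBinaryProtocol(buffer));
        } catch (TException e) {
            throw new SerializationException(e);
        }
        // getArray() may be larger than the written length, so trim to length().
        return Arrays.copyOf(buffer.getArray(), buffer.length());
    }

    public static <MSG extends TBase<?, ?>> MSG toObject(byte[] bytes, Class<MSG> messageClass) {
        TMemoryBuffer buffer = new TMemoryBuffer(bytes.length);
        try {
            buffer.write(bytes);
            MSG message = messageClass.newInstance();
            message.read(new TBinaryProtocol(buffer));
            return message;
        } catch (TException | ReflectiveOperationException e) {
            throw new SerializationException(e);
        }
    }
}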
Use of voldemort.serialization.SerializationException in project voldemort by voldemort.
The class AvroReflectiveSerializer, method toBytes.
public byte[] toBytes(T object) {
    ByteArrayOutputStream output = new ByteArrayOutputStream();
    Encoder encoder = new BinaryEncoder(output);
    ReflectDatumWriter<T> datumWriter = null;
    try {
        datumWriter = new ReflectDatumWriter<T>(clazz);
        datumWriter.write(object, encoder);
        encoder.flush();
    } catch (IOException e) {
        throw new SerializationException(e);
    } finally {
        SerializationUtils.close(output);
    }
    return output.toByteArray();
}
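The decoding direction of this serializer has the same shape: a ReflectDatumReader drives a binary Decoder over the stored bytes, and any IOException is rethrown as SerializationException. A minimal round-trip sketch, assuming the same pre-1.5 Avro API that the method above uses (public BinaryEncoder constructor, DecoderFactory.defaultFactory()), might look like:

import java.io.ByteArrayOutputStream;
import java.io.IOException;

import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.Encoder;
import org.apache.avro.reflect.ReflectDatumReader;
import org.apache.avro.reflect.ReflectDatumWriter;

import voldemort.serialization.SerializationException;

// Sketch only: shows the same wrap-IOException pattern for a reflective Avro
// round trip; 'clazz' would normally come from the serializer's schema info.
public class AvroReflectSketch<T> {

    private final Class<T> clazz;

    public AvroReflectSketch(Class<T> clazz) {
        this.clazz = clazz;
    }

    public byte[] toBytes(T object) {
        ByteArrayOutputStream output = new ByteArrayOutputStream();
        Encoder encoder = new BinaryEncoder(output);
        try {
            new ReflectDatumWriter<T>(clazz).write(object, encoder);
            encoder.flush();
        } catch (IOException e) {
            throw new SerializationException(e);
        }
        return output.toByteArray();
    }

    public T toObject(byte[] bytes) {
        Decoder decoder = DecoderFactory.defaultFactory().createBinaryDecoder(bytes, null);
        try {
            return new ReflectDatumReader<T>(clazz).read(null, decoder);
        } catch (IOException e) {
            throw new SerializationException(e);
        }
    }
}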
Use of voldemort.serialization.SerializationException in project voldemort by voldemort.
The class AbstractStoreClientFactory, method getRawStore.
@SuppressWarnings("unchecked")
public <K, V, T> Store<K, V, T> getRawStore(String storeName, InconsistencyResolver<Versioned<V>> resolver, String customStoresXml, String clusterXmlString, FailureDetector fd) {
    logger.info("Client zone-id [" + this.routedStoreConfig.getClientZoneId() + "] Attempting to get raw store [" + storeName + "] ");
    if (logger.isDebugEnabled()) {
        for (URI uri : bootstrapUrls) {
            logger.debug("Client Bootstrap url [" + uri + "]");
        }
    }
    // Get cluster and store metadata
    String clusterXml = clusterXmlString;
    if (clusterXml == null) {
        logger.debug("Fetching cluster.xml ...");
        clusterXml = bootstrapMetadataWithRetries(MetadataStore.CLUSTER_KEY, bootstrapUrls);
    }
    this.cluster = clusterMapper.readCluster(new StringReader(clusterXml), false);
    String storesXml = customStoresXml;
    if (storesXml == null) {
        String storesKey = storeName;
        if (config.isFetchAllStoresXmlInBootstrap()) {
            storesKey = MetadataStore.STORES_KEY;
        }
        if (logger.isDebugEnabled()) {
            logger.debug("Fetching store definition for Store " + storeName + " key " + storesKey);
        }
        storesXml = bootstrapMetadataWithRetries(storesKey, bootstrapUrls);
    }
    if (logger.isDebugEnabled()) {
        logger.debug("Obtained cluster metadata xml" + clusterXml);
        logger.debug("Obtained stores metadata xml" + storesXml);
    }
    storeDefs = storeMapper.readStoreList(new StringReader(storesXml), false);
    StoreDefinition storeDef = null;
    for (StoreDefinition d : storeDefs)
        if (d.getName().equals(storeName))
            storeDef = d;
    if (storeDef == null) {
        logger.error("Bootstrap - unknown store: " + storeName);
        throw new BootstrapFailureException("Unknown store '" + storeName + "'.");
    }
    if (logger.isDebugEnabled()) {
        logger.debug(this.cluster.toString(true));
        logger.debug(storeDef.toString());
    }
    boolean repairReads = !storeDef.isView();
    // construct mapping
    Map<Integer, Store<ByteArray, byte[], byte[]>> clientMapping = Maps.newHashMap();
    Map<Integer, NonblockingStore> nonblockingStores = Maps.newHashMap();
    Map<Integer, NonblockingStore> nonblockingSlopStores = Maps.newHashMap();
    Map<Integer, Store<ByteArray, Slop, byte[]>> slopStores = null;
    if (storeDef.hasHintedHandoffStrategyType())
        slopStores = Maps.newHashMap();
    for (Node node : this.cluster.getNodes()) {
        Store<ByteArray, byte[], byte[]> store = getStore(storeDef.getName(), node.getHost(), getPort(node), this.requestFormatType);
        clientMapping.put(node.getId(), store);
        NonblockingStore nonblockingStore = routedStoreFactory.toNonblockingStore(store);
        nonblockingStores.put(node.getId(), nonblockingStore);
        if (slopStores != null) {
            Store<ByteArray, byte[], byte[]> rawSlopStore = getStore("slop", node.getHost(), getPort(node), this.requestFormatType);
            Store<ByteArray, Slop, byte[]> slopStore = SerializingStore.wrap(rawSlopStore, slopKeySerializer, slopValueSerializer, new IdentitySerializer());
            slopStores.put(node.getId(), slopStore);
            nonblockingSlopStores.put(node.getId(), routedStoreFactory.toNonblockingStore(rawSlopStore));
        }
    }
    /*
     * Check if we need to retrieve a reference to the failure detector. For
     * system stores - the FD reference would be passed in.
     */
    FailureDetector failureDetectorRef = fd;
    if (failureDetectorRef == null) {
        failureDetectorRef = getFailureDetector();
    } else {
        logger.debug("Using existing failure detector.");
    }
    this.routedStoreConfig.setRepairReads(repairReads);
    Store<ByteArray, byte[], byte[]> store = routedStoreFactory.create(this.cluster, storeDef, clientMapping, nonblockingStores, slopStores, nonblockingSlopStores, failureDetectorRef, this.routedStoreConfig);
    store = new LoggingStore(store);
    if (isJmxEnabled) {
        StatTrackingStore statStore = new StatTrackingStore(store, this.aggregateStats, this.cachedStoreStats);
        statStore.getStats().registerJmx(identifierString);
        store = statStore;
    }
    if (this.config.isEnableCompressionLayer()) {
        if (storeDef.getKeySerializer().hasCompression() || storeDef.getValueSerializer().hasCompression()) {
            store = new CompressingStore(store, getCompressionStrategy(storeDef.getKeySerializer()), getCompressionStrategy(storeDef.getValueSerializer()));
        }
    }
    /*
     * Initialize the finalStore object only once the store object itself is
     * wrapped by a StatTrackingStore; seems like the finalStore object is
     * redundant?
     */
    Store<K, V, T> finalStore = (Store<K, V, T>) store;
    if (this.config.isEnableSerializationLayer()) {
        Serializer<K> keySerializer = (Serializer<K>) serializerFactory.getSerializer(storeDef.getKeySerializer());
        Serializer<V> valueSerializer = (Serializer<V>) serializerFactory.getSerializer(storeDef.getValueSerializer());
        if (storeDef.isView() && (storeDef.getTransformsSerializer() == null))
            throw new SerializationException("Transforms serializer must be specified with a view ");
        Serializer<T> transformsSerializer = (Serializer<T>) serializerFactory.getSerializer(storeDef.getTransformsSerializer() != null ? storeDef.getTransformsSerializer() : new SerializerDefinition("identity"));
        finalStore = SerializingStore.wrap(store, keySerializer, valueSerializer, transformsSerializer);
    }
    // resolver (if they gave us one)
    if (this.config.isEnableInconsistencyResolvingLayer()) {
        InconsistencyResolver<Versioned<V>> secondaryResolver = resolver == null ? new TimeBasedInconsistencyResolver() : resolver;
        finalStore = new InconsistencyResolvingStore<K, V, T>(finalStore, new ChainedResolver<Versioned<V>>(new VectorClockInconsistencyResolver(), secondaryResolver));
    }
    return finalStore;
}
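getRawStore stacks the client-side layers (logging, JMX stat tracking, optional compression, serialization, inconsistency resolution) on top of the per-node socket stores, and it is where the SerializationException for a view without a transforms serializer originates. A rough usage sketch, assuming the two-argument getRawStore overload declared on StoreClientFactory and placeholder bootstrap URL, store name, and string serializers:

import java.util.List;

import voldemort.client.BootstrapFailureException;
import voldemort.client.ClientConfig;
import voldemort.client.SocketStoreClientFactory;
import voldemort.serialization.SerializationException;
import voldemort.store.Store;
import voldemort.versioning.Versioned;

// Sketch only: "tcp://localhost:6666" and "test-store" are placeholders, and the
// two-argument getRawStore overload plus string key/value serializers are assumed.
public class RawStoreSketch {

    public static void main(String[] args) {
        SocketStoreClientFactory factory = new SocketStoreClientFactory(
                new ClientConfig().setBootstrapUrls("tcp://localhost:6666"));
        try {
            // Builds the layered store described above (logging, stats, compression,
            // serialization, inconsistency resolution) without the StoreClient wrapper.
            Store<String, String, String> rawStore = factory.getRawStore("test-store", null);
            List<Versioned<String>> versions = rawStore.get("some-key", null);
            System.out.println("Versions found: " + versions.size());
        } catch (BootstrapFailureException e) {
            // Raised above when the requested store is not in the bootstrapped stores.xml.
            System.err.println("Unknown store: " + e.getMessage());
        } catch (SerializationException e) {
            // Raised above, for example, when a view store has no transforms serializer.
            System.err.println("Serializer setup failed: " + e.getMessage());
        } finally {
            factory.close();
        }
    }
}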
Use of voldemort.serialization.SerializationException in project voldemort by voldemort.
The class VoldemortAdminTool, method executeQueryKey.
private static void executeQueryKey(final Integer nodeId, AdminClient adminClient, List<String> storeNames, String keyString, String keyFormat) throws IOException {
    // decide queryingNode(s) for Key
    List<Integer> queryingNodes = new ArrayList<Integer>();
    if (nodeId < 0) {
        // means all nodes
        for (Node node : adminClient.getAdminClientCluster().getNodes()) {
            queryingNodes.add(node.getId());
        }
    } else {
        queryingNodes.add(nodeId);
    }
    // get basic info
    List<StoreDefinition> storeDefinitionList = getStoreDefinitions(adminClient, nodeId);
    Map<String, StoreDefinition> storeDefinitions = new HashMap<String, StoreDefinition>();
    for (StoreDefinition storeDef : storeDefinitionList) {
        storeDefinitions.put(storeDef.getName(), storeDef);
    }
    BufferedWriter out = new BufferedWriter(new OutputStreamWriter(System.out));
    // iterate through stores
    for (final String storeName : storeNames) {
        // store definition
        StoreDefinition storeDefinition = storeDefinitions.get(storeName);
        if (storeDefinition == null) {
            throw new StoreNotFoundException("Store " + storeName + " not found");
        }
        out.write("STORE_NAME: " + storeDefinition.getName() + "\n");
        // k-v serializer
        final SerializerDefinition keySerializerDef = storeDefinition.getKeySerializer();
        final SerializerDefinition valueSerializerDef = storeDefinition.getValueSerializer();
        SerializerFactory serializerFactory = new DefaultSerializerFactory();
        @SuppressWarnings("unchecked")
        final Serializer<Object> keySerializer = (Serializer<Object>) serializerFactory.getSerializer(keySerializerDef);
        @SuppressWarnings("unchecked")
        final Serializer<Object> valueSerializer = (Serializer<Object>) serializerFactory.getSerializer(valueSerializerDef);
        // compression strategy
        final CompressionStrategy keyCompressionStrategy;
        final CompressionStrategy valueCompressionStrategy;
        if (keySerializerDef != null && keySerializerDef.hasCompression()) {
            keyCompressionStrategy = new CompressionStrategyFactory().get(keySerializerDef.getCompression());
        } else {
            keyCompressionStrategy = null;
        }
        if (valueSerializerDef != null && valueSerializerDef.hasCompression()) {
            valueCompressionStrategy = new CompressionStrategyFactory().get(valueSerializerDef.getCompression());
        } else {
            valueCompressionStrategy = null;
        }
        if (keyCompressionStrategy == null) {
            out.write("KEY_COMPRESSION_STRATEGY: None\n");
        } else {
            out.write("KEY_COMPRESSION_STRATEGY: " + keyCompressionStrategy.getType() + "\n");
        }
        out.write("KEY_SERIALIZER_NAME: " + keySerializerDef.getName() + "\n");
        for (Map.Entry<Integer, String> entry : keySerializerDef.getAllSchemaInfoVersions().entrySet()) {
            out.write(String.format("KEY_SCHEMA VERSION=%d\n", entry.getKey()));
            out.write("====================================\n");
            out.write(entry.getValue());
            out.write("\n====================================\n");
        }
        out.write("\n");
        if (valueCompressionStrategy == null) {
            out.write("VALUE_COMPRESSION_STRATEGY: None\n");
        } else {
            out.write("VALUE_COMPRESSION_STRATEGY: " + valueCompressionStrategy.getType() + "\n");
        }
        out.write("VALUE_SERIALIZER_NAME: " + valueSerializerDef.getName() + "\n");
        for (Map.Entry<Integer, String> entry : valueSerializerDef.getAllSchemaInfoVersions().entrySet()) {
            out.write(String.format("VALUE_SCHEMA %d\n", entry.getKey()));
            out.write("====================================\n");
            out.write(entry.getValue());
            out.write("\n====================================\n");
        }
        out.write("\n");
        // although the streamingOps support multiple keys, we only query
        // one key here
        ByteArray key;
        try {
            if (keyFormat.equals("readable")) {
                Object keyObject;
                String keySerializerName = keySerializerDef.getName();
                if (isAvroSchema(keySerializerName)) {
                    Schema keySchema = Schema.parse(keySerializerDef.getCurrentSchemaInfo());
                    JsonDecoder decoder = new JsonDecoder(keySchema, keyString);
                    GenericDatumReader<Object> datumReader = new GenericDatumReader<Object>(keySchema);
                    keyObject = datumReader.read(null, decoder);
                } else if (keySerializerName.equals(DefaultSerializerFactory.JSON_SERIALIZER_TYPE_NAME)) {
                    JsonReader jsonReader = new JsonReader(new StringReader(keyString));
                    keyObject = jsonReader.read();
                } else {
                    keyObject = keyString;
                }
                key = new ByteArray(keySerializer.toBytes(keyObject));
            } else {
                key = new ByteArray(ByteUtils.fromHexString(keyString));
            }
        } catch (SerializationException se) {
            System.err.println("Error serializing key " + keyString);
            System.err.println("If this is a JSON key, you need to include escaped quotation marks in the command line if it is a string");
            se.printStackTrace();
            return;
        } catch (DecoderException de) {
            System.err.println("Error decoding key " + keyString);
            de.printStackTrace();
            return;
        } catch (IOException io) {
            System.err.println("Error parsing avro string " + keyString);
            io.printStackTrace();
            return;
        }
        boolean printedKey = false;
        // A Map<> could have been used instead of List<Entry<>> if
        // Versioned supported correct hash codes. Read the comment in
        // Versioned about the issue
        List<Entry<List<Versioned<byte[]>>, List<Integer>>> nodeValues = new ArrayList<Entry<List<Versioned<byte[]>>, List<Integer>>>();
        for (final Integer queryNodeId : queryingNodes) {
            Iterator<QueryKeyResult> iterator;
            iterator = adminClient.streamingOps.queryKeys(queryNodeId, storeName, Arrays.asList(key).iterator());
            final StringWriter stringWriter = new StringWriter();
            QueryKeyResult queryKeyResult = iterator.next();
            if (!printedKey) {
                // de-serialize and write key
                byte[] keyBytes = queryKeyResult.getKey().get();
                Object keyObject = keySerializer.toObject((null == keyCompressionStrategy) ? keyBytes : keyCompressionStrategy.inflate(keyBytes));
                writeVoldKeyOrValueInternal(keyBytes, keySerializer, keyCompressionStrategy, "KEY", out);
                printedKey = true;
            }
            // iterate through, de-serialize and write values
            if (queryKeyResult.hasValues() && queryKeyResult.getValues().size() > 0) {
                int elementId = -1;
                for (int i = 0; i < nodeValues.size(); i++) {
                    if (Objects.equal(nodeValues.get(i).getKey(), queryKeyResult.getValues())) {
                        elementId = i;
                        break;
                    }
                }
                if (elementId == -1) {
                    ArrayList<Integer> nodes = new ArrayList<Integer>();
                    nodes.add(queryNodeId);
                    nodeValues.add(new AbstractMap.SimpleEntry<List<Versioned<byte[]>>, List<Integer>>(queryKeyResult.getValues(), nodes));
                } else {
                    nodeValues.get(elementId).getValue().add(queryNodeId);
                }
                out.write(String.format("\nQueried node %d on store %s\n", queryNodeId, storeName));
                int versionCount = 0;
                if (queryKeyResult.getValues().size() > 1) {
                    out.write("VALUE " + versionCount + "\n");
                }
                for (Versioned<byte[]> versioned : queryKeyResult.getValues()) {
                    // write version
                    VectorClock version = (VectorClock) versioned.getVersion();
                    out.write("VECTOR_CLOCK_BYTE: " + ByteUtils.toHexString(version.toBytes()) + "\n");
                    out.write("VECTOR_CLOCK_TEXT: " + version.toString() + '[' + new Date(version.getTimestamp()).toString() + "]\n");
                    // write value
                    byte[] valueBytes = versioned.getValue();
                    writeVoldKeyOrValueInternal(valueBytes, valueSerializer, valueCompressionStrategy, "VALUE", out);
                    versionCount++;
                }
            } else if (queryKeyResult.hasException()) {
                // exception.
                boolean isInvalidMetadataException = queryKeyResult.getException() instanceof InvalidMetadataException;
                // you are querying only a single node.
                if (!isInvalidMetadataException || queryingNodes.size() == 1) {
                    out.write(String.format("\nNode %d on store %s returned exception\n", queryNodeId, storeName));
                    out.write(queryKeyResult.getException().toString());
                    out.write("\n====================================\n");
                }
            } else {
                if (queryingNodes.size() == 1) {
                    out.write(String.format("\nNode %d on store %s returned NULL\n", queryNodeId, storeName));
                    out.write("\n====================================\n");
                }
            }
            out.flush();
        }
        out.write("\n====================================\n");
        for (Map.Entry<List<Versioned<byte[]>>, List<Integer>> nodeValue : nodeValues) {
            out.write("Nodes with same Value " + Arrays.toString(nodeValue.getValue().toArray()));
            out.write("\n====================================\n");
        }
        if (nodeValues.size() > 1) {
            out.write("\n*** Multiple (" + nodeValues.size() + ") versions of key/value exist for the key ***\n");
        }
        out.flush();
    }
}
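The fragile step in this routine is turning the command-line key into a ByteArray: a "readable" key must be parsed with the store's key serializer before it can be queried, and a mismatch surfaces as the SerializationException handled above. A condensed, hypothetical helper covering only the JSON/string branch (the Avro and hex paths are omitted, and the import paths are assumed):

import java.io.StringReader;

import voldemort.serialization.DefaultSerializerFactory;
import voldemort.serialization.Serializer;
import voldemort.serialization.SerializerDefinition;
import voldemort.serialization.json.JsonReader;
import voldemort.utils.ByteArray;

// Sketch only: condenses the "readable" key branch of executeQueryKey; the
// SerializerDefinition is assumed to come from the store definition, as above.
public class ReadableKeySketch {

    public static ByteArray parseReadableKey(String keyString, SerializerDefinition keySerializerDef) {
        @SuppressWarnings("unchecked")
        Serializer<Object> keySerializer =
                (Serializer<Object>) new DefaultSerializerFactory().getSerializer(keySerializerDef);
        Object keyObject;
        if (keySerializerDef.getName().equals(DefaultSerializerFactory.JSON_SERIALIZER_TYPE_NAME)) {
            // JSON string keys must carry escaped quotation marks on the command line.
            keyObject = new JsonReader(new StringReader(keyString)).read();
        } else {
            keyObject = keyString;
        }
        // toBytes may throw SerializationException if the parsed object does not fit the schema.
        return new ByteArray(keySerializer.toBytes(keyObject));
    }
}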