Use of org.infinispan.encoding.DataConversion in the infinispan project (by infinispan): class CacheNotifierImpl, method addListenerInternal.
/**
 * Core listener registration used by the addListener variants: validates the
 * listener class, wires each annotated callback method through a
 * filter/converter-aware invocation builder, registers cluster listeners on
 * remote members when required, and replays initial state if a queueing
 * segment handler was installed for this registration id.
 *
 * @param listener            the annotated listener instance
 * @param keyDataConversion   key conversion; null means invoked by a non-encoder cache
 * @param valueDataConversion value conversion; null means invoked by a non-encoder cache
 * @param filter              event filter; may be null, may be an {@code IndexedFilter}
 * @param converter           event converter; may be null
 * @param classLoader         class loader used for listener invocation; may be null
 * @param useStorageFormat    whether events are delivered in storage format
 * @return stage completing when registration, remote cluster-listener
 *         installation and initial-state replay have all finished
 */
private <C> CompletionStage<Void> addListenerInternal(Object listener, DataConversion keyDataConversion, DataConversion valueDataConversion, CacheEventFilter<? super K, ? super V> filter, CacheEventConverter<? super K, ? super V, C> converter, ClassLoader classLoader, boolean useStorageFormat) {
// Fails fast (throws) if the class is not a valid @Listener class.
final Listener l = testListenerClassValidity(listener.getClass());
// Unique id for this registration; also keys the segmentHandler lookup below.
final UUID generatedId = Util.threadLocalRandomUUID();
final CacheMode cacheMode = config.clustering().cacheMode();
FilterIndexingServiceProvider indexingProvider = null;
boolean foundMethods = false;
// We use identity for null as this means it was invoked by a non encoder cache
DataConversion keyConversion = keyDataConversion == null ? DataConversion.IDENTITY_KEY : keyDataConversion;
DataConversion valueConversion = valueDataConversion == null ? DataConversion.IDENTITY_VALUE : valueDataConversion;
Set<Class<? extends Annotation>> filterAnnotations = findListenerCallbacks(listener);
// Indexed filters may be served by a dedicated indexing provider; when one
// matches, events are routed through a delegating builder it supplies.
if (filter instanceof IndexedFilter) {
indexingProvider = findIndexingServiceProvider((IndexedFilter) filter);
if (indexingProvider != null) {
DelegatingCacheInvocationBuilder builder = new DelegatingCacheInvocationBuilder(indexingProvider);
adjustCacheInvocationBuilder(builder, filter, converter, filterAnnotations, l, useStorageFormat, generatedId, keyConversion, valueConversion, classLoader);
foundMethods = validateAndAddListenerInvocations(listener, builder);
builder.registerListenerInvocations();
}
}
// No indexing provider matched: fall back to the plain invocation builder.
if (indexingProvider == null) {
CacheInvocationBuilder builder = new CacheInvocationBuilder();
adjustCacheInvocationBuilder(builder, filter, converter, filterAnnotations, l, useStorageFormat, generatedId, keyConversion, valueConversion, classLoader);
foundMethods = validateAndAddListenerInvocations(listener, builder);
}
CompletionStage<Void> stage = CompletableFutures.completedNull();
// Cluster listeners need extra validation plus registration on remote members.
if (foundMethods && l.clustered()) {
if (l.observation() == Listener.Observation.PRE) {
throw CONTAINER.clusterListenerRegisteredWithOnlyPreEvents(listener.getClass());
} else if (cacheMode.isInvalidation()) {
throw new UnsupportedOperationException("Cluster listeners cannot be used with Invalidation Caches!");
} else if (clusterListenerOnPrimaryOnly()) {
clusterListenerIDs.put(listener, generatedId);
Address ourAddress;
List<Address> members;
if (rpcManager != null) {
ourAddress = rpcManager.getAddress();
members = rpcManager.getMembers();
} else {
// No RPC manager: local-only cache, no cluster view available.
ourAddress = null;
members = null;
}
// If we are the only member don't even worry about sending listeners
if (members != null && members.size() > 1) {
stage = registerClusterListeners(members, generatedId, ourAddress, filter, converter, l, listener, keyDataConversion, valueDataConversion, useStorageFormat);
}
}
}
// If we have a segment listener handler, it means we have to do initial state
QueueingSegmentListener<K, V, ? extends Event<K, V>> handler = segmentHandler.remove(generatedId);
if (handler != null) {
if (log.isTraceEnabled()) {
log.tracef("Listener %s requests initial state for cache", generatedId);
}
// Pipeline of operations that replays existing entries to the new listener.
Collection<IntermediateOperation<?, ?, ?, ?>> intermediateOperations = new ArrayList<>();
// NOTE(review): the encoder entry mapper is added only when BOTH conversions
// are non-identity; if either side alone can require mapping, '||' may be the
// intended operator here — confirm against upstream.
if (keyDataConversion != DataConversion.IDENTITY_KEY && valueDataConversion != DataConversion.IDENTITY_VALUE) {
intermediateOperations.add(new MapOperation<>(EncoderEntryMapper.newCacheEntryMapper(keyDataConversion, valueDataConversion, entryFactory)));
}
// A combined filter/converter is applied in a single step (nulls dropped
// afterwards); otherwise filter and converter run as separate stages.
if (filter instanceof CacheEventFilterConverter && (filter == converter || converter == null)) {
intermediateOperations.add(new MapOperation<>(CacheFilters.converterToFunction(new CacheEventFilterConverterAsKeyValueFilterConverter<>((CacheEventFilterConverter<?, ?, ?>) filter))));
intermediateOperations.add(new FilterOperation<>(CacheFilters.notNullCacheEntryPredicate()));
} else {
if (filter != null) {
intermediateOperations.add(new FilterOperation<>(CacheFilters.predicate(new CacheEventFilterAsKeyValueFilter<>(filter))));
}
if (converter != null) {
intermediateOperations.add(new MapOperation<>(CacheFilters.function(new CacheEventConverterAsConverter<>(converter))));
}
}
stage = handlePublisher(stage, intermediateOperations, handler, generatedId, l, null, null);
}
return stage;
}
Use of org.infinispan.encoding.DataConversion in the infinispan project (by infinispan): class MarshalledValuesFineGrainedTest, method testStoreAsBinaryOnBoth.
/**
 * Verifies that with BINARY storage both key and value are held in the data
 * container in wrapped (marshalled) form, and that the cache's DataConversion
 * instances decode them back to the original objects.
 */
public void testStoreAsBinaryOnBoth() {
   ConfigurationBuilder cfg = new ConfigurationBuilder();
   cfg.memory().storageType(StorageType.BINARY).build();
   ecm = TestCacheManagerFactory.createCacheManager(TestDataSCI.INSTANCE, cfg);
   ecm.getCache().put(key, value);
   // Conversions translating between storage (binary) and application formats.
   DataConversion keyConversion = ecm.getCache().getAdvancedCache().getKeyDataConversion();
   DataConversion valueConversion = ecm.getCache().getAdvancedCache().getValueDataConversion();
   DataContainer<?, ?> container = ecm.getCache().getAdvancedCache().getDataContainer();
   InternalCacheEntry storedEntry = container.iterator().next();
   Object storedKey = storedEntry.getKey();
   Object storedValue = storedEntry.getValue();
   // The stored form must be the marshalled wrapper, not the original object,
   // and converting back from storage must recover the originals.
   assertTrue(storedKey instanceof WrappedBytes);
   assertEquals(keyConversion.fromStorage(storedKey), this.key);
   assertTrue(storedValue instanceof WrappedBytes);
   assertEquals(valueConversion.fromStorage(storedValue), this.value);
}
Use of org.infinispan.encoding.DataConversion in the infinispan project (by infinispan): class CacheImpl, method mergeInternalAsync.
/**
 * Asynchronous merge implementation: resolves the key/value DataConversions,
 * wraps the remapping function in a {@code MergeFunction} and dispatches it
 * as a read-write key command.
 *
 * @param key               key to merge; must not be null
 * @param value             value merged with any existing value; must not be null
 * @param remappingFunction combines the existing and supplied values; must not be null
 * @param metadata          metadata applied to the resulting entry
 * @param flags             flag bit set propagated to the command
 * @param contextBuilder    builder for the invocation context
 * @return future completing with the merged value
 */
CompletableFuture<V> mergeInternalAsync(K key, V value, BiFunction<? super V, ? super V, ? extends V> remappingFunction, Metadata metadata, long flags, ContextBuilder contextBuilder) {
   assertKeyNotNull(key);
   assertValueNotNull(value);
   assertFunctionNotNull(remappingFunction);
   // TODO: Correctly propagate DataConversion objects https://issues.redhat.com/browse/ISPN-11584
   final DataConversion keyConversion;
   final DataConversion valueConversion;
   if (remappingFunction instanceof BiFunctionMapper) {
      // A mapper carries its own conversions; reuse them instead of the cache's.
      BiFunctionMapper mapper = (BiFunctionMapper) remappingFunction;
      keyConversion = mapper.getKeyDataConversion();
      valueConversion = mapper.getValueDataConversion();
   } else {
      keyConversion = encoderCache.running().getKeyDataConversion();
      valueConversion = encoderCache.running().getValueDataConversion();
   }
   ReadWriteKeyCommand<K, V, V> command = commandsFactory.buildReadWriteKeyCommand(key,
         new MergeFunction<>(value, remappingFunction, metadata), keyPartitioner.getSegment(key),
         Params.fromFlagsBitSet(flags), keyConversion, valueConversion);
   return invocationHelper.invokeAsync(contextBuilder, command, 1);
}
Use of org.infinispan.encoding.DataConversion in the infinispan project (by infinispan): class CacheImpl, method mergeInternal.
/**
 * Synchronous merge implementation: resolves the key/value DataConversions,
 * wraps the remapping function in a {@code MergeFunction} and invokes it as
 * a read-write key command, blocking for the result.
 *
 * @param key               key to merge; must not be null
 * @param value             value merged with any existing value; must not be null
 * @param remappingFunction combines the existing and supplied values; must not be null
 * @param metadata          metadata applied to the resulting entry
 * @param flags             flag bit set propagated to the command
 * @param contextBuilder    builder for the invocation context
 * @return the merged value
 */
V mergeInternal(K key, V value, BiFunction<? super V, ? super V, ? extends V> remappingFunction, Metadata metadata, long flags, ContextBuilder contextBuilder) {
   assertKeyNotNull(key);
   assertValueNotNull(value);
   assertFunctionNotNull(remappingFunction);
   // TODO: Correctly propagate DataConversion objects https://issues.redhat.com/browse/ISPN-11584
   final DataConversion keyConversion;
   final DataConversion valueConversion;
   if (remappingFunction instanceof BiFunctionMapper) {
      // A mapper carries its own conversions; reuse them instead of the cache's.
      BiFunctionMapper mapper = (BiFunctionMapper) remappingFunction;
      keyConversion = mapper.getKeyDataConversion();
      valueConversion = mapper.getValueDataConversion();
   } else {
      keyConversion = encoderCache.running().getKeyDataConversion();
      valueConversion = encoderCache.running().getValueDataConversion();
   }
   ReadWriteKeyCommand<K, V, V> command = commandsFactory.buildReadWriteKeyCommand(key,
         new MergeFunction<>(value, remappingFunction, metadata), keyPartitioner.getSegment(key),
         Params.fromFlagsBitSet(flags), keyConversion, valueConversion);
   return invocationHelper.invoke(contextBuilder, command, 1);
}
Use of org.infinispan.encoding.DataConversion in the infinispan project (by infinispan): class DefaultIterationManager, method start.
/**
 * Starts a server-side iteration over the cache, optionally restricted to a
 * segment set and filtered/converted through a named factory, and registers
 * the resulting state under a freshly generated iteration id.
 *
 * @param cache                  cache to iterate
 * @param segments               segments to include, or null for all
 * @param filterConverterFactory name of a KeyValueFilterConverterFactory, or null for no filtering
 * @param filterConverterParams  marshalled factory parameters
 * @param requestValueType       media type requested by the client for values
 * @param batch                  batch size for the iteration
 * @param metadata               whether to include entry metadata
 * @return the registered iteration state
 */
@Override
public IterationState start(AdvancedCache cache, BitSet segments, String filterConverterFactory, List<byte[]> filterConverterParams, MediaType requestValueType, int batch, boolean metadata) {
// Id doubles as the map key and the handle used by the IterationReaper.
String iterationId = Util.threadLocalRandomUUID().toString();
EmbeddedCacheManager cacheManager = SecurityActions.getEmbeddedCacheManager(cache);
EncoderRegistry encoderRegistry = SecurityActions.getGlobalComponentRegistry(cacheManager).getComponent(EncoderRegistry.class);
DataConversion valueDataConversion = cache.getValueDataConversion();
// Converts request-typed filter params back to application objects.
Function<Object, Object> unmarshaller = p -> encoderRegistry.convert(p, requestValueType, APPLICATION_OBJECT);
MediaType storageMediaType = cache.getValueDataConversion().getStorageMediaType();
IterationSegmentsListener segmentListener = new IterationSegmentsListener();
CacheStream<CacheEntry<Object, Object>> stream;
Stream<CacheEntry<Object, Object>> filteredStream;
Function<Object, Object> resultTransformer = Function.identity();
AdvancedCache iterationCache = cache;
if (filterConverterFactory == null) {
// No filter: iterate raw entries, optionally restricted to segments.
stream = cache.cacheEntrySet().stream();
if (segments != null) {
// NOTE(review): return value ignored — assumes filterKeySegments mutates
// the stream in place (Infinispan CacheStream ops return this); confirm.
stream.filterKeySegments(IntSets.from(segments.stream().iterator()));
}
filteredStream = stream.segmentCompletionListener(segmentListener);
} else {
KeyValueFilterConverterFactory factory = getFactory(filterConverterFactory);
KeyValuePair<KeyValueFilterConverter, Boolean> filter = buildFilter(factory, filterConverterParams.toArray(Util.EMPTY_BYTE_ARRAY_ARRAY), unmarshaller);
KeyValueFilterConverter customFilter = filter.getKey();
MediaType filterMediaType = customFilter.format();
// NOTE(review): the cache view is switched only when the filter's media type
// EQUALS the storage type; a negated check (!equals) would seem the natural
// trigger for re-encoding — confirm the intended condition against upstream.
if (filterMediaType != null && filterMediaType.equals(storageMediaType)) {
iterationCache = cache.withMediaType(filterMediaType, filterMediaType);
}
stream = iterationCache.cacheEntrySet().stream();
if (segments != null) {
stream.filterKeySegments(IntSets.from(segments.stream().iterator()));
}
IterationFilter iterationFilter = new IterationFilter(storageMediaType, requestValueType, Optional.of(filter.getKey()));
filteredStream = filterAndConvert(stream.segmentCompletionListener(segmentListener), iterationFilter);
// When the filter ran in storage format but the client asked for a different
// type, results are translated back from storage on the way out.
if (filterMediaType != null && !storageMediaType.equals(requestValueType)) {
resultTransformer = valueDataConversion::fromStorage;
}
}
Iterator<CacheEntry<Object, Object>> iterator = filteredStream.iterator();
DefaultIterationState iterationState = new DefaultIterationState(iterationId, segmentListener, iterator, stream, batch, metadata, resultTransformer, new IterationReaper(this, iterationId));
iterationStateMap.put(iterationId, iterationState);
if (log.isTraceEnabled())
log.tracef("Started iteration %s", iterationId);
return iterationState;
}
Aggregations