Usage of org.apache.kafka.timeline.TimelineHashMap in the Apache Kafka project:
the ClientQuotaControlManager class, replay method.
/**
 * Apply a quota record to the in-memory state.
 *
 * @param record A ClientQuotaRecord instance.
 */
public void replay(ClientQuotaRecord record) {
    // Rebuild the entity key from the record's (entityType -> entityName) pairs.
    Map<String, String> entityMap = new HashMap<>(2);
    record.entity().forEach(entityData -> entityMap.put(entityData.entityType(), entityData.entityName()));
    ClientQuotaEntity entity = new ClientQuotaEntity(entityMap);
    TimelineHashMap<String, Double> quotas = clientQuotaData.get(entity);
    if (record.remove()) {
        // Nothing to do if the entity has no quotas at all. (The previous
        // version allocated a TimelineHashMap — registering it with the
        // snapshot registry — only to delete it again on this path.)
        if (quotas != null) {
            quotas.remove(record.key());
            // Drop the entity entry once its last quota is gone.
            if (quotas.size() == 0) {
                clientQuotaData.remove(entity);
            }
        }
    } else {
        // Lazily create the per-entity quota map on first write.
        if (quotas == null) {
            quotas = new TimelineHashMap<>(snapshotRegistry, 0);
            clientQuotaData.put(entity, quotas);
        }
        quotas.put(record.key(), record.value());
    }
}
Usage of org.apache.kafka.timeline.TimelineHashMap in the Apache Kafka project:
the ConfigurationControlManager class, validateAlterConfig method.
/**
 * Check whether applying the given config records to a resource would leave it
 * in a valid state, without mutating any controller state.
 *
 * @param configResource    The resource whose configuration is being altered.
 * @param newRecords        The pending ConfigRecord changes to overlay.
 * @param existenceChecker  Callback that verifies the resource exists.
 * @return ApiError.NONE on success, or the validation failure.
 */
private ApiError validateAlterConfig(ConfigResource configResource, List<ApiMessageAndVersion> newRecords, Consumer<ConfigResource> existenceChecker) {
    // Start from the currently-stored configuration, if any.
    Map<String, String> proposedConfigs = new HashMap<>();
    TimelineHashMap<String, String> currentConfigs = configData.get(configResource);
    if (currentConfigs != null) {
        proposedConfigs.putAll(currentConfigs);
    }
    // Overlay the pending records; a null value means "delete this key".
    for (ApiMessageAndVersion messageAndVersion : newRecords) {
        ConfigRecord configRecord = (ConfigRecord) messageAndVersion.message();
        String newValue = configRecord.value();
        if (newValue == null) {
            proposedConfigs.remove(configRecord.name());
        } else {
            proposedConfigs.put(configRecord.name(), newValue);
        }
    }
    try {
        validator.validate(configResource, proposedConfigs);
        existenceChecker.accept(configResource);
        if (alterConfigPolicy.isPresent()) {
            alterConfigPolicy.get().validate(new RequestMetadata(configResource, proposedConfigs));
        }
    } catch (ConfigException e) {
        // Invalid config values map to a dedicated error code.
        return new ApiError(INVALID_CONFIG, e.getMessage());
    } catch (Throwable e) {
        // Any other failure (including policy rejections) is translated
        // into the corresponding ApiError.
        return ApiError.fromThrowable(e);
    }
    return ApiError.NONE;
}
Usage of org.apache.kafka.timeline.TimelineHashMap in the Apache Kafka project:
the ConfigurationControlManager class, describeConfigs method.
/**
 * Read configurations for a set of resources as of a committed offset.
 *
 * @param lastCommittedOffset  The snapshot epoch to read from.
 * @param resources            Map from resource to the config keys wanted;
 *                             an empty collection requests all keys.
 * @return Per-resource result: the found configs, or an error if the
 *         resource itself failed validation.
 */
public Map<ConfigResource, ResultOrError<Map<String, String>>> describeConfigs(long lastCommittedOffset, Map<ConfigResource, Collection<String>> resources) {
    Map<ConfigResource, ResultOrError<Map<String, String>>> results = new HashMap<>();
    for (Entry<ConfigResource, Collection<String>> requestEntry : resources.entrySet()) {
        ConfigResource resource = requestEntry.getKey();
        // Reject resources that are malformed before reading any data.
        try {
            validator.validate(resource, Collections.emptyMap());
        } catch (Throwable t) {
            results.put(resource, new ResultOrError<>(ApiError.fromThrowable(t)));
            continue;
        }
        Map<String, String> foundConfigs = new HashMap<>();
        TimelineHashMap<String, String> configs = configData.get(resource, lastCommittedOffset);
        if (configs != null) {
            Collection<String> requestedKeys = requestEntry.getValue();
            if (requestedKeys.isEmpty()) {
                // No keys specified: return every config at that offset.
                for (Entry<String, String> config : configs.entrySet(lastCommittedOffset)) {
                    foundConfigs.put(config.getKey(), config.getValue());
                }
            } else {
                // Only return the requested keys that are actually set.
                for (String key : requestedKeys) {
                    String value = configs.get(key, lastCommittedOffset);
                    if (value != null) {
                        foundConfigs.put(key, value);
                    }
                }
            }
        }
        results.put(resource, new ResultOrError<>(foundConfigs));
    }
    return results;
}
Usage of org.apache.kafka.timeline.TimelineHashMap in the Apache Kafka project:
the TimelineHashMapBenchmark class, testAddEntriesInTimelineMap method.
@Benchmark
public Map<Integer, String> testAddEntriesInTimelineMap() {
SnapshotRegistry snapshotRegistry = new SnapshotRegistry(new LogContext());
TimelineHashMap<Integer, String> map = new TimelineHashMap<>(snapshotRegistry, NUM_ENTRIES);
for (int i = 0; i < NUM_ENTRIES; i++) {
int key = (int) (0xffffffff & ((i * 2862933555777941757L) + 3037000493L));
map.put(key, String.valueOf(key));
}
return map;
}
Usage of org.apache.kafka.timeline.TimelineHashMap in the Apache Kafka project:
the TimelineHashMapBenchmark class, testAddEntriesWithSnapshots method.
/**
 * Benchmark inserting NUM_ENTRIES pseudo-random keys into a TimelineHashMap
 * while periodically creating snapshots and pruning old ones, to measure the
 * overhead snapshotting adds to writes.
 *
 * @return The populated map, returned so JMH treats it as consumed.
 */
@Benchmark
public Map<Integer, String> testAddEntriesWithSnapshots() {
    SnapshotRegistry snapshotRegistry = new SnapshotRegistry(new LogContext());
    TimelineHashMap<Integer, String> map = new TimelineHashMap<>(snapshotRegistry, NUM_ENTRIES);
    long epoch = 0;
    int j = 0;
    for (int i = 0; i < NUM_ENTRIES; i++) {
        // Scramble the loop index with an LCG step (Knuth's MMIX multiplier).
        // The (int) cast keeps only the low 32 bits, so no mask is needed:
        // the previous "0xffffffff &" was a no-op because the int literal
        // sign-extends to 0xFFFFFFFFFFFFFFFFL when widened to long.
        int key = (int) ((i * 2862933555777941757L) + 3037000493L);
        // Roughly every 10+ inserts (key-dependent), take a new snapshot and
        // retire snapshots older than 1000 epochs.
        if (j > 10 && key % 3 == 0) {
            snapshotRegistry.deleteSnapshotsUpTo(epoch - 1000);
            snapshotRegistry.getOrCreateSnapshot(epoch);
            j = 0;
        } else {
            j++;
        }
        map.put(key, String.valueOf(key));
        epoch++;
    }
    return map;
}
Aggregations