Use of org.apache.accumulo.core.data.thrift.IterInfo in project accumulo by apache.
From the class IteratorUtil, method loadIterators:
public static <K extends WritableComparable<?>, V extends Writable> SortedKeyValueIterator<K, V> loadIterators(IteratorScope scope, SortedKeyValueIterator<K, V> source, KeyExtent extent, AccumuloConfiguration conf, List<IteratorSetting> iterators, IteratorEnvironment env) throws IOException {
  List<IterInfo> ssiList = new ArrayList<>();
  Map<String, Map<String, String>> ssio = new HashMap<>();
  for (IteratorSetting is : iterators) {
    ssiList.add(new IterInfo(is.getPriority(), is.getIteratorClass(), is.getName()));
    ssio.put(is.getName(), is.getOptions());
  }
  return loadIterators(scope, source, extent, conf, ssiList, ssio, env, true);
}
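As a hedged, illustrative sketch (the iterator name, priority, and class below are chosen for the example, not taken from the snippet), the iterators argument to this overload is typically a list of IteratorSetting objects built like this; the method then flattens them into IterInfo entries and an options map as shown above.

// Illustrative only: a VersioningIterator configured at priority 20 under the name "vers".
IteratorSetting vers = new IteratorSetting(20, "vers", "org.apache.accumulo.core.iterators.user.VersioningIterator");
vers.addOption("maxVersions", "1");
List<IteratorSetting> iterators = Collections.singletonList(vers);
// Passed to loadIterators(scope, source, extent, conf, iterators, env), this list becomes
// a single IterInfo(20, "...VersioningIterator", "vers") plus an options map {vers -> {maxVersions=1}}.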
Use of org.apache.accumulo.core.data.thrift.IterInfo in project accumulo by apache.
From the class IteratorUtil, method parseIterConf:
public static void parseIterConf(IteratorScope scope, List<IterInfo> iters, Map<String, Map<String, String>> allOptions, AccumuloConfiguration conf) {
  final Property scopeProperty = getProperty(scope);
  final String scopePropertyKey = scopeProperty.getKey();
  for (Entry<String, String> entry : conf.getAllPropertiesWithPrefix(scopeProperty).entrySet()) {
    String suffix = entry.getKey().substring(scopePropertyKey.length());
    String[] suffixSplit = suffix.split("\\.", 3);
    if (suffixSplit.length == 1) {
      String[] sa = entry.getValue().split(",");
      int prio = Integer.parseInt(sa[0]);
      String className = sa[1];
      iters.add(new IterInfo(prio, className, suffixSplit[0]));
    } else if (suffixSplit.length == 3 && suffixSplit[1].equals("opt")) {
      String iterName = suffixSplit[0];
      String optName = suffixSplit[2];
      Map<String, String> options = allOptions.get(iterName);
      if (options == null) {
        options = new HashMap<>();
        allOptions.put(iterName, options);
      }
      options.put(optName, entry.getValue());
    } else {
      throw new IllegalArgumentException("Invalid iterator format: " + entry.getKey());
    }
  }
  Collections.sort(iters, new IterInfoComparator());
}
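A hedged sketch of driving this parser directly, assuming a ConfigurationCopy seeded with the per-scope property format table.iterator.<scope>.<name> and table.iterator.<scope>.<name>.opt.<option>; the iterator name and option values are illustrative.

// Illustrative driver: one scan-scope iterator entry plus one option in a ConfigurationCopy.
ConfigurationCopy conf = new ConfigurationCopy();
conf.set("table.iterator.scan.vers", "20,org.apache.accumulo.core.iterators.user.VersioningIterator");
conf.set("table.iterator.scan.vers.opt.maxVersions", "1");

List<IterInfo> iters = new ArrayList<>();
Map<String, Map<String, String>> allOptions = new HashMap<>();
IteratorUtil.parseIterConf(IteratorScope.scan, iters, allOptions, conf);
// iters now holds IterInfo(priority=20, className=...VersioningIterator, iterName=vers),
// sorted by priority, and allOptions.get("vers") maps maxVersions -> "1".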
Use of org.apache.accumulo.core.data.thrift.IterInfo in project accumulo by apache.
From the class IteratorUtil, the loadIterators overload that takes a prebuilt IterInfo collection and a class cache:
public static <K extends WritableComparable<?>, V extends Writable> SortedKeyValueIterator<K, V> loadIterators(SortedKeyValueIterator<K, V> source, Collection<IterInfo> iters, Map<String, Map<String, String>> iterOpts, IteratorEnvironment env, boolean useAccumuloClassLoader, String context, Map<String, Class<? extends SortedKeyValueIterator<K, V>>> classCache) throws IOException {
  // wrap the source in a SynchronizedIterator in case any of the additional configured iterators want to use threading
  SortedKeyValueIterator<K, V> prev = source;
  try {
    for (IterInfo iterInfo : iters) {
      Class<? extends SortedKeyValueIterator<K, V>> clazz = null;
      log.trace("Attempting to load iterator class {}", iterInfo.className);
      if (classCache != null) {
        clazz = classCache.get(iterInfo.className);
        if (clazz == null) {
          clazz = loadClass(useAccumuloClassLoader, context, iterInfo);
          classCache.put(iterInfo.className, clazz);
        }
      } else {
        clazz = loadClass(useAccumuloClassLoader, context, iterInfo);
      }
      SortedKeyValueIterator<K, V> skvi = clazz.newInstance();
      Map<String, String> options = iterOpts.get(iterInfo.iterName);
      if (options == null)
        options = Collections.emptyMap();
      skvi.init(prev, options, env);
      prev = skvi;
    }
  } catch (ClassNotFoundException | IllegalAccessException | InstantiationException e) {
    log.error(e.toString());
    throw new RuntimeException(e);
  }
  return prev;
}
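A hedged sketch of the class-cache path: reusing one cache map across repeated stack construction means each iterator class name is resolved through loadClass at most once. The source, iters, iterOpts, and env variables are assumed to come from the enclosing scan setup and are not defined here.

// Illustrative only: share one class cache across calls (K = Key, V = Value).
Map<String, Class<? extends SortedKeyValueIterator<Key, Value>>> classCache = new HashMap<>();
SortedKeyValueIterator<Key, Value> stack1 =
    IteratorUtil.loadIterators(source, iters, iterOpts, env, true, null, classCache);
// A second call with the same cache skips loadClass for class names already resolved above.
SortedKeyValueIterator<Key, Value> stack2 =
    IteratorUtil.loadIterators(source, iters, iterOpts, env, true, null, classCache);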
Use of org.apache.accumulo.core.data.thrift.IterInfo in project accumulo by apache.
From the class CollectTabletStats, method readFilesUsingIterStack:
private static int readFilesUsingIterStack(VolumeManager fs, ServerConfigurationFactory aconf, List<FileRef> files, Authorizations auths, KeyExtent ke, String[] columns, boolean useTableIterators) throws Exception {
  SortedKeyValueIterator<Key, Value> reader;
  List<SortedKeyValueIterator<Key, Value>> readers = new ArrayList<>(files.size());
  for (FileRef file : files) {
    FileSystem ns = fs.getVolumeByPath(file.path()).getFileSystem();
    readers.add(FileOperations.getInstance().newReaderBuilder().forFile(file.path().toString(), ns, ns.getConf()).withTableConfiguration(aconf.getSystemConfiguration()).build());
  }
  List<IterInfo> emptyIterinfo = Collections.emptyList();
  Map<String, Map<String, String>> emptySsio = Collections.emptyMap();
  TableConfiguration tconf = aconf.getTableConfiguration(ke.getTableId());
  reader = createScanIterator(ke, readers, auths, new byte[] {}, new HashSet<>(), emptyIterinfo, emptySsio, useTableIterators, tconf);
  HashSet<ByteSequence> columnSet = createColumnBSS(columns);
  reader.seek(new Range(ke.getPrevEndRow(), false, ke.getEndRow(), true), columnSet, columnSet.size() == 0 ? false : true);
  int count = 0;
  while (reader.hasTop()) {
    count++;
    reader.next();
  }
  return count;
}
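The seek-then-count loop at the end is the generic SortedKeyValueIterator read pattern. Below is a minimal, self-contained sketch of the same pattern using an in-memory org.apache.accumulo.core.iterators.SortedMapIterator in place of the RFile readers; the data is made up, and the enclosing method is assumed to declare IOException.

// Count all entries visible through an iterator, mirroring the loop above.
TreeMap<Key, Value> data = new TreeMap<>();
data.put(new Key("row1", "cf", "cq"), new Value("v1".getBytes()));
data.put(new Key("row2", "cf", "cq"), new Value("v2".getBytes()));
SortedKeyValueIterator<Key, Value> reader = new SortedMapIterator(data);
reader.seek(new Range(), Collections.<ByteSequence>emptySet(), false);
int count = 0;
while (reader.hasTop()) {
  count++;
  reader.next();
}
// count == 2 for the two entries inserted above.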
Use of org.apache.accumulo.core.data.thrift.IterInfo in project accumulo by apache.
From the class CompactionInfo, method toThrift:
public ActiveCompaction toThrift() {
  CompactionType type;
  if (compactor.hasIMM()) {
    if (compactor.getFilesToCompact().size() > 0)
      type = CompactionType.MERGE;
    else
      type = CompactionType.MINOR;
  } else if (!compactor.willPropogateDeletes()) {
    type = CompactionType.FULL;
  } else {
    type = CompactionType.MAJOR;
  }
  CompactionReason reason;
  if (compactor.hasIMM()) {
    switch (compactor.getMinCReason()) {
      case USER:
        reason = CompactionReason.USER;
        break;
      case CLOSE:
        reason = CompactionReason.CLOSE;
        break;
      case SYSTEM:
      default:
        reason = CompactionReason.SYSTEM;
        break;
    }
  } else {
    switch (compactor.getMajorCompactionReason()) {
      case USER:
        reason = CompactionReason.USER;
        break;
      case CHOP:
        reason = CompactionReason.CHOP;
        break;
      case IDLE:
        reason = CompactionReason.IDLE;
        break;
      case NORMAL:
      default:
        reason = CompactionReason.SYSTEM;
        break;
    }
  }
  List<IterInfo> iiList = new ArrayList<>();
  Map<String, Map<String, String>> iterOptions = new HashMap<>();
  for (IteratorSetting iterSetting : compactor.getIterators()) {
    iiList.add(new IterInfo(iterSetting.getPriority(), iterSetting.getIteratorClass(), iterSetting.getName()));
    iterOptions.put(iterSetting.getName(), iterSetting.getOptions());
  }
  List<String> filesToCompact = new ArrayList<>();
  for (FileRef ref : compactor.getFilesToCompact())
    filesToCompact.add(ref.toString());
  return new ActiveCompaction(compactor.extent.toThrift(), System.currentTimeMillis() - compactor.getStartTime(), filesToCompact, compactor.getOutputFile(), type, reason, localityGroup, entriesRead, entriesWritten, iiList, iterOptions);
}
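For context, a hedged sketch of how these fields typically surface to clients: the admin API's InstanceOperations.getActiveCompactions(...) returns org.apache.accumulo.core.client.admin.ActiveCompaction objects (a different class from the thrift ActiveCompaction built above), with the IterInfo list and option map reassembled into IteratorSetting objects. The connector and tserver variables are assumed to exist.

// Illustrative client-side read of active compactions on one tablet server.
for (ActiveCompaction ac : connector.instanceOperations().getActiveCompactions(tserver)) {
  System.out.println(ac.getType() + " " + ac.getReason() + " -> " + ac.getOutputFile());
  for (IteratorSetting is : ac.getIterators()) {
    System.out.println("  iterator " + is.getName() + ", priority " + is.getPriority() + ", options " + is.getOptions());
  }
}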