Use of org.apache.commons.lang3.tuple.Pair in project hive by Apache.
The class MapJoinOperator, method completeInitializationOp.
@SuppressWarnings("unchecked")
@Override
protected void completeInitializationOp(Object[] os) throws HiveException {
    if (os.length != 0) {
        Pair<MapJoinTableContainer[], MapJoinTableContainerSerDe[]> pair =
            (Pair<MapJoinTableContainer[], MapJoinTableContainerSerDe[]>) os[0];
        boolean spilled = false;
        for (MapJoinTableContainer container : pair.getLeft()) {
            if (container != null) {
                spilled = spilled || container.hasSpill();
            }
        }
        if (spilled) {
            // We can't use the cached table because it has spilled.
            loadHashTable(getExecContext(), MapredContext.get());
        } else {
            if (LOG.isDebugEnabled()) {
                String s = "Using tables from cache: [";
                for (MapJoinTableContainer c : pair.getLeft()) {
                    s += ((c == null) ? "null" : c.getClass().getSimpleName()) + ", ";
                }
                LOG.debug(s + "]");
            }
            // Use the tables from the cache.
            mapJoinTables = pair.getLeft();
            mapJoinTableSerdes = pair.getRight();
        }
        hashTblInitedOnce = true;
    }
    if (this.getExecContext() != null) {
        // Reset the exec context so that initialization of the map operator happens properly.
        this.getExecContext().setLastInputPath(null);
        this.getExecContext().setCurrentInputPath(null);
    }
}
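The interesting part for Pair here is that one cache slot carries two parallel arrays at once. Below is a minimal, self-contained sketch of that pattern; the String[] arrays are hypothetical stand-ins for Hive's MapJoinTableContainer[] and MapJoinTableContainerSerDe[].

import org.apache.commons.lang3.tuple.Pair;

// Minimal sketch of the caching pattern above: two parallel arrays are
// bundled into one Pair so a single cached object (os[0] in the operator)
// can hold both.
public class PairCacheSketch {
    public static void main(String[] args) {
        String[] tables = {"table0", null, "table2"};
        String[] serdes = {"serde0", "serde1", "serde2"};

        // Store both arrays under one value, as the operator does with os[0].
        Pair<String[], String[]> cached = Pair.of(tables, serdes);

        // Unpack on retrieval; null entries are tolerated, as in the loop above.
        for (String t : cached.getLeft()) {
            System.out.println(t == null ? "null" : t);
        }
        String[] restoredSerdes = cached.getRight();
        System.out.println(restoredSerdes.length + " serdes restored");
    }
}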
Use of org.apache.commons.lang3.tuple.Pair in project hive by Apache.
The class Copy, method runFromLocal.
/**
 * Run a COPY FROM LOCAL statement.
 */
public Integer runFromLocal(HplsqlParser.Copy_from_local_stmtContext ctx) {
    trace(ctx, "COPY FROM LOCAL");
    initFileOptions(ctx.copy_file_option());
    HashMap<String, Pair<String, Long>> srcFiles = new HashMap<String, Pair<String, Long>>();
    String src = evalPop(ctx.copy_source(0)).toString();
    String dest = evalPop(ctx.copy_target()).toString();
    int srcItems = ctx.copy_source().size();
    for (int i = 0; i < srcItems; i++) {
        createLocalFileList(srcFiles, evalPop(ctx.copy_source(i)).toString(), null);
    }
    if (info) {
        info(ctx, "Files to copy: " + srcFiles.size() + " (" + Utils.formatSizeInBytes(srcSizeInBytes) + ")");
    }
    if (srcFiles.size() == 0) {
        exec.setHostCode(2);
        return 2;
    }
    timer.start();
    File file = new File();
    FileSystem fs = null;
    int succeed = 0;
    int failed = 0;
    long copiedSize = 0;
    try {
        fs = file.createFs();
        boolean multi = srcFiles.size() > 1;
        for (Map.Entry<String, Pair<String, Long>> i : srcFiles.entrySet()) {
            try {
                Path s = new Path(i.getKey());
                Path d = null;
                if (multi) {
                    String relativePath = i.getValue().getLeft();
                    if (relativePath == null) {
                        d = new Path(dest, s.getName());
                    } else {
                        d = new Path(dest, relativePath + Path.SEPARATOR + s.getName());
                    }
                } else if (srcItems == 1 && i.getKey().endsWith(src)) {
                    // A path to a file was specified (possibly relative), so treat
                    // the target as a file name (hadoop fs -put behavior).
                    d = new Path(dest);
                } else {
                    // A source directory was specified, so treat the target as a directory.
                    d = new Path(dest + Path.SEPARATOR + s.getName());
                }
                fs.copyFromLocalFile(delete, overwrite, s, d);
                succeed++;
                long size = i.getValue().getRight();
                copiedSize += size;
                if (info) {
                    info(ctx, "Copied: " + file.resolvePath(d) + " (" + Utils.formatSizeInBytes(size) + ")");
                }
            } catch (IOException e) {
                failed++;
                if (!ignore) {
                    throw e;
                }
            }
        }
    } catch (IOException e) {
        exec.signal(e);
        exec.setHostCode(1);
        return 1;
    } finally {
        long elapsed = timer.stop();
        if (info) {
            info(ctx, "COPY completed: " + succeed + " succeeded, " + failed + " failed, "
                + timer.format() + ", " + Utils.formatSizeInBytes(copiedSize) + ", "
                + Utils.formatBytesPerSec(copiedSize, elapsed));
        }
        if (failed == 0) {
            exec.setHostCode(0);
        } else {
            exec.setHostCode(1);
        }
        file.close();
    }
    return 0;
}
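Here the Pair value packs the relative path (null for top-level files) together with the file size, keyed by the file's absolute path. The following minimal sketch reproduces that bookkeeping outside HPL/SQL; collectFiles is a hypothetical stand-in for Copy.createLocalFileList.

import java.io.File;
import java.util.HashMap;
import java.util.Map;
import org.apache.commons.lang3.tuple.Pair;

// Minimal sketch of the srcFiles bookkeeping above: each local file is
// keyed by its absolute path, with a Pair carrying the relative path
// (null for top-level files) and the file size in bytes.
public class CopySourceSketch {
    static void collectFiles(Map<String, Pair<String, Long>> out, File dir, String relative) {
        File[] children = dir.listFiles();
        if (children == null) {
            return;
        }
        for (File f : children) {
            if (f.isDirectory()) {
                String rel = (relative == null) ? f.getName() : relative + "/" + f.getName();
                collectFiles(out, f, rel);
            } else {
                out.put(f.getAbsolutePath(), Pair.of(relative, f.length()));
            }
        }
    }

    public static void main(String[] args) {
        Map<String, Pair<String, Long>> srcFiles = new HashMap<>();
        collectFiles(srcFiles, new File("."), null);
        long total = 0;
        for (Pair<String, Long> p : srcFiles.values()) {
            total += p.getRight();
        }
        System.out.println(srcFiles.size() + " files, " + total + " bytes");
    }
}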
Use of org.apache.commons.lang3.tuple.Pair in project deeplearning4j by deeplearning4j.
The class FirstIterationFunctionAdapter, method call.
@Override
public Iterable<Map.Entry<VocabWord, INDArray>> call(Iterator<Tuple2<List<VocabWord>, Long>> pairIter) {
    while (pairIter.hasNext()) {
        // Drain up to batchSize (sentence, cumulative word count) tuples into a batch of Pairs.
        List<Pair<List<VocabWord>, Long>> batch = new ArrayList<>();
        while (pairIter.hasNext() && batch.size() < batchSize) {
            Tuple2<List<VocabWord>, Long> pair = pairIter.next();
            List<VocabWord> vocabWordsList = pair._1();
            Long sentenceCumSumCount = pair._2();
            batch.add(Pair.of(vocabWordsList, sentenceCumSumCount));
        }
        for (int i = 0; i < iterations; i++) {
            for (Pair<List<VocabWord>, Long> pair : batch) {
                List<VocabWord> vocabWordsList = pair.getKey();
                Long sentenceCumSumCount = pair.getValue();
                double currentSentenceAlpha = Math.max(minAlpha,
                    alpha - (alpha - minAlpha) * (sentenceCumSumCount / (double) totalWordCount));
                trainSentence(vocabWordsList, currentSentenceAlpha);
            }
        }
    }
    return indexSyn0VecMap.entrySet();
}
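The adapter converts Spark's Tuple2 elements into commons-lang3 Pairs so they can be batched once and replayed on every iteration. The sketch below isolates that batching and the linear learning-rate decay using plain Java collections; the constants and the List<String> sentences are assumptions for illustration only.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import org.apache.commons.lang3.tuple.Pair;

// Minimal sketch of the batching and learning-rate decay above. Sentences
// are reduced to List<String> and the cumulative word count to a Long;
// alpha, minAlpha, totalWordCount, and batchSize mirror the adapter's fields.
public class BatchAlphaSketch {
    public static void main(String[] args) {
        double alpha = 0.025, minAlpha = 1e-4;
        long totalWordCount = 100;
        int batchSize = 2;

        Iterator<Pair<List<String>, Long>> pairIter = Arrays.asList(
                Pair.of(Arrays.asList("first", "sentence"), 2L),
                Pair.of(Arrays.asList("second", "sentence"), 4L),
                Pair.of(Arrays.asList("third", "sentence"), 6L)).iterator();

        while (pairIter.hasNext()) {
            List<Pair<List<String>, Long>> batch = new ArrayList<>();
            while (pairIter.hasNext() && batch.size() < batchSize) {
                batch.add(pairIter.next());
            }
            for (Pair<List<String>, Long> pair : batch) {
                // Linear decay from alpha toward minAlpha as training progresses.
                double currentSentenceAlpha = Math.max(minAlpha,
                        alpha - (alpha - minAlpha) * (pair.getValue() / (double) totalWordCount));
                System.out.println(pair.getKey() + " -> alpha " + currentSentenceAlpha);
            }
        }
    }
}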
Use of org.apache.commons.lang3.tuple.Pair in project deeplearning4j by deeplearning4j.
The class SecondIterationFunctionAdapter, method call.
@Override
public Iterable<Entry<VocabWord, INDArray>> call(Iterator<Tuple2<List<VocabWord>, Long>> pairIter) {
    this.vocabHolder = VocabHolder.getInstance();
    this.vocabHolder.setSeed(seed, vectorLength);
    if (negative > 0) {
        negativeHolder = NegativeHolder.getInstance();
        negativeHolder.initHolder(vocab, expTable, this.vectorLength);
    }
    while (pairIter.hasNext()) {
        // Same batching as in FirstIterationFunctionAdapter: drain up to batchSize tuples into Pairs.
        List<Pair<List<VocabWord>, Long>> batch = new ArrayList<>();
        while (pairIter.hasNext() && batch.size() < batchSize) {
            Tuple2<List<VocabWord>, Long> pair = pairIter.next();
            List<VocabWord> vocabWordsList = pair._1();
            Long sentenceCumSumCount = pair._2();
            batch.add(Pair.of(vocabWordsList, sentenceCumSumCount));
        }
        for (int i = 0; i < iterations; i++) {
            for (Pair<List<VocabWord>, Long> pair : batch) {
                List<VocabWord> vocabWordsList = pair.getKey();
                Long sentenceCumSumCount = pair.getValue();
                double currentSentenceAlpha = Math.max(minAlpha,
                    alpha - (alpha - minAlpha) * (sentenceCumSumCount / (double) totalWordCount));
                trainSentence(vocabWordsList, currentSentenceAlpha);
            }
        }
    }
    return vocabHolder.getSplit(vocab);
}
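One detail both adapters rely on: org.apache.commons.lang3.tuple.Pair implements java.util.Map.Entry, which is why a batch built with Pair.of(...) can be read back with getKey() and getValue(), equivalents of getLeft() and getRight(). A minimal demonstration:

import org.apache.commons.lang3.tuple.Pair;

// Pair implements Map.Entry, so the left element doubles as the key
// and the right element as the value.
public class PairEntrySketch {
    public static void main(String[] args) {
        Pair<String, Long> p = Pair.of("sentence", 42L);
        System.out.println(p.getLeft().equals(p.getKey()));     // true
        System.out.println(p.getRight().equals(p.getValue()));  // true
    }
}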
Use of org.apache.commons.lang3.tuple.Pair in project PneumaticCraft by MineMaarten.
The class PneumaticRecipeRegistry, method getSingleStack.
public static ItemStack getSingleStack(Object o) {
    if (o instanceof ItemStack) {
        return (ItemStack) o;
    } else {
        // Otherwise the ingredient is an ore-dictionary reference: (ore name, stack size).
        Pair<String, Integer> pair = (Pair) o;
        ItemStack s = OreDictionaryHelper.getOreDictEntries(pair.getKey()).get(0);
        s = s.copy();
        s.stackSize = pair.getValue();
        return s;
    }
}
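A minimal sketch of the same dispatch outside Minecraft: an ingredient is either a concrete value or a Pair of (ore-dictionary name, amount) that must be resolved through a lookup. The lookup method below is a hypothetical stand-in for OreDictionaryHelper.getOreDictEntries.

import java.util.Arrays;
import java.util.List;
import org.apache.commons.lang3.tuple.Pair;

// Minimal sketch of getSingleStack's dispatch: pass a value through
// unchanged, or resolve a (name, amount) Pair via a lookup table.
public class IngredientSketch {
    static List<String> lookup(String oreName) {
        // Hypothetical registry returning all entries for an ore name.
        return Arrays.asList(oreName + "#0", oreName + "#1");
    }

    static String getSingle(Object o) {
        if (o instanceof String) {
            return (String) o;
        }
        @SuppressWarnings("unchecked")
        Pair<String, Integer> pair = (Pair<String, Integer>) o;
        // Take the first registered entry and attach the requested amount.
        return lookup(pair.getKey()).get(0) + " x" + pair.getValue();
    }

    public static void main(String[] args) {
        System.out.println(getSingle("iron_ingot"));
        System.out.println(getSingle(Pair.of("ingotIron", 2)));
    }
}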