Use of org.apache.hadoop.io.IntWritable in project hadoop by apache.
Class TestCombineSequenceFileInputFormat, method testFormat.
@Test(timeout = 10000)
public void testFormat() throws Exception {
  JobConf job = new JobConf(conf);
  Reporter reporter = Reporter.NULL;
  Random random = new Random();
  long seed = random.nextLong();
  LOG.info("seed = " + seed);
  random.setSeed(seed);
  localFs.delete(workDir, true);
  FileInputFormat.setInputPaths(job, workDir);
  final int length = 10000;
  final int numFiles = 10;
  // create files with various lengths
  createFiles(length, numFiles, random);
  // create a combine split for the files
  InputFormat<IntWritable, BytesWritable> format =
      new CombineSequenceFileInputFormat<IntWritable, BytesWritable>();
  IntWritable key = new IntWritable();
  BytesWritable value = new BytesWritable();
  for (int i = 0; i < 3; i++) {
    int numSplits = random.nextInt(length / (SequenceFile.SYNC_INTERVAL / 20)) + 1;
    LOG.info("splitting: requesting = " + numSplits);
    InputSplit[] splits = format.getSplits(job, numSplits);
    LOG.info("splitting: got = " + splits.length);
    // we should have a single split as the length is comfortably smaller than
    // the block size
    assertEquals("We got more than one splits!", 1, splits.length);
    InputSplit split = splits[0];
    assertEquals("It should be CombineFileSplit", CombineFileSplit.class, split.getClass());
    // check the split
    BitSet bits = new BitSet(length);
    RecordReader<IntWritable, BytesWritable> reader =
        format.getRecordReader(split, job, reporter);
    try {
      while (reader.next(key, value)) {
        assertFalse("Key in multiple partitions.", bits.get(key.get()));
        bits.set(key.get());
      }
    } finally {
      reader.close();
    }
    assertEquals("Some keys in no partition.", length, bits.cardinality());
  }
}
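The createFiles helper is not shown in this snippet. A hedged sketch of what such a helper could look like, assuming IntWritable keys covering 0..length-1 and random BytesWritable payloads; the method name and the way the key range is split across files are assumptions, not the actual Hadoop test code:

// Hypothetical sketch only: writes numFiles SequenceFiles whose IntWritable keys
// cover 0..length-1, roughly mirroring what the test's createFiles must produce.
private static void createFilesSketch(FileSystem fs, Configuration conf, Path dir,
    int length, int numFiles, Random random) throws IOException {
  int next = 0;
  for (int f = 0; f < numFiles; f++) {
    Path file = new Path(dir, "file_" + f + ".seq");
    SequenceFile.Writer writer =
        SequenceFile.createWriter(fs, conf, file, IntWritable.class, BytesWritable.class);
    try {
      // give each file its share of the key range; the last file takes the remainder
      int rows = (f == numFiles - 1) ? length - next : length / numFiles;
      for (int r = 0; r < rows; r++, next++) {
        byte[] payload = new byte[random.nextInt(10)];
        random.nextBytes(payload);
        writer.append(new IntWritable(next), new BytesWritable(payload));
      }
    } finally {
      writer.close();
    }
  }
}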
Use of org.apache.hadoop.io.IntWritable in project storm by apache.
Class TestHdfsSpout, method createSeqFile.
private static void createSeqFile(FileSystem fs, Path file, int rowCount) throws IOException {
  Configuration conf = new Configuration();
  try {
    if (fs.exists(file)) {
      fs.delete(file, false);
    }
    SequenceFile.Writer w = SequenceFile.createWriter(fs, conf, file, IntWritable.class, Text.class);
    for (int i = 0; i < rowCount; i++) {
      w.append(new IntWritable(i), new Text("line " + i));
    }
    w.close();
    System.out.println("done");
  } catch (IOException e) {
    e.printStackTrace();
  }
}
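Reading the file back follows the same pattern in reverse. A hedged sketch, not part of the storm test, that iterates the IntWritable/Text pairs with SequenceFile.Reader; the method name is an assumption:

// Hypothetical sketch only: reads the IntWritable/Text pairs written above.
private static void readSeqFile(FileSystem fs, Path file) throws IOException {
  Configuration conf = new Configuration();
  SequenceFile.Reader reader = new SequenceFile.Reader(fs, file, conf);
  try {
    IntWritable key = new IntWritable();   // reused across calls to next()
    Text value = new Text();
    while (reader.next(key, value)) {
      System.out.println(key.get() + " -> " + value.toString());
    }
  } finally {
    reader.close();
  }
}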
Use of org.apache.hadoop.io.IntWritable in project hive by apache.
Class MaskTransformer, method getCharArg.
int getCharArg(ObjectInspector[] arguments, int index, int defaultValue) {
  int ret = defaultValue;
  ObjectInspector arg = (arguments != null && arguments.length > index) ? arguments[index] : null;
  if (arg != null) {
    if (arg instanceof WritableConstantIntObjectInspector) {
      IntWritable value = ((WritableConstantIntObjectInspector) arg).getWritableConstantValue();
      if (value != null) {
        ret = value.get();
      }
    } else if (arg instanceof WritableConstantLongObjectInspector) {
      LongWritable value = ((WritableConstantLongObjectInspector) arg).getWritableConstantValue();
      if (value != null) {
        ret = (int) value.get();
      }
    } else if (arg instanceof WritableConstantShortObjectInspector) {
      ShortWritable value = ((WritableConstantShortObjectInspector) arg).getWritableConstantValue();
      if (value != null) {
        ret = value.get();
      }
    } else if (arg instanceof ConstantObjectInspector) {
      Object value = ((ConstantObjectInspector) arg).getWritableConstantValue();
      if (value != null) {
        String strValue = value.toString();
        if (strValue != null && strValue.length() > 0) {
          ret = strValue.charAt(0);
        }
      }
    }
  }
  return ret;
}
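Stripped of Hive's ObjectInspector plumbing, the precedence is: an int, long, or short constant supplies the character code directly, while any other constant (typically a string) contributes the code point of its first character. A hedged standalone sketch of the same idea using only plain Writables; the method name is an assumption, not Hive code:

// Hypothetical sketch only: same fallback logic with plain Writables.
static int charArgSketch(Object constant, int defaultValue) {
  if (constant instanceof IntWritable) {
    return ((IntWritable) constant).get();           // numeric constant used as-is
  }
  if (constant instanceof LongWritable) {
    return (int) ((LongWritable) constant).get();
  }
  if (constant instanceof Text) {
    String s = constant.toString();
    return s.isEmpty() ? defaultValue : s.charAt(0); // first character of a string constant
  }
  return defaultValue;                               // anything else keeps the default
}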
Use of org.apache.hadoop.io.IntWritable in project hive by apache.
Class TestVectorDateExpressions, method compareToUDFWeekOfYearDate.
private void compareToUDFWeekOfYearDate(long t, int y) {
  UDFWeekOfYear udf = new UDFWeekOfYear();
  TimestampWritable tsw = toTimestampWritable(t);
  IntWritable res = udf.evaluate(tsw);
  Assert.assertEquals(res.get(), y);
}
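The IntWritable-specific detail here is that the UDF returns a wrapper, so the primitive must be unwrapped with get() before comparing; passing the wrapper itself to assertEquals would compare an Integer against an IntWritable and fail. A small hedged illustration, not part of the Hive test:

// Hypothetical sketch only: unwrap the IntWritable before comparing.
IntWritable res = new IntWritable(42);
int expected = 42;
Assert.assertEquals(expected, res.get());   // compares two ints, passes
// Assert.assertEquals(expected, res);      // Integer vs IntWritable, would fail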
Use of org.apache.hadoop.io.IntWritable in project hive by apache.
Class TestVectorExpressionWriters, method testStructLong.
private void testStructLong(TypeInfo type) throws HiveException {
  LongColumnVector icv = VectorizedRowGroupGenUtil.generateLongColumnVector(true, false, vectorSize, new Random(10));
  icv.isNull[3] = true;
  LongColumnVector bcv = VectorizedRowGroupGenUtil.generateLongColumnVector(true, false, vectorSize, new Random(10));
  bcv.isNull[2] = true;
  ArrayList<Object>[] values = (ArrayList<Object>[]) new ArrayList[this.vectorSize];
  StructObjectInspector soi = genStructOI();
  VectorExpressionWriter[] vew = VectorExpressionWriterFactory.getExpressionWriters(soi);
  for (int i = 0; i < vectorSize; i++) {
    values[i] = new ArrayList<Object>(2);
    values[i].add(null);
    values[i].add(null);
    vew[0].setValue(values[i], icv, i);
    vew[1].setValue(values[i], bcv, i);
    Object theInt = values[i].get(0);
    if (theInt == null) {
      Assert.assertTrue(icv.isNull[i]);
    } else {
      IntWritable w = (IntWritable) theInt;
      Assert.assertEquals((int) icv.vector[i], w.get());
    }
    Object theBool = values[i].get(1);
    if (theBool == null) {
      Assert.assertTrue(bcv.isNull[i]);
    } else {
      BooleanWritable w = (BooleanWritable) theBool;
      Assert.assertEquals(bcv.vector[i] == 0 ? false : true, w.get());
    }
  }
}
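For the int field, the expression writer materializes each non-null row of the LongColumnVector as an IntWritable. A hedged sketch of that per-row conversion, mirroring the assertions above; the method name is an assumption, not the factory's actual implementation:

// Hypothetical sketch only: one row of a long column becomes an IntWritable or null.
static Object longColumnRowToIntWritable(LongColumnVector col, int row) {
  if (col.isNull[row]) {
    return null;                                  // null mask wins
  }
  return new IntWritable((int) col.vector[row]);  // narrow the long lane to int
}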