Use of org.apache.hadoop.mapred.InputSplit in project Hive by Apache.
From the class TestHiveAccumuloTypes, method testBinaryTypes.
@Test
public void testBinaryTypes() throws Exception {
final String tableName = test.getMethodName(), user = "root", pass = "";
MockInstance mockInstance = new MockInstance(test.getMethodName());
Connector conn = mockInstance.getConnector(user, new PasswordToken(pass));
HiveAccumuloTableInputFormat inputformat = new HiveAccumuloTableInputFormat();
JobConf conf = new JobConf();
conf.set(AccumuloSerDeParameters.TABLE_NAME, tableName);
conf.set(AccumuloSerDeParameters.USE_MOCK_INSTANCE, "true");
conf.set(AccumuloSerDeParameters.INSTANCE_NAME, test.getMethodName());
conf.set(AccumuloSerDeParameters.USER_NAME, user);
conf.set(AccumuloSerDeParameters.USER_PASS, pass);
// not used for mock, but required by the input format
conf.set(AccumuloSerDeParameters.ZOOKEEPERS, "localhost:2181");
conf.set(AccumuloSerDeParameters.COLUMN_MAPPINGS, AccumuloHiveConstants.ROWID + ",cf:string,cf:boolean,cf:tinyint,cf:smallint,cf:int,cf:bigint" + ",cf:float,cf:double,cf:decimal,cf:date,cf:timestamp,cf:char,cf:varchar");
conf.set(serdeConstants.LIST_COLUMNS, "string,string,boolean,tinyint,smallint,int,bigint,float,double,decimal,date,timestamp,char(4),varchar(7)");
conf.set(serdeConstants.LIST_COLUMN_TYPES, "string,string,boolean,tinyint,smallint,int,bigint,float,double,decimal,date,timestamp,char(4),varchar(7)");
conf.set(AccumuloSerDeParameters.DEFAULT_STORAGE_TYPE, "binary");
conn.tableOperations().create(tableName);
BatchWriterConfig writerConf = new BatchWriterConfig();
BatchWriter writer = conn.createBatchWriter(tableName, writerConf);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
DataOutputStream out = new DataOutputStream(baos);
String cf = "cf";
byte[] cfBytes = cf.getBytes();
Mutation m = new Mutation("row1");
// string
String stringValue = "string";
JavaStringObjectInspector stringOI = (JavaStringObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.STRING_TYPE_NAME));
LazyUtils.writePrimitiveUTF8(baos, stringOI.create(stringValue), stringOI, false, (byte) 0, null);
m.put(cfBytes, "string".getBytes(), baos.toByteArray());
// boolean
boolean booleanValue = true;
baos.reset();
JavaBooleanObjectInspector booleanOI = (JavaBooleanObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.BOOLEAN_TYPE_NAME));
LazyUtils.writePrimitive(baos, booleanOI.create(booleanValue), booleanOI);
m.put(cfBytes, "boolean".getBytes(), baos.toByteArray());
// tinyint
byte tinyintValue = -127;
baos.reset();
JavaByteObjectInspector byteOI = (JavaByteObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.TINYINT_TYPE_NAME));
LazyUtils.writePrimitive(baos, tinyintValue, byteOI);
m.put(cfBytes, "tinyint".getBytes(), baos.toByteArray());
// smallint
short smallintValue = Short.MAX_VALUE;
baos.reset();
JavaShortObjectInspector shortOI = (JavaShortObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.SMALLINT_TYPE_NAME));
LazyUtils.writePrimitive(baos, smallintValue, shortOI);
m.put(cfBytes, "smallint".getBytes(), baos.toByteArray());
// int
int intValue = Integer.MAX_VALUE;
baos.reset();
JavaIntObjectInspector intOI = (JavaIntObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.INT_TYPE_NAME));
LazyUtils.writePrimitive(baos, intValue, intOI);
m.put(cfBytes, "int".getBytes(), baos.toByteArray());
// bigint
long bigintValue = Long.MAX_VALUE;
baos.reset();
JavaLongObjectInspector longOI = (JavaLongObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.BIGINT_TYPE_NAME));
LazyUtils.writePrimitive(baos, bigintValue, longOI);
m.put(cfBytes, "bigint".getBytes(), baos.toByteArray());
// float
float floatValue = Float.MAX_VALUE;
baos.reset();
JavaFloatObjectInspector floatOI = (JavaFloatObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.FLOAT_TYPE_NAME));
LazyUtils.writePrimitive(baos, floatValue, floatOI);
m.put(cfBytes, "float".getBytes(), baos.toByteArray());
// double
double doubleValue = Double.MAX_VALUE;
baos.reset();
JavaDoubleObjectInspector doubleOI = (JavaDoubleObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.DOUBLE_TYPE_NAME));
LazyUtils.writePrimitive(baos, doubleValue, doubleOI);
m.put(cfBytes, "double".getBytes(), baos.toByteArray());
// decimal
baos.reset();
HiveDecimal decimalValue = HiveDecimal.create(65536L);
HiveDecimalWritable decimalWritable = new HiveDecimalWritable(decimalValue);
decimalWritable.write(out);
m.put(cfBytes, "decimal".getBytes(), baos.toByteArray());
// date
baos.reset();
Date now = new Date(System.currentTimeMillis());
DateWritable dateWritable = new DateWritable(now);
Date dateValue = dateWritable.get();
dateWritable.write(out);
m.put(cfBytes, "date".getBytes(), baos.toByteArray());
// timestamp
baos.reset();
Timestamp timestampValue = new Timestamp(now.getTime());
ByteStream.Output output = new ByteStream.Output();
TimestampWritable timestampWritable = new TimestampWritable(timestampValue);
timestampWritable.write(new DataOutputStream(output));
output.close();
m.put(cfBytes, "timestamp".getBytes(), output.toByteArray());
// char
baos.reset();
HiveChar charValue = new HiveChar("char", 4);
JavaHiveCharObjectInspector charOI = (JavaHiveCharObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(new CharTypeInfo(4));
LazyUtils.writePrimitiveUTF8(baos, charOI.create(charValue), charOI, false, (byte) 0, null);
m.put(cfBytes, "char".getBytes(), baos.toByteArray());
baos.reset();
HiveVarchar varcharValue = new HiveVarchar("varchar", 7);
JavaHiveVarcharObjectInspector varcharOI = (JavaHiveVarcharObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(new VarcharTypeInfo(7));
LazyUtils.writePrimitiveUTF8(baos, varcharOI.create(varcharValue), varcharOI, false, (byte) 0, null);
m.put(cfBytes, "varchar".getBytes(), baos.toByteArray());
writer.addMutation(m);
writer.close();
for (Entry<Key, Value> e : conn.createScanner(tableName, new Authorizations())) {
System.out.println(e);
}
// Create the RecordReader
FileInputFormat.addInputPath(conf, new Path("unused"));
InputSplit[] splits = inputformat.getSplits(conf, 0);
assertEquals(1, splits.length);
RecordReader<Text, AccumuloHiveRow> reader = inputformat.getRecordReader(splits[0], conf, null);
Text key = reader.createKey();
AccumuloHiveRow value = reader.createValue();
reader.next(key, value);
Assert.assertEquals(13, value.getTuples().size());
ByteArrayRef byteRef = new ByteArrayRef();
// string
Text cfText = new Text(cf), cqHolder = new Text();
cqHolder.set("string");
byte[] valueBytes = value.getValue(cfText, cqHolder);
Assert.assertNotNull(valueBytes);
byteRef.setData(valueBytes);
LazyStringObjectInspector lazyStringOI = LazyPrimitiveObjectInspectorFactory.getLazyStringObjectInspector(false, (byte) 0);
LazyString lazyString = (LazyString) LazyFactory.createLazyObject(lazyStringOI);
lazyString.init(byteRef, 0, valueBytes.length);
Assert.assertEquals(stringValue, lazyString.getWritableObject().toString());
// boolean
cqHolder.set("boolean");
valueBytes = value.getValue(cfText, cqHolder);
Assert.assertNotNull(valueBytes);
byteRef.setData(valueBytes);
LazyBooleanObjectInspector lazyBooleanOI = (LazyBooleanObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.BOOLEAN_TYPE_NAME));
LazyBoolean lazyBoolean = (LazyBoolean) LazyFactory.createLazyPrimitiveBinaryClass(lazyBooleanOI);
lazyBoolean.init(byteRef, 0, valueBytes.length);
Assert.assertEquals(booleanValue, lazyBoolean.getWritableObject().get());
// tinyint
cqHolder.set("tinyint");
valueBytes = value.getValue(cfText, cqHolder);
Assert.assertNotNull(valueBytes);
byteRef.setData(valueBytes);
LazyByteObjectInspector lazyByteOI = (LazyByteObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.TINYINT_TYPE_NAME));
LazyByte lazyByte = (LazyByte) LazyFactory.createLazyPrimitiveBinaryClass(lazyByteOI);
lazyByte.init(byteRef, 0, valueBytes.length);
Assert.assertEquals(tinyintValue, lazyByte.getWritableObject().get());
// smallint
cqHolder.set("smallint");
valueBytes = value.getValue(cfText, cqHolder);
Assert.assertNotNull(valueBytes);
byteRef.setData(valueBytes);
LazyShortObjectInspector lazyShortOI = (LazyShortObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.SMALLINT_TYPE_NAME));
LazyShort lazyShort = (LazyShort) LazyFactory.createLazyPrimitiveBinaryClass(lazyShortOI);
lazyShort.init(byteRef, 0, valueBytes.length);
Assert.assertEquals(smallintValue, lazyShort.getWritableObject().get());
// int
cqHolder.set("int");
valueBytes = value.getValue(cfText, cqHolder);
Assert.assertNotNull(valueBytes);
byteRef.setData(valueBytes);
LazyIntObjectInspector lazyIntOI = (LazyIntObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.INT_TYPE_NAME));
LazyInteger lazyInt = (LazyInteger) LazyFactory.createLazyPrimitiveBinaryClass(lazyIntOI);
lazyInt.init(byteRef, 0, valueBytes.length);
Assert.assertEquals(intValue, lazyInt.getWritableObject().get());
// bigint
cqHolder.set("bigint");
valueBytes = value.getValue(cfText, cqHolder);
Assert.assertNotNull(valueBytes);
byteRef.setData(valueBytes);
LazyLongObjectInspector lazyLongOI = (LazyLongObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.BIGINT_TYPE_NAME));
LazyLong lazyLong = (LazyLong) LazyFactory.createLazyPrimitiveBinaryClass(lazyLongOI);
lazyLong.init(byteRef, 0, valueBytes.length);
Assert.assertEquals(bigintValue, lazyLong.getWritableObject().get());
// float
cqHolder.set("float");
valueBytes = value.getValue(cfText, cqHolder);
Assert.assertNotNull(valueBytes);
byteRef.setData(valueBytes);
LazyFloatObjectInspector lazyFloatOI = (LazyFloatObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.FLOAT_TYPE_NAME));
LazyFloat lazyFloat = (LazyFloat) LazyFactory.createLazyPrimitiveBinaryClass(lazyFloatOI);
lazyFloat.init(byteRef, 0, valueBytes.length);
Assert.assertEquals(floatValue, lazyFloat.getWritableObject().get(), 0);
// double
cqHolder.set("double");
valueBytes = value.getValue(cfText, cqHolder);
Assert.assertNotNull(valueBytes);
byteRef.setData(valueBytes);
LazyDoubleObjectInspector lazyDoubleOI = (LazyDoubleObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.DOUBLE_TYPE_NAME));
LazyDouble lazyDouble = (LazyDouble) LazyFactory.createLazyPrimitiveBinaryClass(lazyDoubleOI);
lazyDouble.init(byteRef, 0, valueBytes.length);
Assert.assertEquals(doubleValue, lazyDouble.getWritableObject().get(), 0);
// decimal
cqHolder.set("decimal");
valueBytes = value.getValue(cfText, cqHolder);
Assert.assertNotNull(valueBytes);
byteRef.setData(valueBytes);
ByteArrayInputStream bais = new ByteArrayInputStream(valueBytes);
DataInputStream in = new DataInputStream(bais);
decimalWritable.readFields(in);
Assert.assertEquals(decimalValue, decimalWritable.getHiveDecimal());
// date
cqHolder.set("date");
valueBytes = value.getValue(cfText, cqHolder);
Assert.assertNotNull(valueBytes);
byteRef.setData(valueBytes);
bais = new ByteArrayInputStream(valueBytes);
in = new DataInputStream(bais);
dateWritable.readFields(in);
Assert.assertEquals(dateValue, dateWritable.get());
// timestamp
cqHolder.set("timestamp");
valueBytes = value.getValue(cfText, cqHolder);
Assert.assertNotNull(valueBytes);
byteRef.setData(valueBytes);
bais = new ByteArrayInputStream(valueBytes);
in = new DataInputStream(bais);
timestampWritable.readFields(in);
Assert.assertEquals(timestampValue, timestampWritable.getTimestamp());
// char
cqHolder.set("char");
valueBytes = value.getValue(cfText, cqHolder);
Assert.assertNotNull(valueBytes);
byteRef.setData(valueBytes);
LazyHiveCharObjectInspector lazyCharOI = (LazyHiveCharObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(new CharTypeInfo(4));
LazyHiveChar lazyChar = (LazyHiveChar) LazyFactory.createLazyObject(lazyCharOI);
lazyChar.init(byteRef, 0, valueBytes.length);
Assert.assertEquals(charValue, lazyChar.getWritableObject().getHiveChar());
// varchar
cqHolder.set("varchar");
valueBytes = value.getValue(cfText, cqHolder);
Assert.assertNotNull(valueBytes);
byteRef.setData(valueBytes);
LazyHiveVarcharObjectInspector lazyVarcharOI = (LazyHiveVarcharObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(new VarcharTypeInfo(7));
LazyHiveVarchar lazyVarchar = (LazyHiveVarchar) LazyFactory.createLazyObject(lazyVarcharOI);
lazyVarchar.init(byteRef, 0, valueBytes.length);
Assert.assertEquals(varcharValue.toString(), lazyVarchar.getWritableObject().getHiveVarchar().toString());
}
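The pattern this test exercises is a binary round trip: on the write side a Java object inspector plus LazyUtils serializes each Hive primitive into the raw bytes stored in the Accumulo value, and on the read side a lazy "binary" wrapper decodes those same bytes. A minimal sketch, reduced to a single int column and using only classes the test above already uses:
// Write side: serialize an int the way the test writes each fixed-width column.
ByteArrayOutputStream baos = new ByteArrayOutputStream();
JavaIntObjectInspector intOI = (JavaIntObjectInspector) PrimitiveObjectInspectorFactory
    .getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.INT_TYPE_NAME));
LazyUtils.writePrimitive(baos, Integer.MAX_VALUE, intOI);
byte[] bytes = baos.toByteArray();
// Read side: wrap the raw bytes and decode them through the lazy binary class.
ByteArrayRef ref = new ByteArrayRef();
ref.setData(bytes);
LazyIntObjectInspector lazyIntOI = (LazyIntObjectInspector) LazyPrimitiveObjectInspectorFactory
    .getLazyObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.INT_TYPE_NAME));
LazyInteger lazyInt = (LazyInteger) LazyFactory.createLazyPrimitiveBinaryClass(lazyIntOI);
lazyInt.init(ref, 0, bytes.length);
assert lazyInt.getWritableObject().get() == Integer.MAX_VALUE;
Variable-width types take different paths, as the test shows: string, char, and varchar go through LazyUtils.writePrimitiveUTF8 and LazyFactory.createLazyObject, while decimal, date, and timestamp are written and read back through their Writable implementations.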
Use of org.apache.hadoop.mapred.InputSplit in project Hive by Apache.
From the class TestHiveAccumuloTableInputFormat, method testGetProtectedField.
@Test
public void testGetProtectedField() throws Exception {
FileInputFormat.addInputPath(conf, new Path("unused"));
BatchWriterConfig writerConf = new BatchWriterConfig();
BatchWriter writer = con.createBatchWriter(TEST_TABLE, writerConf);
Authorizations origAuths = con.securityOperations().getUserAuthorizations(USER);
con.securityOperations().changeUserAuthorizations(USER, new Authorizations(origAuths.toString() + ",foo"));
Mutation m = new Mutation("r4");
m.put(COLUMN_FAMILY, NAME, new ColumnVisibility("foo"), new Value("frank".getBytes()));
m.put(COLUMN_FAMILY, SID, new ColumnVisibility("foo"), new Value(parseIntBytes("4")));
m.put(COLUMN_FAMILY, DEGREES, new ColumnVisibility("foo"), new Value(parseDoubleBytes("60.6")));
m.put(COLUMN_FAMILY, MILLIS, new ColumnVisibility("foo"), new Value(parseLongBytes("777")));
writer.addMutation(m);
writer.close();
conf.set(AccumuloSerDeParameters.AUTHORIZATIONS_KEY, "foo");
InputSplit[] splits = inputformat.getSplits(conf, 0);
assertEquals(1, splits.length);
RecordReader<Text, AccumuloHiveRow> reader = inputformat.getRecordReader(splits[0], conf, null);
Text rowId = new Text("r1");
AccumuloHiveRow row = new AccumuloHiveRow();
assertTrue(reader.next(rowId, row));
assertEquals(row.getRowId(), rowId.toString());
assertTrue(row.hasFamAndQual(COLUMN_FAMILY, NAME));
assertArrayEquals(row.getValue(COLUMN_FAMILY, NAME), "brian".getBytes());
rowId = new Text("r2");
assertTrue(reader.next(rowId, row));
assertEquals(row.getRowId(), rowId.toString());
assertTrue(row.hasFamAndQual(COLUMN_FAMILY, NAME));
assertArrayEquals(row.getValue(COLUMN_FAMILY, NAME), "mark".getBytes());
rowId = new Text("r3");
assertTrue(reader.next(rowId, row));
assertEquals(row.getRowId(), rowId.toString());
assertTrue(row.hasFamAndQual(COLUMN_FAMILY, NAME));
assertArrayEquals(row.getValue(COLUMN_FAMILY, NAME), "dennis".getBytes());
rowId = new Text("r4");
assertTrue(reader.next(rowId, row));
assertEquals(row.getRowId(), rowId.toString());
assertTrue(row.hasFamAndQual(COLUMN_FAMILY, NAME));
assertArrayEquals(row.getValue(COLUMN_FAMILY, NAME), "frank".getBytes());
assertFalse(reader.next(rowId, row));
}
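Two pieces make the visibility-protected row readable: the Accumulo user must hold the authorization, and the job must pass it to the scan via AccumuloSerDeParameters.AUTHORIZATIONS_KEY. A short sketch of just those two steps, assuming the same connector and conf objects as the test:
// Grant the "foo" authorization on top of whatever the user already has...
con.securityOperations().changeUserAuthorizations(USER,
    new Authorizations(origAuths.toString() + ",foo"));
// ...and tell the input format to scan with it.
conf.set(AccumuloSerDeParameters.AUTHORIZATIONS_KEY, "foo");
If AUTHORIZATIONS_KEY were omitted, the row written under new ColumnVisibility("foo") would simply be filtered out of the scan (Accumulo's normal visibility behavior) rather than raising an error.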
Use of org.apache.hadoop.mapred.InputSplit in project Hive by Apache.
From the class TestHiveAccumuloTableInputFormat, method testGetNone.
@Test
public void testGetNone() throws Exception {
FileInputFormat.addInputPath(conf, new Path("unused"));
conf.set(AccumuloSerDeParameters.COLUMN_MAPPINGS, "cf:f1");
InputSplit[] splits = inputformat.getSplits(conf, 0);
assertEquals(1, splits.length);
RecordReader<Text, AccumuloHiveRow> reader = inputformat.getRecordReader(splits[0], conf, null);
Text rowId = new Text("r1");
AccumuloHiveRow row = new AccumuloHiveRow();
row.setRowId("r1");
assertFalse(reader.next(rowId, row));
}
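Because the configured mapping cf:f1 matches no column in the table, the first next() call returns false. A sketch of the standard consumption loop this implies, using only methods the tests already call (plus RecordReader.close() from the mapred API):
Text k = reader.createKey();
AccumuloHiveRow v = reader.createValue();
while (reader.next(k, v)) {
  // never entered here: no tuples survive the cf:f1 column pruning
}
reader.close();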
Use of org.apache.hadoop.mapred.InputSplit in project Hive by Apache.
From the class TestHiveAccumuloTableInputFormat, method testIteratorNotInSplitsCompensation.
@Test
public void testIteratorNotInSplitsCompensation() throws Exception {
FileInputFormat.addInputPath(conf, new Path("unused"));
InputSplit[] splits = inputformat.getSplits(conf, 0);
assertEquals(1, splits.length);
InputSplit split = splits[0];
IteratorSetting is = new IteratorSetting(1, PrimitiveComparisonFilter.FILTER_PREFIX + 1, PrimitiveComparisonFilter.class);
is.addOption(PrimitiveComparisonFilter.P_COMPARE_CLASS, StringCompare.class.getName());
is.addOption(PrimitiveComparisonFilter.COMPARE_OPT_CLASS, Equal.class.getName());
is.addOption(PrimitiveComparisonFilter.CONST_VAL, new String(Base64.encodeBase64(new byte[] { '0' })));
is.addOption(PrimitiveComparisonFilter.COLUMN, "cf:cq");
// Mock out the predicate handler because it's just easier
AccumuloPredicateHandler predicateHandler = Mockito.mock(AccumuloPredicateHandler.class);
Mockito.when(predicateHandler.getIterators(Mockito.any(JobConf.class), Mockito.any(ColumnMapper.class))).thenReturn(Arrays.asList(is));
// Set it on our inputformat
inputformat.predicateHandler = predicateHandler;
inputformat.getRecordReader(split, conf, null);
// The code should account for the bug and update the iterators on the split
List<IteratorSetting> settingsOnSplit = ((HiveAccumuloSplit) split).getSplit().getIterators();
assertEquals(1, settingsOnSplit.size());
assertEquals(is, settingsOnSplit.get(0));
}
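The assertion at the end inspects the iterators directly on the wrapped split; the same cast can be used anywhere the compensation needs to be verified. A small sketch, assuming only the accessors the test itself uses plus the standard IteratorSetting getters:
// HiveAccumuloSplit wraps the Accumulo split that actually carries the iterators.
List<IteratorSetting> settings = ((HiveAccumuloSplit) split).getSplit().getIterators();
for (IteratorSetting s : settings) {
  // getPriority()/getName()/getIteratorClass() are standard IteratorSetting accessors.
  System.out.println(s.getPriority() + " " + s.getName() + " " + s.getIteratorClass());
}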
Use of org.apache.hadoop.mapred.InputSplit in project Hive by Apache.
From the class HiveIndexedInputFormat, method getSplits.
@Override
public InputSplit[] getSplits(JobConf job, int numSplits) throws IOException {
String indexFileStr = job.get(indexFile);
l4j.info("index_file is " + indexFileStr);
List<String> indexFiles = getIndexFiles(indexFileStr);
HiveIndexResult hiveIndexResult = null;
if (indexFiles != null) {
boolean first = true;
StringBuilder newInputPaths = new StringBuilder();
try {
hiveIndexResult = new HiveIndexResult(indexFiles, job);
} catch (HiveException e) {
l4j.error("Unable to read index..");
throw new IOException(e);
}
Set<String> inputFiles = hiveIndexResult.buckets.keySet();
if (inputFiles == null || inputFiles.isEmpty()) {
// return empty splits if index results were empty
return new InputSplit[0];
}
Iterator<String> iter = inputFiles.iterator();
while (iter.hasNext()) {
String path = iter.next();
if (path.trim().equalsIgnoreCase("")) {
continue;
}
if (!first) {
newInputPaths.append(",");
} else {
first = false;
}
newInputPaths.append(path);
}
FileInputFormat.setInputPaths(job, newInputPaths.toString());
} else {
return super.getSplits(job, numSplits);
}
HiveInputSplit[] splits = (HiveInputSplit[]) this.doGetSplits(job, numSplits);
long maxInputSize = HiveConf.getLongVar(job, ConfVars.HIVE_INDEX_COMPACT_QUERY_MAX_SIZE);
if (maxInputSize < 0) {
maxInputSize = Long.MAX_VALUE;
}
SplitFilter filter = new SplitFilter(hiveIndexResult, maxInputSize);
Collection<HiveInputSplit> newSplits = filter.filter(splits);
return newSplits.toArray(new FileSplit[newSplits.size()]);
}
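In short, the method reads the index result, rewrites the job's input paths down to just the buckets the index names, delegates the actual split computation, and then filters the splits through SplitFilter with a size cap taken from HIVE_INDEX_COMPACT_QUERY_MAX_SIZE (negative means no limit, per the check above). A hedged sketch of driving it; the property name for the index file is a placeholder, since the snippet only reads it through the indexFile field whose value is not shown here:
JobConf job = new JobConf();
job.set("hive.index.file", "/tmp/index_result"); // hypothetical key and path
// Cap the total bytes the index-pruned scan may read.
HiveConf.setLongVar(job, ConfVars.HIVE_INDEX_COMPACT_QUERY_MAX_SIZE, 10L * 1024 * 1024);
InputSplit[] splits = new HiveIndexedInputFormat().getSplits(job, 0);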