Search in sources:

Example 6 with AccumuloHiveRow

Use of org.apache.hadoop.hive.accumulo.AccumuloHiveRow in project hive by apache.

From class TestHiveAccumuloTypes, method testBinaryTypes.

@Test
public void testBinaryTypes() throws Exception {
    final String tableName = test.getMethodName(), user = "root", pass = "";
    MockInstance mockInstance = new MockInstance(test.getMethodName());
    Connector conn = mockInstance.getConnector(user, new PasswordToken(pass));
    HiveAccumuloTableInputFormat inputformat = new HiveAccumuloTableInputFormat();
    JobConf conf = new JobConf();
    conf.set(AccumuloSerDeParameters.TABLE_NAME, tableName);
    conf.set(AccumuloSerDeParameters.USE_MOCK_INSTANCE, "true");
    conf.set(AccumuloSerDeParameters.INSTANCE_NAME, test.getMethodName());
    conf.set(AccumuloSerDeParameters.USER_NAME, user);
    conf.set(AccumuloSerDeParameters.USER_PASS, pass);
    // Not used by the mock instance, but required by the input format.
    conf.set(AccumuloSerDeParameters.ZOOKEEPERS, "localhost:2181");
    conf.set(AccumuloSerDeParameters.COLUMN_MAPPINGS, AccumuloHiveConstants.ROWID + ",cf:string,cf:boolean,cf:tinyint,cf:smallint,cf:int,cf:bigint" + ",cf:float,cf:double,cf:decimal,cf:date,cf:timestamp,cf:char,cf:varchar");
    conf.set(serdeConstants.LIST_COLUMNS, "string,string,boolean,tinyint,smallint,int,bigint,float,double,decimal,date,timestamp,char(4),varchar(7)");
    conf.set(serdeConstants.LIST_COLUMN_TYPES, "string,string,boolean,tinyint,smallint,int,bigint,float,double,decimal,date,timestamp,char(4),varchar(7)");
    conf.set(AccumuloSerDeParameters.DEFAULT_STORAGE_TYPE, "binary");
    conn.tableOperations().create(tableName);
    BatchWriterConfig writerConf = new BatchWriterConfig();
    BatchWriter writer = conn.createBatchWriter(tableName, writerConf);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(baos);
    String cf = "cf";
    byte[] cfBytes = cf.getBytes();
    Mutation m = new Mutation("row1");
    // string
    String stringValue = "string";
    JavaStringObjectInspector stringOI = (JavaStringObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.STRING_TYPE_NAME));
    LazyUtils.writePrimitiveUTF8(baos, stringOI.create(stringValue), stringOI, false, (byte) 0, null);
    m.put(cfBytes, "string".getBytes(), baos.toByteArray());
    // boolean
    boolean booleanValue = true;
    baos.reset();
    JavaBooleanObjectInspector booleanOI = (JavaBooleanObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.BOOLEAN_TYPE_NAME));
    LazyUtils.writePrimitive(baos, booleanOI.create(booleanValue), booleanOI);
    m.put(cfBytes, "boolean".getBytes(), baos.toByteArray());
    // tinyint
    byte tinyintValue = -127;
    baos.reset();
    JavaByteObjectInspector byteOI = (JavaByteObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.TINYINT_TYPE_NAME));
    LazyUtils.writePrimitive(baos, tinyintValue, byteOI);
    m.put(cfBytes, "tinyint".getBytes(), baos.toByteArray());
    // smallint
    short smallintValue = Short.MAX_VALUE;
    baos.reset();
    JavaShortObjectInspector shortOI = (JavaShortObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.SMALLINT_TYPE_NAME));
    LazyUtils.writePrimitive(baos, smallintValue, shortOI);
    m.put(cfBytes, "smallint".getBytes(), baos.toByteArray());
    // int
    int intValue = Integer.MAX_VALUE;
    baos.reset();
    JavaIntObjectInspector intOI = (JavaIntObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.INT_TYPE_NAME));
    LazyUtils.writePrimitive(baos, intValue, intOI);
    m.put(cfBytes, "int".getBytes(), baos.toByteArray());
    // bigint
    long bigintValue = Long.MAX_VALUE;
    baos.reset();
    JavaLongObjectInspector longOI = (JavaLongObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.BIGINT_TYPE_NAME));
    LazyUtils.writePrimitive(baos, bigintValue, longOI);
    m.put(cfBytes, "bigint".getBytes(), baos.toByteArray());
    // float
    float floatValue = Float.MAX_VALUE;
    baos.reset();
    JavaFloatObjectInspector floatOI = (JavaFloatObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.FLOAT_TYPE_NAME));
    LazyUtils.writePrimitive(baos, floatValue, floatOI);
    m.put(cfBytes, "float".getBytes(), baos.toByteArray());
    // double
    double doubleValue = Double.MAX_VALUE;
    baos.reset();
    JavaDoubleObjectInspector doubleOI = (JavaDoubleObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.DOUBLE_TYPE_NAME));
    LazyUtils.writePrimitive(baos, doubleValue, doubleOI);
    m.put(cfBytes, "double".getBytes(), baos.toByteArray());
    // decimal
    baos.reset();
    HiveDecimal decimalValue = HiveDecimal.create(65536L);
    HiveDecimalWritable decimalWritable = new HiveDecimalWritable(decimalValue);
    decimalWritable.write(out);
    m.put(cfBytes, "decimal".getBytes(), baos.toByteArray());
    // date
    baos.reset();
    Date now = new Date(System.currentTimeMillis());
    DateWritable dateWritable = new DateWritable(now);
    Date dateValue = dateWritable.get();
    dateWritable.write(out);
    m.put(cfBytes, "date".getBytes(), baos.toByteArray());
    // timestamp
    baos.reset();
    Timestamp timestampValue = new Timestamp(now.getTime());
    ByteStream.Output output = new ByteStream.Output();
    TimestampWritable timestampWritable = new TimestampWritable(new Timestamp(now.getTime()));
    timestampWritable.write(new DataOutputStream(output));
    output.close();
    m.put(cfBytes, "timestamp".getBytes(), output.toByteArray());
    // char
    baos.reset();
    HiveChar charValue = new HiveChar("char", 4);
    JavaHiveCharObjectInspector charOI = (JavaHiveCharObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(new CharTypeInfo(4));
    LazyUtils.writePrimitiveUTF8(baos, charOI.create(charValue), charOI, false, (byte) 0, null);
    m.put(cfBytes, "char".getBytes(), baos.toByteArray());
    baos.reset();
    HiveVarchar varcharValue = new HiveVarchar("varchar", 7);
    JavaHiveVarcharObjectInspector varcharOI = (JavaHiveVarcharObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(new VarcharTypeInfo(7));
    LazyUtils.writePrimitiveUTF8(baos, varcharOI.create(varcharValue), varcharOI, false, (byte) 0, null);
    m.put(cfBytes, "varchar".getBytes(), baos.toByteArray());
    writer.addMutation(m);
    writer.close();
    for (Entry<Key, Value> e : conn.createScanner(tableName, new Authorizations())) {
        System.out.println(e);
    }
    // Create the RecordReader
    FileInputFormat.addInputPath(conf, new Path("unused"));
    InputSplit[] splits = inputformat.getSplits(conf, 0);
    assertEquals(splits.length, 1);
    RecordReader<Text, AccumuloHiveRow> reader = inputformat.getRecordReader(splits[0], conf, null);
    Text key = reader.createKey();
    AccumuloHiveRow value = reader.createValue();
    reader.next(key, value);
    Assert.assertEquals(13, value.getTuples().size());
    ByteArrayRef byteRef = new ByteArrayRef();
    // string
    Text cfText = new Text(cf), cqHolder = new Text();
    cqHolder.set("string");
    byte[] valueBytes = value.getValue(cfText, cqHolder);
    Assert.assertNotNull(valueBytes);
    byteRef.setData(valueBytes);
    LazyStringObjectInspector lazyStringOI = LazyPrimitiveObjectInspectorFactory.getLazyStringObjectInspector(false, (byte) 0);
    LazyString lazyString = (LazyString) LazyFactory.createLazyObject(lazyStringOI);
    lazyString.init(byteRef, 0, valueBytes.length);
    Assert.assertEquals(stringValue, lazyString.getWritableObject().toString());
    // boolean
    cqHolder.set("boolean");
    valueBytes = value.getValue(cfText, cqHolder);
    Assert.assertNotNull(valueBytes);
    byteRef.setData(valueBytes);
    LazyBooleanObjectInspector lazyBooleanOI = (LazyBooleanObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.BOOLEAN_TYPE_NAME));
    LazyBoolean lazyBoolean = (LazyBoolean) LazyFactory.createLazyPrimitiveBinaryClass(lazyBooleanOI);
    lazyBoolean.init(byteRef, 0, valueBytes.length);
    Assert.assertEquals(booleanValue, lazyBoolean.getWritableObject().get());
    // tinyint
    cqHolder.set("tinyint");
    valueBytes = value.getValue(cfText, cqHolder);
    Assert.assertNotNull(valueBytes);
    byteRef.setData(valueBytes);
    LazyByteObjectInspector lazyByteOI = (LazyByteObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.TINYINT_TYPE_NAME));
    LazyByte lazyByte = (LazyByte) LazyFactory.createLazyPrimitiveBinaryClass(lazyByteOI);
    lazyByte.init(byteRef, 0, valueBytes.length);
    Assert.assertEquals(tinyintValue, lazyByte.getWritableObject().get());
    // smallint
    cqHolder.set("smallint");
    valueBytes = value.getValue(cfText, cqHolder);
    Assert.assertNotNull(valueBytes);
    byteRef.setData(valueBytes);
    LazyShortObjectInspector lazyShortOI = (LazyShortObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.SMALLINT_TYPE_NAME));
    LazyShort lazyShort = (LazyShort) LazyFactory.createLazyPrimitiveBinaryClass(lazyShortOI);
    lazyShort.init(byteRef, 0, valueBytes.length);
    Assert.assertEquals(smallintValue, lazyShort.getWritableObject().get());
    // int
    cqHolder.set("int");
    valueBytes = value.getValue(cfText, cqHolder);
    Assert.assertNotNull(valueBytes);
    byteRef.setData(valueBytes);
    LazyIntObjectInspector lazyIntOI = (LazyIntObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.INT_TYPE_NAME));
    LazyInteger lazyInt = (LazyInteger) LazyFactory.createLazyPrimitiveBinaryClass(lazyIntOI);
    lazyInt.init(byteRef, 0, valueBytes.length);
    Assert.assertEquals(intValue, lazyInt.getWritableObject().get());
    // bigint
    cqHolder.set("bigint");
    valueBytes = value.getValue(cfText, cqHolder);
    Assert.assertNotNull(valueBytes);
    byteRef.setData(valueBytes);
    LazyLongObjectInspector lazyLongOI = (LazyLongObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.BIGINT_TYPE_NAME));
    LazyLong lazyLong = (LazyLong) LazyFactory.createLazyPrimitiveBinaryClass(lazyLongOI);
    lazyLong.init(byteRef, 0, valueBytes.length);
    Assert.assertEquals(bigintValue, lazyLong.getWritableObject().get());
    // float
    cqHolder.set("float");
    valueBytes = value.getValue(cfText, cqHolder);
    Assert.assertNotNull(valueBytes);
    byteRef.setData(valueBytes);
    LazyFloatObjectInspector lazyFloatOI = (LazyFloatObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.FLOAT_TYPE_NAME));
    LazyFloat lazyFloat = (LazyFloat) LazyFactory.createLazyPrimitiveBinaryClass(lazyFloatOI);
    lazyFloat.init(byteRef, 0, valueBytes.length);
    Assert.assertEquals(floatValue, lazyFloat.getWritableObject().get(), 0);
    // double
    cqHolder.set("double");
    valueBytes = value.getValue(cfText, cqHolder);
    Assert.assertNotNull(valueBytes);
    byteRef.setData(valueBytes);
    LazyDoubleObjectInspector lazyDoubleOI = (LazyDoubleObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.DOUBLE_TYPE_NAME));
    LazyDouble lazyDouble = (LazyDouble) LazyFactory.createLazyPrimitiveBinaryClass(lazyDoubleOI);
    lazyDouble.init(byteRef, 0, valueBytes.length);
    Assert.assertEquals(doubleValue, lazyDouble.getWritableObject().get(), 0);
    // decimal
    cqHolder.set("decimal");
    valueBytes = value.getValue(cfText, cqHolder);
    Assert.assertNotNull(valueBytes);
    byteRef.setData(valueBytes);
    ByteArrayInputStream bais = new ByteArrayInputStream(valueBytes);
    DataInputStream in = new DataInputStream(bais);
    decimalWritable.readFields(in);
    Assert.assertEquals(decimalValue, decimalWritable.getHiveDecimal());
    // date
    cqHolder.set("date");
    valueBytes = value.getValue(cfText, cqHolder);
    Assert.assertNotNull(valueBytes);
    byteRef.setData(valueBytes);
    bais = new ByteArrayInputStream(valueBytes);
    in = new DataInputStream(bais);
    dateWritable.readFields(in);
    Assert.assertEquals(dateValue, dateWritable.get());
    // timestamp
    cqHolder.set("timestamp");
    valueBytes = value.getValue(cfText, cqHolder);
    Assert.assertNotNull(valueBytes);
    byteRef.setData(valueBytes);
    bais = new ByteArrayInputStream(valueBytes);
    in = new DataInputStream(bais);
    timestampWritable.readFields(in);
    Assert.assertEquals(timestampValue, timestampWritable.getTimestamp());
    // char
    cqHolder.set("char");
    valueBytes = value.getValue(cfText, cqHolder);
    Assert.assertNotNull(valueBytes);
    byteRef.setData(valueBytes);
    LazyHiveCharObjectInspector lazyCharOI = (LazyHiveCharObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(new CharTypeInfo(4));
    LazyHiveChar lazyChar = (LazyHiveChar) LazyFactory.createLazyObject(lazyCharOI);
    lazyChar.init(byteRef, 0, valueBytes.length);
    Assert.assertEquals(charValue, lazyChar.getWritableObject().getHiveChar());
    // varchar
    cqHolder.set("varchar");
    valueBytes = value.getValue(cfText, cqHolder);
    Assert.assertNotNull(valueBytes);
    byteRef.setData(valueBytes);
    LazyHiveVarcharObjectInspector lazyVarcharOI = (LazyHiveVarcharObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(new VarcharTypeInfo(7));
    LazyHiveVarchar lazyVarchar = (LazyHiveVarchar) LazyFactory.createLazyObject(lazyVarcharOI);
    lazyVarchar.init(byteRef, 0, valueBytes.length);
    Assert.assertEquals(varcharValue.toString(), lazyVarchar.getWritableObject().getHiveVarchar().toString());
}
Also used : LazyHiveVarchar(org.apache.hadoop.hive.serde2.lazy.LazyHiveVarchar) VarcharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo) LazyIntObjectInspector(org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyIntObjectInspector) LazyDoubleObjectInspector(org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyDoubleObjectInspector) LazyString(org.apache.hadoop.hive.serde2.lazy.LazyString) AccumuloHiveRow(org.apache.hadoop.hive.accumulo.AccumuloHiveRow) LazyHiveChar(org.apache.hadoop.hive.serde2.lazy.LazyHiveChar) PasswordToken(org.apache.accumulo.core.client.security.tokens.PasswordToken) ByteStream(org.apache.hadoop.hive.serde2.ByteStream) BatchWriterConfig(org.apache.accumulo.core.client.BatchWriterConfig) JobConf(org.apache.hadoop.mapred.JobConf) JavaHiveCharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.JavaHiveCharObjectInspector) LazyShortObjectInspector(org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyShortObjectInspector) Authorizations(org.apache.accumulo.core.security.Authorizations) LazyLong(org.apache.hadoop.hive.serde2.lazy.LazyLong) LazyStringObjectInspector(org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyStringObjectInspector) JavaStringObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.JavaStringObjectInspector) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) CharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo) JavaLongObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.JavaLongObjectInspector) LazyHiveVarcharObjectInspector(org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyHiveVarcharObjectInspector) LazyBoolean(org.apache.hadoop.hive.serde2.lazy.LazyBoolean) LazyLongObjectInspector(org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyLongObjectInspector) LazyByte(org.apache.hadoop.hive.serde2.lazy.LazyByte) JavaHiveVarcharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.JavaHiveVarcharObjectInspector) JavaFloatObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.JavaFloatObjectInspector) ByteArrayRef(org.apache.hadoop.hive.serde2.lazy.ByteArrayRef) ByteArrayInputStream(java.io.ByteArrayInputStream) Value(org.apache.accumulo.core.data.Value) LazyInteger(org.apache.hadoop.hive.serde2.lazy.LazyInteger) Mutation(org.apache.accumulo.core.data.Mutation) LazyDouble(org.apache.hadoop.hive.serde2.lazy.LazyDouble) Key(org.apache.accumulo.core.data.Key) Connector(org.apache.accumulo.core.client.Connector) LazyHiveCharObjectInspector(org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyHiveCharObjectInspector) JavaIntObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.JavaIntObjectInspector) DataOutputStream(java.io.DataOutputStream) HiveChar(org.apache.hadoop.hive.common.type.HiveChar) LazyHiveChar(org.apache.hadoop.hive.serde2.lazy.LazyHiveChar) TimestampWritable(org.apache.hadoop.hive.serde2.io.TimestampWritable) LazyBooleanObjectInspector(org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyBooleanObjectInspector) LazyFloat(org.apache.hadoop.hive.serde2.lazy.LazyFloat) Timestamp(java.sql.Timestamp) LazyTimestamp(org.apache.hadoop.hive.serde2.lazy.LazyTimestamp) JavaDoubleObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.JavaDoubleObjectInspector) LazyShort(org.apache.hadoop.hive.serde2.lazy.LazyShort) LazyString(org.apache.hadoop.hive.serde2.lazy.LazyString) 
MockInstance(org.apache.accumulo.core.client.mock.MockInstance) JavaByteObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.JavaByteObjectInspector) LazyHiveDecimal(org.apache.hadoop.hive.serde2.lazy.LazyHiveDecimal) HiveDecimal(org.apache.hadoop.hive.common.type.HiveDecimal) JavaBooleanObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.JavaBooleanObjectInspector) InputSplit(org.apache.hadoop.mapred.InputSplit) Path(org.apache.hadoop.fs.Path) DateWritable(org.apache.hadoop.hive.serde2.io.DateWritable) Text(org.apache.hadoop.io.Text) ByteArrayOutputStream(java.io.ByteArrayOutputStream) HiveVarchar(org.apache.hadoop.hive.common.type.HiveVarchar) LazyHiveVarchar(org.apache.hadoop.hive.serde2.lazy.LazyHiveVarchar) LazyFloatObjectInspector(org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyFloatObjectInspector) DataInputStream(java.io.DataInputStream) LazyDate(org.apache.hadoop.hive.serde2.lazy.LazyDate) Date(java.sql.Date) LazyByteObjectInspector(org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyByteObjectInspector) BatchWriter(org.apache.accumulo.core.client.BatchWriter) JavaShortObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.JavaShortObjectInspector) Test(org.junit.Test)
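
The test above boils down to one write/read round trip per Hive type: LazyUtils.writePrimitive (or writePrimitiveUTF8) serializes a Java value through its ObjectInspector into the raw bytes stored in the Accumulo cell, and on the read side LazyFactory.createLazyPrimitiveBinaryClass builds a lazy wrapper that decodes those bytes again. Below is a minimal, self-contained sketch of that round trip for a single int, using only the serde2 calls already exercised in the test; the class name BinaryRoundTripSketch is made up for illustration.

import java.io.ByteArrayOutputStream;

import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef;
import org.apache.hadoop.hive.serde2.lazy.LazyFactory;
import org.apache.hadoop.hive.serde2.lazy.LazyInteger;
import org.apache.hadoop.hive.serde2.lazy.LazyUtils;
import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyIntObjectInspector;
import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyPrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.JavaIntObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class BinaryRoundTripSketch {
    public static void main(String[] args) throws Exception {
        // Write side: serialize an int the same way the test fills the "cf:int" cell.
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        JavaIntObjectInspector intOI = (JavaIntObjectInspector) PrimitiveObjectInspectorFactory
            .getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.INT_TYPE_NAME));
        LazyUtils.writePrimitive(baos, Integer.MAX_VALUE, intOI);

        // Read side: wrap the raw bytes in a lazy binary object, as the test does after scanning the row back.
        ByteArrayRef ref = new ByteArrayRef();
        ref.setData(baos.toByteArray());
        LazyIntObjectInspector lazyIntOI = (LazyIntObjectInspector) LazyPrimitiveObjectInspectorFactory
            .getLazyObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.INT_TYPE_NAME));
        LazyInteger lazyInt = (LazyInteger) LazyFactory.createLazyPrimitiveBinaryClass(lazyIntOI);
        lazyInt.init(ref, 0, baos.size());

        System.out.println(lazyInt.getWritableObject().get()); // prints 2147483647
    }
}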

Example 7 with AccumuloHiveRow

Use of org.apache.hadoop.hive.accumulo.AccumuloHiveRow in project hive by apache.

From class TestHiveAccumuloTableInputFormat, method testGetProtectedField.

@Test
public void testGetProtectedField() throws Exception {
    FileInputFormat.addInputPath(conf, new Path("unused"));
    BatchWriterConfig writerConf = new BatchWriterConfig();
    BatchWriter writer = con.createBatchWriter(TEST_TABLE, writerConf);
    Authorizations origAuths = con.securityOperations().getUserAuthorizations(USER);
    con.securityOperations().changeUserAuthorizations(USER, new Authorizations(origAuths.toString() + ",foo"));
    Mutation m = new Mutation("r4");
    m.put(COLUMN_FAMILY, NAME, new ColumnVisibility("foo"), new Value("frank".getBytes()));
    m.put(COLUMN_FAMILY, SID, new ColumnVisibility("foo"), new Value(parseIntBytes("4")));
    m.put(COLUMN_FAMILY, DEGREES, new ColumnVisibility("foo"), new Value(parseDoubleBytes("60.6")));
    m.put(COLUMN_FAMILY, MILLIS, new ColumnVisibility("foo"), new Value(parseLongBytes("777")));
    writer.addMutation(m);
    writer.close();
    conf.set(AccumuloSerDeParameters.AUTHORIZATIONS_KEY, "foo");
    InputSplit[] splits = inputformat.getSplits(conf, 0);
    assertEquals(splits.length, 1);
    RecordReader<Text, AccumuloHiveRow> reader = inputformat.getRecordReader(splits[0], conf, null);
    Text rowId = new Text("r1");
    AccumuloHiveRow row = new AccumuloHiveRow();
    assertTrue(reader.next(rowId, row));
    assertEquals(row.getRowId(), rowId.toString());
    assertTrue(row.hasFamAndQual(COLUMN_FAMILY, NAME));
    assertArrayEquals(row.getValue(COLUMN_FAMILY, NAME), "brian".getBytes());
    rowId = new Text("r2");
    assertTrue(reader.next(rowId, row));
    assertEquals(row.getRowId(), rowId.toString());
    assertTrue(row.hasFamAndQual(COLUMN_FAMILY, NAME));
    assertArrayEquals(row.getValue(COLUMN_FAMILY, NAME), "mark".getBytes());
    rowId = new Text("r3");
    assertTrue(reader.next(rowId, row));
    assertEquals(row.getRowId(), rowId.toString());
    assertTrue(row.hasFamAndQual(COLUMN_FAMILY, NAME));
    assertArrayEquals(row.getValue(COLUMN_FAMILY, NAME), "dennis".getBytes());
    rowId = new Text("r4");
    assertTrue(reader.next(rowId, row));
    assertEquals(row.getRowId(), rowId.toString());
    assertTrue(row.hasFamAndQual(COLUMN_FAMILY, NAME));
    assertArrayEquals(row.getValue(COLUMN_FAMILY, NAME), "frank".getBytes());
    assertFalse(reader.next(rowId, row));
}
Also used : Path(org.apache.hadoop.fs.Path) Authorizations(org.apache.accumulo.core.security.Authorizations) Value(org.apache.accumulo.core.data.Value) BatchWriterConfig(org.apache.accumulo.core.client.BatchWriterConfig) Text(org.apache.hadoop.io.Text) BatchWriter(org.apache.accumulo.core.client.BatchWriter) Mutation(org.apache.accumulo.core.data.Mutation) ColumnVisibility(org.apache.accumulo.core.security.ColumnVisibility) InputSplit(org.apache.hadoop.mapred.InputSplit) AccumuloHiveRow(org.apache.hadoop.hive.accumulo.AccumuloHiveRow) Test(org.junit.Test)
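
The visibility setup is what distinguishes this test from the others: cells written with a ColumnVisibility label are only returned to scans that present a matching authorization, which is why the test first extends the user's authorizations and then passes the label through AccumuloSerDeParameters.AUTHORIZATIONS_KEY. A minimal sketch of that mechanism against a MockInstance follows; the instance, table, and column names here are hypothetical.

import java.util.Map.Entry;

import org.apache.accumulo.core.client.BatchWriter;
import org.apache.accumulo.core.client.BatchWriterConfig;
import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.core.client.Scanner;
import org.apache.accumulo.core.client.mock.MockInstance;
import org.apache.accumulo.core.client.security.tokens.PasswordToken;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Mutation;
import org.apache.accumulo.core.data.Value;
import org.apache.accumulo.core.security.Authorizations;
import org.apache.accumulo.core.security.ColumnVisibility;
import org.apache.hadoop.io.Text;

public class VisibilitySketch {
    public static void main(String[] args) throws Exception {
        Connector conn = new MockInstance("vis-demo").getConnector("root", new PasswordToken(""));
        conn.tableOperations().create("demo");
        // Grant the scanning user the "foo" authorization, mirroring changeUserAuthorizations in the test.
        conn.securityOperations().changeUserAuthorizations("root", new Authorizations("foo"));

        BatchWriter writer = conn.createBatchWriter("demo", new BatchWriterConfig());
        Mutation m = new Mutation("r4");
        // Cells labelled "foo" are hidden from any scan that does not carry the "foo" authorization.
        m.put(new Text("cf"), new Text("name"), new ColumnVisibility("foo"), new Value("frank".getBytes()));
        writer.addMutation(m);
        writer.close();

        // Scanning with the matching authorization makes the protected cell visible again.
        Scanner scanner = conn.createScanner("demo", new Authorizations("foo"));
        for (Entry<Key, Value> e : scanner) {
            System.out.println(e.getKey() + " -> " + e.getValue());
        }
    }
}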

Example 8 with AccumuloHiveRow

Use of org.apache.hadoop.hive.accumulo.AccumuloHiveRow in project hive by apache.

From class TestHiveAccumuloTableInputFormat, method testGetNone.

@Test
public void testGetNone() throws Exception {
    FileInputFormat.addInputPath(conf, new Path("unused"));
    conf.set(AccumuloSerDeParameters.COLUMN_MAPPINGS, "cf:f1");
    InputSplit[] splits = inputformat.getSplits(conf, 0);
    assertEquals(splits.length, 1);
    RecordReader<Text, AccumuloHiveRow> reader = inputformat.getRecordReader(splits[0], conf, null);
    Text rowId = new Text("r1");
    AccumuloHiveRow row = new AccumuloHiveRow();
    row.setRowId("r1");
    assertFalse(reader.next(rowId, row));
}
Also used : Path(org.apache.hadoop.fs.Path) Text(org.apache.hadoop.io.Text) InputSplit(org.apache.hadoop.mapred.InputSplit) AccumuloHiveRow(org.apache.hadoop.hive.accumulo.AccumuloHiveRow) Test(org.junit.Test)

Example 9 with AccumuloHiveRow

Use of org.apache.hadoop.hive.accumulo.AccumuloHiveRow in project hive by apache.

From class TestHiveAccumuloTableInputFormat, method testHiveAccumuloRecord.

@Test
public void testHiveAccumuloRecord() throws Exception {
    FileInputFormat.addInputPath(conf, new Path("unused"));
    InputSplit[] splits = inputformat.getSplits(conf, 0);
    assertEquals(splits.length, 1);
    RecordReader<Text, AccumuloHiveRow> reader = inputformat.getRecordReader(splits[0], conf, null);
    Text rowId = new Text("r1");
    AccumuloHiveRow row = new AccumuloHiveRow();
    row.add(COLUMN_FAMILY.toString(), NAME.toString(), "brian".getBytes());
    row.add(COLUMN_FAMILY.toString(), SID.toString(), parseIntBytes("1"));
    row.add(COLUMN_FAMILY.toString(), DEGREES.toString(), parseDoubleBytes("44.5"));
    row.add(COLUMN_FAMILY.toString(), MILLIS.toString(), parseLongBytes("555"));
    assertTrue(reader.next(rowId, row));
    assertEquals(rowId.toString(), row.getRowId());
    assertTrue(row.hasFamAndQual(COLUMN_FAMILY, NAME));
    assertArrayEquals("brian".getBytes(), row.getValue(COLUMN_FAMILY, NAME));
    assertTrue(row.hasFamAndQual(COLUMN_FAMILY, SID));
    assertArrayEquals(parseIntBytes("1"), row.getValue(COLUMN_FAMILY, SID));
    assertTrue(row.hasFamAndQual(COLUMN_FAMILY, DEGREES));
    assertArrayEquals(parseDoubleBytes("44.5"), row.getValue(COLUMN_FAMILY, DEGREES));
    assertTrue(row.hasFamAndQual(COLUMN_FAMILY, MILLIS));
    assertArrayEquals(parseLongBytes("555"), row.getValue(COLUMN_FAMILY, MILLIS));
}
Also used : Path(org.apache.hadoop.fs.Path) Text(org.apache.hadoop.io.Text) InputSplit(org.apache.hadoop.mapred.InputSplit) AccumuloHiveRow(org.apache.hadoop.hive.accumulo.AccumuloHiveRow) Test(org.junit.Test)
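
For context, AccumuloHiveRow itself is a simple container of (family, qualifier, value) tuples hanging off a row id, and the accessors the tests exercise (setRowId, add, hasFamAndQual, getValue) can be used on their own. A small illustrative sketch follows; the standalone class is not part of the Hive tests.

import org.apache.hadoop.hive.accumulo.AccumuloHiveRow;
import org.apache.hadoop.io.Text;

public class AccumuloHiveRowSketch {
    public static void main(String[] args) {
        AccumuloHiveRow row = new AccumuloHiveRow();
        row.setRowId("r1");
        // Tuples are keyed by column family and qualifier; values are the raw Accumulo cell bytes.
        row.add("cf", "name", "brian".getBytes());

        Text fam = new Text("cf");
        Text qual = new Text("name");
        if (row.hasFamAndQual(fam, qual)) {
            byte[] value = row.getValue(fam, qual);
            System.out.println(row.getRowId() + " -> " + new String(value));
        }
    }
}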

Example 10 with AccumuloHiveRow

Use of org.apache.hadoop.hive.accumulo.AccumuloHiveRow in project hive by apache.

From class TestHiveAccumuloTableInputFormat, method testGetOnlyName.

@Test
public void testGetOnlyName() throws Exception {
    FileInputFormat.addInputPath(conf, new Path("unused"));
    InputSplit[] splits = inputformat.getSplits(conf, 0);
    assertEquals(splits.length, 1);
    RecordReader<Text, AccumuloHiveRow> reader = inputformat.getRecordReader(splits[0], conf, null);
    Text rowId = new Text("r1");
    AccumuloHiveRow row = new AccumuloHiveRow();
    assertTrue(reader.next(rowId, row));
    assertEquals(row.getRowId(), rowId.toString());
    assertTrue(row.hasFamAndQual(COLUMN_FAMILY, NAME));
    assertArrayEquals(row.getValue(COLUMN_FAMILY, NAME), "brian".getBytes());
    rowId = new Text("r2");
    assertTrue(reader.next(rowId, row));
    assertEquals(row.getRowId(), rowId.toString());
    assertTrue(row.hasFamAndQual(COLUMN_FAMILY, NAME));
    assertArrayEquals(row.getValue(COLUMN_FAMILY, NAME), "mark".getBytes());
    rowId = new Text("r3");
    assertTrue(reader.next(rowId, row));
    assertEquals(row.getRowId(), rowId.toString());
    assertTrue(row.hasFamAndQual(COLUMN_FAMILY, NAME));
    assertArrayEquals(row.getValue(COLUMN_FAMILY, NAME), "dennis".getBytes());
    assertFalse(reader.next(rowId, row));
}
Also used : Path(org.apache.hadoop.fs.Path) Text(org.apache.hadoop.io.Text) InputSplit(org.apache.hadoop.mapred.InputSplit) AccumuloHiveRow(org.apache.hadoop.hive.accumulo.AccumuloHiveRow) Test(org.junit.Test)

Aggregations

AccumuloHiveRow (org.apache.hadoop.hive.accumulo.AccumuloHiveRow): 12 usages
Test (org.junit.Test): 12 usages
LazyString (org.apache.hadoop.hive.serde2.lazy.LazyString): 7 usages
Properties (java.util.Properties): 6 usages
Configuration (org.apache.hadoop.conf.Configuration): 6 usages
Path (org.apache.hadoop.fs.Path): 6 usages
LazyAccumuloRow (org.apache.hadoop.hive.accumulo.LazyAccumuloRow): 6 usages
Text (org.apache.hadoop.io.Text): 6 usages
InputSplit (org.apache.hadoop.mapred.InputSplit): 6 usages
Mutation (org.apache.accumulo.core.data.Mutation): 4 usages
BatchWriter (org.apache.accumulo.core.client.BatchWriter): 3 usages
BatchWriterConfig (org.apache.accumulo.core.client.BatchWriterConfig): 3 usages
Value (org.apache.accumulo.core.data.Value): 3 usages
Authorizations (org.apache.accumulo.core.security.Authorizations): 3 usages
ByteArrayRef (org.apache.hadoop.hive.serde2.lazy.ByteArrayRef): 3 usages
ByteArrayOutputStream (java.io.ByteArrayOutputStream): 2 usages
Date (java.sql.Date): 2 usages
Timestamp (java.sql.Timestamp): 2 usages
Connector (org.apache.accumulo.core.client.Connector): 2 usages
MockInstance (org.apache.accumulo.core.client.mock.MockInstance): 2 usages