
Example 6 with Mutation

Use of org.apache.accumulo.core.data.Mutation in project hive by apache.

From the class TestHiveAccumuloTableOutputFormat, method testBinarySerializationOnStringFallsBackToUtf8.

@Test
public void testBinarySerializationOnStringFallsBackToUtf8() throws Exception {
    Instance inst = new MockInstance(test.getMethodName());
    Connector conn = inst.getConnector("root", new PasswordToken(""));
    HiveAccumuloTableOutputFormat outputFormat = new HiveAccumuloTableOutputFormat();
    String table = test.getMethodName();
    conn.tableOperations().create(table);
    JobConf conf = new JobConf();
    conf.set(AccumuloConnectionParameters.INSTANCE_NAME, inst.getInstanceName());
    conf.set(AccumuloConnectionParameters.USER_NAME, "root");
    conf.set(AccumuloConnectionParameters.USER_PASS, "");
    conf.setBoolean(AccumuloConnectionParameters.USE_MOCK_INSTANCE, true);
    conf.set(AccumuloConnectionParameters.TABLE_NAME, test.getMethodName());
    FileSystem local = FileSystem.getLocal(conf);
    outputFormat.checkOutputSpecs(local, conf);
    RecordWriter<Text, Mutation> recordWriter = outputFormat.getRecordWriter(local, conf, null, null);
    List<String> names = Arrays.asList("row", "col1", "col2");
    List<TypeInfo> types = Arrays.<TypeInfo>asList(TypeInfoFactory.stringTypeInfo, TypeInfoFactory.stringTypeInfo, TypeInfoFactory.stringTypeInfo);
    Properties tableProperties = new Properties();
    tableProperties.setProperty(AccumuloSerDeParameters.COLUMN_MAPPINGS, ":rowID,cf:cq1,cf:cq2");
    tableProperties.setProperty(serdeConstants.FIELD_DELIM, " ");
    tableProperties.setProperty(serdeConstants.LIST_COLUMNS, Joiner.on(',').join(names));
    tableProperties.setProperty(serdeConstants.LIST_COLUMN_TYPES, Joiner.on(',').join(types));
    tableProperties.setProperty(AccumuloSerDeParameters.DEFAULT_STORAGE_TYPE, ColumnEncoding.BINARY.getName());
    AccumuloSerDeParameters accumuloSerDeParams = new AccumuloSerDeParameters(new Configuration(), tableProperties, AccumuloSerDe.class.getSimpleName());
    LazySerDeParameters serDeParams = accumuloSerDeParams.getSerDeParameters();
    AccumuloRowSerializer serializer = new AccumuloRowSerializer(0, serDeParams, accumuloSerDeParams.getColumnMappings(), AccumuloSerDeParameters.DEFAULT_VISIBILITY_LABEL, accumuloSerDeParams.getRowIdFactory());
    TypeInfo stringTypeInfo = TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.STRING_TYPE_NAME);
    LazySimpleStructObjectInspector structOI = (LazySimpleStructObjectInspector) LazyFactory.createLazyStructInspector(Arrays.asList("row", "cq1", "cq2"), Arrays.asList(stringTypeInfo, stringTypeInfo, stringTypeInfo), serDeParams.getSeparators(), serDeParams.getNullSequence(), serDeParams.isLastColumnTakesRest(), serDeParams.isEscaped(), serDeParams.getEscapeChar());
    LazyStruct struct = (LazyStruct) LazyFactory.createLazyObject(structOI);
    ByteArrayRef bytes = new ByteArrayRef();
    bytes.setData("row value1 value2".getBytes());
    struct.init(bytes, 0, bytes.getData().length);
    // Serialize the struct into a mutation
    Mutation m = serializer.serialize(struct, structOI);
    // Write the mutation
    recordWriter.write(new Text(table), m);
    // Close the writer
    recordWriter.close(null);
    Iterator<Entry<Key, Value>> iter = conn.createScanner(table, new Authorizations()).iterator();
    Assert.assertTrue("Iterator did not have an element as expected", iter.hasNext());
    Entry<Key, Value> entry = iter.next();
    Key k = entry.getKey();
    Value v = entry.getValue();
    Assert.assertEquals("row", k.getRow().toString());
    Assert.assertEquals("cf", k.getColumnFamily().toString());
    Assert.assertEquals("cq1", k.getColumnQualifier().toString());
    Assert.assertEquals("", k.getColumnVisibility().toString());
    Assert.assertEquals("value1", new String(v.get()));
    Assert.assertTrue("Iterator did not have an element as expected", iter.hasNext());
    entry = iter.next();
    k = entry.getKey();
    v = entry.getValue();
    Assert.assertEquals("row", k.getRow().toString());
    Assert.assertEquals("cf", k.getColumnFamily().toString());
    Assert.assertEquals("cq2", k.getColumnQualifier().toString());
    Assert.assertEquals("", k.getColumnVisibility().toString());
    Assert.assertEquals("value2", new String(v.get()));
    Assert.assertFalse("Iterator unexpectedly had more data", iter.hasNext());
}
Also used : Connector(org.apache.accumulo.core.client.Connector) Configuration(org.apache.hadoop.conf.Configuration) MockInstance(org.apache.accumulo.core.client.mock.MockInstance) Instance(org.apache.accumulo.core.client.Instance) LazySerDeParameters(org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters) Properties(java.util.Properties) PasswordToken(org.apache.accumulo.core.client.security.tokens.PasswordToken) Entry(java.util.Map.Entry) MockInstance(org.apache.accumulo.core.client.mock.MockInstance) FileSystem(org.apache.hadoop.fs.FileSystem) JobConf(org.apache.hadoop.mapred.JobConf) AccumuloRowSerializer(org.apache.hadoop.hive.accumulo.serde.AccumuloRowSerializer) LazyStruct(org.apache.hadoop.hive.serde2.lazy.LazyStruct) Authorizations(org.apache.accumulo.core.security.Authorizations) LazySimpleStructObjectInspector(org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector) Text(org.apache.hadoop.io.Text) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) AccumuloSerDe(org.apache.hadoop.hive.accumulo.serde.AccumuloSerDe) AccumuloSerDeParameters(org.apache.hadoop.hive.accumulo.serde.AccumuloSerDeParameters) ByteArrayRef(org.apache.hadoop.hive.serde2.lazy.ByteArrayRef) Value(org.apache.accumulo.core.data.Value) Mutation(org.apache.accumulo.core.data.Mutation) Key(org.apache.accumulo.core.data.Key) Test(org.junit.Test)
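
For orientation, the mutation that the assertions above expect can be built by hand with the plain Accumulo client API. The sketch below is not the Hive serializer itself, just the shape of its output: the class name, instance name "sketch", and table name "demo" are invented for illustration, while the row ID and cf:cq1/cf:cq2 cells match what the test asserts.

import java.util.Map.Entry;

import org.apache.accumulo.core.client.BatchWriter;
import org.apache.accumulo.core.client.BatchWriterConfig;
import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.core.client.mock.MockInstance;
import org.apache.accumulo.core.client.security.tokens.PasswordToken;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Mutation;
import org.apache.accumulo.core.data.Value;
import org.apache.accumulo.core.security.Authorizations;
import org.apache.hadoop.io.Text;

public class MutationRoundTripSketch {
    public static void main(String[] args) throws Exception {
        // In-memory Accumulo; instance and table names here are arbitrary.
        Connector conn = new MockInstance("sketch").getConnector("root", new PasswordToken(""));
        conn.tableOperations().create("demo");
        // The shape of the mutation the serializer is expected to emit:
        // row ID "row" (from the ":rowID" mapping), two cells under family "cf".
        Mutation m = new Mutation("row");
        m.put(new Text("cf"), new Text("cq1"), new Value("value1".getBytes()));
        m.put(new Text("cf"), new Text("cq2"), new Value("value2".getBytes()));
        BatchWriter writer = conn.createBatchWriter("demo", new BatchWriterConfig());
        writer.addMutation(m);
        writer.close();
        // Each cell comes back as its own Key/Value pair, just as the test asserts.
        for (Entry<Key, Value> e : conn.createScanner("demo", new Authorizations())) {
            System.out.println(e.getKey() + " -> " + new String(e.getValue().get()));
        }
    }
}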

Example 7 with Mutation

Use of org.apache.accumulo.core.data.Mutation in project hive by apache.

From the class TestHiveAccumuloTypes, method testBinaryTypes.

@Test
public void testBinaryTypes() throws Exception {
    final String tableName = test.getMethodName(), user = "root", pass = "";
    MockInstance mockInstance = new MockInstance(test.getMethodName());
    Connector conn = mockInstance.getConnector(user, new PasswordToken(pass));
    HiveAccumuloTableInputFormat inputformat = new HiveAccumuloTableInputFormat();
    JobConf conf = new JobConf();
    conf.set(AccumuloSerDeParameters.TABLE_NAME, tableName);
    conf.set(AccumuloSerDeParameters.USE_MOCK_INSTANCE, "true");
    conf.set(AccumuloSerDeParameters.INSTANCE_NAME, test.getMethodName());
    conf.set(AccumuloSerDeParameters.USER_NAME, user);
    conf.set(AccumuloSerDeParameters.USER_PASS, pass);
    // not used for mock, but required by input format
    conf.set(AccumuloSerDeParameters.ZOOKEEPERS, "localhost:2181");
    conf.set(AccumuloSerDeParameters.COLUMN_MAPPINGS, AccumuloHiveConstants.ROWID + ",cf:string,cf:boolean,cf:tinyint,cf:smallint,cf:int,cf:bigint" + ",cf:float,cf:double,cf:decimal,cf:date,cf:timestamp,cf:char,cf:varchar");
    conf.set(serdeConstants.LIST_COLUMNS, "string,string,boolean,tinyint,smallint,int,bigint,float,double,decimal,date,timestamp,char(4),varchar(7)");
    conf.set(serdeConstants.LIST_COLUMN_TYPES, "string,string,boolean,tinyint,smallint,int,bigint,float,double,decimal,date,timestamp,char(4),varchar(7)");
    conf.set(AccumuloSerDeParameters.DEFAULT_STORAGE_TYPE, "binary");
    conn.tableOperations().create(tableName);
    BatchWriterConfig writerConf = new BatchWriterConfig();
    BatchWriter writer = conn.createBatchWriter(tableName, writerConf);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(baos);
    String cf = "cf";
    byte[] cfBytes = cf.getBytes();
    Mutation m = new Mutation("row1");
    // string
    String stringValue = "string";
    JavaStringObjectInspector stringOI = (JavaStringObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.STRING_TYPE_NAME));
    LazyUtils.writePrimitiveUTF8(baos, stringOI.create(stringValue), stringOI, false, (byte) 0, null);
    m.put(cfBytes, "string".getBytes(), baos.toByteArray());
    // boolean
    boolean booleanValue = true;
    baos.reset();
    JavaBooleanObjectInspector booleanOI = (JavaBooleanObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.BOOLEAN_TYPE_NAME));
    LazyUtils.writePrimitive(baos, booleanOI.create(booleanValue), booleanOI);
    m.put(cfBytes, "boolean".getBytes(), baos.toByteArray());
    // tinyint
    byte tinyintValue = -127;
    baos.reset();
    JavaByteObjectInspector byteOI = (JavaByteObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.TINYINT_TYPE_NAME));
    LazyUtils.writePrimitive(baos, tinyintValue, byteOI);
    m.put(cfBytes, "tinyint".getBytes(), baos.toByteArray());
    // smallint
    short smallintValue = Short.MAX_VALUE;
    baos.reset();
    JavaShortObjectInspector shortOI = (JavaShortObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.SMALLINT_TYPE_NAME));
    LazyUtils.writePrimitive(baos, smallintValue, shortOI);
    m.put(cfBytes, "smallint".getBytes(), baos.toByteArray());
    // int
    int intValue = Integer.MAX_VALUE;
    baos.reset();
    JavaIntObjectInspector intOI = (JavaIntObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.INT_TYPE_NAME));
    LazyUtils.writePrimitive(baos, intValue, intOI);
    m.put(cfBytes, "int".getBytes(), baos.toByteArray());
    // bigint
    long bigintValue = Long.MAX_VALUE;
    baos.reset();
    JavaLongObjectInspector longOI = (JavaLongObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.BIGINT_TYPE_NAME));
    LazyUtils.writePrimitive(baos, bigintValue, longOI);
    m.put(cfBytes, "bigint".getBytes(), baos.toByteArray());
    // float
    float floatValue = Float.MAX_VALUE;
    baos.reset();
    JavaFloatObjectInspector floatOI = (JavaFloatObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.FLOAT_TYPE_NAME));
    LazyUtils.writePrimitive(baos, floatValue, floatOI);
    m.put(cfBytes, "float".getBytes(), baos.toByteArray());
    // double
    double doubleValue = Double.MAX_VALUE;
    baos.reset();
    JavaDoubleObjectInspector doubleOI = (JavaDoubleObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.DOUBLE_TYPE_NAME));
    LazyUtils.writePrimitive(baos, doubleValue, doubleOI);
    m.put(cfBytes, "double".getBytes(), baos.toByteArray());
    // decimal
    baos.reset();
    HiveDecimal decimalValue = HiveDecimal.create(65536L);
    HiveDecimalWritable decimalWritable = new HiveDecimalWritable(decimalValue);
    decimalWritable.write(out);
    m.put(cfBytes, "decimal".getBytes(), baos.toByteArray());
    // date
    baos.reset();
    Date now = new Date(System.currentTimeMillis());
    DateWritable dateWritable = new DateWritable(now);
    Date dateValue = dateWritable.get();
    dateWritable.write(out);
    m.put(cfBytes, "date".getBytes(), baos.toByteArray());
    // timestamp
    baos.reset();
    Timestamp timestampValue = new Timestamp(now.getTime());
    ByteStream.Output output = new ByteStream.Output();
    TimestampWritable timestampWritable = new TimestampWritable(new Timestamp(now.getTime()));
    timestampWritable.write(new DataOutputStream(output));
    output.close();
    m.put(cfBytes, "timestamp".getBytes(), output.toByteArray());
    // char
    baos.reset();
    HiveChar charValue = new HiveChar("char", 4);
    JavaHiveCharObjectInspector charOI = (JavaHiveCharObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(new CharTypeInfo(4));
    LazyUtils.writePrimitiveUTF8(baos, charOI.create(charValue), charOI, false, (byte) 0, null);
    m.put(cfBytes, "char".getBytes(), baos.toByteArray());
    baos.reset();
    HiveVarchar varcharValue = new HiveVarchar("varchar", 7);
    JavaHiveVarcharObjectInspector varcharOI = (JavaHiveVarcharObjectInspector) PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(new VarcharTypeInfo(7));
    LazyUtils.writePrimitiveUTF8(baos, varcharOI.create(varcharValue), varcharOI, false, (byte) 0, null);
    m.put(cfBytes, "varchar".getBytes(), baos.toByteArray());
    writer.addMutation(m);
    writer.close();
    for (Entry<Key, Value> e : conn.createScanner(tableName, new Authorizations())) {
        System.out.println(e);
    }
    // Create the RecordReader
    FileInputFormat.addInputPath(conf, new Path("unused"));
    InputSplit[] splits = inputformat.getSplits(conf, 0);
    assertEquals(splits.length, 1);
    RecordReader<Text, AccumuloHiveRow> reader = inputformat.getRecordReader(splits[0], conf, null);
    Text key = reader.createKey();
    AccumuloHiveRow value = reader.createValue();
    reader.next(key, value);
    Assert.assertEquals(13, value.getTuples().size());
    ByteArrayRef byteRef = new ByteArrayRef();
    // string
    Text cfText = new Text(cf), cqHolder = new Text();
    cqHolder.set("string");
    byte[] valueBytes = value.getValue(cfText, cqHolder);
    Assert.assertNotNull(valueBytes);
    byteRef.setData(valueBytes);
    LazyStringObjectInspector lazyStringOI = LazyPrimitiveObjectInspectorFactory.getLazyStringObjectInspector(false, (byte) 0);
    LazyString lazyString = (LazyString) LazyFactory.createLazyObject(lazyStringOI);
    lazyString.init(byteRef, 0, valueBytes.length);
    Assert.assertEquals(stringValue, lazyString.getWritableObject().toString());
    // boolean
    cqHolder.set("boolean");
    valueBytes = value.getValue(cfText, cqHolder);
    Assert.assertNotNull(valueBytes);
    byteRef.setData(valueBytes);
    LazyBooleanObjectInspector lazyBooleanOI = (LazyBooleanObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.BOOLEAN_TYPE_NAME));
    LazyBoolean lazyBoolean = (LazyBoolean) LazyFactory.createLazyPrimitiveBinaryClass(lazyBooleanOI);
    lazyBoolean.init(byteRef, 0, valueBytes.length);
    Assert.assertEquals(booleanValue, lazyBoolean.getWritableObject().get());
    // tinyint
    cqHolder.set("tinyint");
    valueBytes = value.getValue(cfText, cqHolder);
    Assert.assertNotNull(valueBytes);
    byteRef.setData(valueBytes);
    LazyByteObjectInspector lazyByteOI = (LazyByteObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.TINYINT_TYPE_NAME));
    LazyByte lazyByte = (LazyByte) LazyFactory.createLazyPrimitiveBinaryClass(lazyByteOI);
    lazyByte.init(byteRef, 0, valueBytes.length);
    Assert.assertEquals(tinyintValue, lazyByte.getWritableObject().get());
    // smallint
    cqHolder.set("smallint");
    valueBytes = value.getValue(cfText, cqHolder);
    Assert.assertNotNull(valueBytes);
    byteRef.setData(valueBytes);
    LazyShortObjectInspector lazyShortOI = (LazyShortObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.SMALLINT_TYPE_NAME));
    LazyShort lazyShort = (LazyShort) LazyFactory.createLazyPrimitiveBinaryClass(lazyShortOI);
    lazyShort.init(byteRef, 0, valueBytes.length);
    Assert.assertEquals(smallintValue, lazyShort.getWritableObject().get());
    // int
    cqHolder.set("int");
    valueBytes = value.getValue(cfText, cqHolder);
    Assert.assertNotNull(valueBytes);
    byteRef.setData(valueBytes);
    LazyIntObjectInspector lazyIntOI = (LazyIntObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.INT_TYPE_NAME));
    LazyInteger lazyInt = (LazyInteger) LazyFactory.createLazyPrimitiveBinaryClass(lazyIntOI);
    lazyInt.init(byteRef, 0, valueBytes.length);
    Assert.assertEquals(intValue, lazyInt.getWritableObject().get());
    // bigint
    cqHolder.set("bigint");
    valueBytes = value.getValue(cfText, cqHolder);
    Assert.assertNotNull(valueBytes);
    byteRef.setData(valueBytes);
    LazyLongObjectInspector lazyLongOI = (LazyLongObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.BIGINT_TYPE_NAME));
    LazyLong lazyLong = (LazyLong) LazyFactory.createLazyPrimitiveBinaryClass(lazyLongOI);
    lazyLong.init(byteRef, 0, valueBytes.length);
    Assert.assertEquals(bigintValue, lazyLong.getWritableObject().get());
    // float
    cqHolder.set("float");
    valueBytes = value.getValue(cfText, cqHolder);
    Assert.assertNotNull(valueBytes);
    byteRef.setData(valueBytes);
    LazyFloatObjectInspector lazyFloatOI = (LazyFloatObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.FLOAT_TYPE_NAME));
    LazyFloat lazyFloat = (LazyFloat) LazyFactory.createLazyPrimitiveBinaryClass(lazyFloatOI);
    lazyFloat.init(byteRef, 0, valueBytes.length);
    Assert.assertEquals(floatValue, lazyFloat.getWritableObject().get(), 0);
    // double
    cqHolder.set("double");
    valueBytes = value.getValue(cfText, cqHolder);
    Assert.assertNotNull(valueBytes);
    byteRef.setData(valueBytes);
    LazyDoubleObjectInspector lazyDoubleOI = (LazyDoubleObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.DOUBLE_TYPE_NAME));
    LazyDouble lazyDouble = (LazyDouble) LazyFactory.createLazyPrimitiveBinaryClass(lazyDoubleOI);
    lazyDouble.init(byteRef, 0, valueBytes.length);
    Assert.assertEquals(doubleValue, lazyDouble.getWritableObject().get(), 0);
    // decimal
    cqHolder.set("decimal");
    valueBytes = value.getValue(cfText, cqHolder);
    Assert.assertNotNull(valueBytes);
    byteRef.setData(valueBytes);
    ByteArrayInputStream bais = new ByteArrayInputStream(valueBytes);
    DataInputStream in = new DataInputStream(bais);
    decimalWritable.readFields(in);
    Assert.assertEquals(decimalValue, decimalWritable.getHiveDecimal());
    // date
    cqHolder.set("date");
    valueBytes = value.getValue(cfText, cqHolder);
    Assert.assertNotNull(valueBytes);
    byteRef.setData(valueBytes);
    bais = new ByteArrayInputStream(valueBytes);
    in = new DataInputStream(bais);
    dateWritable.readFields(in);
    Assert.assertEquals(dateValue, dateWritable.get());
    // timestamp
    cqHolder.set("timestamp");
    valueBytes = value.getValue(cfText, cqHolder);
    Assert.assertNotNull(valueBytes);
    byteRef.setData(valueBytes);
    bais = new ByteArrayInputStream(valueBytes);
    in = new DataInputStream(bais);
    timestampWritable.readFields(in);
    Assert.assertEquals(timestampValue, timestampWritable.getTimestamp());
    // char
    cqHolder.set("char");
    valueBytes = value.getValue(cfText, cqHolder);
    Assert.assertNotNull(valueBytes);
    byteRef.setData(valueBytes);
    LazyHiveCharObjectInspector lazyCharOI = (LazyHiveCharObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(new CharTypeInfo(4));
    LazyHiveChar lazyChar = (LazyHiveChar) LazyFactory.createLazyObject(lazyCharOI);
    lazyChar.init(byteRef, 0, valueBytes.length);
    Assert.assertEquals(charValue, lazyChar.getWritableObject().getHiveChar());
    // varchar
    cqHolder.set("varchar");
    valueBytes = value.getValue(cfText, cqHolder);
    Assert.assertNotNull(valueBytes);
    byteRef.setData(valueBytes);
    LazyHiveVarcharObjectInspector lazyVarcharOI = (LazyHiveVarcharObjectInspector) LazyPrimitiveObjectInspectorFactory.getLazyObjectInspector(new VarcharTypeInfo(7));
    LazyHiveVarchar lazyVarchar = (LazyHiveVarchar) LazyFactory.createLazyObject(lazyVarcharOI);
    lazyVarchar.init(byteRef, 0, valueBytes.length);
    Assert.assertEquals(varcharValue.toString(), lazyVarchar.getWritableObject().getHiveVarchar().toString());
}
Also used : LazyHiveVarchar(org.apache.hadoop.hive.serde2.lazy.LazyHiveVarchar) VarcharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo) LazyIntObjectInspector(org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyIntObjectInspector) LazyDoubleObjectInspector(org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyDoubleObjectInspector) LazyString(org.apache.hadoop.hive.serde2.lazy.LazyString) AccumuloHiveRow(org.apache.hadoop.hive.accumulo.AccumuloHiveRow) LazyHiveChar(org.apache.hadoop.hive.serde2.lazy.LazyHiveChar) PasswordToken(org.apache.accumulo.core.client.security.tokens.PasswordToken) ByteStream(org.apache.hadoop.hive.serde2.ByteStream) BatchWriterConfig(org.apache.accumulo.core.client.BatchWriterConfig) JobConf(org.apache.hadoop.mapred.JobConf) JavaHiveCharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.JavaHiveCharObjectInspector) LazyShortObjectInspector(org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyShortObjectInspector) Authorizations(org.apache.accumulo.core.security.Authorizations) LazyLong(org.apache.hadoop.hive.serde2.lazy.LazyLong) LazyStringObjectInspector(org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyStringObjectInspector) JavaStringObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.JavaStringObjectInspector) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) CharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo) JavaLongObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.JavaLongObjectInspector) LazyHiveVarcharObjectInspector(org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyHiveVarcharObjectInspector) LazyBoolean(org.apache.hadoop.hive.serde2.lazy.LazyBoolean) LazyLongObjectInspector(org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyLongObjectInspector) LazyByte(org.apache.hadoop.hive.serde2.lazy.LazyByte) JavaHiveVarcharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.JavaHiveVarcharObjectInspector) JavaFloatObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.JavaFloatObjectInspector) ByteArrayRef(org.apache.hadoop.hive.serde2.lazy.ByteArrayRef) ByteArrayInputStream(java.io.ByteArrayInputStream) Value(org.apache.accumulo.core.data.Value) LazyInteger(org.apache.hadoop.hive.serde2.lazy.LazyInteger) Mutation(org.apache.accumulo.core.data.Mutation) LazyDouble(org.apache.hadoop.hive.serde2.lazy.LazyDouble) Key(org.apache.accumulo.core.data.Key) Connector(org.apache.accumulo.core.client.Connector) LazyHiveCharObjectInspector(org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyHiveCharObjectInspector) JavaIntObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.JavaIntObjectInspector) DataOutputStream(java.io.DataOutputStream) HiveChar(org.apache.hadoop.hive.common.type.HiveChar) LazyHiveChar(org.apache.hadoop.hive.serde2.lazy.LazyHiveChar) TimestampWritable(org.apache.hadoop.hive.serde2.io.TimestampWritable) LazyBooleanObjectInspector(org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyBooleanObjectInspector) LazyFloat(org.apache.hadoop.hive.serde2.lazy.LazyFloat) Timestamp(java.sql.Timestamp) LazyTimestamp(org.apache.hadoop.hive.serde2.lazy.LazyTimestamp) JavaDoubleObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.JavaDoubleObjectInspector) LazyShort(org.apache.hadoop.hive.serde2.lazy.LazyShort) LazyString(org.apache.hadoop.hive.serde2.lazy.LazyString) MockInstance(org.apache.accumulo.core.client.mock.MockInstance) JavaByteObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.JavaByteObjectInspector) LazyHiveDecimal(org.apache.hadoop.hive.serde2.lazy.LazyHiveDecimal) HiveDecimal(org.apache.hadoop.hive.common.type.HiveDecimal) JavaBooleanObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.JavaBooleanObjectInspector) InputSplit(org.apache.hadoop.mapred.InputSplit) Path(org.apache.hadoop.fs.Path) DateWritable(org.apache.hadoop.hive.serde2.io.DateWritable) Text(org.apache.hadoop.io.Text) ByteArrayOutputStream(java.io.ByteArrayOutputStream) HiveVarchar(org.apache.hadoop.hive.common.type.HiveVarchar) LazyHiveVarchar(org.apache.hadoop.hive.serde2.lazy.LazyHiveVarchar) LazyFloatObjectInspector(org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyFloatObjectInspector) DataInputStream(java.io.DataInputStream) LazyDate(org.apache.hadoop.hive.serde2.lazy.LazyDate) Date(java.sql.Date) LazyByteObjectInspector(org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyByteObjectInspector) BatchWriter(org.apache.accumulo.core.client.BatchWriter) JavaShortObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.JavaShortObjectInspector) Test(org.junit.Test)
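
The pattern that repeats for every type above (write the value with LazyUtils into a byte[], then read it back through the binary-aware Lazy wrapper) can be reduced to a few lines. The sketch below uses only calls already exercised in the test; the class name and the literal 42 are illustrative.

import java.io.ByteArrayOutputStream;

import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef;
import org.apache.hadoop.hive.serde2.lazy.LazyFactory;
import org.apache.hadoop.hive.serde2.lazy.LazyInteger;
import org.apache.hadoop.hive.serde2.lazy.LazyUtils;
import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyIntObjectInspector;
import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyPrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.JavaIntObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class BinaryIntRoundTripSketch {
    public static void main(String[] args) throws Exception {
        // Write an int with the binary encoding, as the test does for the cf:int cell.
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        JavaIntObjectInspector intOI = (JavaIntObjectInspector) PrimitiveObjectInspectorFactory
                .getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.INT_TYPE_NAME));
        LazyUtils.writePrimitive(baos, 42, intOI);
        // Read it back through the binary-aware LazyInteger, mirroring the assertions above.
        LazyIntObjectInspector lazyIntOI = (LazyIntObjectInspector) LazyPrimitiveObjectInspectorFactory
                .getLazyObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.INT_TYPE_NAME));
        LazyInteger lazyInt = (LazyInteger) LazyFactory.createLazyPrimitiveBinaryClass(lazyIntOI);
        ByteArrayRef ref = new ByteArrayRef();
        ref.setData(baos.toByteArray());
        lazyInt.init(ref, 0, ref.getData().length);
        System.out.println(lazyInt.getWritableObject().get()); // expected: 42
    }
}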

Example 8 with Mutation

Use of org.apache.accumulo.core.data.Mutation in project hive by apache.

From the class AccumuloRowSerializer, method serialize.

public Mutation serialize(Object obj, ObjectInspector objInspector) throws SerDeException, IOException {
    if (objInspector.getCategory() != ObjectInspector.Category.STRUCT) {
        throw new SerDeException(getClass().toString() + " can only serialize struct types, but we got: " + objInspector.getTypeName());
    }
    // Prepare the field ObjectInspectors
    StructObjectInspector soi = (StructObjectInspector) objInspector;
    List<? extends StructField> fields = soi.getAllStructFieldRefs();
    List<Object> columnValues = soi.getStructFieldsDataAsList(obj);
    // Fail if we try to access an offset out of bounds
    if (rowIdOffset >= fields.size()) {
        throw new IllegalStateException("Attempted to access field outside of definition for struct. Have " + fields.size() + " fields and tried to access offset " + rowIdOffset);
    }
    StructField field = fields.get(rowIdOffset);
    Object value = columnValues.get(rowIdOffset);
    // The ObjectInspector for the row ID
    ObjectInspector fieldObjectInspector = field.getFieldObjectInspector();
    // Serialize the row component using the RowIdFactory. In the normal case, this will just
    // delegate back to the "local" serializeRowId method
    byte[] data = rowIdFactory.serializeRowId(value, field, output);
    // Set that as the row id in the mutation
    Mutation mutation = new Mutation(data);
    // Each column in the row
    for (int i = 0; i < fields.size(); i++) {
        if (rowIdOffset == i) {
            continue;
        }
        // Get the relevant information for this column
        field = fields.get(i);
        value = columnValues.get(i);
        // Despite having a fixed schema from Hive, we have sparse columns in Accumulo
        if (null == value) {
            continue;
        }
        // The ObjectInspector for the current column
        fieldObjectInspector = field.getFieldObjectInspector();
        // Make sure we got the right implementation of a ColumnMapping
        ColumnMapping mapping = mappings.get(i);
        if (mapping instanceof HiveAccumuloColumnMapping) {
            serializeColumnMapping((HiveAccumuloColumnMapping) mapping, fieldObjectInspector, value, mutation);
        } else if (mapping instanceof HiveAccumuloMapColumnMapping) {
            serializeColumnMapping((HiveAccumuloMapColumnMapping) mapping, fieldObjectInspector, value, mutation);
        } else {
            throw new IllegalArgumentException("Mapping for " + field.getFieldName() + " was not a HiveColumnMapping, but was " + mapping.getClass());
        }
    }
    return mutation;
}
Also used : ListObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) MapObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) HiveAccumuloMapColumnMapping(org.apache.hadoop.hive.accumulo.columns.HiveAccumuloMapColumnMapping) StructField(org.apache.hadoop.hive.serde2.objectinspector.StructField) Mutation(org.apache.accumulo.core.data.Mutation) HiveAccumuloColumnMapping(org.apache.hadoop.hive.accumulo.columns.HiveAccumuloColumnMapping) SerDeException(org.apache.hadoop.hive.serde2.SerDeException) ColumnMapping(org.apache.hadoop.hive.accumulo.columns.ColumnMapping) HiveAccumuloMapColumnMapping(org.apache.hadoop.hive.accumulo.columns.HiveAccumuloMapColumnMapping) HiveAccumuloColumnMapping(org.apache.hadoop.hive.accumulo.columns.HiveAccumuloColumnMapping) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector)
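
The "sparse columns" comment in serialize() is worth making concrete: a null Hive column simply produces no put() on the Mutation, so the Accumulo row only contains cells for the columns that were present. A minimal hypothetical illustration (the helper and the column names are invented, not part of the Hive code):

import org.apache.accumulo.core.data.Mutation;
import org.apache.accumulo.core.data.Value;
import org.apache.hadoop.io.Text;

public class SparseColumnsSketch {
    // Hypothetical helper: each non-null value becomes one cell; a null value
    // is skipped entirely, leaving a sparse Accumulo row.
    static Mutation toMutation(String rowId, String col1, String col2) {
        Mutation m = new Mutation(rowId);
        if (col1 != null) {
            m.put(new Text("cf"), new Text("col1"), new Value(col1.getBytes()));
        }
        if (col2 != null) {
            m.put(new Text("cf"), new Text("col2"), new Value(col2.getBytes()));
        }
        return m;
    }
}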

Example 9 with Mutation

Use of org.apache.accumulo.core.data.Mutation in project hive by apache.

From the class TestHiveAccumuloTableInputFormat, method testGetProtectedField.

@Test
public void testGetProtectedField() throws Exception {
    FileInputFormat.addInputPath(conf, new Path("unused"));
    BatchWriterConfig writerConf = new BatchWriterConfig();
    BatchWriter writer = con.createBatchWriter(TEST_TABLE, writerConf);
    Authorizations origAuths = con.securityOperations().getUserAuthorizations(USER);
    con.securityOperations().changeUserAuthorizations(USER, new Authorizations(origAuths.toString() + ",foo"));
    Mutation m = new Mutation("r4");
    m.put(COLUMN_FAMILY, NAME, new ColumnVisibility("foo"), new Value("frank".getBytes()));
    m.put(COLUMN_FAMILY, SID, new ColumnVisibility("foo"), new Value(parseIntBytes("4")));
    m.put(COLUMN_FAMILY, DEGREES, new ColumnVisibility("foo"), new Value(parseDoubleBytes("60.6")));
    m.put(COLUMN_FAMILY, MILLIS, new ColumnVisibility("foo"), new Value(parseLongBytes("777")));
    writer.addMutation(m);
    writer.close();
    conf.set(AccumuloSerDeParameters.AUTHORIZATIONS_KEY, "foo");
    InputSplit[] splits = inputformat.getSplits(conf, 0);
    assertEquals(splits.length, 1);
    RecordReader<Text, AccumuloHiveRow> reader = inputformat.getRecordReader(splits[0], conf, null);
    Text rowId = new Text("r1");
    AccumuloHiveRow row = new AccumuloHiveRow();
    assertTrue(reader.next(rowId, row));
    assertEquals(row.getRowId(), rowId.toString());
    assertTrue(row.hasFamAndQual(COLUMN_FAMILY, NAME));
    assertArrayEquals(row.getValue(COLUMN_FAMILY, NAME), "brian".getBytes());
    rowId = new Text("r2");
    assertTrue(reader.next(rowId, row));
    assertEquals(row.getRowId(), rowId.toString());
    assertTrue(row.hasFamAndQual(COLUMN_FAMILY, NAME));
    assertArrayEquals(row.getValue(COLUMN_FAMILY, NAME), "mark".getBytes());
    rowId = new Text("r3");
    assertTrue(reader.next(rowId, row));
    assertEquals(row.getRowId(), rowId.toString());
    assertTrue(row.hasFamAndQual(COLUMN_FAMILY, NAME));
    assertArrayEquals(row.getValue(COLUMN_FAMILY, NAME), "dennis".getBytes());
    rowId = new Text("r4");
    assertTrue(reader.next(rowId, row));
    assertEquals(row.getRowId(), rowId.toString());
    assertTrue(row.hasFamAndQual(COLUMN_FAMILY, NAME));
    assertArrayEquals(row.getValue(COLUMN_FAMILY, NAME), "frank".getBytes());
    assertFalse(reader.next(rowId, row));
}
Also used : Path(org.apache.hadoop.fs.Path) Authorizations(org.apache.accumulo.core.security.Authorizations) Value(org.apache.accumulo.core.data.Value) BatchWriterConfig(org.apache.accumulo.core.client.BatchWriterConfig) Text(org.apache.hadoop.io.Text) BatchWriter(org.apache.accumulo.core.client.BatchWriter) Mutation(org.apache.accumulo.core.data.Mutation) ColumnVisibility(org.apache.accumulo.core.security.ColumnVisibility) InputSplit(org.apache.hadoop.mapred.InputSplit) AccumuloHiveRow(org.apache.hadoop.hive.accumulo.AccumuloHiveRow) Test(org.junit.Test)
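
The Accumulo behavior this test relies on, that cells written with a ColumnVisibility are only returned to scanners carrying a matching authorization, can be shown without the Hive layer. Below is a minimal sketch against a MockInstance; the instance, table, and row names are illustrative, and the real test exercises the same filtering through HiveAccumuloTableInputFormat's RecordReader.

import java.util.Map.Entry;

import org.apache.accumulo.core.client.BatchWriter;
import org.apache.accumulo.core.client.BatchWriterConfig;
import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.core.client.mock.MockInstance;
import org.apache.accumulo.core.client.security.tokens.PasswordToken;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Mutation;
import org.apache.accumulo.core.data.Value;
import org.apache.accumulo.core.security.Authorizations;
import org.apache.accumulo.core.security.ColumnVisibility;
import org.apache.hadoop.io.Text;

public class VisibilitySketch {
    public static void main(String[] args) throws Exception {
        Connector conn = new MockInstance("vis-sketch").getConnector("root", new PasswordToken(""));
        conn.tableOperations().create("people");
        // Grant the "foo" authorization, as the test does for its user.
        conn.securityOperations().changeUserAuthorizations("root", new Authorizations("foo"));
        Mutation m = new Mutation("r4");
        m.put(new Text("cf"), new Text("name"), new ColumnVisibility("foo"), new Value("frank".getBytes()));
        BatchWriter writer = conn.createBatchWriter("people", new BatchWriterConfig());
        writer.addMutation(m);
        writer.close();
        // Scanned without the "foo" authorization, the cell is filtered out...
        for (Entry<Key, Value> e : conn.createScanner("people", new Authorizations())) {
            System.out.println("unexpected: " + e);
        }
        // ...and returned once the scanner carries it, as the test's RecordReader does.
        for (Entry<Key, Value> e : conn.createScanner("people", new Authorizations("foo"))) {
            System.out.println("visible: " + e);
        }
    }
}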

Example 10 with Mutation

Use of org.apache.accumulo.core.data.Mutation in project hive by apache.

From the class TestHiveAccumuloTableInputFormat, method createMockKeyValues.

@Before
public void createMockKeyValues() throws Exception {
    // Make a MockInstance here; by setting the instance name in the conf to match it,
    // we can "trick" the InputFormat into using this MockInstance
    mockInstance = new MockInstance(test.getMethodName());
    inputformat = new HiveAccumuloTableInputFormat();
    conf = new JobConf();
    conf.set(AccumuloSerDeParameters.TABLE_NAME, TEST_TABLE);
    conf.set(AccumuloSerDeParameters.USE_MOCK_INSTANCE, "true");
    conf.set(AccumuloSerDeParameters.INSTANCE_NAME, test.getMethodName());
    conf.set(AccumuloSerDeParameters.USER_NAME, USER);
    conf.set(AccumuloSerDeParameters.USER_PASS, PASS);
    // not used for mock, but required by input format
    conf.set(AccumuloSerDeParameters.ZOOKEEPERS, "localhost:2181");
    columnNames = Arrays.asList("name", "sid", "dgrs", "mills");
    columnTypes = Arrays.<TypeInfo>asList(TypeInfoFactory.stringTypeInfo, TypeInfoFactory.intTypeInfo, TypeInfoFactory.doubleTypeInfo, TypeInfoFactory.longTypeInfo);
    conf.set(AccumuloSerDeParameters.COLUMN_MAPPINGS, "cf:name,cf:sid,cf:dgrs,cf:mills");
    conf.set(serdeConstants.LIST_COLUMNS, "name,sid,dgrs,mills");
    conf.set(serdeConstants.LIST_COLUMN_TYPES, "string,int,double,bigint");
    con = mockInstance.getConnector(USER, new PasswordToken(PASS.getBytes()));
    con.tableOperations().create(TEST_TABLE);
    con.securityOperations().changeUserAuthorizations(USER, new Authorizations("blah"));
    BatchWriterConfig writerConf = new BatchWriterConfig();
    BatchWriter writer = con.createBatchWriter(TEST_TABLE, writerConf);
    Mutation m1 = new Mutation(new Text("r1"));
    m1.put(COLUMN_FAMILY, NAME, new Value("brian".getBytes()));
    m1.put(COLUMN_FAMILY, SID, new Value(parseIntBytes("1")));
    m1.put(COLUMN_FAMILY, DEGREES, new Value(parseDoubleBytes("44.5")));
    m1.put(COLUMN_FAMILY, MILLIS, new Value(parseLongBytes("555")));
    Mutation m2 = new Mutation(new Text("r2"));
    m2.put(COLUMN_FAMILY, NAME, new Value("mark".getBytes()));
    m2.put(COLUMN_FAMILY, SID, new Value(parseIntBytes("2")));
    m2.put(COLUMN_FAMILY, DEGREES, new Value(parseDoubleBytes("55.5")));
    m2.put(COLUMN_FAMILY, MILLIS, new Value(parseLongBytes("666")));
    Mutation m3 = new Mutation(new Text("r3"));
    m3.put(COLUMN_FAMILY, NAME, new Value("dennis".getBytes()));
    m3.put(COLUMN_FAMILY, SID, new Value(parseIntBytes("3")));
    m3.put(COLUMN_FAMILY, DEGREES, new Value(parseDoubleBytes("65.5")));
    m3.put(COLUMN_FAMILY, MILLIS, new Value(parseLongBytes("777")));
    writer.addMutation(m1);
    writer.addMutation(m2);
    writer.addMutation(m3);
    writer.close();
}
Also used : PasswordToken(org.apache.accumulo.core.client.security.tokens.PasswordToken) Authorizations(org.apache.accumulo.core.security.Authorizations) MockInstance(org.apache.accumulo.core.client.mock.MockInstance) Value(org.apache.accumulo.core.data.Value) BatchWriterConfig(org.apache.accumulo.core.client.BatchWriterConfig) Text(org.apache.hadoop.io.Text) BatchWriter(org.apache.accumulo.core.client.BatchWriter) Mutation(org.apache.accumulo.core.data.Mutation) JobConf(org.apache.hadoop.mapred.JobConf) Before(org.junit.Before)
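
To inspect what this @Before method actually leaves in the table, a small hypothetical helper can scan it back with the "blah" authorization granted above. Each put() appears as its own Key/Value entry, so rows r1..r3 each contribute four cells (name, sid, dgrs, mills); the helper below is illustrative and assumes the test's connector and table name are passed in.

import java.util.Map.Entry;

import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.core.client.Scanner;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Value;
import org.apache.accumulo.core.security.Authorizations;

public class ScanSetupSketch {
    // Hypothetical helper: dump everything createMockKeyValues() wrote.
    static void dump(Connector con, String testTable) throws Exception {
        Scanner scanner = con.createScanner(testTable, new Authorizations("blah"));
        for (Entry<Key, Value> e : scanner) {
            System.out.println(e.getKey().getRow() + " " + e.getKey().getColumnFamily()
                    + ":" + e.getKey().getColumnQualifier() + " -> " + e.getValue());
        }
    }
}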

Aggregations

Mutation (org.apache.accumulo.core.data.Mutation): 31
Value (org.apache.accumulo.core.data.Value): 21
Key (org.apache.accumulo.core.data.Key): 17
ColumnVisibility (org.apache.accumulo.core.security.ColumnVisibility): 16
Authorizations (org.apache.accumulo.core.security.Authorizations): 15
BatchWriter (org.apache.accumulo.core.client.BatchWriter): 13
Entry (java.util.Map.Entry): 12
BatchWriterConfig (org.apache.accumulo.core.client.BatchWriterConfig): 12
Test (org.junit.Test): 11
ByteArrayRef (org.apache.hadoop.hive.serde2.lazy.ByteArrayRef): 10
Text (org.apache.hadoop.io.Text): 10
Properties (java.util.Properties): 8
TableNotFoundException (org.apache.accumulo.core.client.TableNotFoundException): 8
Configuration (org.apache.hadoop.conf.Configuration): 8
LazySerDeParameters (org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters): 8
LazyStruct (org.apache.hadoop.hive.serde2.lazy.LazyStruct): 8
LazySimpleStructObjectInspector (org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector): 8
TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo): 8
Element (uk.gov.gchq.gaffer.data.element.Element): 8
AccumuloException (org.apache.accumulo.core.client.AccumuloException): 7