Search in sources:

Example 1 with Address

Use of org.apache.hadoop.hive.hbase.avro.Address in project hive by apache.

From class TestHBaseSerDe, method getTestAvroBytesFromClass1.

private byte[] getTestAvroBytesFromClass1(int i) throws IOException {
    Employee employee = new Employee();
    employee.setEmployeeName("Avro Employee" + i);
    employee.setEmployeeID(11111L);
    employee.setGender(Gender.FEMALE);
    employee.setAge(25L);
    Address address = new Address();
    address.setAddress1("Avro First Address" + i);
    address.setAddress2("Avro Second Address" + i);
    address.setCity("Avro City" + i);
    address.setZipcode(123456L);
    Map<CharSequence, CharSequence> metadata = new HashMap<CharSequence, CharSequence>();
    metadata.put("testkey", "testvalue");
    address.setMetadata(metadata);
    HomePhone hPhone = new HomePhone();
    hPhone.setAreaCode(999L);
    hPhone.setNumber(1234567890L);
    OfficePhone oPhone = new OfficePhone();
    oPhone.setAreaCode(999L);
    oPhone.setNumber(1234455555L);
    ContactInfo contact = new ContactInfo();
    List<Address> addresses = new ArrayList<Address>();
    // set value for the union type
    address.setCounty(hPhone);
    addresses.add(address);
    addresses.add(address);
    contact.setAddress(addresses);
    contact.setHomePhone(hPhone);
    contact.setOfficePhone(oPhone);
    employee.setContactInfo(contact);
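    // serialize the record as an Avro container file, with the schema embedded in the file header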
    DatumWriter<Employee> datumWriter = new SpecificDatumWriter<Employee>(Employee.class);
    DataFileWriter<Employee> dataFileWriter = new DataFileWriter<Employee>(datumWriter);
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    dataFileWriter.create(employee.getSchema(), out);
    dataFileWriter.append(employee);
    dataFileWriter.close();
    return out.toByteArray();
}
Also used: OfficePhone (org.apache.hadoop.hive.hbase.avro.OfficePhone), Address (org.apache.hadoop.hive.hbase.avro.Address), HashMap (java.util.HashMap), DataFileWriter (org.apache.avro.file.DataFileWriter), ArrayList (java.util.ArrayList), ByteArrayOutputStream (java.io.ByteArrayOutputStream), SpecificDatumWriter (org.apache.avro.specific.SpecificDatumWriter), Employee (org.apache.hadoop.hive.hbase.avro.Employee), HomePhone (org.apache.hadoop.hive.hbase.avro.HomePhone), ContactInfo (org.apache.hadoop.hive.hbase.avro.ContactInfo)
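For reference, a minimal sketch (not part of the Hive test itself) of how the container-file bytes produced above could be read back, assuming the same generated Employee class. It uses org.apache.avro.file.DataFileStream, org.apache.avro.io.DatumReader and org.apache.avro.specific.SpecificDatumReader; the method name readTestAvroBytesFromClass1 is illustrative only.

private Employee readTestAvroBytesFromClass1(byte[] avroBytes) throws IOException {
    // the bytes form a complete Avro container file, so the schema is embedded in the header
    DatumReader<Employee> datumReader = new SpecificDatumReader<Employee>(Employee.class);
    DataFileStream<Employee> dataFileStream =
            new DataFileStream<Employee>(new ByteArrayInputStream(avroBytes), datumReader);
    try {
        // the writer above appends exactly one Employee record
        return dataFileStream.next();
    } finally {
        dataFileStream.close();
    }
}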

Example 2 with Address

Use of org.apache.hadoop.hive.hbase.avro.Address in project hive by apache.

From class TestHBaseSerDe, method getTestAvroBytesFromClass2.

private byte[] getTestAvroBytesFromClass2(int i) throws IOException {
    Employee employee = new Employee();
    employee.setEmployeeName("Avro Employee" + i);
    employee.setEmployeeID(11111L);
    employee.setGender(Gender.FEMALE);
    employee.setAge(25L);
    Address address = new Address();
    address.setAddress1("Avro First Address" + i);
    address.setAddress2("Avro Second Address" + i);
    address.setCity("Avro City" + i);
    address.setZipcode(123456L);
    Map<CharSequence, CharSequence> metadata = new HashMap<CharSequence, CharSequence>();
    metadata.put("testkey", "testvalue");
    address.setMetadata(metadata);
    HomePhone hPhone = new HomePhone();
    hPhone.setAreaCode(999L);
    hPhone.setNumber(1234567890L);
    OfficePhone oPhone = new OfficePhone();
    oPhone.setAreaCode(999L);
    oPhone.setNumber(1234455555L);
    ContactInfo contact = new ContactInfo();
    List<Address> addresses = new ArrayList<Address>();
    // set value for the union type
    address.setCounty(hPhone);
    addresses.add(address);
    addresses.add(address);
    contact.setAddress(addresses);
    contact.setHomePhone(hPhone);
    contact.setOfficePhone(oPhone);
    employee.setContactInfo(contact);
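    // serialize the record as a raw binary-encoded datum, with no container header or embedded schema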
    DatumWriter<Employee> employeeWriter = new SpecificDatumWriter<Employee>(Employee.class);
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    Encoder encoder = EncoderFactory.get().binaryEncoder(out, null);
    // write out a header for the payload
    out.write(TEST_BYTE_ARRAY);
    employeeWriter.write(employee, encoder);
    encoder.flush();
    return out.toByteArray();
}
Also used: OfficePhone (org.apache.hadoop.hive.hbase.avro.OfficePhone), Address (org.apache.hadoop.hive.hbase.avro.Address), HashMap (java.util.HashMap), ArrayList (java.util.ArrayList), ByteArrayOutputStream (java.io.ByteArrayOutputStream), SpecificDatumWriter (org.apache.avro.specific.SpecificDatumWriter), Employee (org.apache.hadoop.hive.hbase.avro.Employee), HomePhone (org.apache.hadoop.hive.hbase.avro.HomePhone), Encoder (org.apache.avro.io.Encoder), ContactInfo (org.apache.hadoop.hive.hbase.avro.ContactInfo)
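For reference, a minimal sketch (an assumption, not part of the Hive test) of how the raw payload produced above could be decoded. The TEST_BYTE_ARRAY header written before the datum must be skipped, since the binary encoding carries no schema or record marker of its own; the method name readTestAvroBytesFromClass2 is illustrative only and uses org.apache.avro.io.DecoderFactory together with org.apache.avro.specific.SpecificDatumReader.

private Employee readTestAvroBytesFromClass2(byte[] avroBytes) throws IOException {
    // skip the TEST_BYTE_ARRAY header prepended by the writer above
    int offset = TEST_BYTE_ARRAY.length;
    Decoder decoder = DecoderFactory.get().binaryDecoder(avroBytes, offset, avroBytes.length - offset, null);
    // raw binary encoding has no embedded schema, so the reader relies on the generated Employee class
    DatumReader<Employee> datumReader = new SpecificDatumReader<Employee>(Employee.class);
    return datumReader.read(null, decoder);
}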

Aggregations

ByteArrayOutputStream (java.io.ByteArrayOutputStream): 2
ArrayList (java.util.ArrayList): 2
HashMap (java.util.HashMap): 2
SpecificDatumWriter (org.apache.avro.specific.SpecificDatumWriter): 2
Address (org.apache.hadoop.hive.hbase.avro.Address): 2
ContactInfo (org.apache.hadoop.hive.hbase.avro.ContactInfo): 2
Employee (org.apache.hadoop.hive.hbase.avro.Employee): 2
HomePhone (org.apache.hadoop.hive.hbase.avro.HomePhone): 2
OfficePhone (org.apache.hadoop.hive.hbase.avro.OfficePhone): 2
DataFileWriter (org.apache.avro.file.DataFileWriter): 1
Encoder (org.apache.avro.io.Encoder): 1