Use of org.apache.thrift.transport.TIOStreamTransport in project carbondata by apache.
From the class CarbonUtil, method read:
/**
 * Below method will be used to convert the byte array value to a thrift object for
 * a data chunk.
 *
 * @param data    thrift byte array
 * @param creator creator for the target thrift type
 * @param offset  offset in the byte array at which the serialized object starts
 * @param length  number of bytes to read
 * @return thrift object
 * @throws IOException any problem while converting the object
 */
private static TBase read(byte[] data, TBaseCreator creator, int offset, int length)
    throws IOException {
  ByteArrayInputStream stream = new ByteArrayInputStream(data, offset, length);
  TProtocol binaryIn = new TCompactProtocol(new TIOStreamTransport(stream));
  TBase t = creator.create();
  try {
    t.read(binaryIn);
  } catch (TException e) {
    throw new IOException(e);
  } finally {
    CarbonUtil.closeStreams(stream);
  }
  return t;
}
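The inverse direction uses the same transport/protocol pairing over a ByteArrayOutputStream. A minimal sketch (the helper name write is illustrative and not part of CarbonUtil; it assumes the same pre-0.14 Thrift API as the snippet above, where the TIOStreamTransport constructor does not throw):

// Imports assumed: java.io.ByteArrayOutputStream, java.io.IOException,
// org.apache.thrift.TBase, org.apache.thrift.TException,
// org.apache.thrift.protocol.TCompactProtocol, org.apache.thrift.protocol.TProtocol,
// org.apache.thrift.transport.TIOStreamTransport
private static byte[] write(TBase t) throws IOException {
  // Serialize into memory through the same TIOStreamTransport/TCompactProtocol pair.
  ByteArrayOutputStream stream = new ByteArrayOutputStream();
  TProtocol binaryOut = new TCompactProtocol(new TIOStreamTransport(stream));
  try {
    t.write(binaryOut);
  } catch (TException e) {
    throw new IOException(e);
  }
  // ByteArrayOutputStream needs no explicit close.
  return stream.toByteArray();
}

Bytes produced this way are exactly what read() above consumes, so the two methods form a round trip for any generated thrift struct.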
Use of org.apache.thrift.transport.TIOStreamTransport in project carbondata by apache.
From the class ThriftReader, method open:
/**
 * Opens the fileName for reading.
 */
public void open() throws IOException {
  FileFactory.FileType fileType = FileFactory.getFileType(fileName);
  dataInputStream = FileFactory.getDataInputStream(fileName, fileType, bufferSize);
  binaryIn = new TCompactProtocol(new TIOStreamTransport(dataInputStream));
}
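Nothing here is specific to CarbonData's FileFactory: TIOStreamTransport adapts any java.io.InputStream. A hedged sketch with plain java.io (the method name readStruct and the idea of passing in a pre-allocated TBase are illustrative, not CarbonData API):

// Imports assumed: java.io.BufferedInputStream, java.io.FileInputStream,
// java.io.IOException, java.io.InputStream, org.apache.thrift.TBase,
// org.apache.thrift.TException, org.apache.thrift.protocol.TCompactProtocol,
// org.apache.thrift.protocol.TProtocol, org.apache.thrift.transport.TIOStreamTransport
public static void readStruct(String path, TBase target) throws IOException {
  InputStream in = new BufferedInputStream(new FileInputStream(path));
  TProtocol proto = new TCompactProtocol(new TIOStreamTransport(in));
  try {
    target.read(proto); // populate the generated struct from the stream
  } catch (TException e) {
    throw new IOException(e);
  } finally {
    in.close();
  }
}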
Use of org.apache.thrift.transport.TIOStreamTransport in project hive by apache.
From the class TestHCatInputFormat, method setUp:
/**
 * Create an input sequence file with 100 records; every 10th record is bad.
 * Load this table into Hive.
 */
@Before
@Override
public void setUp() throws Exception {
  super.setUp();
  if (setUpComplete) {
    return;
  }
  Path intStringSeq = new Path(TEST_DATA_DIR + "/data/intString.seq");
  LOG.info("Creating data file: " + intStringSeq);
  SequenceFile.Writer seqFileWriter = SequenceFile.createWriter(
      intStringSeq.getFileSystem(hiveConf), hiveConf, intStringSeq,
      NullWritable.class, BytesWritable.class);
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  TIOStreamTransport transport = new TIOStreamTransport(out);
  TBinaryProtocol protocol = new TBinaryProtocol(transport);
  for (int i = 1; i <= 100; i++) {
    if (i % 10 == 0) {
      seqFileWriter.append(NullWritable.get(), new BytesWritable("bad record".getBytes()));
    } else {
      out.reset();
      IntString intString = new IntString(i, Integer.toString(i), i);
      intString.write(protocol);
      BytesWritable bytesWritable = new BytesWritable(out.toByteArray());
      seqFileWriter.append(NullWritable.get(), bytesWritable);
    }
  }
  seqFileWriter.close();
  // Now let's load this file into a new Hive table.
  Assert.assertEquals(0, driver.run("drop table if exists test_bad_records").getResponseCode());
  Assert.assertEquals(0, driver.run("create table test_bad_records "
      + "row format serde 'org.apache.hadoop.hive.serde2.thrift.ThriftDeserializer' "
      + "with serdeproperties ( "
      + " 'serialization.class'='org.apache.hadoop.hive.serde2.thrift.test.IntString', "
      + " 'serialization.format'='org.apache.thrift.protocol.TBinaryProtocol') "
      + "stored as"
      + " inputformat 'org.apache.hadoop.mapred.SequenceFileInputFormat'"
      + " outputformat 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'").getResponseCode());
  Assert.assertEquals(0, driver.run("load data local inpath '" + intStringSeq.getParent()
      + "' into table test_bad_records").getResponseCode());
  setUpComplete = true;
}
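For reference, the read side of the loop above: the same TIOStreamTransport, wrapped around a ByteArrayInputStream, decodes one of the good records back into an IntString (a sketch; decode is a hypothetical helper, not part of the test):

// Imports assumed: java.io.ByteArrayInputStream,
// org.apache.hadoop.hive.serde2.thrift.test.IntString, org.apache.thrift.TException,
// org.apache.thrift.protocol.TBinaryProtocol, org.apache.thrift.transport.TIOStreamTransport
static IntString decode(byte[] bytes) throws TException {
  ByteArrayInputStream in = new ByteArrayInputStream(bytes);
  TBinaryProtocol protocol = new TBinaryProtocol(new TIOStreamTransport(in));
  IntString intString = new IntString();
  // Will typically fail on the "bad record" entries, since those bytes
  // are not valid TBinaryProtocol output; that failure mode is what the
  // test's bad rows are meant to provoke in Hive's ThriftDeserializer.
  intString.read(protocol);
  return intString;
}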