Search in sources :

Example 31 with TIOStreamTransport

use of org.apache.thrift.transport.TIOStreamTransport in project carbondata by apache.

From the class CarbonUtil, method read.

/**
 * Deserializes a thrift object (e.g. a data chunk) from a slice of a byte array.
 *
 * @param data    byte array holding the serialized thrift bytes
 * @param creator factory producing an empty thrift object of the expected type
 * @param offset  start position of the serialized data within {@code data}
 * @param length  number of serialized bytes to read
 * @return the populated thrift object
 * @throws IOException if the bytes cannot be decoded as the expected thrift type
 */
private static TBase read(byte[] data, TBaseCreator creator, int offset, int length) throws IOException {
    TBase thriftObject = creator.create();
    ByteArrayInputStream byteStream = new ByteArrayInputStream(data, offset, length);
    try {
        TProtocol compactProtocol = new TCompactProtocol(new TIOStreamTransport(byteStream));
        thriftObject.read(compactProtocol);
    } catch (TException e) {
        // Surface thrift decoding failures as IOException per the method contract.
        throw new IOException(e);
    } finally {
        CarbonUtil.closeStreams(byteStream);
    }
    return thriftObject;
}
Also used : TException(org.apache.thrift.TException) ByteArrayInputStream(java.io.ByteArrayInputStream) TProtocol(org.apache.thrift.protocol.TProtocol) TIOStreamTransport(org.apache.thrift.transport.TIOStreamTransport) TBase(org.apache.thrift.TBase) IOException(java.io.IOException) TCompactProtocol(org.apache.thrift.protocol.TCompactProtocol)

Example 32 with TIOStreamTransport

use of org.apache.thrift.transport.TIOStreamTransport in project carbondata by apache.

From the class ThriftReader, method open.

/**
 * Opens {@code fileName} for reading and wires a thrift compact-protocol
 * reader over the buffered input stream.
 */
public void open() throws IOException {
    // FileFactory resolves the backing store type for the path before opening the stream.
    dataInputStream = FileFactory.getDataInputStream(fileName, FileFactory.getFileType(fileName), bufferSize);
    binaryIn = new TCompactProtocol(new TIOStreamTransport(dataInputStream));
}
Also used : TIOStreamTransport(org.apache.thrift.transport.TIOStreamTransport) TCompactProtocol(org.apache.thrift.protocol.TCompactProtocol) FileFactory(org.apache.carbondata.core.datastore.impl.FileFactory)

Example 33 with TIOStreamTransport

use of org.apache.thrift.transport.TIOStreamTransport in project hive by apache.

From the class TestHCatInputFormat, method setUp.

/**
 * Create an input sequence file with 100 records; every 10th record is bad.
 * Load this table into Hive.
 */
@Before
@Override
public void setUp() throws Exception {
    super.setUp();
    if (setUpComplete) {
        // Fixture is shared across tests; only build the data file once.
        return;
    }
    Path intStringSeq = new Path(TEST_DATA_DIR + "/data/intString.seq");
    LOG.info("Creating data file: " + intStringSeq);
    SequenceFile.Writer seqFileWriter = SequenceFile.createWriter(intStringSeq.getFileSystem(hiveConf), hiveConf, intStringSeq, NullWritable.class, BytesWritable.class);
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    TIOStreamTransport transport = new TIOStreamTransport(out);
    TBinaryProtocol protocol = new TBinaryProtocol(transport);
    try {
        for (int i = 1; i <= 100; i++) {
            if (i % 10 == 0) {
                // Every 10th record is deliberately undecodable thrift, to exercise bad-record handling.
                seqFileWriter.append(NullWritable.get(), new BytesWritable("bad record".getBytes()));
            } else {
                // Reuse the buffer: serialize an IntString record via the thrift binary protocol.
                out.reset();
                IntString intString = new IntString(i, Integer.toString(i), i);
                intString.write(protocol);
                BytesWritable bytesWritable = new BytesWritable(out.toByteArray());
                seqFileWriter.append(NullWritable.get(), bytesWritable);
            }
        }
    } finally {
        // Close even if serialization/append throws, so the file handle is not leaked.
        seqFileWriter.close();
    }
    // Now let's load this file into a new Hive table.
    Assert.assertEquals(0, driver.run("drop table if exists test_bad_records").getResponseCode());
    Assert.assertEquals(0, driver.run("create table test_bad_records " + "row format serde 'org.apache.hadoop.hive.serde2.thrift.ThriftDeserializer' " + "with serdeproperties ( " + "  'serialization.class'='org.apache.hadoop.hive.serde2.thrift.test.IntString', " + "  'serialization.format'='org.apache.thrift.protocol.TBinaryProtocol') " + "stored as" + "  inputformat 'org.apache.hadoop.mapred.SequenceFileInputFormat'" + "  outputformat 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'").getResponseCode());
    Assert.assertEquals(0, driver.run("load data local inpath '" + intStringSeq.getParent() + "' into table test_bad_records").getResponseCode());
    setUpComplete = true;
}
Also used : Path(org.apache.hadoop.fs.Path) IntString(org.apache.hadoop.hive.serde2.thrift.test.IntString) SequenceFile(org.apache.hadoop.io.SequenceFile) TBinaryProtocol(org.apache.thrift.protocol.TBinaryProtocol) TIOStreamTransport(org.apache.thrift.transport.TIOStreamTransport) BytesWritable(org.apache.hadoop.io.BytesWritable) ByteArrayOutputStream(java.io.ByteArrayOutputStream) Before(org.junit.Before)

Aggregations

TIOStreamTransport (org.apache.thrift.transport.TIOStreamTransport)33 TException (org.apache.thrift.TException)13 TBinaryProtocol (org.apache.thrift.protocol.TBinaryProtocol)11 TProtocol (org.apache.thrift.protocol.TProtocol)10 ByteArrayOutputStream (java.io.ByteArrayOutputStream)9 Test (org.junit.Test)9 TMessage (org.apache.thrift.protocol.TMessage)8 TTransport (org.apache.thrift.transport.TTransport)8 ByteArrayInputStream (java.io.ByteArrayInputStream)7 TCompactProtocol (org.apache.thrift.protocol.TCompactProtocol)7 Request (com.alibaba.dubbo.remoting.exchange.Request)6 Demo (com.alibaba.dubbo.rpc.gen.thrift.Demo)6 IOException (java.io.IOException)6 ChannelBuffer (com.alibaba.dubbo.remoting.buffer.ChannelBuffer)5 RpcResult (com.alibaba.dubbo.rpc.RpcResult)5 RandomAccessByteArrayOutputStream (com.alibaba.dubbo.rpc.protocol.thrift.io.RandomAccessByteArrayOutputStream)5 URL (com.alibaba.dubbo.common.URL)4 Channel (com.alibaba.dubbo.remoting.Channel)4 Response (com.alibaba.dubbo.remoting.exchange.Response)4 RpcException (com.alibaba.dubbo.rpc.RpcException)3