use of org.apache.thrift.TSerializer in project pinot by linkedin.
the class BrokerRequestSerializationTest method testSerialization.
@Test
public static void testSerialization() throws TException {
    BrokerRequest req = new BrokerRequest();

    // Populate Query Type
    QueryType type = new QueryType();
    type.setHasAggregation(true);
    type.setHasFilter(true);
    type.setHasSelection(true);
    type.setHasGroup_by(true);
    req.setQueryType(type);

    // Populate Query source
    QuerySource s = new QuerySource();
    s.setTableName("dummy");
    req.setQuerySource(s);
    req.setDuration("dummy");
    req.setTimeInterval("dummy");

    // Populate Group-By
    GroupBy groupBy = new GroupBy();
    List<String> columns = new ArrayList<String>();
    columns.add("dummy1");
    columns.add("dummy2");
    groupBy.setColumns(columns);
    groupBy.setTopN(100);
    req.setGroupBy(groupBy);

    // Populate Selections
    Selection sel = new Selection();
    sel.setSize(1);
    SelectionSort s2 = new SelectionSort();
    s2.setColumn("dummy1");
    s2.setIsAsc(true);
    sel.addToSelectionSortSequence(s2);
    sel.addToSelectionColumns("dummy1");
    req.setSelections(sel);

    // Populate FilterQuery
    FilterQuery q1 = new FilterQuery();
    q1.setId(1);
    q1.setColumn("dummy1");
    q1.addToValue("dummy1");
    q1.addToNestedFilterQueryIds(2);
    q1.setOperator(FilterOperator.AND);
    FilterQuery q2 = new FilterQuery();
    q2.setId(2);
    q2.setColumn("dummy2");
    q2.addToValue("dummy2");
    q2.setOperator(FilterOperator.AND);
    FilterQueryMap map = new FilterQueryMap();
    map.putToFilterQueryMap(1, q1);
    map.putToFilterQueryMap(2, q2);
    req.setFilterQuery(q1);
    req.setFilterSubQueryMap(map);

    // Populate Aggregations
    AggregationInfo agg = new AggregationInfo();
    agg.setAggregationType("dummy1");
    agg.putToAggregationParams("key1", "dummy1");
    req.addToAggregationsInfo(agg);

    TSerializer normalSerializer = new TSerializer();
    TSerializer compactSerializer = new TSerializer(new TCompactProtocol.Factory());
    normalSerializer.serialize(req);
    compactSerializer.serialize(req);

    // int numRequests = 100000;
    // TimerContext t = MetricsHelper.startTimer();
    // TSerializer serializer = new TSerializer(new TCompactProtocol.Factory());
    // //TSerializer serializer = new TSerializer();
    // //Compact : Size 183 , Serialization Latency : 0.03361ms
    // // Normal : Size 385 , Serialization Latency : 0.01144ms
    //
    // for (int i = 0; i < numRequests; i++) {
    //   try {
    //     serializer.serialize(req);
    //     //System.out.println(s3.length);
    //     //break;
    //   } catch (TException e) {
    //     e.printStackTrace();
    //   }
    // }
    // t.stop();
    // System.out.println("Latency is :" + (t.getLatencyMs() / (float) numRequests));
}
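The test above serializes the populated BrokerRequest with both the default binary protocol and TCompactProtocol but never inspects the result. A minimal round-trip sketch that could be appended to the method body (not part of the Pinot test; the TestNG Assert class is assumed to be available):

    // Sketch only: deserialize the compact bytes back and compare with the source request.
    byte[] compactBytes = new TSerializer(new TCompactProtocol.Factory()).serialize(req);
    BrokerRequest decoded = new BrokerRequest();
    new TDeserializer(new TCompactProtocol.Factory()).deserialize(decoded, compactBytes);
    Assert.assertEquals(decoded, req); // the deserialized request should equal the original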
use of org.apache.thrift.TSerializer in project pinpoint by naver.
the class LegacyAgentStatMapperTest method createResultForLegacyWith_AGENT_STAT_CF_STATISTICS_V1.
private Result createResultForLegacyWith_AGENT_STAT_CF_STATISTICS_V1() throws TException {
    final TAgentStat agentStat = new TAgentStat();
    final TJvmGc gc = new TJvmGc();
    gc.setType(GC_TYPE);
    gc.setJvmGcOldCount(GC_OLD_COUNT);
    gc.setJvmGcOldTime(GC_OLD_TIME);
    gc.setJvmMemoryHeapUsed(HEAP_USED);
    gc.setJvmMemoryHeapMax(HEAP_MAX);
    gc.setJvmMemoryNonHeapUsed(NON_HEAP_USED);
    gc.setJvmMemoryNonHeapMax(NON_HEAP_MAX);
    agentStat.setGc(gc);
    final TProtocolFactory factory = new TCompactProtocol.Factory();
    final TSerializer serializer = new TSerializer(factory);
    final byte[] qualifier = AGENT_STAT_CF_STATISTICS_V1;
    final byte[] value = serializer.serialize(agentStat);
    return Result.create(Arrays.asList(createCell(qualifier, value)));
}
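The serialized bytes become the cell value stored under the legacy AGENT_STAT_CF_STATISTICS_V1 qualifier. For reference, the reading side would recover the stat with a TDeserializer built on the same compact protocol factory; a sketch, not part of the Pinpoint test:

    // Sketch only: reverse the serialization performed above.
    TDeserializer deserializer = new TDeserializer(new TCompactProtocol.Factory());
    TAgentStat decoded = new TAgentStat();
    deserializer.deserialize(decoded, value); // value: the bytes produced by the method above
    // decoded.getGc() now mirrors the TJvmGc populated in the method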
use of org.apache.thrift.TSerializer in project grpc-java by grpc.
the class ThriftUtilsTest method testAvailable.
@Test
public void testAvailable() throws Exception {
    Message m = new Message();
    m.i = 10;
    InputStream is = marshaller.stream(m);
    // available() should report the full Thrift-serialized length up front.
    assertEquals(is.available(), new TSerializer().serialize(m).length);
    is.read();
    assertEquals(is.available(), new TSerializer().serialize(m).length - 1);
    // Drain the remaining bytes.
    while (is.read() != -1) {
    }
    assertEquals(-1, is.read());
    assertEquals(0, is.available());
}
use of org.apache.thrift.TSerializer in project hive by apache.
the class AcidTableSerializer method encode.
/**
 * Returns a base 64 encoded representation of the supplied {@link AcidTable}.
 */
public static String encode(AcidTable table) throws IOException {
    DataOutputStream data = null;
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    try {
        data = new DataOutputStream(bytes);
        data.writeUTF(table.getDatabaseName());
        data.writeUTF(table.getTableName());
        data.writeBoolean(table.createPartitions());
        if (table.getWriteId() <= 0) {
            LOG.warn("Write ID <= 0. The recipient is probably expecting a table write ID.");
        }
        data.writeLong(table.getWriteId());
        data.writeByte(table.getTableType().getId());
        Table metaTable = table.getTable();
        if (metaTable != null) {
            byte[] thrift = new TSerializer(new TCompactProtocol.Factory()).serialize(metaTable);
            data.writeInt(thrift.length);
            data.write(thrift);
        } else {
            LOG.warn("Meta store table is null. The recipient is probably expecting an instance.");
            data.writeInt(0);
        }
    } catch (TException e) {
        throw new IOException("Error serializing meta store table.", e);
    } finally {
        data.close();
    }
    return PROLOG_V1 + new String(Base64.encodeBase64(bytes.toByteArray()), Charset.forName("UTF-8"));
}
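Decoding reverses these steps: strip the version prolog, Base64-decode, read the primitive fields in the same order, then hand the remaining bytes to a TDeserializer. A sketch only (this is not the actual Hive decode implementation; variable names are illustrative):

    // Sketch only: reverse of encode(), assuming the PROLOG_V1 prefix has been validated.
    byte[] decoded = Base64.decodeBase64(encoded.substring(PROLOG_V1.length()));
    DataInputStream in = new DataInputStream(new ByteArrayInputStream(decoded));
    String databaseName = in.readUTF();
    String tableName = in.readUTF();
    boolean createPartitions = in.readBoolean();
    long writeId = in.readLong();
    byte tableTypeId = in.readByte();
    int thriftLength = in.readInt();
    Table metaTable = null;
    if (thriftLength > 0) {
        byte[] thrift = new byte[thriftLength];
        in.readFully(thrift);
        metaTable = new Table();
        new TDeserializer(new TCompactProtocol.Factory()).deserialize(metaTable, thrift);
    }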
use of org.apache.thrift.TSerializer in project vcell by virtualcell.
the class OptimizationApiTest method main.
public static void main(String[] args) {
    try {
        boolean bIgnoreCertProblems = true;
        boolean bIgnoreHostMismatch = true;
        VCellApiClient apiClient = new VCellApiClient(host, port, bIgnoreCertProblems, bIgnoreHostMismatch);
        File optProbFile = new File("../pythonScripts/VCell_Opt/optprob.bin");
        System.out.println("using optProblem: " + optProbFile.getAbsolutePath());
        OptProblem optProblem = readOptProblem(optProbFile);
        TSerializer serializer = new TSerializer(new TJSONProtocol.Factory());
        String optProblemJson = serializer.toString(optProblem);
        ArrayList<String> jobIDs = new ArrayList<String>();
        jobIDs.add(apiClient.submitOptimization(optProblemJson));
        jobIDs.add(apiClient.submitOptimization(optProblemJson));
        jobIDs.add(apiClient.submitOptimization(optProblemJson));
        jobIDs.add(apiClient.submitOptimization(optProblemJson));
        boolean done = false;
        while (!done) {
            done = true;
            for (String jobID : jobIDs) {
                String optRunJson = apiClient.getOptRunJson(jobID);
                TDeserializer deserializer = new TDeserializer(new TJSONProtocol.Factory());
                OptRun optRun = new OptRun();
                deserializer.deserialize(optRun, optRunJson.getBytes());
                OptRunStatus status = optRun.status;
                if (status != OptRunStatus.Complete && status != OptRunStatus.Failed) {
                    done = false;
                }
                if (status == OptRunStatus.Complete) {
                    System.out.println("job " + jobID + ": status " + status + " " + optRun.getOptResultSet().toString());
                } else {
                    System.out.println("job " + jobID + ": status " + status);
                }
            }
            try {
                Thread.sleep(1000);
            } catch (InterruptedException e) {
            }
        }
        System.out.println("done with all jobs");
    } catch (Exception e) {
        e.printStackTrace();
    }
}
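The readOptProblem helper is referenced but not shown. A minimal sketch under the assumption that optprob.bin holds a Thrift-serialized OptProblem written with the binary protocol (the protocol choice is an assumption, not taken from the VCell source):

    // Sketch only: load the raw bytes and deserialize them into an OptProblem.
    private static OptProblem readOptProblem(File optProbFile) throws IOException, TException {
        byte[] bytes = java.nio.file.Files.readAllBytes(optProbFile.toPath());
        OptProblem optProblem = new OptProblem();
        new TDeserializer(new TBinaryProtocol.Factory()).deserialize(optProblem, bytes); // protocol assumed
        return optProblem;
    }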