Use of org.msgpack.core.MessageUnpacker in project fluency by komamitsu, from the class FluencyTest, method testWithAckResponseButWrongReceiveToken.
// Verifies the client-side handling of a BAD ack: the test server replies to an
// ack-enabled request with a random (never-matching) ack token, and the run is
// expected to end with a TimeoutException because the chunk is never confirmed.
@Theory
public void testWithAckResponseButWrongReceiveToken(final boolean sslEnabled) throws Throwable {
Exception exception = new ConfigurableTestServer(sslEnabled).run(new ConfigurableTestServer.WithClientSocket() {
@Override
public void run(Socket clientSocket) throws Exception {
// Server side: decode the request the Fluency client sends over the socket.
MessageUnpacker unpacker = MessagePack.newDefaultUnpacker(clientSocket.getInputStream());
// The request is a 3-element array: [tag, payload, options].
assertEquals(3, unpacker.unpackArrayHeader());
assertEquals("foo.bar", unpacker.unpackString());
ImmutableRawValue rawValue = unpacker.unpackValue().asRawValue();
Map<Value, Value> map = unpacker.unpackValue().asMapValue().map();
// Options map carries exactly two entries: the payload size and the chunk token.
assertEquals(2, map.size());
// "size" option must equal the payload's byte length.
assertEquals(rawValue.asByteArray().length, map.get(KEY_OPTION_SIZE).asIntegerValue().asInt());
assertNotNull(map.get(KEY_OPTION_CHUNK).asRawValue().asString());
// Reply with an ack whose token is a fresh random UUID, i.e. it deliberately
// does NOT match the chunk token above — the scenario under test.
MessagePacker packer = MessagePack.newDefaultPacker(clientSocket.getOutputStream());
packer.packMapHeader(1).packString("ack").packString(UUID.randomUUID().toString()).close();
// Close the input stream after closing the output stream to avoid closing a socket too early
unpacker.close();
}
}, new ConfigurableTestServer.WithServerPort() {
@Override
public void run(int serverPort) throws Exception {
// Client side: emit a single event with ack-response mode enabled so the
// client waits for (and verifies) the server's ack token.
Fluency fluency = Fluency.defaultFluency(serverPort, new Fluency.Config().setSslEnabled(sslEnabled).setAckResponseMode(true));
fluency.emit("foo.bar", new HashMap<String, Object>());
fluency.close();
}
}, 5000);
// The mismatched ack token means the chunk is never considered delivered,
// so the 5000 ms server run must finish with a TimeoutException.
assertEquals(exception.getClass(), TimeoutException.class);
}
Use of org.msgpack.core.MessageUnpacker in project td-client-java by treasure-data, from the class Example, method main.
/**
 * Example of a typical td-client-java workflow: list databases and tables,
 * submit a Presto query, poll with exponential back-off until it finishes,
 * then stream the msgpack.gz result and count the returned rows.
 *
 * Fixes over the original: the MessageUnpacker is closed via
 * try-with-resources (the original leaked it if unpacking threw), and the
 * deprecated Guava Throwables.propagate(e) is replaced by a plain
 * wrap-and-rethrow.
 */
public static void main(String[] args) {
    TDClient client = TDClient.newClient();
    try {
        // Retrieve database and table names
        List<TDDatabase> databases = client.listDatabases();
        TDDatabase db = databases.get(0);
        System.out.println("database: " + db.getName());
        for (TDTable table : client.listTables(db.getName())) {
            System.out.println(" table: " + table);
        }
        // Submit a new Presto query
        String jobId = client.submit(TDJobRequest.newPrestoQuery("sample_datasets", "select count(1) cnt from www_access"));
        // Poll job status with exponential back-off until the query finishes
        ExponentialBackOff backOff = new ExponentialBackOff();
        TDJobSummary job = client.jobStatus(jobId);
        while (!job.getStatus().isFinished()) {
            Thread.sleep(backOff.nextWaitTimeMillis());
            job = client.jobStatus(jobId);
        }
        // Read the detailed job information
        TDJob jobInfo = client.jobInfo(jobId);
        System.out.println("log:\n" + jobInfo.getCmdOut());
        System.out.println("error log:\n" + jobInfo.getStdErr());
        // Read the job results in msgpack.gz format
        client.jobResult(jobId, TDResultFormat.MESSAGE_PACK_GZ, new Function<InputStream, Integer>() {
            @Override
            public Integer apply(InputStream input) {
                int count = 0;
                // try-with-resources: the unpacker (and the gzip stream it wraps)
                // is closed even if unpacking fails part-way through.
                try (MessageUnpacker unpacker = MessagePack.newDefaultUnpacker(new GZIPInputStream(input))) {
                    while (unpacker.hasNext()) {
                        // Each row of the query result is array type value (e.g., [1, "name", ...])
                        ArrayValue array = unpacker.unpackValue().asArrayValue();
                        System.out.println(array);
                        count++;
                    }
                } catch (Exception e) {
                    // Replaces deprecated Throwables.propagate(e); no new dependency needed.
                    throw new RuntimeException(e);
                }
                return count;
            }
        });
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        client.close();
    }
}
Use of org.msgpack.core.MessageUnpacker in project td-client-java by treasure-data, from the class TestTDClient, method testBulkImport.
// End-to-end bulk-import integration test: create a session, upload three
// msgpack.gz parts (each containing one intentionally invalid record), delete
// one part, perform, verify error/valid record accounting, commit, and check
// the imported table's row count and columns.
@Test
public void testBulkImport() throws Exception {
final String bulkImportTable = newTemporaryName("sample_bi");
client.deleteTableIfExists(SAMPLE_DB, bulkImportTable);
client.createTableIfNotExists(SAMPLE_DB, bulkImportTable);
final int numRowsInPart = 10;
final int numParts = 3;
// NOTE(review): "hhmmss" is the 12-hour clock; "HHmmss" would avoid AM/PM
// session-name collisions — confirm whether this is intentional.
String dateStr = new SimpleDateFormat("yyyyMMddhhmmss").format(new Date());
final String session = "td-client-java-test-session-" + dateStr;
try {
client.createBulkImportSession(session, SAMPLE_DB, bulkImportTable);
List<TDBulkImportSession> sessionList = client.listBulkImportSessions();
// Both apply() and test() are overridden so the predicate works whether the
// Guava Predicate or the java.util.function.Predicate method is invoked.
TDBulkImportSession foundInList = Iterables.find(sessionList, new Predicate<TDBulkImportSession>() {
@Override
public boolean apply(TDBulkImportSession input) {
return test(input);
}
@Override
public boolean test(TDBulkImportSession input) {
return input.getName().equals(session);
}
});
TDBulkImportSession bs = client.getBulkImportSession(session);
logger.info("bulk import session: {}, error message: {}", bs.getJobId(), bs.getErrorMessage());
// A freshly created session should be in the uploading state and be
// consistent with the entry found via listBulkImportSessions().
assertEquals(session, bs.getName());
assertEquals(SAMPLE_DB, bs.getDatabaseName());
assertEquals(bulkImportTable, bs.getTableName());
assertTrue(bs.isUploading());
assertEquals(foundInList.getStatus(), bs.getStatus());
// Running counter shared across parts so every row gets a unique
// time/event value.
int count = 0;
final long time = System.currentTimeMillis() / 1000;
// Upload part 0, 1, 2
for (int i = 0; i < 3; ++i) {
String partName = "bip" + i;
// Prepare msgpack.gz
ByteArrayOutputStream buf = new ByteArrayOutputStream();
OutputStream out = new GZIPOutputStream(buf);
MessagePacker packer = MessagePack.newDefaultPacker(out);
for (int n = 0; n < numRowsInPart; ++n) {
ValueFactory.MapBuilder b = ValueFactory.newMapBuilder();
b.put(ValueFactory.newString("time"), ValueFactory.newInteger(time + count));
b.put(ValueFactory.newString("event"), ValueFactory.newString("log" + count));
b.put(ValueFactory.newString("description"), ValueFactory.newString("sample data"));
packer.packValue(b.build());
count += 1;
}
// Embed an error record (nil map key) so each part yields exactly one
// error record during the perform step.
packer.packValue(ValueFactory.newMap(new Value[] { ValueFactory.newNil(), ValueFactory.newString("invalid data") }));
packer.close();
out.close();
File tmpFile = File.createTempFile(partName, ".msgpack.gz", new File("target"));
Files.write(tmpFile.toPath(), buf.toByteArray());
client.uploadBulkImportPart(session, partName, tmpFile);
// list parts
List<String> parts = client.listBulkImportParts(session);
assertTrue(parts.contains(partName));
// freeze test
client.freezeBulkImportSession(session);
// unfreeze test — the session must be unfrozen again or the next
// loop iteration could not upload another part.
client.unfreezeBulkImportSession(session);
}
// delete the last
client.deleteBulkImportPart(session, "bip2");
List<String> parts = client.listBulkImportParts(session);
assertTrue(!parts.contains("bip2"));
// Freeze the session
client.freezeBulkImportSession(session);
// Perform the session
client.performBulkImportSession(session);
// Wait the perform completion
ExponentialBackOff backoff = new ExponentialBackOff();
long deadline = System.currentTimeMillis() + TimeUnit.MINUTES.toMillis(10);
bs = client.getBulkImportSession(session);
while (bs.getStatus() == TDBulkImportSession.ImportStatus.PERFORMING) {
assertFalse(bs.isUploading());
if (System.currentTimeMillis() > deadline) {
throw new IllegalStateException("timeout error: bulk import perform");
}
logger.debug("Waiting bulk import completion");
Thread.sleep(backoff.nextWaitTimeMillis());
bs = client.getBulkImportSession(session);
}
// Check session contents — the embedded invalid records must have been
// flagged during the perform step.
assertTrue(bs.hasErrorOnPerform());
logger.debug(bs.getErrorMessage());
// Error record check: stream the msgpack.gz error records and count them.
int errorCount = client.getBulkImportErrorRecords(session, new Function<InputStream, Integer>() {
int errorRecordCount = 0;
@Override
public Integer apply(InputStream input) {
try {
MessageUnpacker unpacker = MessagePack.newDefaultUnpacker(new GZIPInputStream(input));
while (unpacker.hasNext()) {
Value v = unpacker.unpackValue();
logger.info("error record: " + v);
errorRecordCount += 1;
}
return errorRecordCount;
} catch (IOException e) {
throw Throwables.propagate(e);
}
}
});
// bip2 was deleted, so only numParts - 1 parts were performed; each of
// those parts carries exactly one invalid record.
final int numValidParts = numParts - 1;
assertEquals(numValidParts, errorCount);
assertEquals(0, bs.getErrorParts());
assertEquals(numValidParts, bs.getValidParts());
assertEquals(numValidParts, bs.getErrorRecords());
assertEquals(numValidParts * numRowsInPart, bs.getValidRecords());
// Commit the session
deadline = System.currentTimeMillis() + TimeUnit.MINUTES.toMillis(5);
client.commitBulkImportSession(session);
// Wait the commit completion
bs = client.getBulkImportSession(session);
while (bs.getStatus() != TDBulkImportSession.ImportStatus.COMMITTED) {
if (System.currentTimeMillis() > deadline) {
throw new IllegalStateException("timeout error: bulk import commit");
}
logger.info("Waiting bulk import perform step completion");
Thread.sleep(TimeUnit.SECONDS.toMillis(5));
bs = client.getBulkImportSession(session);
}
// Check the data
TDTable imported = Iterables.find(client.listTables(SAMPLE_DB), new Predicate<TDTable>() {
@Override
public boolean apply(TDTable input) {
return test(input);
}
@Override
public boolean test(TDTable input) {
return input.getName().equals(bulkImportTable);
}
});
// 2 surviving parts x 10 rows each = numRowsInPart * 2 committed rows.
assertEquals(numRowsInPart * 2, imported.getRowCount());
List<TDColumn> columns = imported.getColumns();
logger.info(Joiner.on(", ").join(columns));
// event, description, (time)
assertEquals(2, columns.size());
} finally {
client.deleteBulkImportSession(session);
}
}
Aggregations