Use of java.io.DataOutput in project hive by apache:
class TestWorker, method inputSplitNullBase.
@Test
public void inputSplitNullBase() throws Exception {
    String delta1 = "/warehouse/foo/delta_2_3";
    String delta2 = "/warehouse/foo/delta_4_7";
    HiveConf conf = new HiveConf();
    // Scratch file in the system temp dir; deleted in the finally block below.
    Path file = new Path(System.getProperty("java.io.tmpdir") + System.getProperty("file.separator") + "newWriteInputSplitTest");
    FileSystem fs = FileSystem.get(conf);
    try {
        // try-with-resources guarantees the stream is closed even if a write throws.
        try (FSDataOutputStream os = fs.create(file)) {
            for (int i = 0; i < 10; i++) {
                os.writeBytes("mary had a little lamb its fleece was white as snow\n");
            }
        }
        List<Path> files = new ArrayList<Path>(1);
        files.add(file);
        Path[] deltas = new Path[2];
        deltas[0] = new Path(delta1);
        deltas[1] = new Path(delta2);
        // Null base dir: the split must round-trip through write/readFields
        // and report a null base afterwards.
        CompactorMR.CompactorInputSplit split = new CompactorMR.CompactorInputSplit(conf, 3, files, null, deltas);
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        DataOutput out = new DataOutputStream(buf);
        split.write(out);
        split = new CompactorMR.CompactorInputSplit();
        DataInput in = new DataInputStream(new ByteArrayInputStream(buf.toByteArray()));
        split.readFields(in);
        Assert.assertEquals(3, split.getBucket());
        Assert.assertNull(split.getBaseDir());
        deltas = split.getDeltaDirs();
        Assert.assertEquals(2, deltas.length);
        Assert.assertEquals(delta1, deltas[0].toString());
        Assert.assertEquals(delta2, deltas[1].toString());
    } finally {
        // Clean up the scratch file so repeated runs start from a clean slate.
        fs.delete(file, false);
    }
}
Use of java.io.DataOutput in project hive by apache:
class TestDefaultHCatRecord, method testRYW.
/**
 * Round-trip test: serialize HCatRecords to a file, read them back, and
 * verify each deserialized record equals its source and that the stream
 * was fully consumed.
 * @throws IOException on file I/O failure
 */
public void testRYW() throws IOException {
    File f = new File("binary.dat");
    f.delete();
    f.createNewFile();
    f.deleteOnExit();
    HCatRecord[] recs = getHCatRecords();
    // try-with-resources closes the stream even if a write throws;
    // close() also flushes, so no explicit flush() is needed.
    try (DataOutputStream outStream = new DataOutputStream(new FileOutputStream(f))) {
        for (HCatRecord rec : recs) {
            rec.write(outStream);
        }
    }
    try (DataInputStream inpStream = new DataInputStream(new FileInputStream(f))) {
        for (int i = 0; i < recs.length; i++) {
            HCatRecord rec = new DefaultHCatRecord();
            rec.readFields(inpStream);
            StringBuilder msg = new StringBuilder("recs[" + i + "]='" + recs[i] + "' rec='" + rec + "'");
            boolean isEqual = HCatDataCheckUtil.recordsEqual(recs[i], rec, msg);
            Assert.assertTrue(msg.toString(), isEqual);
        }
        // Expected value comes first in JUnit's assertEquals: every byte
        // written must have been consumed by the reads above.
        Assert.assertEquals(0, inpStream.available());
    }
}
Use of java.io.DataOutput in project nanohttpd by NanoHttpd:
class HTTPSession, method parseBody.
@Override
public void parseBody(Map<String, String> files) throws IOException, ResponseException {
RandomAccessFile randomAccessFile = null;
try {
long size = getBodySize();
ByteArrayOutputStream baos = null;
DataOutput requestDataOutput = null;
// Store the request in memory or a file, depending on size
if (size < MEMORY_STORE_LIMIT) {
baos = new ByteArrayOutputStream();
requestDataOutput = new DataOutputStream(baos);
} else {
// Large body: spill to a temp file (RandomAccessFile implements DataOutput).
randomAccessFile = getTmpBucket();
requestDataOutput = randomAccessFile;
}
// Read all the body and write it to request_data_output
byte[] buf = new byte[REQUEST_BUFFER_LEN];
// rlen carries over leftover bytes from header parsing; a read() of -1
// (EOF) makes the loop condition false on the next iteration.
while (this.rlen >= 0 && size > 0) {
this.rlen = this.inputStream.read(buf, 0, (int) Math.min(size, REQUEST_BUFFER_LEN));
size -= this.rlen;
if (this.rlen > 0) {
requestDataOutput.write(buf, 0, this.rlen);
}
}
// Wrap whichever store was used in a ByteBuffer for downstream decoding.
ByteBuffer fbuf = null;
if (baos != null) {
fbuf = ByteBuffer.wrap(baos.toByteArray(), 0, baos.size());
} else {
// Memory-map the spilled file; rewind it so later readers start at 0.
fbuf = randomAccessFile.getChannel().map(FileChannel.MapMode.READ_ONLY, 0, randomAccessFile.length());
randomAccessFile.seek(0);
}
// in data section, too, read it:
if (Method.POST.equals(this.method)) {
ContentType contentType = new ContentType(this.headers.get("content-type"));
if (contentType.isMultipart()) {
String boundary = contentType.getBoundary();
if (boundary == null) {
throw new ResponseException(Status.BAD_REQUEST, "BAD REQUEST: Content type is multipart/form-data but boundary missing. Usage: GET /example/file.html");
}
decodeMultipartFormData(contentType, fbuf, this.parms, files);
} else {
byte[] postBytes = new byte[fbuf.remaining()];
fbuf.get(postBytes);
String postLine = new String(postBytes, contentType.getEncoding()).trim();
// Handle application/x-www-form-urlencoded
if ("application/x-www-form-urlencoded".equalsIgnoreCase(contentType.getContentType())) {
decodeParms(postLine, this.parms);
} else if (postLine.length() != 0) {
// Special case for raw POST data => create a
// special files entry "postData" with raw content
// data
files.put(POST_DATA, postLine);
}
}
} else if (Method.PUT.equals(this.method)) {
// PUT body is always persisted to a temp file entry named "content".
files.put("content", saveTmpFile(fbuf, 0, fbuf.limit(), null));
}
} finally {
// Safe even when the body fit in memory and randomAccessFile is null.
NanoHTTPD.safeClose(randomAccessFile);
}
}
Use of java.io.DataOutput in project stream-lib by addthis:
class HyperLogLog, method getBytes.
/**
 * Serializes this sketch to a byte array by delegating to writeBytes.
 *
 * @return the serialized representation
 * @throws IOException if writeBytes fails (the in-memory buffer itself does not throw)
 */
@Override
public byte[] getBytes() throws IOException {
    // Buffer all DataOutput writes in memory, then snapshot the buffer.
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    writeBytes(new DataOutputStream(buffer));
    return buffer.toByteArray();
}
Use of java.io.DataOutput in project pinot by linkedin:
class DimensionKey, method toBytes.
/**
 * Serializes this key as an int count of dimensions followed by, for each
 * dimension value, an int byte length and the value's UTF-8 bytes.
 *
 * @return the serialized key bytes
 * @throws IOException if writing to the in-memory buffer fails
 */
public byte[] toBytes() throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutput out = new DataOutputStream(baos);
    // write the number of dimensions
    out.writeInt(dimensionValues.length);
    // then each value as a length-prefixed UTF-8 byte run
    for (String dimensionValue : dimensionValues) {
        // StandardCharsets.UTF_8 avoids a charset-name lookup per value
        // (and, unlike Charset.forName, can never throw at runtime).
        byte[] bytes = dimensionValue.getBytes(java.nio.charset.StandardCharsets.UTF_8);
        out.writeInt(bytes.length);
        out.write(bytes);
    }
    byte[] byteArray = baos.toByteArray();
    // Sanity check: the produced bytes must round-trip through fromBytes;
    // the reconstructed key itself is not needed, only that parsing succeeds.
    try {
        fromBytes(byteArray);
    } catch (Exception e) {
        LOGGER.info("input key:{}", Arrays.toString(dimensionValues));
        LOGGER.info("generated:{}", Arrays.toString(byteArray));
        throw new RuntimeException(e);
    }
    return byteArray;
}
Aggregations