Use of org.apache.hadoop.io.DataInputBuffer in project tez by apache: the class TestIFile, method testWritingEmptyKeyValues.
// Write empty key value pairs
@Test(timeout = 5000)
public void testWritingEmptyKeyValues() throws IOException {
  DataInputBuffer key = new DataInputBuffer();
  DataInputBuffer value = new DataInputBuffer();
  IFile.Writer writer = new IFile.Writer(defaultConf, localFs, outputPath, null, null, null, null, null);
  writer.append(key, value);
  writer.append(key, value);
  writer.append(key, value);
  writer.append(key, value);
  writer.close();
  // Read the records back and verify that every key and value is empty
  IFile.Reader reader = new Reader(localFs, outputPath, null, null, null, false, -1, 1024);
  DataInputBuffer keyIn = new DataInputBuffer();
  DataInputBuffer valIn = new DataInputBuffer();
  int records = 0;
  while (reader.nextRawKey(keyIn)) {
    reader.nextRawValue(valIn);
    records++;
    assert (keyIn.getLength() == 0);
    assert (valIn.getLength() == 0);
  }
  assertTrue("Number of records read does not match", (records == 4));
  reader.close();
}
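The appended buffers are never reset here, so every record is zero length; an empty DataInputBuffer reports getLength() == 0, which is exactly what the read loop asserts. For a non-empty record, the key and value would first be serialized into in-memory output buffers and the DataInputBuffers pointed at those bytes. A minimal standalone sketch of that preparation step, using Text data and a hypothetical class name (it stops short of constructing an IFile.Writer):

import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.Text;
import java.io.IOException;

public class KeyValueBufferSketch {
  public static void main(String[] args) throws IOException {
    // Serialize a key and a value into in-memory output buffers.
    DataOutputBuffer keyOut = new DataOutputBuffer();
    new Text("key-0").write(keyOut);
    DataOutputBuffer valOut = new DataOutputBuffer();
    new Text("value-0").write(valOut);
    // Point DataInputBuffers at the serialized bytes without copying;
    // a writer consumes the span between getPosition() and getLength().
    DataInputBuffer key = new DataInputBuffer();
    key.reset(keyOut.getData(), 0, keyOut.getLength());
    DataInputBuffer value = new DataInputBuffer();
    value.reset(valOut.getData(), 0, valOut.getLength());
    // writer.append(key, value) would now emit a non-empty record.
    System.out.println(key.getLength() + " key bytes, " + value.getLength() + " value bytes");
  }
}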
Use of org.apache.hadoop.io.DataInputBuffer in project systemml by apache: the class MatrixBlock, method readDenseBlock.
private void readDenseBlock(DataInput in) throws IOException, DMLRuntimeException {
  if (!allocateDenseBlock(false)) // allocate block
    denseBlock.reset(rlen, clen);
  DenseBlock a = getDenseBlock();
  long nnz = 0;
  if (in instanceof MatrixBlockDataInput) {
    // fast deserialize
    MatrixBlockDataInput mbin = (MatrixBlockDataInput) in;
    for (int i = 0; i < a.numBlocks(); i++)
      nnz += mbin.readDoubleArray(a.size(i), a.valuesAt(i));
  } else if (in instanceof DataInputBuffer && MRJobConfiguration.USE_BINARYBLOCK_SERIALIZATION) {
    // workaround because sequencefile.reader.next(key, value) does not yet support serialization framework
    DataInputBuffer din = (DataInputBuffer) in;
    try (FastBufferedDataInputStream mbin = new FastBufferedDataInputStream(din)) {
      for (int i = 0; i < a.numBlocks(); i++)
        nnz += mbin.readDoubleArray(a.size(i), a.valuesAt(i));
    }
  } else {
    // default deserialize
    for (int i = 0; i < rlen; i++) {
      double[] avals = a.values(i);
      int aix = a.pos(i);
      for (int j = 0; j < clen; j++)
        nnz += ((avals[aix + j] = in.readDouble()) != 0) ? 1 : 0;
    }
  }
  nonZeros = nnz;
}
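In the default path, doubles are read one at a time from the DataInput and non-zeros are counted as they are assigned. A minimal standalone sketch of that pattern against a DataInputBuffer, with a plain double[] standing in for the DenseBlock and a hypothetical class name:

import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import java.io.IOException;

public class DenseBlockRoundTripSketch {
  public static void main(String[] args) throws IOException {
    int rlen = 2, clen = 3;
    double[] block = { 1.0, 0.0, 2.5, 0.0, 0.0, 3.0 };
    // Serialize the dense values in row-major order.
    DataOutputBuffer out = new DataOutputBuffer();
    for (double v : block)
      out.writeDouble(v);
    // Read them back through a DataInputBuffer, counting non-zeros as they are assigned.
    DataInputBuffer in = new DataInputBuffer();
    in.reset(out.getData(), 0, out.getLength());
    double[] copy = new double[rlen * clen];
    long nnz = 0;
    for (int i = 0; i < rlen * clen; i++)
      nnz += ((copy[i] = in.readDouble()) != 0) ? 1 : 0;
    System.out.println("non-zeros: " + nnz); // prints 3
  }
}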
Use of org.apache.hadoop.io.DataInputBuffer in project accumulo by apache: the class TabletStateChangeIterator, method parseMigrations.
private Set<KeyExtent> parseMigrations(String migrations) {
  if (migrations == null)
    return Collections.emptySet();
  try {
    Set<KeyExtent> result = new HashSet<>();
    DataInputBuffer buffer = new DataInputBuffer();
    byte[] data = Base64.getDecoder().decode(migrations);
    buffer.reset(data, data.length);
    while (buffer.available() > 0) {
      KeyExtent extent = new KeyExtent();
      extent.readFields(buffer);
      result.add(extent);
    }
    return result;
  } catch (Exception ex) {
    throw new RuntimeException(ex);
  }
}
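The migrations string is the Writable serializations of the extents concatenated and Base64-encoded; the while (buffer.available() > 0) loop then reads records until the decoded buffer is exhausted. A minimal standalone sketch of both directions, using Text as a stand-in for KeyExtent (class name and sample values are illustrative):

import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.Text;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Base64;
import java.util.List;

public class MigrationsEncodingSketch {
  public static void main(String[] args) throws IOException {
    // Encode: concatenate the serialized records and Base64 the trimmed bytes.
    DataOutputBuffer out = new DataOutputBuffer();
    new Text("extent-1").write(out);
    new Text("extent-2").write(out);
    String migrations = Base64.getEncoder().encodeToString(Arrays.copyOf(out.getData(), out.getLength()));
    // Decode: reset a DataInputBuffer over the decoded bytes and read until exhausted,
    // as parseMigrations does with KeyExtent.readFields.
    byte[] data = Base64.getDecoder().decode(migrations);
    DataInputBuffer buffer = new DataInputBuffer();
    buffer.reset(data, data.length);
    List<Text> result = new ArrayList<>();
    while (buffer.available() > 0) {
      Text t = new Text();
      t.readFields(buffer);
      result.add(t);
    }
    System.out.println(result); // [extent-1, extent-2]
  }
}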
Use of org.apache.hadoop.io.DataInputBuffer in project accumulo by apache: the class MergeInfoTest, method readWrite.
private static MergeInfo readWrite(MergeInfo info) throws Exception {
  DataOutputBuffer buffer = new DataOutputBuffer();
  info.write(buffer);
  DataInputBuffer in = new DataInputBuffer();
  in.reset(buffer.getData(), 0, buffer.getLength());
  MergeInfo info2 = new MergeInfo();
  info2.readFields(in);
  assertEquals(info.getExtent(), info2.getExtent());
  assertEquals(info.getState(), info2.getState());
  assertEquals(info.getOperation(), info2.getOperation());
  return info2;
}
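A caller in the same test class would pass in a populated MergeInfo and inspect the returned copy. A hedged sketch of such a test method, assuming this Accumulo version's KeyExtent(Text, Text, Text) and MergeInfo(KeyExtent, Operation) constructors; the method name is illustrative:

// Illustrative only; constructor signatures are assumptions about this Accumulo version.
@Test
public void testMergeRoundTrip() throws Exception {
  KeyExtent extent = new KeyExtent(new Text("table1"), new Text("end"), new Text("begin"));
  MergeInfo info = new MergeInfo(extent, MergeInfo.Operation.MERGE);
  MergeInfo copy = readWrite(info);
  assertEquals(MergeInfo.Operation.MERGE, copy.getOperation());
}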
Use of org.apache.hadoop.io.DataInputBuffer in project accumulo by apache: the class MergeStats, method main.
public static void main(String[] args) throws Exception {
  ClientOpts opts = new ClientOpts();
  opts.parseArgs(MergeStats.class.getName(), args);
  Connector conn = opts.getConnector();
  Map<String, String> tableIdMap = conn.tableOperations().tableIdMap();
  for (Entry<String, String> entry : tableIdMap.entrySet()) {
    final String table = entry.getKey(), tableId = entry.getValue();
    String path = ZooUtil.getRoot(conn.getInstance().getInstanceID()) + Constants.ZTABLES + "/" + tableId + "/merge";
    MergeInfo info = new MergeInfo();
    if (ZooReaderWriter.getInstance().exists(path)) {
      byte[] data = ZooReaderWriter.getInstance().getData(path, new Stat());
      DataInputBuffer in = new DataInputBuffer();
      in.reset(data, data.length);
      info.readFields(in);
    }
    // columns: table name, merge state, operation, extent
    System.out.println(String.format("%25s %10s %10s %s", table, info.getState(), info.getOperation(), info.getExtent()));
  }
}