Use of java.io.DataOutput in project pinot by linkedin.
The class DimensionKey, method toBytes.
/**
 * Serializes this dimension key as an int count followed by length-prefixed UTF-8 values.
 *
 * @return the serialized byte representation of this key
 * @throws IOException if writing to the in-memory stream fails
 */
public byte[] toBytes() throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutput out = new DataOutputStream(baos);
    // write the number of dimensions
    out.writeInt(dimensionValues.length);
    // write each value as a length-prefixed UTF-8 byte array
    for (String dimensionValue : dimensionValues) {
        byte[] bytes = dimensionValue.getBytes(Charset.forName("utf-8"));
        out.writeInt(bytes.length);
        out.write(bytes);
    }
    baos.close();
    byte[] byteArray = baos.toByteArray();
    // sanity check: the serialized form must round-trip through fromBytes
    try {
        DimensionKey key = fromBytes(byteArray);
    } catch (Exception e) {
        LOGGER.info("input key:{}", Arrays.toString(dimensionValues));
        LOGGER.info("generated:{}", Arrays.toString(byteArray));
        throw new RuntimeException(e);
    }
    return byteArray;
}
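For reference, the matching deserializer simply reads the same layout back in order. A minimal sketch, assuming the length-prefixed format written above; the helper name is ours, and Pinot's actual DimensionKey.fromBytes may differ in detail:

import java.io.ByteArrayInputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

// Hypothetical helper, not part of Pinot: decodes the layout produced by toBytes().
public static String[] dimensionValuesFromBytes(byte[] bytes) throws IOException {
    DataInput in = new DataInputStream(new ByteArrayInputStream(bytes));
    // read the number of dimensions
    int count = in.readInt();
    String[] values = new String[count];
    for (int i = 0; i < count; i++) {
        // each value is an int length followed by that many UTF-8 bytes
        byte[] buf = new byte[in.readInt()];
        in.readFully(buf);
        values[i] = new String(buf, StandardCharsets.UTF_8);
    }
    return values;
}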
Use of java.io.DataOutput in project hadoop by apache.
The class TestContentSummary, method testWrite.
// check the write method
@Test
public void testWrite() throws IOException {
    long length = 11111;
    long fileCount = 22222;
    long directoryCount = 33333;
    long quota = 44444;
    long spaceConsumed = 55555;
    long spaceQuota = 66666;
    ContentSummary contentSummary = new ContentSummary.Builder()
        .length(length)
        .fileCount(fileCount)
        .directoryCount(directoryCount)
        .quota(quota)
        .spaceConsumed(spaceConsumed)
        .spaceQuota(spaceQuota)
        .build();
    // mock the DataOutput and verify the six longs are written in this exact order
    DataOutput out = mock(DataOutput.class);
    InOrder inOrder = inOrder(out);
    contentSummary.write(out);
    inOrder.verify(out).writeLong(length);
    inOrder.verify(out).writeLong(fileCount);
    inOrder.verify(out).writeLong(directoryCount);
    inOrder.verify(out).writeLong(quota);
    inOrder.verify(out).writeLong(spaceConsumed);
    inOrder.verify(out).writeLong(spaceQuota);
}
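The read side can be checked the same way with a mocked DataInput. A sketch under the assumption that ContentSummary implements Writable symmetrically, reading the same six longs back in order; static imports from org.mockito.Mockito and org.junit.Assert are assumed, and the test name is ours:

@Test
public void testReadFieldsSketch() throws IOException {
    DataInput in = mock(DataInput.class);
    // hand back the six longs in the order write() emits them
    when(in.readLong()).thenReturn(11111L, 22222L, 33333L, 44444L, 55555L, 66666L);
    ContentSummary contentSummary = new ContentSummary.Builder().build();
    contentSummary.readFields(in);
    assertEquals(11111L, contentSummary.getLength());
    assertEquals(22222L, contentSummary.getFileCount());
    assertEquals(33333L, contentSummary.getDirectoryCount());
}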
Use of java.io.DataOutput in project hadoop by apache.
The class TestTypedBytesWritable, method testIO.
public void testIO() throws IOException {
    TypedBytesWritable tbw = new TypedBytesWritable();
    tbw.setValue(12345);
    // serialize into an in-memory buffer
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutput dout = new DataOutputStream(baos);
    tbw.write(dout);
    // deserialize from the same bytes and compare with the original
    ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
    DataInput din = new DataInputStream(bais);
    TypedBytesWritable readTbw = new TypedBytesWritable();
    readTbw.readFields(din);
    assertEquals(tbw, readTbw);
}
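The same write/readFields round trip works for any Hadoop Writable, so it can be factored into a small generic helper. A minimal sketch; the helper name is ours, not part of Hadoop:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import org.apache.hadoop.io.Writable;

// Serializes `original` to an in-memory buffer, then populates `empty` from those bytes.
public static <T extends Writable> T roundTrip(T original, T empty) throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    original.write(new DataOutputStream(baos));
    empty.readFields(new DataInputStream(new ByteArrayInputStream(baos.toByteArray())));
    return empty;
}

With this helper, the assertion in the test above shrinks to assertEquals(tbw, roundTrip(tbw, new TypedBytesWritable())).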
Use of java.io.DataOutput in project hadoop by apache.
The class HttpFSFileSystem, method getFileChecksum.
@Override
public FileChecksum getFileChecksum(Path f) throws IOException {
    Map<String, String> params = new HashMap<String, String>();
    params.put(OP_PARAM, Operation.GETFILECHECKSUM.toString());
    HttpURLConnection conn = getConnection(Operation.GETFILECHECKSUM.getMethod(), params, f, true);
    HttpExceptionUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
    final JSONObject json = (JSONObject) ((JSONObject) HttpFSUtils.jsonParse(conn)).get(FILE_CHECKSUM_JSON);
    // expose the parsed JSON response as a read-only FileChecksum
    return new FileChecksum() {

        @Override
        public String getAlgorithmName() {
            return (String) json.get(CHECKSUM_ALGORITHM_JSON);
        }

        @Override
        public int getLength() {
            return ((Long) json.get(CHECKSUM_LENGTH_JSON)).intValue();
        }

        @Override
        public byte[] getBytes() {
            return StringUtils.hexStringToByte((String) json.get(CHECKSUM_BYTES_JSON));
        }

        // this checksum is materialized from the server's JSON response,
        // so Writable (re)serialization is deliberately unsupported
        @Override
        public void write(DataOutput out) throws IOException {
            throw new UnsupportedOperationException();
        }

        @Override
        public void readFields(DataInput in) throws IOException {
            throw new UnsupportedOperationException();
        }
    };
}
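Callers never hit the unsupported Writable methods unless they try to re-serialize the checksum; typical use only reads the accessors. A hypothetical usage sketch, where the URI, host, and path are illustrative and StringUtils is Hadoop's org.apache.hadoop.util.StringUtils:

FileSystem fs = FileSystem.get(URI.create("webhdfs://httpfs-host:14000"), new Configuration());
FileChecksum checksum = fs.getFileChecksum(new Path("/data/file.txt"));
System.out.println(checksum.getAlgorithmName() + ": "
    + StringUtils.byteToHexString(checksum.getBytes()));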
Use of java.io.DataOutput in project asterixdb by apache.
The class NonTaggedDataFormat, method partitioningEvaluatorFactory.
@SuppressWarnings("unchecked")
@Override
public Triple<IScalarEvaluatorFactory, ScalarFunctionCallExpression, IAType> partitioningEvaluatorFactory(
        ARecordType recType, List<String> fldName) throws AlgebricksException {
    String[] names = recType.getFieldNames();
    int n = names.length;
    if (fldName.size() == 1) {
        // simple (top-level) field: resolve the name to an index and access by position
        for (int i = 0; i < n; i++) {
            if (names[i].equals(fldName.get(0))) {
                IScalarEvaluatorFactory recordEvalFactory =
                        new ColumnAccessEvalFactory(GlobalConfig.DEFAULT_INPUT_DATA_COLUMN);
                ArrayBackedValueStorage abvs = new ArrayBackedValueStorage();
                DataOutput dos = abvs.getDataOutput();
                try {
                    AInt32 ai = new AInt32(i);
                    SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(ai.getType())
                            .serialize(ai, dos);
                } catch (HyracksDataException e) {
                    throw new AlgebricksException(e);
                }
                IScalarEvaluatorFactory fldIndexEvalFactory =
                        new ConstantEvalFactory(Arrays.copyOf(abvs.getByteArray(), abvs.getLength()));
                IScalarEvaluatorFactory evalFactory =
                        new FieldAccessByIndexEvalFactory(recordEvalFactory, fldIndexEvalFactory, recType);
                IFunctionInfo finfoAccess =
                        BuiltinFunctions.getAsterixFunctionInfo(BuiltinFunctions.FIELD_ACCESS_BY_INDEX);
                ScalarFunctionCallExpression partitionFun = new ScalarFunctionCallExpression(finfoAccess,
                        new MutableObject<ILogicalExpression>(new VariableReferenceExpression(METADATA_DUMMY_VAR)),
                        new MutableObject<ILogicalExpression>(
                                new ConstantExpression(new AsterixConstantValue(new AInt32(i)))));
                return new Triple<IScalarEvaluatorFactory, ScalarFunctionCallExpression, IAType>(evalFactory,
                        partitionFun, recType.getFieldTypes()[i]);
            }
        }
    } else {
        // nested field path: serialize the full name list and access by nested path
        IScalarEvaluatorFactory recordEvalFactory =
                new ColumnAccessEvalFactory(GlobalConfig.DEFAULT_INPUT_DATA_COLUMN);
        ArrayBackedValueStorage abvs = new ArrayBackedValueStorage();
        DataOutput dos = abvs.getDataOutput();
        AOrderedList as = new AOrderedList(fldName);
        try {
            SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(as.getType()).serialize(as, dos);
        } catch (HyracksDataException e) {
            throw new AlgebricksException(e);
        }
        IScalarEvaluatorFactory evalFactory =
                new FieldAccessNestedEvalFactory(recordEvalFactory, recType, fldName);
        IFunctionInfo finfoAccess =
                BuiltinFunctions.getAsterixFunctionInfo(BuiltinFunctions.FIELD_ACCESS_NESTED);
        ScalarFunctionCallExpression partitionFun = new ScalarFunctionCallExpression(finfoAccess,
                new MutableObject<ILogicalExpression>(new VariableReferenceExpression(METADATA_DUMMY_VAR)),
                new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(as))));
        return new Triple<IScalarEvaluatorFactory, ScalarFunctionCallExpression, IAType>(evalFactory,
                partitionFun, recType.getSubFieldType(fldName));
    }
    throw new AlgebricksException("Could not find field " + fldName + " in the schema.");
}
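Both branches share one serialization idiom: ArrayBackedValueStorage exposes a DataOutput, a serializer writes an ADM value into it, and the accumulated bytes back a ConstantEvalFactory. A distilled sketch of that idiom; constantFor is our name, not an AsterixDB API, and the imports match the method above:

// Serialize an ADM value into reusable storage and wrap the bytes as a constant evaluator.
// HyracksDataException is propagated here for brevity instead of being rewrapped.
static IScalarEvaluatorFactory constantFor(AInt32 value) throws HyracksDataException {
    ArrayBackedValueStorage abvs = new ArrayBackedValueStorage();
    DataOutput dos = abvs.getDataOutput();
    SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(value.getType()).serialize(value, dos);
    // copy exactly the written bytes; the backing array may be larger than the payload
    return new ConstantEvalFactory(Arrays.copyOf(abvs.getByteArray(), abvs.getLength()));
}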