Use of java.io.DataInput in project che by eclipse.
The class FileMetadataSerializerTest, method readsProperties.
@Test
public void readsProperties() throws Exception {
    DataInput data = mock(DataInput.class);
    // Consecutive stubbed values: each call to readInt()/readUTF() returns the next one in order.
    when(data.readInt()).thenReturn(2, 1, 1);
    when(data.readUTF()).thenReturn("a", "x", "b", "z");
    Map<String, String> expected = ImmutableMap.of("a", "x", "b", "z");
    assertEquals(expected, metadataSerializer.read(data));
}
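The stubbing order implies the wire layout the serializer expects. Below is a minimal read loop consistent with that call sequence, assuming a layout of entry count, then key, value count, and value per entry; this layout is inferred from the mock, not from Che's sources, and PropertiesReader is a hypothetical stand-in for the serializer under test.

import java.io.DataInput;
import java.io.IOException;
import java.util.LinkedHashMap;
import java.util.Map;

public class PropertiesReader {
    public Map<String, String> read(DataInput in) throws IOException {
        int count = in.readInt();                 // number of properties (2 in the test)
        Map<String, String> props = new LinkedHashMap<>();
        for (int i = 0; i < count; i++) {
            String key = in.readUTF();            // "a", then "b"
            int values = in.readInt();            // 1 value per key in the test
            String value = null;
            for (int j = 0; j < values; j++) {
                value = in.readUTF();             // "x", then "z"; keeps the last value if several
            }
            props.put(key, value);
        }
        return props;
    }
}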
Use of java.io.DataInput in project hadoop by apache.
The class HttpFSFileSystem, method getFileChecksum.
@Override
public FileChecksum getFileChecksum(Path f) throws IOException {
    Map<String, String> params = new HashMap<String, String>();
    params.put(OP_PARAM, Operation.GETFILECHECKSUM.toString());
    HttpURLConnection conn = getConnection(Operation.GETFILECHECKSUM.getMethod(), params, f, true);
    HttpExceptionUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
    final JSONObject json = (JSONObject) ((JSONObject) HttpFSUtils.jsonParse(conn)).get(FILE_CHECKSUM_JSON);
    // Expose the parsed JSON as a read-only FileChecksum; Writable
    // serialization is intentionally unsupported on the client side.
    return new FileChecksum() {

        @Override
        public String getAlgorithmName() {
            return (String) json.get(CHECKSUM_ALGORITHM_JSON);
        }

        @Override
        public int getLength() {
            return ((Long) json.get(CHECKSUM_LENGTH_JSON)).intValue();
        }

        @Override
        public byte[] getBytes() {
            return StringUtils.hexStringToByte((String) json.get(CHECKSUM_BYTES_JSON));
        }

        @Override
        public void write(DataOutput out) throws IOException {
            throw new UnsupportedOperationException();
        }

        @Override
        public void readFields(DataInput in) throws IOException {
            throw new UnsupportedOperationException();
        }
    };
}
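For context, here is a hypothetical caller-side sketch of consuming the FileChecksum returned above. The URI scheme, host, port, and path are placeholders, not values from the Hadoop sources; point them at a real HttpFS gateway. Note that write and readFields are never called in this direction, which is why the anonymous class above can leave them unsupported.

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileChecksum;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.util.StringUtils;

public class ChecksumDemo {
    public static void main(String[] args) throws Exception {
        // Placeholder endpoint; replace with a real HttpFS gateway address.
        FileSystem fs = FileSystem.get(URI.create("webhdfs://httpfs-host:14000"), new Configuration());
        FileChecksum checksum = fs.getFileChecksum(new Path("/data/input.txt"));
        System.out.println(checksum.getAlgorithmName());   // algorithm name reported by the server
        System.out.println(checksum.getLength());          // checksum length in bytes
        System.out.println(StringUtils.byteToHexString(checksum.getBytes()));  // digest as hex
    }
}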
Use of java.io.DataInput in project asterixdb by apache.
The class DatasetTupleTranslator, method getMetadataEntityFromTuple.
@Override
public Dataset getMetadataEntityFromTuple(ITupleReference frameTuple) throws HyracksDataException {
    // Slice the serialized record out of the tuple's payload field and
    // deserialize it through a DataInput view of that byte range.
    byte[] serRecord = frameTuple.getFieldData(DATASET_PAYLOAD_TUPLE_FIELD_INDEX);
    int recordStartOffset = frameTuple.getFieldStart(DATASET_PAYLOAD_TUPLE_FIELD_INDEX);
    int recordLength = frameTuple.getFieldLength(DATASET_PAYLOAD_TUPLE_FIELD_INDEX);
    ByteArrayInputStream stream = new ByteArrayInputStream(serRecord, recordStartOffset, recordLength);
    DataInput in = new DataInputStream(stream);
    ARecord datasetRecord = recordSerDes.deserialize(in);
    return createDatasetFromARecord(datasetRecord);
}
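This translator and the two asterixdb examples that follow share one decoding pattern: slice the payload field out of the tuple, wrap the slice in a DataInputStream, and hand the resulting DataInput to a deserializer. The sketch below reduces that pattern to plain JDK types; the writeUTF/writeInt payload is illustrative, standing in for AsterixDB's record serde.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class PayloadDecodeDemo {
    public static void main(String[] args) throws IOException {
        // Serialize a value into a byte[], the way a tuple payload field would arrive.
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(bos);
        out.writeUTF("Alice");
        out.writeInt(42);
        byte[] serRecord = bos.toByteArray();

        // Decode from an offset/length slice, as getMetadataEntityFromTuple does.
        ByteArrayInputStream stream = new ByteArrayInputStream(serRecord, 0, serRecord.length);
        DataInput in = new DataInputStream(stream);
        System.out.println(in.readUTF());  // Alice
        System.out.println(in.readInt());  // 42
    }
}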
Use of java.io.DataInput in project asterixdb by apache.
The class DatasourceAdapterTupleTranslator, method getMetadataEntityFromTuple.
@Override
public DatasourceAdapter getMetadataEntityFromTuple(ITupleReference tuple) throws MetadataException, HyracksDataException {
    byte[] serRecord = tuple.getFieldData(ADAPTER_PAYLOAD_TUPLE_FIELD_INDEX);
    int recordStartOffset = tuple.getFieldStart(ADAPTER_PAYLOAD_TUPLE_FIELD_INDEX);
    int recordLength = tuple.getFieldLength(ADAPTER_PAYLOAD_TUPLE_FIELD_INDEX);
    ByteArrayInputStream stream = new ByteArrayInputStream(serRecord, recordStartOffset, recordLength);
    DataInput in = new DataInputStream(stream);
    ARecord adapterRecord = recordSerDes.deserialize(in);
    return createAdapterFromARecord(adapterRecord);
}
Use of java.io.DataInput in project asterixdb by apache.
The class IndexTupleTranslator, method getMetadataEntityFromTuple.
@Override
public Index getMetadataEntityFromTuple(ITupleReference frameTuple) throws MetadataException, HyracksDataException {
    // Decode the serialized index record from the tuple's payload field.
    byte[] serRecord = frameTuple.getFieldData(INDEX_PAYLOAD_TUPLE_FIELD_INDEX);
    int recordStartOffset = frameTuple.getFieldStart(INDEX_PAYLOAD_TUPLE_FIELD_INDEX);
    int recordLength = frameTuple.getFieldLength(INDEX_PAYLOAD_TUPLE_FIELD_INDEX);
    ByteArrayInputStream stream = new ByteArrayInputStream(serRecord, recordStartOffset, recordLength);
    DataInput in = new DataInputStream(stream);
    ARecord rec = recordSerde.deserialize(in);
    String dvName = ((AString) rec.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_DATAVERSENAME_FIELD_INDEX)).getStringValue();
    String dsName = ((AString) rec.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_DATASETNAME_FIELD_INDEX)).getStringValue();
    String indexName = ((AString) rec.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_INDEXNAME_FIELD_INDEX)).getStringValue();
    IndexType indexStructure = IndexType.valueOf(((AString) rec.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_INDEXSTRUCTURE_FIELD_INDEX)).getStringValue());
    // Each search key is a (possibly nested) list of field names.
    IACursor fieldNameCursor = ((AOrderedList) rec.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_SEARCHKEY_FIELD_INDEX)).getCursor();
    List<List<String>> searchKey = new ArrayList<>();
    AOrderedList fieldNameList;
    while (fieldNameCursor.next()) {
        fieldNameList = (AOrderedList) fieldNameCursor.get();
        IACursor nestedFieldNameCursor = fieldNameList.getCursor();
        List<String> nestedFieldName = new ArrayList<>();
        while (nestedFieldNameCursor.next()) {
            nestedFieldName.add(((AString) nestedFieldNameCursor.get()).getStringValue());
        }
        searchKey.add(nestedFieldName);
    }
    int indexKeyTypeFieldPos = rec.getType().getFieldIndex(INDEX_SEARCHKEY_TYPE_FIELD_NAME);
    IACursor fieldTypeCursor = new ACollectionCursor();
    if (indexKeyTypeFieldPos > 0) {
        fieldTypeCursor = ((AOrderedList) rec.getValueByPos(indexKeyTypeFieldPos)).getCursor();
    }
    List<IAType> searchKeyType = new ArrayList<>(searchKey.size());
    while (fieldTypeCursor.next()) {
        String typeName = ((AString) fieldTypeCursor.get()).getStringValue();
        IAType fieldType = BuiltinTypeMap.getTypeFromTypeName(metadataNode, jobId, dvName, typeName, false);
        searchKeyType.add(fieldType);
    }
    int isEnforcedFieldPos = rec.getType().getFieldIndex(INDEX_ISENFORCED_FIELD_NAME);
    Boolean isEnforcingKeys = false;
    if (isEnforcedFieldPos > 0) {
        isEnforcingKeys = ((ABoolean) rec.getValueByPos(isEnforcedFieldPos)).getBoolean();
    }
    Boolean isPrimaryIndex = ((ABoolean) rec.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_ISPRIMARY_FIELD_INDEX)).getBoolean();
    int pendingOp = ((AInt32) rec.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_PENDINGOP_FIELD_INDEX)).getIntegerValue();
    // Check if there is a gram length as well.
    int gramLength = -1;
    int gramLenPos = rec.getType().getFieldIndex(GRAM_LENGTH_FIELD_NAME);
    if (gramLenPos >= 0) {
        gramLength = ((AInt32) rec.getValueByPos(gramLenPos)).getIntegerValue();
    }
    // Read the field-source-indicator field if present; otherwise default every key to source 0.
    List<Integer> keyFieldSourceIndicator = new ArrayList<>();
    int keyFieldSourceIndicatorIndex = rec.getType().getFieldIndex(INDEX_SEARCHKEY_SOURCE_INDICATOR_FIELD_NAME);
    if (keyFieldSourceIndicatorIndex >= 0) {
        IACursor cursor = ((AOrderedList) rec.getValueByPos(keyFieldSourceIndicatorIndex)).getCursor();
        while (cursor.next()) {
            keyFieldSourceIndicator.add((int) ((AInt8) cursor.get()).getByteValue());
        }
    } else {
        for (int index = 0; index < searchKey.size(); ++index) {
            keyFieldSourceIndicator.add(0);
        }
    }
    // Index key type information is not persisted, so extract it from the record metadata.
    if (searchKeyType.isEmpty()) {
        try {
            Dataset dSet = metadataNode.getDataset(jobId, dvName, dsName);
            String datatypeName = dSet.getItemTypeName();
            String datatypeDataverseName = dSet.getItemTypeDataverseName();
            ARecordType recordDt = (ARecordType) metadataNode.getDatatype(jobId, datatypeDataverseName, datatypeName).getDatatype();
            String metatypeName = dSet.getMetaItemTypeName();
            String metatypeDataverseName = dSet.getMetaItemTypeDataverseName();
            ARecordType metaDt = null;
            if (metatypeName != null && metatypeDataverseName != null) {
                metaDt = (ARecordType) metadataNode.getDatatype(jobId, metatypeDataverseName, metatypeName).getDatatype();
            }
            try {
                searchKeyType = KeyFieldTypeUtil.getKeyTypes(recordDt, metaDt, searchKey, keyFieldSourceIndicator);
            } catch (AlgebricksException e) {
                throw new MetadataException(e);
            }
        } catch (RemoteException re) {
            throw HyracksDataException.create(re);
        }
    }
    return new Index(dvName, dsName, indexName, indexStructure, searchKey, keyFieldSourceIndicator, searchKeyType, gramLength, isEnforcingKeys, isPrimaryIndex, pendingOp);
}
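Several lookups above share an optional-field idiom: getFieldIndex returns a negative position when a field is absent (for example, in records written by an older metadata version), and the code falls back to a default. A small helper sketch of that idiom, assuming AsterixDB's object model is on the classpath; the class and method names here are ours, not AsterixDB's.

import org.apache.asterix.om.base.ARecord;
import org.apache.asterix.om.base.IAObject;

final class OptionalFieldHelper {
    private OptionalFieldHelper() {
    }

    // Returns the field's value, or null when the field is absent from this record.
    static IAObject getOptionalField(ARecord rec, String fieldName) {
        int pos = rec.getType().getFieldIndex(fieldName);
        return pos >= 0 ? rec.getValueByPos(pos) : null;
    }
}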