Example 6 with DataInput

Use of java.io.DataInput in project che by eclipse, in the class FileMetadataSerializerTest, method readsProperties:

@Test
public void readsProperties() throws Exception {
    DataInput data = mock(DataInput.class);
    when(data.readInt()).thenReturn(2, 1, 1);
    when(data.readUTF()).thenReturn("a", "x", "b", "z");
    Map<String, String> expected = ImmutableMap.of("a", "x", "b", "z");
    assertEquals(expected, metadataSerializer.read(data));
}
Also used : DataInput(java.io.DataInput) Test(org.junit.Test)
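
The FileMetadataSerializer itself is not reproduced on this page. Judging from the stubbing (readInt() returning 2, 1, 1 and readUTF() returning "a", "x", "b", "z"), the layout appears to be an entry count followed, per entry, by a key, a value count, and the value(s). A hypothetical reader along those lines, not the actual che implementation:

import java.io.DataInput;
import java.io.IOException;
import java.util.LinkedHashMap;
import java.util.Map;

// Illustrative sketch only, not the actual org.eclipse.che FileMetadataSerializer.
public class PropertiesReader {

    public Map<String, String> read(DataInput in) throws IOException {
        // property count (stubbed as 2 in the test)
        int entries = in.readInt();
        Map<String, String> result = new LinkedHashMap<>();
        for (int i = 0; i < entries; i++) {
            // key ("a", then "b")
            String key = in.readUTF();
            // per-key value count (stubbed as 1 for each key)
            int valueCount = in.readInt();
            String value = "";
            for (int j = 0; j < valueCount; j++) {
                // value ("x", then "z"); multi-value handling is omitted in this sketch
                value = in.readUTF();
            }
            result.put(key, value);
        }
        return result;
    }
}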

Example 7 with DataInput

Use of java.io.DataInput in project hadoop by apache, in the class HttpFSFileSystem, method getFileChecksum:

@Override
public FileChecksum getFileChecksum(Path f) throws IOException {
    Map<String, String> params = new HashMap<String, String>();
    params.put(OP_PARAM, Operation.GETFILECHECKSUM.toString());
    HttpURLConnection conn = getConnection(Operation.GETFILECHECKSUM.getMethod(), params, f, true);
    HttpExceptionUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
    final JSONObject json = (JSONObject) ((JSONObject) HttpFSUtils.jsonParse(conn)).get(FILE_CHECKSUM_JSON);
    return new FileChecksum() {

        @Override
        public String getAlgorithmName() {
            return (String) json.get(CHECKSUM_ALGORITHM_JSON);
        }

        @Override
        public int getLength() {
            return ((Long) json.get(CHECKSUM_LENGTH_JSON)).intValue();
        }

        @Override
        public byte[] getBytes() {
            return StringUtils.hexStringToByte((String) json.get(CHECKSUM_BYTES_JSON));
        }

        @Override
        public void write(DataOutput out) throws IOException {
            throw new UnsupportedOperationException();
        }

        @Override
        public void readFields(DataInput in) throws IOException {
            throw new UnsupportedOperationException();
        }
    };
}
Also used : DataInput(java.io.DataInput) DataOutput(java.io.DataOutput) HttpURLConnection(java.net.HttpURLConnection) JSONObject(org.json.simple.JSONObject) HashMap(java.util.HashMap) FileChecksum(org.apache.hadoop.fs.FileChecksum)
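
A hedged usage sketch for the checksum above: the endpoint and path are placeholders, how the webhdfs:// scheme resolves to HttpFSFileSystem depends on the client configuration, and getFileChecksum may return null when no checksum is available.

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileChecksum;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.util.StringUtils;

public class ChecksumExample {

    public static void main(String[] args) throws Exception {
        // Placeholder endpoint and path; point these at a real HttpFS server and file.
        FileSystem fs = FileSystem.get(URI.create("webhdfs://httpfs-host:14000"), new Configuration());
        FileChecksum checksum = fs.getFileChecksum(new Path("/tmp/example.txt"));
        if (checksum != null) {
            // Mirrors the accessors implemented by the anonymous FileChecksum above.
            System.out.println(checksum.getAlgorithmName()
                    + " (" + checksum.getLength() + " bytes): "
                    + StringUtils.byteToHexString(checksum.getBytes()));
        }
    }
}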

Example 8 with DataInput

Use of java.io.DataInput in project asterixdb by apache, in the class DatasetTupleTranslator, method getMetadataEntityFromTuple:

@Override
public Dataset getMetadataEntityFromTuple(ITupleReference frameTuple) throws HyracksDataException {
    byte[] serRecord = frameTuple.getFieldData(DATASET_PAYLOAD_TUPLE_FIELD_INDEX);
    int recordStartOffset = frameTuple.getFieldStart(DATASET_PAYLOAD_TUPLE_FIELD_INDEX);
    int recordLength = frameTuple.getFieldLength(DATASET_PAYLOAD_TUPLE_FIELD_INDEX);
    ByteArrayInputStream stream = new ByteArrayInputStream(serRecord, recordStartOffset, recordLength);
    DataInput in = new DataInputStream(stream);
    ARecord datasetRecord = recordSerDes.deserialize(in);
    return createDatasetFromARecord(datasetRecord);
}
Also used : DataInput(java.io.DataInput) ARecord(org.apache.asterix.om.base.ARecord) ByteArrayInputStream(java.io.ByteArrayInputStream) DataInputStream(java.io.DataInputStream)
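
Examples 8, 9, and 10 share the same setup: the serialized record field of the tuple is wrapped in a ByteArrayInputStream and a DataInputStream so that the record deserializer can consume it as a DataInput. Stripped of the AsterixDB specifics, the pattern is plain java.io; a minimal sketch (FieldReader and the sample buffer are made up for illustration):

import java.io.ByteArrayInputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.IOException;

// Illustrative only: exposes a region of a byte[] as a DataInput, the same way the
// tuple translators hand a serialized record field to a deserializer.
public class FieldReader {

    static DataInput dataInputOver(byte[] buffer, int offset, int length) {
        return new DataInputStream(new ByteArrayInputStream(buffer, offset, length));
    }

    public static void main(String[] args) throws IOException {
        // Hypothetical buffer: a 4-byte int (42) starts at offset 2.
        byte[] buffer = {0, 0, 0, 0, 0, 42, 0, 0};
        DataInput in = dataInputOver(buffer, 2, 4);
        // prints 42
        System.out.println(in.readInt());
    }
}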

Example 9 with DataInput

Use of java.io.DataInput in project asterixdb by apache, in the class DatasourceAdapterTupleTranslator, method getMetadataEntityFromTuple:

@Override
public DatasourceAdapter getMetadataEntityFromTuple(ITupleReference tuple) throws MetadataException, HyracksDataException {
    byte[] serRecord = tuple.getFieldData(ADAPTER_PAYLOAD_TUPLE_FIELD_INDEX);
    int recordStartOffset = tuple.getFieldStart(ADAPTER_PAYLOAD_TUPLE_FIELD_INDEX);
    int recordLength = tuple.getFieldLength(ADAPTER_PAYLOAD_TUPLE_FIELD_INDEX);
    ByteArrayInputStream stream = new ByteArrayInputStream(serRecord, recordStartOffset, recordLength);
    DataInput in = new DataInputStream(stream);
    ARecord adapterRecord = recordSerDes.deserialize(in);
    return createAdapterFromARecord(adapterRecord);
}
Also used : DataInput(java.io.DataInput) ARecord(org.apache.asterix.om.base.ARecord) ByteArrayInputStream(java.io.ByteArrayInputStream) DataInputStream(java.io.DataInputStream)

Example 10 with DataInput

Use of java.io.DataInput in project asterixdb by apache, in the class IndexTupleTranslator, method getMetadataEntityFromTuple:

@Override
public Index getMetadataEntityFromTuple(ITupleReference frameTuple) throws MetadataException, HyracksDataException {
    byte[] serRecord = frameTuple.getFieldData(INDEX_PAYLOAD_TUPLE_FIELD_INDEX);
    int recordStartOffset = frameTuple.getFieldStart(INDEX_PAYLOAD_TUPLE_FIELD_INDEX);
    int recordLength = frameTuple.getFieldLength(INDEX_PAYLOAD_TUPLE_FIELD_INDEX);
    ByteArrayInputStream stream = new ByteArrayInputStream(serRecord, recordStartOffset, recordLength);
    DataInput in = new DataInputStream(stream);
    ARecord rec = recordSerde.deserialize(in);
    String dvName = ((AString) rec.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_DATAVERSENAME_FIELD_INDEX)).getStringValue();
    String dsName = ((AString) rec.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_DATASETNAME_FIELD_INDEX)).getStringValue();
    String indexName = ((AString) rec.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_INDEXNAME_FIELD_INDEX)).getStringValue();
    IndexType indexStructure = IndexType.valueOf(((AString) rec.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_INDEXSTRUCTURE_FIELD_INDEX)).getStringValue());
    IACursor fieldNameCursor = ((AOrderedList) rec.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_SEARCHKEY_FIELD_INDEX)).getCursor();
    List<List<String>> searchKey = new ArrayList<>();
    AOrderedList fieldNameList;
    while (fieldNameCursor.next()) {
        fieldNameList = (AOrderedList) fieldNameCursor.get();
        IACursor nestedFieldNameCursor = (fieldNameList.getCursor());
        List<String> nestedFieldName = new ArrayList<>();
        while (nestedFieldNameCursor.next()) {
            nestedFieldName.add(((AString) nestedFieldNameCursor.get()).getStringValue());
        }
        searchKey.add(nestedFieldName);
    }
    int indexKeyTypeFieldPos = rec.getType().getFieldIndex(INDEX_SEARCHKEY_TYPE_FIELD_NAME);
    IACursor fieldTypeCursor = new ACollectionCursor();
    if (indexKeyTypeFieldPos > 0) {
        fieldTypeCursor = ((AOrderedList) rec.getValueByPos(indexKeyTypeFieldPos)).getCursor();
    }
    List<IAType> searchKeyType = new ArrayList<>(searchKey.size());
    while (fieldTypeCursor.next()) {
        String typeName = ((AString) fieldTypeCursor.get()).getStringValue();
        IAType fieldType = BuiltinTypeMap.getTypeFromTypeName(metadataNode, jobId, dvName, typeName, false);
        searchKeyType.add(fieldType);
    }
    int isEnforcedFieldPos = rec.getType().getFieldIndex(INDEX_ISENFORCED_FIELD_NAME);
    Boolean isEnforcingKeys = false;
    if (isEnforcedFieldPos > 0) {
        isEnforcingKeys = ((ABoolean) rec.getValueByPos(isEnforcedFieldPos)).getBoolean();
    }
    Boolean isPrimaryIndex = ((ABoolean) rec.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_ISPRIMARY_FIELD_INDEX)).getBoolean();
    int pendingOp = ((AInt32) rec.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_PENDINGOP_FIELD_INDEX)).getIntegerValue();
    // Check if there is a gram length as well.
    int gramLength = -1;
    int gramLenPos = rec.getType().getFieldIndex(GRAM_LENGTH_FIELD_NAME);
    if (gramLenPos >= 0) {
        gramLength = ((AInt32) rec.getValueByPos(gramLenPos)).getIntegerValue();
    }
    // Read a field-source-indicator field.
    List<Integer> keyFieldSourceIndicator = new ArrayList<>();
    int keyFieldSourceIndicatorIndex = rec.getType().getFieldIndex(INDEX_SEARCHKEY_SOURCE_INDICATOR_FIELD_NAME);
    if (keyFieldSourceIndicatorIndex >= 0) {
        IACursor cursor = ((AOrderedList) rec.getValueByPos(keyFieldSourceIndicatorIndex)).getCursor();
        while (cursor.next()) {
            keyFieldSourceIndicator.add((int) ((AInt8) cursor.get()).getByteValue());
        }
    } else {
        for (int index = 0; index < searchKey.size(); ++index) {
            keyFieldSourceIndicator.add(0);
        }
    }
    // index key type information is not persisted, thus we extract type information from the record metadata
    if (searchKeyType.isEmpty()) {
        try {
            Dataset dSet = metadataNode.getDataset(jobId, dvName, dsName);
            String datatypeName = dSet.getItemTypeName();
            String datatypeDataverseName = dSet.getItemTypeDataverseName();
            ARecordType recordDt = (ARecordType) metadataNode.getDatatype(jobId, datatypeDataverseName, datatypeName).getDatatype();
            String metatypeName = dSet.getMetaItemTypeName();
            String metatypeDataverseName = dSet.getMetaItemTypeDataverseName();
            ARecordType metaDt = null;
            if (metatypeName != null && metatypeDataverseName != null) {
                metaDt = (ARecordType) metadataNode.getDatatype(jobId, metatypeDataverseName, metatypeName).getDatatype();
            }
            try {
                searchKeyType = KeyFieldTypeUtil.getKeyTypes(recordDt, metaDt, searchKey, keyFieldSourceIndicator);
            } catch (AlgebricksException e) {
                throw new MetadataException(e);
            }
        } catch (RemoteException re) {
            throw HyracksDataException.create(re);
        }
    }
    return new Index(dvName, dsName, indexName, indexStructure, searchKey, keyFieldSourceIndicator, searchKeyType, gramLength, isEnforcingKeys, isPrimaryIndex, pendingOp);
}
Also used : ACollectionCursor(org.apache.asterix.om.base.ACollectionCursor) ArrayList(java.util.ArrayList) Index(org.apache.asterix.metadata.entities.Index) AString(org.apache.asterix.om.base.AString) MetadataException(org.apache.asterix.metadata.MetadataException) ARecord(org.apache.asterix.om.base.ARecord) AOrderedList(org.apache.asterix.om.base.AOrderedList) List(java.util.List) IndexType(org.apache.asterix.common.config.DatasetConfig.IndexType) ABoolean(org.apache.asterix.om.base.ABoolean) Dataset(org.apache.asterix.metadata.entities.Dataset) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) IACursor(org.apache.asterix.om.base.IACursor) DataInputStream(java.io.DataInputStream) AInt32(org.apache.asterix.om.base.AInt32) DataInput(java.io.DataInput) ByteArrayInputStream(java.io.ByteArrayInputStream) AInt8(org.apache.asterix.om.base.AInt8) RemoteException(java.rmi.RemoteException) ARecordType(org.apache.asterix.om.types.ARecordType) IAType(org.apache.asterix.om.types.IAType)

Aggregations

DataInput (java.io.DataInput): 295
Test (org.junit.Test): 141
DataInputStream (java.io.DataInputStream): 139
ByteArrayInputStream (java.io.ByteArrayInputStream): 125
DataOutputStream (java.io.DataOutputStream): 122
UnitTest (org.apache.geode.test.junit.categories.UnitTest): 96
IOException (java.io.IOException): 39
ByteArrayOutputStream (java.io.ByteArrayOutputStream): 34
DataOutput (java.io.DataOutput): 26
ArrayList (java.util.ArrayList): 17
ARecord (org.apache.asterix.om.base.ARecord): 14
BufferedInputStream (java.io.BufferedInputStream): 11
EOFException (java.io.EOFException): 11
ByteBuffer (java.nio.ByteBuffer): 10
BigInteger (java.math.BigInteger): 8
HashMap (java.util.HashMap): 7
HeapDataOutputStream (org.apache.geode.internal.HeapDataOutputStream): 7
MembershipTest (org.apache.geode.test.junit.categories.MembershipTest): 7
GrowableArray (org.apache.hyracks.data.std.util.GrowableArray): 7
ChannelDataInput (org.apache.sis.internal.storage.io.ChannelDataInput): 7
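
Aside from JUnit's Test, the types that most often appear alongside DataInput are DataInputStream, ByteArrayInputStream, DataOutputStream, and ByteArrayOutputStream: the in-memory serialize-then-deserialize round trip seen throughout the examples. A minimal, self-contained sketch of that pattern using only the JDK (not taken from any of the projects listed):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class RoundTrip {

    public static void main(String[] args) throws IOException {
        // Serialize a few values into an in-memory buffer.
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        try (DataOutputStream out = new DataOutputStream(bytes)) {
            out.writeInt(42);
            out.writeUTF("hello");
            out.writeBoolean(true);
        }
        // Read them back through the DataInput interface.
        DataInput in = new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()));
        System.out.println(in.readInt());     // 42
        System.out.println(in.readUTF());     // hello
        System.out.println(in.readBoolean()); // true
    }
}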