Search in sources :

Example 11 with ARecord

use of org.apache.asterix.om.base.ARecord in project asterixdb by apache.

From the class FeedPolicyTupleTranslator, method createFeedPolicyFromARecord.

/**
 * Builds a {@link FeedPolicyEntity} from its serialized metadata {@link ARecord}
 * representation, reading each field by its fixed position in
 * {@link MetadataRecordTypes}.
 *
 * @param feedPolicyRecord the metadata record describing a feed policy
 * @return the reconstructed feed policy entity
 */
private FeedPolicyEntity createFeedPolicyFromARecord(ARecord feedPolicyRecord) {
    String dataverseName = ((AString) feedPolicyRecord.getValueByPos(MetadataRecordTypes.FEED_POLICY_ARECORD_DATAVERSE_NAME_FIELD_INDEX)).getStringValue();
    String policyName = ((AString) feedPolicyRecord.getValueByPos(MetadataRecordTypes.FEED_POLICY_ARECORD_POLICY_NAME_FIELD_INDEX)).getStringValue();
    String description = ((AString) feedPolicyRecord.getValueByPos(MetadataRecordTypes.FEED_POLICY_ARECORD_DESCRIPTION_FIELD_INDEX)).getStringValue();
    // The properties field is an unordered list of (name, value) sub-records.
    IACursor cursor = ((AUnorderedList) feedPolicyRecord.getValueByPos(MetadataRecordTypes.FEED_POLICY_ARECORD_PROPERTIES_FIELD_INDEX)).getCursor();
    // Renamed from the misspelled "policyParamters".
    Map<String, String> policyParameters = new HashMap<>();
    while (cursor.next()) {
        ARecord field = (ARecord) cursor.get();
        // Declare key/value inside the loop: they are not used outside it.
        String key = ((AString) field.getValueByPos(MetadataRecordTypes.PROPERTIES_NAME_FIELD_INDEX)).getStringValue();
        String value = ((AString) field.getValueByPos(MetadataRecordTypes.PROPERTIES_VALUE_FIELD_INDEX)).getStringValue();
        policyParameters.put(key, value);
    }
    // Return directly instead of initializing a local to null and reassigning.
    return new FeedPolicyEntity(dataverseName, policyName, description, policyParameters);
}
Also used : ARecord(org.apache.asterix.om.base.ARecord) FeedPolicyEntity(org.apache.asterix.metadata.entities.FeedPolicyEntity) HashMap(java.util.HashMap) AUnorderedList(org.apache.asterix.om.base.AUnorderedList) AString(org.apache.asterix.om.base.AString) AMutableString(org.apache.asterix.om.base.AMutableString) IACursor(org.apache.asterix.om.base.IACursor) AString(org.apache.asterix.om.base.AString)

Example 12 with ARecord

use of org.apache.asterix.om.base.ARecord in project asterixdb by apache.

From the class DatasetTupleTranslator, method createDatasetFromARecord.

/**
 * Reconstructs a {@link Dataset} metadata entity from its serialized {@link ARecord}
 * form, reading mandatory fields by fixed position and optional fields (filter field,
 * key-source indicators, meta type, rebalance count) by name lookup so that records
 * written by older versions without those fields still decode.
 *
 * @param datasetRecord the metadata record describing a dataset
 * @return the reconstructed dataset entity
 * @throws HyracksDataException if deserialization of a nested value fails
 */
protected Dataset createDatasetFromARecord(ARecord datasetRecord) throws HyracksDataException {
    String dataverseName = ((AString) datasetRecord.getValueByPos(MetadataRecordTypes.DATASET_ARECORD_DATAVERSENAME_FIELD_INDEX)).getStringValue();
    String datasetName = ((AString) datasetRecord.getValueByPos(MetadataRecordTypes.DATASET_ARECORD_DATASETNAME_FIELD_INDEX)).getStringValue();
    String typeName = ((AString) datasetRecord.getValueByPos(MetadataRecordTypes.DATASET_ARECORD_DATATYPENAME_FIELD_INDEX)).getStringValue();
    String typeDataverseName = ((AString) datasetRecord.getValueByPos(MetadataRecordTypes.DATASET_ARECORD_DATATYPEDATAVERSENAME_FIELD_INDEX)).getStringValue();
    DatasetType datasetType = DatasetType.valueOf(((AString) datasetRecord.getValueByPos(MetadataRecordTypes.DATASET_ARECORD_DATASETTYPE_FIELD_INDEX)).getStringValue());
    // Filled in by the INTERNAL/EXTERNAL branches of the switch below.
    IDatasetDetails datasetDetails = null;
    int datasetId = ((AInt32) datasetRecord.getValueByPos(MetadataRecordTypes.DATASET_ARECORD_DATASETID_FIELD_INDEX)).getIntegerValue();
    int pendingOp = ((AInt32) datasetRecord.getValueByPos(MetadataRecordTypes.DATASET_ARECORD_PENDINGOP_FIELD_INDEX)).getIntegerValue();
    String nodeGroupName = ((AString) datasetRecord.getValueByPos(MetadataRecordTypes.DATASET_ARECORD_GROUPNAME_FIELD_INDEX)).getStringValue();
    String compactionPolicy = ((AString) datasetRecord.getValueByPos(MetadataRecordTypes.DATASET_ARECORD_COMPACTION_POLICY_FIELD_INDEX)).getStringValue();
    // Compaction-policy properties are an ordered list of (name, value) sub-records;
    // a LinkedHashMap preserves their declaration order.
    IACursor cursor = ((AOrderedList) datasetRecord.getValueByPos(MetadataRecordTypes.DATASET_ARECORD_COMPACTION_POLICY_PROPERTIES_FIELD_INDEX)).getCursor();
    Map<String, String> compactionPolicyProperties = new LinkedHashMap<>();
    String key;
    String value;
    while (cursor.next()) {
        ARecord field = (ARecord) cursor.get();
        key = ((AString) field.getValueByPos(MetadataRecordTypes.PROPERTIES_NAME_FIELD_INDEX)).getStringValue();
        value = ((AString) field.getValueByPos(MetadataRecordTypes.PROPERTIES_VALUE_FIELD_INDEX)).getStringValue();
        compactionPolicyProperties.put(key, value);
    }
    // NOTE(review): no default case — for any DatasetType other than INTERNAL/EXTERNAL,
    // datasetDetails stays null. Confirm the enum has only these two values.
    switch(datasetType) {
        case INTERNAL:
            {
                ARecord datasetDetailsRecord = (ARecord) datasetRecord.getValueByPos(MetadataRecordTypes.DATASET_ARECORD_INTERNALDETAILS_FIELD_INDEX);
                FileStructure fileStructure = FileStructure.valueOf(((AString) datasetDetailsRecord.getValueByPos(MetadataRecordTypes.INTERNAL_DETAILS_ARECORD_FILESTRUCTURE_FIELD_INDEX)).getStringValue());
                PartitioningStrategy partitioningStrategy = PartitioningStrategy.valueOf(((AString) datasetDetailsRecord.getValueByPos(MetadataRecordTypes.INTERNAL_DETAILS_ARECORD_PARTITIONSTRATEGY_FIELD_INDEX)).getStringValue());
                // The partitioning key is a list of field paths, each itself a list of
                // name components (to support nested fields). Reuses the outer cursor.
                cursor = ((AOrderedList) datasetDetailsRecord.getValueByPos(MetadataRecordTypes.INTERNAL_DETAILS_ARECORD_PARTITIONKEY_FIELD_INDEX)).getCursor();
                List<List<String>> partitioningKey = new ArrayList<>();
                List<IAType> partitioningKeyType = new ArrayList<>();
                AOrderedList fieldNameList;
                while (cursor.next()) {
                    fieldNameList = (AOrderedList) cursor.get();
                    IACursor nestedFieldNameCursor = (fieldNameList.getCursor());
                    List<String> nestedFieldName = new ArrayList<>();
                    while (nestedFieldNameCursor.next()) {
                        nestedFieldName.add(((AString) nestedFieldNameCursor.get()).getStringValue());
                    }
                    partitioningKey.add(nestedFieldName);
                    // Every key component is assumed to be a string type here;
                    // the actual type is not stored in the metadata record.
                    partitioningKeyType.add(BuiltinType.ASTRING);
                }
                boolean autogenerated = ((ABoolean) datasetDetailsRecord.getValueByPos(MetadataRecordTypes.INTERNAL_DETAILS_ARECORD_AUTOGENERATED_FIELD_INDEX)).getBoolean();
                // Check if there is a filter field (optional: looked up by name, not position).
                List<String> filterField = null;
                int filterFieldPos = datasetDetailsRecord.getType().getFieldIndex(InternalDatasetDetails.FILTER_FIELD_NAME);
                if (filterFieldPos >= 0) {
                    filterField = new ArrayList<>();
                    cursor = ((AOrderedList) datasetDetailsRecord.getValueByPos(filterFieldPos)).getCursor();
                    while (cursor.next()) {
                        filterField.add(((AString) cursor.get()).getStringValue());
                    }
                }
                // Read a field-source-indicator field (optional; older records lack it).
                List<Integer> keyFieldSourceIndicator = new ArrayList<>();
                int keyFieldSourceIndicatorIndex = datasetDetailsRecord.getType().getFieldIndex(InternalDatasetDetails.KEY_FILD_SOURCE_INDICATOR_FIELD_NAME);
                if (keyFieldSourceIndicatorIndex >= 0) {
                    cursor = ((AOrderedList) datasetDetailsRecord.getValueByPos(keyFieldSourceIndicatorIndex)).getCursor();
                    while (cursor.next()) {
                        keyFieldSourceIndicator.add((int) ((AInt8) cursor.get()).getByteValue());
                    }
                } else {
                    // Absent: default every key component's source indicator to 0.
                    for (int index = 0; index < partitioningKey.size(); ++index) {
                        keyFieldSourceIndicator.add(0);
                    }
                }
                // Temporary dataset only lives in the compiler therefore the temp field is false.
                //  DatasetTupleTranslator always read from the metadata node, so the temp flag should be always false.
                // NOTE(review): partitioningKey is passed twice — presumably the partitioning
                // key doubles as the primary key; confirm against the InternalDatasetDetails constructor.
                datasetDetails = new InternalDatasetDetails(fileStructure, partitioningStrategy, partitioningKey, partitioningKey, keyFieldSourceIndicator, partitioningKeyType, autogenerated, filterField, false);
                break;
            }
        case EXTERNAL:
            ARecord datasetDetailsRecord = (ARecord) datasetRecord.getValueByPos(MetadataRecordTypes.DATASET_ARECORD_EXTERNALDETAILS_FIELD_INDEX);
            String adapter = ((AString) datasetDetailsRecord.getValueByPos(MetadataRecordTypes.EXTERNAL_DETAILS_ARECORD_DATASOURCE_ADAPTER_FIELD_INDEX)).getStringValue();
            cursor = ((AOrderedList) datasetDetailsRecord.getValueByPos(MetadataRecordTypes.EXTERNAL_DETAILS_ARECORD_PROPERTIES_FIELD_INDEX)).getCursor();
            Map<String, String> properties = new HashMap<>();
            while (cursor.next()) {
                ARecord field = (ARecord) cursor.get();
                key = ((AString) field.getValueByPos(MetadataRecordTypes.PROPERTIES_NAME_FIELD_INDEX)).getStringValue();
                value = ((AString) field.getValueByPos(MetadataRecordTypes.PROPERTIES_VALUE_FIELD_INDEX)).getStringValue();
                properties.put(key, value);
            }
            // Timestamp of the last refresh, stored as chronon time (ms).
            Date timestamp = new Date((((ADateTime) datasetDetailsRecord.getValueByPos(MetadataRecordTypes.EXTERNAL_DETAILS_ARECORD_LAST_REFRESH_TIME_FIELD_INDEX))).getChrononTime());
            // State, stored as the ordinal of TransactionState.
            TransactionState state = TransactionState.values()[((AInt32) datasetDetailsRecord.getValueByPos(MetadataRecordTypes.EXTERNAL_DETAILS_ARECORD_TRANSACTION_STATE_FIELD_INDEX)).getIntegerValue()];
            // No break needed: EXTERNAL is the last case of the switch.
            datasetDetails = new ExternalDatasetDetails(adapter, properties, timestamp, state);
    }
    Map<String, String> hints = getDatasetHints(datasetRecord);
    // Meta-type fields are optional (looked up by name); both stay null when absent.
    String metaTypeDataverseName = null;
    String metaTypeName = null;
    int metaTypeDataverseNameIndex = datasetRecord.getType().getFieldIndex(MetadataRecordTypes.FIELD_NAME_METADATA_DATAVERSE);
    if (metaTypeDataverseNameIndex >= 0) {
        metaTypeDataverseName = ((AString) datasetRecord.getValueByPos(metaTypeDataverseNameIndex)).getStringValue();
        int metaTypeNameIndex = datasetRecord.getType().getFieldIndex(MetadataRecordTypes.FIELD_NAME_METATYPE_NAME);
        metaTypeName = ((AString) datasetRecord.getValueByPos(metaTypeNameIndex)).getStringValue();
    }
    // Read the rebalance count if there is one; defaults to 0 for older records.
    int rebalanceCountIndex = datasetRecord.getType().getFieldIndex(REBALANCE_ID_FIELD_NAME);
    long rebalanceCount = rebalanceCountIndex >= 0 ? ((AInt64) datasetRecord.getValueByPos(rebalanceCountIndex)).getLongValue() : 0;
    return new Dataset(dataverseName, datasetName, typeDataverseName, typeName, metaTypeDataverseName, metaTypeName, nodeGroupName, compactionPolicy, compactionPolicyProperties, datasetDetails, hints, datasetType, datasetId, pendingOp, rebalanceCount);
}
Also used : TransactionState(org.apache.asterix.common.config.DatasetConfig.TransactionState) FileStructure(org.apache.asterix.metadata.entities.InternalDatasetDetails.FileStructure) HashMap(java.util.HashMap) LinkedHashMap(java.util.LinkedHashMap) Dataset(org.apache.asterix.metadata.entities.Dataset) InternalDatasetDetails(org.apache.asterix.metadata.entities.InternalDatasetDetails) ArrayList(java.util.ArrayList) ADateTime(org.apache.asterix.om.base.ADateTime) DatasetType(org.apache.asterix.common.config.DatasetConfig.DatasetType) AMutableString(org.apache.asterix.om.base.AMutableString) AString(org.apache.asterix.om.base.AString) IACursor(org.apache.asterix.om.base.IACursor) IDatasetDetails(org.apache.asterix.metadata.IDatasetDetails) AInt32(org.apache.asterix.om.base.AInt32) Date(java.util.Date) LinkedHashMap(java.util.LinkedHashMap) ARecord(org.apache.asterix.om.base.ARecord) AOrderedList(org.apache.asterix.om.base.AOrderedList) ExternalDatasetDetails(org.apache.asterix.metadata.entities.ExternalDatasetDetails) PartitioningStrategy(org.apache.asterix.metadata.entities.InternalDatasetDetails.PartitioningStrategy) List(java.util.List) AOrderedList(org.apache.asterix.om.base.AOrderedList) AUnorderedList(org.apache.asterix.om.base.AUnorderedList) ArrayList(java.util.ArrayList) AString(org.apache.asterix.om.base.AString)

Example 13 with ARecord

use of org.apache.asterix.om.base.ARecord in project asterixdb by apache.

From the class DatasetTupleTranslator, method getMetadataEntityFromTuple.

/**
 * Deserializes the dataset payload field of the given metadata tuple into a
 * {@link Dataset} entity.
 *
 * @param frameTuple the metadata tuple whose payload field holds a serialized dataset record
 * @return the reconstructed dataset entity
 * @throws HyracksDataException if deserialization fails
 */
@Override
public Dataset getMetadataEntityFromTuple(ITupleReference frameTuple) throws HyracksDataException {
    byte[] payload = frameTuple.getFieldData(DATASET_PAYLOAD_TUPLE_FIELD_INDEX);
    int offset = frameTuple.getFieldStart(DATASET_PAYLOAD_TUPLE_FIELD_INDEX);
    int length = frameTuple.getFieldLength(DATASET_PAYLOAD_TUPLE_FIELD_INDEX);
    // Expose exactly the payload field's byte range as a DataInput for the record serde.
    DataInput input = new DataInputStream(new ByteArrayInputStream(payload, offset, length));
    return createDatasetFromARecord(recordSerDes.deserialize(input));
}
Also used : DataInput(java.io.DataInput) ARecord(org.apache.asterix.om.base.ARecord) ByteArrayInputStream(java.io.ByteArrayInputStream) DataInputStream(java.io.DataInputStream)

Example 14 with ARecord

use of org.apache.asterix.om.base.ARecord in project asterixdb by apache.

From the class DatasourceAdapterTupleTranslator, method getMetadataEntityFromTuple.

/**
 * Deserializes the adapter payload field of the given metadata tuple into a
 * {@link DatasourceAdapter} entity.
 *
 * @param tuple the metadata tuple whose payload field holds a serialized adapter record
 * @return the reconstructed adapter entity
 * @throws MetadataException if the adapter entity cannot be built
 * @throws HyracksDataException if deserialization fails
 */
@Override
public DatasourceAdapter getMetadataEntityFromTuple(ITupleReference tuple) throws MetadataException, HyracksDataException {
    byte[] payload = tuple.getFieldData(ADAPTER_PAYLOAD_TUPLE_FIELD_INDEX);
    int offset = tuple.getFieldStart(ADAPTER_PAYLOAD_TUPLE_FIELD_INDEX);
    int length = tuple.getFieldLength(ADAPTER_PAYLOAD_TUPLE_FIELD_INDEX);
    // Expose exactly the payload field's byte range as a DataInput for the record serde.
    DataInput input = new DataInputStream(new ByteArrayInputStream(payload, offset, length));
    return createAdapterFromARecord(recordSerDes.deserialize(input));
}
Also used : DataInput(java.io.DataInput) ARecord(org.apache.asterix.om.base.ARecord) ByteArrayInputStream(java.io.ByteArrayInputStream) DataInputStream(java.io.DataInputStream)

Example 15 with ARecord

use of org.apache.asterix.om.base.ARecord in project asterixdb by apache.

From the class AUnorderedListSerializerDeserializerTest, method test.

/**
 * Verifies that a single {@link AUnorderedListSerializerDeserializer} instance can be
 * shared safely by several threads serializing/deserializing lists concurrently.
 */
@Test
public void test() {
    // Build the record and list types used by the fixture.
    ARecordType addressType = SerializerDeserializerTestUtils.generateAddressRecordType();
    ARecordType employeeType = SerializerDeserializerTestUtils.generateEmployeeRecordType(addressType);
    AUnorderedListType employeeListType = new AUnorderedListType(employeeType, "employee_list");
    // Generate sample employee records and wrap them into four identical unordered lists.
    ARecord[] employees = SerializerDeserializerTestUtils.generateRecords(addressType, employeeType);
    AUnorderedList[] lists = new AUnorderedList[4];
    for (int i = 0; i < lists.length; i++) {
        lists[i] = new AUnorderedList(employeeListType, Arrays.asList(employees));
    }
    AUnorderedListSerializerDeserializer serde = new AUnorderedListSerializerDeserializer(employeeListType);
    // Exercise the shared serde from four concurrent test threads.
    SerializerDeserializerTestUtils.concurrentSerDeTestRun(serde, lists);
}
Also used : ARecord(org.apache.asterix.om.base.ARecord) AUnorderedList(org.apache.asterix.om.base.AUnorderedList) ARecordType(org.apache.asterix.om.types.ARecordType) AUnorderedListType(org.apache.asterix.om.types.AUnorderedListType) Test(org.junit.Test)

Aggregations

ARecord (org.apache.asterix.om.base.ARecord)24 ByteArrayInputStream (java.io.ByteArrayInputStream)14 DataInput (java.io.DataInput)14 DataInputStream (java.io.DataInputStream)14 AString (org.apache.asterix.om.base.AString)9 IACursor (org.apache.asterix.om.base.IACursor)7 AUnorderedList (org.apache.asterix.om.base.AUnorderedList)6 ARecordType (org.apache.asterix.om.types.ARecordType)6 HashMap (java.util.HashMap)4 AMutableString (org.apache.asterix.om.base.AMutableString)4 AOrderedList (org.apache.asterix.om.base.AOrderedList)4 ArrayList (java.util.ArrayList)3 AOrderedListType (org.apache.asterix.om.types.AOrderedListType)3 IAType (org.apache.asterix.om.types.IAType)3 LinkedHashMap (java.util.LinkedHashMap)2 List (java.util.List)2 Dataset (org.apache.asterix.metadata.entities.Dataset)2 ABoolean (org.apache.asterix.om.base.ABoolean)2 AInt32 (org.apache.asterix.om.base.AInt32)2 AUnorderedListType (org.apache.asterix.om.types.AUnorderedListType)2