
Example 1 with AInt32

Use of org.apache.asterix.om.base.AInt32 in project asterixdb by apache.

From class FuzzyUtils, method getSimThreshold.

public static IAObject getSimThreshold(MetadataProvider metadata, String simFuncName) {
    String simThresholdValue = metadata.getPropertyValue(SIM_THRESHOLD_PROP_NAME);
    IAObject ret = null;
    if (simFuncName.equals(JACCARD_FUNCTION_NAME)) {
        // Jaccard similarity takes a float threshold.
        if (simThresholdValue != null) {
            float jaccThresh = Float.parseFloat(simThresholdValue);
            ret = new AFloat(jaccThresh);
        } else {
            ret = new AFloat(JACCARD_DEFAULT_SIM_THRESHOLD);
        }
    } else if (simFuncName.equals(EDIT_DISTANCE_FUNCTION_NAME)) {
        // Edit distance takes an integer threshold; the default is wrapped as an
        // AInt32 as well, so both branches return the same type.
        if (simThresholdValue != null) {
            int edThresh = Integer.parseInt(simThresholdValue);
            ret = new AInt32(edThresh);
        } else {
            ret = new AInt32(EDIT_DISTANCE_DEFAULT_SIM_THRESHOLD);
        }
    }
    return ret;
}
Also used : AFloat(org.apache.asterix.om.base.AFloat) IAObject(org.apache.asterix.om.base.IAObject) AInt32(org.apache.asterix.om.base.AInt32)
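
AInt32 and AFloat are immutable wrappers in the AsterixDB object model; both implement IAObject, which is why getSimThreshold can return either one through the same interface. Below is a minimal sketch of that round trip; the class name and the threshold values are made up for illustration:

import org.apache.asterix.om.base.AFloat;
import org.apache.asterix.om.base.AInt32;
import org.apache.asterix.om.base.IAObject;

public class SimThresholdRoundTrip {
    public static void main(String[] args) {
        // Wrap a parsed edit-distance threshold (an int) as an AInt32.
        IAObject edThreshold = new AInt32(Integer.parseInt("3"));
        // Wrap a Jaccard threshold (a float) as an AFloat.
        IAObject jaccThreshold = new AFloat(Float.parseFloat("0.8"));
        // Callers recover the primitive by casting back to the concrete type,
        // exactly as consumers of getSimThreshold would.
        int ed = ((AInt32) edThreshold).getIntegerValue();
        float jacc = ((AFloat) jaccThreshold).getFloatValue();
        System.out.println("ed=" + ed + ", jaccard=" + jacc);
    }
}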

Example 2 with AInt32

Use of org.apache.asterix.om.base.AInt32 in project asterixdb by apache.

From class DatasetTupleTranslator, method createDatasetFromARecord.

protected Dataset createDatasetFromARecord(ARecord datasetRecord) throws HyracksDataException {
    String dataverseName = ((AString) datasetRecord.getValueByPos(MetadataRecordTypes.DATASET_ARECORD_DATAVERSENAME_FIELD_INDEX)).getStringValue();
    String datasetName = ((AString) datasetRecord.getValueByPos(MetadataRecordTypes.DATASET_ARECORD_DATASETNAME_FIELD_INDEX)).getStringValue();
    String typeName = ((AString) datasetRecord.getValueByPos(MetadataRecordTypes.DATASET_ARECORD_DATATYPENAME_FIELD_INDEX)).getStringValue();
    String typeDataverseName = ((AString) datasetRecord.getValueByPos(MetadataRecordTypes.DATASET_ARECORD_DATATYPEDATAVERSENAME_FIELD_INDEX)).getStringValue();
    DatasetType datasetType = DatasetType.valueOf(((AString) datasetRecord.getValueByPos(MetadataRecordTypes.DATASET_ARECORD_DATASETTYPE_FIELD_INDEX)).getStringValue());
    IDatasetDetails datasetDetails = null;
    int datasetId = ((AInt32) datasetRecord.getValueByPos(MetadataRecordTypes.DATASET_ARECORD_DATASETID_FIELD_INDEX)).getIntegerValue();
    int pendingOp = ((AInt32) datasetRecord.getValueByPos(MetadataRecordTypes.DATASET_ARECORD_PENDINGOP_FIELD_INDEX)).getIntegerValue();
    String nodeGroupName = ((AString) datasetRecord.getValueByPos(MetadataRecordTypes.DATASET_ARECORD_GROUPNAME_FIELD_INDEX)).getStringValue();
    String compactionPolicy = ((AString) datasetRecord.getValueByPos(MetadataRecordTypes.DATASET_ARECORD_COMPACTION_POLICY_FIELD_INDEX)).getStringValue();
    IACursor cursor = ((AOrderedList) datasetRecord.getValueByPos(MetadataRecordTypes.DATASET_ARECORD_COMPACTION_POLICY_PROPERTIES_FIELD_INDEX)).getCursor();
    Map<String, String> compactionPolicyProperties = new LinkedHashMap<>();
    String key;
    String value;
    while (cursor.next()) {
        ARecord field = (ARecord) cursor.get();
        key = ((AString) field.getValueByPos(MetadataRecordTypes.PROPERTIES_NAME_FIELD_INDEX)).getStringValue();
        value = ((AString) field.getValueByPos(MetadataRecordTypes.PROPERTIES_VALUE_FIELD_INDEX)).getStringValue();
        compactionPolicyProperties.put(key, value);
    }
    switch(datasetType) {
        case INTERNAL:
            {
                ARecord datasetDetailsRecord = (ARecord) datasetRecord.getValueByPos(MetadataRecordTypes.DATASET_ARECORD_INTERNALDETAILS_FIELD_INDEX);
                FileStructure fileStructure = FileStructure.valueOf(((AString) datasetDetailsRecord.getValueByPos(MetadataRecordTypes.INTERNAL_DETAILS_ARECORD_FILESTRUCTURE_FIELD_INDEX)).getStringValue());
                PartitioningStrategy partitioningStrategy = PartitioningStrategy.valueOf(((AString) datasetDetailsRecord.getValueByPos(MetadataRecordTypes.INTERNAL_DETAILS_ARECORD_PARTITIONSTRATEGY_FIELD_INDEX)).getStringValue());
                cursor = ((AOrderedList) datasetDetailsRecord.getValueByPos(MetadataRecordTypes.INTERNAL_DETAILS_ARECORD_PARTITIONKEY_FIELD_INDEX)).getCursor();
                List<List<String>> partitioningKey = new ArrayList<>();
                List<IAType> partitioningKeyType = new ArrayList<>();
                AOrderedList fieldNameList;
                while (cursor.next()) {
                    fieldNameList = (AOrderedList) cursor.get();
                    IACursor nestedFieldNameCursor = fieldNameList.getCursor();
                    List<String> nestedFieldName = new ArrayList<>();
                    while (nestedFieldNameCursor.next()) {
                        nestedFieldName.add(((AString) nestedFieldNameCursor.get()).getStringValue());
                    }
                    partitioningKey.add(nestedFieldName);
                    partitioningKeyType.add(BuiltinType.ASTRING);
                }
                boolean autogenerated = ((ABoolean) datasetDetailsRecord.getValueByPos(MetadataRecordTypes.INTERNAL_DETAILS_ARECORD_AUTOGENERATED_FIELD_INDEX)).getBoolean();
                // Check if there is a filter field.
                List<String> filterField = null;
                int filterFieldPos = datasetDetailsRecord.getType().getFieldIndex(InternalDatasetDetails.FILTER_FIELD_NAME);
                if (filterFieldPos >= 0) {
                    filterField = new ArrayList<>();
                    cursor = ((AOrderedList) datasetDetailsRecord.getValueByPos(filterFieldPos)).getCursor();
                    while (cursor.next()) {
                        filterField.add(((AString) cursor.get()).getStringValue());
                    }
                }
                // Read a field-source-indicator field.
                List<Integer> keyFieldSourceIndicator = new ArrayList<>();
                int keyFieldSourceIndicatorIndex = datasetDetailsRecord.getType().getFieldIndex(InternalDatasetDetails.KEY_FILD_SOURCE_INDICATOR_FIELD_NAME);
                if (keyFieldSourceIndicatorIndex >= 0) {
                    cursor = ((AOrderedList) datasetDetailsRecord.getValueByPos(keyFieldSourceIndicatorIndex)).getCursor();
                    while (cursor.next()) {
                        keyFieldSourceIndicator.add((int) ((AInt8) cursor.get()).getByteValue());
                    }
                } else {
                    for (int index = 0; index < partitioningKey.size(); ++index) {
                        keyFieldSourceIndicator.add(0);
                    }
                }
                // A temporary dataset lives only in the compiler; DatasetTupleTranslator always
                // reads from the metadata node, so the temp flag is always false.
                datasetDetails = new InternalDatasetDetails(fileStructure, partitioningStrategy, partitioningKey, partitioningKey, keyFieldSourceIndicator, partitioningKeyType, autogenerated, filterField, false);
                break;
            }
        case EXTERNAL:
            ARecord datasetDetailsRecord = (ARecord) datasetRecord.getValueByPos(MetadataRecordTypes.DATASET_ARECORD_EXTERNALDETAILS_FIELD_INDEX);
            String adapter = ((AString) datasetDetailsRecord.getValueByPos(MetadataRecordTypes.EXTERNAL_DETAILS_ARECORD_DATASOURCE_ADAPTER_FIELD_INDEX)).getStringValue();
            cursor = ((AOrderedList) datasetDetailsRecord.getValueByPos(MetadataRecordTypes.EXTERNAL_DETAILS_ARECORD_PROPERTIES_FIELD_INDEX)).getCursor();
            Map<String, String> properties = new HashMap<>();
            while (cursor.next()) {
                ARecord field = (ARecord) cursor.get();
                key = ((AString) field.getValueByPos(MetadataRecordTypes.PROPERTIES_NAME_FIELD_INDEX)).getStringValue();
                value = ((AString) field.getValueByPos(MetadataRecordTypes.PROPERTIES_VALUE_FIELD_INDEX)).getStringValue();
                properties.put(key, value);
            }
            // Timestamp
            Date timestamp = new Date((((ADateTime) datasetDetailsRecord.getValueByPos(MetadataRecordTypes.EXTERNAL_DETAILS_ARECORD_LAST_REFRESH_TIME_FIELD_INDEX))).getChrononTime());
            // State
            TransactionState state = TransactionState.values()[((AInt32) datasetDetailsRecord.getValueByPos(MetadataRecordTypes.EXTERNAL_DETAILS_ARECORD_TRANSACTION_STATE_FIELD_INDEX)).getIntegerValue()];
            datasetDetails = new ExternalDatasetDetails(adapter, properties, timestamp, state);
    }
    Map<String, String> hints = getDatasetHints(datasetRecord);
    String metaTypeDataverseName = null;
    String metaTypeName = null;
    int metaTypeDataverseNameIndex = datasetRecord.getType().getFieldIndex(MetadataRecordTypes.FIELD_NAME_METADATA_DATAVERSE);
    if (metaTypeDataverseNameIndex >= 0) {
        metaTypeDataverseName = ((AString) datasetRecord.getValueByPos(metaTypeDataverseNameIndex)).getStringValue();
        int metaTypeNameIndex = datasetRecord.getType().getFieldIndex(MetadataRecordTypes.FIELD_NAME_METATYPE_NAME);
        metaTypeName = ((AString) datasetRecord.getValueByPos(metaTypeNameIndex)).getStringValue();
    }
    // Read the rebalance count if there is one.
    int rebalanceCountIndex = datasetRecord.getType().getFieldIndex(REBALANCE_ID_FIELD_NAME);
    long rebalanceCount = rebalanceCountIndex >= 0 ? ((AInt64) datasetRecord.getValueByPos(rebalanceCountIndex)).getLongValue() : 0;
    return new Dataset(dataverseName, datasetName, typeDataverseName, typeName, metaTypeDataverseName, metaTypeName, nodeGroupName, compactionPolicy, compactionPolicyProperties, datasetDetails, hints, datasetType, datasetId, pendingOp, rebalanceCount);
}
Also used : TransactionState(org.apache.asterix.common.config.DatasetConfig.TransactionState) FileStructure(org.apache.asterix.metadata.entities.InternalDatasetDetails.FileStructure) HashMap(java.util.HashMap) LinkedHashMap(java.util.LinkedHashMap) Dataset(org.apache.asterix.metadata.entities.Dataset) InternalDatasetDetails(org.apache.asterix.metadata.entities.InternalDatasetDetails) ArrayList(java.util.ArrayList) ADateTime(org.apache.asterix.om.base.ADateTime) DatasetType(org.apache.asterix.common.config.DatasetConfig.DatasetType) AMutableString(org.apache.asterix.om.base.AMutableString) AString(org.apache.asterix.om.base.AString) IACursor(org.apache.asterix.om.base.IACursor) IDatasetDetails(org.apache.asterix.metadata.IDatasetDetails) AInt32(org.apache.asterix.om.base.AInt32) Date(java.util.Date) ARecord(org.apache.asterix.om.base.ARecord) AOrderedList(org.apache.asterix.om.base.AOrderedList) ExternalDatasetDetails(org.apache.asterix.metadata.entities.ExternalDatasetDetails) PartitioningStrategy(org.apache.asterix.metadata.entities.InternalDatasetDetails.PartitioningStrategy) List(java.util.List) AUnorderedList(org.apache.asterix.om.base.AUnorderedList)
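
The translator's recurring move is positional access: getValueByPos returns an IAObject that is cast to the concrete model class (AString, AInt32, AInt64, ...) before the primitive is unwrapped. A minimal sketch of that pattern, using hypothetical field indexes rather than the real MetadataRecordTypes constants:

import org.apache.asterix.om.base.AInt32;
import org.apache.asterix.om.base.ARecord;
import org.apache.asterix.om.base.AString;

public final class RecordFieldReader {

    private RecordFieldReader() {
    }

    // Hypothetical field positions, for illustration only.
    private static final int NAME_FIELD_INDEX = 0;
    private static final int ID_FIELD_INDEX = 1;

    /** Reads a string field by position, as the translator does for dataset names. */
    static String readString(ARecord record) {
        return ((AString) record.getValueByPos(NAME_FIELD_INDEX)).getStringValue();
    }

    /** Reads an int field by position, as the translator does for datasetId and pendingOp. */
    static int readInt(ARecord record) {
        return ((AInt32) record.getValueByPos(ID_FIELD_INDEX)).getIntegerValue();
    }
}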

Example 3 with AInt32

Use of org.apache.asterix.om.base.AInt32 in project asterixdb by apache.

From class NonTaggedDataFormat, method partitioningEvaluatorFactory.

@SuppressWarnings("unchecked")
@Override
public Triple<IScalarEvaluatorFactory, ScalarFunctionCallExpression, IAType> partitioningEvaluatorFactory(ARecordType recType, List<String> fldName) throws AlgebricksException {
    String[] names = recType.getFieldNames();
    int n = names.length;
    if (fldName.size() == 1) {
        for (int i = 0; i < n; i++) {
            if (names[i].equals(fldName.get(0))) {
                IScalarEvaluatorFactory recordEvalFactory = new ColumnAccessEvalFactory(GlobalConfig.DEFAULT_INPUT_DATA_COLUMN);
                ArrayBackedValueStorage abvs = new ArrayBackedValueStorage();
                DataOutput dos = abvs.getDataOutput();
                try {
                    AInt32 ai = new AInt32(i);
                    SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(ai.getType()).serialize(ai, dos);
                } catch (HyracksDataException e) {
                    throw new AlgebricksException(e);
                }
                IScalarEvaluatorFactory fldIndexEvalFactory = new ConstantEvalFactory(Arrays.copyOf(abvs.getByteArray(), abvs.getLength()));
                IScalarEvaluatorFactory evalFactory = new FieldAccessByIndexEvalFactory(recordEvalFactory, fldIndexEvalFactory, recType);
                IFunctionInfo finfoAccess = BuiltinFunctions.getAsterixFunctionInfo(BuiltinFunctions.FIELD_ACCESS_BY_INDEX);
                ScalarFunctionCallExpression partitionFun = new ScalarFunctionCallExpression(finfoAccess, new MutableObject<ILogicalExpression>(new VariableReferenceExpression(METADATA_DUMMY_VAR)), new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(new AInt32(i)))));
                return new Triple<IScalarEvaluatorFactory, ScalarFunctionCallExpression, IAType>(evalFactory, partitionFun, recType.getFieldTypes()[i]);
            }
        }
    } else {
        IScalarEvaluatorFactory recordEvalFactory = new ColumnAccessEvalFactory(GlobalConfig.DEFAULT_INPUT_DATA_COLUMN);
        ArrayBackedValueStorage abvs = new ArrayBackedValueStorage();
        DataOutput dos = abvs.getDataOutput();
        AOrderedList as = new AOrderedList(fldName);
        try {
            SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(as.getType()).serialize(as, dos);
        } catch (HyracksDataException e) {
            throw new AlgebricksException(e);
        }
        IScalarEvaluatorFactory evalFactory = new FieldAccessNestedEvalFactory(recordEvalFactory, recType, fldName);
        IFunctionInfo finfoAccess = BuiltinFunctions.getAsterixFunctionInfo(BuiltinFunctions.FIELD_ACCESS_NESTED);
        ScalarFunctionCallExpression partitionFun = new ScalarFunctionCallExpression(finfoAccess, new MutableObject<ILogicalExpression>(new VariableReferenceExpression(METADATA_DUMMY_VAR)), new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(as))));
        return new Triple<IScalarEvaluatorFactory, ScalarFunctionCallExpression, IAType>(evalFactory, partitionFun, recType.getSubFieldType(fldName));
    }
    throw new AlgebricksException("Could not find field " + fldName + " in the schema.");
}
Also used : DataOutput(java.io.DataOutput) IFunctionInfo(org.apache.hyracks.algebricks.core.algebra.functions.IFunctionInfo) ConstantEvalFactory(org.apache.hyracks.algebricks.runtime.evaluators.ConstantEvalFactory) ConstantExpression(org.apache.hyracks.algebricks.core.algebra.expressions.ConstantExpression) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) AString(org.apache.asterix.om.base.AString) AInt32(org.apache.asterix.om.base.AInt32) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException) IScalarEvaluatorFactory(org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory) Triple(org.apache.hyracks.algebricks.common.utils.Triple) ILogicalExpression(org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression) ArrayBackedValueStorage(org.apache.hyracks.data.std.util.ArrayBackedValueStorage) AOrderedList(org.apache.asterix.om.base.AOrderedList) AsterixConstantValue(org.apache.asterix.om.constants.AsterixConstantValue) VariableReferenceExpression(org.apache.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression) ColumnAccessEvalFactory(org.apache.hyracks.algebricks.runtime.evaluators.ColumnAccessEvalFactory) FieldAccessByIndexEvalFactory(org.apache.asterix.runtime.evaluators.functions.records.FieldAccessByIndexEvalFactory) FieldAccessNestedEvalFactory(org.apache.asterix.runtime.evaluators.functions.records.FieldAccessNestedEvalFactory) ScalarFunctionCallExpression(org.apache.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression)
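
The by-index branch above pre-serializes the field position as an AInt32 so the bytes can be embedded in a ConstantEvalFactory. Here is a minimal sketch of just that serialization step; it assumes SerializerDeserializerProvider lives in org.apache.asterix.formats.nontagged, as in this version of the project, and the class name is ours:

import java.io.DataOutput;
import java.util.Arrays;

import org.apache.asterix.formats.nontagged.SerializerDeserializerProvider;
import org.apache.asterix.om.base.AInt32;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;

public final class Int32ConstantBytes {

    private Int32ConstantBytes() {
    }

    /** Serializes an AInt32 into a fresh byte array, mirroring the payload built for ConstantEvalFactory. */
    @SuppressWarnings("unchecked")
    static byte[] serialize(int fieldIndex) throws HyracksDataException {
        ArrayBackedValueStorage abvs = new ArrayBackedValueStorage();
        DataOutput dos = abvs.getDataOutput();
        AInt32 ai = new AInt32(fieldIndex);
        // The serializer is looked up from the value's own type (AINT32).
        SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(ai.getType()).serialize(ai, dos);
        // Copy out only the bytes actually written to the storage.
        return Arrays.copyOf(abvs.getByteArray(), abvs.getLength());
    }
}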

Example 4 with AInt32

Use of org.apache.asterix.om.base.AInt32 in project asterixdb by apache.

From class IntroduceSecondaryIndexInsertDeleteRule, method rewritePost.

@Override
public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
    AbstractLogicalOperator op0 = (AbstractLogicalOperator) opRef.getValue();
    if (op0.getOperatorTag() != LogicalOperatorTag.DELEGATE_OPERATOR && op0.getOperatorTag() != LogicalOperatorTag.SINK) {
        return false;
    }
    if (op0.getOperatorTag() == LogicalOperatorTag.DELEGATE_OPERATOR) {
        DelegateOperator eOp = (DelegateOperator) op0;
        if (!(eOp.getDelegate() instanceof CommitOperator)) {
            return false;
        }
    }
    AbstractLogicalOperator op1 = (AbstractLogicalOperator) op0.getInputs().get(0).getValue();
    if (op1.getOperatorTag() != LogicalOperatorTag.INSERT_DELETE_UPSERT) {
        return false;
    }
    /** find the record variable */
    InsertDeleteUpsertOperator primaryIndexModificationOp = (InsertDeleteUpsertOperator) op0.getInputs().get(0).getValue();
    boolean isBulkload = primaryIndexModificationOp.isBulkload();
    ILogicalExpression newRecordExpr = primaryIndexModificationOp.getPayloadExpression().getValue();
    List<Mutable<ILogicalExpression>> newMetaExprs = primaryIndexModificationOp.getAdditionalNonFilteringExpressions();
    LogicalVariable newRecordVar;
    LogicalVariable newMetaVar = null;
    /**
         * inputOp is the assign operator which extracts primary keys from the input
         * variables (record or meta)
         */
    AbstractLogicalOperator inputOp = (AbstractLogicalOperator) primaryIndexModificationOp.getInputs().get(0).getValue();
    newRecordVar = getRecordVar(context, inputOp, newRecordExpr, 0);
    if (newMetaExprs != null && !newMetaExprs.isEmpty()) {
        if (newMetaExprs.size() > 1) {
            throw new AlgebricksException("Number of meta records can't be more than 1. Number of meta records found = " + newMetaExprs.size());
        }
        newMetaVar = getRecordVar(context, inputOp, newMetaExprs.get(0).getValue(), 1);
    }
    /*
         * At this point, we have the record variable and the insert/delete/upsert operator
         * Note: We have two operators:
         * 1. An InsertDeleteOperator (primary)
         * 2. An IndexInsertDeleteOperator (secondary)
         * The current primaryIndexModificationOp is of the first type
         */
    DataSource datasetSource = (DataSource) primaryIndexModificationOp.getDataSource();
    MetadataProvider mp = (MetadataProvider) context.getMetadataProvider();
    String dataverseName = datasetSource.getId().getDataverseName();
    String datasetName = datasetSource.getId().getDatasourceName();
    Dataset dataset = mp.findDataset(dataverseName, datasetName);
    if (dataset == null) {
        throw new AlgebricksException("Unknown dataset " + datasetName + " in dataverse " + dataverseName);
    }
    if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
        return false;
    }
    // Create operators for secondary index insert / delete.
    String itemTypeName = dataset.getItemTypeName();
    IAType itemType = mp.findType(dataset.getItemTypeDataverseName(), itemTypeName);
    if (itemType.getTypeTag() != ATypeTag.OBJECT) {
        throw new AlgebricksException("Only record types can be indexed.");
    }
    ARecordType recType = (ARecordType) itemType;
    // meta type
    ARecordType metaType = null;
    if (dataset.hasMetaPart()) {
        metaType = (ARecordType) mp.findType(dataset.getMetaItemTypeDataverseName(), dataset.getMetaItemTypeName());
    }
    List<Index> indexes = mp.getDatasetIndexes(dataset.getDataverseName(), dataset.getDatasetName());
    // Set the top operator pointer to the primary IndexInsertDeleteOperator
    ILogicalOperator currentTop = primaryIndexModificationOp;
    boolean hasSecondaryIndex = false;
    // Put an n-gram or a keyword index in the later stage of index-update,
    // since TokenizeOperator needs to be involved.
    Collections.sort(indexes, (o1, o2) -> o1.getIndexType().ordinal() - o2.getIndexType().ordinal());
    // At this point, we have the data type info, and the indexes info as well
    int secondaryIndexTotalCnt = indexes.size() - 1;
    if (secondaryIndexTotalCnt > 0) {
        op0.getInputs().clear();
    } else {
        return false;
    }
    // Initialize inputs to the SINK operator; op0 (the SINK) now has no input.
    // Prepare filtering field information (this is the filter created using the "filter with"
    // keyword in the create dataset DDL).
    List<String> filteringFields = ((InternalDatasetDetails) dataset.getDatasetDetails()).getFilterField();
    List<LogicalVariable> filteringVars;
    List<Mutable<ILogicalExpression>> filteringExpressions = null;
    if (filteringFields != null) {
        // The filter field var already exists. we can simply get it from the insert op
        filteringVars = new ArrayList<>();
        filteringExpressions = new ArrayList<>();
        for (Mutable<ILogicalExpression> filteringExpression : primaryIndexModificationOp.getAdditionalFilteringExpressions()) {
            filteringExpression.getValue().getUsedVariables(filteringVars);
            for (LogicalVariable var : filteringVars) {
                filteringExpressions.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(var)));
            }
        }
    }
    // Replicate Operator is applied only when doing the bulk-load.
    ReplicateOperator replicateOp = null;
    if (secondaryIndexTotalCnt > 1 && primaryIndexModificationOp.isBulkload()) {
        // Split the logical plan into "each secondary index update branch"
        // to replicate each <PK,OBJECT> pair.
        replicateOp = new ReplicateOperator(secondaryIndexTotalCnt);
        replicateOp.getInputs().add(new MutableObject<ILogicalOperator>(currentTop));
        replicateOp.setExecutionMode(ExecutionMode.PARTITIONED);
        context.computeAndSetTypeEnvironmentForOperator(replicateOp);
        currentTop = replicateOp;
    }
    /*
         * The two maps are used to store variables to which [casted] field access is assigned.
         * One for the beforeOp record and the other for the new record.
         * There are two uses for these maps:
         * 1. used for shared fields in indexes with overlapping keys.
         * 2. used for setting variables of secondary keys for each secondary index operator.
         */
    Map<IndexFieldId, LogicalVariable> fieldVarsForBeforeOperation = new HashMap<>();
    Map<IndexFieldId, LogicalVariable> fieldVarsForNewRecord = new HashMap<>();
    /*
         * if the index is enforcing field types (For open indexes), We add a cast
         * operator to ensure type safety
         */
    try {
        if (primaryIndexModificationOp.getOperation() == Kind.INSERT || primaryIndexModificationOp.getOperation() == Kind.UPSERT || /* Actually, delete should not be here but it is now until issue
                     * https://issues.apache.org/jira/browse/ASTERIXDB-1507
                     * is solved
                     */
        primaryIndexModificationOp.getOperation() == Kind.DELETE) {
            injectFieldAccessesForIndexes(context, dataset, indexes, fieldVarsForNewRecord, recType, metaType, newRecordVar, newMetaVar, primaryIndexModificationOp, false);
            if (replicateOp != null) {
                context.computeAndSetTypeEnvironmentForOperator(replicateOp);
            }
        }
        if (primaryIndexModificationOp.getOperation() == Kind.UPSERT) /* Actually, delete should be here but it is not until issue
             * https://issues.apache.org/jira/browse/ASTERIXDB-1507
             * is solved
             */
        {
            List<LogicalVariable> beforeOpMetaVars = primaryIndexModificationOp.getBeforeOpAdditionalNonFilteringVars();
            LogicalVariable beforeOpMetaVar = beforeOpMetaVars == null ? null : beforeOpMetaVars.get(0);
            currentTop = injectFieldAccessesForIndexes(context, dataset, indexes, fieldVarsForBeforeOperation, recType, metaType, primaryIndexModificationOp.getBeforeOpRecordVar(), beforeOpMetaVar, currentTop, true);
        }
    } catch (AsterixException e) {
        throw new AlgebricksException(e);
    }
    // At first, op1 is the index insert op insertOp
    for (Index index : indexes) {
        if (!index.isSecondaryIndex()) {
            continue;
        }
        hasSecondaryIndex = true;
        // Get the secondary fields names and types
        List<List<String>> secondaryKeyFields = index.getKeyFieldNames();
        List<LogicalVariable> secondaryKeyVars = new ArrayList<>();
        List<Mutable<ILogicalExpression>> secondaryExpressions = new ArrayList<>();
        List<Mutable<ILogicalExpression>> beforeOpSecondaryExpressions = new ArrayList<>();
        ILogicalOperator replicateOutput;
        for (int i = 0; i < secondaryKeyFields.size(); i++) {
            IndexFieldId indexFieldId = new IndexFieldId(index.getKeyFieldSourceIndicators().get(i), secondaryKeyFields.get(i));
            LogicalVariable skVar = fieldVarsForNewRecord.get(indexFieldId);
            secondaryKeyVars.add(skVar);
            secondaryExpressions.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(skVar)));
            if (primaryIndexModificationOp.getOperation() == Kind.UPSERT) {
                beforeOpSecondaryExpressions.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(fieldVarsForBeforeOperation.get(indexFieldId))));
            }
        }
        IndexInsertDeleteUpsertOperator indexUpdate;
        if (index.getIndexType() != IndexType.RTREE) {
            // Create an expression per key
            Mutable<ILogicalExpression> filterExpression = (primaryIndexModificationOp.getOperation() == Kind.UPSERT) ? null : createFilterExpression(secondaryKeyVars, context.getOutputTypeEnvironment(currentTop), index.isEnforcingKeyFileds());
            DataSourceIndex dataSourceIndex = new DataSourceIndex(index, dataverseName, datasetName, mp);
            // Introduce the TokenizeOperator only when bulk-loading and the
            // index type is keyword or n-gram.
            if (index.getIndexType() != IndexType.BTREE && primaryIndexModificationOp.isBulkload()) {
                // Note: Bulk load case, we don't need to take care of it for upsert operation
                // Check whether the index is length-partitioned or not.
                // If partitioned, [input variables to TokenizeOperator,
                // token, number of token] pairs will be generated and
                // fed into the IndexInsertDeleteOperator.
                // If not, [input variables, token] pairs will be generated
                // and fed into the IndexInsertDeleteOperator.
                // Input variables are passed since TokenizeOperator is not an
                // filtering operator.
                boolean isPartitioned = index.getIndexType() == IndexType.LENGTH_PARTITIONED_WORD_INVIX || index.getIndexType() == IndexType.LENGTH_PARTITIONED_NGRAM_INVIX;
                // Create a new logical variable - token
                List<LogicalVariable> tokenizeKeyVars = new ArrayList<>();
                List<Mutable<ILogicalExpression>> tokenizeKeyExprs = new ArrayList<>();
                LogicalVariable tokenVar = context.newVar();
                tokenizeKeyVars.add(tokenVar);
                tokenizeKeyExprs.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(tokenVar)));
                // Check the field type of the secondary key.
                IAType secondaryKeyType;
                Pair<IAType, Boolean> keyPairType = Index.getNonNullableOpenFieldType(index.getKeyFieldTypes().get(0), secondaryKeyFields.get(0), recType);
                secondaryKeyType = keyPairType.first;
                List<Object> varTypes = new ArrayList<>();
                varTypes.add(NonTaggedFormatUtil.getTokenType(secondaryKeyType));
                // The type is short, and this does not contain type info.
                if (isPartitioned) {
                    LogicalVariable lengthVar = context.newVar();
                    tokenizeKeyVars.add(lengthVar);
                    tokenizeKeyExprs.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(lengthVar)));
                    varTypes.add(BuiltinType.SHORTWITHOUTTYPEINFO);
                }
                // TokenizeOperator to tokenize [SK, PK] pairs
                TokenizeOperator tokenUpdate = new TokenizeOperator(dataSourceIndex, primaryIndexModificationOp.getPrimaryKeyExpressions(), secondaryExpressions, tokenizeKeyVars, filterExpression, primaryIndexModificationOp.getOperation(), primaryIndexModificationOp.isBulkload(), isPartitioned, varTypes);
                tokenUpdate.getInputs().add(new MutableObject<ILogicalOperator>(currentTop));
                context.computeAndSetTypeEnvironmentForOperator(tokenUpdate);
                replicateOutput = tokenUpdate;
                indexUpdate = new IndexInsertDeleteUpsertOperator(dataSourceIndex, primaryIndexModificationOp.getPrimaryKeyExpressions(), tokenizeKeyExprs, filterExpression, primaryIndexModificationOp.getOperation(), primaryIndexModificationOp.isBulkload(), primaryIndexModificationOp.getAdditionalNonFilteringExpressions() == null ? 0 : primaryIndexModificationOp.getAdditionalNonFilteringExpressions().size());
                indexUpdate.setAdditionalFilteringExpressions(filteringExpressions);
                indexUpdate.getInputs().add(new MutableObject<ILogicalOperator>(tokenUpdate));
            } else {
                // When TokenizeOperator is not needed
                indexUpdate = new IndexInsertDeleteUpsertOperator(dataSourceIndex, primaryIndexModificationOp.getPrimaryKeyExpressions(), secondaryExpressions, filterExpression, primaryIndexModificationOp.getOperation(), primaryIndexModificationOp.isBulkload(), primaryIndexModificationOp.getAdditionalNonFilteringExpressions() == null ? 0 : primaryIndexModificationOp.getAdditionalNonFilteringExpressions().size());
                indexUpdate.setAdditionalFilteringExpressions(filteringExpressions);
                replicateOutput = indexUpdate;
                // We add the necessary expressions for upsert
                if (primaryIndexModificationOp.getOperation() == Kind.UPSERT) {
                    indexUpdate.setBeforeOpSecondaryKeyExprs(beforeOpSecondaryExpressions);
                    if (filteringFields != null) {
                        indexUpdate.setBeforeOpAdditionalFilteringExpression(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(primaryIndexModificationOp.getBeforeOpFilterVar())));
                    }
                }
                indexUpdate.getInputs().add(new MutableObject<ILogicalOperator>(currentTop));
            }
        } else {
            // Get type, dimensions and number of keys
            Pair<IAType, Boolean> keyPairType = Index.getNonNullableOpenFieldType(index.getKeyFieldTypes().get(0), secondaryKeyFields.get(0), recType);
            IAType spatialType = keyPairType.first;
            boolean isPointMBR = spatialType.getTypeTag() == ATypeTag.POINT || spatialType.getTypeTag() == ATypeTag.POINT3D;
            int dimension = NonTaggedFormatUtil.getNumDimensions(spatialType.getTypeTag());
            int numKeys = (isPointMBR && isBulkload) ? dimension : dimension * 2;
            // Get variables and expressions
            List<LogicalVariable> keyVarList = new ArrayList<>();
            List<Mutable<ILogicalExpression>> keyExprList = new ArrayList<>();
            for (int i = 0; i < numKeys; i++) {
                LogicalVariable keyVar = context.newVar();
                keyVarList.add(keyVar);
                AbstractFunctionCallExpression createMBR = new ScalarFunctionCallExpression(FunctionUtil.getFunctionInfo(BuiltinFunctions.CREATE_MBR));
                createMBR.getArguments().add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(secondaryKeyVars.get(0))));
                createMBR.getArguments().add(new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(new AInt32(dimension)))));
                createMBR.getArguments().add(new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(new AInt32(i)))));
                keyExprList.add(new MutableObject<ILogicalExpression>(createMBR));
            }
            secondaryExpressions.clear();
            for (LogicalVariable secondaryKeyVar : keyVarList) {
                secondaryExpressions.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(secondaryKeyVar)));
            }
            if (isPointMBR && isBulkload) {
                // PointMBR bulk-load optimization; see
                // createFieldPermutationForBulkLoadOp(int) for more details.
                for (LogicalVariable secondaryKeyVar : keyVarList) {
                    secondaryExpressions.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(secondaryKeyVar)));
                }
            }
            AssignOperator assignCoordinates = new AssignOperator(keyVarList, keyExprList);
            assignCoordinates.getInputs().add(new MutableObject<ILogicalOperator>(currentTop));
            context.computeAndSetTypeEnvironmentForOperator(assignCoordinates);
            replicateOutput = assignCoordinates;
            Mutable<ILogicalExpression> filterExpression = null;
            AssignOperator originalAssignCoordinates = null;
            // We do something similar for beforeOp key if the operation is an upsert
            if (primaryIndexModificationOp.getOperation() == Kind.UPSERT) {
                List<LogicalVariable> originalKeyVarList = new ArrayList<>();
                List<Mutable<ILogicalExpression>> originalKeyExprList = new ArrayList<>();
                // we don't do any filtering since nulls are expected here and there
                for (int i = 0; i < numKeys; i++) {
                    LogicalVariable keyVar = context.newVar();
                    originalKeyVarList.add(keyVar);
                    AbstractFunctionCallExpression createMBR = new ScalarFunctionCallExpression(FunctionUtil.getFunctionInfo(BuiltinFunctions.CREATE_MBR));
                    createMBR.getArguments().add(beforeOpSecondaryExpressions.get(0));
                    createMBR.getArguments().add(new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(new AInt32(dimension)))));
                    createMBR.getArguments().add(new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(new AInt32(i)))));
                    originalKeyExprList.add(new MutableObject<ILogicalExpression>(createMBR));
                }
                beforeOpSecondaryExpressions.clear();
                for (LogicalVariable secondaryKeyVar : originalKeyVarList) {
                    beforeOpSecondaryExpressions.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(secondaryKeyVar)));
                }
                originalAssignCoordinates = new AssignOperator(originalKeyVarList, originalKeyExprList);
                originalAssignCoordinates.getInputs().add(new MutableObject<ILogicalOperator>(assignCoordinates));
                context.computeAndSetTypeEnvironmentForOperator(originalAssignCoordinates);
            } else {
                // We must enforce the filter if the originating spatial type is
                // nullable.
                boolean forceFilter = keyPairType.second;
                filterExpression = createFilterExpression(keyVarList, context.getOutputTypeEnvironment(assignCoordinates), forceFilter);
            }
            DataSourceIndex dataSourceIndex = new DataSourceIndex(index, dataverseName, datasetName, mp);
            indexUpdate = new IndexInsertDeleteUpsertOperator(dataSourceIndex, primaryIndexModificationOp.getPrimaryKeyExpressions(), secondaryExpressions, filterExpression, primaryIndexModificationOp.getOperation(), primaryIndexModificationOp.isBulkload(), primaryIndexModificationOp.getAdditionalNonFilteringExpressions() == null ? 0 : primaryIndexModificationOp.getAdditionalNonFilteringExpressions().size());
            indexUpdate.setAdditionalFilteringExpressions(filteringExpressions);
            if (primaryIndexModificationOp.getOperation() == Kind.UPSERT) {
                // set before op secondary key expressions
                if (filteringFields != null) {
                    indexUpdate.setBeforeOpAdditionalFilteringExpression(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(primaryIndexModificationOp.getBeforeOpFilterVar())));
                }
                // set filtering expressions
                indexUpdate.setBeforeOpSecondaryKeyExprs(beforeOpSecondaryExpressions);
                // assign --> assign beforeOp values --> secondary index upsert
                indexUpdate.getInputs().add(new MutableObject<ILogicalOperator>(originalAssignCoordinates));
            } else {
                indexUpdate.getInputs().add(new MutableObject<ILogicalOperator>(assignCoordinates));
            }
        }
        context.computeAndSetTypeEnvironmentForOperator(indexUpdate);
        if (!primaryIndexModificationOp.isBulkload() || secondaryIndexTotalCnt == 1) {
            currentTop = indexUpdate;
        } else {
            replicateOp.getOutputs().add(new MutableObject<>(replicateOutput));
        }
        if (primaryIndexModificationOp.isBulkload()) {
            // For bulk load, we connect all fanned-out insert operators to a single SINK operator.
            op0.getInputs().add(new MutableObject<ILogicalOperator>(indexUpdate));
        }
    }
    if (!hasSecondaryIndex) {
        return false;
    }
    if (!primaryIndexModificationOp.isBulkload()) {
        // For the non-bulk-load case, remove the current input to the SINK operator
        // (it was actually already removed above).
        op0.getInputs().clear();
        // Connect the last index update to the SINK
        op0.getInputs().add(new MutableObject<ILogicalOperator>(currentTop));
    }
    return true;
}
Also used : HashMap(java.util.HashMap) ConstantExpression(org.apache.hyracks.algebricks.core.algebra.expressions.ConstantExpression) ArrayList(java.util.ArrayList) Index(org.apache.asterix.metadata.entities.Index) DataSourceIndex(org.apache.asterix.metadata.declared.DataSourceIndex) AString(org.apache.asterix.om.base.AString) AsterixException(org.apache.asterix.common.exceptions.AsterixException) TokenizeOperator(org.apache.hyracks.algebricks.core.algebra.operators.logical.TokenizeOperator) AsterixConstantValue(org.apache.asterix.om.constants.AsterixConstantValue) DelegateOperator(org.apache.hyracks.algebricks.core.algebra.operators.logical.DelegateOperator) List(java.util.List) AOrderedList(org.apache.asterix.om.base.AOrderedList) ArrayList(java.util.ArrayList) CommitOperator(org.apache.asterix.algebra.operators.CommitOperator) ScalarFunctionCallExpression(org.apache.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression) ReplicateOperator(org.apache.hyracks.algebricks.core.algebra.operators.logical.ReplicateOperator) AbstractFunctionCallExpression(org.apache.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) DataSource(org.apache.asterix.metadata.declared.DataSource) MetadataProvider(org.apache.asterix.metadata.declared.MetadataProvider) VariableReferenceExpression(org.apache.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression) MutableObject(org.apache.commons.lang3.mutable.MutableObject) IAObject(org.apache.asterix.om.base.IAObject) IAType(org.apache.asterix.om.types.IAType) LogicalVariable(org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable) AbstractLogicalOperator(org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator) Dataset(org.apache.asterix.metadata.entities.Dataset) ILogicalOperator(org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator) InternalDatasetDetails(org.apache.asterix.metadata.entities.InternalDatasetDetails) DataSourceIndex(org.apache.asterix.metadata.declared.DataSourceIndex) IndexInsertDeleteUpsertOperator(org.apache.hyracks.algebricks.core.algebra.operators.logical.IndexInsertDeleteUpsertOperator) AssignOperator(org.apache.hyracks.algebricks.core.algebra.operators.logical.AssignOperator) AInt32(org.apache.asterix.om.base.AInt32) Mutable(org.apache.commons.lang3.mutable.Mutable) ILogicalExpression(org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression) IndexInsertDeleteUpsertOperator(org.apache.hyracks.algebricks.core.algebra.operators.logical.IndexInsertDeleteUpsertOperator) InsertDeleteUpsertOperator(org.apache.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteUpsertOperator) ARecordType(org.apache.asterix.om.types.ARecordType)
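
Every create-mbr argument above follows the same three-layer wrapping: the int becomes an AInt32, the AInt32 becomes an AsterixConstantValue, and that becomes a ConstantExpression held in a MutableObject. A minimal helper sketch of that wrapping (the helper and class names are ours, not the rule's):

import org.apache.asterix.om.base.AInt32;
import org.apache.asterix.om.constants.AsterixConstantValue;
import org.apache.commons.lang3.mutable.Mutable;
import org.apache.commons.lang3.mutable.MutableObject;
import org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression;
import org.apache.hyracks.algebricks.core.algebra.expressions.ConstantExpression;

public final class IntConstantArg {

    private IntConstantArg() {
    }

    /**
     * Wraps an int as a constant logical expression, as done for the
     * dimension and key-position arguments of create-mbr.
     */
    static Mutable<ILogicalExpression> intConstant(int value) {
        return new MutableObject<>(new ConstantExpression(new AsterixConstantValue(new AInt32(value))));
    }
}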

Example 5 with AInt32

Use of org.apache.asterix.om.base.AInt32 in project asterixdb by apache.

From class PushFieldAccessRule, method propagateFieldAccessRec.

@SuppressWarnings("unchecked")
private boolean propagateFieldAccessRec(Mutable<ILogicalOperator> opRef, IOptimizationContext context, String finalAnnot) throws AlgebricksException {
    AssignOperator access = (AssignOperator) opRef.getValue();
    Mutable<ILogicalOperator> opRef2 = access.getInputs().get(0);
    AbstractLogicalOperator op2 = (AbstractLogicalOperator) opRef2.getValue();
    // If the access is not to an indexed field, push it down so that the scan can be
    // rewritten into an index search.
    if (op2.getOperatorTag() == LogicalOperatorTag.PROJECT || context.checkAndAddToAlreadyCompared(access, op2) && !(op2.getOperatorTag() == LogicalOperatorTag.SELECT && isAccessToIndexedField(access, context))) {
        return false;
    }
    Object annotation = op2.getAnnotations().get(IS_MOVABLE);
    if (annotation != null && !((Boolean) annotation)) {
        return false;
    }
    if (tryingToPushThroughSelectionWithSameDataSource(access, op2)) {
        return false;
    }
    if (testAndModifyRedundantOp(access, op2)) {
        propagateFieldAccessRec(opRef2, context, finalAnnot);
        return true;
    }
    List<LogicalVariable> usedInAccess = new LinkedList<>();
    VariableUtilities.getUsedVariables(access, usedInAccess);
    List<LogicalVariable> produced2 = new LinkedList<>();
    if (op2.getOperatorTag() == LogicalOperatorTag.GROUP) {
        VariableUtilities.getLiveVariables(op2, produced2);
    } else {
        VariableUtilities.getProducedVariables(op2, produced2);
    }
    boolean pushItDown = false;
    List<LogicalVariable> inter = new ArrayList<>(usedInAccess);
    if (inter.isEmpty()) {
        // ground value
        return false;
    }
    inter.retainAll(produced2);
    if (inter.isEmpty()) {
        pushItDown = true;
    } else if (op2.getOperatorTag() == LogicalOperatorTag.GROUP) {
        GroupByOperator g = (GroupByOperator) op2;
        List<Pair<LogicalVariable, LogicalVariable>> varMappings = new ArrayList<>();
        for (Pair<LogicalVariable, Mutable<ILogicalExpression>> p : g.getDecorList()) {
            ILogicalExpression e = p.second.getValue();
            if (e.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
                LogicalVariable decorVar = GroupByOperator.getDecorVariable(p);
                if (inter.contains(decorVar)) {
                    inter.remove(decorVar);
                    LogicalVariable v1 = ((VariableReferenceExpression) e).getVariableReference();
                    varMappings.add(new Pair<>(decorVar, v1));
                }
            }
        }
        if (inter.isEmpty()) {
            boolean changed = false;
            for (Pair<LogicalVariable, LogicalVariable> m : varMappings) {
                LogicalVariable v2 = context.newVar();
                LogicalVariable oldVar = access.getVariables().get(0);
                g.getDecorList().add(new Pair<LogicalVariable, Mutable<ILogicalExpression>>(oldVar, new MutableObject<ILogicalExpression>(new VariableReferenceExpression(v2))));
                changed = true;
                access.getVariables().set(0, v2);
                VariableUtilities.substituteVariables(access, m.first, m.second, context);
            }
            if (changed) {
                context.computeAndSetTypeEnvironmentForOperator(g);
            }
            usedInAccess.clear();
            VariableUtilities.getUsedVariables(access, usedInAccess);
            pushItDown = true;
        }
    }
    if (pushItDown) {
        if (op2.getOperatorTag() == LogicalOperatorTag.NESTEDTUPLESOURCE) {
            Mutable<ILogicalOperator> childOfSubplan = ((NestedTupleSourceOperator) op2).getDataSourceReference().getValue().getInputs().get(0);
            pushAccessDown(opRef, op2, childOfSubplan, context, finalAnnot);
            return true;
        }
        if (op2.getInputs().size() == 1 && !op2.hasNestedPlans()) {
            pushAccessDown(opRef, op2, op2.getInputs().get(0), context, finalAnnot);
            return true;
        } else {
            for (Mutable<ILogicalOperator> inp : op2.getInputs()) {
                HashSet<LogicalVariable> v2 = new HashSet<>();
                VariableUtilities.getLiveVariables(inp.getValue(), v2);
                if (v2.containsAll(usedInAccess)) {
                    pushAccessDown(opRef, op2, inp, context, finalAnnot);
                    return true;
                }
            }
        }
        if (op2.hasNestedPlans()) {
            AbstractOperatorWithNestedPlans nestedOp = (AbstractOperatorWithNestedPlans) op2;
            for (ILogicalPlan plan : nestedOp.getNestedPlans()) {
                for (Mutable<ILogicalOperator> root : plan.getRoots()) {
                    HashSet<LogicalVariable> v2 = new HashSet<>();
                    VariableUtilities.getLiveVariables(root.getValue(), v2);
                    if (v2.containsAll(usedInAccess)) {
                        pushAccessDown(opRef, op2, root, context, finalAnnot);
                        return true;
                    }
                }
            }
        }
        throw new AlgebricksException("Field access " + access.getExpressions().get(0).getValue() + " does not correspond to any input of operator " + op2);
    } else {
        // Check whether the accessed field is one of the partitioning key
        // fields. If yes, we can equate the two variables.
        if (op2.getOperatorTag() == LogicalOperatorTag.DATASOURCESCAN) {
            DataSourceScanOperator scan = (DataSourceScanOperator) op2;
            int n = scan.getVariables().size();
            LogicalVariable scanRecordVar = scan.getVariables().get(n - 1);
            AbstractFunctionCallExpression accessFun = (AbstractFunctionCallExpression) access.getExpressions().get(0).getValue();
            ILogicalExpression e0 = accessFun.getArguments().get(0).getValue();
            LogicalExpressionTag tag = e0.getExpressionTag();
            if (tag == LogicalExpressionTag.VARIABLE) {
                VariableReferenceExpression varRef = (VariableReferenceExpression) e0;
                if (varRef.getVariableReference() == scanRecordVar) {
                    ILogicalExpression e1 = accessFun.getArguments().get(1).getValue();
                    if (e1.getExpressionTag() == LogicalExpressionTag.CONSTANT) {
                        IDataSource<DataSourceId> dataSource = (IDataSource<DataSourceId>) scan.getDataSource();
                        byte dsType = ((DataSource) dataSource).getDatasourceType();
                        if (dsType == DataSource.Type.FEED || dsType == DataSource.Type.LOADABLE) {
                            return false;
                        }
                        DataSourceId asid = dataSource.getId();
                        MetadataProvider mp = (MetadataProvider) context.getMetadataProvider();
                        Dataset dataset = mp.findDataset(asid.getDataverseName(), asid.getDatasourceName());
                        if (dataset == null) {
                            throw new AlgebricksException("Dataset " + asid.getDatasourceName() + " not found.");
                        }
                        if (dataset.getDatasetType() != DatasetType.INTERNAL) {
                            setAsFinal(access, context, finalAnnot);
                            return false;
                        }
                        ConstantExpression ce = (ConstantExpression) e1;
                        IAObject obj = ((AsterixConstantValue) ce.getValue()).getObject();
                        String fldName;
                        if (obj.getType().getTypeTag() == ATypeTag.STRING) {
                            fldName = ((AString) obj).getStringValue();
                        } else {
                            int pos = ((AInt32) obj).getIntegerValue();
                            String tName = dataset.getItemTypeName();
                            IAType t = mp.findType(dataset.getItemTypeDataverseName(), tName);
                            if (t.getTypeTag() != ATypeTag.OBJECT) {
                                return false;
                            }
                            ARecordType rt = (ARecordType) t;
                            if (pos >= rt.getFieldNames().length) {
                                setAsFinal(access, context, finalAnnot);
                                return false;
                            }
                            fldName = rt.getFieldNames()[pos];
                        }
                        int p = DatasetUtil.getPositionOfPartitioningKeyField(dataset, fldName);
                        if (p < 0) {
                            // not one of the partitioning fields
                            setAsFinal(access, context, finalAnnot);
                            return false;
                        }
                        LogicalVariable keyVar = scan.getVariables().get(p);
                        access.getExpressions().get(0).setValue(new VariableReferenceExpression(keyVar));
                        return true;
                    }
                }
            }
        }
        setAsFinal(access, context, finalAnnot);
        return false;
    }
}
Also used : ConstantExpression(org.apache.hyracks.algebricks.core.algebra.expressions.ConstantExpression) ArrayList(java.util.ArrayList) AString(org.apache.asterix.om.base.AString) DataSourceScanOperator(org.apache.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator) AsterixConstantValue(org.apache.asterix.om.constants.AsterixConstantValue) LinkedList(java.util.LinkedList) List(java.util.List) Pair(org.apache.hyracks.algebricks.common.utils.Pair) HashSet(java.util.HashSet) AbstractOperatorWithNestedPlans(org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractOperatorWithNestedPlans) LogicalVariable(org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable) GroupByOperator(org.apache.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator) AbstractLogicalOperator(org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator) Dataset(org.apache.asterix.metadata.entities.Dataset) ILogicalOperator(org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator) AbstractFunctionCallExpression(org.apache.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression) IAObject(org.apache.asterix.om.base.IAObject) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) AssignOperator(org.apache.hyracks.algebricks.core.algebra.operators.logical.AssignOperator) AInt32(org.apache.asterix.om.base.AInt32) IDataSource(org.apache.hyracks.algebricks.core.algebra.metadata.IDataSource) DataSource(org.apache.asterix.metadata.declared.DataSource) ILogicalExpression(org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression) MetadataProvider(org.apache.asterix.metadata.declared.MetadataProvider) VariableReferenceExpression(org.apache.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression) LogicalExpressionTag(org.apache.hyracks.algebricks.core.algebra.base.LogicalExpressionTag) ILogicalPlan(org.apache.hyracks.algebricks.core.algebra.base.ILogicalPlan) MutableObject(org.apache.commons.lang3.mutable.MutableObject) ARecordType(org.apache.asterix.om.types.ARecordType) DataSourceId(org.apache.asterix.metadata.declared.DataSourceId) IAType(org.apache.asterix.om.types.IAType)
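
The name-or-position branch near the end is the key AInt32 usage here: a constant field accessor carries either an AString (access by name) or an AInt32 (access by position into the record type's field-name array). A minimal sketch of that resolution, with resolveFieldName as a hypothetical helper standing in for the inline logic:

import org.apache.asterix.om.base.AInt32;
import org.apache.asterix.om.base.AString;
import org.apache.asterix.om.base.IAObject;
import org.apache.asterix.om.types.ATypeTag;

public final class FieldNameResolver {

    private FieldNameResolver() {
    }

    /**
     * Resolves a field-access constant to a field name: either the name itself
     * (AString) or a lookup by position (AInt32), mirroring the branch in
     * propagateFieldAccessRec. Returns null when the position is out of range,
     * which is where the rule gives up on the rewrite.
     */
    static String resolveFieldName(IAObject constant, String[] fieldNames) {
        if (constant.getType().getTypeTag() == ATypeTag.STRING) {
            return ((AString) constant).getStringValue();
        }
        int pos = ((AInt32) constant).getIntegerValue();
        return pos < fieldNames.length ? fieldNames[pos] : null;
    }
}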

Aggregations

AInt32 (org.apache.asterix.om.base.AInt32): 24 uses
AsterixConstantValue (org.apache.asterix.om.constants.AsterixConstantValue): 15 uses
ConstantExpression (org.apache.hyracks.algebricks.core.algebra.expressions.ConstantExpression): 14 uses
AString (org.apache.asterix.om.base.AString): 13 uses
ILogicalExpression (org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression): 13 uses
ArrayList (java.util.ArrayList): 11 uses
LogicalVariable (org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable): 10 uses
ScalarFunctionCallExpression (org.apache.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression): 10 uses
VariableReferenceExpression (org.apache.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression): 10 uses
AlgebricksException (org.apache.hyracks.algebricks.common.exceptions.AlgebricksException): 9 uses
Mutable (org.apache.commons.lang3.mutable.Mutable): 8 uses
List (java.util.List): 7 uses
AOrderedList (org.apache.asterix.om.base.AOrderedList): 6 uses
MutableObject (org.apache.commons.lang3.mutable.MutableObject): 6 uses
ILogicalOperator (org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator): 6 uses
AssignOperator (org.apache.hyracks.algebricks.core.algebra.operators.logical.AssignOperator): 6 uses
Dataset (org.apache.asterix.metadata.entities.Dataset): 5 uses
IAObject (org.apache.asterix.om.base.IAObject): 5 uses
ARecordType (org.apache.asterix.om.types.ARecordType): 5 uses
IAType (org.apache.asterix.om.types.IAType): 5 uses