Example 26 with ARecordType

use of org.apache.asterix.om.types.ARecordType in project asterixdb by apache.

the class IntroduceDynamicTypeCastForExternalFunctionRule method rewriteFunctionArgs.

private boolean rewriteFunctionArgs(ILogicalOperator op, Mutable<ILogicalExpression> expRef, IOptimizationContext context) throws AlgebricksException {
    ILogicalExpression expr = expRef.getValue();
    if (expr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL || !(expr instanceof ScalarFunctionCallExpression)) {
        return false;
    }
    ScalarFunctionCallExpression funcCallExpr = (ScalarFunctionCallExpression) expr;
    boolean changed = false;
    IAType inputRecordType;
    ARecordType requiredRecordType;
    for (int iter1 = 0; iter1 < funcCallExpr.getArguments().size(); iter1++) {
        inputRecordType = (IAType) op.computeOutputTypeEnvironment(context).getType(funcCallExpr.getArguments().get(iter1).getValue());
        if (!(((ExternalScalarFunctionInfo) funcCallExpr.getFunctionInfo()).getArgumenTypes().get(iter1) instanceof ARecordType)) {
            continue;
        }
        requiredRecordType = (ARecordType) ((ExternalScalarFunctionInfo) funcCallExpr.getFunctionInfo()).getArgumenTypes().get(iter1);
        /**
         * The input record type can be a union type,
         * for the case when it comes from a subplan or left-outer join.
         */
        boolean checkUnknown = false;
        while (NonTaggedFormatUtil.isOptional(inputRecordType)) {
            /** while-loop for the case where there is a nested multi-level union */
            inputRecordType = ((AUnionType) inputRecordType).getActualType();
            checkUnknown = true;
        }
        boolean castFlag = !IntroduceDynamicTypeCastRule.compatible(requiredRecordType, inputRecordType);
        if (castFlag || checkUnknown) {
            AbstractFunctionCallExpression castFunc = new ScalarFunctionCallExpression(FunctionUtil.getFunctionInfo(BuiltinFunctions.CAST_TYPE));
            castFunc.getArguments().add(funcCallExpr.getArguments().get(iter1));
            TypeCastUtils.setRequiredAndInputTypes(castFunc, requiredRecordType, inputRecordType);
            funcCallExpr.getArguments().set(iter1, new MutableObject<>(castFunc));
            changed = true;
        }
    }
    return changed;
}
Also used : ExternalScalarFunctionInfo(org.apache.asterix.metadata.functions.ExternalScalarFunctionInfo) ILogicalExpression(org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression) AbstractFunctionCallExpression(org.apache.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression) ARecordType(org.apache.asterix.om.types.ARecordType) ScalarFunctionCallExpression(org.apache.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression) IAType(org.apache.asterix.om.types.IAType)
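
For reference, here is a minimal, hedged sketch of how a required ARecordType like the one compared against above can be constructed; the type name, field names, and field types are hypothetical, and only the public ARecordType constructor and BuiltinType constants are assumed.

import org.apache.asterix.om.types.ARecordType;
import org.apache.asterix.om.types.BuiltinType;
import org.apache.asterix.om.types.IAType;

public class RequiredRecordTypeSketch {
    public static void main(String[] args) {
        // Closed record type: only the declared fields are allowed.
        ARecordType closedType = new ARecordType("UserType",
                new String[] { "id", "name" },
                new IAType[] { BuiltinType.AINT64, BuiltinType.ASTRING },
                false);
        // Open record type: the declared fields plus arbitrary extra fields.
        ARecordType openType = new ARecordType("OpenUserType",
                new String[] { "id" },
                new IAType[] { BuiltinType.AINT64 },
                true);
        System.out.println(closedType.isOpen() + " / " + openType.isOpen());
    }
}

Whether the rule injects a cast then comes down to whether the statically computed input type is compatible with such a required type.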

Example 27 with ARecordType

use of org.apache.asterix.om.types.ARecordType in project asterixdb by apache.

the class IntroduceDynamicTypeCastRule method rewritePost.

@Override
public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
    // Depending on the operator type, we need to extract the following pieces of information.
    AbstractLogicalOperator op;
    ARecordType requiredRecordType;
    LogicalVariable recordVar;
    // We identify INSERT and DISTRIBUTE_RESULT operators.
    AbstractLogicalOperator op1 = (AbstractLogicalOperator) opRef.getValue();
    switch(op1.getOperatorTag()) {
        case SINK:
        case DELEGATE_OPERATOR:
            {
                /**
                 * pattern match: commit insert assign
                 * resulting plan: commit-insert-project-assign
                 */
                if (op1.getOperatorTag() == LogicalOperatorTag.DELEGATE_OPERATOR) {
                    DelegateOperator eOp = (DelegateOperator) op1;
                    if (!(eOp.getDelegate() instanceof CommitOperator)) {
                        return false;
                    }
                }
                AbstractLogicalOperator op2 = (AbstractLogicalOperator) op1.getInputs().get(0).getValue();
                if (op2.getOperatorTag() == LogicalOperatorTag.INSERT_DELETE_UPSERT) {
                    InsertDeleteUpsertOperator insertDeleteOp = (InsertDeleteUpsertOperator) op2;
                    if (insertDeleteOp.getOperation() == InsertDeleteUpsertOperator.Kind.DELETE) {
                        return false;
                    }
                    // Remember this is the operator we need to modify
                    op = insertDeleteOp;
                    // Derive the required ARecordType based on the schema of the DataSource
                    DataSource dataSource = (DataSource) insertDeleteOp.getDataSource();
                    requiredRecordType = (ARecordType) dataSource.getItemType();
                    // Derive the variable which we will potentially wrap with cast/null functions
                    ILogicalExpression expr = insertDeleteOp.getPayloadExpression().getValue();
                    List<LogicalVariable> payloadVars = new ArrayList<>();
                    expr.getUsedVariables(payloadVars);
                    recordVar = payloadVars.get(0);
                } else {
                    return false;
                }
                break;
            }
        case DISTRIBUTE_RESULT:
            {
                // First, see if there was an output-record-type specified
                requiredRecordType = (ARecordType) op1.getAnnotations().get("output-record-type");
                if (requiredRecordType == null) {
                    return false;
                }
                // Remember this is the operator we need to modify
                op = op1;
                recordVar = ((VariableReferenceExpression) ((DistributeResultOperator) op).getExpressions().get(0).getValue()).getVariableReference();
                break;
            }
        default:
            {
                return false;
            }
    }
    // Derive the statically-computed type of the record
    IVariableTypeEnvironment env = op.computeOutputTypeEnvironment(context);
    IAType inputRecordType = (IAType) env.getVarType(recordVar);
    /** The input record type can be a union type, for the case when it comes from a subplan or left-outer join. */
    boolean checkUnknown = false;
    while (NonTaggedFormatUtil.isOptional(inputRecordType)) {
        /** while-loop for the case where there is a nested multi-level union */
        inputRecordType = ((AUnionType) inputRecordType).getActualType();
        checkUnknown = true;
    }
    /** see whether the input record type needs to be cast */
    boolean cast = !compatible(requiredRecordType, inputRecordType);
    if (checkUnknown) {
        recordVar = addWrapperFunction(requiredRecordType, recordVar, op, context, BuiltinFunctions.CHECK_UNKNOWN);
    }
    if (cast) {
        addWrapperFunction(requiredRecordType, recordVar, op, context, BuiltinFunctions.CAST_TYPE);
    }
    return cast || checkUnknown;
}
Also used : LogicalVariable(org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable) AbstractLogicalOperator(org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator) DataSource(org.apache.asterix.metadata.declared.DataSource) ILogicalExpression(org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression) InsertDeleteUpsertOperator(org.apache.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteUpsertOperator) DelegateOperator(org.apache.hyracks.algebricks.core.algebra.operators.logical.DelegateOperator) VariableReferenceExpression(org.apache.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression) ArrayList(java.util.ArrayList) List(java.util.List) ARecordType(org.apache.asterix.om.types.ARecordType) CommitOperator(org.apache.asterix.algebra.operators.CommitOperator) IVariableTypeEnvironment(org.apache.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment) IAType(org.apache.asterix.om.types.IAType)
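
The union-unwrapping loop appears in both rules above; as a rough sketch, it can be factored into a small helper. The class and method names here are made up, and the NonTaggedFormatUtil package path is an assumption that may differ between AsterixDB versions.

import org.apache.asterix.om.types.AUnionType;
import org.apache.asterix.om.types.IAType;
// Package path assumed; in some AsterixDB versions this utility lives under
// org.apache.asterix.om.util instead of org.apache.asterix.om.utils.
import org.apache.asterix.om.utils.NonTaggedFormatUtil;

final class OptionalTypeUtil {
    private OptionalTypeUtil() {
    }

    // Strips nested optional (union) wrappers, mirroring the while-loops in the
    // two rewrite rules above; returns the underlying actual type.
    static IAType stripOption(IAType type) {
        IAType actual = type;
        while (NonTaggedFormatUtil.isOptional(actual)) {
            actual = ((AUnionType) actual).getActualType();
        }
        return actual;
    }
}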

Example 28 with ARecordType

use of org.apache.asterix.om.types.ARecordType in project asterixdb by apache.

the class ConnectorApiServletTest method testGet.

@Test
public void testGet() throws Exception {
    // Starts the test asterixdb cluster.
    SqlppExecutionTest.setUp();
    // Configures a test ConnectorApiServlet.
    ConnectorApiServlet let = new ConnectorApiServlet(new ConcurrentHashMap<>(), new String[] { "/" }, (ICcApplicationContext) ExecutionTestUtil.integrationUtil.cc.getApplicationContext());
    Map<String, NodeControllerInfo> nodeMap = new HashMap<>();
    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    PrintWriter outputWriter = new PrintWriter(outputStream);
    // Creates mocks.
    IHyracksClientConnection mockHcc = mock(IHyracksClientConnection.class);
    NodeControllerInfo mockInfo1 = mock(NodeControllerInfo.class);
    NodeControllerInfo mockInfo2 = mock(NodeControllerInfo.class);
    IServletRequest mockRequest = mock(IServletRequest.class);
    IServletResponse mockResponse = mock(IServletResponse.class);
    FullHttpRequest mockHttpRequest = mock(FullHttpRequest.class);
    // Puts the mock Hyracks connection into the servlet context.
    let.ctx().put(ServletConstants.HYRACKS_CONNECTION_ATTR, mockHcc);
    // Sets up mock returns.
    when(mockRequest.getHttpRequest()).thenReturn(mockHttpRequest);
    when(mockHttpRequest.method()).thenReturn(HttpMethod.GET);
    when(mockRequest.getParameter("dataverseName")).thenReturn("Metadata");
    when(mockRequest.getParameter("datasetName")).thenReturn("Dataset");
    when(mockResponse.writer()).thenReturn(outputWriter);
    when(mockHcc.getNodeControllerInfos()).thenReturn(nodeMap);
    when(mockInfo1.getNetworkAddress()).thenReturn(new NetworkAddress("127.0.0.1", 3099));
    when(mockInfo2.getNetworkAddress()).thenReturn(new NetworkAddress("127.0.0.2", 3099));
    // Registers the mock node controllers, then calls the servlet, which internally invokes ConnectorApiServlet.formResponseObject.
    nodeMap.put("asterix_nc1", mockInfo1);
    nodeMap.put("asterix_nc2", mockInfo2);
    let.handle(mockRequest, mockResponse);
    // Parses the actual response.
    ObjectMapper om = new ObjectMapper();
    ObjectNode actualResponse = (ObjectNode) om.readTree(outputStream.toString());
    // Checks the temp flag, primary key, and datatype of the dataset.
    boolean temp = actualResponse.get("temp").asBoolean();
    Assert.assertFalse(temp);
    String primaryKey = actualResponse.get("keys").asText();
    Assert.assertEquals("DataverseName,DatasetName", primaryKey);
    ARecordType recordType = (ARecordType) JSONDeserializerForTypes.convertFromJSON(actualResponse.get("type"));
    Assert.assertEquals(getMetadataRecordType("Metadata", "Dataset"), recordType);
    // Checks the correctness of the returned splits.
    ArrayNode splits = (ArrayNode) actualResponse.get("splits");
    String path = (splits.get(0)).get("path").asText();
    Assert.assertTrue(path.endsWith("Metadata/Dataset_idx_Dataset"));
    // Tears down the asterixdb cluster.
    SqlppExecutionTest.tearDown();
}
Also used : IHyracksClientConnection(org.apache.hyracks.api.client.IHyracksClientConnection) FullHttpRequest(io.netty.handler.codec.http.FullHttpRequest) ObjectNode(com.fasterxml.jackson.databind.node.ObjectNode) HashMap(java.util.HashMap) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) ByteArrayOutputStream(java.io.ByteArrayOutputStream) IServletRequest(org.apache.hyracks.http.api.IServletRequest) NetworkAddress(org.apache.hyracks.api.comm.NetworkAddress) NodeControllerInfo(org.apache.hyracks.api.client.NodeControllerInfo) ArrayNode(com.fasterxml.jackson.databind.node.ArrayNode) ConnectorApiServlet(org.apache.asterix.api.http.server.ConnectorApiServlet) IServletResponse(org.apache.hyracks.http.api.IServletResponse) ARecordType(org.apache.asterix.om.types.ARecordType) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) PrintWriter(java.io.PrintWriter) Test(org.junit.Test) SqlppExecutionTest(org.apache.asterix.test.runtime.SqlppExecutionTest)

Example 29 with ARecordType

use of org.apache.asterix.om.types.ARecordType in project asterixdb by apache.

the class ConnectorApiServletTest method getMetadataRecordType.

private ARecordType getMetadataRecordType(String dataverseName, String datasetName) throws Exception {
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    // Creates a metadata provider to look up the record type of the dataset.
    MetadataProvider metadataProvider = new MetadataProvider((ICcApplicationContext) ExecutionTestUtil.integrationUtil.cc.getApplicationContext(), null, new StorageComponentProvider());
    try {
        metadataProvider.setMetadataTxnContext(mdTxnCtx);
        Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
        ARecordType recordType = (ARecordType) metadataProvider.findType(dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
        // Commits the metadata transaction.
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        return recordType;
    } finally {
        metadataProvider.getLocks().unlock();
    }
}
Also used : MetadataProvider(org.apache.asterix.metadata.declared.MetadataProvider) Dataset(org.apache.asterix.metadata.entities.Dataset) MetadataTransactionContext(org.apache.asterix.metadata.MetadataTransactionContext) StorageComponentProvider(org.apache.asterix.file.StorageComponentProvider) ARecordType(org.apache.asterix.om.types.ARecordType)
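
The lookup above commits the metadata transaction only on the success path. Below is a hedged sketch of the usual begin/commit/abort pattern; the helper name and its structure are illustrative, and only the beginTransaction, commitTransaction, and abortTransaction calls on MetadataManager.INSTANCE, plus the MetadataProvider calls already shown, are assumed.

import org.apache.asterix.metadata.MetadataManager;
import org.apache.asterix.metadata.MetadataTransactionContext;
import org.apache.asterix.metadata.declared.MetadataProvider;
import org.apache.asterix.metadata.entities.Dataset;
import org.apache.asterix.om.types.ARecordType;

final class MetadataTxnSketch {
    private MetadataTxnSketch() {
    }

    // Illustrative lookup that aborts the metadata transaction if anything fails
    // before the commit; the findDataset/findType calls mirror the test above.
    static ARecordType findRecordType(MetadataProvider metadataProvider, String dataverseName,
            String datasetName) throws Exception {
        MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        try {
            metadataProvider.setMetadataTxnContext(mdTxnCtx);
            Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
            ARecordType recordType = (ARecordType) metadataProvider.findType(
                    dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            return recordType;
        } catch (Exception e) {
            // Assumed abort call; rolls back the metadata transaction on failure.
            MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
            throw e;
        } finally {
            metadataProvider.getLocks().unlock();
        }
    }
}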

Example 30 with ARecordType

use of org.apache.asterix.om.types.ARecordType in project asterixdb by apache.

the class IndexTupleTranslator method getMetadataEntityFromTuple.

@Override
public Index getMetadataEntityFromTuple(ITupleReference frameTuple) throws MetadataException, HyracksDataException {
    byte[] serRecord = frameTuple.getFieldData(INDEX_PAYLOAD_TUPLE_FIELD_INDEX);
    int recordStartOffset = frameTuple.getFieldStart(INDEX_PAYLOAD_TUPLE_FIELD_INDEX);
    int recordLength = frameTuple.getFieldLength(INDEX_PAYLOAD_TUPLE_FIELD_INDEX);
    ByteArrayInputStream stream = new ByteArrayInputStream(serRecord, recordStartOffset, recordLength);
    DataInput in = new DataInputStream(stream);
    ARecord rec = recordSerde.deserialize(in);
    String dvName = ((AString) rec.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_DATAVERSENAME_FIELD_INDEX)).getStringValue();
    String dsName = ((AString) rec.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_DATASETNAME_FIELD_INDEX)).getStringValue();
    String indexName = ((AString) rec.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_INDEXNAME_FIELD_INDEX)).getStringValue();
    IndexType indexStructure = IndexType.valueOf(((AString) rec.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_INDEXSTRUCTURE_FIELD_INDEX)).getStringValue());
    IACursor fieldNameCursor = ((AOrderedList) rec.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_SEARCHKEY_FIELD_INDEX)).getCursor();
    List<List<String>> searchKey = new ArrayList<>();
    AOrderedList fieldNameList;
    while (fieldNameCursor.next()) {
        fieldNameList = (AOrderedList) fieldNameCursor.get();
        IACursor nestedFieldNameCursor = (fieldNameList.getCursor());
        List<String> nestedFieldName = new ArrayList<>();
        while (nestedFieldNameCursor.next()) {
            nestedFieldName.add(((AString) nestedFieldNameCursor.get()).getStringValue());
        }
        searchKey.add(nestedFieldName);
    }
    int indexKeyTypeFieldPos = rec.getType().getFieldIndex(INDEX_SEARCHKEY_TYPE_FIELD_NAME);
    IACursor fieldTypeCursor = new ACollectionCursor();
    if (indexKeyTypeFieldPos > 0) {
        fieldTypeCursor = ((AOrderedList) rec.getValueByPos(indexKeyTypeFieldPos)).getCursor();
    }
    List<IAType> searchKeyType = new ArrayList<>(searchKey.size());
    while (fieldTypeCursor.next()) {
        String typeName = ((AString) fieldTypeCursor.get()).getStringValue();
        IAType fieldType = BuiltinTypeMap.getTypeFromTypeName(metadataNode, jobId, dvName, typeName, false);
        searchKeyType.add(fieldType);
    }
    int isEnforcedFieldPos = rec.getType().getFieldIndex(INDEX_ISENFORCED_FIELD_NAME);
    Boolean isEnforcingKeys = false;
    if (isEnforcedFieldPos > 0) {
        isEnforcingKeys = ((ABoolean) rec.getValueByPos(isEnforcedFieldPos)).getBoolean();
    }
    Boolean isPrimaryIndex = ((ABoolean) rec.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_ISPRIMARY_FIELD_INDEX)).getBoolean();
    int pendingOp = ((AInt32) rec.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_PENDINGOP_FIELD_INDEX)).getIntegerValue();
    // Check if there is a gram length as well.
    int gramLength = -1;
    int gramLenPos = rec.getType().getFieldIndex(GRAM_LENGTH_FIELD_NAME);
    if (gramLenPos >= 0) {
        gramLength = ((AInt32) rec.getValueByPos(gramLenPos)).getIntegerValue();
    }
    // Read a field-source-indicator field.
    List<Integer> keyFieldSourceIndicator = new ArrayList<>();
    int keyFieldSourceIndicatorIndex = rec.getType().getFieldIndex(INDEX_SEARCHKEY_SOURCE_INDICATOR_FIELD_NAME);
    if (keyFieldSourceIndicatorIndex >= 0) {
        IACursor cursor = ((AOrderedList) rec.getValueByPos(keyFieldSourceIndicatorIndex)).getCursor();
        while (cursor.next()) {
            keyFieldSourceIndicator.add((int) ((AInt8) cursor.get()).getByteValue());
        }
    } else {
        for (int index = 0; index < searchKey.size(); ++index) {
            keyFieldSourceIndicator.add(0);
        }
    }
    // Index key type information is not persisted, so we extract it from the record metadata.
    if (searchKeyType.isEmpty()) {
        try {
            Dataset dSet = metadataNode.getDataset(jobId, dvName, dsName);
            String datatypeName = dSet.getItemTypeName();
            String datatypeDataverseName = dSet.getItemTypeDataverseName();
            ARecordType recordDt = (ARecordType) metadataNode.getDatatype(jobId, datatypeDataverseName, datatypeName).getDatatype();
            String metatypeName = dSet.getMetaItemTypeName();
            String metatypeDataverseName = dSet.getMetaItemTypeDataverseName();
            ARecordType metaDt = null;
            if (metatypeName != null && metatypeDataverseName != null) {
                metaDt = (ARecordType) metadataNode.getDatatype(jobId, metatypeDataverseName, metatypeName).getDatatype();
            }
            try {
                searchKeyType = KeyFieldTypeUtil.getKeyTypes(recordDt, metaDt, searchKey, keyFieldSourceIndicator);
            } catch (AlgebricksException e) {
                throw new MetadataException(e);
            }
        } catch (RemoteException re) {
            throw HyracksDataException.create(re);
        }
    }
    return new Index(dvName, dsName, indexName, indexStructure, searchKey, keyFieldSourceIndicator, searchKeyType, gramLength, isEnforcingKeys, isPrimaryIndex, pendingOp);
}
Also used : ACollectionCursor(org.apache.asterix.om.base.ACollectionCursor) ArrayList(java.util.ArrayList) Index(org.apache.asterix.metadata.entities.Index) AString(org.apache.asterix.om.base.AString) MetadataException(org.apache.asterix.metadata.MetadataException) ARecord(org.apache.asterix.om.base.ARecord) AOrderedList(org.apache.asterix.om.base.AOrderedList) AOrderedList(org.apache.asterix.om.base.AOrderedList) ArrayList(java.util.ArrayList) List(java.util.List) IndexType(org.apache.asterix.common.config.DatasetConfig.IndexType) ABoolean(org.apache.asterix.om.base.ABoolean) AString(org.apache.asterix.om.base.AString) Dataset(org.apache.asterix.metadata.entities.Dataset) ABoolean(org.apache.asterix.om.base.ABoolean) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) IACursor(org.apache.asterix.om.base.IACursor) DataInputStream(java.io.DataInputStream) AInt32(org.apache.asterix.om.base.AInt32) DataInput(java.io.DataInput) ByteArrayInputStream(java.io.ByteArrayInputStream) AInt8(org.apache.asterix.om.base.AInt8) RemoteException(java.rmi.RemoteException) ARecordType(org.apache.asterix.om.types.ARecordType) IAType(org.apache.asterix.om.types.IAType)
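
The translator repeatedly probes optional metadata fields by index before reading them. A minimal sketch of that pattern for a string-valued field follows; the helper name and default-value handling are made up, while getType().getFieldIndex and getValueByPos are the same ARecord calls used above.

import org.apache.asterix.om.base.ARecord;
import org.apache.asterix.om.base.AString;
import org.apache.asterix.om.base.IAObject;

final class OptionalFieldSketch {
    private OptionalFieldSketch() {
    }

    // Returns the string value of fieldName if the record's type declares it,
    // otherwise the supplied default; mirrors the gram-length and is-enforced
    // lookups in getMetadataEntityFromTuple above.
    static String getStringOrDefault(ARecord rec, String fieldName, String defaultValue) {
        int pos = rec.getType().getFieldIndex(fieldName);
        if (pos < 0) {
            return defaultValue;
        }
        IAObject value = rec.getValueByPos(pos);
        return ((AString) value).getStringValue();
    }
}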

Aggregations

ARecordType (org.apache.asterix.om.types.ARecordType): 105
IAType (org.apache.asterix.om.types.IAType): 73
ArrayList (java.util.ArrayList): 48
List (java.util.List): 24
AlgebricksException (org.apache.hyracks.algebricks.common.exceptions.AlgebricksException): 22
ILogicalExpression (org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression): 20
Dataset (org.apache.asterix.metadata.entities.Dataset): 19
AString (org.apache.asterix.om.base.AString): 19
AbstractFunctionCallExpression (org.apache.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression): 19
Test (org.junit.Test): 16
AsterixException (org.apache.asterix.common.exceptions.AsterixException): 15
Index (org.apache.asterix.metadata.entities.Index): 15
LogicalVariable (org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable): 15
CompilationException (org.apache.asterix.common.exceptions.CompilationException): 13
AOrderedListType (org.apache.asterix.om.types.AOrderedListType): 13
Mutable (org.apache.commons.lang3.mutable.Mutable): 13
IOException (java.io.IOException): 12
MetadataException (org.apache.asterix.metadata.MetadataException): 12
AUnionType (org.apache.asterix.om.types.AUnionType): 11
Pair (org.apache.hyracks.algebricks.common.utils.Pair): 10