Example 71 with ARecordType

use of org.apache.asterix.om.types.ARecordType in project asterixdb by apache.

the class QueryTranslator method handleCreateDatasetStatement.

public void handleCreateDatasetStatement(MetadataProvider metadataProvider, Statement stmt, IHyracksClientConnection hcc) throws CompilationException, Exception {
    MutableObject<ProgressState> progress = new MutableObject<>(ProgressState.NO_PROGRESS);
    DatasetDecl dd = (DatasetDecl) stmt;
    String dataverseName = getActiveDataverse(dd.getDataverse());
    String datasetName = dd.getName().getValue();
    DatasetType dsType = dd.getDatasetType();
    String itemTypeDataverseName = getActiveDataverse(dd.getItemTypeDataverse());
    String itemTypeName = dd.getItemTypeName().getValue();
    String metaItemTypeDataverseName = getActiveDataverse(dd.getMetaItemTypeDataverse());
    String metaItemTypeName = dd.getMetaItemTypeName().getValue();
    Identifier ngNameId = dd.getNodegroupName();
    String nodegroupName = ngNameId == null ? null : ngNameId.getValue();
    String compactionPolicy = dd.getCompactionPolicy();
    Map<String, String> compactionPolicyProperties = dd.getCompactionPolicyProperties();
    boolean defaultCompactionPolicy = compactionPolicy == null;
    boolean temp = dd.getDatasetDetailsDecl().isTemp();
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    boolean bActiveTxn = true;
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    MetadataLockManager.INSTANCE.createDatasetBegin(metadataProvider.getLocks(), dataverseName, itemTypeDataverseName, itemTypeDataverseName + "." + itemTypeName, metaItemTypeDataverseName, metaItemTypeDataverseName + "." + metaItemTypeName, nodegroupName, compactionPolicy, dataverseName + "." + datasetName, defaultCompactionPolicy);
    Dataset dataset = null;
    try {
        IDatasetDetails datasetDetails = null;
        Dataset ds = metadataProvider.findDataset(dataverseName, datasetName);
        if (ds != null) {
            if (dd.getIfNotExists()) {
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                return;
            } else {
                throw new AlgebricksException("A dataset with this name " + datasetName + " already exists.");
            }
        }
        Datatype dt = MetadataManager.INSTANCE.getDatatype(metadataProvider.getMetadataTxnContext(), itemTypeDataverseName, itemTypeName);
        if (dt == null) {
            throw new AlgebricksException(": type " + itemTypeName + " could not be found.");
        }
        String ngName = ngNameId != null ? ngNameId.getValue() : configureNodegroupForDataset(appCtx, dd.getHints(), dataverseName, datasetName, metadataProvider);
        if (compactionPolicy == null) {
            compactionPolicy = GlobalConfig.DEFAULT_COMPACTION_POLICY_NAME;
            compactionPolicyProperties = GlobalConfig.DEFAULT_COMPACTION_POLICY_PROPERTIES;
        } else {
            validateCompactionPolicy(compactionPolicy, compactionPolicyProperties, mdTxnCtx, false);
        }
        switch(dd.getDatasetType()) {
            case INTERNAL:
                IAType itemType = dt.getDatatype();
                if (itemType.getTypeTag() != ATypeTag.OBJECT) {
                    throw new AlgebricksException("Dataset type has to be a record type.");
                }
                IAType metaItemType = null;
                if (metaItemTypeDataverseName != null && metaItemTypeName != null) {
                    metaItemType = metadataProvider.findType(metaItemTypeDataverseName, metaItemTypeName);
                }
                if (metaItemType != null && metaItemType.getTypeTag() != ATypeTag.OBJECT) {
                    throw new AlgebricksException("Dataset meta type has to be a record type.");
                }
                ARecordType metaRecType = (ARecordType) metaItemType;
                List<List<String>> partitioningExprs = ((InternalDetailsDecl) dd.getDatasetDetailsDecl()).getPartitioningExprs();
                List<Integer> keySourceIndicators = ((InternalDetailsDecl) dd.getDatasetDetailsDecl()).getKeySourceIndicators();
                boolean autogenerated = ((InternalDetailsDecl) dd.getDatasetDetailsDecl()).isAutogenerated();
                ARecordType aRecordType = (ARecordType) itemType;
                List<IAType> partitioningTypes = ValidateUtil.validatePartitioningExpressions(aRecordType, metaRecType, partitioningExprs, keySourceIndicators, autogenerated);
                List<String> filterField = ((InternalDetailsDecl) dd.getDatasetDetailsDecl()).getFilterField();
                if (filterField != null) {
                    ValidateUtil.validateFilterField(aRecordType, filterField);
                }
                if (compactionPolicy == null && filterField != null) {
                    // If the dataset has a filter and the user didn't specify a merge policy,
                    // pick correlated-prefix as the default merge policy.
                    compactionPolicy = GlobalConfig.DEFAULT_FILTERED_DATASET_COMPACTION_POLICY_NAME;
                    compactionPolicyProperties = GlobalConfig.DEFAULT_COMPACTION_POLICY_PROPERTIES;
                }
                datasetDetails = new InternalDatasetDetails(InternalDatasetDetails.FileStructure.BTREE, InternalDatasetDetails.PartitioningStrategy.HASH, partitioningExprs, partitioningExprs, keySourceIndicators, partitioningTypes, autogenerated, filterField, temp);
                break;
            case EXTERNAL:
                String adapter = ((ExternalDetailsDecl) dd.getDatasetDetailsDecl()).getAdapter();
                Map<String, String> properties = ((ExternalDetailsDecl) dd.getDatasetDetailsDecl()).getProperties();
                datasetDetails = new ExternalDatasetDetails(adapter, properties, new Date(), TransactionState.COMMIT);
                break;
            default:
                throw new CompilationException("Unknown datatype " + dd.getDatasetType());
        }
        // #. initialize DatasetIdFactory if it is not initialized.
        if (!DatasetIdFactory.isInitialized()) {
            DatasetIdFactory.initialize(MetadataManager.INSTANCE.getMostRecentDatasetId());
        }
        // #. add a new dataset with PendingAddOp
        dataset = new Dataset(dataverseName, datasetName, itemTypeDataverseName, itemTypeName, metaItemTypeDataverseName, metaItemTypeName, ngName, compactionPolicy, compactionPolicyProperties, datasetDetails, dd.getHints(), dsType, DatasetIdFactory.generateDatasetId(), MetadataUtil.PENDING_ADD_OP);
        MetadataManager.INSTANCE.addDataset(metadataProvider.getMetadataTxnContext(), dataset);
        if (dd.getDatasetType() == DatasetType.INTERNAL) {
            JobSpecification jobSpec = DatasetUtil.createDatasetJobSpec(dataset, metadataProvider);
            // #. make metadataTxn commit before calling runJob.
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            bActiveTxn = false;
            progress.setValue(ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA);
            // #. runJob
            JobUtils.runJob(hcc, jobSpec, true);
            // #. begin new metadataTxn
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            bActiveTxn = true;
            metadataProvider.setMetadataTxnContext(mdTxnCtx);
        }
        // #. add a new dataset with PendingNoOp after deleting the dataset with PendingAddOp
        MetadataManager.INSTANCE.dropDataset(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName);
        dataset.setPendingOp(MetadataUtil.PENDING_NO_OP);
        MetadataManager.INSTANCE.addDataset(metadataProvider.getMetadataTxnContext(), dataset);
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        if (bActiveTxn) {
            abort(e, e, mdTxnCtx);
        }
        if (progress.getValue() == ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA) {
            // #. execute compensation operations
            // remove the index in NC
            // [Notice]
            // As long as we have updated (and committed) the metadata, we should remove any effect of the job,
            // because an exception occurred during runJob.
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            bActiveTxn = true;
            metadataProvider.setMetadataTxnContext(mdTxnCtx);
            try {
                JobSpecification jobSpec = DatasetUtil.dropDatasetJobSpec(dataset, metadataProvider);
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                bActiveTxn = false;
                JobUtils.runJob(hcc, jobSpec, true);
            } catch (Exception e2) {
                e.addSuppressed(e2);
                if (bActiveTxn) {
                    abort(e, e2, mdTxnCtx);
                }
            }
            // remove the record from the metadata.
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            metadataProvider.setMetadataTxnContext(mdTxnCtx);
            try {
                MetadataManager.INSTANCE.dropDataset(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName);
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            } catch (Exception e2) {
                e.addSuppressed(e2);
                abort(e, e2, mdTxnCtx);
                throw new IllegalStateException("System is in an inconsistent state: pending dataset(" + dataverseName + "." + datasetName + ") couldn't be removed from the metadata", e);
            }
        }
        throw e;
    } finally {
        metadataProvider.getLocks().unlock();
    }
}
Also used : ProgressState(org.apache.asterix.common.utils.JobUtils.ProgressState) MetadataTransactionContext(org.apache.asterix.metadata.MetadataTransactionContext) ExternalDetailsDecl(org.apache.asterix.lang.common.statement.ExternalDetailsDecl) DatasetType(org.apache.asterix.common.config.DatasetConfig.DatasetType) IDatasetDetails(org.apache.asterix.metadata.IDatasetDetails) Datatype(org.apache.asterix.metadata.entities.Datatype) DatasetDecl(org.apache.asterix.lang.common.statement.DatasetDecl) Identifier(org.apache.asterix.lang.common.struct.Identifier) ExternalDatasetDetails(org.apache.asterix.metadata.entities.ExternalDatasetDetails) ArrayList(java.util.ArrayList) List(java.util.List) JobSpecification(org.apache.hyracks.api.job.JobSpecification) MutableObject(org.apache.commons.lang3.mutable.MutableObject) CompilationException(org.apache.asterix.common.exceptions.CompilationException) IHyracksDataset(org.apache.hyracks.api.dataset.IHyracksDataset) IDataset(org.apache.asterix.common.metadata.IDataset) Dataset(org.apache.asterix.metadata.entities.Dataset) InternalDatasetDetails(org.apache.asterix.metadata.entities.InternalDatasetDetails) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) Date(java.util.Date) ACIDException(org.apache.asterix.common.exceptions.ACIDException) MetadataException(org.apache.asterix.metadata.MetadataException) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException) CompilationException(org.apache.asterix.common.exceptions.CompilationException) IOException(java.io.IOException) RemoteException(java.rmi.RemoteException) AsterixException(org.apache.asterix.common.exceptions.AsterixException) InternalDetailsDecl(org.apache.asterix.lang.common.statement.InternalDetailsDecl) ARecordType(org.apache.asterix.om.types.ARecordType) IAType(org.apache.asterix.om.types.IAType)
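
A standalone sketch, not QueryTranslator code: the INTERNAL branch above only accepts item types tagged OBJECT before casting them to ARecordType. The class name, helper, and sample fields below are hypothetical, and the BuiltinType scalar constants are assumed to be the usual ones.

import org.apache.asterix.om.types.ARecordType;
import org.apache.asterix.om.types.ATypeTag;
import org.apache.asterix.om.types.BuiltinType;
import org.apache.asterix.om.types.IAType;

// Sketch of the item-type check performed before the cast in the INTERNAL case.
public class RecordItemTypeCheck {

    static ARecordType asRecordType(IAType itemType) {
        if (itemType.getTypeTag() != ATypeTag.OBJECT) {
            // Same constraint the DDL handler enforces for dataset item types.
            throw new IllegalArgumentException("Dataset type has to be a record type.");
        }
        return (ARecordType) itemType;
    }

    public static void main(String[] args) {
        // Hypothetical two-field closed record type used only for this demo.
        ARecordType userType = new ARecordType("UserType",
                new String[] { "id", "name" },
                new IAType[] { BuiltinType.AINT64, BuiltinType.ASTRING }, false);
        System.out.println(asRecordType(userType));
    }
}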

Example 72 with ARecordType

use of org.apache.asterix.om.types.ARecordType in project asterixdb by apache.

the class ConnectorApiServletTest method testFormResponseObject.

@Test
public void testFormResponseObject() throws Exception {
    ConnectorApiServlet let = new ConnectorApiServlet(new ConcurrentHashMap<>(), new String[] { "/" }, (ICcApplicationContext) ExecutionTestUtil.integrationUtil.cc.getApplicationContext());
    ObjectMapper om = new ObjectMapper();
    ObjectNode actualResponse = om.createObjectNode();
    FileSplit[] splits = new FileSplit[2];
    splits[0] = new ManagedFileSplit("asterix_nc1", "foo1");
    splits[1] = new ManagedFileSplit("asterix_nc2", "foo2");
    Map<String, NodeControllerInfo> nodeMap = new HashMap<>();
    NodeControllerInfo mockInfo1 = mock(NodeControllerInfo.class);
    NodeControllerInfo mockInfo2 = mock(NodeControllerInfo.class);
    // Sets up mock returns.
    when(mockInfo1.getNetworkAddress()).thenReturn(new NetworkAddress("127.0.0.1", 3099));
    when(mockInfo2.getNetworkAddress()).thenReturn(new NetworkAddress("127.0.0.2", 3099));
    String[] fieldNames = new String[] { "a1", "a2" };
    IAType[] fieldTypes = new IAType[] { BuiltinType.ABOOLEAN, BuiltinType.ADAYTIMEDURATION };
    ARecordType recordType = new ARecordType("record", fieldNames, fieldTypes, true);
    String primaryKey = "a1";
    // Calls ConnectorAPIServlet.formResponseObject.
    nodeMap.put("asterix_nc1", mockInfo1);
    nodeMap.put("asterix_nc2", mockInfo2);
    PA.invokeMethod(let, "formResponseObject(" + ObjectNode.class.getName() + ", " + FileSplit.class.getName() + "[], " + ARecordType.class.getName() + ", " + String.class.getName() + ", boolean, " + Map.class.getName() + ")", actualResponse, splits, recordType, primaryKey, true, nodeMap);
    // Constructs expected response.
    ObjectNode expectedResponse = om.createObjectNode();
    expectedResponse.put("temp", true);
    expectedResponse.put("keys", primaryKey);
    expectedResponse.set("type", recordType.toJSON());
    ArrayNode splitsArray = om.createArrayNode();
    ObjectNode element1 = om.createObjectNode();
    element1.put("ip", "127.0.0.1");
    element1.put("path", splits[0].getPath());
    ObjectNode element2 = om.createObjectNode();
    element2.put("ip", "127.0.0.2");
    element2.put("path", splits[1].getPath());
    splitsArray.add(element1);
    splitsArray.add(element2);
    expectedResponse.set("splits", splitsArray);
    // Checks results.
    Assert.assertEquals(actualResponse.toString(), expectedResponse.toString());
}
Also used : ObjectNode(com.fasterxml.jackson.databind.node.ObjectNode) HashMap(java.util.HashMap) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) ManagedFileSplit(org.apache.hyracks.api.io.ManagedFileSplit) FileSplit(org.apache.hyracks.api.io.FileSplit) ManagedFileSplit(org.apache.hyracks.api.io.ManagedFileSplit) NetworkAddress(org.apache.hyracks.api.comm.NetworkAddress) NodeControllerInfo(org.apache.hyracks.api.client.NodeControllerInfo) ArrayNode(com.fasterxml.jackson.databind.node.ArrayNode) ConnectorApiServlet(org.apache.asterix.api.http.server.ConnectorApiServlet) ARecordType(org.apache.asterix.om.types.ARecordType) HashMap(java.util.HashMap) Map(java.util.Map) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) IAType(org.apache.asterix.om.types.IAType) Test(org.junit.Test) SqlppExecutionTest(org.apache.asterix.test.runtime.SqlppExecutionTest)
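
A reduced sketch of the construction the test exercises, assuming only the ARecordType constructor and the toJSON() call seen above: it rebuilds the two-field open record type and embeds its JSON form under the "type" key, the way formResponseObject does. The class name is made up for this demo.

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.asterix.om.types.ARecordType;
import org.apache.asterix.om.types.BuiltinType;
import org.apache.asterix.om.types.IAType;

// Sketch only: shows the JSON shape of the record type used in the test.
public class RecordTypeToJsonDemo {
    public static void main(String[] args) throws Exception {
        String[] fieldNames = { "a1", "a2" };
        IAType[] fieldTypes = { BuiltinType.ABOOLEAN, BuiltinType.ADAYTIMEDURATION };
        ARecordType recordType = new ARecordType("record", fieldNames, fieldTypes, true);

        ObjectMapper om = new ObjectMapper();
        ObjectNode response = om.createObjectNode();
        response.set("type", recordType.toJSON());
        System.out.println(om.writerWithDefaultPrettyPrinter().writeValueAsString(response));
    }
}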

Example 73 with ARecordType

use of org.apache.asterix.om.types.ARecordType in project asterixdb by apache.

the class JObjectPointableVisitor method visit.

@Override
public IJObject visit(ARecordVisitablePointable accessor, TypeInfo arg) throws HyracksDataException {
    IJObject result = null;
    IJRecordAccessor jRecordAccessor = raccessorToJObject.get(accessor);
    if (jRecordAccessor == null) {
        jRecordAccessor = new JRecordAccessor(accessor.getInputRecordType(), arg.getObjectPool());
        raccessorToJObject.put(accessor, jRecordAccessor);
    }
    result = jRecordAccessor.access(accessor, arg.getObjectPool(), (ARecordType) arg.getAtype(), this);
    return result;
}
Also used : IJObject(org.apache.asterix.external.api.IJObject) JRecordAccessor(org.apache.asterix.external.library.java.JObjectAccessors.JRecordAccessor) IJRecordAccessor(org.apache.asterix.external.api.IJRecordAccessor) ARecordType(org.apache.asterix.om.types.ARecordType) IJRecordAccessor(org.apache.asterix.external.api.IJRecordAccessor)
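
The visit method above builds one JRecordAccessor per record pointable and caches it for reuse. A generic sketch of that memoization pattern follows; P and A are placeholder type parameters standing in for the pointable and accessor types, not AsterixDB classes.

import java.util.HashMap;
import java.util.Map;
import java.util.function.Function;

// Generic sketch of the accessor-caching pattern used in JObjectPointableVisitor.
public class AccessorCache<P, A> {
    private final Map<P, A> cache = new HashMap<>();
    private final Function<P, A> factory;

    public AccessorCache(Function<P, A> factory) {
        this.factory = factory;
    }

    public A get(P pointable) {
        // Collapses the get / null-check / put sequence from the visit method.
        return cache.computeIfAbsent(pointable, factory);
    }

    public static void main(String[] args) {
        AccessorCache<String, Integer> lengths = new AccessorCache<>(String::length);
        System.out.println(lengths.get("record")); // 6, computed once and then cached
    }
}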

Example 74 with ARecordType

use of org.apache.asterix.om.types.ARecordType in project asterixdb by apache.

the class JTypeObjectFactory method create.

@Override
public IJObject create(IAType type) {
    IJObject retValue = null;
    switch(type.getTypeTag()) {
        case INTEGER:
            retValue = new JInt(0);
            break;
        case STRING:
            retValue = new JString("");
            break;
        case FLOAT:
            retValue = new JFloat(0);
            break;
        case DOUBLE:
            retValue = new JDouble(0);
            break;
        case BOOLEAN:
            retValue = new JBoolean(false);
            break;
        case CIRCLE:
            retValue = new JCircle(new JPoint(0, 0), 0);
            break;
        case POINT:
            retValue = new JPoint(0, 0);
            break;
        case POINT3D:
            retValue = new JPoint3D(0, 0, 0);
            break;
        case POLYGON:
            retValue = new JPolygon(new JPoint[] {});
            break;
        case LINE:
            retValue = new JLine(new JPoint(0, 0), new JPoint(0, 0));
            break;
        case RECTANGLE:
            retValue = new JRectangle(new JPoint(0, 0), new JPoint(1, 1));
            break;
        case DATE:
            retValue = new JDate(0);
            break;
        case DATETIME:
            retValue = new JDateTime(0);
            break;
        case DURATION:
            retValue = new JDuration(0, 0);
            break;
        case INTERVAL:
            retValue = new JInterval(0, 0);
            break;
        case TIME:
            retValue = new JTime(0);
            break;
        case BIGINT:
            retValue = new JLong(0);
            break;
        case NULL:
            retValue = JObjects.JNull.INSTANCE;
            break;
        case MISSING:
            retValue = JObjects.JMissing.INSTANCE;
            break;
        case ARRAY:
            AOrderedListType ot = (AOrderedListType) type;
            IAType orderedItemType = ot.getItemType();
            IJObject orderedItemObject = create(orderedItemType);
            retValue = new JOrderedList(orderedItemObject);
            break;
        case MULTISET:
            AUnorderedListType ut = (AUnorderedListType) type;
            IAType unorderedItemType = ut.getItemType();
            IJObject unorderedItemObject = create(unorderedItemType);
            retValue = new JUnorderedList(unorderedItemObject);
            break;
        case OBJECT:
            IAType[] fieldTypes = ((ARecordType) type).getFieldTypes();
            IJObject[] fieldObjects = new IJObject[fieldTypes.length];
            int index = 0;
            for (IAType fieldType : fieldTypes) {
                fieldObjects[index] = create(fieldType);
                index++;
            }
            retValue = new JRecord((ARecordType) type, fieldObjects);
            break;
        case UNION:
            AUnionType unionType = (AUnionType) type;
            IJObject itemObject = null;
            if (unionType.isMissableType()) {
                // Build a default value for the union's non-missing branch (avoids re-entering the UNION case).
                itemObject = create(unionType.getActualType());
            }
            retValue = itemObject;
            break;
        default:
            break;
    }
    return retValue;
}
Also used : JRecord(org.apache.asterix.external.library.java.JObjects.JRecord) JPoint(org.apache.asterix.external.library.java.JObjects.JPoint) AUnionType(org.apache.asterix.om.types.AUnionType) JRectangle(org.apache.asterix.external.library.java.JObjects.JRectangle) JPolygon(org.apache.asterix.external.library.java.JObjects.JPolygon) JInterval(org.apache.asterix.external.library.java.JObjects.JInterval) JTime(org.apache.asterix.external.library.java.JObjects.JTime) JLong(org.apache.asterix.external.library.java.JObjects.JLong) JDouble(org.apache.asterix.external.library.java.JObjects.JDouble) JBoolean(org.apache.asterix.external.library.java.JObjects.JBoolean) JDuration(org.apache.asterix.external.library.java.JObjects.JDuration) JFloat(org.apache.asterix.external.library.java.JObjects.JFloat) JCircle(org.apache.asterix.external.library.java.JObjects.JCircle) JInt(org.apache.asterix.external.library.java.JObjects.JInt) AOrderedListType(org.apache.asterix.om.types.AOrderedListType) IJObject(org.apache.asterix.external.api.IJObject) JLine(org.apache.asterix.external.library.java.JObjects.JLine) JDate(org.apache.asterix.external.library.java.JObjects.JDate) AUnorderedListType(org.apache.asterix.om.types.AUnorderedListType) JPoint(org.apache.asterix.external.library.java.JObjects.JPoint) JUnorderedList(org.apache.asterix.external.library.java.JObjects.JUnorderedList) JDateTime(org.apache.asterix.external.library.java.JObjects.JDateTime) JPoint3D(org.apache.asterix.external.library.java.JObjects.JPoint3D) JOrderedList(org.apache.asterix.external.library.java.JObjects.JOrderedList) JString(org.apache.asterix.external.library.java.JObjects.JString) ARecordType(org.apache.asterix.om.types.ARecordType) IAType(org.apache.asterix.om.types.IAType)
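
For the OBJECT case, the factory above recursively descends through the record's field types. A small sketch of that descent follows, using only ARecordType.getFieldTypes() as seen in the example; the class, the counting helper, and the nested sample types are hypothetical, and the BuiltinType constants are assumed.

import org.apache.asterix.om.types.ARecordType;
import org.apache.asterix.om.types.ATypeTag;
import org.apache.asterix.om.types.BuiltinType;
import org.apache.asterix.om.types.IAType;

// Sketch of the recursive walk over a record's field types, here just counting scalar fields.
public class RecordFieldWalkSketch {

    static int countScalarFields(IAType type) {
        if (type.getTypeTag() == ATypeTag.OBJECT) {
            int count = 0;
            for (IAType fieldType : ((ARecordType) type).getFieldTypes()) {
                count += countScalarFields(fieldType);
            }
            return count;
        }
        return 1;
    }

    public static void main(String[] args) {
        ARecordType inner = new ARecordType("inner", new String[] { "x", "y" },
                new IAType[] { BuiltinType.AINT64, BuiltinType.AINT64 }, false);
        ARecordType outer = new ARecordType("outer", new String[] { "id", "point" },
                new IAType[] { BuiltinType.ASTRING, inner }, false);
        // Prints 3: id, point.x, point.y
        System.out.println(countScalarFields(outer));
    }
}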

Example 75 with ARecordType

use of org.apache.asterix.om.types.ARecordType in project asterixdb by apache.

the class ClosedRecordConstructorResultType method computeType.

@Override
public IAType computeType(ILogicalExpression expression, IVariableTypeEnvironment env, IMetadataProvider<?, ?> metadataProvider) throws AlgebricksException {
    AbstractFunctionCallExpression f = (AbstractFunctionCallExpression) expression;
    String funcName = f.getFunctionIdentifier().getName();
    /*
     * If the type has been top-down propagated, use the enforced type.
     */
    ARecordType type = (ARecordType) TypeCastUtils.getRequiredType(f);
    if (type != null) {
        return type;
    }
    int n = f.getArguments().size() / 2;
    String[] fieldNames = new String[n];
    IAType[] fieldTypes = new IAType[n];
    int i = 0;
    Iterator<Mutable<ILogicalExpression>> argIter = f.getArguments().iterator();
    while (argIter.hasNext()) {
        ILogicalExpression e1 = argIter.next().getValue();
        ILogicalExpression e2 = argIter.next().getValue();
        IAType e2Type = (IAType) env.getType(e2);
        if (e2Type.getTypeTag() == ATypeTag.UNION) {
            AUnionType unionType = (AUnionType) e2Type;
            e2Type = AUnionType.createUnknownableType(unionType.getActualType());
        }
        fieldTypes[i] = e2Type;
        fieldNames[i] = ConstantExpressionUtil.getStringConstant(e1);
        if (fieldNames[i] == null) {
            throw new InvalidExpressionException(funcName, 2 * i, e1, LogicalExpressionTag.CONSTANT);
        }
        i++;
    }
    return new ARecordType(null, fieldNames, fieldTypes, false);
}
Also used : Mutable(org.apache.commons.lang3.mutable.Mutable) ILogicalExpression(org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression) InvalidExpressionException(org.apache.asterix.om.exceptions.InvalidExpressionException) AUnionType(org.apache.asterix.om.types.AUnionType) AbstractFunctionCallExpression(org.apache.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression) ARecordType(org.apache.asterix.om.types.ARecordType) IAType(org.apache.asterix.om.types.IAType)
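
A minimal sketch of the result-type construction above: it builds a closed, anonymous ARecordType from parallel name/type arrays, widening one field with AUnionType.createUnknownableType() as computeType does for UNION-typed value expressions. The class name and sample fields are hypothetical, and the BuiltinType constants are assumed.

import org.apache.asterix.om.types.ARecordType;
import org.apache.asterix.om.types.AUnionType;
import org.apache.asterix.om.types.BuiltinType;
import org.apache.asterix.om.types.IAType;

// Sketch only: closed record type with one unknownable (nullable/missable) field.
public class ClosedRecordResultTypeSketch {
    public static void main(String[] args) {
        String[] fieldNames = { "id", "score" };
        IAType[] fieldTypes = {
            BuiltinType.AINT64,
            AUnionType.createUnknownableType(BuiltinType.ADOUBLE)
        };
        // The null type name mirrors the anonymous record type returned by computeType.
        ARecordType resultType = new ARecordType(null, fieldNames, fieldTypes, false);
        System.out.println(resultType);
    }
}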

Aggregations

ARecordType (org.apache.asterix.om.types.ARecordType): 105
IAType (org.apache.asterix.om.types.IAType): 73
ArrayList (java.util.ArrayList): 48
List (java.util.List): 24
AlgebricksException (org.apache.hyracks.algebricks.common.exceptions.AlgebricksException): 22
ILogicalExpression (org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression): 20
Dataset (org.apache.asterix.metadata.entities.Dataset): 19
AString (org.apache.asterix.om.base.AString): 19
AbstractFunctionCallExpression (org.apache.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression): 19
Test (org.junit.Test): 16
AsterixException (org.apache.asterix.common.exceptions.AsterixException): 15
Index (org.apache.asterix.metadata.entities.Index): 15
LogicalVariable (org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable): 15
CompilationException (org.apache.asterix.common.exceptions.CompilationException): 13
AOrderedListType (org.apache.asterix.om.types.AOrderedListType): 13
Mutable (org.apache.commons.lang3.mutable.Mutable): 13
IOException (java.io.IOException): 12
MetadataException (org.apache.asterix.metadata.MetadataException): 12
AUnionType (org.apache.asterix.om.types.AUnionType): 11
Pair (org.apache.hyracks.algebricks.common.utils.Pair): 10