Search in sources :

Example 86 with IAType

use of org.apache.asterix.om.types.IAType in project asterixdb by apache.

The following example shows the `set` method of the class ARecordVisitablePointable.

/**
 * Resets this pointable and then parses the serialized record in {@code b}
 * starting at {@code start}, eagerly materializing field offsets, field
 * names, type tags and field values into this object's internal lists.
 * Handles both the closed (schema) part and, for open record types, the
 * expanded open part of the record.
 *
 * Any exception raised while decoding is wrapped in an
 * {@link IllegalStateException}, since a malformed record indicates a
 * programming/serialization error rather than a recoverable condition.
 *
 * @param b     byte array holding the serialized record
 * @param start offset of the record's type tag within {@code b}
 * @param len   serialized length of the record
 */
@Override
public void set(byte[] b, int start, int len) {
    // clear the previous states
    reset();
    super.set(b, start, len);
    boolean isExpanded = false;
    int openPartOffset = 0;
    // all stored offsets are relative to the record start; recordOffset
    // rebases them to absolute positions in b
    int recordOffset = start;
    int offsetArrayOffset;
    //advance to either isExpanded or numberOfSchemaFields
    // (skips the leading bytes of the record header)
    int s = start + 5;
    //inputRecType will never be null.
    if (inputRecType.isOpen()) {
        // open record types carry an extra flag byte: 1 means the record
        // actually contains open (non-schema) fields
        isExpanded = b[s] == 1;
        //advance either to openPartOffset or numberOfSchemaFields
        s += 1;
        if (isExpanded) {
            // openPartOffset is stored relative to the record start
            openPartOffset = start + AInt32SerializerDeserializer.getInt(b, s);
            //advance to numberOfSchemaFields
            s += 4;
        }
    }
    try {
        if (numberOfSchemaFields > 0) {
            //advance to nullBitMap if hasOptionalFields, or fieldOffsets
            s += 4;
            int nullBitMapOffset = 0;
            boolean hasOptionalFields = NonTaggedFormatUtil.hasOptionalField(inputRecType);
            if (hasOptionalFields) {
                nullBitMapOffset = s;
                // the null bitmap uses 2 bits per field (null bit + missing
                // bit), i.e. 4 fields per byte; round up to whole bytes
                offsetArrayOffset = s + (this.numberOfSchemaFields % 4 == 0 ? numberOfSchemaFields / 4 : numberOfSchemaFields / 4 + 1);
            } else {
                offsetArrayOffset = s;
            }
            // read the 4-byte relative offset of each schema field and
            // rebase it to an absolute position in b
            for (int i = 0; i < numberOfSchemaFields; i++) {
                fieldOffsets[i] = AInt32SerializerDeserializer.getInt(b, offsetArrayOffset) + recordOffset;
                offsetArrayOffset += 4;
            }
            for (int fieldNumber = 0; fieldNumber < numberOfSchemaFields; fieldNumber++) {
                if (hasOptionalFields) {
                    byte b1 = b[nullBitMapOffset + fieldNumber / 4];
                    // first of the field's 2 bits: cleared => field is null
                    int p = 1 << (7 - 2 * (fieldNumber % 4));
                    if ((b1 & p) == 0) {
                        // set null value (including type tag inside)
                        fieldValues.add(nullReference);
                        continue;
                    }
                    // second bit: cleared => field is missing
                    p = 1 << (7 - 2 * (fieldNumber % 4) - 1);
                    if ((b1 & p) == 0) {
                        // set missing value (including type tag inside)
                        fieldValues.add(missingReference);
                        continue;
                    }
                }
                IAType[] fieldTypes = inputRecType.getFieldTypes();
                int fieldValueLength = 0;
                IAType fieldType = fieldTypes[fieldNumber];
                if (fieldTypes[fieldNumber].getTypeTag() == ATypeTag.UNION) {
                    // optional (nullable/missing) fields are modeled as a
                    // union; unwrap to the actual value type
                    if (((AUnionType) fieldTypes[fieldNumber]).isUnknownableType()) {
                        fieldType = ((AUnionType) fieldTypes[fieldNumber]).getActualType();
                        typeTag = fieldType.getTypeTag();
                        fieldValueLength = NonTaggedFormatUtil.getFieldValueLength(b, fieldOffsets[fieldNumber], typeTag, false);
                    }
                } else {
                    typeTag = fieldTypes[fieldNumber].getTypeTag();
                    fieldValueLength = NonTaggedFormatUtil.getFieldValueLength(b, fieldOffsets[fieldNumber], typeTag, false);
                }
                // set field value (including the type tag): closed-part
                // values are stored untagged, so re-serialize with a
                // leading type tag byte into the data buffer
                int fstart = dataBos.size();
                dataDos.writeByte(typeTag.serialize());
                dataDos.write(b, fieldOffsets[fieldNumber], fieldValueLength);
                int fend = dataBos.size();
                IVisitablePointable fieldValue = allocator.allocateFieldValue(fieldType);
                fieldValue.set(dataBos.getByteArray(), fstart, fend - fstart);
                fieldValues.add(fieldValue);
            }
        }
        if (isExpanded) {
            // decode the open part: a count, then per-field (hash, offset)
            // pairs (8 bytes each), then the name/value payloads
            int numberOfOpenFields = AInt32SerializerDeserializer.getInt(b, openPartOffset);
            int fieldOffset = openPartOffset + 4 + (8 * numberOfOpenFields);
            for (int i = 0; i < numberOfOpenFields; i++) {
                // set the field name (including a type tag, which is
                // astring)
                int fieldValueLength = NonTaggedFormatUtil.getFieldValueLength(b, fieldOffset, ATypeTag.STRING, false);
                int fnstart = dataBos.size();
                dataDos.writeByte(ATypeTag.SERIALIZED_STRING_TYPE_TAG);
                dataDos.write(b, fieldOffset, fieldValueLength);
                int fnend = dataBos.size();
                IVisitablePointable fieldName = allocator.allocateEmpty();
                fieldName.set(dataBos.getByteArray(), fnstart, fnend - fnstart);
                fieldNames.add(fieldName);
                fieldOffset += fieldValueLength;
                // set the field type tag
                IVisitablePointable fieldTypeTag = allocator.allocateEmpty();
                fieldTypeTag.set(b, fieldOffset, 1);
                fieldTypeTags.add(fieldTypeTag);
                typeTag = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(b[fieldOffset]);
                // set the field value (already including type tag), so no
                // re-serialization is needed; point directly into b
                fieldValueLength = NonTaggedFormatUtil.getFieldValueLength(b, fieldOffset, typeTag, true) + 1;
                // allocate
                IVisitablePointable fieldValueAccessor = allocator.allocateFieldValue(typeTag, b, fieldOffset + 1);
                fieldValueAccessor.set(b, fieldOffset, fieldValueLength);
                fieldValues.add(fieldValueAccessor);
                fieldOffset += fieldValueLength;
            }
        }
    } catch (Exception e) {
        throw new IllegalStateException(e);
    }
}
Also used : IVisitablePointable(org.apache.asterix.om.pointables.base.IVisitablePointable) AUnionType(org.apache.asterix.om.types.AUnionType) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException) IOException(java.io.IOException) IAType(org.apache.asterix.om.types.IAType)

Example 87 with IAType

use of org.apache.asterix.om.types.IAType in project asterixdb by apache.

The following example shows the `handleCreateIndexStatement` method of the class QueryTranslator.

/**
 * Handles a CREATE INDEX statement end-to-end: validates the requested key
 * fields against the dataset's (and optional meta) record type, registers
 * the index in the metadata with PENDING_ADD_OP, runs the Hyracks jobs that
 * create and bulk-load the index artifact on the NCs, and finally replaces
 * the metadata entry with a PENDING_NO_OP one. For external datasets, the
 * dataset's files index is created first (if this is the first index) and a
 * snapshot of the external files is recorded.
 *
 * On failure, the catch block performs best-effort compensation: dropping
 * the files-index artifact if it was replicated, dropping the partially
 * created index artifact, and removing the pending metadata entries. If the
 * compensation itself fails, the system is left in an inconsistent state and
 * an IllegalStateException is thrown.
 *
 * @param metadataProvider provides metadata transactions and locks
 * @param stmt             the CreateIndexStatement to execute
 * @param hcc              client connection used to run the generated jobs
 * @throws Exception if validation, metadata operations, or job execution fail
 */
protected void handleCreateIndexStatement(MetadataProvider metadataProvider, Statement stmt, IHyracksClientConnection hcc) throws Exception {
    ProgressState progress = ProgressState.NO_PROGRESS;
    CreateIndexStatement stmtCreateIndex = (CreateIndexStatement) stmt;
    String dataverseName = getActiveDataverse(stmtCreateIndex.getDataverseName());
    String datasetName = stmtCreateIndex.getDatasetName().getValue();
    List<Integer> keySourceIndicators = stmtCreateIndex.getFieldSourceIndicators();
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    // tracks whether mdTxnCtx is still open, so the catch block knows
    // whether an abort is required
    boolean bActiveTxn = true;
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    MetadataLockManager.INSTANCE.createIndexBegin(metadataProvider.getLocks(), dataverseName, dataverseName + "." + datasetName);
    String indexName = null;
    JobSpecification spec = null;
    Dataset ds = null;
    // For external datasets
    List<ExternalFile> externalFilesSnapshot = null;
    boolean firstExternalDatasetIndex = false;
    boolean filesIndexReplicated = false;
    Index filesIndex = null;
    boolean datasetLocked = false;
    Index index = null;
    try {
        ds = metadataProvider.findDataset(dataverseName, datasetName);
        if (ds == null) {
            throw new AlgebricksException("There is no dataset with this name " + datasetName + " in dataverse " + dataverseName);
        }
        indexName = stmtCreateIndex.getIndexName().getValue();
        index = MetadataManager.INSTANCE.getIndex(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName, indexName);
        Datatype dt = MetadataManager.INSTANCE.getDatatype(metadataProvider.getMetadataTxnContext(), ds.getItemTypeDataverseName(), ds.getItemTypeName());
        ARecordType aRecordType = (ARecordType) dt.getDatatype();
        ARecordType metaRecordType = null;
        if (ds.hasMetaPart()) {
            Datatype metaDt = MetadataManager.INSTANCE.getDatatype(metadataProvider.getMetadataTxnContext(), ds.getMetaItemTypeDataverseName(), ds.getMetaItemTypeName());
            metaRecordType = (ARecordType) metaDt.getDatatype();
        }
        // Resolve each requested key field expression to its field path and
        // type, descending through nested closed record types as needed.
        List<List<String>> indexFields = new ArrayList<>();
        List<IAType> indexFieldTypes = new ArrayList<>();
        int keyIndex = 0;
        for (Pair<List<String>, TypeExpression> fieldExpr : stmtCreateIndex.getFieldExprs()) {
            IAType fieldType = null;
            // choose the record part (record vs. meta) this key comes from
            ARecordType subType = KeyFieldTypeUtil.chooseSource(keySourceIndicators, keyIndex, aRecordType, metaRecordType);
            boolean isOpen = subType.isOpen();
            int i = 0;
            // walk down the nested field path while the types stay closed
            if (fieldExpr.first.size() > 1 && !isOpen) {
                while (i < fieldExpr.first.size() - 1 && !isOpen) {
                    subType = (ARecordType) subType.getFieldType(fieldExpr.first.get(i));
                    i++;
                    isOpen = subType.isOpen();
                }
            }
            if (fieldExpr.second == null) {
                // no explicit type given: derive it from the record schema
                fieldType = subType.getSubFieldType(fieldExpr.first.subList(i, fieldExpr.first.size()));
            } else {
                // explicit type given: only legal for enforced indexes on
                // open fields of the record part
                if (!stmtCreateIndex.isEnforced()) {
                    throw new AlgebricksException("Cannot create typed index on \"" + fieldExpr.first + "\" field without enforcing it's type");
                }
                if (!isOpen) {
                    throw new AlgebricksException("Typed index on \"" + fieldExpr.first + "\" field could be created only for open datatype");
                }
                if (stmtCreateIndex.hasMetaField()) {
                    throw new AlgebricksException("Typed open index can only be created on the record part");
                }
                Map<TypeSignature, IAType> typeMap = TypeTranslator.computeTypes(mdTxnCtx, fieldExpr.second, indexName, dataverseName);
                TypeSignature typeSignature = new TypeSignature(dataverseName, indexName);
                fieldType = typeMap.get(typeSignature);
            }
            if (fieldType == null) {
                throw new AlgebricksException("Unknown type " + (fieldExpr.second == null ? fieldExpr.first : fieldExpr.second));
            }
            indexFields.add(fieldExpr.first);
            indexFieldTypes.add(fieldType);
            ++keyIndex;
        }
        ValidateUtil.validateKeyFields(aRecordType, metaRecordType, indexFields, keySourceIndicators, indexFieldTypes, stmtCreateIndex.getIndexType());
        if (index != null) {
            // an index with the same name already exists: either a silent
            // no-op (IF NOT EXISTS) or an error
            if (stmtCreateIndex.getIfNotExists()) {
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                return;
            } else {
                throw new AlgebricksException("An index with this name " + indexName + " already exists.");
            }
        }
        // Inverted (keyword/ngram) indexes require fixed-length primary
        // keys; if any primary key field is variable-length, print an
        // error message and stop.
        if (stmtCreateIndex.getIndexType() == IndexType.SINGLE_PARTITION_WORD_INVIX || stmtCreateIndex.getIndexType() == IndexType.SINGLE_PARTITION_NGRAM_INVIX || stmtCreateIndex.getIndexType() == IndexType.LENGTH_PARTITIONED_WORD_INVIX || stmtCreateIndex.getIndexType() == IndexType.LENGTH_PARTITIONED_NGRAM_INVIX) {
            List<List<String>> partitioningKeys = ds.getPrimaryKeys();
            for (List<String> partitioningKey : partitioningKeys) {
                IAType keyType = aRecordType.getSubFieldType(partitioningKey);
                ITypeTraits typeTrait = TypeTraitProvider.INSTANCE.getTypeTrait(keyType);
                // If it is not a fixed length
                if (typeTrait.getFixedLength() < 0) {
                    throw new AlgebricksException("The keyword or ngram index -" + indexName + " cannot be created on the dataset -" + datasetName + " due to its variable-length primary key field - " + partitioningKey);
                }
            }
        }
        if (ds.getDatasetType() == DatasetType.INTERNAL) {
            validateIfResourceIsActiveInFeed(ds);
        } else {
            // External dataset: additional validation and files-index setup.
            // Check if the dataset is indexible
            if (!ExternalIndexingOperations.isIndexible((ExternalDatasetDetails) ds.getDatasetDetails())) {
                throw new AlgebricksException("dataset using " + ((ExternalDatasetDetails) ds.getDatasetDetails()).getAdapter() + " Adapter can't be indexed");
            }
            // Check if the name of the index is valid
            if (!ExternalIndexingOperations.isValidIndexName(datasetName, indexName)) {
                throw new AlgebricksException("external dataset index name is invalid");
            }
            // Check if the files index exist
            filesIndex = MetadataManager.INSTANCE.getIndex(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName, IndexingConstants.getFilesIndexName(datasetName));
            firstExternalDatasetIndex = filesIndex == null;
            // Lock external dataset
            ExternalDatasetsRegistry.INSTANCE.buildIndexBegin(ds, firstExternalDatasetIndex);
            datasetLocked = true;
            if (firstExternalDatasetIndex) {
                // Verify that no one has created an index before we acquire the lock
                filesIndex = MetadataManager.INSTANCE.getIndex(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName, IndexingConstants.getFilesIndexName(datasetName));
                if (filesIndex != null) {
                    // lost the race: downgrade to a non-first-index lock
                    ExternalDatasetsRegistry.INSTANCE.buildIndexEnd(ds, firstExternalDatasetIndex);
                    firstExternalDatasetIndex = false;
                    ExternalDatasetsRegistry.INSTANCE.buildIndexBegin(ds, firstExternalDatasetIndex);
                }
            }
            if (firstExternalDatasetIndex) {
                // Get snapshot from External File System
                externalFilesSnapshot = ExternalIndexingOperations.getSnapshotFromExternalFileSystem(ds);
                // Add an entry for the files index
                filesIndex = new Index(dataverseName, datasetName, IndexingConstants.getFilesIndexName(datasetName), IndexType.BTREE, ExternalIndexingOperations.FILE_INDEX_FIELD_NAMES, null, ExternalIndexingOperations.FILE_INDEX_FIELD_TYPES, false, false, MetadataUtil.PENDING_ADD_OP);
                MetadataManager.INSTANCE.addIndex(metadataProvider.getMetadataTxnContext(), filesIndex);
                // Add files to the external files index
                for (ExternalFile file : externalFilesSnapshot) {
                    MetadataManager.INSTANCE.addExternalFile(mdTxnCtx, file);
                }
                // This is the first index for the external dataset, replicate the files index
                spec = ExternalIndexingOperations.buildFilesIndexCreateJobSpec(ds, externalFilesSnapshot, metadataProvider);
                if (spec == null) {
                    throw new CompilationException("Failed to create job spec for replicating Files Index For external dataset");
                }
                filesIndexReplicated = true;
                JobUtils.runJob(hcc, spec, true);
            }
        }
        // check whether there exists another enforced index on the same field
        if (stmtCreateIndex.isEnforced()) {
            List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName);
            for (Index existingIndex : indexes) {
                if (existingIndex.getKeyFieldNames().equals(indexFields) && !existingIndex.getKeyFieldTypes().equals(indexFieldTypes) && existingIndex.isEnforcingKeyFileds()) {
                    throw new CompilationException("Cannot create index " + indexName + " , enforced index " + existingIndex.getIndexName() + " on field \"" + StringUtils.join(indexFields, ',') + "\" is already defined with type \"" + existingIndex.getKeyFieldTypes() + "\"");
                }
            }
        }
        // #. add a new index with PendingAddOp
        index = new Index(dataverseName, datasetName, indexName, stmtCreateIndex.getIndexType(), indexFields, keySourceIndicators, indexFieldTypes, stmtCreateIndex.getGramLength(), stmtCreateIndex.isEnforced(), false, MetadataUtil.PENDING_ADD_OP);
        MetadataManager.INSTANCE.addIndex(metadataProvider.getMetadataTxnContext(), index);
        // #. prepare to create the index artifact in NC.
        spec = IndexUtil.buildSecondaryIndexCreationJobSpec(ds, index, metadataProvider);
        if (spec == null) {
            throw new CompilationException("Failed to create job spec for creating index '" + stmtCreateIndex.getDatasetName() + "." + stmtCreateIndex.getIndexName() + "'");
        }
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        bActiveTxn = false;
        // from this point on, failure requires compensation (see catch)
        progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;
        // #. create the index artifact in NC.
        JobUtils.runJob(hcc, spec, true);
        mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        bActiveTxn = true;
        metadataProvider.setMetadataTxnContext(mdTxnCtx);
        // #. load data into the index in NC.
        spec = IndexUtil.buildSecondaryIndexLoadingJobSpec(ds, index, metadataProvider);
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        bActiveTxn = false;
        JobUtils.runJob(hcc, spec, true);
        // #. begin new metadataTxn
        mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        bActiveTxn = true;
        metadataProvider.setMetadataTxnContext(mdTxnCtx);
        // #. add another new index with PendingNoOp after deleting the index with PendingAddOp
        MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName, indexName);
        index.setPendingOp(MetadataUtil.PENDING_NO_OP);
        MetadataManager.INSTANCE.addIndex(metadataProvider.getMetadataTxnContext(), index);
        // likewise finalize the files index by replacing its entry with
        // PendingAddOp
        if (firstExternalDatasetIndex) {
            MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName, filesIndex.getIndexName());
            filesIndex.setPendingOp(MetadataUtil.PENDING_NO_OP);
            MetadataManager.INSTANCE.addIndex(metadataProvider.getMetadataTxnContext(), filesIndex);
            // update transaction timestamp
            ((ExternalDatasetDetails) ds.getDatasetDetails()).setRefreshTimestamp(new Date());
            MetadataManager.INSTANCE.updateDataset(mdTxnCtx, ds);
        }
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        if (bActiveTxn) {
            abort(e, e, mdTxnCtx);
        }
        // If files index was replicated for external dataset, it should be cleaned up on NC side
        if (filesIndexReplicated) {
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            bActiveTxn = true;
            try {
                JobSpecification jobSpec = ExternalIndexingOperations.buildDropFilesIndexJobSpec(metadataProvider, ds);
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                bActiveTxn = false;
                JobUtils.runJob(hcc, jobSpec, true);
            } catch (Exception e2) {
                e.addSuppressed(e2);
                if (bActiveTxn) {
                    abort(e, e2, mdTxnCtx);
                }
            }
        }
        if (progress == ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA) {
            // #. execute compensation operations
            // remove the index in NC
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            bActiveTxn = true;
            metadataProvider.setMetadataTxnContext(mdTxnCtx);
            try {
                JobSpecification jobSpec = IndexUtil.buildDropIndexJobSpec(index, metadataProvider, ds);
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                bActiveTxn = false;
                JobUtils.runJob(hcc, jobSpec, true);
            } catch (Exception e2) {
                e.addSuppressed(e2);
                if (bActiveTxn) {
                    abort(e, e2, mdTxnCtx);
                }
            }
            if (firstExternalDatasetIndex) {
                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
                metadataProvider.setMetadataTxnContext(mdTxnCtx);
                try {
                    // Drop External Files from metadata
                    MetadataManager.INSTANCE.dropDatasetExternalFiles(mdTxnCtx, ds);
                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                } catch (Exception e2) {
                    e.addSuppressed(e2);
                    abort(e, e2, mdTxnCtx);
                    throw new IllegalStateException("System is inconsistent state: pending files for(" + dataverseName + "." + datasetName + ") couldn't be removed from the metadata", e);
                }
                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
                metadataProvider.setMetadataTxnContext(mdTxnCtx);
                try {
                    // Drop the files index from metadata
                    MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName, IndexingConstants.getFilesIndexName(datasetName));
                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                } catch (Exception e2) {
                    e.addSuppressed(e2);
                    abort(e, e2, mdTxnCtx);
                    throw new IllegalStateException("System is inconsistent state: pending index(" + dataverseName + "." + datasetName + "." + IndexingConstants.getFilesIndexName(datasetName) + ") couldn't be removed from the metadata", e);
                }
            }
            // remove the record from the metadata.
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            metadataProvider.setMetadataTxnContext(mdTxnCtx);
            try {
                MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName, indexName);
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            } catch (Exception e2) {
                e.addSuppressed(e2);
                abort(e, e2, mdTxnCtx);
                throw new IllegalStateException("System is in inconsistent state: pending index(" + dataverseName + "." + datasetName + "." + indexName + ") couldn't be removed from the metadata", e);
            }
        }
        throw e;
    } finally {
        // always release metadata locks and, if taken, the external
        // dataset build lock
        metadataProvider.getLocks().unlock();
        if (datasetLocked) {
            ExternalDatasetsRegistry.INSTANCE.buildIndexEnd(ds, firstExternalDatasetIndex);
        }
    }
}
Also used : ProgressState(org.apache.asterix.common.utils.JobUtils.ProgressState) ArrayList(java.util.ArrayList) MetadataTransactionContext(org.apache.asterix.metadata.MetadataTransactionContext) Index(org.apache.asterix.metadata.entities.Index) Datatype(org.apache.asterix.metadata.entities.Datatype) TypeSignature(org.apache.asterix.om.types.TypeSignature) ExternalDatasetDetails(org.apache.asterix.metadata.entities.ExternalDatasetDetails) ArrayList(java.util.ArrayList) List(java.util.List) JobSpecification(org.apache.hyracks.api.job.JobSpecification) CompilationException(org.apache.asterix.common.exceptions.CompilationException) ITypeTraits(org.apache.hyracks.api.dataflow.value.ITypeTraits) IHyracksDataset(org.apache.hyracks.api.dataset.IHyracksDataset) IDataset(org.apache.asterix.common.metadata.IDataset) Dataset(org.apache.asterix.metadata.entities.Dataset) TypeExpression(org.apache.asterix.lang.common.expression.TypeExpression) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) CreateIndexStatement(org.apache.asterix.lang.common.statement.CreateIndexStatement) ExternalFile(org.apache.asterix.external.indexing.ExternalFile) DatasetNodegroupCardinalityHint(org.apache.asterix.metadata.dataset.hints.DatasetHints.DatasetNodegroupCardinalityHint) AlgebricksAbsolutePartitionConstraint(org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint) Date(java.util.Date) ACIDException(org.apache.asterix.common.exceptions.ACIDException) MetadataException(org.apache.asterix.metadata.MetadataException) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException) CompilationException(org.apache.asterix.common.exceptions.CompilationException) IOException(java.io.IOException) RemoteException(java.rmi.RemoteException) AsterixException(org.apache.asterix.common.exceptions.AsterixException) 
ARecordType(org.apache.asterix.om.types.ARecordType) IAType(org.apache.asterix.om.types.IAType)

Example 88 with IAType

use of org.apache.asterix.om.types.IAType in project asterixdb by apache.

The following example shows the `writeFieldValueToBuffer` method of the class ClassAdParser.

/**
 * Serializes the value of one ClassAd attribute into {@code out} in AsterixDB
 * binary format, coercing the ClassAd value to the declared field type when
 * one is given. The attribute's value is obtained either by evaluating the
 * expression tree against {@code pAd} (with a fallback to the stored
 * "&lt;name&gt;Expr" literal) or directly from a literal node.
 *
 * @param fieldType declared type of the field, or {@code null} for an open
 *                  (untyped) field; optional (union) types are unwrapped to
 *                  their actual type first
 * @param out       destination for the serialized value
 * @param name      attribute name, used for evaluation and error fallback
 * @param tree      expression tree holding the attribute's value
 * @param pAd       the enclosing ClassAd, used to evaluate expressions
 * @throws IOException      if writing to {@code out} fails
 * @throws AsterixException if a nested record/list fails to parse
 */
private void writeFieldValueToBuffer(IAType fieldType, DataOutput out, String name, ExprTree tree, ClassAd pAd) throws IOException, AsterixException {
    Value val;
    // Obtain the attribute's Value: evaluate complex expressions, read
    // literals directly.
    switch(tree.getKind()) {
        case ATTRREF_NODE:
        case CLASSAD_NODE:
        case EXPR_ENVELOPE:
        case EXPR_LIST_NODE:
        case FN_CALL_NODE:
        case OP_NODE:
            val = objectPool.valuePool.get();
            if (!pAd.evaluateAttr(name, val)) {
                // evaluation failed: just write the expr, taken from the
                // stored "<name>Expr" literal
                val = ((Literal) pAd.getAttrList().get(name + "Expr")).getValue();
            }
            break;
        case LITERAL_NODE:
            val = ((Literal) tree.getTree()).getValue();
            break;
        default:
            throw new HyracksDataException("Unknown Expression type detected: " + tree.getKind());
    }
    // Optional (nullable/missing) fields are modeled as a union; unwrap to
    // the actual value type before matching.
    if (fieldType != null) {
        if (NonTaggedFormatUtil.isOptional(fieldType)) {
            fieldType = ((AUnionType) fieldType).getActualType();
        }
    }
    // Dispatch on the ClassAd value's runtime type and serialize it,
    // applying numeric widening/narrowing when the declared type differs.
    switch(val.getValueType()) {
        case ABSOLUTE_TIME_VALUE:
            if (checkType(ATypeTag.DATETIME, fieldType)) {
                parseDateTime(val, out);
            } else {
                throw new HyracksDataException(mismatchErrorMessage + fieldType.getTypeTag());
            }
            break;
        case BOOLEAN_VALUE:
            if (checkType(ATypeTag.BOOLEAN, fieldType)) {
                booleanSerde.serialize(val.getBoolVal() ? ABoolean.TRUE : ABoolean.FALSE, out);
            } else {
                throw new HyracksDataException(mismatchErrorMessage + fieldType.getTypeTag());
            }
            break;
        case CLASSAD_VALUE:
            if (checkType(ATypeTag.OBJECT, fieldType)) {
                IAType objectType = getComplexType(fieldType, ATypeTag.OBJECT);
                ClassAd classad = val.getClassadVal();
                parseRecord((ARecordType) objectType, classad, out);
            } else {
                throw new HyracksDataException(mismatchErrorMessage + fieldType.getTypeTag());
            }
            break;
        case ERROR_VALUE:
        case STRING_VALUE:
        case UNDEFINED_VALUE:
            if (checkType(ATypeTag.STRING, fieldType)) {
                parseString(val, out);
            } else {
                throw new HyracksDataException(mismatchErrorMessage + fieldType.getTypeTag());
            }
            break;
        case INTEGER_VALUE:
            if (checkType(ATypeTag.BIGINT, fieldType)) {
                // null fieldType means open field: default to BIGINT;
                // otherwise coerce the long to the declared numeric type
                if (fieldType == null || fieldType.getTypeTag() == ATypeTag.BIGINT) {
                    aInt64.setValue(val.getLongVal());
                    int64Serde.serialize(aInt64, out);
                } else if (fieldType.getTypeTag() == ATypeTag.INTEGER) {
                    aInt32.setValue((int) val.getLongVal());
                    int32Serde.serialize(aInt32, out);
                } else if (fieldType.getTypeTag() == ATypeTag.DOUBLE) {
                    aDouble.setValue(val.getLongVal());
                    doubleSerde.serialize(aDouble, out);
                } else if (fieldType.getTypeTag() == ATypeTag.SMALLINT) {
                    aInt16.setValue((short) val.getLongVal());
                    int16Serde.serialize(aInt16, out);
                } else if (fieldType.getTypeTag() == ATypeTag.TINYINT) {
                    aInt8.setValue((byte) val.getLongVal());
                    int8Serde.serialize(aInt8, out);
                } else if (fieldType.getTypeTag() == ATypeTag.FLOAT) {
                    aFloat.setValue(val.getLongVal());
                    floatSerde.serialize(aFloat, out);
                }
            } else if (checkType(ATypeTag.DATETIME, fieldType)) {
                // Classad uses Linux Timestamps (s instead of ms)
                aDateTime.setValue(val.getLongVal() * 1000);
                datetimeSerde.serialize(aDateTime, out);
            } else if (checkType(ATypeTag.DURATION, fieldType)) {
                // Classad uses Linux Timestamps (s instead of ms)
                aDuration.setValue(0, val.getLongVal() * 1000);
                durationSerde.serialize(aDuration, out);
            } else if (checkType(ATypeTag.INTEGER, fieldType)) {
                aInt32.setValue((int) val.getLongVal());
                int32Serde.serialize(aInt32, out);
            } else if (checkType(ATypeTag.DOUBLE, fieldType)) {
                aDouble.setValue(val.getLongVal());
                doubleSerde.serialize(aDouble, out);
            } else {
                throw new HyracksDataException(mismatchErrorMessage + fieldType.getTypeTag());
            }
            break;
        case LIST_VALUE:
        case SLIST_VALUE:
            IAType objectType;
            if (checkType(ATypeTag.MULTISET, fieldType)) {
                objectType = getComplexType(fieldType, ATypeTag.MULTISET);
                parseUnorderedList((AUnorderedListType) objectType, val, out);
            } else if (checkType(ATypeTag.ARRAY, fieldType)) {
                objectType = getComplexType(fieldType, ATypeTag.ARRAY);
                parseOrderedList((AOrderedListType) objectType, val, out);
            } else {
                throw new HyracksDataException(mismatchErrorMessage + fieldType.getTypeTag());
            }
            break;
        case REAL_VALUE:
            if (checkType(ATypeTag.DOUBLE, fieldType)) {
                // null fieldType means open field: default to DOUBLE;
                // otherwise coerce the double to the declared numeric type
                if (fieldType == null || fieldType.getTypeTag() == ATypeTag.DOUBLE) {
                    aDouble.setValue(val.getDoubleVal());
                    doubleSerde.serialize(aDouble, out);
                } else if (fieldType.getTypeTag() == ATypeTag.INTEGER) {
                    aInt32.setValue((int) val.getDoubleVal());
                    int32Serde.serialize(aInt32, out);
                } else if (fieldType.getTypeTag() == ATypeTag.BIGINT) {
                    aInt64.setValue((long) val.getDoubleVal());
                    int64Serde.serialize(aInt64, out);
                } else if (fieldType.getTypeTag() == ATypeTag.SMALLINT) {
                    aInt16.setValue((short) val.getDoubleVal());
                    int16Serde.serialize(aInt16, out);
                } else if (fieldType.getTypeTag() == ATypeTag.TINYINT) {
                    aInt8.setValue((byte) val.getDoubleVal());
                    int8Serde.serialize(aInt8, out);
                } else if (fieldType.getTypeTag() == ATypeTag.FLOAT) {
                    aFloat.setValue((float) val.getDoubleVal());
                    floatSerde.serialize(aFloat, out);
                }
            } else if (checkType(ATypeTag.INTEGER, fieldType)) {
                aInt32.setValue((int) val.getDoubleVal());
                int32Serde.serialize(aInt32, out);
            } else if (checkType(ATypeTag.BIGINT, fieldType)) {
                aInt64.setValue((long) val.getDoubleVal());
                int64Serde.serialize(aInt64, out);
            } else if (checkType(ATypeTag.DATETIME, fieldType)) {
                // Classad uses Linux Timestamps (s instead of ms)
                // NOTE(review): this reads getLongVal() although the value is
                // a REAL — confirm whether getDoubleVal() was intended here.
                aDateTime.setValue(val.getLongVal() * 1000);
                datetimeSerde.serialize(aDateTime, out);
            } else if (checkType(ATypeTag.DURATION, fieldType)) {
                // Classad uses Linux Timestamps (s instead of ms)
                aDuration.setValue(0, (long) (val.getDoubleVal() * 1000.0));
                durationSerde.serialize(aDuration, out);
            } else {
                throw new HyracksDataException(mismatchErrorMessage + fieldType.getTypeTag());
            }
            break;
        case RELATIVE_TIME_VALUE:
            if (checkType(ATypeTag.DURATION, fieldType)) {
                parseDuration(val, out);
            } else {
                throw new HyracksDataException(mismatchErrorMessage + fieldType.getTypeTag());
            }
            break;
        default:
            throw new HyracksDataException("unknown data type " + val.getValueType());
    }
}
Also used : ClassAd(org.apache.asterix.external.classad.ClassAd) AOrderedListType(org.apache.asterix.om.types.AOrderedListType) TokenValue(org.apache.asterix.external.classad.TokenValue) Value(org.apache.asterix.external.classad.Value) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException) IAType(org.apache.asterix.om.types.IAType)

Example 89 with IAType

use of org.apache.asterix.om.types.IAType in project asterixdb by apache.

the class ClassAdParser method parseRecord.

/**
 * Serializes a ClassAd into AsterixDB's binary record format.
 * <p>
 * Iterates over every attribute of {@code pAd}; attributes that match a field of
 * {@code recType} are written as closed fields, the rest as open fields (only
 * permitted when the record type is open or {@code recType} is null).
 *
 * @param recType target record type, or {@code null} for a fully open record
 * @param pAd     the parsed ClassAd whose attributes are serialized
 * @param out     destination for the serialized record
 * @throws IOException      if writing to {@code out} fails
 * @throws AsterixException on serialization errors from the value writer
 * @throws HyracksDataException if an unknown field is added to a closed type, or a
 *         non-optional closed field is left unset
 */
private void parseRecord(ARecordType recType, ClassAd pAd, DataOutput out) throws IOException, AsterixException {
    ArrayBackedValueStorage fieldValueBuffer = getTempBuffer();
    ArrayBackedValueStorage fieldNameBuffer = getTempBuffer();
    IARecordBuilder recBuilder = getRecordBuilder();
    // Tracks which closed fields have been assigned, so missing non-optional
    // fields can be reported after the loop. Unused for open-only records.
    BitSet nulls = null;
    if (recType != null) {
        nulls = getBitSet();
        recBuilder.reset(recType);
    } else {
        recBuilder.reset(null);
    }
    recBuilder.init();
    // Primitive boolean: the flag is never null, so the boxed Boolean was
    // pointless autoboxing.
    boolean openRecordField = false;
    int fieldId = 0;
    IAType fieldType = null;
    // Loop-invariant: the MISSING type tag byte used to skip absent values.
    final byte missingTypeTag = ATypeTag.MISSING.serialize();
    Map<CaseInsensitiveString, ExprTree> attrs = pAd.getAttrList();
    for (Entry<CaseInsensitiveString, ExprTree> entry : attrs.entrySet()) {
        // reset buffers
        fieldNameBuffer.reset();
        fieldValueBuffer.reset();
        // take care of field name
        String fldName = entry.getKey().get();
        if (recType != null) {
            fieldId = recBuilder.getFieldId(fldName);
            if (fieldId < 0 && !recType.isOpen()) {
                throw new HyracksDataException("This record is closed, you can not add extra fields !!");
            } else if (fieldId < 0 && recType.isOpen()) {
                // unknown attribute on an open type: serialize its name explicitly
                aStringFieldName.setValue(fldName);
                stringSerde.serialize(aStringFieldName, fieldNameBuffer.getDataOutput());
                openRecordField = true;
                fieldType = null;
            } else {
                // a closed field: mark it assigned and pick up its declared type
                nulls.set(fieldId);
                fieldType = recType.getFieldTypes()[fieldId];
                openRecordField = false;
            }
        } else {
            // no schema at all: every attribute is an open field
            aStringFieldName.setValue(fldName);
            stringSerde.serialize(aStringFieldName, fieldNameBuffer.getDataOutput());
            openRecordField = true;
            fieldType = null;
        }
        // add field value to value buffer
        writeFieldValueToBuffer(fieldType, fieldValueBuffer.getDataOutput(), fldName, entry.getValue(), pAd);
        if (openRecordField) {
            // skip values serialized as MISSING
            if (fieldValueBuffer.getByteArray()[0] != missingTypeTag) {
                recBuilder.addField(fieldNameBuffer, fieldValueBuffer);
            }
        } else if (NonTaggedFormatUtil.isOptional(fieldType)) {
            // optional closed field: omit when MISSING
            if (fieldValueBuffer.getByteArray()[0] != missingTypeTag) {
                recBuilder.addField(fieldId, fieldValueBuffer);
            }
        } else {
            recBuilder.addField(fieldId, fieldValueBuffer);
        }
    }
    if (recType != null) {
        // Reject records where a non-optional closed field was never assigned.
        int optionalFieldId = checkOptionalConstraints(recType, nulls);
        if (optionalFieldId != -1) {
            throw new HyracksDataException("Field: " + recType.getFieldNames()[optionalFieldId] + " can not be optional");
        }
    }
    recBuilder.write(out, true);
}
Also used : ArrayBackedValueStorage(org.apache.hyracks.data.std.util.ArrayBackedValueStorage) IARecordBuilder(org.apache.asterix.builders.IARecordBuilder) BitSet(java.util.BitSet) ExprTree(org.apache.asterix.external.classad.ExprTree) CaseInsensitiveString(org.apache.asterix.external.classad.CaseInsensitiveString) AMutableCharArrayString(org.apache.asterix.external.classad.AMutableCharArrayString) MutableBoolean(org.apache.commons.lang3.mutable.MutableBoolean) ABoolean(org.apache.asterix.om.base.ABoolean) CaseInsensitiveString(org.apache.asterix.external.classad.CaseInsensitiveString) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException) IAType(org.apache.asterix.om.types.IAType)

Example 90 with IAType

use of org.apache.asterix.om.types.IAType in project asterixdb by apache.

the class TweetParserTest method closedRecordTypeTest.

@Test
public void closedRecordTypeTest() throws IOException, URISyntaxException {
    // Construct the tweet record type: { id: int64, geo: { coordinates: [float] } }
    IAType geoFieldType = new ARecordType("GeoType", new String[] { "coordinates" },
            new IAType[] { new AOrderedListType(AFLOAT, "point") }, true);
    ARecordType tweetRecordType = new ARecordType("TweetType", new String[] { "id", "geo" },
            new IAType[] { AINT64, geoFieldType }, true);
    TweetParser parser = new TweetParser(tweetRecordType);
    List<String> tweetLines = Files.readAllLines(Paths.get(getClass().getResource("/test_tweets.txt").toURI()));
    ByteArrayOutputStream resultStream = new ByteArrayOutputStream();
    DataOutput output = new DataOutputStream(resultStream);
    int parsedCount = 0;
    for (int lineIdx = 0; lineIdx < tweetLines.size(); lineIdx++) {
        GenericRecord<String> record = new GenericRecord<>();
        record.set(tweetLines.get(lineIdx));
        try {
            parser.parse(record, output);
            parsedCount++;
        } catch (HyracksDataException e) {
            // Only the first two input lines are expected to fail, with a
            // "Non-null" violation message.
            Assert.assertTrue(e.toString().contains("Non-null") && (lineIdx == 0 || lineIdx == 1));
        }
    }
    // The remaining four lines must parse successfully.
    Assert.assertTrue(parsedCount == 4);
}
Also used : DataOutput(java.io.DataOutput) DataOutputStream(java.io.DataOutputStream) AOrderedListType(org.apache.asterix.om.types.AOrderedListType) ByteArrayOutputStream(java.io.ByteArrayOutputStream) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException) TweetParser(org.apache.asterix.external.parser.TweetParser) GenericRecord(org.apache.asterix.external.input.record.GenericRecord) ARecordType(org.apache.asterix.om.types.ARecordType) IAType(org.apache.asterix.om.types.IAType) Test(org.junit.Test)

Aggregations

IAType (org.apache.asterix.om.types.IAType)190 ARecordType (org.apache.asterix.om.types.ARecordType)73 ArrayList (java.util.ArrayList)64 ILogicalExpression (org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression)42 ATypeTag (org.apache.asterix.om.types.ATypeTag)40 AbstractFunctionCallExpression (org.apache.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression)37 List (java.util.List)32 AlgebricksException (org.apache.hyracks.algebricks.common.exceptions.AlgebricksException)32 AUnionType (org.apache.asterix.om.types.AUnionType)31 AString (org.apache.asterix.om.base.AString)28 LogicalVariable (org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable)27 Mutable (org.apache.commons.lang3.mutable.Mutable)25 Pair (org.apache.hyracks.algebricks.common.utils.Pair)24 HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException)20 Dataset (org.apache.asterix.metadata.entities.Dataset)18 AsterixException (org.apache.asterix.common.exceptions.AsterixException)17 AOrderedListType (org.apache.asterix.om.types.AOrderedListType)16 VariableReferenceExpression (org.apache.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression)16 IVisitablePointable (org.apache.asterix.om.pointables.base.IVisitablePointable)15 IVariableTypeEnvironment (org.apache.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment)15