Use of org.apache.asterix.om.types.IAType in project asterixdb by apache.
The class ARecordVisitablePointable, method set().
@Override
public void set(byte[] b, int start, int len) {
// clear the previous states
reset();
super.set(b, start, len);
boolean isExpanded = false;
int openPartOffset = 0;
int recordOffset = start;
int offsetArrayOffset;
//advance to either isExpanded or numberOfSchemaFields
int s = start + 5;
//inputRecType will never be null.
if (inputRecType.isOpen()) {
isExpanded = b[s] == 1;
//advance either to openPartOffset or numberOfSchemaFields
s += 1;
if (isExpanded) {
openPartOffset = start + AInt32SerializerDeserializer.getInt(b, s);
//advance to numberOfSchemaFields
s += 4;
}
}
try {
if (numberOfSchemaFields > 0) {
//advance to nullBitMap if hasOptionalFields, or fieldOffsets
s += 4;
int nullBitMapOffset = 0;
boolean hasOptionalFields = NonTaggedFormatUtil.hasOptionalField(inputRecType);
if (hasOptionalFields) {
nullBitMapOffset = s;
offsetArrayOffset = s + (this.numberOfSchemaFields % 4 == 0 ? numberOfSchemaFields / 4 : numberOfSchemaFields / 4 + 1);
} else {
offsetArrayOffset = s;
}
for (int i = 0; i < numberOfSchemaFields; i++) {
fieldOffsets[i] = AInt32SerializerDeserializer.getInt(b, offsetArrayOffset) + recordOffset;
offsetArrayOffset += 4;
}
for (int fieldNumber = 0; fieldNumber < numberOfSchemaFields; fieldNumber++) {
if (hasOptionalFields) {
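// the bitmap packs two bits per field: the first bit of the pair is cleared for NULL, the second for MISSING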
byte b1 = b[nullBitMapOffset + fieldNumber / 4];
int p = 1 << (7 - 2 * (fieldNumber % 4));
if ((b1 & p) == 0) {
// set null value (including type tag inside)
fieldValues.add(nullReference);
continue;
}
p = 1 << (7 - 2 * (fieldNumber % 4) - 1);
if ((b1 & p) == 0) {
// set missing value (including type tag inside)
fieldValues.add(missingReference);
continue;
}
}
IAType[] fieldTypes = inputRecType.getFieldTypes();
int fieldValueLength = 0;
IAType fieldType = fieldTypes[fieldNumber];
if (fieldTypes[fieldNumber].getTypeTag() == ATypeTag.UNION) {
if (((AUnionType) fieldTypes[fieldNumber]).isUnknownableType()) {
fieldType = ((AUnionType) fieldTypes[fieldNumber]).getActualType();
typeTag = fieldType.getTypeTag();
fieldValueLength = NonTaggedFormatUtil.getFieldValueLength(b, fieldOffsets[fieldNumber], typeTag, false);
}
} else {
typeTag = fieldTypes[fieldNumber].getTypeTag();
fieldValueLength = NonTaggedFormatUtil.getFieldValueLength(b, fieldOffsets[fieldNumber], typeTag, false);
}
// set field value (including the type tag)
int fstart = dataBos.size();
dataDos.writeByte(typeTag.serialize());
dataDos.write(b, fieldOffsets[fieldNumber], fieldValueLength);
int fend = dataBos.size();
IVisitablePointable fieldValue = allocator.allocateFieldValue(fieldType);
fieldValue.set(dataBos.getByteArray(), fstart, fend - fstart);
fieldValues.add(fieldValue);
}
}
if (isExpanded) {
int numberOfOpenFields = AInt32SerializerDeserializer.getInt(b, openPartOffset);
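// skip the 4-byte open-field count plus one 8-byte (name hash, offset) pair per open field to reach the first field name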
int fieldOffset = openPartOffset + 4 + (8 * numberOfOpenFields);
for (int i = 0; i < numberOfOpenFields; i++) {
// set the field name (including a type tag, which is AString)
int fieldValueLength = NonTaggedFormatUtil.getFieldValueLength(b, fieldOffset, ATypeTag.STRING, false);
int fnstart = dataBos.size();
dataDos.writeByte(ATypeTag.SERIALIZED_STRING_TYPE_TAG);
dataDos.write(b, fieldOffset, fieldValueLength);
int fnend = dataBos.size();
IVisitablePointable fieldName = allocator.allocateEmpty();
fieldName.set(dataBos.getByteArray(), fnstart, fnend - fnstart);
fieldNames.add(fieldName);
fieldOffset += fieldValueLength;
// set the field type tag
IVisitablePointable fieldTypeTag = allocator.allocateEmpty();
fieldTypeTag.set(b, fieldOffset, 1);
fieldTypeTags.add(fieldTypeTag);
typeTag = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(b[fieldOffset]);
// set the field value (already including type tag)
fieldValueLength = NonTaggedFormatUtil.getFieldValueLength(b, fieldOffset, typeTag, true) + 1;
// allocate
IVisitablePointable fieldValueAccessor = allocator.allocateFieldValue(typeTag, b, fieldOffset + 1);
fieldValueAccessor.set(b, fieldOffset, fieldValueLength);
fieldValues.add(fieldValueAccessor);
fieldOffset += fieldValueLength;
}
}
} catch (Exception e) {
throw new IllegalStateException(e);
}
}
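The null/missing bitmap decoding above is the subtlest part of set(). The following minimal, self-contained sketch reproduces the same two-bits-per-field scheme against a plain byte array; FieldState and decodeFieldState are hypothetical names for illustration, not AsterixDB API.

enum FieldState { NULL, MISSING, PRESENT }

static FieldState decodeFieldState(byte[] bitmap, int fieldNumber) {
    byte packed = bitmap[fieldNumber / 4];             // 4 fields per byte, 2 bits each
    int nullBit = 1 << (7 - 2 * (fieldNumber % 4));    // first bit of the pair
    int missingBit = nullBit >> 1;                     // second bit of the pair
    if ((packed & nullBit) == 0) {
        return FieldState.NULL;                        // caller substitutes the null reference
    }
    if ((packed & missingBit) == 0) {
        return FieldState.MISSING;                     // caller substitutes the missing reference
    }
    return FieldState.PRESENT;                         // a real value sits at fieldOffsets[fieldNumber]
}

As in the loop above, NULL must be tested before MISSING, since a present value has both bits of its pair set.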
Use of org.apache.asterix.om.types.IAType in project asterixdb by apache.
The class QueryTranslator, method handleCreateIndexStatement().
protected void handleCreateIndexStatement(MetadataProvider metadataProvider, Statement stmt, IHyracksClientConnection hcc) throws Exception {
ProgressState progress = ProgressState.NO_PROGRESS;
CreateIndexStatement stmtCreateIndex = (CreateIndexStatement) stmt;
String dataverseName = getActiveDataverse(stmtCreateIndex.getDataverseName());
String datasetName = stmtCreateIndex.getDatasetName().getValue();
List<Integer> keySourceIndicators = stmtCreateIndex.getFieldSourceIndicators();
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
boolean bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
MetadataLockManager.INSTANCE.createIndexBegin(metadataProvider.getLocks(), dataverseName, dataverseName + "." + datasetName);
String indexName = null;
JobSpecification spec = null;
Dataset ds = null;
// For external datasets
List<ExternalFile> externalFilesSnapshot = null;
boolean firstExternalDatasetIndex = false;
boolean filesIndexReplicated = false;
Index filesIndex = null;
boolean datasetLocked = false;
Index index = null;
try {
ds = metadataProvider.findDataset(dataverseName, datasetName);
if (ds == null) {
throw new AlgebricksException("There is no dataset with this name " + datasetName + " in dataverse " + dataverseName);
}
indexName = stmtCreateIndex.getIndexName().getValue();
index = MetadataManager.INSTANCE.getIndex(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName, indexName);
Datatype dt = MetadataManager.INSTANCE.getDatatype(metadataProvider.getMetadataTxnContext(), ds.getItemTypeDataverseName(), ds.getItemTypeName());
ARecordType aRecordType = (ARecordType) dt.getDatatype();
ARecordType metaRecordType = null;
if (ds.hasMetaPart()) {
Datatype metaDt = MetadataManager.INSTANCE.getDatatype(metadataProvider.getMetadataTxnContext(), ds.getMetaItemTypeDataverseName(), ds.getMetaItemTypeName());
metaRecordType = (ARecordType) metaDt.getDatatype();
}
List<List<String>> indexFields = new ArrayList<>();
List<IAType> indexFieldTypes = new ArrayList<>();
int keyIndex = 0;
for (Pair<List<String>, TypeExpression> fieldExpr : stmtCreateIndex.getFieldExprs()) {
IAType fieldType = null;
ARecordType subType = KeyFieldTypeUtil.chooseSource(keySourceIndicators, keyIndex, aRecordType, metaRecordType);
boolean isOpen = subType.isOpen();
int i = 0;
if (fieldExpr.first.size() > 1 && !isOpen) {
while (i < fieldExpr.first.size() - 1 && !isOpen) {
subType = (ARecordType) subType.getFieldType(fieldExpr.first.get(i));
i++;
isOpen = subType.isOpen();
}
}
if (fieldExpr.second == null) {
fieldType = subType.getSubFieldType(fieldExpr.first.subList(i, fieldExpr.first.size()));
} else {
if (!stmtCreateIndex.isEnforced()) {
throw new AlgebricksException("Cannot create typed index on \"" + fieldExpr.first + "\" field without enforcing it's type");
}
if (!isOpen) {
throw new AlgebricksException("Typed index on \"" + fieldExpr.first + "\" field could be created only for open datatype");
}
if (stmtCreateIndex.hasMetaField()) {
throw new AlgebricksException("Typed open index can only be created on the record part");
}
Map<TypeSignature, IAType> typeMap = TypeTranslator.computeTypes(mdTxnCtx, fieldExpr.second, indexName, dataverseName);
TypeSignature typeSignature = new TypeSignature(dataverseName, indexName);
fieldType = typeMap.get(typeSignature);
}
if (fieldType == null) {
throw new AlgebricksException("Unknown type " + (fieldExpr.second == null ? fieldExpr.first : fieldExpr.second));
}
indexFields.add(fieldExpr.first);
indexFieldTypes.add(fieldType);
++keyIndex;
}
ValidateUtil.validateKeyFields(aRecordType, metaRecordType, indexFields, keySourceIndicators, indexFieldTypes, stmtCreateIndex.getIndexType());
if (index != null) {
if (stmtCreateIndex.getIfNotExists()) {
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
return;
} else {
throw new AlgebricksException("An index with this name " + indexName + " already exists.");
}
}
// Inverted (keyword/ngram) indexes cannot be created on a dataset with a variable-length primary key; if that is the case, print an error message and stop.
if (stmtCreateIndex.getIndexType() == IndexType.SINGLE_PARTITION_WORD_INVIX || stmtCreateIndex.getIndexType() == IndexType.SINGLE_PARTITION_NGRAM_INVIX || stmtCreateIndex.getIndexType() == IndexType.LENGTH_PARTITIONED_WORD_INVIX || stmtCreateIndex.getIndexType() == IndexType.LENGTH_PARTITIONED_NGRAM_INVIX) {
List<List<String>> partitioningKeys = ds.getPrimaryKeys();
for (List<String> partitioningKey : partitioningKeys) {
IAType keyType = aRecordType.getSubFieldType(partitioningKey);
ITypeTraits typeTrait = TypeTraitProvider.INSTANCE.getTypeTrait(keyType);
// If it is not a fixed length
if (typeTrait.getFixedLength() < 0) {
throw new AlgebricksException("The keyword or ngram index -" + indexName + " cannot be created on the dataset -" + datasetName + " due to its variable-length primary key field - " + partitioningKey);
}
}
}
if (ds.getDatasetType() == DatasetType.INTERNAL) {
validateIfResourceIsActiveInFeed(ds);
} else {
// Check if the dataset is indexible
if (!ExternalIndexingOperations.isIndexible((ExternalDatasetDetails) ds.getDatasetDetails())) {
throw new AlgebricksException("dataset using " + ((ExternalDatasetDetails) ds.getDatasetDetails()).getAdapter() + " Adapter can't be indexed");
}
// Check if the name of the index is valid
if (!ExternalIndexingOperations.isValidIndexName(datasetName, indexName)) {
throw new AlgebricksException("external dataset index name is invalid");
}
// Check if the files index exist
filesIndex = MetadataManager.INSTANCE.getIndex(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName, IndexingConstants.getFilesIndexName(datasetName));
firstExternalDatasetIndex = filesIndex == null;
// Lock external dataset
ExternalDatasetsRegistry.INSTANCE.buildIndexBegin(ds, firstExternalDatasetIndex);
datasetLocked = true;
if (firstExternalDatasetIndex) {
// Verify that no one has created an index before we acquire the lock
filesIndex = MetadataManager.INSTANCE.getIndex(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName, IndexingConstants.getFilesIndexName(datasetName));
if (filesIndex != null) {
ExternalDatasetsRegistry.INSTANCE.buildIndexEnd(ds, firstExternalDatasetIndex);
firstExternalDatasetIndex = false;
ExternalDatasetsRegistry.INSTANCE.buildIndexBegin(ds, firstExternalDatasetIndex);
}
}
if (firstExternalDatasetIndex) {
// Get snapshot from External File System
externalFilesSnapshot = ExternalIndexingOperations.getSnapshotFromExternalFileSystem(ds);
// Add an entry for the files index
filesIndex = new Index(dataverseName, datasetName, IndexingConstants.getFilesIndexName(datasetName), IndexType.BTREE, ExternalIndexingOperations.FILE_INDEX_FIELD_NAMES, null, ExternalIndexingOperations.FILE_INDEX_FIELD_TYPES, false, false, MetadataUtil.PENDING_ADD_OP);
MetadataManager.INSTANCE.addIndex(metadataProvider.getMetadataTxnContext(), filesIndex);
// Add files to the external files index
for (ExternalFile file : externalFilesSnapshot) {
MetadataManager.INSTANCE.addExternalFile(mdTxnCtx, file);
}
// This is the first index for the external dataset, replicate the files index
spec = ExternalIndexingOperations.buildFilesIndexCreateJobSpec(ds, externalFilesSnapshot, metadataProvider);
if (spec == null) {
throw new CompilationException("Failed to create job spec for replicating Files Index For external dataset");
}
filesIndexReplicated = true;
JobUtils.runJob(hcc, spec, true);
}
}
// check whether there exists another enforced index on the same field
if (stmtCreateIndex.isEnforced()) {
List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName);
for (Index existingIndex : indexes) {
if (existingIndex.getKeyFieldNames().equals(indexFields) && !existingIndex.getKeyFieldTypes().equals(indexFieldTypes) && existingIndex.isEnforcingKeyFileds()) {
throw new CompilationException("Cannot create index " + indexName + " , enforced index " + existingIndex.getIndexName() + " on field \"" + StringUtils.join(indexFields, ',') + "\" is already defined with type \"" + existingIndex.getKeyFieldTypes() + "\"");
}
}
}
// #. add a new index with PendingAddOp
index = new Index(dataverseName, datasetName, indexName, stmtCreateIndex.getIndexType(), indexFields, keySourceIndicators, indexFieldTypes, stmtCreateIndex.getGramLength(), stmtCreateIndex.isEnforced(), false, MetadataUtil.PENDING_ADD_OP);
MetadataManager.INSTANCE.addIndex(metadataProvider.getMetadataTxnContext(), index);
// #. prepare to create the index artifact in NC.
spec = IndexUtil.buildSecondaryIndexCreationJobSpec(ds, index, metadataProvider);
if (spec == null) {
throw new CompilationException("Failed to create job spec for creating index '" + stmtCreateIndex.getDatasetName() + "." + stmtCreateIndex.getIndexName() + "'");
}
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
bActiveTxn = false;
progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;
// #. create the index artifact in NC.
JobUtils.runJob(hcc, spec, true);
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
// #. load data into the index in NC.
spec = IndexUtil.buildSecondaryIndexLoadingJobSpec(ds, index, metadataProvider);
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
bActiveTxn = false;
JobUtils.runJob(hcc, spec, true);
// #. begin new metadataTxn
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
// #. add another new index with PendingNoOp after deleting the index with PendingAddOp
MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName, indexName);
index.setPendingOp(MetadataUtil.PENDING_NO_OP);
MetadataManager.INSTANCE.addIndex(metadataProvider.getMetadataTxnContext(), index);
// #. add the files index back with PendingNoOp after deleting the one with PendingAddOp
if (firstExternalDatasetIndex) {
MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName, filesIndex.getIndexName());
filesIndex.setPendingOp(MetadataUtil.PENDING_NO_OP);
MetadataManager.INSTANCE.addIndex(metadataProvider.getMetadataTxnContext(), filesIndex);
// update transaction timestamp
((ExternalDatasetDetails) ds.getDatasetDetails()).setRefreshTimestamp(new Date());
MetadataManager.INSTANCE.updateDataset(mdTxnCtx, ds);
}
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e) {
if (bActiveTxn) {
abort(e, e, mdTxnCtx);
}
// If files index was replicated for external dataset, it should be cleaned up on NC side
if (filesIndexReplicated) {
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
bActiveTxn = true;
try {
JobSpecification jobSpec = ExternalIndexingOperations.buildDropFilesIndexJobSpec(metadataProvider, ds);
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
bActiveTxn = false;
JobUtils.runJob(hcc, jobSpec, true);
} catch (Exception e2) {
e.addSuppressed(e2);
if (bActiveTxn) {
abort(e, e2, mdTxnCtx);
}
}
}
if (progress == ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA) {
// #. execute compensation operations
// remove the index in NC
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
try {
JobSpecification jobSpec = IndexUtil.buildDropIndexJobSpec(index, metadataProvider, ds);
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
bActiveTxn = false;
JobUtils.runJob(hcc, jobSpec, true);
} catch (Exception e2) {
e.addSuppressed(e2);
if (bActiveTxn) {
abort(e, e2, mdTxnCtx);
}
}
if (firstExternalDatasetIndex) {
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
try {
// Drop External Files from metadata
MetadataManager.INSTANCE.dropDatasetExternalFiles(mdTxnCtx, ds);
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e2) {
e.addSuppressed(e2);
abort(e, e2, mdTxnCtx);
throw new IllegalStateException("System is inconsistent state: pending files for(" + dataverseName + "." + datasetName + ") couldn't be removed from the metadata", e);
}
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
try {
// Drop the files index from metadata
MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName, IndexingConstants.getFilesIndexName(datasetName));
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e2) {
e.addSuppressed(e2);
abort(e, e2, mdTxnCtx);
throw new IllegalStateException("System is inconsistent state: pending index(" + dataverseName + "." + datasetName + "." + IndexingConstants.getFilesIndexName(datasetName) + ") couldn't be removed from the metadata", e);
}
}
// remove the record from the metadata.
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
try {
MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName, indexName);
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e2) {
e.addSuppressed(e2);
abort(e, e2, mdTxnCtx);
throw new IllegalStateException("System is in inconsistent state: pending index(" + dataverseName + "." + datasetName + "." + indexName + ") couldn't be removed from the metadata", e);
}
}
throw e;
} finally {
metadataProvider.getLocks().unlock();
if (datasetLocked) {
ExternalDatasetsRegistry.INSTANCE.buildIndexEnd(ds, firstExternalDatasetIndex);
}
}
}
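Stripped of the external-dataset handling, the method follows AsterixDB's pending-op protocol: persist the index record as PENDING_ADD_OP, run the long Hyracks jobs outside any metadata transaction, then swap the record to PENDING_NO_OP; the catch block compensates in reverse order. A minimal sketch of that control flow, using hypothetical MetadataStore and Job interfaces in place of MetadataManager, JobSpecification and JobUtils:

interface MetadataStore {
    void addIndex(String name, String pendingOp);
    void dropIndex(String name);
    void commit();
}
interface Job { void run() throws Exception; }

static void createIndexWithPendingOp(MetadataStore md, String name,
        Job createArtifact, Job loadData) throws Exception {
    // 1. record the index as PENDING_ADD_OP and commit, so a crash leaves a
    //    visible marker that recovery or compensation can clean up
    md.addIndex(name, "PENDING_ADD_OP");
    md.commit();
    try {
        // 2. run the long-running jobs outside any metadata transaction
        createArtifact.run();
        loadData.run();
        // 3. in a fresh transaction, replace the pending record with the final one
        md.dropIndex(name);
        md.addIndex(name, "PENDING_NO_OP");
        md.commit();
    } catch (Exception e) {
        // compensation: drop the half-built artifact and the pending record,
        // mirroring the ADDED_PENDINGOP_RECORD_TO_METADATA branch above
        md.dropIndex(name);
        md.commit();
        throw e;
    }
}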
Use of org.apache.asterix.om.types.IAType in project asterixdb by apache.
The class ClassAdParser, method writeFieldValueToBuffer().
private void writeFieldValueToBuffer(IAType fieldType, DataOutput out, String name, ExprTree tree, ClassAd pAd) throws IOException, AsterixException {
Value val;
switch(tree.getKind()) {
case ATTRREF_NODE:
case CLASSAD_NODE:
case EXPR_ENVELOPE:
case EXPR_LIST_NODE:
case FN_CALL_NODE:
case OP_NODE:
val = objectPool.valuePool.get();
if (!pAd.evaluateAttr(name, val)) {
// evaluation failed; just write the stored expression's value
val = ((Literal) pAd.getAttrList().get(name + "Expr")).getValue();
}
break;
case LITERAL_NODE:
val = ((Literal) tree.getTree()).getValue();
break;
default:
throw new HyracksDataException("Unknown Expression type detected: " + tree.getKind());
}
if (fieldType != null) {
if (NonTaggedFormatUtil.isOptional(fieldType)) {
fieldType = ((AUnionType) fieldType).getActualType();
}
}
switch(val.getValueType()) {
case ABSOLUTE_TIME_VALUE:
if (checkType(ATypeTag.DATETIME, fieldType)) {
parseDateTime(val, out);
} else {
throw new HyracksDataException(mismatchErrorMessage + fieldType.getTypeTag());
}
break;
case BOOLEAN_VALUE:
if (checkType(ATypeTag.BOOLEAN, fieldType)) {
booleanSerde.serialize(val.getBoolVal() ? ABoolean.TRUE : ABoolean.FALSE, out);
} else {
throw new HyracksDataException(mismatchErrorMessage + fieldType.getTypeTag());
}
break;
case CLASSAD_VALUE:
if (checkType(ATypeTag.OBJECT, fieldType)) {
IAType objectType = getComplexType(fieldType, ATypeTag.OBJECT);
ClassAd classad = val.getClassadVal();
parseRecord((ARecordType) objectType, classad, out);
} else {
throw new HyracksDataException(mismatchErrorMessage + fieldType.getTypeTag());
}
break;
case ERROR_VALUE:
case STRING_VALUE:
case UNDEFINED_VALUE:
if (checkType(ATypeTag.STRING, fieldType)) {
parseString(val, out);
} else {
throw new HyracksDataException(mismatchErrorMessage + fieldType.getTypeTag());
}
break;
case INTEGER_VALUE:
if (checkType(ATypeTag.BIGINT, fieldType)) {
if (fieldType == null || fieldType.getTypeTag() == ATypeTag.BIGINT) {
aInt64.setValue(val.getLongVal());
int64Serde.serialize(aInt64, out);
} else if (fieldType.getTypeTag() == ATypeTag.INTEGER) {
aInt32.setValue((int) val.getLongVal());
int32Serde.serialize(aInt32, out);
} else if (fieldType.getTypeTag() == ATypeTag.DOUBLE) {
aDouble.setValue(val.getLongVal());
doubleSerde.serialize(aDouble, out);
} else if (fieldType.getTypeTag() == ATypeTag.SMALLINT) {
aInt16.setValue((short) val.getLongVal());
int16Serde.serialize(aInt16, out);
} else if (fieldType.getTypeTag() == ATypeTag.TINYINT) {
aInt8.setValue((byte) val.getLongVal());
int8Serde.serialize(aInt8, out);
} else if (fieldType.getTypeTag() == ATypeTag.FLOAT) {
aFloat.setValue(val.getLongVal());
floatSerde.serialize(aFloat, out);
}
} else if (checkType(ATypeTag.DATETIME, fieldType)) {
// ClassAd uses Unix timestamps (seconds instead of milliseconds)
aDateTime.setValue(val.getLongVal() * 1000);
datetimeSerde.serialize(aDateTime, out);
} else if (checkType(ATypeTag.DURATION, fieldType)) {
// ClassAd uses Unix timestamps (seconds instead of milliseconds)
aDuration.setValue(0, val.getLongVal() * 1000);
durationSerde.serialize(aDuration, out);
} else if (checkType(ATypeTag.INTEGER, fieldType)) {
aInt32.setValue((int) val.getLongVal());
int32Serde.serialize(aInt32, out);
} else if (checkType(ATypeTag.DOUBLE, fieldType)) {
aDouble.setValue(val.getLongVal());
doubleSerde.serialize(aDouble, out);
} else {
throw new HyracksDataException(mismatchErrorMessage + fieldType.getTypeTag());
}
break;
case LIST_VALUE:
case SLIST_VALUE:
IAType objectType;
if (checkType(ATypeTag.MULTISET, fieldType)) {
objectType = getComplexType(fieldType, ATypeTag.MULTISET);
parseUnorderedList((AUnorderedListType) objectType, val, out);
} else if (checkType(ATypeTag.ARRAY, fieldType)) {
objectType = getComplexType(fieldType, ATypeTag.ARRAY);
parseOrderedList((AOrderedListType) objectType, val, out);
} else {
throw new HyracksDataException(mismatchErrorMessage + fieldType.getTypeTag());
}
break;
case REAL_VALUE:
if (checkType(ATypeTag.DOUBLE, fieldType)) {
if (fieldType == null || fieldType.getTypeTag() == ATypeTag.DOUBLE) {
aDouble.setValue(val.getDoubleVal());
doubleSerde.serialize(aDouble, out);
} else if (fieldType.getTypeTag() == ATypeTag.INTEGER) {
aInt32.setValue((int) val.getDoubleVal());
int32Serde.serialize(aInt32, out);
} else if (fieldType.getTypeTag() == ATypeTag.BIGINT) {
aInt64.setValue((long) val.getDoubleVal());
int64Serde.serialize(aInt64, out);
} else if (fieldType.getTypeTag() == ATypeTag.SMALLINT) {
aInt16.setValue((short) val.getDoubleVal());
int16Serde.serialize(aInt16, out);
} else if (fieldType.getTypeTag() == ATypeTag.TINYINT) {
aInt8.setValue((byte) val.getDoubleVal());
int8Serde.serialize(aInt8, out);
} else if (fieldType.getTypeTag() == ATypeTag.FLOAT) {
aFloat.setValue((float) val.getDoubleVal());
floatSerde.serialize(aFloat, out);
}
} else if (checkType(ATypeTag.INTEGER, fieldType)) {
aInt32.setValue((int) val.getDoubleVal());
int32Serde.serialize(aInt32, out);
} else if (checkType(ATypeTag.BIGINT, fieldType)) {
aInt64.setValue((long) val.getDoubleVal());
int64Serde.serialize(aInt64, out);
} else if (checkType(ATypeTag.DATETIME, fieldType)) {
// ClassAd uses Unix timestamps (seconds instead of milliseconds)
aDateTime.setValue(val.getLongVal() * 1000);
datetimeSerde.serialize(aDateTime, out);
} else if (checkType(ATypeTag.DURATION, fieldType)) {
// ClassAd uses Unix timestamps (seconds instead of milliseconds)
aDuration.setValue(0, (long) (val.getDoubleVal() * 1000.0));
durationSerde.serialize(aDuration, out);
} else {
throw new HyracksDataException(mismatchErrorMessage + fieldType.getTypeTag());
}
break;
case RELATIVE_TIME_VALUE:
if (checkType(ATypeTag.DURATION, fieldType)) {
parseDuration(val, out);
} else {
throw new HyracksDataException(mismatchErrorMessage + fieldType.getTypeTag());
}
break;
default:
throw new HyracksDataException("unknown data type " + val.getValueType());
}
}
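Each branch above first calls checkType(expectedTag, fieldType) and only then dispatches on the declared tag, which implies that checkType treats an open field (fieldType == null) as compatible with anything and lets numeric tags stand in for one another. A plausible sketch inferred from these call sites, not the parser's actual implementation:

static boolean checkTypeSketch(ATypeTag expected, IAType fieldType) {
    if (fieldType == null) {
        return true;                                   // open field: accept any value
    }
    ATypeTag actual = fieldType.getTypeTag();
    if (actual == expected) {
        return true;
    }
    return isNumeric(actual) && isNumeric(expected);   // allow numeric promotion/demotion
}

static boolean isNumeric(ATypeTag tag) {
    switch (tag) {
        case TINYINT: case SMALLINT: case INTEGER: case BIGINT:
        case FLOAT: case DOUBLE:
            return true;
        default:
            return false;
    }
}

This matches the INTEGER_VALUE branch, which passes checkType(ATypeTag.BIGINT, fieldType) and then casts the long to whichever numeric width the field actually declares.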
Use of org.apache.asterix.om.types.IAType in project asterixdb by apache.
The class ClassAdParser, method parseRecord().
private void parseRecord(ARecordType recType, ClassAd pAd, DataOutput out) throws IOException, AsterixException {
ArrayBackedValueStorage fieldValueBuffer = getTempBuffer();
ArrayBackedValueStorage fieldNameBuffer = getTempBuffer();
IARecordBuilder recBuilder = getRecordBuilder();
BitSet nulls = null;
if (recType != null) {
nulls = getBitSet();
recBuilder.reset(recType);
} else {
recBuilder.reset(null);
}
recBuilder.init();
boolean openRecordField = false;
int fieldId = 0;
IAType fieldType = null;
// new stuff
Map<CaseInsensitiveString, ExprTree> attrs = pAd.getAttrList();
for (Entry<CaseInsensitiveString, ExprTree> entry : attrs.entrySet()) {
// reset buffers
fieldNameBuffer.reset();
fieldValueBuffer.reset();
// take care of field name
String fldName = entry.getKey().get();
if (recType != null) {
fieldId = recBuilder.getFieldId(fldName);
if (fieldId < 0 && !recType.isOpen()) {
throw new HyracksDataException("This record is closed, you can not add extra fields !!");
} else if (fieldId < 0 && recType.isOpen()) {
aStringFieldName.setValue(fldName);
stringSerde.serialize(aStringFieldName, fieldNameBuffer.getDataOutput());
openRecordField = true;
fieldType = null;
} else {
// a closed field
nulls.set(fieldId);
fieldType = recType.getFieldTypes()[fieldId];
openRecordField = false;
}
} else {
aStringFieldName.setValue(fldName);
stringSerde.serialize(aStringFieldName, fieldNameBuffer.getDataOutput());
openRecordField = true;
fieldType = null;
}
// add field value to value buffer
writeFieldValueToBuffer(fieldType, fieldValueBuffer.getDataOutput(), fldName, entry.getValue(), pAd);
if (openRecordField) {
if (fieldValueBuffer.getByteArray()[0] != ATypeTag.MISSING.serialize()) {
recBuilder.addField(fieldNameBuffer, fieldValueBuffer);
}
} else if (NonTaggedFormatUtil.isOptional(fieldType)) {
if (fieldValueBuffer.getByteArray()[0] != ATypeTag.MISSING.serialize()) {
recBuilder.addField(fieldId, fieldValueBuffer);
}
} else {
recBuilder.addField(fieldId, fieldValueBuffer);
}
}
if (recType != null) {
int optionalFieldId = checkOptionalConstraints(recType, nulls);
if (optionalFieldId != -1) {
throw new HyracksDataException("Field: " + recType.getFieldNames()[optionalFieldId] + " can not be optional");
}
}
recBuilder.write(out, true);
}
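parseRecord marks every closed field it sees in the nulls BitSet and then relies on checkOptionalConstraints to flag the first required field that never appeared. A plausible implementation of that helper, inferred from the call site (the project's actual code may differ):

// Hypothetical sketch: return the id of the first closed field that was never
// set and is not optional (an unknownable union), or -1 if the schema is satisfied.
static int checkOptionalConstraintsSketch(ARecordType recType, java.util.BitSet seen) {
    IAType[] fieldTypes = recType.getFieldTypes();
    for (int i = 0; i < fieldTypes.length; i++) {
        if (!seen.get(i) && !NonTaggedFormatUtil.isOptional(fieldTypes[i])) {
            return i;   // required field missing from the ClassAd
        }
    }
    return -1;
}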
Use of org.apache.asterix.om.types.IAType in project asterixdb by apache.
The class TweetParserTest, method closedRecordTypeTest().
@Test
public void closedRecordTypeTest() throws IOException, URISyntaxException {
// construct the record type
IAType geoFieldType = new ARecordType("GeoType", new String[] { "coordinates" }, new IAType[] { new AOrderedListType(AFLOAT, "point") }, true);
ARecordType tweetRecordType = new ARecordType("TweetType", new String[] { "id", "geo" }, new IAType[] { AINT64, geoFieldType }, true);
TweetParser parser = new TweetParser(tweetRecordType);
List<String> lines = Files.readAllLines(Paths.get(getClass().getResource("/test_tweets.txt").toURI()));
ByteArrayOutputStream is = new ByteArrayOutputStream();
DataOutput output = new DataOutputStream(is);
int regularCount = 0;
for (int iter1 = 0; iter1 < lines.size(); iter1++) {
GenericRecord<String> record = new GenericRecord<>();
record.set(lines.get(iter1));
try {
parser.parse(record, output);
regularCount++;
} catch (HyracksDataException e) {
Assert.assertTrue(e.toString().contains("Non-null") && (iter1 == 0 || iter1 == 1));
}
}
Assert.assertEquals(4, regularCount);
}
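The ARecordType constructor used above takes a type name, parallel arrays of field names and field types, and an isOpen flag. Note that even in an open type, declared fields are required; the test expects the first two tweets to fail with a "Non-null" error, apparently because a declared field received a null value. If a declared field should tolerate absence, one option is to declare it as an unknownable union. A sketch, assuming an AUnionType.createUnknownableType factory (suggested by the isUnknownableType checks earlier on this page):

// Hypothetical variant of the test type with an optional "geo" field.
IAType optionalGeo = AUnionType.createUnknownableType(geoFieldType);   // assumption: factory exists
ARecordType lenientTweetType = new ARecordType("TweetType",
        new String[] { "id", "geo" },
        new IAType[] { BuiltinType.AINT64, optionalGeo },
        true);   // open: extra JSON fields are still permitted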