Use of org.apache.asterix.om.base.AInt32 in project asterixdb by Apache.
The class IndexTupleTranslator, method getMetadataEntityFromTuple:
@Override
public Index getMetadataEntityFromTuple(ITupleReference frameTuple)
        throws MetadataException, HyracksDataException {
    byte[] serRecord = frameTuple.getFieldData(INDEX_PAYLOAD_TUPLE_FIELD_INDEX);
    int recordStartOffset = frameTuple.getFieldStart(INDEX_PAYLOAD_TUPLE_FIELD_INDEX);
    int recordLength = frameTuple.getFieldLength(INDEX_PAYLOAD_TUPLE_FIELD_INDEX);
    ByteArrayInputStream stream = new ByteArrayInputStream(serRecord, recordStartOffset, recordLength);
    DataInput in = new DataInputStream(stream);
    ARecord rec = recordSerde.deserialize(in);
    String dvName = ((AString) rec
            .getValueByPos(MetadataRecordTypes.INDEX_ARECORD_DATAVERSENAME_FIELD_INDEX)).getStringValue();
    String dsName = ((AString) rec
            .getValueByPos(MetadataRecordTypes.INDEX_ARECORD_DATASETNAME_FIELD_INDEX)).getStringValue();
    String indexName = ((AString) rec
            .getValueByPos(MetadataRecordTypes.INDEX_ARECORD_INDEXNAME_FIELD_INDEX)).getStringValue();
    IndexType indexStructure = IndexType.valueOf(((AString) rec
            .getValueByPos(MetadataRecordTypes.INDEX_ARECORD_INDEXSTRUCTURE_FIELD_INDEX)).getStringValue());
    IACursor fieldNameCursor = ((AOrderedList) rec
            .getValueByPos(MetadataRecordTypes.INDEX_ARECORD_SEARCHKEY_FIELD_INDEX)).getCursor();
    List<List<String>> searchKey = new ArrayList<>();
    AOrderedList fieldNameList;
    while (fieldNameCursor.next()) {
        fieldNameList = (AOrderedList) fieldNameCursor.get();
        IACursor nestedFieldNameCursor = (fieldNameList.getCursor());
        List<String> nestedFieldName = new ArrayList<>();
        while (nestedFieldNameCursor.next()) {
            nestedFieldName.add(((AString) nestedFieldNameCursor.get()).getStringValue());
        }
        searchKey.add(nestedFieldName);
    }
    int indexKeyTypeFieldPos = rec.getType().getFieldIndex(INDEX_SEARCHKEY_TYPE_FIELD_NAME);
    IACursor fieldTypeCursor = new ACollectionCursor();
    if (indexKeyTypeFieldPos > 0) {
        fieldTypeCursor = ((AOrderedList) rec.getValueByPos(indexKeyTypeFieldPos)).getCursor();
    }
    List<IAType> searchKeyType = new ArrayList<>(searchKey.size());
    while (fieldTypeCursor.next()) {
        String typeName = ((AString) fieldTypeCursor.get()).getStringValue();
        IAType fieldType = BuiltinTypeMap.getTypeFromTypeName(metadataNode, jobId, dvName, typeName, false);
        searchKeyType.add(fieldType);
    }
    int isEnforcedFieldPos = rec.getType().getFieldIndex(INDEX_ISENFORCED_FIELD_NAME);
    Boolean isEnforcingKeys = false;
    if (isEnforcedFieldPos > 0) {
        isEnforcingKeys = ((ABoolean) rec.getValueByPos(isEnforcedFieldPos)).getBoolean();
    }
    Boolean isPrimaryIndex = ((ABoolean) rec
            .getValueByPos(MetadataRecordTypes.INDEX_ARECORD_ISPRIMARY_FIELD_INDEX)).getBoolean();
    int pendingOp = ((AInt32) rec
            .getValueByPos(MetadataRecordTypes.INDEX_ARECORD_PENDINGOP_FIELD_INDEX)).getIntegerValue();
    // Check if there is a gram length as well.
    int gramLength = -1;
    int gramLenPos = rec.getType().getFieldIndex(GRAM_LENGTH_FIELD_NAME);
    if (gramLenPos >= 0) {
        gramLength = ((AInt32) rec.getValueByPos(gramLenPos)).getIntegerValue();
    }
    // Read a field-source-indicator field.
    List<Integer> keyFieldSourceIndicator = new ArrayList<>();
    int keyFieldSourceIndicatorIndex = rec.getType().getFieldIndex(INDEX_SEARCHKEY_SOURCE_INDICATOR_FIELD_NAME);
    if (keyFieldSourceIndicatorIndex >= 0) {
        IACursor cursor = ((AOrderedList) rec.getValueByPos(keyFieldSourceIndicatorIndex)).getCursor();
        while (cursor.next()) {
            keyFieldSourceIndicator.add((int) ((AInt8) cursor.get()).getByteValue());
        }
    } else {
        for (int index = 0; index < searchKey.size(); ++index) {
            keyFieldSourceIndicator.add(0);
        }
    }
    // Index key type information is not persisted, thus we extract type information from the record metadata.
    if (searchKeyType.isEmpty()) {
        try {
            Dataset dSet = metadataNode.getDataset(jobId, dvName, dsName);
            String datatypeName = dSet.getItemTypeName();
            String datatypeDataverseName = dSet.getItemTypeDataverseName();
            ARecordType recordDt = (ARecordType) metadataNode
                    .getDatatype(jobId, datatypeDataverseName, datatypeName).getDatatype();
            String metatypeName = dSet.getMetaItemTypeName();
            String metatypeDataverseName = dSet.getMetaItemTypeDataverseName();
            ARecordType metaDt = null;
            if (metatypeName != null && metatypeDataverseName != null) {
                metaDt = (ARecordType) metadataNode
                        .getDatatype(jobId, metatypeDataverseName, metatypeName).getDatatype();
            }
            try {
                searchKeyType = KeyFieldTypeUtil.getKeyTypes(recordDt, metaDt, searchKey, keyFieldSourceIndicator);
            } catch (AlgebricksException e) {
                throw new MetadataException(e);
            }
        } catch (RemoteException re) {
            throw HyracksDataException.create(re);
        }
    }
    return new Index(dvName, dsName, indexName, indexStructure, searchKey, keyFieldSourceIndicator, searchKeyType,
            gramLength, isEnforcingKeys, isPrimaryIndex, pendingOp);
}
Use of org.apache.asterix.om.base.AInt32 in project asterixdb by Apache.
The class ExternalFileTupleTranslator, method createExternalFileFromARecord:
private ExternalFile createExternalFileFromARecord(ARecord externalFileRecord) {
    String dataverseName = ((AString) externalFileRecord
            .getValueByPos(MetadataRecordTypes.EXTERNAL_FILE_ARECORD_DATAVERSENAME_FIELD_INDEX)).getStringValue();
    String datasetName = ((AString) externalFileRecord
            .getValueByPos(MetadataRecordTypes.EXTERNAL_FILE_ARECORD_DATASET_NAME_FIELD_INDEX)).getStringValue();
    int fileNumber = ((AInt32) externalFileRecord
            .getValueByPos(MetadataRecordTypes.EXTERNAL_FILE_ARECORD_FILE_NUMBER_FIELD_INDEX)).getIntegerValue();
    String fileName = ((AString) externalFileRecord
            .getValueByPos(MetadataRecordTypes.EXTERNAL_FILE_ARECORD_FILE_NAME_FIELD_INDEX)).getStringValue();
    long fileSize = ((AInt64) externalFileRecord
            .getValueByPos(MetadataRecordTypes.EXTERNAL_FILE_ARECORD_FILE_SIZE_FIELD_INDEX)).getLongValue();
    Date lastModifiedDate = new Date(((ADateTime) externalFileRecord
            .getValueByPos(MetadataRecordTypes.EXTERNAL_FILE_ARECORD_FILE_MOD_DATE_FIELD_INDEX)).getChrononTime());
    // The pending operation is persisted as an AInt32 ordinal of ExternalFilePendingOp.
    ExternalFilePendingOp pendingOp = ExternalFilePendingOp.values()[((AInt32) externalFileRecord
            .getValueByPos(MetadataRecordTypes.EXTERNAL_FILE_ARECORD_FILE_PENDING_OP_FIELD_INDEX)).getIntegerValue()];
    return new ExternalFile(dataverseName, datasetName, fileNumber, fileName, lastModifiedDate, fileSize, pendingOp);
}
Use of org.apache.asterix.om.base.AInt32 in project asterixdb by Apache.
The class MetadataNode, method createExternalFileSearchTuple:
// This method creates a search tuple for the external data file index, since that search key contains an int field (the file number).
@SuppressWarnings("unchecked")
public ITupleReference createExternalFileSearchTuple(String dataverseName, String datasetName, int fileNumber)
        throws HyracksDataException {
    ISerializerDeserializer<AString> stringSerde =
            SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ASTRING);
    ISerializerDeserializer<AInt32> intSerde =
            SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT32);
    AMutableString aString = new AMutableString("");
    ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(3);
    // dataverse field
    aString.setValue(dataverseName);
    stringSerde.serialize(aString, tupleBuilder.getDataOutput());
    tupleBuilder.addFieldEndOffset();
    // dataset field
    aString.setValue(datasetName);
    stringSerde.serialize(aString, tupleBuilder.getDataOutput());
    tupleBuilder.addFieldEndOffset();
    // file number field, written as an AInt32
    intSerde.serialize(new AInt32(fileNumber), tupleBuilder.getDataOutput());
    tupleBuilder.addFieldEndOffset();
    ArrayTupleReference tuple = new ArrayTupleReference();
    tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
    return tuple;
}
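A hypothetical call site, just to show the shape of the arguments as they might appear inside MetadataNode (the dataverse name, dataset name, and file number below are made up for illustration):
ITupleReference searchKey = createExternalFileSearchTuple("ExternalDV", "ExternalDS", 17);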
Use of org.apache.asterix.om.base.AInt32 in project asterixdb by Apache.
The class EquivalenceClassUtils, method addEquivalenceClassesForPrimaryIndexAccess:
/**
 * Adds equivalence classes for primary index accesses, i.e., for unnest-map over the primary
 * index and for data source scans through the primary index: one equivalence class between
 * each primary key variable and the corresponding record field-access expression.
 *
 * @param operator
 *            the primary index access operator.
 * @param indexSearchVars
 *            the variables returned from the primary index access; the last variable
 *            is the record variable.
 * @param recordType
 *            the record type of an index payload record.
 * @param metaRecordType
 *            the type of a meta record associated with an index payload record.
 * @param dataset
 *            the accessed dataset.
 * @param context
 *            the optimization context.
 * @throws AlgebricksException
 */
@SuppressWarnings("unchecked")
public static void addEquivalenceClassesForPrimaryIndexAccess(ILogicalOperator operator,
        List<LogicalVariable> indexSearchVars, ARecordType recordType, ARecordType metaRecordType, Dataset dataset,
        IOptimizationContext context) throws AlgebricksException {
    if (dataset.getDatasetDetails().getDatasetType() != DatasetType.INTERNAL) {
        return;
    }
    InternalDatasetDetails datasetDetails = (InternalDatasetDetails) dataset.getDatasetDetails();
    List<List<String>> primaryKey = datasetDetails.getPrimaryKey();
    Map<String, Integer> fieldNameToIndexMap = new HashMap<String, Integer>();
    String[] fieldNames = recordType.getFieldNames();
    for (int fieldIndex = 0; fieldIndex < fieldNames.length; ++fieldIndex) {
        fieldNameToIndexMap.put(fieldNames[fieldIndex], fieldIndex);
    }
    boolean hasMeta = dataset.hasMetaPart();
    Map<String, Integer> metaFieldNameToIndexMap = new HashMap<>();
    if (hasMeta) {
        String[] metaFieldNames = metaRecordType.getFieldNames();
        for (int metaFieldIndex = 0; metaFieldIndex < metaFieldNames.length; ++metaFieldIndex) {
            metaFieldNameToIndexMap.put(metaFieldNames[metaFieldIndex], metaFieldIndex);
        }
    }
    List<Integer> keySourceIndicators = datasetDetails.getKeySourceIndicator();
    LogicalVariable recordVar = hasMeta ? indexSearchVars.get(indexSearchVars.size() - 2)
            : indexSearchVars.get(indexSearchVars.size() - 1);
    LogicalVariable metaRecordVar = hasMeta ? indexSearchVars.get(indexSearchVars.size() - 1) : null;
    for (int pkIndex = 0; pkIndex < primaryKey.size(); ++pkIndex) {
        LogicalVariable referredRecordVar = recordVar;
        String pkFieldName = primaryKey.get(pkIndex).get(0);
        int source = keySourceIndicators.get(pkIndex);
        Integer fieldIndexInRecord;
        if (source == 0) {
            // The field is from the main record.
            fieldIndexInRecord = fieldNameToIndexMap.get(pkFieldName);
        } else {
            // The field is from the auxiliary meta record.
            referredRecordVar = metaRecordVar;
            fieldIndexInRecord = metaFieldNameToIndexMap.get(pkFieldName);
        }
        LogicalVariable var = indexSearchVars.get(pkIndex);
        // The field position is passed to field-access-by-index as an AInt32 constant.
        ILogicalExpression expr = new ScalarFunctionCallExpression(
                FunctionUtil.getFunctionInfo(BuiltinFunctions.FIELD_ACCESS_BY_INDEX),
                new MutableObject<ILogicalExpression>(new VariableReferenceExpression(referredRecordVar)),
                new MutableObject<ILogicalExpression>(
                        new ConstantExpression(new AsterixConstantValue(new AInt32(fieldIndexInRecord)))));
        EquivalenceClass equivClass =
                new EquivalenceClass(Collections.singletonList(var), var, Collections.singletonList(expr));
        Map<LogicalVariable, EquivalenceClass> equivalenceMap = context.getEquivalenceClassMap(operator);
        if (equivalenceMap == null) {
            equivalenceMap = new HashMap<LogicalVariable, EquivalenceClass>();
            context.putEquivalenceClassMap(operator, equivalenceMap);
        }
        equivalenceMap.put(var, equivClass);
    }
}
Use of org.apache.asterix.om.base.AInt32 in project asterixdb by Apache.
The class NonTaggedDataFormat, method createMBRFactory:
@SuppressWarnings("unchecked")
@Override
public IScalarEvaluatorFactory[] createMBRFactory(ARecordType recType, List<String> fldName, int recordColumn,
        int dimension, List<String> filterFieldName, boolean isPointMBR) throws AlgebricksException {
    IScalarEvaluatorFactory evalFactory = getFieldAccessEvaluatorFactory(recType, fldName, recordColumn);
    int numOfFields = isPointMBR ? dimension : dimension * 2;
    IScalarEvaluatorFactory[] evalFactories =
            new IScalarEvaluatorFactory[numOfFields + (filterFieldName == null ? 0 : 1)];
    ArrayBackedValueStorage abvs1 = new ArrayBackedValueStorage();
    DataOutput dos1 = abvs1.getDataOutput();
    try {
        // Serialize the dimension as an AInt32 constant.
        AInt32 ai = new AInt32(dimension);
        SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(ai.getType()).serialize(ai, dos1);
    } catch (HyracksDataException e) {
        throw new AlgebricksException(e);
    }
    IScalarEvaluatorFactory dimensionEvalFactory =
            new ConstantEvalFactory(Arrays.copyOf(abvs1.getByteArray(), abvs1.getLength()));
    for (int i = 0; i < numOfFields; i++) {
        ArrayBackedValueStorage abvs2 = new ArrayBackedValueStorage();
        DataOutput dos2 = abvs2.getDataOutput();
        try {
            // Serialize the coordinate index as an AInt32 constant.
            AInt32 ai = new AInt32(i);
            SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(ai.getType()).serialize(ai, dos2);
        } catch (HyracksDataException e) {
            throw new AlgebricksException(e);
        }
        IScalarEvaluatorFactory coordinateEvalFactory =
                new ConstantEvalFactory(Arrays.copyOf(abvs2.getByteArray(), abvs2.getLength()));
        evalFactories[i] = new CreateMBREvalFactory(evalFactory, dimensionEvalFactory, coordinateEvalFactory);
    }
    if (filterFieldName != null) {
        evalFactories[numOfFields] = getFieldAccessEvaluatorFactory(recType, filterFieldName, recordColumn);
    }
    return evalFactories;
}
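As a closing illustration, here is a minimal sketch, not taken from the repository, of the AInt32 round trip that the snippets above rely on: the int is boxed, written with the AINT32 serde obtained from SerializerDeserializerProvider, and read back with getIntegerValue(). The variable names and the value 42 are made up for the example; the unchecked assignment of the serde mirrors the usage in createExternalFileSearchTuple above.
// Hypothetical round trip: serialize an AInt32 into a buffer, then deserialize it again.
ISerializerDeserializer<AInt32> intSerde =
        SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT32);
ArrayBackedValueStorage storage = new ArrayBackedValueStorage();
intSerde.serialize(new AInt32(42), storage.getDataOutput());
DataInput in = new DataInputStream(
        new ByteArrayInputStream(storage.getByteArray(), 0, storage.getLength()));
int value = intSerde.deserialize(in).getIntegerValue(); // value == 42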