Use of org.apache.asterix.om.types.IAType in project asterixdb by Apache:
class TypeUtil, method createEnforcedType.
/**
 * Merges typed index fields with the specified recordType, allowing indexed fields to be optional.
 * I.e. the type { "personId":int32, "name": string, "address" : { "street": string } } with typed indexes
 * on age:int32, address.state:string will be merged into type { "personId":int32, "name": string,
 * "age": int32?, "address" : { "street": string, "state": string? } }. Used by open indexes to enforce
 * the type of an indexed record.
 *
 * @param recordType the dataset's record type to be enforced
 * @param metaType the dataset's meta record type; returned unchanged (enforcement on meta fields is rejected)
 * @param indexes all indexes of the dataset; only enforcing secondary indexes contribute to the merged type
 * @return a pair of (enforced record type, unmodified meta type)
 * @throws AlgebricksException if an enforcing index touches the meta part, or if an existing closed
 *         field cannot be promoted to the index's key type
 */
public static Pair<ARecordType, ARecordType> createEnforcedType(ARecordType recordType, ARecordType metaType, List<Index> indexes) throws AlgebricksException {
ARecordType enforcedRecordType = recordType;
ARecordType enforcedMetaType = metaType;
for (Index index : indexes) {
// Only enforcing secondary indexes alter the type. (NB: "Fileds" is the project API's spelling.)
if (!index.isSecondaryIndex() || !index.isEnforcingKeyFileds()) {
continue;
}
// Enforcing an open field is only supported on the record part, never on the meta part.
if (index.hasMetaFields()) {
throw new AlgebricksException("Indexing an open field is only supported on the record part");
}
// Fold each key-field path of this index into the enforced type.
for (int i = 0; i < index.getKeyFieldNames().size(); i++) {
// Stack of (ancestor record type, child field name) pairs from the root down to the
// deepest ancestor that already exists in the enforced type.
Deque<Pair<ARecordType, String>> nestedTypeStack = new ArrayDeque<>();
List<String> splits = index.getKeyFieldNames().get(i);
ARecordType nestedFieldType = enforcedRecordType;
boolean openRecords = false;
String bridgeName = nestedFieldType.getTypeName();
int j;
// Build the stack for the enforced type
for (j = 1; j < splits.size(); j++) {
nestedTypeStack.push(new Pair<>(nestedFieldType, splits.get(j - 1)));
bridgeName = nestedFieldType.getTypeName();
nestedFieldType = (ARecordType) enforcedRecordType.getSubFieldType(splits.subList(0, j));
// A missing sub-field means the remainder of the path lives in the open part of the record.
if (nestedFieldType == null) {
openRecords = true;
break;
}
}
if (openRecords) {
// create the smallest record
enforcedRecordType = new ARecordType(splits.get(splits.size() - 2), new String[] { splits.get(splits.size() - 1) }, new IAType[] { AUnionType.createUnknownableType(index.getKeyFieldTypes().get(i)) }, true);
// create the open part of the nested field
for (int k = splits.size() - 3; k > (j - 2); k--) {
enforcedRecordType = new ARecordType(splits.get(k), new String[] { splits.get(k + 1) }, new IAType[] { AUnionType.createUnknownableType(enforcedRecordType) }, true);
}
// Bridge the gap: attach the synthesized open chain to the deepest existing ancestor.
Pair<ARecordType, String> gapPair = nestedTypeStack.pop();
ARecordType parent = gapPair.first;
IAType[] parentFieldTypes = ArrayUtils.addAll(parent.getFieldTypes().clone(), new IAType[] { AUnionType.createUnknownableType(enforcedRecordType) });
enforcedRecordType = new ARecordType(bridgeName, ArrayUtils.addAll(parent.getFieldNames(), enforcedRecordType.getTypeName()), parentFieldTypes, true);
} else {
//Schema is closed all the way to the field
//enforced fields are either null or strongly typed
Map<String, IAType> recordNameTypesMap = TypeUtil.createRecordNameTypeMap(nestedFieldType);
// If an enforced field already exists, unwrap its optional (unknownable) union to get the actual type.
IAType enforcedFieldType = recordNameTypesMap.get(splits.get(splits.size() - 1));
if (enforcedFieldType != null && enforcedFieldType.getTypeTag() == ATypeTag.UNION && ((AUnionType) enforcedFieldType).isUnknownableType()) {
enforcedFieldType = ((AUnionType) enforcedFieldType).getActualType();
}
// An already-present field must be promotable to the index's declared key type.
if (enforcedFieldType != null && !ATypeHierarchy.canPromote(enforcedFieldType.getTypeTag(), index.getKeyFieldTypes().get(i).getTypeTag())) {
throw new AlgebricksException("Cannot enforce field " + index.getKeyFieldNames().get(i) + " to have type " + index.getKeyFieldTypes().get(i));
}
// Field absent: add it as an optional (unknownable) field of the key type.
if (enforcedFieldType == null) {
recordNameTypesMap.put(splits.get(splits.size() - 1), AUnionType.createUnknownableType(index.getKeyFieldTypes().get(i)));
}
enforcedRecordType = new ARecordType(nestedFieldType.getTypeName(), recordNameTypesMap.keySet().toArray(new String[recordNameTypesMap.size()]), recordNameTypesMap.values().toArray(new IAType[recordNameTypesMap.size()]), nestedFieldType.isOpen());
}
// Create the enforced type for the nested fields in the schema, from the ground up
if (!nestedTypeStack.isEmpty()) {
while (!nestedTypeStack.isEmpty()) {
Pair<ARecordType, String> nestedTypePair = nestedTypeStack.pop();
ARecordType nestedRecType = nestedTypePair.first;
IAType[] nestedRecTypeFieldTypes = nestedRecType.getFieldTypes().clone();
// Re-point the child slot of each ancestor at the freshly enforced child type.
nestedRecTypeFieldTypes[nestedRecType.getFieldIndex(nestedTypePair.second)] = enforcedRecordType;
enforcedRecordType = new ARecordType(nestedRecType.getTypeName() + "_enforced", nestedRecType.getFieldNames(), nestedRecTypeFieldTypes, nestedRecType.isOpen());
}
}
}
}
return new Pair<>(enforcedRecordType, enforcedMetaType);
}
Use of org.apache.asterix.om.types.IAType in project asterixdb by Apache:
class AOrderedlistPrinterFactory, method createPrinter.
@Override
public IPrinter createPrinter() {
    // Fall back to the generic open ARRAY type when no list type was supplied.
    final IAType listType;
    if (orderedlistType != null) {
        listType = orderedlistType;
    } else {
        listType = DefaultOpenFieldType.getDefaultOpenFieldType(ATypeTag.ARRAY);
    }
    final PointableAllocator pointableAllocator = new PointableAllocator();
    final IVisitablePointable listPointable = pointableAllocator.allocateListValue(listType);
    final APrintVisitor visitor = new APrintVisitor();
    // Mutable argument shared with the visitor: (output stream, type tag of the printed value).
    final Pair<PrintStream, ATypeTag> visitorArg = new Pair<>(null, null);
    return new IPrinter() {
        @Override
        public void init() {
            visitorArg.second = listType.getTypeTag();
        }

        @Override
        public void print(byte[] b, int start, int l, PrintStream ps) throws HyracksDataException {
            // Point the pointable at the serialized list, then let the visitor render it to ps.
            listPointable.set(b, start, l);
            visitorArg.first = ps;
            listPointable.accept(visitor, visitorArg);
        }
    };
}
Use of org.apache.asterix.om.types.IAType in project asterixdb by Apache:
class TypeResolverUtil, method generalizeRecordFields.
// Generates closed fields and possible additional fields of a generalized type of two record types.
private static boolean generalizeRecordFields(ARecordType leftType, ARecordType rightType, Set<String> allPossibleAdditionalFieldNames, List<String> fieldNames, List<IAType> fieldTypes) {
    // Snapshot of names already emitted so the same closed field is not added twice.
    final Set<String> alreadyEmitted = new HashSet<>(fieldNames);
    boolean fullyGeneralized = true;
    for (String name : leftType.getFieldNames()) {
        IAType merged = null;
        IAType rightFieldType = rightType.getFieldType(name);
        if (rightFieldType != null) {
            merged = generalizeTypes(leftType.getFieldType(name), rightFieldType);
        }
        if (merged == null || merged.equals(BuiltinType.ANY)) {
            // Field is absent on the right or generalizes to ANY: it can only live in the open part.
            allPossibleAdditionalFieldNames.add(name);
            fullyGeneralized = false;
        } else if (!alreadyEmitted.contains(name)) {
            fieldNames.add(name);
            fieldTypes.add(merged);
        }
    }
    return fullyGeneralized;
}
Use of org.apache.asterix.om.types.IAType in project asterixdb by Apache:
class SecondaryIndexOperationsHelper, method setFilterTypeTraitsAndComparators.
protected void setFilterTypeTraitsAndComparators() throws AlgebricksException {
    // Allocate the per-filter-field metadata arrays (slot 0 is populated below).
    filterTypeTraits = new ITypeTraits[numFilterFields];
    filterCmpFactories = new IBinaryComparatorFactory[numFilterFields];
    secondaryFilterFields = new int[numFilterFields];
    primaryFilterFields = new int[numFilterFields];
    // Both B-tree field maps are identity mappings: field at position i comes from position i.
    primaryBTreeFields = new int[numPrimaryKeys + 1];
    for (int pos = 0; pos < primaryBTreeFields.length; pos++) {
        primaryBTreeFields[pos] = pos;
    }
    secondaryBTreeFields = new int[index.getKeyFieldNames().size() + numPrimaryKeys];
    for (int pos = 0; pos < secondaryBTreeFields.length; pos++) {
        secondaryBTreeFields[pos] = pos;
    }
    // Resolve the filter field's type and derive its comparator/traits from it.
    IAType filterType = itemType.getSubFieldType(filterFieldName);
    filterCmpFactories[0] = BinaryComparatorFactoryProvider.INSTANCE.getBinaryComparatorFactory(filterType, true);
    filterTypeTraits[0] = TypeTraitProvider.INSTANCE.getTypeTrait(filterType);
    // The filter value sits after all keys: secondary keys + primary keys in the secondary,
    // primary keys + record in the primary.
    secondaryFilterFields[0] = getNumSecondaryKeys() + numPrimaryKeys;
    primaryFilterFields[0] = numPrimaryKeys + 1;
}
Use of org.apache.asterix.om.types.IAType in project asterixdb by Apache:
class RTreeAccessMethod, method createSecondaryToPrimaryPlan.
/**
 * Builds the secondary-to-primary plan for an R-Tree access method: an assign operator that
 * computes the MBR fields of the search key feeds an R-Tree secondary-index unnest-map, whose
 * results in turn feed a primary-index (or, for external datasets, external-data) lookup.
 * Returns null if the indexed field's type cannot be resolved to a non-nullable open-field type.
 */
private ILogicalOperator createSecondaryToPrimaryPlan(OptimizableOperatorSubTree indexSubTree, OptimizableOperatorSubTree probeSubTree, Index chosenIndex, AccessMethodAnalysisContext analysisCtx, boolean retainInput, boolean retainNull, boolean requiresBroadcast, IOptimizationContext context) throws AlgebricksException {
IOptimizableFuncExpr optFuncExpr = AccessMethodUtils.chooseFirstOptFuncExpr(chosenIndex, analysisCtx);
Dataset dataset = indexSubTree.getDataset();
ARecordType recordType = indexSubTree.getRecordType();
ARecordType metaRecordType = indexSubTree.getMetaRecordType();
int optFieldIdx = AccessMethodUtils.chooseFirstOptFuncVar(chosenIndex, analysisCtx);
Pair<IAType, Boolean> keyPairType = Index.getNonNullableOpenFieldType(optFuncExpr.getFieldType(optFieldIdx), optFuncExpr.getFieldName(optFieldIdx), recordType);
// Cannot build the plan if the key type cannot be determined.
if (keyPairType == null) {
return null;
}
// Get the number of dimensions corresponding to the field indexed by chosenIndex.
IAType spatialType = keyPairType.first;
int numDimensions = NonTaggedFormatUtil.getNumDimensions(spatialType.getTypeTag());
// An R-Tree key is a min/max pair per dimension.
int numSecondaryKeys = numDimensions * 2;
// we made sure indexSubTree has datasource scan
AbstractDataSourceOperator dataSourceOp = (AbstractDataSourceOperator) indexSubTree.getDataSourceRef().getValue();
RTreeJobGenParams jobGenParams = new RTreeJobGenParams(chosenIndex.getIndexName(), IndexType.RTREE, dataset.getDataverseName(), dataset.getDatasetName(), retainInput, requiresBroadcast);
// A spatial object is serialized in the constant of the func expr we are optimizing.
// The R-Tree expects as input an MBR represented with 1 field per dimension.
// Here we generate vars and funcs for extracting MBR fields from the constant into fields of a tuple (as the
// R-Tree expects them).
// List of variables for the assign.
ArrayList<LogicalVariable> keyVarList = new ArrayList<>();
// List of expressions for the assign.
ArrayList<Mutable<ILogicalExpression>> keyExprList = new ArrayList<>();
Pair<ILogicalExpression, Boolean> returnedSearchKeyExpr = AccessMethodUtils.createSearchKeyExpr(optFuncExpr, indexSubTree, probeSubTree);
ILogicalExpression searchKeyExpr = returnedSearchKeyExpr.first;
for (int i = 0; i < numSecondaryKeys; i++) {
// The create MBR function "extracts" one field of an MBR around the given spatial object.
AbstractFunctionCallExpression createMBR = new ScalarFunctionCallExpression(FunctionUtil.getFunctionInfo(BuiltinFunctions.CREATE_MBR));
// Spatial object is the constant from the func expr we are optimizing.
createMBR.getArguments().add(new MutableObject<>(searchKeyExpr));
// The number of dimensions.
createMBR.getArguments().add(new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(new AInt32(numDimensions)))));
// Which part of the MBR to extract.
createMBR.getArguments().add(new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(new AInt32(i)))));
// Add a variable and its expr to the lists which will be passed into an assign op.
LogicalVariable keyVar = context.newVar();
keyVarList.add(keyVar);
keyExprList.add(new MutableObject<ILogicalExpression>(createMBR));
}
jobGenParams.setKeyVarList(keyVarList);
// Assign operator that "extracts" the MBR fields from the func-expr constant into a tuple.
AssignOperator assignSearchKeys = new AssignOperator(keyVarList, keyExprList);
if (probeSubTree == null) {
// We are optimizing a selection query.
// Input to this assign is the EmptyTupleSource (which the dataSourceScan also must have had as input).
assignSearchKeys.getInputs().add(new MutableObject<>(OperatorManipulationUtil.deepCopy(dataSourceOp.getInputs().get(0).getValue())));
assignSearchKeys.setExecutionMode(dataSourceOp.getExecutionMode());
} else {
// We are optimizing a join, place the assign op top of the probe subtree.
assignSearchKeys.getInputs().add(probeSubTree.getRootRef());
}
// Secondary-index search over the generated MBR keys.
ILogicalOperator secondaryIndexUnnestOp = AccessMethodUtils.createSecondaryIndexUnnestMap(dataset, recordType, metaRecordType, chosenIndex, assignSearchKeys, jobGenParams, context, false, retainInput, retainNull);
// Generate the rest of the upstream plan which feeds the search results into the primary index.
return dataset.getDatasetType() == DatasetType.EXTERNAL ? AccessMethodUtils.createExternalDataLookupUnnestMap(dataSourceOp, dataset, recordType, secondaryIndexUnnestOp, context, retainInput, retainNull) : AccessMethodUtils.createPrimaryIndexUnnestMap(dataSourceOp, dataset, recordType, metaRecordType, secondaryIndexUnnestOp, context, true, retainInput, false, false);
}
Aggregations