Use of org.apache.asterix.om.constants.AsterixConstantValue in project asterixdb by apache.
Class EquivalenceClassUtils, method addEquivalenceClassesForPrimaryIndexAccess.
/**
 * Adds equivalence classes for primary index accesses, including unnest-map for
 * primary index access and data source scan through the primary index:
 * one equivalence class between each primary key variable and the corresponding
 * record field-access expression.
 *
 * @param operator the primary index access operator.
 * @param indexSearchVars the variables returned from the primary index access; the last variable is the record variable.
 * @param recordType the record type of an index payload record.
 * @param metaRecordType the type of the meta record associated with an index payload record.
 * @param dataset the accessed dataset.
 * @param context the optimization context.
 * @throws AlgebricksException
 */
@SuppressWarnings("unchecked")
public static void addEquivalenceClassesForPrimaryIndexAccess(ILogicalOperator operator,
        List<LogicalVariable> indexSearchVars, ARecordType recordType, ARecordType metaRecordType,
        Dataset dataset, IOptimizationContext context) throws AlgebricksException {
if (dataset.getDatasetDetails().getDatasetType() != DatasetType.INTERNAL) {
return;
}
InternalDatasetDetails datasetDetails = (InternalDatasetDetails) dataset.getDatasetDetails();
List<List<String>> primaryKey = datasetDetails.getPrimaryKey();
Map<String, Integer> fieldNameToIndexMap = new HashMap<String, Integer>();
String[] fieldNames = recordType.getFieldNames();
for (int fieldIndex = 0; fieldIndex < fieldNames.length; ++fieldIndex) {
fieldNameToIndexMap.put(fieldNames[fieldIndex], fieldIndex);
}
boolean hasMeta = dataset.hasMetaPart();
Map<String, Integer> metaFieldNameToIndexMap = new HashMap<>();
if (hasMeta) {
String[] metaFieldNames = metaRecordType.getFieldNames();
for (int metaFieldIndex = 0; metaFieldIndex < metaFieldNames.length; ++metaFieldIndex) {
metaFieldNameToIndexMap.put(metaFieldNames[metaFieldIndex], metaFieldIndex);
}
}
List<Integer> keySourceIndicators = datasetDetails.getKeySourceIndicator();
LogicalVariable recordVar = hasMeta ? indexSearchVars.get(indexSearchVars.size() - 2) : indexSearchVars.get(indexSearchVars.size() - 1);
LogicalVariable metaRecordVar = hasMeta ? indexSearchVars.get(indexSearchVars.size() - 1) : null;
for (int pkIndex = 0; pkIndex < primaryKey.size(); ++pkIndex) {
LogicalVariable referredRecordVar = recordVar;
String pkFieldName = primaryKey.get(pkIndex).get(0);
int source = keySourceIndicators.get(pkIndex);
Integer fieldIndexInRecord;
if (source == 0) {
// The field is from the main record.
fieldIndexInRecord = fieldNameToIndexMap.get(pkFieldName);
} else {
// The field is from the auxiliary meta record.
referredRecordVar = metaRecordVar;
fieldIndexInRecord = metaFieldNameToIndexMap.get(pkFieldName);
}
LogicalVariable var = indexSearchVars.get(pkIndex);
ILogicalExpression expr = new ScalarFunctionCallExpression(
        FunctionUtil.getFunctionInfo(BuiltinFunctions.FIELD_ACCESS_BY_INDEX),
        new MutableObject<ILogicalExpression>(new VariableReferenceExpression(referredRecordVar)),
        new MutableObject<ILogicalExpression>(
                new ConstantExpression(new AsterixConstantValue(new AInt32(fieldIndexInRecord)))));
EquivalenceClass equivClass = new EquivalenceClass(Collections.singletonList(var), var, Collections.singletonList(expr));
Map<LogicalVariable, EquivalenceClass> equivalenceMap = context.getEquivalenceClassMap(operator);
if (equivalenceMap == null) {
equivalenceMap = new HashMap<LogicalVariable, EquivalenceClass>();
context.putEquivalenceClassMap(operator, equivalenceMap);
}
equivalenceMap.put(var, equivClass);
}
}
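The step worth noting in this usage is the last one inside the loop: the primary key's position in the record type is wrapped in an AsterixConstantValue so it can travel inside a ConstantExpression as the second argument of field-access-by-index. A minimal sketch of that step in isolation, assuming the same imports as the method above; recordVar stands for the payload record variable from the snippet, and the field index 0 is hypothetical:
// Sketch only: building field-access-by-index(record, i) with a hypothetical index.
int fieldIndexInRecord = 0; // hypothetical position of the primary key inside the record type
ILogicalExpression pkFieldAccess = new ScalarFunctionCallExpression(
        FunctionUtil.getFunctionInfo(BuiltinFunctions.FIELD_ACCESS_BY_INDEX),
        new MutableObject<ILogicalExpression>(new VariableReferenceExpression(recordVar)),
        new MutableObject<ILogicalExpression>(
                new ConstantExpression(new AsterixConstantValue(new AInt32(fieldIndexInRecord)))));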
Use of org.apache.asterix.om.constants.AsterixConstantValue in project asterixdb by apache.
Class StaticTypeCastUtil, method staticRecordTypeCast.
/**
 * Statically casts the type of a record from its current type to the required type.
 *
 * @param func the record constructor expression.
 * @param reqType the required type.
 * @param inputType the current type.
 * @param env the type environment.
 * @return true if the cast could be performed statically; false if it must be deferred to the dynamic type cast.
 * @throws AlgebricksException
 */
private static boolean staticRecordTypeCast(AbstractFunctionCallExpression func, ARecordType reqType,
        ARecordType inputType, IVariableTypeEnvironment env) throws AlgebricksException {
if (!(func.getFunctionIdentifier() == BuiltinFunctions.OPEN_RECORD_CONSTRUCTOR
        || func.getFunctionIdentifier() == BuiltinFunctions.CLOSED_RECORD_CONSTRUCTOR)) {
return false;
}
IAType[] reqFieldTypes = reqType.getFieldTypes();
String[] reqFieldNames = reqType.getFieldNames();
IAType[] inputFieldTypes = inputType.getFieldTypes();
String[] inputFieldNames = inputType.getFieldNames();
int[] fieldPermutation = new int[reqFieldTypes.length];
boolean[] nullFields = new boolean[reqFieldTypes.length];
boolean[] openFields = new boolean[inputFieldTypes.length];
Arrays.fill(nullFields, false);
Arrays.fill(openFields, true);
Arrays.fill(fieldPermutation, -1);
// forward match: match from actual to required
boolean matched = false;
for (int i = 0; i < inputFieldNames.length; i++) {
String fieldName = inputFieldNames[i];
IAType fieldType = inputFieldTypes[i];
if (2 * i + 1 > func.getArguments().size()) {
// it is not a record constructor function
return false;
}
// 2*i+1 is the index of field value expression
ILogicalExpression arg = func.getArguments().get(2 * i + 1).getValue();
matched = false;
for (int j = 0; j < reqFieldNames.length; j++) {
String reqFieldName = reqFieldNames[j];
IAType reqFieldType = reqFieldTypes[j];
if (fieldName.equals(reqFieldName)) {
//type matched
if (fieldType.equals(reqFieldType)) {
fieldPermutation[j] = i;
openFields[i] = false;
matched = true;
if (arg.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
ScalarFunctionCallExpression scalarFunc = (ScalarFunctionCallExpression) arg;
rewriteFuncExpr(scalarFunc, reqFieldType, fieldType, env);
}
break;
}
// match the optional field
if (NonTaggedFormatUtil.isOptional(reqFieldType)) {
IAType itemType = ((AUnionType) reqFieldType).getActualType();
reqFieldType = itemType;
if (fieldType.equals(BuiltinType.AMISSING) || fieldType.equals(itemType)) {
fieldPermutation[j] = i;
openFields[i] = false;
matched = true;
// rewrite record expr
if (arg.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
ScalarFunctionCallExpression scalarFunc = (ScalarFunctionCallExpression) arg;
rewriteFuncExpr(scalarFunc, reqFieldType, fieldType, env);
}
break;
}
}
// the input field is optional: defer the unknown check to runtime via the check-unknown function
if (NonTaggedFormatUtil.isOptional(fieldType)) {
IAType itemType = ((AUnionType) fieldType).getActualType();
if (reqFieldType.equals(itemType)) {
fieldPermutation[j] = i;
openFields[i] = false;
matched = true;
ScalarFunctionCallExpression notNullFunc = new ScalarFunctionCallExpression(FunctionUtil.getFunctionInfo(BuiltinFunctions.CHECK_UNKNOWN));
notNullFunc.getArguments().add(new MutableObject<ILogicalExpression>(arg));
// wrap the original argument with the check-unknown (not-null) function
func.getArguments().get(2 * i + 1).setValue(notNullFunc);
break;
}
}
// match the record field: need cast
if (arg.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
ScalarFunctionCallExpression scalarFunc = (ScalarFunctionCallExpression) arg;
rewriteFuncExpr(scalarFunc, reqFieldType, fieldType, env);
fieldPermutation[j] = i;
openFields[i] = false;
matched = true;
break;
}
}
}
// the input has extra fields
if (!matched && !reqType.isOpen()) {
throw new AlgebricksException("static type mismatch: the input record includes an extra closed field " + fieldName + ":" + fieldType + "! Please check the field name and type.");
}
}
// backward match: match from required to actual
for (int i = 0; i < reqFieldNames.length; i++) {
String reqFieldName = reqFieldNames[i];
IAType reqFieldType = reqFieldTypes[i];
matched = false;
for (int j = 0; j < inputFieldNames.length; j++) {
String fieldName = inputFieldNames[j];
IAType fieldType = inputFieldTypes[j];
if (!fieldName.equals(reqFieldName)) {
continue;
}
// the entry index of fieldPermutation is the required-field index
if (!openFields[j]) {
matched = true;
break;
}
// match the optional field
if (!NonTaggedFormatUtil.isOptional(reqFieldType)) {
continue;
}
IAType itemType = ((AUnionType) reqFieldType).getActualType();
if (fieldType.equals(BuiltinType.AMISSING) || fieldType.equals(itemType)) {
matched = true;
break;
}
}
if (matched) {
continue;
}
if (NonTaggedFormatUtil.isOptional(reqFieldType)) {
// add a null field
nullFields[i] = true;
} else {
// no matched field in the input for a required closed field
if (inputType.isOpen()) {
// if the input type is open, return false and let the dynamic type cast defer the error to runtime
return false;
} else {
throw new AlgebricksException("static type mismatch: the input record misses a required closed field "
        + reqFieldName + ":" + reqFieldType + "! Please check the field name and type.");
}
}
}
List<Mutable<ILogicalExpression>> arguments = func.getArguments();
List<Mutable<ILogicalExpression>> originalArguments = new ArrayList<Mutable<ILogicalExpression>>();
originalArguments.addAll(arguments);
arguments.clear();
// re-order the closed part and fill in null fields
for (int i = 0; i < fieldPermutation.length; i++) {
int pos = fieldPermutation[i];
if (pos >= 0) {
arguments.add(originalArguments.get(2 * pos));
arguments.add(originalArguments.get(2 * pos + 1));
}
if (nullFields[i]) {
// add a null field
arguments.add(new MutableObject<ILogicalExpression>(
        new ConstantExpression(new AsterixConstantValue(new AString(reqFieldNames[i])))));
arguments.add(new MutableObject<ILogicalExpression>(
        new ConstantExpression(new AsterixConstantValue(ANull.NULL))));
}
}
// add the open part
for (int i = 0; i < openFields.length; i++) {
if (openFields[i]) {
arguments.add(originalArguments.get(2 * i));
Mutable<ILogicalExpression> expRef = originalArguments.get(2 * i + 1);
injectCastToRelaxType(expRef, inputFieldTypes[i], env);
arguments.add(expRef);
}
}
return true;
}
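Note how the closed part is padded: for every required optional field that has no match in the input, the rewritten record constructor receives a field-name constant and a null constant, both wrapped in AsterixConstantValue. A minimal sketch of that padding step, with a hypothetical field name and assuming the same imports and the arguments list from the method above:
// Sketch only: padding one missing optional field in the rewritten record constructor.
String missingFieldName = "hypotheticalOptionalField"; // hypothetical: a required optional field absent from the input
arguments.add(new MutableObject<ILogicalExpression>(
        new ConstantExpression(new AsterixConstantValue(new AString(missingFieldName)))));
arguments.add(new MutableObject<ILogicalExpression>(
        new ConstantExpression(new AsterixConstantValue(ANull.NULL))));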
Use of org.apache.asterix.om.constants.AsterixConstantValue in project asterixdb by apache.
Class NonTaggedDataFormat, method getConstantEvalFactory.
@SuppressWarnings("unchecked")
@Override
public IScalarEvaluatorFactory getConstantEvalFactory(IAlgebricksConstantValue value) throws AlgebricksException {
IAObject obj = null;
if (value.isMissing()) {
obj = AMissing.MISSING;
} else if (value.isTrue()) {
obj = ABoolean.TRUE;
} else if (value.isFalse()) {
obj = ABoolean.FALSE;
} else {
AsterixConstantValue acv = (AsterixConstantValue) value;
obj = acv.getObject();
}
ArrayBackedValueStorage abvs = new ArrayBackedValueStorage();
DataOutput dos = abvs.getDataOutput();
try {
SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(obj.getType()).serialize(obj, dos);
} catch (HyracksDataException e) {
throw new AlgebricksException(e);
}
return new ConstantEvalFactory(Arrays.copyOf(abvs.getByteArray(), abvs.getLength()));
}
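In this usage the AsterixConstantValue is consumed rather than built: its IAObject payload is serialized once into a byte array that is baked into a ConstantEvalFactory. A minimal sketch of a call site, assuming 'format' refers to a NonTaggedDataFormat instance and that the call site handles the declared AlgebricksException:
// Sketch only: constant evaluator factory for the integer constant 42.
IScalarEvaluatorFactory intConstFactory =
        format.getConstantEvalFactory(new AsterixConstantValue(new AInt32(42)));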
Use of org.apache.asterix.om.constants.AsterixConstantValue in project asterixdb by apache.
Class RTreeAccessMethod, method createSecondaryToPrimaryPlan.
private ILogicalOperator createSecondaryToPrimaryPlan(OptimizableOperatorSubTree indexSubTree,
        OptimizableOperatorSubTree probeSubTree, Index chosenIndex, AccessMethodAnalysisContext analysisCtx,
        boolean retainInput, boolean retainNull, boolean requiresBroadcast, IOptimizationContext context)
        throws AlgebricksException {
IOptimizableFuncExpr optFuncExpr = AccessMethodUtils.chooseFirstOptFuncExpr(chosenIndex, analysisCtx);
Dataset dataset = indexSubTree.getDataset();
ARecordType recordType = indexSubTree.getRecordType();
ARecordType metaRecordType = indexSubTree.getMetaRecordType();
int optFieldIdx = AccessMethodUtils.chooseFirstOptFuncVar(chosenIndex, analysisCtx);
Pair<IAType, Boolean> keyPairType = Index.getNonNullableOpenFieldType(optFuncExpr.getFieldType(optFieldIdx),
        optFuncExpr.getFieldName(optFieldIdx), recordType);
if (keyPairType == null) {
return null;
}
// Get the number of dimensions corresponding to the field indexed by chosenIndex.
IAType spatialType = keyPairType.first;
int numDimensions = NonTaggedFormatUtil.getNumDimensions(spatialType.getTypeTag());
int numSecondaryKeys = numDimensions * 2;
// we made sure indexSubTree has datasource scan
AbstractDataSourceOperator dataSourceOp = (AbstractDataSourceOperator) indexSubTree.getDataSourceRef().getValue();
RTreeJobGenParams jobGenParams = new RTreeJobGenParams(chosenIndex.getIndexName(), IndexType.RTREE,
        dataset.getDataverseName(), dataset.getDatasetName(), retainInput, requiresBroadcast);
// A spatial object is serialized in the constant of the func expr we are optimizing.
// The R-Tree expects as input an MBR represented with 1 field per dimension.
// Here we generate vars and funcs for extracting MBR fields from the constant into fields of a tuple (as the
// R-Tree expects them).
// List of variables for the assign.
ArrayList<LogicalVariable> keyVarList = new ArrayList<>();
// List of expressions for the assign.
ArrayList<Mutable<ILogicalExpression>> keyExprList = new ArrayList<>();
Pair<ILogicalExpression, Boolean> returnedSearchKeyExpr =
        AccessMethodUtils.createSearchKeyExpr(optFuncExpr, indexSubTree, probeSubTree);
ILogicalExpression searchKeyExpr = returnedSearchKeyExpr.first;
for (int i = 0; i < numSecondaryKeys; i++) {
// The create MBR function "extracts" one field of an MBR around the given spatial object.
AbstractFunctionCallExpression createMBR = new ScalarFunctionCallExpression(FunctionUtil.getFunctionInfo(BuiltinFunctions.CREATE_MBR));
// Spatial object is the constant from the func expr we are optimizing.
createMBR.getArguments().add(new MutableObject<>(searchKeyExpr));
// The number of dimensions.
createMBR.getArguments().add(new MutableObject<ILogicalExpression>(
        new ConstantExpression(new AsterixConstantValue(new AInt32(numDimensions)))));
// Which part of the MBR to extract.
createMBR.getArguments().add(new MutableObject<ILogicalExpression>(
        new ConstantExpression(new AsterixConstantValue(new AInt32(i)))));
// Add a variable and its expr to the lists which will be passed into an assign op.
LogicalVariable keyVar = context.newVar();
keyVarList.add(keyVar);
keyExprList.add(new MutableObject<ILogicalExpression>(createMBR));
}
jobGenParams.setKeyVarList(keyVarList);
// Assign operator that "extracts" the MBR fields from the func-expr constant into a tuple.
AssignOperator assignSearchKeys = new AssignOperator(keyVarList, keyExprList);
if (probeSubTree == null) {
// We are optimizing a selection query.
// Input to this assign is the EmptyTupleSource (which the dataSourceScan also must have had as input).
assignSearchKeys.getInputs().add(new MutableObject<>(
        OperatorManipulationUtil.deepCopy(dataSourceOp.getInputs().get(0).getValue())));
assignSearchKeys.setExecutionMode(dataSourceOp.getExecutionMode());
} else {
// We are optimizing a join: place the assign op on top of the probe subtree.
assignSearchKeys.getInputs().add(probeSubTree.getRootRef());
}
ILogicalOperator secondaryIndexUnnestOp = AccessMethodUtils.createSecondaryIndexUnnestMap(dataset, recordType,
        metaRecordType, chosenIndex, assignSearchKeys, jobGenParams, context, false, retainInput, retainNull);
// Generate the rest of the upstream plan which feeds the search results into the primary index.
return dataset.getDatasetType() == DatasetType.EXTERNAL
        ? AccessMethodUtils.createExternalDataLookupUnnestMap(dataSourceOp, dataset, recordType,
                secondaryIndexUnnestOp, context, retainInput, retainNull)
        : AccessMethodUtils.createPrimaryIndexUnnestMap(dataSourceOp, dataset, recordType, metaRecordType,
                secondaryIndexUnnestOp, context, true, retainInput, false, false);
}
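Both AsterixConstantValue usages here parameterize the create-mbr function: the number of dimensions and the MBR field index are passed as AInt32 constants alongside the spatial search key. A minimal sketch of one such call outside the loop, assuming the same imports as the method above; searchKeyExpr is the spatial constant from the optimized predicate, and the numbers are hypothetical:
// Sketch only: extract MBR field 0 of a 2-D spatial object.
AbstractFunctionCallExpression createMBR =
        new ScalarFunctionCallExpression(FunctionUtil.getFunctionInfo(BuiltinFunctions.CREATE_MBR));
createMBR.getArguments().add(new MutableObject<>(searchKeyExpr)); // the spatial object being probed
createMBR.getArguments().add(new MutableObject<ILogicalExpression>(
        new ConstantExpression(new AsterixConstantValue(new AInt32(2))))); // number of dimensions
createMBR.getArguments().add(new MutableObject<ILogicalExpression>(
        new ConstantExpression(new AsterixConstantValue(new AInt32(0))))); // which MBR field to extract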
Use of org.apache.asterix.om.constants.AsterixConstantValue in project asterixdb by apache.
Class InlineAllNtsInSubplanVisitor, method createRecordConstructorAssignOp.
private Pair<ILogicalOperator, LogicalVariable> createRecordConstructorAssignOp(Set<LogicalVariable> inputLiveVars) {
// Creates a nested record.
List<Mutable<ILogicalExpression>> recordConstructorArgs = new ArrayList<>();
for (LogicalVariable inputLiveVar : inputLiveVars) {
if (!correlatedKeyVars.contains(inputLiveVar)) {
recordConstructorArgs.add(new MutableObject<ILogicalExpression>(new ConstantExpression(
        new AsterixConstantValue(new AString(Integer.toString(inputLiveVar.getId()))))));
recordConstructorArgs.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(inputLiveVar)));
}
}
LogicalVariable recordVar = context.newVar();
Mutable<ILogicalExpression> recordExprRef = new MutableObject<ILogicalExpression>(new ScalarFunctionCallExpression(
        FunctionUtil.getFunctionInfo(BuiltinFunctions.OPEN_RECORD_CONSTRUCTOR), recordConstructorArgs));
AssignOperator assignOp = new AssignOperator(recordVar, recordExprRef);
return new Pair<ILogicalOperator, LogicalVariable>(assignOp, recordVar);
}