Use of org.apache.hyracks.algebricks.common.exceptions.AlgebricksException in project asterixdb by apache.
The class FullTextContainsParameterCheckRule, method checkValueForThirdParameter.
/**
 * Checks the options record passed as the third argument of the given ftcontains()
 * function and appends each validated option name/value pair to newArgs.
 *
 * @param expr    the record-constructor expression holding the options
 * @param newArgs the rewritten argument list of ftcontains()
 * @throws AlgebricksException if the options record is malformed
 */
void checkValueForThirdParameter(Mutable<ILogicalExpression> expr,
        List<Mutable<ILogicalExpression>> newArgs) throws AlgebricksException {
    // Get the last parameter - this should be a record-constructor.
    AbstractFunctionCallExpression openRecConsExpr = (AbstractFunctionCallExpression) expr.getValue();
    FunctionIdentifier openRecConsFi = openRecConsExpr.getFunctionIdentifier();
    if (openRecConsFi != BuiltinFunctions.OPEN_RECORD_CONSTRUCTOR
            && openRecConsFi != BuiltinFunctions.CLOSED_RECORD_CONSTRUCTOR) {
        throw new AlgebricksException("ftcontains() option should be in the form of a record { }.");
    }
    // We multiply by 2 because the arguments are laid out as: [expr, val, expr1, val1, ...]
    if (openRecConsExpr.getArguments().size() > FullTextContainsDescriptor.getParamTypeMap().size() * 2) {
        throw new AlgebricksException("Too many options were specified.");
    }
    for (int i = 0; i < openRecConsExpr.getArguments().size(); i = i + 2) {
        ILogicalExpression optionExpr = openRecConsExpr.getArguments().get(i).getValue();
        ILogicalExpression optionExprVal = openRecConsExpr.getArguments().get(i + 1).getValue();
        if (optionExpr.getExpressionTag() != LogicalExpressionTag.CONSTANT) {
            throw new AlgebricksException("Options must be in the form of constant strings. Check that the option at "
                    + (i / 2 + 1) + " is indeed a constant string");
        }
        String option = ConstantExpressionUtil.getStringArgument(openRecConsExpr, i).toLowerCase();
        if (!FullTextContainsDescriptor.getParamTypeMap().containsKey(option)) {
            throw new AlgebricksException("The given option " + option + " is not a valid argument to ftcontains()");
        }
        boolean typeError = false;
        String optionTypeStringVal = null;
        // If the option value is a constant, then we can check it here.
        if (optionExprVal.getExpressionTag() == LogicalExpressionTag.CONSTANT) {
            switch (FullTextContainsDescriptor.getParamTypeMap().get(option)) {
                case STRING:
                    optionTypeStringVal = ConstantExpressionUtil.getStringArgument(openRecConsExpr, i + 1);
                    if (optionTypeStringVal == null) {
                        // The value was not a string constant.
                        typeError = true;
                    } else {
                        optionTypeStringVal = optionTypeStringVal.toLowerCase();
                    }
                    break;
                default:
                    // Currently, we only have a string parameter, so the flow doesn't reach here.
                    typeError = true;
                    break;
            }
        }
        if (typeError) {
            throw new AlgebricksException("The given value for option " + option + " was not of the expected type");
        }
        // Check the validity of the option value.
        switch (option) {
            case FullTextContainsDescriptor.SEARCH_MODE_OPTION:
                checkSearchModeOption(optionTypeStringVal);
                break;
            default:
                break;
        }
        // Add this option's name and value to the arguments of ftcontains().
        newArgs.add(new MutableObject<ILogicalExpression>(optionExpr));
        newArgs.add(new MutableObject<ILogicalExpression>(optionExprVal));
    }
}
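The loop above relies on the record-constructor's flattened argument layout, where even positions hold option names and odd positions hold option values. The following standalone sketch walks the same layout over plain strings; the "mode"/"any" pair is a hypothetical option used only for illustration, not taken from the rule.

import java.util.Arrays;
import java.util.List;

public class FlatRecordArgsSketch {
    public static void main(String[] args) {
        // Flattened record-constructor arguments: [name0, val0, name1, val1, ...].
        // "mode" -> "any" is a hypothetical ftcontains() option pair.
        List<String> flatArgs = Arrays.asList("mode", "any");
        for (int i = 0; i < flatArgs.size(); i += 2) {
            String optionName = flatArgs.get(i).toLowerCase(); // even slot: option name
            String optionValue = flatArgs.get(i + 1);          // odd slot: option value
            System.out.println(optionName + " -> " + optionValue);
        }
    }
}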
Use of org.apache.hyracks.algebricks.common.exceptions.AlgebricksException in project asterixdb by apache.
The class LoadRecordFieldsRule, method findAndEliminateRedundantFieldAccess.
/**
 * Rewrites
 *   assign $x := field-access($y, "field")
 *   assign $y := record-constructor { "field": Expr, ... }
 * into
 *   assign $x := Expr
 *   assign $y := record-constructor { "field": Expr, ... }
 */
private static boolean findAndEliminateRedundantFieldAccess(AssignOperator assign, IOptimizationContext context)
        throws AlgebricksException {
    ILogicalExpression expr = getFirstExpr(assign);
    AbstractFunctionCallExpression f = (AbstractFunctionCallExpression) expr;
    ILogicalExpression arg0 = f.getArguments().get(0).getValue();
    if (arg0.getExpressionTag() != LogicalExpressionTag.VARIABLE) {
        return false;
    }
    VariableReferenceExpression vre = (VariableReferenceExpression) arg0;
    LogicalVariable recordVar = vre.getVariableReference();
    ILogicalExpression arg1 = f.getArguments().get(1).getValue();
    if (arg1.getExpressionTag() != LogicalExpressionTag.CONSTANT) {
        return false;
    }
    IVariableTypeEnvironment typeEnvironment = context.getOutputTypeEnvironment(assign);
    ConstantExpression ce = (ConstantExpression) arg1;
    ILogicalExpression fldExpr;
    if (f.getFunctionIdentifier().equals(BuiltinFunctions.FIELD_ACCESS_BY_NAME)) {
        String fldName = ((AString) ((AsterixConstantValue) ce.getValue()).getObject()).getStringValue();
        fldExpr = findFieldExpression(assign, recordVar, fldName, typeEnvironment,
                (name, expression, env) -> findFieldByNameFromRecordConstructor(name, expression));
    } else if (f.getFunctionIdentifier().equals(BuiltinFunctions.FIELD_ACCESS_BY_INDEX)) {
        Integer fldIdx = ((AInt32) ((AsterixConstantValue) ce.getValue()).getObject()).getIntegerValue();
        fldExpr = findFieldExpression(assign, recordVar, fldIdx, typeEnvironment,
                LoadRecordFieldsRule::findFieldByIndexFromRecordConstructor);
    } else if (f.getFunctionIdentifier().equals(BuiltinFunctions.FIELD_ACCESS_NESTED)) {
        return false;
    } else {
        throw new IllegalStateException();
    }
    if (fldExpr == null) {
        return false;
    }
    // Check the liveness of the new expression: every variable it uses must be
    // live at this assign, otherwise the substitution is not safe.
    List<LogicalVariable> usedVariables = new ArrayList<>();
    fldExpr.getUsedVariables(usedVariables);
    List<LogicalVariable> liveInputVars = new ArrayList<>();
    VariableUtilities.getLiveVariables(assign, liveInputVars);
    usedVariables.removeAll(liveInputVars);
    if (usedVariables.isEmpty()) {
        assign.getExpressions().get(0).setValue(fldExpr);
        return true;
    } else {
        return false;
    }
}
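The helper findFieldByNameFromRecordConstructor is not shown in this excerpt. As a rough sketch of the pair-wise lookup it plausibly performs, the following plain-Java stand-in searches a record's name/value pairs and returns null on a miss, mirroring the fldExpr == null bail-out above; the Map-based types are assumptions, not the Algebricks classes.

import java.util.LinkedHashMap;
import java.util.Map;

public class FieldByNameSketch {
    // Stand-in for scanning a record constructor's [name0, val0, name1, val1, ...]
    // arguments; a LinkedHashMap plays the role of the constructor's pairs.
    static Object findFieldByName(String fldName, Map<String, Object> recordPairs) {
        for (Map.Entry<String, Object> e : recordPairs.entrySet()) {
            if (e.getKey().equals(fldName)) {
                return e.getValue(); // the Expr bound to "field" in the rewrite above
            }
        }
        return null; // no match: the rule gives up, as in the fldExpr == null check
    }

    public static void main(String[] args) {
        Map<String, Object> rec = new LinkedHashMap<>();
        rec.put("field", "Expr");
        System.out.println(findFieldByName("field", rec)); // prints Expr
    }
}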
Use of org.apache.hyracks.algebricks.common.exceptions.AlgebricksException in project asterixdb by apache.
The class IndexTupleTranslator, method getMetadataEntityFromTuple.
@Override
public Index getMetadataEntityFromTuple(ITupleReference frameTuple) throws MetadataException, HyracksDataException {
    byte[] serRecord = frameTuple.getFieldData(INDEX_PAYLOAD_TUPLE_FIELD_INDEX);
    int recordStartOffset = frameTuple.getFieldStart(INDEX_PAYLOAD_TUPLE_FIELD_INDEX);
    int recordLength = frameTuple.getFieldLength(INDEX_PAYLOAD_TUPLE_FIELD_INDEX);
    ByteArrayInputStream stream = new ByteArrayInputStream(serRecord, recordStartOffset, recordLength);
    DataInput in = new DataInputStream(stream);
    ARecord rec = recordSerde.deserialize(in);
    String dvName = ((AString) rec.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_DATAVERSENAME_FIELD_INDEX)).getStringValue();
    String dsName = ((AString) rec.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_DATASETNAME_FIELD_INDEX)).getStringValue();
    String indexName = ((AString) rec.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_INDEXNAME_FIELD_INDEX)).getStringValue();
    IndexType indexStructure = IndexType.valueOf(((AString) rec.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_INDEXSTRUCTURE_FIELD_INDEX)).getStringValue());
    IACursor fieldNameCursor = ((AOrderedList) rec.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_SEARCHKEY_FIELD_INDEX)).getCursor();
    List<List<String>> searchKey = new ArrayList<>();
    AOrderedList fieldNameList;
    while (fieldNameCursor.next()) {
        fieldNameList = (AOrderedList) fieldNameCursor.get();
        IACursor nestedFieldNameCursor = fieldNameList.getCursor();
        List<String> nestedFieldName = new ArrayList<>();
        while (nestedFieldNameCursor.next()) {
            nestedFieldName.add(((AString) nestedFieldNameCursor.get()).getStringValue());
        }
        searchKey.add(nestedFieldName);
    }
    int indexKeyTypeFieldPos = rec.getType().getFieldIndex(INDEX_SEARCHKEY_TYPE_FIELD_NAME);
    IACursor fieldTypeCursor = new ACollectionCursor();
    if (indexKeyTypeFieldPos > 0) {
        fieldTypeCursor = ((AOrderedList) rec.getValueByPos(indexKeyTypeFieldPos)).getCursor();
    }
    List<IAType> searchKeyType = new ArrayList<>(searchKey.size());
    while (fieldTypeCursor.next()) {
        String typeName = ((AString) fieldTypeCursor.get()).getStringValue();
        IAType fieldType = BuiltinTypeMap.getTypeFromTypeName(metadataNode, jobId, dvName, typeName, false);
        searchKeyType.add(fieldType);
    }
    int isEnforcedFieldPos = rec.getType().getFieldIndex(INDEX_ISENFORCED_FIELD_NAME);
    Boolean isEnforcingKeys = false;
    if (isEnforcedFieldPos > 0) {
        isEnforcingKeys = ((ABoolean) rec.getValueByPos(isEnforcedFieldPos)).getBoolean();
    }
    Boolean isPrimaryIndex = ((ABoolean) rec.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_ISPRIMARY_FIELD_INDEX)).getBoolean();
    int pendingOp = ((AInt32) rec.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_PENDINGOP_FIELD_INDEX)).getIntegerValue();
    // Check if there is a gram length as well.
    int gramLength = -1;
    int gramLenPos = rec.getType().getFieldIndex(GRAM_LENGTH_FIELD_NAME);
    if (gramLenPos >= 0) {
        gramLength = ((AInt32) rec.getValueByPos(gramLenPos)).getIntegerValue();
    }
    // Read a field-source-indicator field.
    List<Integer> keyFieldSourceIndicator = new ArrayList<>();
    int keyFieldSourceIndicatorIndex = rec.getType().getFieldIndex(INDEX_SEARCHKEY_SOURCE_INDICATOR_FIELD_NAME);
    if (keyFieldSourceIndicatorIndex >= 0) {
        IACursor cursor = ((AOrderedList) rec.getValueByPos(keyFieldSourceIndicatorIndex)).getCursor();
        while (cursor.next()) {
            keyFieldSourceIndicator.add((int) ((AInt8) cursor.get()).getByteValue());
        }
    } else {
        for (int index = 0; index < searchKey.size(); ++index) {
            keyFieldSourceIndicator.add(0);
        }
    }
    // Index key type information is not persisted, thus we extract type information from the record metadata.
    if (searchKeyType.isEmpty()) {
        try {
            Dataset dSet = metadataNode.getDataset(jobId, dvName, dsName);
            String datatypeName = dSet.getItemTypeName();
            String datatypeDataverseName = dSet.getItemTypeDataverseName();
            ARecordType recordDt = (ARecordType) metadataNode.getDatatype(jobId, datatypeDataverseName, datatypeName).getDatatype();
            String metatypeName = dSet.getMetaItemTypeName();
            String metatypeDataverseName = dSet.getMetaItemTypeDataverseName();
            ARecordType metaDt = null;
            if (metatypeName != null && metatypeDataverseName != null) {
                metaDt = (ARecordType) metadataNode.getDatatype(jobId, metatypeDataverseName, metatypeName).getDatatype();
            }
            try {
                searchKeyType = KeyFieldTypeUtil.getKeyTypes(recordDt, metaDt, searchKey, keyFieldSourceIndicator);
            } catch (AlgebricksException e) {
                throw new MetadataException(e);
            }
        } catch (RemoteException re) {
            throw HyracksDataException.create(re);
        }
    }
    return new Index(dvName, dsName, indexName, indexStructure, searchKey, keyFieldSourceIndicator, searchKeyType, gramLength, isEnforcingKeys, isPrimaryIndex, pendingOp);
}
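Several of the reads above share one optional-field pattern: look the field up by name, and keep a default when the returned position signals absence. A minimal sketch of that guard, assuming a toy record in place of ARecordType and mimicking getFieldIndex returning a negative position for missing fields:

import java.util.Arrays;
import java.util.List;

public class OptionalFieldSketch {
    static final List<String> FIELD_NAMES = Arrays.asList("DataverseName", "DatasetName", "GramLength");
    static final Object[] FIELD_VALUES = { "Metadata", "ds", 3 };

    // Mirrors rec.getType().getFieldIndex(name): negative when the field is absent.
    static int getFieldIndex(String name) {
        return FIELD_NAMES.indexOf(name);
    }

    public static void main(String[] args) {
        int gramLength = -1; // default kept when the optional field is missing
        int gramLenPos = getFieldIndex("GramLength");
        if (gramLenPos >= 0) {
            gramLength = (Integer) FIELD_VALUES[gramLenPos];
        }
        System.out.println("gramLength = " + gramLength);
    }
}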
Use of org.apache.hyracks.algebricks.common.exceptions.AlgebricksException in project asterixdb by apache.
The class ExternalIndexingOperations, method getSnapshotFromExternalFileSystem.
public static List<ExternalFile> getSnapshotFromExternalFileSystem(Dataset dataset) throws AlgebricksException {
    ArrayList<ExternalFile> files = new ArrayList<>();
    ExternalDatasetDetails datasetDetails = (ExternalDatasetDetails) dataset.getDatasetDetails();
    try {
        // Create the file system object.
        FileSystem fs = getFileSystemObject(datasetDetails.getProperties());
        // Get the paths of the dataset.
        String path = datasetDetails.getProperties().get(ExternalDataConstants.KEY_PATH);
        String[] paths = path.split(",");
        // Add the file statuses to files.
        for (String aPath : paths) {
            FileStatus[] fileStatuses = fs.listStatus(new Path(aPath));
            for (int i = 0; i < fileStatuses.length; i++) {
                int nextFileNumber = files.size();
                handleFile(dataset, files, fs, fileStatuses[i], nextFileNumber);
            }
        }
        // Close the file system.
        fs.close();
        if (files.isEmpty()) {
            throw new AlgebricksException("The file snapshot retrieved from the external file system is empty");
        }
        return files;
    } catch (Exception e) {
        LOGGER.log(Level.WARNING, "Exception while trying to get snapshot from external system", e);
        // Chain the original exception as the cause instead of concatenating it into the message.
        throw new AlgebricksException("Unable to get the list of HDFS files", e);
    }
}
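One caveat in the method above: fs.close() sits on the success path only, so the FileSystem handle leaks if listStatus or handleFile throws. A minimal try-with-resources variant of the listing step, using the standard Hadoop FileSystem API; the Configuration-based lookup here is an assumed stand-in for the project's getFileSystemObject:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class SnapshotListingSketch {
    // Lists all file statuses under the given comma-separated paths, closing the
    // FileSystem on every exit path. Configuration setup is left to the caller.
    static List<FileStatus> listAll(Configuration conf, String commaSeparatedPaths) throws Exception {
        List<FileStatus> statuses = new ArrayList<>();
        try (FileSystem fs = FileSystem.get(conf)) { // closed even if listStatus throws
            for (String aPath : commaSeparatedPaths.split(",")) {
                statuses.addAll(Arrays.asList(fs.listStatus(new Path(aPath))));
            }
        }
        return statuses;
    }
}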
Use of org.apache.hyracks.algebricks.common.exceptions.AlgebricksException in project asterixdb by apache.
The class AbstractPhysicalOperator, method buildPipelineWithProjection.
private AlgebricksPipeline buildPipelineWithProjection(ILogicalPlan p, IOperatorSchema outerPlanSchema,
        AbstractOperatorWithNestedPlans npOp, IOperatorSchema opSchema, PlanCompiler pc) throws AlgebricksException {
    if (p.getRoots().size() > 1) {
        throw new NotImplementedException("Nested plans with several roots are not supported.");
    }
    JobSpecification nestedJob = pc.compilePlan(p, outerPlanSchema, null);
    ILogicalOperator topOpInSubplan = p.getRoots().get(0).getValue();
    JobGenContext context = pc.getContext();
    IOperatorSchema topOpInSubplanScm = context.getSchema(topOpInSubplan);
    opSchema.addAllVariables(topOpInSubplanScm);
    Map<OperatorDescriptorId, IOperatorDescriptor> opMap = nestedJob.getOperatorMap();
    if (opMap.size() != 1) {
        throw new AlgebricksException("Attempting to construct a nested plan with " + opMap.size()
                + " operator descriptors. Currently, nested plans can only consist of linear pipelines of Asterix micro operators.");
    }
    for (Map.Entry<OperatorDescriptorId, IOperatorDescriptor> opEntry : opMap.entrySet()) {
        IOperatorDescriptor opd = opEntry.getValue();
        if (!(opd instanceof AlgebricksMetaOperatorDescriptor)) {
            throw new AlgebricksException("Can only generate Hyracks jobs for pipelinable Asterix nested plans, not for "
                    + opd.getClass().getName());
        }
        AlgebricksMetaOperatorDescriptor amod = (AlgebricksMetaOperatorDescriptor) opd;
        // We assume that the top operator in the subplan already does the
        // projection for us.
        return amod.getPipeline();
    }
    throw new IllegalStateException();
}
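Because the size check guarantees exactly one entry, the loop that returns on its first iteration is equivalent to pulling the single value out of the map directly. A small sketch of that pattern with plain collections; the String payload stands in for the Hyracks operator descriptors:

import java.util.Collections;
import java.util.Map;

public class SingleEntrySketch {
    static String pipelineOf(Map<Integer, String> opMap) {
        if (opMap.size() != 1) {
            throw new IllegalStateException("expected exactly one operator, got " + opMap.size());
        }
        // Equivalent to the loop above, which always returns on its first iteration.
        return opMap.values().iterator().next();
    }

    public static void main(String[] args) {
        System.out.println(pipelineOf(Collections.singletonMap(0, "micro-op pipeline")));
    }
}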