Use of org.apache.asterix.om.types.ARecordType in project asterixdb by apache.
The class IntroduceDynamicTypeCastForExternalFunctionRule, method rewriteFunctionArgs.
private boolean rewriteFunctionArgs(ILogicalOperator op, Mutable<ILogicalExpression> expRef, IOptimizationContext context) throws AlgebricksException {
    ILogicalExpression expr = expRef.getValue();
    if (expr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL || !(expr instanceof ScalarFunctionCallExpression)) {
        return false;
    }
    ScalarFunctionCallExpression funcCallExpr = (ScalarFunctionCallExpression) expr;
    boolean changed = false;
    IAType inputRecordType;
    ARecordType requiredRecordType;
    for (int iter1 = 0; iter1 < funcCallExpr.getArguments().size(); iter1++) {
        inputRecordType = (IAType) op.computeOutputTypeEnvironment(context)
                .getType(funcCallExpr.getArguments().get(iter1).getValue());
        if (!(((ExternalScalarFunctionInfo) funcCallExpr.getFunctionInfo()).getArgumenTypes()
                .get(iter1) instanceof ARecordType)) {
            continue;
        }
        requiredRecordType = (ARecordType) ((ExternalScalarFunctionInfo) funcCallExpr.getFunctionInfo())
                .getArgumenTypes().get(iter1);
        /**
         * The input record type can be a union type,
         * for the case when it comes from a subplan or left-outer join.
         */
        boolean checkUnknown = false;
        while (NonTaggedFormatUtil.isOptional(inputRecordType)) {
            /** while-loop for the case there is a nested multi-level union */
            inputRecordType = ((AUnionType) inputRecordType).getActualType();
            checkUnknown = true;
        }
        boolean castFlag = !IntroduceDynamicTypeCastRule.compatible(requiredRecordType, inputRecordType);
        if (castFlag || checkUnknown) {
            AbstractFunctionCallExpression castFunc =
                    new ScalarFunctionCallExpression(FunctionUtil.getFunctionInfo(BuiltinFunctions.CAST_TYPE));
            castFunc.getArguments().add(funcCallExpr.getArguments().get(iter1));
            TypeCastUtils.setRequiredAndInputTypes(castFunc, requiredRecordType, inputRecordType);
            funcCallExpr.getArguments().set(iter1, new MutableObject<>(castFunc));
            changed = true;
        }
    }
    return changed;
}
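To show where a helper like rewriteFunctionArgs is typically hooked in, here is a minimal sketch of an Algebricks rewrite rule skeleton. It assumes only the standard IAlgebraicRewriteRule contract (rewritePre/rewritePost returning whether the plan changed); the class name and the collectExpressions/rewriteFunctionArgs stubs are hypothetical placeholders, not the project's actual code.

// Minimal, hypothetical skeleton of an Algebricks rewrite rule (illustrative only).
import java.util.Collections;
import java.util.List;

import org.apache.commons.lang3.mutable.Mutable;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression;
import org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator;
import org.apache.hyracks.algebricks.core.algebra.base.IOptimizationContext;
import org.apache.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;

public class ExampleDynamicCastRule implements IAlgebraicRewriteRule {

    @Override
    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
            throws AlgebricksException {
        // This kind of rule only acts on the way back up the plan tree.
        return false;
    }

    @Override
    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
            throws AlgebricksException {
        ILogicalOperator op = opRef.getValue();
        boolean changed = false;
        // Visit each expression used by the operator and try to rewrite it, e.g. by
        // wrapping record arguments of external functions with a cast.
        for (Mutable<ILogicalExpression> exprRef : collectExpressions(op)) {
            changed |= rewriteFunctionArgs(op, exprRef, context);
        }
        return changed;
    }

    // Hypothetical stand-ins for the real expression traversal and rewrite logic.
    private List<Mutable<ILogicalExpression>> collectExpressions(ILogicalOperator op) {
        return Collections.emptyList();
    }

    private boolean rewriteFunctionArgs(ILogicalOperator op, Mutable<ILogicalExpression> exprRef,
            IOptimizationContext context) throws AlgebricksException {
        return false; // see the method above for the actual logic
    }
}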
Use of org.apache.asterix.om.types.ARecordType in project asterixdb by apache.
The class IntroduceDynamicTypeCastRule, method rewritePost.
@Override
public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
    // Depending on the operator type, we need to extract the following pieces of information.
    AbstractLogicalOperator op;
    ARecordType requiredRecordType;
    LogicalVariable recordVar;
    // We identify INSERT and DISTRIBUTE_RESULT operators.
    AbstractLogicalOperator op1 = (AbstractLogicalOperator) opRef.getValue();
    switch (op1.getOperatorTag()) {
        case SINK:
        case DELEGATE_OPERATOR: {
            /**
             * pattern match: commit insert assign
             * resulting plan: commit-insert-project-assign
             */
            if (op1.getOperatorTag() == LogicalOperatorTag.DELEGATE_OPERATOR) {
                DelegateOperator eOp = (DelegateOperator) op1;
                if (!(eOp.getDelegate() instanceof CommitOperator)) {
                    return false;
                }
            }
            AbstractLogicalOperator op2 = (AbstractLogicalOperator) op1.getInputs().get(0).getValue();
            if (op2.getOperatorTag() == LogicalOperatorTag.INSERT_DELETE_UPSERT) {
                InsertDeleteUpsertOperator insertDeleteOp = (InsertDeleteUpsertOperator) op2;
                if (insertDeleteOp.getOperation() == InsertDeleteUpsertOperator.Kind.DELETE) {
                    return false;
                }
                // Remember this is the operator we need to modify.
                op = insertDeleteOp;
                // Derive the required ARecordType based on the schema of the DataSource.
                DataSource dataSource = (DataSource) insertDeleteOp.getDataSource();
                requiredRecordType = (ARecordType) dataSource.getItemType();
                // Derive the variable which we will potentially wrap with cast/null functions.
                ILogicalExpression expr = insertDeleteOp.getPayloadExpression().getValue();
                List<LogicalVariable> payloadVars = new ArrayList<>();
                expr.getUsedVariables(payloadVars);
                recordVar = payloadVars.get(0);
            } else {
                return false;
            }
            break;
        }
        case DISTRIBUTE_RESULT: {
            // First, see if there was an output-record-type specified.
            requiredRecordType = (ARecordType) op1.getAnnotations().get("output-record-type");
            if (requiredRecordType == null) {
                return false;
            }
            // Remember this is the operator we need to modify.
            op = op1;
            recordVar = ((VariableReferenceExpression) ((DistributeResultOperator) op).getExpressions().get(0)
                    .getValue()).getVariableReference();
            break;
        }
        default: {
            return false;
        }
    }
    // Derive the statically-computed type of the record.
    IVariableTypeEnvironment env = op.computeOutputTypeEnvironment(context);
    IAType inputRecordType = (IAType) env.getVarType(recordVar);
    /** The input record type can be a union type -- for the case when it comes from a subplan or left-outer join. */
    boolean checkUnknown = false;
    while (NonTaggedFormatUtil.isOptional(inputRecordType)) {
        /** while-loop for the case there is a nested multi-level union */
        inputRecordType = ((AUnionType) inputRecordType).getActualType();
        checkUnknown = true;
    }
    /** See whether the input record type needs to be cast. */
    boolean cast = !compatible(requiredRecordType, inputRecordType);
    if (checkUnknown) {
        recordVar = addWrapperFunction(requiredRecordType, recordVar, op, context, BuiltinFunctions.CHECK_UNKNOWN);
    }
    if (cast) {
        addWrapperFunction(requiredRecordType, recordVar, op, context, BuiltinFunctions.CAST_TYPE);
    }
    return cast || checkUnknown;
}
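For a standalone view of the ARecordType values these rules compare, the following sketch builds a closed record type by hand. The type name and field names are made up for illustration; only the ARecordType constructor and the BuiltinType constants are assumed from asterixdb.

import org.apache.asterix.om.types.ARecordType;
import org.apache.asterix.om.types.BuiltinType;
import org.apache.asterix.om.types.IAType;

public class RequiredRecordTypeExample {
    public static void main(String[] args) throws Exception {
        // Hypothetical "required" record type, analogous to what dataSource.getItemType()
        // returns above: a closed record with two string fields. Names are illustrative.
        String[] fieldNames = { "DataverseName", "DatasetName" };
        IAType[] fieldTypes = { BuiltinType.ASTRING, BuiltinType.ASTRING };
        ARecordType requiredRecordType = new ARecordType("DatasetKeyType", fieldNames, fieldTypes, false);
        // The rules above only inject a CAST_TYPE (or CHECK_UNKNOWN) wrapper when the
        // statically computed input type does not already match a required type like this.
        System.out.println(requiredRecordType.getTypeName() + ", open=" + requiredRecordType.isOpen());
    }
}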
Use of org.apache.asterix.om.types.ARecordType in project asterixdb by apache.
The class ConnectorApiServletTest, method testGet.
@Test
public void testGet() throws Exception {
    // Starts the test asterixdb cluster.
    SqlppExecutionTest.setUp();
    // Configures a test connector api servlet.
    ConnectorApiServlet let = new ConnectorApiServlet(new ConcurrentHashMap<>(), new String[] { "/" },
            (ICcApplicationContext) ExecutionTestUtil.integrationUtil.cc.getApplicationContext());
    Map<String, NodeControllerInfo> nodeMap = new HashMap<>();
    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    PrintWriter outputWriter = new PrintWriter(outputStream);
    // Creates mocks.
    IHyracksClientConnection mockHcc = mock(IHyracksClientConnection.class);
    NodeControllerInfo mockInfo1 = mock(NodeControllerInfo.class);
    NodeControllerInfo mockInfo2 = mock(NodeControllerInfo.class);
    IServletRequest mockRequest = mock(IServletRequest.class);
    IServletResponse mockResponse = mock(IServletResponse.class);
    FullHttpRequest mockHttpRequest = mock(FullHttpRequest.class);
    // Puts the mock Hyracks connection into the servlet context.
    let.ctx().put(ServletConstants.HYRACKS_CONNECTION_ATTR, mockHcc);
    // Sets up mock returns.
    when(mockRequest.getHttpRequest()).thenReturn(mockHttpRequest);
    when(mockHttpRequest.method()).thenReturn(HttpMethod.GET);
    when(mockRequest.getParameter("dataverseName")).thenReturn("Metadata");
    when(mockRequest.getParameter("datasetName")).thenReturn("Dataset");
    when(mockResponse.writer()).thenReturn(outputWriter);
    when(mockHcc.getNodeControllerInfos()).thenReturn(nodeMap);
    when(mockInfo1.getNetworkAddress()).thenReturn(new NetworkAddress("127.0.0.1", 3099));
    when(mockInfo2.getNetworkAddress()).thenReturn(new NetworkAddress("127.0.0.2", 3099));
    // Invokes the servlet with the mocked request and response.
    nodeMap.put("asterix_nc1", mockInfo1);
    nodeMap.put("asterix_nc2", mockInfo2);
    let.handle(mockRequest, mockResponse);
    // Constructs the actual response.
    ObjectMapper om = new ObjectMapper();
    ObjectNode actualResponse = (ObjectNode) om.readTree(outputStream.toString());
    // Checks the temp-or-not flag, the primary key, and the data type of the dataset.
    boolean temp = actualResponse.get("temp").asBoolean();
    Assert.assertFalse(temp);
    String primaryKey = actualResponse.get("keys").asText();
    Assert.assertEquals("DataverseName,DatasetName", primaryKey);
    ARecordType recordType = (ARecordType) JSONDeserializerForTypes.convertFromJSON(actualResponse.get("type"));
    Assert.assertEquals(getMetadataRecordType("Metadata", "Dataset"), recordType);
    // Checks the correctness of the returned splits.
    ArrayNode splits = (ArrayNode) actualResponse.get("splits");
    String path = (splits.get(0)).get("path").asText();
    Assert.assertTrue(path.endsWith("Metadata/Dataset_idx_Dataset"));
    // Tears down the asterixdb cluster.
    SqlppExecutionTest.tearDown();
}
Use of org.apache.asterix.om.types.ARecordType in project asterixdb by apache.
The class ConnectorApiServletTest, method getMetadataRecordType.
private ARecordType getMetadataRecordType(String dataverseName, String datasetName) throws Exception {
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    // Retrieves the record type of the dataset within a metadata transaction.
    MetadataProvider metadataProvider = new MetadataProvider(
            (ICcApplicationContext) ExecutionTestUtil.integrationUtil.cc.getApplicationContext(), null,
            new StorageComponentProvider());
    try {
        metadataProvider.setMetadataTxnContext(mdTxnCtx);
        Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
        ARecordType recordType =
                (ARecordType) metadataProvider.findType(dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
        // Commits the metadata transaction.
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        return recordType;
    } finally {
        metadataProvider.getLocks().unlock();
    }
}
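Related metadata callers that can fail before commit usually pair beginTransaction/commitTransaction with an abort on the failure path. The sketch below shows that pattern in isolation; doWork is a hypothetical placeholder, and only the MetadataManager begin/commit/abort calls are assumed from asterixdb.

import org.apache.asterix.metadata.MetadataManager;
import org.apache.asterix.metadata.MetadataTransactionContext;

public class MetadataTxnPatternExample {

    // Hypothetical placeholder for whatever metadata reads the caller performs.
    static Object doWork(MetadataTransactionContext mdTxnCtx) throws Exception {
        return null;
    }

    public static Object runInMetadataTxn() throws Exception {
        MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        try {
            Object result = doWork(mdTxnCtx);
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            return result;
        } catch (Exception e) {
            // Roll back the metadata transaction if anything fails before the commit.
            MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
            throw e;
        }
    }
}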
Use of org.apache.asterix.om.types.ARecordType in project asterixdb by apache.
The class IndexTupleTranslator, method getMetadataEntityFromTuple.
@Override
public Index getMetadataEntityFromTuple(ITupleReference frameTuple) throws MetadataException, HyracksDataException {
    byte[] serRecord = frameTuple.getFieldData(INDEX_PAYLOAD_TUPLE_FIELD_INDEX);
    int recordStartOffset = frameTuple.getFieldStart(INDEX_PAYLOAD_TUPLE_FIELD_INDEX);
    int recordLength = frameTuple.getFieldLength(INDEX_PAYLOAD_TUPLE_FIELD_INDEX);
    ByteArrayInputStream stream = new ByteArrayInputStream(serRecord, recordStartOffset, recordLength);
    DataInput in = new DataInputStream(stream);
    ARecord rec = recordSerde.deserialize(in);
    String dvName = ((AString) rec.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_DATAVERSENAME_FIELD_INDEX))
            .getStringValue();
    String dsName = ((AString) rec.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_DATASETNAME_FIELD_INDEX))
            .getStringValue();
    String indexName = ((AString) rec.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_INDEXNAME_FIELD_INDEX))
            .getStringValue();
    IndexType indexStructure = IndexType.valueOf(
            ((AString) rec.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_INDEXSTRUCTURE_FIELD_INDEX)).getStringValue());
    IACursor fieldNameCursor =
            ((AOrderedList) rec.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_SEARCHKEY_FIELD_INDEX)).getCursor();
    List<List<String>> searchKey = new ArrayList<>();
    AOrderedList fieldNameList;
    while (fieldNameCursor.next()) {
        fieldNameList = (AOrderedList) fieldNameCursor.get();
        IACursor nestedFieldNameCursor = (fieldNameList.getCursor());
        List<String> nestedFieldName = new ArrayList<>();
        while (nestedFieldNameCursor.next()) {
            nestedFieldName.add(((AString) nestedFieldNameCursor.get()).getStringValue());
        }
        searchKey.add(nestedFieldName);
    }
    int indexKeyTypeFieldPos = rec.getType().getFieldIndex(INDEX_SEARCHKEY_TYPE_FIELD_NAME);
    IACursor fieldTypeCursor = new ACollectionCursor();
    if (indexKeyTypeFieldPos > 0) {
        fieldTypeCursor = ((AOrderedList) rec.getValueByPos(indexKeyTypeFieldPos)).getCursor();
    }
    List<IAType> searchKeyType = new ArrayList<>(searchKey.size());
    while (fieldTypeCursor.next()) {
        String typeName = ((AString) fieldTypeCursor.get()).getStringValue();
        IAType fieldType = BuiltinTypeMap.getTypeFromTypeName(metadataNode, jobId, dvName, typeName, false);
        searchKeyType.add(fieldType);
    }
    int isEnforcedFieldPos = rec.getType().getFieldIndex(INDEX_ISENFORCED_FIELD_NAME);
    Boolean isEnforcingKeys = false;
    if (isEnforcedFieldPos > 0) {
        isEnforcingKeys = ((ABoolean) rec.getValueByPos(isEnforcedFieldPos)).getBoolean();
    }
    Boolean isPrimaryIndex = ((ABoolean) rec.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_ISPRIMARY_FIELD_INDEX))
            .getBoolean();
    int pendingOp = ((AInt32) rec.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_PENDINGOP_FIELD_INDEX))
            .getIntegerValue();
    // Check if there is a gram length as well.
    int gramLength = -1;
    int gramLenPos = rec.getType().getFieldIndex(GRAM_LENGTH_FIELD_NAME);
    if (gramLenPos >= 0) {
        gramLength = ((AInt32) rec.getValueByPos(gramLenPos)).getIntegerValue();
    }
    // Read a field-source-indicator field.
    List<Integer> keyFieldSourceIndicator = new ArrayList<>();
    int keyFieldSourceIndicatorIndex = rec.getType().getFieldIndex(INDEX_SEARCHKEY_SOURCE_INDICATOR_FIELD_NAME);
    if (keyFieldSourceIndicatorIndex >= 0) {
        IACursor cursor = ((AOrderedList) rec.getValueByPos(keyFieldSourceIndicatorIndex)).getCursor();
        while (cursor.next()) {
            keyFieldSourceIndicator.add((int) ((AInt8) cursor.get()).getByteValue());
        }
    } else {
        for (int index = 0; index < searchKey.size(); ++index) {
            keyFieldSourceIndicator.add(0);
        }
    }
    // Index key type information is not persisted, thus we extract type information from the record metadata.
    if (searchKeyType.isEmpty()) {
        try {
            Dataset dSet = metadataNode.getDataset(jobId, dvName, dsName);
            String datatypeName = dSet.getItemTypeName();
            String datatypeDataverseName = dSet.getItemTypeDataverseName();
            ARecordType recordDt =
                    (ARecordType) metadataNode.getDatatype(jobId, datatypeDataverseName, datatypeName).getDatatype();
            String metatypeName = dSet.getMetaItemTypeName();
            String metatypeDataverseName = dSet.getMetaItemTypeDataverseName();
            ARecordType metaDt = null;
            if (metatypeName != null && metatypeDataverseName != null) {
                metaDt = (ARecordType) metadataNode.getDatatype(jobId, metatypeDataverseName, metatypeName)
                        .getDatatype();
            }
            try {
                searchKeyType = KeyFieldTypeUtil.getKeyTypes(recordDt, metaDt, searchKey, keyFieldSourceIndicator);
            } catch (AlgebricksException e) {
                throw new MetadataException(e);
            }
        } catch (RemoteException re) {
            throw HyracksDataException.create(re);
        }
    }
    return new Index(dvName, dsName, indexName, indexStructure, searchKey, keyFieldSourceIndicator, searchKeyType,
            gramLength, isEnforcingKeys, isPrimaryIndex, pendingOp);
}
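The searchKey value assembled above is a list of field paths, each path being a list of name components so nested fields can be addressed, and keyFieldSourceIndicator defaults to 0 (the record, rather than the meta record) for every key when no indicator list is persisted. A tiny sketch with made-up field names shows the shape of those two values:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class SearchKeyShapeExample {
    public static void main(String[] args) {
        // One top-level key field and one nested key field (field names are made up).
        List<List<String>> searchKey = new ArrayList<>();
        searchKey.add(Arrays.asList("DatasetName"));        // top-level field
        searchKey.add(Arrays.asList("Address", "ZipCode")); // nested field path
        // With no persisted indicators, every key defaults to source 0 (the record),
        // mirroring the fallback loop in getMetadataEntityFromTuple above.
        List<Integer> keyFieldSourceIndicator = new ArrayList<>();
        for (int i = 0; i < searchKey.size(); i++) {
            keyFieldSourceIndicator.add(0);
        }
        System.out.println(searchKey + " " + keyFieldSourceIndicator);
    }
}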