Use of org.apache.asterix.om.types.ARecordType in project asterixdb by apache.
The class QueryTranslator, method handleCreateDatasetStatement:
public void handleCreateDatasetStatement(MetadataProvider metadataProvider, Statement stmt,
        IHyracksClientConnection hcc) throws CompilationException, Exception {
    MutableObject<ProgressState> progress = new MutableObject<>(ProgressState.NO_PROGRESS);
    DatasetDecl dd = (DatasetDecl) stmt;
    String dataverseName = getActiveDataverse(dd.getDataverse());
    String datasetName = dd.getName().getValue();
    DatasetType dsType = dd.getDatasetType();
    String itemTypeDataverseName = getActiveDataverse(dd.getItemTypeDataverse());
    String itemTypeName = dd.getItemTypeName().getValue();
    String metaItemTypeDataverseName = getActiveDataverse(dd.getMetaItemTypeDataverse());
    String metaItemTypeName = dd.getMetaItemTypeName().getValue();
    Identifier ngNameId = dd.getNodegroupName();
    String nodegroupName = ngNameId == null ? null : ngNameId.getValue();
    String compactionPolicy = dd.getCompactionPolicy();
    Map<String, String> compactionPolicyProperties = dd.getCompactionPolicyProperties();
    boolean defaultCompactionPolicy = compactionPolicy == null;
    boolean temp = dd.getDatasetDetailsDecl().isTemp();
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    boolean bActiveTxn = true;
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    MetadataLockManager.INSTANCE.createDatasetBegin(metadataProvider.getLocks(), dataverseName,
            itemTypeDataverseName, itemTypeDataverseName + "." + itemTypeName, metaItemTypeDataverseName,
            metaItemTypeDataverseName + "." + metaItemTypeName, nodegroupName, compactionPolicy,
            dataverseName + "." + datasetName, defaultCompactionPolicy);
    Dataset dataset = null;
    try {
        IDatasetDetails datasetDetails = null;
        Dataset ds = metadataProvider.findDataset(dataverseName, datasetName);
        if (ds != null) {
            if (dd.getIfNotExists()) {
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                return;
            } else {
                throw new AlgebricksException("A dataset with name " + datasetName + " already exists.");
            }
        }
        Datatype dt = MetadataManager.INSTANCE.getDatatype(metadataProvider.getMetadataTxnContext(),
                itemTypeDataverseName, itemTypeName);
        if (dt == null) {
            throw new AlgebricksException("Type " + itemTypeName + " could not be found.");
        }
        String ngName = ngNameId != null ? ngNameId.getValue()
                : configureNodegroupForDataset(appCtx, dd.getHints(), dataverseName, datasetName, metadataProvider);
        if (compactionPolicy == null) {
            compactionPolicy = GlobalConfig.DEFAULT_COMPACTION_POLICY_NAME;
            compactionPolicyProperties = GlobalConfig.DEFAULT_COMPACTION_POLICY_PROPERTIES;
        } else {
            validateCompactionPolicy(compactionPolicy, compactionPolicyProperties, mdTxnCtx, false);
        }
        switch (dd.getDatasetType()) {
            case INTERNAL:
                IAType itemType = dt.getDatatype();
                if (itemType.getTypeTag() != ATypeTag.OBJECT) {
                    throw new AlgebricksException("Dataset type has to be a record type.");
                }
                IAType metaItemType = null;
                if (metaItemTypeDataverseName != null && metaItemTypeName != null) {
                    metaItemType = metadataProvider.findType(metaItemTypeDataverseName, metaItemTypeName);
                }
                if (metaItemType != null && metaItemType.getTypeTag() != ATypeTag.OBJECT) {
                    throw new AlgebricksException("Dataset meta type has to be a record type.");
                }
                ARecordType metaRecType = (ARecordType) metaItemType;
                List<List<String>> partitioningExprs =
                        ((InternalDetailsDecl) dd.getDatasetDetailsDecl()).getPartitioningExprs();
                List<Integer> keySourceIndicators =
                        ((InternalDetailsDecl) dd.getDatasetDetailsDecl()).getKeySourceIndicators();
                boolean autogenerated = ((InternalDetailsDecl) dd.getDatasetDetailsDecl()).isAutogenerated();
                ARecordType aRecordType = (ARecordType) itemType;
                List<IAType> partitioningTypes = ValidateUtil.validatePartitioningExpressions(aRecordType,
                        metaRecType, partitioningExprs, keySourceIndicators, autogenerated);
                List<String> filterField = ((InternalDetailsDecl) dd.getDatasetDetailsDecl()).getFilterField();
                if (filterField != null) {
                    ValidateUtil.validateFilterField(aRecordType, filterField);
                }
                // If the dataset has a filter and the user didn't specify a merge policy,
                // pick correlated-prefix as the default merge policy. (compactionPolicy was
                // already defaulted above, so test the defaultCompactionPolicy flag here.)
                if (defaultCompactionPolicy && filterField != null) {
                    compactionPolicy = GlobalConfig.DEFAULT_FILTERED_DATASET_COMPACTION_POLICY_NAME;
                    compactionPolicyProperties = GlobalConfig.DEFAULT_COMPACTION_POLICY_PROPERTIES;
                }
                datasetDetails = new InternalDatasetDetails(InternalDatasetDetails.FileStructure.BTREE,
                        InternalDatasetDetails.PartitioningStrategy.HASH, partitioningExprs, partitioningExprs,
                        keySourceIndicators, partitioningTypes, autogenerated, filterField, temp);
                break;
            case EXTERNAL:
                String adapter = ((ExternalDetailsDecl) dd.getDatasetDetailsDecl()).getAdapter();
                Map<String, String> properties = ((ExternalDetailsDecl) dd.getDatasetDetailsDecl()).getProperties();
                datasetDetails = new ExternalDatasetDetails(adapter, properties, new Date(), TransactionState.COMMIT);
                break;
            default:
                throw new CompilationException("Unknown dataset type " + dd.getDatasetType());
        }
        // #. initialize DatasetIdFactory if it is not initialized.
        if (!DatasetIdFactory.isInitialized()) {
            DatasetIdFactory.initialize(MetadataManager.INSTANCE.getMostRecentDatasetId());
        }
        // #. add a new dataset with PendingAddOp
        dataset = new Dataset(dataverseName, datasetName, itemTypeDataverseName, itemTypeName,
                metaItemTypeDataverseName, metaItemTypeName, ngName, compactionPolicy,
                compactionPolicyProperties, datasetDetails, dd.getHints(), dsType,
                DatasetIdFactory.generateDatasetId(), MetadataUtil.PENDING_ADD_OP);
        MetadataManager.INSTANCE.addDataset(metadataProvider.getMetadataTxnContext(), dataset);
        if (dd.getDatasetType() == DatasetType.INTERNAL) {
            JobSpecification jobSpec = DatasetUtil.createDatasetJobSpec(dataset, metadataProvider);
            // #. make metadataTxn commit before calling runJob.
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            bActiveTxn = false;
            progress.setValue(ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA);
            // #. runJob
            JobUtils.runJob(hcc, jobSpec, true);
            // #. begin new metadataTxn
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            bActiveTxn = true;
            metadataProvider.setMetadataTxnContext(mdTxnCtx);
        }
        // #. add a new dataset with PendingNoOp after deleting the dataset with PendingAddOp
        MetadataManager.INSTANCE.dropDataset(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName);
        dataset.setPendingOp(MetadataUtil.PENDING_NO_OP);
        MetadataManager.INSTANCE.addDataset(metadataProvider.getMetadataTxnContext(), dataset);
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        if (bActiveTxn) {
            abort(e, e, mdTxnCtx);
        }
        if (progress.getValue() == ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA) {
            // #. execute compensation operations: remove the index in the NC.
            // As long as we updated (and committed) metadata, we should remove any effect
            // of the job, because an exception occurred during runJob.
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            bActiveTxn = true;
            metadataProvider.setMetadataTxnContext(mdTxnCtx);
            try {
                JobSpecification jobSpec = DatasetUtil.dropDatasetJobSpec(dataset, metadataProvider);
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
                bActiveTxn = false;
                JobUtils.runJob(hcc, jobSpec, true);
            } catch (Exception e2) {
                e.addSuppressed(e2);
                if (bActiveTxn) {
                    abort(e, e2, mdTxnCtx);
                }
            }
            // remove the record from the metadata.
            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
            metadataProvider.setMetadataTxnContext(mdTxnCtx);
            try {
                MetadataManager.INSTANCE.dropDataset(metadataProvider.getMetadataTxnContext(), dataverseName,
                        datasetName);
                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
            } catch (Exception e2) {
                e.addSuppressed(e2);
                abort(e, e2, mdTxnCtx);
                throw new IllegalStateException("System is in an inconsistent state: pending dataset ("
                        + dataverseName + "." + datasetName + ") could not be removed from the metadata", e);
            }
        }
        throw e;
    } finally {
        metadataProvider.getLocks().unlock();
    }
}
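The INTERNAL branch above only casts the item type to ARecordType after checking its type tag. Below is a minimal, self-contained sketch of that guard, reusing the four-argument ARecordType constructor that appears in the test further down; the type name and field layout are hypothetical, invented for illustration:

import org.apache.asterix.om.types.ARecordType;
import org.apache.asterix.om.types.ATypeTag;
import org.apache.asterix.om.types.BuiltinType;
import org.apache.asterix.om.types.IAType;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;

public class RecordTypeGuardSketch {

    // Mirrors the tag check in handleCreateDatasetStatement before the cast.
    static ARecordType asRecordType(IAType itemType) throws AlgebricksException {
        if (itemType.getTypeTag() != ATypeTag.OBJECT) {
            throw new AlgebricksException("Dataset type has to be a record type.");
        }
        return (ARecordType) itemType;
    }

    public static void main(String[] args) throws AlgebricksException {
        // Hypothetical open record type standing in for the metadata lookup result.
        IAType itemType = new ARecordType("TweetType", new String[] { "id", "verified" },
                new IAType[] { BuiltinType.AINT64, BuiltinType.ABOOLEAN }, true);
        System.out.println(asRecordType(itemType).getTypeName()); // prints "TweetType"
    }
}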
Use of org.apache.asterix.om.types.ARecordType in project asterixdb by apache.
The class ConnectorApiServletTest, method testFormResponseObject:
@Test
public void testFormResponseObject() throws Exception {
    ConnectorApiServlet let = new ConnectorApiServlet(new ConcurrentHashMap<>(), new String[] { "/" },
            (ICcApplicationContext) ExecutionTestUtil.integrationUtil.cc.getApplicationContext());
    ObjectMapper om = new ObjectMapper();
    ObjectNode actualResponse = om.createObjectNode();
    FileSplit[] splits = new FileSplit[2];
    splits[0] = new ManagedFileSplit("asterix_nc1", "foo1");
    splits[1] = new ManagedFileSplit("asterix_nc2", "foo2");
    Map<String, NodeControllerInfo> nodeMap = new HashMap<>();
    NodeControllerInfo mockInfo1 = mock(NodeControllerInfo.class);
    NodeControllerInfo mockInfo2 = mock(NodeControllerInfo.class);
    // Sets up mock returns.
    when(mockInfo1.getNetworkAddress()).thenReturn(new NetworkAddress("127.0.0.1", 3099));
    when(mockInfo2.getNetworkAddress()).thenReturn(new NetworkAddress("127.0.0.2", 3099));
    String[] fieldNames = new String[] { "a1", "a2" };
    IAType[] fieldTypes = new IAType[] { BuiltinType.ABOOLEAN, BuiltinType.ADAYTIMEDURATION };
    ARecordType recordType = new ARecordType("record", fieldNames, fieldTypes, true);
    String primaryKey = "a1";
    // Calls ConnectorApiServlet.formResponseObject.
    nodeMap.put("asterix_nc1", mockInfo1);
    nodeMap.put("asterix_nc2", mockInfo2);
    PA.invokeMethod(let,
            "formResponseObject(" + ObjectNode.class.getName() + ", " + FileSplit.class.getName() + "[], "
                    + ARecordType.class.getName() + ", " + String.class.getName() + ", boolean, "
                    + Map.class.getName() + ")",
            actualResponse, splits, recordType, primaryKey, true, nodeMap);
    // Constructs the expected response.
    ObjectNode expectedResponse = om.createObjectNode();
    expectedResponse.put("temp", true);
    expectedResponse.put("keys", primaryKey);
    expectedResponse.set("type", recordType.toJSON());
    ArrayNode splitsArray = om.createArrayNode();
    ObjectNode element1 = om.createObjectNode();
    element1.put("ip", "127.0.0.1");
    element1.put("path", splits[0].getPath());
    ObjectNode element2 = om.createObjectNode();
    element2.put("ip", "127.0.0.2");
    element2.put("path", splits[1].getPath());
    splitsArray.add(element1);
    splitsArray.add(element2);
    expectedResponse.set("splits", splitsArray);
    // Checks the results.
    Assert.assertEquals(actualResponse.toString(), expectedResponse.toString());
}
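Since the test drives formResponseObject through reflection, the record type's own contribution is easy to miss. A small inspection sketch of the same recordType (the loop and printing are illustrative; getFieldNames() and getFieldTypes() are standard ARecordType accessors):

ARecordType recordType = new ARecordType("record", new String[] { "a1", "a2" },
        new IAType[] { BuiltinType.ABOOLEAN, BuiltinType.ADAYTIMEDURATION }, true);
String[] names = recordType.getFieldNames();
IAType[] types = recordType.getFieldTypes();
for (int i = 0; i < names.length; i++) {
    // Prints "a1 : BOOLEAN" and "a2 : DAYTIMEDURATION".
    System.out.println(names[i] + " : " + types[i].getTypeTag());
}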
Use of org.apache.asterix.om.types.ARecordType in project asterixdb by apache.
The class JObjectPointableVisitor, method visit:
@Override
public IJObject visit(ARecordVisitablePointable accessor, TypeInfo arg) throws HyracksDataException {
    IJRecordAccessor jRecordAccessor = raccessorToJObject.get(accessor);
    if (jRecordAccessor == null) {
        jRecordAccessor = new JRecordAccessor(accessor.getInputRecordType(), arg.getObjectPool());
        raccessorToJObject.put(accessor, jRecordAccessor);
    }
    return jRecordAccessor.access(accessor, arg.getObjectPool(), (ARecordType) arg.getAtype(), this);
}
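The get/put pair above memoizes one JRecordAccessor per visited pointable. Assuming raccessorToJObject is a plain java.util.Map, the same caching can be written more compactly with computeIfAbsent (a sketch, not the project's actual code):

IJRecordAccessor jRecordAccessor = raccessorToJObject.computeIfAbsent(accessor,
        a -> new JRecordAccessor(a.getInputRecordType(), arg.getObjectPool()));
return jRecordAccessor.access(accessor, arg.getObjectPool(), (ARecordType) arg.getAtype(), this);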
Use of org.apache.asterix.om.types.ARecordType in project asterixdb by apache.
The class JTypeObjectFactory, method create:
@Override
public IJObject create(IAType type) {
    IJObject retValue = null;
    switch (type.getTypeTag()) {
        case INTEGER:
            retValue = new JInt(0);
            break;
        case STRING:
            retValue = new JString("");
            break;
        case FLOAT:
            retValue = new JFloat(0);
            break;
        case DOUBLE:
            retValue = new JDouble(0);
            break;
        case BOOLEAN:
            retValue = new JBoolean(false);
            break;
        case CIRCLE:
            retValue = new JCircle(new JPoint(0, 0), 0);
            break;
        case POINT:
            retValue = new JPoint(0, 0);
            break;
        case POINT3D:
            retValue = new JPoint3D(0, 0, 0);
            break;
        case POLYGON:
            retValue = new JPolygon(new JPoint[] {});
            break;
        case LINE:
            retValue = new JLine(new JPoint(0, 0), new JPoint(0, 0));
            break;
        case RECTANGLE:
            retValue = new JRectangle(new JPoint(0, 0), new JPoint(1, 1));
            break;
        case DATE:
            retValue = new JDate(0);
            break;
        case DATETIME:
            retValue = new JDateTime(0);
            break;
        case DURATION:
            retValue = new JDuration(0, 0);
            break;
        case INTERVAL:
            retValue = new JInterval(0, 0);
            break;
        case TIME:
            retValue = new JTime(0);
            break;
        case BIGINT:
            retValue = new JLong(0);
            break;
        case NULL:
            retValue = JObjects.JNull.INSTANCE;
            break;
        case MISSING:
            retValue = JObjects.JMissing.INSTANCE;
            break;
        case ARRAY:
            AOrderedListType ot = (AOrderedListType) type;
            IAType orderedItemType = ot.getItemType();
            IJObject orderedItemObject = create(orderedItemType);
            retValue = new JOrderedList(orderedItemObject);
            break;
        case MULTISET:
            AUnorderedListType ut = (AUnorderedListType) type;
            IAType unorderedItemType = ut.getItemType();
            IJObject unorderedItemObject = create(unorderedItemType);
            retValue = new JUnorderedList(unorderedItemObject);
            break;
        case OBJECT:
            IAType[] fieldTypes = ((ARecordType) type).getFieldTypes();
            IJObject[] fieldObjects = new IJObject[fieldTypes.length];
            int index = 0;
            for (IAType fieldType : fieldTypes) {
                fieldObjects[index] = create(fieldType);
                index++;
            }
            retValue = new JRecord((ARecordType) type, fieldObjects);
            break;
        case UNION:
            AUnionType unionType = (AUnionType) type;
            IJObject itemObject = null;
            if (unionType.isMissableType()) {
                // Create a value for the union's actual (non-missing) type;
                // recursing on the union itself would never terminate.
                itemObject = create(unionType.getActualType());
            }
            retValue = itemObject;
            break;
        default:
            break;
    }
    return retValue;
}
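Because the OBJECT case recurses into every field type, asking the factory for a record type yields a template with one default-valued IJObject per field. A usage sketch (direct instantiation of JTypeObjectFactory and the field layout are assumptions made for illustration):

// Closed record with a string field and a bigint field.
ARecordType addressType = new ARecordType("AddressType", new String[] { "street", "zip" },
        new IAType[] { BuiltinType.ASTRING, BuiltinType.AINT64 }, false);
JTypeObjectFactory factory = new JTypeObjectFactory();
// Per the switch above: a JRecord wrapping JString("") and JLong(0).
IJObject template = factory.create(addressType);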
Use of org.apache.asterix.om.types.ARecordType in project asterixdb by apache.
The class ClosedRecordConstructorResultType, method computeType:
@Override
public IAType computeType(ILogicalExpression expression, IVariableTypeEnvironment env,
        IMetadataProvider<?, ?> metadataProvider) throws AlgebricksException {
    AbstractFunctionCallExpression f = (AbstractFunctionCallExpression) expression;
    String funcName = f.getFunctionIdentifier().getName();
    // If the type has been top-down propagated, use the enforced type.
    ARecordType type = (ARecordType) TypeCastUtils.getRequiredType(f);
    if (type != null) {
        return type;
    }
    int n = f.getArguments().size() / 2;
    String[] fieldNames = new String[n];
    IAType[] fieldTypes = new IAType[n];
    int i = 0;
    Iterator<Mutable<ILogicalExpression>> argIter = f.getArguments().iterator();
    while (argIter.hasNext()) {
        // Arguments come in (field-name, field-value) pairs.
        ILogicalExpression e1 = argIter.next().getValue();
        ILogicalExpression e2 = argIter.next().getValue();
        IAType e2Type = (IAType) env.getType(e2);
        if (e2Type.getTypeTag() == ATypeTag.UNION) {
            AUnionType unionType = (AUnionType) e2Type;
            e2Type = AUnionType.createUnknownableType(unionType.getActualType());
        }
        fieldTypes[i] = e2Type;
        fieldNames[i] = ConstantExpressionUtil.getStringConstant(e1);
        if (fieldNames[i] == null) {
            throw new InvalidExpressionException(funcName, 2 * i, e1, LogicalExpressionTag.CONSTANT);
        }
        i++;
    }
    return new ARecordType(null, fieldNames, fieldTypes, false);
}
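The loop consumes the constructor's arguments pairwise: even positions must be constant string field names, odd positions are the value expressions whose types are collected. For a closed-record constructor like {"name": $x, "age": $y}, with $x typed string and $y typed bigint, the computed type is equivalent to the following sketch (the field layout is illustrative):

// Anonymous closed record type: field order follows the constructor's argument order.
ARecordType computed = new ARecordType(null, new String[] { "name", "age" },
        new IAType[] { BuiltinType.ASTRING, BuiltinType.AINT64 }, false);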