Use of org.apache.asterix.common.exceptions.AsterixException in project asterixdb by apache.
From the class ClusterLifecycleListener, method executeWorkSet:
private void executeWorkSet(Set<IClusterManagementWork> workSet) {
    int nodesToAdd = 0;
    Set<String> nodesToRemove = new HashSet<>();
    Set<AddNodeWork> nodeAdditionRequests = new HashSet<>();
    Set<IClusterManagementWork> nodeRemovalRequests = new HashSet<>();
    for (IClusterManagementWork w : workSet) {
        switch (w.getClusterManagementWorkType()) {
            case ADD_NODE:
                // Track the largest single request; the nodes are added once, below.
                if (nodesToAdd < ((AddNodeWork) w).getNumberOfNodesRequested()) {
                    nodesToAdd = ((AddNodeWork) w).getNumberOfNodesRequested();
                }
                nodeAdditionRequests.add((AddNodeWork) w);
                break;
            case REMOVE_NODE:
                nodesToRemove.addAll(((RemoveNodeWork) w).getNodesToBeRemoved());
                nodeRemovalRequests.add(w);
                RemoveNodeWorkResponse response = new RemoveNodeWorkResponse((RemoveNodeWork) w, Status.IN_PROGRESS);
                pendingWorkResponses.add(response);
                break;
        }
    }
    List<String> addedNodes = new ArrayList<>();
    String asterixInstanceName = ClusterProperties.INSTANCE.getCluster().getInstanceName();
    for (int i = 0; i < nodesToAdd; i++) {
        Node node = ClusterStateManager.INSTANCE.getAvailableSubstitutionNode();
        if (node != null) {
            try {
                ClusterManagerProvider.getClusterManager().addNode(appCtx, node);
                addedNodes.add(asterixInstanceName + "_" + node.getId());
                if (LOGGER.isLoggable(Level.INFO)) {
                    LOGGER.info("Added NC at:" + node.getId());
                }
            } catch (AsterixException e) {
                // Record the failure, including the stack trace, and move on to the next node.
                if (LOGGER.isLoggable(Level.WARNING)) {
                    LOGGER.log(Level.WARNING, "Unable to add NC at:" + node.getId(), e);
                }
            }
        } else {
            if (LOGGER.isLoggable(Level.WARNING)) {
                LOGGER.warning("Unable to add NC: no more available nodes");
            }
        }
    }
    for (AddNodeWork w : nodeAdditionRequests) {
        int n = w.getNumberOfNodesRequested();
        List<String> nodesToBeAddedForWork = new ArrayList<>();
        for (int i = 0; i < n && i < addedNodes.size(); i++) {
            nodesToBeAddedForWork.add(addedNodes.get(i));
        }
        if (nodesToBeAddedForWork.isEmpty()) {
            if (LOGGER.isLoggable(Level.INFO)) {
                LOGGER.info("Unable to satisfy request by " + w);
            }
            AddNodeWorkResponse response = new AddNodeWorkResponse(w, nodesToBeAddedForWork);
            response.setStatus(Status.FAILURE);
            w.getSourceSubscriber().notifyRequestCompletion(response);
        } else {
            AddNodeWorkResponse response = new AddNodeWorkResponse(w, nodesToBeAddedForWork);
            pendingWorkResponses.add(response);
        }
    }
}
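In this listener a failed node addition is non-fatal: the AsterixException is logged and the loop simply tries the next substitution node. A minimal, self-contained sketch of that catch-and-continue shape, where the Exception-throwing addNode below is a hypothetical stand-in for ClusterManagerProvider.getClusterManager().addNode:

import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;

public class CatchAndContinueSketch {

    private static final Logger LOGGER = Logger.getLogger(CatchAndContinueSketch.class.getName());

    // Hypothetical stand-in for ClusterManagerProvider.getClusterManager().addNode(appCtx, node).
    private static void addNode(String nodeId) throws Exception {
        if (nodeId.endsWith("2")) {
            throw new Exception("simulated failure adding " + nodeId);
        }
    }

    public static void main(String[] args) {
        List<String> addedNodes = new ArrayList<>();
        for (String nodeId : new String[] { "nc1", "nc2", "nc3" }) {
            try {
                addNode(nodeId);
                addedNodes.add(nodeId);
            } catch (Exception e) {
                // One failed node must not abort the whole work set: log and continue.
                LOGGER.log(Level.WARNING, "Unable to add NC at:" + nodeId, e);
            }
        }
        System.out.println("Added: " + addedNodes); // prints: Added: [nc1, nc3]
    }
}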
Use of org.apache.asterix.common.exceptions.AsterixException in project asterixdb by apache.
From the class AQLTestCase, method testAQL:
@Test
public void testAQL() throws UnsupportedEncodingException, FileNotFoundException, ParseException, AsterixException,
        AlgebricksException {
    Reader reader = new BufferedReader(new InputStreamReader(new FileInputStream(queryFile), "UTF-8"));
    IParser parser = aqlParserFactory.createParser(reader);
    GlobalConfig.ASTERIX_LOGGER.info(queryFile.toString());
    try {
        parser.parse();
    } catch (Exception e) {
        // Log the failing file along with the full stack trace.
        GlobalConfig.ASTERIX_LOGGER.warning("Failed while testing file " + queryFile);
        StringWriter sw = new StringWriter();
        PrintWriter writer = new PrintWriter(sw);
        e.printStackTrace(writer);
        GlobalConfig.ASTERIX_LOGGER.warning(sw.toString());
        throw new ParseException("Parsing " + queryFile.toString());
    }
}
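Two incidental rough edges in this test are worth noting: the reader is never closed, and the "UTF-8" string constant forces the checked UnsupportedEncodingException into the throws clause. A minimal sketch of the same file-opening step using try-with-resources and StandardCharsets.UTF_8 (Java 7+), independent of the test's queryFile and parser fields:

import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Reader;
import java.nio.charset.StandardCharsets;

public class Utf8ReaderSketch {
    public static void main(String[] args) throws IOException {
        // StandardCharsets.UTF_8 can never be an unsupported charset name, so no
        // UnsupportedEncodingException; try-with-resources closes the reader on every path.
        try (Reader reader = new BufferedReader(
                new InputStreamReader(new FileInputStream(args[0]), StandardCharsets.UTF_8))) {
            System.out.println("first char: " + (char) reader.read());
        }
    }
}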
Use of org.apache.asterix.common.exceptions.AsterixException in project asterixdb by apache.
From the class TwitterRecordReaderFactory, method createRecordReader:
@Override
public IRecordReader<? extends String> createRecordReader(IHyracksTaskContext ctx, int partition)
        throws HyracksDataException {
    IRecordReader<? extends String> recordReader;
    switch (configuration.get(ExternalDataConstants.KEY_READER)) {
        case ExternalDataConstants.READER_PULL_TWITTER:
            recordReader = new TwitterPullRecordReader(TwitterUtil.getTwitterService(configuration),
                    configuration.get(SearchAPIConstants.QUERY),
                    Integer.parseInt(configuration.get(SearchAPIConstants.INTERVAL)));
            break;
        case ExternalDataConstants.READER_PUSH_TWITTER:
            FilterQuery query;
            try {
                query = TwitterUtil.getFilterQuery(configuration);
                recordReader = (query == null)
                        ? new TwitterPushRecordReader(TwitterUtil.getTwitterStream(configuration),
                                TwitterUtil.getTweetListener())
                        : new TwitterPushRecordReader(TwitterUtil.getTwitterStream(configuration),
                                TwitterUtil.getTweetListener(), query);
            } catch (AsterixException e) {
                // The interface only permits HyracksDataException; carry the cause along.
                throw new HyracksDataException(e);
            }
            break;
        case ExternalDataConstants.READER_USER_STREAM_TWITTER:
            recordReader = new TwitterPushRecordReader(TwitterUtil.getTwitterStream(configuration),
                    TwitterUtil.getUserTweetsListener());
            break;
        default:
            throw new HyracksDataException("No Record reader found!");
    }
    return recordReader;
}
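The only AsterixException handling here is the wrap-and-rethrow in the READER_PUSH_TWITTER arm: createRecordReader may only throw HyracksDataException, so the lower-level exception travels as the cause. A runnable sketch of that boundary-wrapping pattern, using hypothetical ConfigException and ServiceException classes in place of AsterixException and HyracksDataException:

// Hypothetical stand-ins: ConfigException plays the lower-level AsterixException,
// ServiceException plays the only exception the public interface may throw.
class ConfigException extends Exception {
    ConfigException(String msg) { super(msg); }
}

class ServiceException extends Exception {
    ServiceException(Throwable cause) { super(cause); }
}

public class WrapAtBoundarySketch {

    static String parseFilter(String raw) throws ConfigException {
        if (raw == null || raw.isEmpty()) {
            throw new ConfigException("no filter keywords configured");
        }
        return raw.trim();
    }

    // The public entry point may only throw ServiceException, so ConfigException
    // is wrapped rather than propagated, preserving it as the cause.
    public static String createReader(String rawFilter) throws ServiceException {
        try {
            return parseFilter(rawFilter);
        } catch (ConfigException e) {
            throw new ServiceException(e);
        }
    }

    public static void main(String[] args) {
        try {
            createReader("");
        } catch (ServiceException e) {
            System.out.println("cause: " + e.getCause()); // the original ConfigException
        }
    }
}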
Use of org.apache.asterix.common.exceptions.AsterixException in project asterixdb by apache.
From the class SecondaryRTreeOperationsHelper, method setSecondaryRecDescAndComparators:
@Override
protected void setSecondaryRecDescAndComparators() throws AlgebricksException {
    List<List<String>> secondaryKeyFields = index.getKeyFieldNames();
    int numSecondaryKeys = secondaryKeyFields.size();
    boolean isEnforcingKeyTypes = index.isEnforcingKeyFileds();
    if (numSecondaryKeys != 1) {
        throw new AsterixException("Cannot use " + numSecondaryKeys + " fields as a key for the R-tree index. "
                + "There can be only one field as a key for the R-tree index.");
    }
    Pair<IAType, Boolean> spatialTypePair = Index.getNonNullableOpenFieldType(index.getKeyFieldTypes().get(0),
            secondaryKeyFields.get(0), itemType);
    IAType spatialType = spatialTypePair.first;
    anySecondaryKeyIsNullable = spatialTypePair.second;
    if (spatialType == null) {
        throw new AsterixException("Could not find field " + secondaryKeyFields.get(0) + " in the schema.");
    }
    isPointMBR = spatialType.getTypeTag() == ATypeTag.POINT || spatialType.getTypeTag() == ATypeTag.POINT3D;
    int numDimensions = NonTaggedFormatUtil.getNumDimensions(spatialType.getTypeTag());
    numNestedSecondaryKeyFields = numDimensions * 2;
    int recordColumn = dataset.getDatasetType() == DatasetType.INTERNAL ? numPrimaryKeys : 0;
    secondaryFieldAccessEvalFactories = metadataProvider.getFormat().createMBRFactory(
            isEnforcingKeyTypes ? enforcedItemType : itemType, secondaryKeyFields.get(0), recordColumn,
            numDimensions, filterFieldName, isPointMBR);
    secondaryComparatorFactories = new IBinaryComparatorFactory[numNestedSecondaryKeyFields];
    valueProviderFactories = new IPrimitiveValueProviderFactory[numNestedSecondaryKeyFields];
    ISerializerDeserializer[] secondaryRecFields =
            new ISerializerDeserializer[numPrimaryKeys + numNestedSecondaryKeyFields + numFilterFields];
    ISerializerDeserializer[] enforcedRecFields = new ISerializerDeserializer[1 + numPrimaryKeys + numFilterFields];
    secondaryTypeTraits = new ITypeTraits[numNestedSecondaryKeyFields + numPrimaryKeys];
    ITypeTraits[] enforcedTypeTraits = new ITypeTraits[1 + numPrimaryKeys];
    IAType nestedKeyType = NonTaggedFormatUtil.getNestedSpatialType(spatialType.getTypeTag());
    keyType = nestedKeyType.getTypeTag();
    for (int i = 0; i < numNestedSecondaryKeyFields; i++) {
        ISerializerDeserializer keySerde =
                SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(nestedKeyType);
        secondaryRecFields[i] = keySerde;
        secondaryComparatorFactories[i] =
                BinaryComparatorFactoryProvider.INSTANCE.getBinaryComparatorFactory(nestedKeyType, true);
        secondaryTypeTraits[i] = TypeTraitProvider.INSTANCE.getTypeTrait(nestedKeyType);
        valueProviderFactories[i] =
                metadataProvider.getStorageComponentProvider().getPrimitiveValueProviderFactory();
    }
    // Add serializers and comparators for primary index fields.
    if (dataset.getDatasetType() == DatasetType.INTERNAL) {
        for (int i = 0; i < numPrimaryKeys; i++) {
            secondaryRecFields[numNestedSecondaryKeyFields + i] = primaryRecDesc.getFields()[i];
            secondaryTypeTraits[numNestedSecondaryKeyFields + i] = primaryRecDesc.getTypeTraits()[i];
            enforcedRecFields[i] = primaryRecDesc.getFields()[i];
            enforcedTypeTraits[i] = primaryRecDesc.getTypeTraits()[i];
        }
    } else {
        for (int i = 0; i < numPrimaryKeys; i++) {
            secondaryRecFields[numNestedSecondaryKeyFields + i] = IndexingConstants.getSerializerDeserializer(i);
            secondaryTypeTraits[numNestedSecondaryKeyFields + i] = IndexingConstants.getTypeTraits(i);
            enforcedRecFields[i] = IndexingConstants.getSerializerDeserializer(i);
            enforcedTypeTraits[i] = IndexingConstants.getTypeTraits(i);
        }
    }
    enforcedRecFields[numPrimaryKeys] = SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(itemType);
    enforcedRecDesc = new RecordDescriptor(enforcedRecFields, enforcedTypeTraits);
    if (numFilterFields > 0) {
        rtreeFields = new int[numNestedSecondaryKeyFields + numPrimaryKeys];
        for (int i = 0; i < rtreeFields.length; i++) {
            rtreeFields[i] = i;
        }
        Pair<IAType, Boolean> typePair = Index.getNonNullableKeyFieldType(filterFieldName, itemType);
        IAType type = typePair.first;
        ISerializerDeserializer serde = SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(type);
        secondaryRecFields[numPrimaryKeys + numNestedSecondaryKeyFields] = serde;
    }
    secondaryRecDesc = new RecordDescriptor(secondaryRecFields);
    primaryKeyFields = new int[numPrimaryKeys];
    for (int i = 0; i < primaryKeyFields.length; i++) {
        primaryKeyFields[i] = i + numNestedSecondaryKeyFields;
    }
    if (isPointMBR) {
        // A point's MBR corners coincide, so only half of the nested key fields are materialized.
        int numNestedSecondaryKeyFieldForPointMBR = numNestedSecondaryKeyFields / 2;
        ISerializerDeserializer[] recFieldsForPointMBR = new ISerializerDeserializer[numPrimaryKeys
                + numNestedSecondaryKeyFieldForPointMBR + numFilterFields];
        int idx = 0;
        for (int i = 0; i < numNestedSecondaryKeyFieldForPointMBR; i++) {
            recFieldsForPointMBR[idx++] = secondaryRecFields[i];
        }
        for (int i = 0; i < numPrimaryKeys + numFilterFields; i++) {
            recFieldsForPointMBR[idx++] = secondaryRecFields[numNestedSecondaryKeyFields + i];
        }
        secondaryRecDescForPointMBR = new RecordDescriptor(recFieldsForPointMBR);
    }
}
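The array sizing above follows from one rule: an R-tree secondary key is the MBR of the spatial value, stored as 2 * numDimensions scalar fields (a min and a max per dimension), and for POINT/POINT3D keys the two MBR corners coincide, so the isPointMBR record descriptor keeps only half of them. A small runnable sketch of that arithmetic, with dimension counts hard-coded as a simplified stand-in for NonTaggedFormatUtil.getNumDimensions:

public class MbrLayoutSketch {

    // Simplified stand-in for NonTaggedFormatUtil.getNumDimensions(ATypeTag).
    static int numDimensions(String typeTag) {
        switch (typeTag) {
            case "POINT":
            case "RECTANGLE":
                return 2;
            case "POINT3D":
                return 3;
            default:
                throw new IllegalArgumentException("not a spatial type: " + typeTag);
        }
    }

    public static void main(String[] args) {
        for (String tag : new String[] { "RECTANGLE", "POINT", "POINT3D" }) {
            int numNestedSecondaryKeyFields = numDimensions(tag) * 2; // min and max per dimension
            boolean isPointMBR = tag.equals("POINT") || tag.equals("POINT3D");
            // Both corners of a point's MBR are the point itself, so store only one of them.
            int storedKeyFields = isPointMBR ? numNestedSecondaryKeyFields / 2 : numNestedSecondaryKeyFields;
            System.out.println(tag + ": MBR fields = " + numNestedSecondaryKeyFields
                    + ", stored = " + storedKeyFields);
        }
    }
}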
Use of org.apache.asterix.common.exceptions.AsterixException in project asterixdb by apache.
From the class AOrderedListBinaryTokenizer, method next:
@Override
public void next() {
    int itemOffset = -1;
    int length = -1;
    try {
        itemOffset = getItemOffset(data, start, itemIndex);
        // Assuming a homogeneous list: every item carries the type tag stored at start + 1.
        ATypeTag typeTag = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(data[start + 1]);
        // TODO: can non-string item types be handled here?
        length = NonTaggedFormatUtil.getFieldValueLength(data, itemOffset, typeTag, false);
        // The last argument is a hack to pass the type tag through to the token.
        token.reset(data, itemOffset, itemOffset + length, length, data[start + 1]);
    } catch (AsterixException e) {
        // next() declares no checked exceptions, so wrap the checked AsterixException.
        throw new IllegalStateException(e);
    }
    itemIndex++;
}
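Here next() overrides an interface method whose signature declares no checked exceptions, so the checked AsterixException has to become an unchecked IllegalStateException to escape at all. A minimal sketch of that constraint, with a hypothetical Tokenizer interface in place of the real tokenizer interface:

// Hypothetical stand-in for the tokenizer interface: next() declares no checked exceptions.
interface Tokenizer {
    void next();
}

class CheckedFailure extends Exception {
    CheckedFailure(String msg) { super(msg); }
}

public class UncheckedWrapSketch implements Tokenizer {

    private int itemIndex = 0;

    // Stand-in for the decoding work that can fail with a checked exception.
    private int decodeItem(int index) throws CheckedFailure {
        if (index > 1) {
            throw new CheckedFailure("malformed item at index " + index);
        }
        return index * 10;
    }

    @Override
    public void next() {
        try {
            System.out.println("item offset: " + decodeItem(itemIndex));
        } catch (CheckedFailure e) {
            // The interface forbids checked exceptions, so wrap, preserving the cause.
            throw new IllegalStateException(e);
        }
        itemIndex++;
    }

    public static void main(String[] args) {
        UncheckedWrapSketch t = new UncheckedWrapSketch();
        t.next(); // item offset: 0
        t.next(); // item offset: 10
        try {
            t.next();
        } catch (IllegalStateException e) {
            System.out.println("wrapped: " + e.getCause().getMessage());
        }
    }
}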