
Example 41 with AsterixException

Use of org.apache.asterix.common.exceptions.AsterixException in project asterixdb by apache.

From the class ClusterLifecycleListener, method executeWorkSet:

private void executeWorkSet(Set<IClusterManagementWork> workSet) {
    int nodesToAdd = 0;
    Set<String> nodesToRemove = new HashSet<>();
    Set<AddNodeWork> nodeAdditionRequests = new HashSet<>();
    Set<IClusterManagementWork> nodeRemovalRequests = new HashSet<>();
    for (IClusterManagementWork w : workSet) {
        switch(w.getClusterManagementWorkType()) {
            case ADD_NODE:
                if (nodesToAdd < ((AddNodeWork) w).getNumberOfNodesRequested()) {
                    nodesToAdd = ((AddNodeWork) w).getNumberOfNodesRequested();
                }
                nodeAdditionRequests.add((AddNodeWork) w);
                break;
            case REMOVE_NODE:
                nodesToRemove.addAll(((RemoveNodeWork) w).getNodesToBeRemoved());
                nodeRemovalRequests.add(w);
                RemoveNodeWorkResponse response = new RemoveNodeWorkResponse((RemoveNodeWork) w, Status.IN_PROGRESS);
                pendingWorkResponses.add(response);
                break;
        }
    }
    List<String> addedNodes = new ArrayList<>();
    String asterixInstanceName = ClusterProperties.INSTANCE.getCluster().getInstanceName();
    for (int i = 0; i < nodesToAdd; i++) {
        Node node = ClusterStateManager.INSTANCE.getAvailableSubstitutionNode();
        if (node != null) {
            try {
                ClusterManagerProvider.getClusterManager().addNode(appCtx, node);
                addedNodes.add(asterixInstanceName + "_" + node.getId());
                if (LOGGER.isLoggable(Level.INFO)) {
                    LOGGER.info("Added NC at:" + node.getId());
                }
            } catch (AsterixException e) {
                if (LOGGER.isLoggable(Level.WARNING)) {
                    LOGGER.warning("Unable to add NC at:" + node.getId());
                }
                e.printStackTrace();
            }
        } else {
            if (LOGGER.isLoggable(Level.WARNING)) {
                LOGGER.warning("Unable to add NC: no more available nodes");
            }
        }
    }
    for (AddNodeWork w : nodeAdditionRequests) {
        int n = w.getNumberOfNodesRequested();
        List<String> nodesToBeAddedForWork = new ArrayList<>();
        for (int i = 0; i < n && i < addedNodes.size(); i++) {
            nodesToBeAddedForWork.add(addedNodes.get(i));
        }
        if (nodesToBeAddedForWork.isEmpty()) {
            if (LOGGER.isLoggable(Level.INFO)) {
                LOGGER.info("Unable to satisfy request by " + w);
            }
            AddNodeWorkResponse response = new AddNodeWorkResponse(w, nodesToBeAddedForWork);
            response.setStatus(Status.FAILURE);
            w.getSourceSubscriber().notifyRequestCompletion(response);
        } else {
            AddNodeWorkResponse response = new AddNodeWorkResponse(w, nodesToBeAddedForWork);
            pendingWorkResponses.add(response);
        }
    }
}
Also used: Node (org.apache.asterix.event.schema.cluster.Node), IClusterManagementWork (org.apache.asterix.common.api.IClusterManagementWork), ArrayList (java.util.ArrayList), AsterixException (org.apache.asterix.common.exceptions.AsterixException), RemoveNodeWorkResponse (org.apache.asterix.metadata.cluster.RemoveNodeWorkResponse), AddNodeWork (org.apache.asterix.metadata.cluster.AddNodeWork), AddNodeWorkResponse (org.apache.asterix.metadata.cluster.AddNodeWorkResponse), HashSet (java.util.HashSet)
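
A minimal sketch, not taken from the asterixdb source: the catch block above logs a warning and then calls e.printStackTrace() separately. Assuming LOGGER is a java.util.logging.Logger (as the Level and isLoggable calls suggest), the exception can instead be passed to the logger so the message and the stack trace land in the same log record.

try {
    ClusterManagerProvider.getClusterManager().addNode(appCtx, node);
    addedNodes.add(asterixInstanceName + "_" + node.getId());
} catch (AsterixException e) {
    // one call records both the message and the stack trace; replaces the separate printStackTrace()
    LOGGER.log(Level.WARNING, "Unable to add NC at: " + node.getId(), e);
}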

Example 42 with AsterixException

Use of org.apache.asterix.common.exceptions.AsterixException in project asterixdb by apache.

From the class AQLTestCase, method testAQL:

@Test
public void testAQL() throws UnsupportedEncodingException, FileNotFoundException, ParseException, AsterixException, AlgebricksException {
    Reader reader = new BufferedReader(new InputStreamReader(new FileInputStream(queryFile), "UTF-8"));
    IParser parser = aqlParserFactory.createParser(reader);
    GlobalConfig.ASTERIX_LOGGER.info(queryFile.toString());
    try {
        parser.parse();
    } catch (Exception e) {
        GlobalConfig.ASTERIX_LOGGER.warning("Failed while testing file " + reader);
        StringWriter sw = new StringWriter();
        PrintWriter writer = new PrintWriter(sw);
        e.printStackTrace(writer);
        GlobalConfig.ASTERIX_LOGGER.warning(sw.toString());
        throw new ParseException("Parsing " + queryFile.toString());
    }
}
Also used: InputStreamReader (java.io.InputStreamReader), StringWriter (java.io.StringWriter), BufferedReader (java.io.BufferedReader), Reader (java.io.Reader), ParseException (org.apache.asterix.lang.aql.parser.ParseException), FileInputStream (java.io.FileInputStream), AlgebricksException (org.apache.hyracks.algebricks.common.exceptions.AlgebricksException), AsterixException (org.apache.asterix.common.exceptions.AsterixException), FileNotFoundException (java.io.FileNotFoundException), UnsupportedEncodingException (java.io.UnsupportedEncodingException), IParser (org.apache.asterix.lang.common.base.IParser), PrintWriter (java.io.PrintWriter), Test (org.junit.Test)
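
A minimal sketch, assuming the enclosing test method may also declare IOException: the example opens the query file but never closes it, so the same parse step can be wrapped in try-with-resources. queryFile and aqlParserFactory are the fields used in the example above.

try (Reader reader = new BufferedReader(
        new InputStreamReader(new FileInputStream(queryFile), "UTF-8"))) {
    IParser parser = aqlParserFactory.createParser(reader);
    // parse() throws on malformed AQL; the reader is closed either way
    parser.parse();
}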

Example 43 with AsterixException

Use of org.apache.asterix.common.exceptions.AsterixException in project asterixdb by apache.

From the class TwitterRecordReaderFactory, method createRecordReader:

@Override
public IRecordReader<? extends String> createRecordReader(IHyracksTaskContext ctx, int partition) throws HyracksDataException {
    IRecordReader<? extends String> recordReader;
    switch(configuration.get(ExternalDataConstants.KEY_READER)) {
        case ExternalDataConstants.READER_PULL_TWITTER:
            recordReader = new TwitterPullRecordReader(TwitterUtil.getTwitterService(configuration), configuration.get(SearchAPIConstants.QUERY), Integer.parseInt(configuration.get(SearchAPIConstants.INTERVAL)));
            break;
        case ExternalDataConstants.READER_PUSH_TWITTER:
            FilterQuery query;
            try {
                query = TwitterUtil.getFilterQuery(configuration);
                recordReader = (query == null) ? new TwitterPushRecordReader(TwitterUtil.getTwitterStream(configuration), TwitterUtil.getTweetListener()) : new TwitterPushRecordReader(TwitterUtil.getTwitterStream(configuration), TwitterUtil.getTweetListener(), query);
            } catch (AsterixException e) {
                throw new HyracksDataException(e);
            }
            break;
        case ExternalDataConstants.READER_USER_STREAM_TWITTER:
            recordReader = new TwitterPushRecordReader(TwitterUtil.getTwitterStream(configuration), TwitterUtil.getUserTweetsListener());
            break;
        default:
            throw new HyracksDataException("No Record reader found!");
    }
    return recordReader;
}
Also used: AsterixException (org.apache.asterix.common.exceptions.AsterixException), FilterQuery (twitter4j.FilterQuery), HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException)
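
A minimal sketch of the wrap-and-rethrow pattern shown above; loadFilterQueryOrFail is a hypothetical helper, not part of the asterixdb API, and configuration is assumed to be a Map<String, String>, which is what the get(...) calls in the factory suggest. AsterixException is a checked exception, so code running inside a Hyracks operator converts it to HyracksDataException and keeps the original as the cause.

private static FilterQuery loadFilterQueryOrFail(Map<String, String> configuration) throws HyracksDataException {
    try {
        // may throw AsterixException if the filter settings are invalid
        return TwitterUtil.getFilterQuery(configuration);
    } catch (AsterixException e) {
        // rethrow as the exception type the Hyracks runtime interfaces declare
        throw new HyracksDataException(e);
    }
}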

Example 44 with AsterixException

Use of org.apache.asterix.common.exceptions.AsterixException in project asterixdb by apache.

From the class SecondaryRTreeOperationsHelper, method setSecondaryRecDescAndComparators:

@Override
protected void setSecondaryRecDescAndComparators() throws AlgebricksException {
    List<List<String>> secondaryKeyFields = index.getKeyFieldNames();
    int numSecondaryKeys = secondaryKeyFields.size();
    boolean isEnforcingKeyTypes = index.isEnforcingKeyFileds();
    if (numSecondaryKeys != 1) {
        throw new AsterixException("Cannot use " + numSecondaryKeys + " fields as a key for the R-tree index. " + "There can be only one field as a key for the R-tree index.");
    }
    Pair<IAType, Boolean> spatialTypePair = Index.getNonNullableOpenFieldType(index.getKeyFieldTypes().get(0), secondaryKeyFields.get(0), itemType);
    IAType spatialType = spatialTypePair.first;
    anySecondaryKeyIsNullable = spatialTypePair.second;
    if (spatialType == null) {
        throw new AsterixException("Could not find field " + secondaryKeyFields.get(0) + " in the schema.");
    }
    isPointMBR = spatialType.getTypeTag() == ATypeTag.POINT || spatialType.getTypeTag() == ATypeTag.POINT3D;
    int numDimensions = NonTaggedFormatUtil.getNumDimensions(spatialType.getTypeTag());
    numNestedSecondaryKeyFields = numDimensions * 2;
    int recordColumn = dataset.getDatasetType() == DatasetType.INTERNAL ? numPrimaryKeys : 0;
    secondaryFieldAccessEvalFactories = metadataProvider.getFormat().createMBRFactory(isEnforcingKeyTypes ? enforcedItemType : itemType, secondaryKeyFields.get(0), recordColumn, numDimensions, filterFieldName, isPointMBR);
    secondaryComparatorFactories = new IBinaryComparatorFactory[numNestedSecondaryKeyFields];
    valueProviderFactories = new IPrimitiveValueProviderFactory[numNestedSecondaryKeyFields];
    ISerializerDeserializer[] secondaryRecFields = new ISerializerDeserializer[numPrimaryKeys + numNestedSecondaryKeyFields + numFilterFields];
    ISerializerDeserializer[] enforcedRecFields = new ISerializerDeserializer[1 + numPrimaryKeys + numFilterFields];
    secondaryTypeTraits = new ITypeTraits[numNestedSecondaryKeyFields + numPrimaryKeys];
    ITypeTraits[] enforcedTypeTraits = new ITypeTraits[1 + numPrimaryKeys];
    IAType nestedKeyType = NonTaggedFormatUtil.getNestedSpatialType(spatialType.getTypeTag());
    keyType = nestedKeyType.getTypeTag();
    for (int i = 0; i < numNestedSecondaryKeyFields; i++) {
        ISerializerDeserializer keySerde = SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(nestedKeyType);
        secondaryRecFields[i] = keySerde;
        secondaryComparatorFactories[i] = BinaryComparatorFactoryProvider.INSTANCE.getBinaryComparatorFactory(nestedKeyType, true);
        secondaryTypeTraits[i] = TypeTraitProvider.INSTANCE.getTypeTrait(nestedKeyType);
        valueProviderFactories[i] = metadataProvider.getStorageComponentProvider().getPrimitiveValueProviderFactory();
    }
    // Add serializers and comparators for primary index fields.
    if (dataset.getDatasetType() == DatasetType.INTERNAL) {
        for (int i = 0; i < numPrimaryKeys; i++) {
            secondaryRecFields[numNestedSecondaryKeyFields + i] = primaryRecDesc.getFields()[i];
            secondaryTypeTraits[numNestedSecondaryKeyFields + i] = primaryRecDesc.getTypeTraits()[i];
            enforcedRecFields[i] = primaryRecDesc.getFields()[i];
            enforcedTypeTraits[i] = primaryRecDesc.getTypeTraits()[i];
        }
    } else {
        for (int i = 0; i < numPrimaryKeys; i++) {
            secondaryRecFields[numNestedSecondaryKeyFields + i] = IndexingConstants.getSerializerDeserializer(i);
            secondaryTypeTraits[numNestedSecondaryKeyFields + i] = IndexingConstants.getTypeTraits(i);
            enforcedRecFields[i] = IndexingConstants.getSerializerDeserializer(i);
            enforcedTypeTraits[i] = IndexingConstants.getTypeTraits(i);
        }
    }
    enforcedRecFields[numPrimaryKeys] = SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(itemType);
    enforcedRecDesc = new RecordDescriptor(enforcedRecFields, enforcedTypeTraits);
    if (numFilterFields > 0) {
        rtreeFields = new int[numNestedSecondaryKeyFields + numPrimaryKeys];
        for (int i = 0; i < rtreeFields.length; i++) {
            rtreeFields[i] = i;
        }
        Pair<IAType, Boolean> typePair = Index.getNonNullableKeyFieldType(filterFieldName, itemType);
        IAType type = typePair.first;
        ISerializerDeserializer serde = SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(type);
        secondaryRecFields[numPrimaryKeys + numNestedSecondaryKeyFields] = serde;
    }
    secondaryRecDesc = new RecordDescriptor(secondaryRecFields);
    primaryKeyFields = new int[numPrimaryKeys];
    for (int i = 0; i < primaryKeyFields.length; i++) {
        primaryKeyFields[i] = i + numNestedSecondaryKeyFields;
    }
    if (isPointMBR) {
        int numNestedSecondaryKeyFieldForPointMBR = numNestedSecondaryKeyFields / 2;
        ISerializerDeserializer[] recFieldsForPointMBR = new ISerializerDeserializer[numPrimaryKeys + numNestedSecondaryKeyFieldForPointMBR + numFilterFields];
        int idx = 0;
        for (int i = 0; i < numNestedSecondaryKeyFieldForPointMBR; i++) {
            recFieldsForPointMBR[idx++] = secondaryRecFields[i];
        }
        for (int i = 0; i < numPrimaryKeys + numFilterFields; i++) {
            recFieldsForPointMBR[idx++] = secondaryRecFields[numNestedSecondaryKeyFields + i];
        }
        secondaryRecDescForPointMBR = new RecordDescriptor(recFieldsForPointMBR);
    }
}
Also used: ITypeTraits (org.apache.hyracks.api.dataflow.value.ITypeTraits), RecordDescriptor (org.apache.hyracks.api.dataflow.value.RecordDescriptor), ISerializerDeserializer (org.apache.hyracks.api.dataflow.value.ISerializerDeserializer), AsterixException (org.apache.asterix.common.exceptions.AsterixException), List (java.util.List), IAType (org.apache.asterix.om.types.IAType)
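
A minimal sketch of the validation style used at the top of the method; checkRTreeKeyCount is a hypothetical helper, not part of asterixdb. The method's throws clause names AlgebricksException while the code throws AsterixException, which compiles because AsterixException is a subtype of AlgebricksException, as the example itself implies.

private static void checkRTreeKeyCount(int numSecondaryKeys) throws AsterixException {
    // an R-tree index is built over exactly one (spatial) field; reject anything else early
    if (numSecondaryKeys != 1) {
        throw new AsterixException("Cannot use " + numSecondaryKeys + " fields as a key for the R-tree index. "
                + "There can be only one field as a key for the R-tree index.");
    }
}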

Example 45 with AsterixException

Use of org.apache.asterix.common.exceptions.AsterixException in project asterixdb by apache.

From the class AOrderedListBinaryTokenizer, method next:

@Override
public void next() {
    int itemOffset = -1;
    int length = -1;
    try {
        itemOffset = getItemOffset(data, start, itemIndex);
        // Assuming homogeneous list.
        ATypeTag typeTag = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(data[start + 1]);
        // ? Can we handle the non-string type ?
        length = NonTaggedFormatUtil.getFieldValueLength(data, itemOffset, typeTag, false);
        // Last param is a hack to pass the type tag.
        token.reset(data, itemOffset, itemOffset + length, length, data[start + 1]);
    } catch (AsterixException e) {
        throw new IllegalStateException(e);
    }
    itemIndex++;
}
Also used: AsterixException (org.apache.asterix.common.exceptions.AsterixException), ATypeTag (org.apache.asterix.om.types.ATypeTag)
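
A minimal sketch reusing the local variables from the example above: next() overrides an interface method that declares no checked exceptions, so the checked AsterixException from getFieldValueLength cannot propagate and is converted to an unchecked exception. The message with the failing offset is an assumption, not in the original, added only to show how context can be preserved alongside the cause.

try {
    length = NonTaggedFormatUtil.getFieldValueLength(data, itemOffset, typeTag, false);
} catch (AsterixException e) {
    // unchecked rethrow that preserves the original exception as the cause
    throw new IllegalStateException("Failed to read list item at offset " + itemOffset, e);
}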

Aggregations

Classes that appear together with AsterixException across the collected examples, with usage counts:

AsterixException (org.apache.asterix.common.exceptions.AsterixException): 67
IOException (java.io.IOException): 27
HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException): 26
DataOutput (java.io.DataOutput): 15
IPointable (org.apache.hyracks.data.std.api.IPointable): 15
IFrameTupleReference (org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference): 15
TypeMismatchException (org.apache.asterix.runtime.exceptions.TypeMismatchException): 14
IScalarEvaluator (org.apache.hyracks.algebricks.runtime.base.IScalarEvaluator): 14
VoidPointable (org.apache.hyracks.data.std.primitive.VoidPointable): 14
ArrayBackedValueStorage (org.apache.hyracks.data.std.util.ArrayBackedValueStorage): 14
ATypeTag (org.apache.asterix.om.types.ATypeTag): 10
IAType (org.apache.asterix.om.types.IAType): 10
ARecordType (org.apache.asterix.om.types.ARecordType): 9
IHyracksTaskContext (org.apache.hyracks.api.context.IHyracksTaskContext): 9
ISerializerDeserializer (org.apache.hyracks.api.dataflow.value.ISerializerDeserializer): 9
AlgebricksException (org.apache.hyracks.algebricks.common.exceptions.AlgebricksException): 8
IScalarEvaluatorFactory (org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory): 8
List (java.util.List): 7
InputStream (java.io.InputStream): 5
CompilationException (org.apache.asterix.common.exceptions.CompilationException): 5