
Example 11 with AsterixException

use of org.apache.asterix.common.exceptions.AsterixException in project asterixdb by apache.

the class RecordAddFieldsDescriptor method createEvaluatorFactory.

@Override
public IScalarEvaluatorFactory createEvaluatorFactory(final IScalarEvaluatorFactory[] args) {
    return new IScalarEvaluatorFactory() {

        private static final long serialVersionUID = 1L;

        @Override
        public IScalarEvaluator createScalarEvaluator(final IHyracksTaskContext ctx) throws HyracksDataException {
            final PointableAllocator allocator = new PointableAllocator();
            final IVisitablePointable vp0 = allocator.allocateRecordValue(inRecType);
            final IVisitablePointable vp1 = allocator.allocateListValue(inListType);
            final IPointable argPtr0 = new VoidPointable();
            final IPointable argPtr1 = new VoidPointable();
            final IScalarEvaluator eval0 = args[0].createScalarEvaluator(ctx);
            final IScalarEvaluator eval1 = args[1].createScalarEvaluator(ctx);
            final ArrayBackedValueStorage fieldNamePointable = new ArrayBackedValueStorage();
            final ArrayBackedValueStorage fieldValuePointer = new ArrayBackedValueStorage();
            final PointableHelper pointableHelper = new PointableHelper();
            try {
                pointableHelper.serializeString("field-name", fieldNamePointable, true);
                pointableHelper.serializeString("field-value", fieldValuePointer, true);
            } catch (AsterixException e) {
                throw new HyracksDataException(e);
            }
            return new IScalarEvaluator() {

                // the default 32k frame size
                public static final int TABLE_FRAME_SIZE = 32768;

                // the default hash table size (number of entries)
                public static final int TABLE_SIZE = 100;

                private final RecordBuilder recordBuilder = new RecordBuilder();

                private final RuntimeRecordTypeInfo requiredRecordTypeInfo = new RuntimeRecordTypeInfo();

                private final IBinaryHashFunction putHashFunc = ListItemBinaryHashFunctionFactory.INSTANCE.createBinaryHashFunction();

                private final IBinaryHashFunction getHashFunc = ListItemBinaryHashFunctionFactory.INSTANCE.createBinaryHashFunction();

                private final BinaryEntry keyEntry = new BinaryEntry();

                private final BinaryEntry valEntry = new BinaryEntry();

                private final IVisitablePointable tempValReference = allocator.allocateEmpty();

                private final IBinaryComparator cmp = ListItemBinaryComparatorFactory.INSTANCE.createBinaryComparator();

                private BinaryHashMap hashMap = new BinaryHashMap(TABLE_SIZE, TABLE_FRAME_SIZE, putHashFunc, getHashFunc, cmp);

                private ArrayBackedValueStorage resultStorage = new ArrayBackedValueStorage();

                private DataOutput out = resultStorage.getDataOutput();

                @Override
                public void evaluate(IFrameTupleReference tuple, IPointable result) throws HyracksDataException {
                    resultStorage.reset();
                    recordBuilder.reset(outRecType);
                    requiredRecordTypeInfo.reset(outRecType);
                    eval0.evaluate(tuple, argPtr0);
                    eval1.evaluate(tuple, argPtr1);
                    // Make sure we get a valid record
                    byte typeTag0 = argPtr0.getByteArray()[argPtr0.getStartOffset()];
                    if (typeTag0 != ATypeTag.SERIALIZED_RECORD_TYPE_TAG) {
                        throw new TypeMismatchException(getIdentifier(), 0, typeTag0, ATypeTag.SERIALIZED_RECORD_TYPE_TAG);
                    }
                    // Make sure we get a valid list
                    byte typeTag1 = argPtr1.getByteArray()[argPtr1.getStartOffset()];
                    if (typeTag1 != ATypeTag.SERIALIZED_ORDEREDLIST_TYPE_TAG) {
                        throw new TypeMismatchException(getIdentifier(), 1, typeTag1, ATypeTag.SERIALIZED_ORDEREDLIST_TYPE_TAG);
                    }
                    vp0.set(argPtr0);
                    vp1.set(argPtr1);
                    ARecordVisitablePointable recordPointable = (ARecordVisitablePointable) vp0;
                    AListVisitablePointable listPointable = (AListVisitablePointable) vp1;
                    // Initialize the hash map, sized for the input record fields plus the fields to be added
                    int tableSize = recordPointable.getFieldNames().size() + listPointable.getItems().size();
                    // Allocate a larger map only when needed; otherwise reuse the existing one to avoid unnecessary object construction
                    if (hashMap == null || tableSize > TABLE_SIZE) {
                        hashMap = new BinaryHashMap(tableSize, TABLE_FRAME_SIZE, putHashFunc, getHashFunc, cmp);
                    } else {
                        hashMap.clear();
                    }
                    addFields(recordPointable, listPointable);
                    recordBuilder.write(out, true);
                    result.set(resultStorage);
                }

                private void addFields(ARecordVisitablePointable inputRecordPointer, AListVisitablePointable listPointable) throws HyracksDataException {
                    List<IVisitablePointable> inputRecordFieldNames = inputRecordPointer.getFieldNames();
                    List<IVisitablePointable> inputRecordFieldValues = inputRecordPointer.getFieldValues();
                    List<IVisitablePointable> inputFields = listPointable.getItems();
                    IVisitablePointable namePointable = null;
                    IVisitablePointable valuePointable = null;
                    int numInputRecordFields = inputRecordFieldNames.size();
                    try {
                        // Add original record without duplicate checking
                        for (int i = 0; i < numInputRecordFields; ++i) {
                            IVisitablePointable fnp = inputRecordFieldNames.get(i);
                            IVisitablePointable fvp = inputRecordFieldValues.get(i);
                            int pos = requiredRecordTypeInfo.getFieldIndex(fnp.getByteArray(), fnp.getStartOffset() + 1, fnp.getLength() - 1);
                            if (pos >= 0) {
                                recordBuilder.addField(pos, fvp);
                            } else {
                                recordBuilder.addField(fnp, fvp);
                            }
                            keyEntry.set(fnp.getByteArray(), fnp.getStartOffset(), fnp.getLength());
                            valEntry.set(fvp.getByteArray(), fvp.getStartOffset(), fvp.getLength());
                            hashMap.put(keyEntry, valEntry);
                        }
                        // Get the fields from a list of records
                        for (int i = 0; i < inputFields.size(); i++) {
                            if (!PointableHelper.sameType(ATypeTag.OBJECT, inputFields.get(i))) {
                                throw new AsterixException("Expected list of record, got " + PointableHelper.getTypeTag(inputFields.get(i)));
                            }
                            List<IVisitablePointable> names = ((ARecordVisitablePointable) inputFields.get(i)).getFieldNames();
                            List<IVisitablePointable> values = ((ARecordVisitablePointable) inputFields.get(i)).getFieldValues();
                            // Get name and value of the field to be added
                            // Loop to account for cases where the user switches the order of the fields
                            IVisitablePointable fieldName;
                            for (int j = 0; j < names.size(); j++) {
                                fieldName = names.get(j);
                                // if fieldName is "field-name" then read the name
                                if (PointableHelper.byteArrayEqual(fieldNamePointable, fieldName)) {
                                    namePointable = values.get(j);
                                } else {
                                    // otherwise the fieldName is "field-value". Thus, read the value
                                    valuePointable = values.get(j);
                                }
                            }
                            if (namePointable == null || valuePointable == null) {
                                throw new InvalidDataFormatException(getIdentifier(), "fields to be added");
                            }
                            // Check that the field being added is a valid field
                            int pos = requiredRecordTypeInfo.getFieldIndex(namePointable.getByteArray(), namePointable.getStartOffset() + 1, namePointable.getLength() - 1);
                            keyEntry.set(namePointable.getByteArray(), namePointable.getStartOffset(), namePointable.getLength());
                            // Check if already in our built record
                            BinaryEntry entry = hashMap.get(keyEntry);
                            if (entry != null) {
                                tempValReference.set(entry.getBuf(), entry.getOffset(), entry.getLength());
                                // If the stored value differs, raise a conflicting duplicate-field error; otherwise ignore the duplicate
                                if (!PointableHelper.byteArrayEqual(valuePointable, tempValReference)) {
                                    throw new RuntimeDataException(ErrorCode.DUPLICATE_FIELD_NAME, getIdentifier());
                                }
                            } else {
                                if (pos > -1) {
                                    recordBuilder.addField(pos, valuePointable);
                                } else {
                                    recordBuilder.addField(namePointable, valuePointable);
                                }
                                valEntry.set(valuePointable.getByteArray(), valuePointable.getStartOffset(), valuePointable.getLength());
                                hashMap.put(keyEntry, valEntry);
                            }
                        }
                    } catch (AsterixException e) {
                        throw new HyracksDataException(e);
                    }
                }
            };
        }
    };
}
Also used : BinaryEntry(org.apache.hyracks.data.std.util.BinaryEntry) DataOutput(java.io.DataOutput) TypeMismatchException(org.apache.asterix.runtime.exceptions.TypeMismatchException) IBinaryComparator(org.apache.hyracks.api.dataflow.value.IBinaryComparator) IPointable(org.apache.hyracks.data.std.api.IPointable) IScalarEvaluator(org.apache.hyracks.algebricks.runtime.base.IScalarEvaluator) InvalidDataFormatException(org.apache.asterix.runtime.exceptions.InvalidDataFormatException) ARecordVisitablePointable(org.apache.asterix.om.pointables.ARecordVisitablePointable) AListVisitablePointable(org.apache.asterix.om.pointables.AListVisitablePointable) AsterixException(org.apache.asterix.common.exceptions.AsterixException) VoidPointable(org.apache.hyracks.data.std.primitive.VoidPointable) PointableAllocator(org.apache.asterix.om.pointables.PointableAllocator) RuntimeDataException(org.apache.asterix.common.exceptions.RuntimeDataException) IBinaryHashFunction(org.apache.hyracks.api.dataflow.value.IBinaryHashFunction) RecordBuilder(org.apache.asterix.builders.RecordBuilder) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException) IScalarEvaluatorFactory(org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory) PointableHelper(org.apache.asterix.runtime.evaluators.functions.PointableHelper) IVisitablePointable(org.apache.asterix.om.pointables.base.IVisitablePointable) ArrayBackedValueStorage(org.apache.hyracks.data.std.util.ArrayBackedValueStorage) IHyracksTaskContext(org.apache.hyracks.api.context.IHyracksTaskContext) IFrameTupleReference(org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference) RuntimeRecordTypeInfo(org.apache.asterix.om.types.runtime.RuntimeRecordTypeInfo) BinaryHashMap(org.apache.asterix.runtime.evaluators.functions.BinaryHashMap)
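
The evaluator's duplicate-field detection rests on a BinaryHashMap keyed by the serialized field name, with the serialized field value as the entry payload. Below is a minimal sketch that isolates just that pattern, using only the calls visible in the snippet above; the field name and value ("id", "42") are invented, and the package locations of the two list-item factory classes are assumptions since they are not listed in the imports shown.

import org.apache.asterix.common.exceptions.AsterixException;
import org.apache.asterix.dataflow.data.nontagged.comparators.ListItemBinaryComparatorFactory;
import org.apache.asterix.dataflow.data.nontagged.hash.ListItemBinaryHashFunctionFactory;
import org.apache.asterix.runtime.evaluators.functions.BinaryHashMap;
import org.apache.asterix.runtime.evaluators.functions.PointableHelper;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
import org.apache.hyracks.data.std.util.BinaryEntry;

public class DuplicateFieldCheckSketch {

    public static void main(String[] args) throws HyracksDataException, AsterixException {
        // Same construction as in the evaluator: default table size, 32k frames,
        // list-item hash functions for put/get, and a list-item comparator.
        BinaryHashMap hashMap = new BinaryHashMap(100, 32768,
                ListItemBinaryHashFunctionFactory.INSTANCE.createBinaryHashFunction(),
                ListItemBinaryHashFunctionFactory.INSTANCE.createBinaryHashFunction(),
                ListItemBinaryComparatorFactory.INSTANCE.createBinaryComparator());

        // Serialize a hypothetical field name and value the same way the evaluator
        // serializes its "field-name"/"field-value" constants.
        PointableHelper helper = new PointableHelper();
        ArrayBackedValueStorage name = new ArrayBackedValueStorage();
        ArrayBackedValueStorage value = new ArrayBackedValueStorage();
        helper.serializeString("id", name, true);
        helper.serializeString("42", value, true);

        BinaryEntry keyEntry = new BinaryEntry();
        BinaryEntry valEntry = new BinaryEntry();
        keyEntry.set(name.getByteArray(), name.getStartOffset(), name.getLength());
        valEntry.set(value.getByteArray(), value.getStartOffset(), value.getLength());
        hashMap.put(keyEntry, valEntry);

        // A second occurrence of the same field name is detected by get(); the evaluator
        // then byte-compares the stored value to decide between ignoring the duplicate
        // and raising DUPLICATE_FIELD_NAME.
        BinaryEntry existing = hashMap.get(keyEntry);
        System.out.println(existing != null
                ? "duplicate field name; stored value length = " + existing.getLength()
                : "new field name");
    }
}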

Example 12 with AsterixException

use of org.apache.asterix.common.exceptions.AsterixException in project asterixdb by apache.

the class ValidateUtil method validateKeyFields.

/**
     * Validates the key fields that will be used as keys of an index.
     *
     * @param recType
     *            the record type
     * @param metaRecType
     *            the meta record type, if the dataset has one
     * @param keyFieldNames
     *            a list of key field names that will be validated
     * @param keySourceIndicators
     *            indicates, for each key, whether it comes from the record or the meta record
     * @param keyFieldTypes
     *            a list of key field types (if provided) that will be validated
     * @param indexType
     *            the type of the index whose key fields are being validated
     * @throws AlgebricksException
     */
public static void validateKeyFields(ARecordType recType, ARecordType metaRecType, List<List<String>> keyFieldNames, List<Integer> keySourceIndicators, List<IAType> keyFieldTypes, IndexType indexType) throws AlgebricksException {
    List<IAType> fieldTypes = KeyFieldTypeUtil.getKeyTypes(recType, metaRecType, keyFieldNames, keySourceIndicators);
    int pos = 0;
    boolean openFieldCompositeIdx = false;
    for (IAType fieldType : fieldTypes) {
        List<String> fieldName = keyFieldNames.get(pos);
        if (fieldType == null) {
            fieldType = keyFieldTypes.get(pos);
            if (keyFieldTypes.get(pos) == BuiltinType.AMISSING) {
                throw new AsterixException("A field with this name  \"" + fieldName + "\" could not be found.");
            }
        } else if (openFieldCompositeIdx) {
            throw new AsterixException("A closed field \"" + fieldName + "\" could be only in a prefix part of the composite index, containing opened field.");
        }
        if (keyFieldTypes.get(pos) != BuiltinType.AMISSING && fieldType.getTypeTag() != keyFieldTypes.get(pos).getTypeTag()) {
            throw new AsterixException("A field \"" + fieldName + "\" is already defined with the type \"" + fieldType + "\"");
        }
        switch(indexType) {
            case BTREE:
                switch(fieldType.getTypeTag()) {
                    case TINYINT:
                    case SMALLINT:
                    case INTEGER:
                    case BIGINT:
                    case FLOAT:
                    case DOUBLE:
                    case STRING:
                    case BINARY:
                    case DATE:
                    case TIME:
                    case DATETIME:
                    case UNION:
                    case UUID:
                    case YEARMONTHDURATION:
                    case DAYTIMEDURATION:
                        break;
                    default:
                        throw new AsterixException("The field \"" + fieldName + "\" which is of type " + fieldType.getTypeTag() + " cannot be indexed using the BTree index.");
                }
                break;
            case RTREE:
                switch(fieldType.getTypeTag()) {
                    case POINT:
                    case LINE:
                    case RECTANGLE:
                    case CIRCLE:
                    case POLYGON:
                    case UNION:
                        break;
                    default:
                        throw new AsterixException("The field \"" + fieldName + "\" which is of type " + fieldType.getTypeTag() + " cannot be indexed using the RTree index.");
                }
                break;
            case LENGTH_PARTITIONED_NGRAM_INVIX:
                switch(fieldType.getTypeTag()) {
                    case STRING:
                    case UNION:
                        break;
                    default:
                        throw new AsterixException("The field \"" + fieldName + "\" which is of type " + fieldType.getTypeTag() + " cannot be indexed using the Length Partitioned N-Gram index.");
                }
                break;
            case LENGTH_PARTITIONED_WORD_INVIX:
                switch(fieldType.getTypeTag()) {
                    case STRING:
                    case MULTISET:
                    case ARRAY:
                    case UNION:
                        break;
                    default:
                        throw new AsterixException("The field \"" + fieldName + "\" which is of type " + fieldType.getTypeTag() + " cannot be indexed using the Length Partitioned Keyword index.");
                }
                break;
            case SINGLE_PARTITION_NGRAM_INVIX:
                switch(fieldType.getTypeTag()) {
                    case STRING:
                    case UNION:
                        break;
                    default:
                        throw new AsterixException("The field \"" + fieldName + "\" which is of type " + fieldType.getTypeTag() + " cannot be indexed using the N-Gram index.");
                }
                break;
            case SINGLE_PARTITION_WORD_INVIX:
                switch(fieldType.getTypeTag()) {
                    case STRING:
                    case MULTISET:
                    case ARRAY:
                    case UNION:
                        break;
                    default:
                        throw new AsterixException("The field \"" + fieldName + "\" which is of type " + fieldType.getTypeTag() + " cannot be indexed using the Keyword index.");
                }
                break;
            default:
                throw new AsterixException("Invalid index type: " + indexType + ".");
        }
        pos++;
    }
}
Also used : AsterixException(org.apache.asterix.common.exceptions.AsterixException) IAType(org.apache.asterix.om.types.IAType)
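
A hedged sketch of how this validation might be called for a simple B-tree index on a single string field. The record type, field names, and the import locations of IndexType and ValidateUtil are assumptions for illustration; AMISSING stands in for "no explicit key type given", mirroring the checks above.

import java.util.Collections;
import java.util.List;

import org.apache.asterix.common.config.DatasetConfig.IndexType;
import org.apache.asterix.metadata.utils.ValidateUtil;
import org.apache.asterix.om.types.ARecordType;
import org.apache.asterix.om.types.BuiltinType;
import org.apache.asterix.om.types.IAType;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;

public class ValidateKeyFieldsSketch {

    public static void main(String[] args) throws AlgebricksException {
        // Hypothetical closed record type with a single string field "id".
        ARecordType recType = new ARecordType("Customer",
                new String[] { "id" }, new IAType[] { BuiltinType.ASTRING }, false);

        // One key: the top-level field "id", taken from the record (not the meta record).
        List<List<String>> keyFieldNames = Collections.singletonList(Collections.singletonList("id"));
        List<Integer> keySourceIndicators = Collections.singletonList(0);
        // AMISSING means no explicit key type was given, so the type comes from the record type.
        List<IAType> keyFieldTypes = Collections.singletonList(BuiltinType.AMISSING);

        // Throws if "id" cannot be found, conflicts with a declared key type,
        // or has a type that a B-tree index cannot handle.
        ValidateUtil.validateKeyFields(recType, null, keyFieldNames, keySourceIndicators,
                keyFieldTypes, IndexType.BTREE);
    }
}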

Example 13 with AsterixException

use of org.apache.asterix.common.exceptions.AsterixException in project asterixdb by apache.

the class ExternalLibraryUtils method uninstallLibrary.

/**
     * Remove the library from metadata completely.
     * TODO Currently, external libraries only include functions and adapters. We need to extend this to include:
     * 1. external data source
     * 2. data parser
     *
     * @param dataverse
     *            the dataverse from which the library is to be removed
     * @param libraryName
     *            the name of the library to remove
     * @return true if the library was found and removed, false otherwise
     * @throws AsterixException
     * @throws RemoteException
     * @throws ACIDException
     */
protected static boolean uninstallLibrary(String dataverse, String libraryName) throws AsterixException, RemoteException, ACIDException {
    MetadataTransactionContext mdTxnCtx = null;
    try {
        // begin transaction
        mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        // make sure dataverse exists
        Dataverse dv = MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dataverse);
        if (dv == null) {
            return false;
        }
        // make sure library exists
        Library library = MetadataManager.INSTANCE.getLibrary(mdTxnCtx, dataverse, libraryName);
        if (library == null) {
            return false;
        }
        // get dataverse functions
        List<Function> functions = MetadataManager.INSTANCE.getDataverseFunctions(mdTxnCtx, dataverse);
        for (Function function : functions) {
            // does function belong to library?
            if (function.getName().startsWith(libraryName + "#")) {
                // drop the function
                MetadataManager.INSTANCE.dropFunction(mdTxnCtx, new FunctionSignature(dataverse, function.getName(), function.getArity()));
            }
        }
        // get the dataverse adapters
        List<DatasourceAdapter> adapters = MetadataManager.INSTANCE.getDataverseAdapters(mdTxnCtx, dataverse);
        for (DatasourceAdapter adapter : adapters) {
            // does the adapter belong to the library?
            if (adapter.getAdapterIdentifier().getName().startsWith(libraryName + "#")) {
                // remove the adapter (note: we do not check whether any feeds still use this adapter)
                MetadataManager.INSTANCE.dropAdapter(mdTxnCtx, dataverse, adapter.getAdapterIdentifier().getName());
            }
        }
        // drop the library itself
        MetadataManager.INSTANCE.dropLibrary(mdTxnCtx, dataverse, libraryName);
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
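        // Any failure rolls back everything done under this metadata transaction before rethrowing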
        MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
        throw new AsterixException(e);
    }
    return true;
}
Also used : Function(org.apache.asterix.metadata.entities.Function) LibraryFunction(org.apache.asterix.external.library.LibraryFunction) DatasourceAdapter(org.apache.asterix.metadata.entities.DatasourceAdapter) AsterixException(org.apache.asterix.common.exceptions.AsterixException) MetadataTransactionContext(org.apache.asterix.metadata.MetadataTransactionContext) Library(org.apache.asterix.metadata.entities.Library) ExternalLibrary(org.apache.asterix.external.library.ExternalLibrary) Dataverse(org.apache.asterix.metadata.entities.Dataverse) FunctionSignature(org.apache.asterix.common.functions.FunctionSignature) AsterixException(org.apache.asterix.common.exceptions.AsterixException) ACIDException(org.apache.asterix.common.exceptions.ACIDException) IOException(java.io.IOException) RemoteException(java.rmi.RemoteException)

Example 14 with AsterixException

use of org.apache.asterix.common.exceptions.AsterixException in project asterixdb by apache.

the class QueryServiceServlet method handleRequest.

private void handleRequest(RequestParameters param, IServletResponse response) throws IOException {
    LOGGER.info(param.toString());
    long elapsedStart = System.nanoTime();
    final StringWriter stringWriter = new StringWriter();
    final PrintWriter resultWriter = new PrintWriter(stringWriter);
    ResultDelivery delivery = parseResultDelivery(param.mode);
    String handleUrl = getHandleUrl(param.host, param.path, delivery);
    SessionOutput sessionOutput = createSessionOutput(param, handleUrl, resultWriter);
    SessionConfig sessionConfig = sessionOutput.config();
    HttpUtil.setContentType(response, HttpUtil.ContentType.APPLICATION_JSON, HttpUtil.Encoding.UTF8);
    HttpResponseStatus status = HttpResponseStatus.OK;
    Stats stats = new Stats();
    long[] execStartEnd = new long[] { -1, -1 };
    resultWriter.print("{\n");
    printRequestId(resultWriter);
    printClientContextID(resultWriter, param);
    printSignature(resultWriter);
    printType(resultWriter, sessionConfig);
    try {
        if (param.statement == null || param.statement.isEmpty()) {
            throw new AsterixException("Empty request, no statement provided");
        }
        String statementsText = param.statement + ";";
        executeStatement(statementsText, sessionOutput, delivery, stats, param, handleUrl, execStartEnd);
        if (ResultDelivery.IMMEDIATE == delivery || ResultDelivery.DEFERRED == delivery) {
            ResultUtil.printStatus(sessionOutput, ResultStatus.SUCCESS);
        }
    } catch (AlgebricksException | TokenMgrError | org.apache.asterix.aqlplus.parser.TokenMgrError pe) {
        GlobalConfig.ASTERIX_LOGGER.log(Level.INFO, pe.getMessage(), pe);
        ResultUtil.printError(resultWriter, pe);
        ResultUtil.printStatus(sessionOutput, ResultStatus.FATAL);
        status = HttpResponseStatus.BAD_REQUEST;
    } catch (HyracksException pe) {
        GlobalConfig.ASTERIX_LOGGER.log(Level.WARNING, pe.getMessage(), pe);
        ResultUtil.printError(resultWriter, pe);
        ResultUtil.printStatus(sessionOutput, ResultStatus.FATAL);
        status = HttpResponseStatus.INTERNAL_SERVER_ERROR;
    } catch (Exception e) {
        GlobalConfig.ASTERIX_LOGGER.log(Level.SEVERE, "Unexpected exception", e);
        ResultUtil.printError(resultWriter, e);
        ResultUtil.printStatus(sessionOutput, ResultStatus.FATAL);
        status = HttpResponseStatus.INTERNAL_SERVER_ERROR;
    } finally {
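        // Normalize the execution timestamps: no recorded start means no end; a start without an end means execution stopped early, so close the interval now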
        if (execStartEnd[0] == -1) {
            execStartEnd[1] = -1;
        } else if (execStartEnd[1] == -1) {
            execStartEnd[1] = System.nanoTime();
        }
    }
    printMetrics(resultWriter, System.nanoTime() - elapsedStart, execStartEnd[1] - execStartEnd[0], stats.getCount(), stats.getSize());
    resultWriter.print("}\n");
    resultWriter.flush();
    String result = stringWriter.toString();
    GlobalConfig.ASTERIX_LOGGER.log(Level.FINE, result);
    response.setStatus(status);
    response.writer().print(result);
    if (response.writer().checkError()) {
        LOGGER.warning("Error flushing output writer");
    }
}
Also used : HttpResponseStatus(io.netty.handler.codec.http.HttpResponseStatus) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) SessionConfig(org.apache.asterix.translator.SessionConfig) TokenMgrError(org.apache.asterix.lang.aql.parser.TokenMgrError) HyracksException(org.apache.hyracks.api.exceptions.HyracksException) ResultDelivery(org.apache.asterix.translator.IStatementExecutor.ResultDelivery) AsterixException(org.apache.asterix.common.exceptions.AsterixException) JsonParseException(com.fasterxml.jackson.core.JsonParseException) AlgebricksException(org.apache.hyracks.algebricks.common.exceptions.AlgebricksException) IOException(java.io.IOException) JsonProcessingException(com.fasterxml.jackson.core.JsonProcessingException) HyracksException(org.apache.hyracks.api.exceptions.HyracksException) JsonMappingException(com.fasterxml.jackson.databind.JsonMappingException) AsterixException(org.apache.asterix.common.exceptions.AsterixException) StringWriter(java.io.StringWriter) SessionOutput(org.apache.asterix.translator.SessionOutput) Stats(org.apache.asterix.translator.IStatementExecutor.Stats) PrintWriter(java.io.PrintWriter)

Example 15 with AsterixException

use of org.apache.asterix.common.exceptions.AsterixException in project asterixdb by apache.

the class RestApiServlet method doHandle.

private void doHandle(IServletResponse response, String query, SessionOutput sessionOutput, ResultDelivery resultDelivery) throws JsonProcessingException {
    try {
        response.setStatus(HttpResponseStatus.OK);
        IHyracksClientConnection hcc = (IHyracksClientConnection) ctx.get(HYRACKS_CONNECTION_ATTR);
        IHyracksDataset hds = (IHyracksDataset) ctx.get(HYRACKS_DATASET_ATTR);
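        // Lazily create the shared HyracksDataset, using double-checked locking on the servlet context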
        if (hds == null) {
            synchronized (ctx) {
                hds = (IHyracksDataset) ctx.get(HYRACKS_DATASET_ATTR);
                if (hds == null) {
                    hds = new HyracksDataset(hcc, appCtx.getCompilerProperties().getFrameSize(), ResultReader.NUM_READERS);
                    ctx.put(HYRACKS_DATASET_ATTR, hds);
                }
            }
        }
        IParser parser = parserFactory.createParser(query);
        List<Statement> aqlStatements = parser.parse();
        validate(aqlStatements);
        MetadataManager.INSTANCE.init();
        IStatementExecutor translator = statementExecutorFactory.create(appCtx, aqlStatements, sessionOutput, compilationProvider, componentProvider);
        translator.compileAndExecute(hcc, hds, resultDelivery, null, new IStatementExecutor.Stats());
    } catch (AsterixException | TokenMgrError | org.apache.asterix.aqlplus.parser.TokenMgrError pe) {
        response.setStatus(HttpResponseStatus.INTERNAL_SERVER_ERROR);
        GlobalConfig.ASTERIX_LOGGER.log(Level.SEVERE, pe.getMessage(), pe);
        String errorMessage = ResultUtil.buildParseExceptionMessage(pe, query);
        ObjectNode errorResp = ResultUtil.getErrorResponse(2, errorMessage, "", ResultUtil.extractFullStackTrace(pe));
        sessionOutput.out().write(new ObjectMapper().writeValueAsString(errorResp));
    } catch (Exception e) {
        GlobalConfig.ASTERIX_LOGGER.log(Level.SEVERE, e.getMessage(), e);
        response.setStatus(HttpResponseStatus.INTERNAL_SERVER_ERROR);
        ResultUtil.apiErrorHandler(sessionOutput.out(), e);
    }
}
Also used : IHyracksClientConnection(org.apache.hyracks.api.client.IHyracksClientConnection) ObjectNode(com.fasterxml.jackson.databind.node.ObjectNode) Statement(org.apache.asterix.lang.common.base.Statement) TokenMgrError(org.apache.asterix.lang.aql.parser.TokenMgrError) AsterixException(org.apache.asterix.common.exceptions.AsterixException) IOException(java.io.IOException) JsonProcessingException(com.fasterxml.jackson.core.JsonProcessingException) IStatementExecutor(org.apache.asterix.translator.IStatementExecutor) AsterixException(org.apache.asterix.common.exceptions.AsterixException) HyracksDataset(org.apache.hyracks.client.dataset.HyracksDataset) IHyracksDataset(org.apache.hyracks.api.dataset.IHyracksDataset) IHyracksDataset(org.apache.hyracks.api.dataset.IHyracksDataset) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) IParser(org.apache.asterix.lang.common.base.IParser)

Aggregations

AsterixException (org.apache.asterix.common.exceptions.AsterixException): 67
IOException (java.io.IOException): 27
HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException): 26
DataOutput (java.io.DataOutput): 15
IPointable (org.apache.hyracks.data.std.api.IPointable): 15
IFrameTupleReference (org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference): 15
TypeMismatchException (org.apache.asterix.runtime.exceptions.TypeMismatchException): 14
IScalarEvaluator (org.apache.hyracks.algebricks.runtime.base.IScalarEvaluator): 14
VoidPointable (org.apache.hyracks.data.std.primitive.VoidPointable): 14
ArrayBackedValueStorage (org.apache.hyracks.data.std.util.ArrayBackedValueStorage): 14
ATypeTag (org.apache.asterix.om.types.ATypeTag): 10
IAType (org.apache.asterix.om.types.IAType): 10
ARecordType (org.apache.asterix.om.types.ARecordType): 9
IHyracksTaskContext (org.apache.hyracks.api.context.IHyracksTaskContext): 9
ISerializerDeserializer (org.apache.hyracks.api.dataflow.value.ISerializerDeserializer): 9
AlgebricksException (org.apache.hyracks.algebricks.common.exceptions.AlgebricksException): 8
IScalarEvaluatorFactory (org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory): 8
List (java.util.List): 7
InputStream (java.io.InputStream): 5
CompilationException (org.apache.asterix.common.exceptions.CompilationException): 5