Search in sources:

Example 36 with AsterixException

use of org.apache.asterix.common.exceptions.AsterixException in project asterixdb by apache.

The following example shows the configure method of the class SocketServerInputStreamFactory.

/**
 * Configures this input-stream factory from the adapter configuration map.
 * <p>
 * Parses the optional {@code mode} property (IP or NC) and the mandatory
 * {@code sockets} property (a comma-separated list of {@code host:port} pairs),
 * validating each host against the cluster's node controllers and collecting
 * the resolved (NC name, port) pairs into {@code sockets}.
 *
 * @param serviceCtx    the CC service context, used to look up node controllers
 * @param configuration adapter configuration key/value pairs
 * @throws CompilationException if the sockets property is missing/malformed or
 *                              names a host that is not a known node controller
 * @throws AsterixException     if node-controller lookup or host resolution fails
 */
@Override
public void configure(IServiceContext serviceCtx, Map<String, String> configuration) throws AsterixException, CompilationException {
    try {
        sockets = new ArrayList<>();
        String modeValue = configuration.get(ExternalDataConstants.KEY_MODE);
        if (modeValue != null) {
            // Locale.ROOT makes the uppercasing locale-independent; with the default
            // locale (e.g. Turkish), "ip".toUpperCase() yields "\u0130P" and
            // Mode.valueOf would fail.
            mode = Mode.valueOf(modeValue.trim().toUpperCase(java.util.Locale.ROOT));
        }
        String socketsValue = configuration.get(ExternalDataConstants.KEY_SOCKETS);
        if (socketsValue == null) {
            throw new CompilationException(ErrorCode.FEED_METADATA_SOCKET_ADAPTOR_SOCKET_NOT_PROPERLY_CONFIGURED);
        }
        Map<InetAddress, Set<String>> ncMap;
        ncMap = RuntimeUtils.getNodeControllerMap((ICcApplicationContext) serviceCtx.getApplicationContext());
        List<String> ncs = RuntimeUtils.getAllNodeControllers((ICcApplicationContext) serviceCtx.getApplicationContext());
        String[] socketsArray = socketsValue.split(",");
        Random random = new Random();
        for (String socket : socketsArray) {
            String[] socketTokens = socket.split(":");
            String host = socketTokens[0].trim();
            int port = Integer.parseInt(socketTokens[1].trim());
            Pair<String, Integer> p = null;
            switch(mode) {
                case IP:
                    // Pick one NC at random among those running on the given IP.
                    Set<String> ncsOnIp = ncMap.get(InetAddress.getByName(host));
                    if ((ncsOnIp == null) || ncsOnIp.isEmpty()) {
                        throw new CompilationException(ErrorCode.FEED_METADATA_SOCKET_ADAPTOR_SOCKET_INVALID_HOST_NC, "host", host, StringUtils.join(ncMap.keySet(), ", "));
                    }
                    String[] ncArray = ncsOnIp.toArray(new String[0]);
                    String nc = ncArray[random.nextInt(ncArray.length)];
                    p = new Pair<>(nc, port);
                    break;
                case NC:
                    // Validate the NC name before recording the socket, mirroring the
                    // validate-then-build order of the IP case above.
                    if (!ncs.contains(host)) {
                        throw new CompilationException(ErrorCode.FEED_METADATA_SOCKET_ADAPTOR_SOCKET_INVALID_HOST_NC, "NC", host, StringUtils.join(ncs, ", "));
                    }
                    p = new Pair<>(host, port);
                    break;
            }
            sockets.add(p);
        }
    } catch (CompilationException e) {
        // Already a user-facing configuration error; rethrow unchanged.
        throw e;
    } catch (HyracksDataException | UnknownHostException e) {
        throw new AsterixException(e);
    } catch (Exception e) {
        // Anything else (e.g. NumberFormatException, ArrayIndexOutOfBounds from a
        // malformed "host:port" token) is reported as a configuration problem.
        throw new CompilationException(ErrorCode.FEED_METADATA_SOCKET_ADAPTOR_SOCKET_NOT_PROPERLY_CONFIGURED);
    }
}
Also used : CompilationException(org.apache.asterix.common.exceptions.CompilationException) ICcApplicationContext(org.apache.asterix.common.dataflow.ICcApplicationContext) Set(java.util.Set) UnknownHostException(java.net.UnknownHostException) AlgebricksAbsolutePartitionConstraint(org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException) AsterixException(org.apache.asterix.common.exceptions.AsterixException) CompilationException(org.apache.asterix.common.exceptions.CompilationException) IOException(java.io.IOException) UnknownHostException(java.net.UnknownHostException) AsterixException(org.apache.asterix.common.exceptions.AsterixException) Random(java.util.Random) InetAddress(java.net.InetAddress)

Example 37 with AsterixException

use of org.apache.asterix.common.exceptions.AsterixException in project asterixdb by apache.

The following example shows the evaluate method of the class SimilarityJaccardPrefixEvaluator.

@Override
public void evaluate(IFrameTupleReference tuple, IPointable result) throws HyracksDataException {
    resultStorage.reset();
    // similarity threshold
    sim = 0;
    evalThreshold.evaluate(tuple, inputVal);
    float similarityThreshold = AFloatSerializerDeserializer.getFloat(inputVal.getByteArray(), inputVal.getStartOffset() + 1);
    if (similarityThreshold != similarityThresholdCache || similarityFilters == null) {
        similarityFilters = new SimilarityFiltersJaccard(similarityThreshold);
        similarityThresholdCache = similarityThreshold;
    }
    evalLen1.evaluate(tuple, inputVal);
    int length1 = ATypeHierarchy.getIntegerValue(BuiltinFunctions.SIMILARITY_JACCARD.getName(), 0, inputVal.getByteArray(), inputVal.getStartOffset());
    evalLen2.evaluate(tuple, inputVal);
    int length2 = ATypeHierarchy.getIntegerValue(BuiltinFunctions.SIMILARITY_JACCARD.getName(), 2, inputVal.getByteArray(), inputVal.getStartOffset());
    //
    if (similarityFilters.passLengthFilter(length1, length2)) {
        // -- - tokens1 - --
        int i;
        tokens1.reset();
        evalTokens1.evaluate(tuple, inputVal);
        byte[] serList = inputVal.getByteArray();
        int startOffset = inputVal.getStartOffset();
        if (serList[startOffset] != ATypeTag.SERIALIZED_ORDEREDLIST_TYPE_TAG && serList[startOffset] != ATypeTag.SERIALIZED_UNORDEREDLIST_TYPE_TAG) {
            throw new TypeMismatchException(BuiltinFunctions.SIMILARITY_JACCARD, 1, serList[startOffset], ATypeTag.SERIALIZED_ORDEREDLIST_TYPE_TAG, ATypeTag.SERIALIZED_UNORDEREDLIST_TYPE_TAG);
        }
        int lengthTokens1;
        if (serList[startOffset] == ATypeTag.SERIALIZED_ORDEREDLIST_TYPE_TAG) {
            lengthTokens1 = AOrderedListSerializerDeserializer.getNumberOfItems(inputVal.getByteArray(), startOffset);
            // read tokens
            for (i = 0; i < lengthTokens1; i++) {
                int itemOffset;
                int token;
                try {
                    itemOffset = AOrderedListSerializerDeserializer.getItemOffset(serList, startOffset, i);
                } catch (AsterixException e) {
                    throw new HyracksDataException(e);
                }
                token = ATypeHierarchy.getIntegerValueWithDifferentTypeTagPosition(BuiltinFunctions.SIMILARITY_JACCARD.getName(), 1, serList, itemOffset, startOffset + 1);
                tokens1.add(token);
            }
        } else {
            lengthTokens1 = AUnorderedListSerializerDeserializer.getNumberOfItems(inputVal.getByteArray(), startOffset);
            // read tokens
            for (i = 0; i < lengthTokens1; i++) {
                int itemOffset;
                int token;
                try {
                    itemOffset = AUnorderedListSerializerDeserializer.getItemOffset(serList, startOffset, i);
                } catch (AsterixException e) {
                    throw new HyracksDataException(e);
                }
                token = ATypeHierarchy.getIntegerValueWithDifferentTypeTagPosition(BuiltinFunctions.SIMILARITY_JACCARD.getName(), 1, serList, itemOffset, startOffset + 1);
                tokens1.add(token);
            }
        }
        // pad tokens
        for (; i < length1; i++) {
            tokens1.add(Integer.MAX_VALUE);
        }
        // -- - tokens2 - --
        tokens2.reset();
        evalTokens2.evaluate(tuple, inputVal);
        serList = inputVal.getByteArray();
        startOffset = inputVal.getStartOffset();
        if (serList[startOffset] != ATypeTag.SERIALIZED_ORDEREDLIST_TYPE_TAG && serList[startOffset] != ATypeTag.SERIALIZED_UNORDEREDLIST_TYPE_TAG) {
            throw new TypeMismatchException(BuiltinFunctions.SIMILARITY_JACCARD, 3, serList[startOffset], ATypeTag.SERIALIZED_ORDEREDLIST_TYPE_TAG, ATypeTag.SERIALIZED_UNORDEREDLIST_TYPE_TAG);
        }
        int lengthTokens2;
        if (serList[startOffset] == ATypeTag.SERIALIZED_ORDEREDLIST_TYPE_TAG) {
            lengthTokens2 = AOrderedListSerializerDeserializer.getNumberOfItems(inputVal.getByteArray(), startOffset);
            // read tokens
            for (i = 0; i < lengthTokens2; i++) {
                int itemOffset;
                int token;
                try {
                    itemOffset = AOrderedListSerializerDeserializer.getItemOffset(serList, startOffset, i);
                } catch (AsterixException e) {
                    throw new HyracksDataException(e);
                }
                token = ATypeHierarchy.getIntegerValueWithDifferentTypeTagPosition(BuiltinFunctions.SIMILARITY_JACCARD.getName(), 3, serList, itemOffset, startOffset + 1);
                tokens2.add(token);
            }
        } else {
            lengthTokens2 = AUnorderedListSerializerDeserializer.getNumberOfItems(inputVal.getByteArray(), startOffset);
            // read tokens
            for (i = 0; i < lengthTokens2; i++) {
                int itemOffset;
                int token;
                try {
                    itemOffset = AUnorderedListSerializerDeserializer.getItemOffset(serList, startOffset, i);
                } catch (AsterixException e) {
                    throw new HyracksDataException(e);
                }
                token = ATypeHierarchy.getIntegerValueWithDifferentTypeTagPosition(BuiltinFunctions.SIMILARITY_JACCARD.getName(), 3, serList, itemOffset, startOffset + 1);
                tokens2.add(token);
            }
        }
        // pad tokens
        for (; i < length2; i++) {
            tokens2.add(Integer.MAX_VALUE);
        }
        // -- - token prefix - --
        evalTokenPrefix.evaluate(tuple, inputVal);
        int tokenPrefix = ATypeHierarchy.getIntegerValue(BuiltinFunctions.SIMILARITY_JACCARD.getName(), 4, inputVal.getByteArray(), inputVal.getStartOffset());
        //
        // -- - position filter - --
        //
        SimilarityMetric.getPartialIntersectSize(tokens1.get(), 0, tokens1.length(), tokens2.get(), 0, tokens2.length(), tokenPrefix, parInter);
        if (similarityFilters.passPositionFilter(parInter.intersectSize, parInter.posXStop, length1, parInter.posYStop, length2)) {
            //
            if (similarityFilters.passSuffixFilter(tokens1.get(), 0, tokens1.length(), parInter.posXStart, tokens2.get(), 0, tokens2.length(), parInter.posYStart)) {
                sim = similarityFilters.passSimilarityFilter(tokens1.get(), 0, tokens1.length(), parInter.posXStop + 1, tokens2.get(), 0, tokens2.length(), parInter.posYStop + 1, parInter.intersectSize);
            }
        }
    }
    try {
        writeResult();
    } catch (IOException e) {
        throw new HyracksDataException(e);
    }
    result.set(resultStorage);
}
Also used : AsterixException(org.apache.asterix.common.exceptions.AsterixException) SimilarityFiltersJaccard(org.apache.asterix.fuzzyjoin.similarity.SimilarityFiltersJaccard) TypeMismatchException(org.apache.asterix.runtime.exceptions.TypeMismatchException) IOException(java.io.IOException) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException)

Example 38 with AsterixException

use of org.apache.asterix.common.exceptions.AsterixException in project asterixdb by apache.

The following example shows the next method of the class AbstractAsterixListIterator.

@Override
public void next() throws HyracksDataException {
    try {
        pos = nextPos;
        ++count;
        nextPos = startOff + listLength;
        if (count + 1 < numberOfItems) {
            nextPos = getItemOffset(data, startOff, count + 1);
        }
        itemLen = nextPos - pos;
    } catch (AsterixException e) {
        throw new HyracksDataException(e);
    }
}
Also used : AsterixException(org.apache.asterix.common.exceptions.AsterixException) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException)

Example 39 with AsterixException

use of org.apache.asterix.common.exceptions.AsterixException in project asterixdb by apache.

The following example shows the validateOperation method of the class AbstractLangTranslator.

/**
 * Validates that the given statement may run in the current cluster state and
 * does not modify or drop the protected Metadata dataverse.
 * <p>
 * First waits (bounded by the configured max wait cycles) for the cluster to
 * become ACTIVE and for global recovery to complete; then rejects INSERT,
 * DELETE, dataverse/dataset DROP targeting the Metadata dataverse, and dataset
 * declarations with invalid hints.
 *
 * @param appCtx           CC application context (supplies wait configuration)
 * @param defaultDataverse dataverse used when the statement names none; may be null
 * @param stmt             the statement to validate
 * @throws AsterixException if the cluster is unusable/unrecovered or the
 *                          operation is not permitted
 */
public void validateOperation(ICcApplicationContext appCtx, Dataverse defaultDataverse, Statement stmt) throws AsterixException {
    if (!(ClusterStateManager.INSTANCE.getState().equals(ClusterState.ACTIVE) && ClusterStateManager.INSTANCE.isGlobalRecoveryCompleted())) {
        int maxWaitCycles = appCtx.getExternalProperties().getMaxWaitClusterActive();
        try {
            ClusterStateManager.INSTANCE.waitForState(ClusterState.ACTIVE, maxWaitCycles, TimeUnit.SECONDS);
        } catch (HyracksDataException e) {
            throw new AsterixException(e);
        } catch (InterruptedException e) {
            if (LOGGER.isLoggable(Level.WARNING)) {
                LOGGER.warning("Thread interrupted while waiting for cluster to be " + ClusterState.ACTIVE);
            }
            // Preserve the interrupt status for callers higher up the stack.
            Thread.currentThread().interrupt();
        }
        if (!ClusterStateManager.INSTANCE.getState().equals(ClusterState.ACTIVE)) {
            throw new AsterixException("Cluster is in " + ClusterState.UNUSABLE + " state." + "\n One or more Node Controllers have left or haven't joined yet.\n");
        } else {
            if (LOGGER.isLoggable(Level.INFO)) {
                LOGGER.info("Cluster is now " + ClusterState.ACTIVE);
            }
        }
    }
    if (ClusterStateManager.INSTANCE.getState().equals(ClusterState.UNUSABLE)) {
        throw new AsterixException("Cluster is in " + ClusterState.UNUSABLE + " state." + "\n One or more Node Controllers have left.\n");
    }
    if (!ClusterStateManager.INSTANCE.isGlobalRecoveryCompleted()) {
        int maxWaitCycles = appCtx.getExternalProperties().getMaxWaitClusterActive();
        int waitCycleCount = 0;
        try {
            // Poll once per second, up to maxWaitCycles seconds, for recovery to finish.
            while (!ClusterStateManager.INSTANCE.isGlobalRecoveryCompleted() && waitCycleCount < maxWaitCycles) {
                Thread.sleep(1000);
                waitCycleCount++;
            }
        } catch (InterruptedException e) {
            if (LOGGER.isLoggable(Level.WARNING)) {
                LOGGER.warning("Thread interrupted while waiting for cluster to complete global recovery ");
            }
            Thread.currentThread().interrupt();
        }
        if (!ClusterStateManager.INSTANCE.isGlobalRecoveryCompleted()) {
            throw new AsterixException("Cluster Global recovery is not yet complete and the system is in " + ClusterState.ACTIVE + " state");
        }
    }
    boolean invalidOperation = false;
    String message = null;
    String dataverse = defaultDataverse != null ? defaultDataverse.getDataverseName() : null;
    switch(stmt.getKind()) {
        case Statement.Kind.INSERT:
            InsertStatement insertStmt = (InsertStatement) stmt;
            if (insertStmt.getDataverseName() != null) {
                dataverse = insertStmt.getDataverseName().getValue();
            }
            invalidOperation = MetadataConstants.METADATA_DATAVERSE_NAME.equals(dataverse);
            if (invalidOperation) {
                message = "Insert operation is not permitted in dataverse " + MetadataConstants.METADATA_DATAVERSE_NAME;
            }
            break;
        case Statement.Kind.DELETE:
            DeleteStatement deleteStmt = (DeleteStatement) stmt;
            if (deleteStmt.getDataverseName() != null) {
                dataverse = deleteStmt.getDataverseName().getValue();
            }
            invalidOperation = MetadataConstants.METADATA_DATAVERSE_NAME.equals(dataverse);
            if (invalidOperation) {
                message = "Delete operation is not permitted in dataverse " + MetadataConstants.METADATA_DATAVERSE_NAME;
            }
            break;
        case Statement.Kind.DATAVERSE_DROP:
            DataverseDropStatement dvDropStmt = (DataverseDropStatement) stmt;
            invalidOperation = MetadataConstants.METADATA_DATAVERSE_NAME.equals(dvDropStmt.getDataverseName().getValue());
            if (invalidOperation) {
                message = "Cannot drop dataverse:" + dvDropStmt.getDataverseName().getValue();
            }
            break;
        case Statement.Kind.DATASET_DROP:
            DropDatasetStatement dropStmt = (DropDatasetStatement) stmt;
            if (dropStmt.getDataverseName() != null) {
                dataverse = dropStmt.getDataverseName().getValue();
            }
            invalidOperation = MetadataConstants.METADATA_DATAVERSE_NAME.equals(dataverse);
            if (invalidOperation) {
                message = "Cannot drop a dataset belonging to the dataverse:" + MetadataConstants.METADATA_DATAVERSE_NAME;
            }
            break;
        case Statement.Kind.DATASET_DECL:
            DatasetDecl datasetStmt = (DatasetDecl) stmt;
            Map<String, String> hints = datasetStmt.getHints();
            if (hints != null && !hints.isEmpty()) {
                // StringBuilder: this is single-threaded, so the synchronized
                // StringBuffer is unnecessary.
                StringBuilder errorMsgBuffer = new StringBuilder();
                for (Entry<String, String> hint : hints.entrySet()) {
                    Pair<Boolean, String> validationResult = DatasetHints.validate(appCtx, hint.getKey(), hint.getValue());
                    if (!validationResult.first) {
                        errorMsgBuffer.append("Dataset: ").append(datasetStmt.getName().getValue()).append(" error in processing hint: ").append(hint.getKey()).append(" ").append(validationResult.second);
                        errorMsgBuffer.append(" \n");
                    }
                }
                invalidOperation = errorMsgBuffer.length() > 0;
                if (invalidOperation) {
                    message = errorMsgBuffer.toString();
                }
            }
            break;
        default:
            break;
    }
    if (invalidOperation) {
        throw new AsterixException("Invalid operation - " + message);
    }
}
Also used : DropDatasetStatement(org.apache.asterix.lang.common.statement.DropDatasetStatement) DeleteStatement(org.apache.asterix.lang.common.statement.DeleteStatement) HyracksDataException(org.apache.hyracks.api.exceptions.HyracksDataException) InsertStatement(org.apache.asterix.lang.common.statement.InsertStatement) DataverseDropStatement(org.apache.asterix.lang.common.statement.DataverseDropStatement) DatasetDecl(org.apache.asterix.lang.common.statement.DatasetDecl) AsterixException(org.apache.asterix.common.exceptions.AsterixException)

Example 40 with AsterixException

use of org.apache.asterix.common.exceptions.AsterixException in project asterixdb by apache.

The following example shows the post method of the class ApiServlet.

@Override
protected void post(IServletRequest request, IServletResponse response) {
    // Query language
    ILangCompilationProvider compilationProvider = "AQL".equals(request.getParameter("query-language")) ? aqlCompilationProvider : sqlppCompilationProvider;
    IParserFactory parserFactory = compilationProvider.getParserFactory();
    // Output format.
    PrintWriter out = response.writer();
    OutputFormat format;
    boolean csvAndHeader = false;
    String output = request.getParameter("output-format");
    try {
        format = OutputFormat.valueOf(output);
    } catch (IllegalArgumentException e) {
        LOGGER.log(Level.INFO, output + ": unsupported output-format, using " + OutputFormat.CLEAN_JSON + " instead", e);
        // Default output format
        format = OutputFormat.CLEAN_JSON;
    }
    String query = request.getParameter("query");
    String wrapperArray = request.getParameter("wrapper-array");
    String printExprParam = request.getParameter("print-expr-tree");
    String printRewrittenExprParam = request.getParameter("print-rewritten-expr-tree");
    String printLogicalPlanParam = request.getParameter("print-logical-plan");
    String printOptimizedLogicalPlanParam = request.getParameter("print-optimized-logical-plan");
    String printJob = request.getParameter("print-job");
    String executeQuery = request.getParameter("execute-query");
    try {
        response.setStatus(HttpResponseStatus.OK);
        HttpUtil.setContentType(response, ContentType.TEXT_HTML, Encoding.UTF8);
    } catch (IOException e) {
        LOGGER.log(Level.WARNING, "Failure setting content type", e);
        response.setStatus(HttpResponseStatus.INTERNAL_SERVER_ERROR);
        return;
    }
    try {
        IHyracksClientConnection hcc = (IHyracksClientConnection) ctx.get(HYRACKS_CONNECTION_ATTR);
        IHyracksDataset hds = (IHyracksDataset) ctx.get(HYRACKS_DATASET_ATTR);
        if (hds == null) {
            synchronized (ctx) {
                hds = (IHyracksDataset) ctx.get(HYRACKS_DATASET_ATTR);
                if (hds == null) {
                    hds = new HyracksDataset(hcc, appCtx.getCompilerProperties().getFrameSize(), ResultReader.NUM_READERS);
                    ctx.put(HYRACKS_DATASET_ATTR, hds);
                }
            }
        }
        IParser parser = parserFactory.createParser(query);
        List<Statement> aqlStatements = parser.parse();
        SessionConfig sessionConfig = new SessionConfig(format, true, isSet(executeQuery), true);
        sessionConfig.set(SessionConfig.FORMAT_HTML, true);
        sessionConfig.set(SessionConfig.FORMAT_CSV_HEADER, csvAndHeader);
        sessionConfig.set(SessionConfig.FORMAT_WRAPPER_ARRAY, isSet(wrapperArray));
        sessionConfig.setOOBData(isSet(printExprParam), isSet(printRewrittenExprParam), isSet(printLogicalPlanParam), isSet(printOptimizedLogicalPlanParam), isSet(printJob));
        SessionOutput sessionOutput = new SessionOutput(sessionConfig, out);
        MetadataManager.INSTANCE.init();
        IStatementExecutor translator = statementExectorFactory.create(appCtx, aqlStatements, sessionOutput, compilationProvider, componentProvider);
        double duration;
        long startTime = System.currentTimeMillis();
        translator.compileAndExecute(hcc, hds, IStatementExecutor.ResultDelivery.IMMEDIATE, null, new IStatementExecutor.Stats());
        long endTime = System.currentTimeMillis();
        duration = (endTime - startTime) / 1000.00;
        out.println(HTML_STATEMENT_SEPARATOR);
        out.println("<PRE>Duration of all jobs: " + duration + " sec</PRE>");
    } catch (AsterixException | TokenMgrError | org.apache.asterix.aqlplus.parser.TokenMgrError pe) {
        GlobalConfig.ASTERIX_LOGGER.log(Level.INFO, pe.toString(), pe);
        ResultUtil.webUIParseExceptionHandler(out, pe, query);
    } catch (Exception e) {
        GlobalConfig.ASTERIX_LOGGER.log(Level.SEVERE, e.getMessage(), e);
        ResultUtil.webUIErrorHandler(out, e);
    }
}
Also used : IHyracksClientConnection(org.apache.hyracks.api.client.IHyracksClientConnection) SessionConfig(org.apache.asterix.translator.SessionConfig) AsterixException(org.apache.asterix.common.exceptions.AsterixException) ILangCompilationProvider(org.apache.asterix.compiler.provider.ILangCompilationProvider) IHyracksDataset(org.apache.hyracks.api.dataset.IHyracksDataset) IParserFactory(org.apache.asterix.lang.common.base.IParserFactory) PrintWriter(java.io.PrintWriter) Statement(org.apache.asterix.lang.common.base.Statement) OutputFormat(org.apache.asterix.translator.SessionConfig.OutputFormat) TokenMgrError(org.apache.asterix.lang.aql.parser.TokenMgrError) IOException(java.io.IOException) AsterixException(org.apache.asterix.common.exceptions.AsterixException) IOException(java.io.IOException) IStatementExecutor(org.apache.asterix.translator.IStatementExecutor) HyracksDataset(org.apache.hyracks.client.dataset.HyracksDataset) IHyracksDataset(org.apache.hyracks.api.dataset.IHyracksDataset) SessionOutput(org.apache.asterix.translator.SessionOutput) IParser(org.apache.asterix.lang.common.base.IParser)

Aggregations

AsterixException (org.apache.asterix.common.exceptions.AsterixException)67 IOException (java.io.IOException)27 HyracksDataException (org.apache.hyracks.api.exceptions.HyracksDataException)26 DataOutput (java.io.DataOutput)15 IPointable (org.apache.hyracks.data.std.api.IPointable)15 IFrameTupleReference (org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference)15 TypeMismatchException (org.apache.asterix.runtime.exceptions.TypeMismatchException)14 IScalarEvaluator (org.apache.hyracks.algebricks.runtime.base.IScalarEvaluator)14 VoidPointable (org.apache.hyracks.data.std.primitive.VoidPointable)14 ArrayBackedValueStorage (org.apache.hyracks.data.std.util.ArrayBackedValueStorage)14 ATypeTag (org.apache.asterix.om.types.ATypeTag)10 IAType (org.apache.asterix.om.types.IAType)10 ARecordType (org.apache.asterix.om.types.ARecordType)9 IHyracksTaskContext (org.apache.hyracks.api.context.IHyracksTaskContext)9 ISerializerDeserializer (org.apache.hyracks.api.dataflow.value.ISerializerDeserializer)9 AlgebricksException (org.apache.hyracks.algebricks.common.exceptions.AlgebricksException)8 IScalarEvaluatorFactory (org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory)8 List (java.util.List)7 InputStream (java.io.InputStream)5 CompilationException (org.apache.asterix.common.exceptions.CompilationException)5