Search in sources :

Example 61 with SchemaChangeException

use of org.apache.drill.exec.exception.SchemaChangeException in project drill by apache.

The following example shows the setup method of the SingleBatchSorterTemplate class.

/**
 * Binds this sorter to the selection vector it will reorder and runs the
 * generated setup logic against the incoming batch.
 *
 * @param vector2  selection vector holding the row indirection to sort; must not be null
 * @param incoming batch whose vectors the generated comparison code reads
 * @throws SchemaChangeException if the generated code rejects the incoming schema
 */
@Override
public void setup(FragmentExecContext context, SelectionVector2 vector2, VectorAccessible incoming) throws SchemaChangeException {
    // checkNotNull returns its argument, so validate and assign in one step.
    this.vector2 = Preconditions.checkNotNull(vector2);
    try {
        doSetup(context, incoming, null);
    } catch (IllegalStateException e) {
        // Generated code signals an incompatible batch layout this way;
        // surface it to callers as a schema-change failure.
        throw new SchemaChangeException(e);
    }
}
Also used : SchemaChangeException(org.apache.drill.exec.exception.SchemaChangeException)

Example 62 with SchemaChangeException

use of org.apache.drill.exec.exception.SchemaChangeException in project drill by apache.

The following example shows the setup method of the PojoRecordReader class.

/**
 * Builds one {@link PojoWriter} per non-static declared field of the POJO
 * class, initializes each against the output mutator, and prepares the
 * iterator over the POJO instances to read.
 *
 * @throws ExecutionSetupException if a field has an unsupported type or if
 *         schema setup fails
 */
@Override
public void setup(OperatorContext context, OutputMutator output) throws ExecutionSetupException {
    operatorContext = context;
    try {
        List<PojoWriter> pojoWriters = Lists.newArrayList();
        for (Field field : pojoClass.getDeclaredFields()) {
            // Static fields do not belong to a row; skip them.
            if (Modifier.isStatic(field.getModifiers())) {
                continue;
            }
            PojoWriter writer = writerFor(field, output);
            pojoWriters.add(writer);
            writer.init(output);
        }
        this.writers = pojoWriters.toArray(new PojoWriter[pojoWriters.size()]);
    } catch (SchemaChangeException e) {
        throw new ExecutionSetupException("Failure while setting up schema for PojoRecordReader.", e);
    }
    currentIterator = pojoObjects.iterator();
}

/**
 * Chooses the vector writer matching the Java type of a single POJO field.
 *
 * @throws ExecutionSetupException if the field's type has no writer mapping
 */
private PojoWriter writerFor(Field f, OutputMutator output) throws ExecutionSetupException {
    Class<?> type = f.getType();
    if (type == int.class) {
        return new IntWriter(f);
    }
    if (type == Integer.class) {
        return new NIntWriter(f);
    }
    if (type == Long.class) {
        return new NBigIntWriter(f);
    }
    if (type == Boolean.class) {
        return new NBooleanWriter(f);
    }
    if (type == double.class) {
        return new DoubleWriter(f);
    }
    if (type == Double.class) {
        return new NDoubleWriter(f);
    }
    if (type.isEnum()) {
        return new EnumWriter(f, output.getManagedBuffer());
    }
    if (type == boolean.class) {
        return new BitWriter(f);
    }
    if (type == long.class) {
        return new LongWriter(f);
    }
    if (type == String.class) {
        return new StringWriter(f, output.getManagedBuffer());
    }
    if (type == Timestamp.class) {
        return new NTimeStampWriter(f);
    }
    throw new ExecutionSetupException(String.format("PojoRecord reader doesn't yet support conversions from type [%s].", type));
}
Also used : BitWriter(org.apache.drill.exec.store.pojo.Writers.BitWriter) ExecutionSetupException(org.apache.drill.common.exceptions.ExecutionSetupException) EnumWriter(org.apache.drill.exec.store.pojo.Writers.EnumWriter) LongWriter(org.apache.drill.exec.store.pojo.Writers.LongWriter) IntWriter(org.apache.drill.exec.store.pojo.Writers.IntWriter) NIntWriter(org.apache.drill.exec.store.pojo.Writers.NIntWriter) NBigIntWriter(org.apache.drill.exec.store.pojo.Writers.NBigIntWriter) NBigIntWriter(org.apache.drill.exec.store.pojo.Writers.NBigIntWriter) NTimeStampWriter(org.apache.drill.exec.store.pojo.Writers.NTimeStampWriter) Field(java.lang.reflect.Field) NIntWriter(org.apache.drill.exec.store.pojo.Writers.NIntWriter) SchemaChangeException(org.apache.drill.exec.exception.SchemaChangeException) StringWriter(org.apache.drill.exec.store.pojo.Writers.StringWriter) NDoubleWriter(org.apache.drill.exec.store.pojo.Writers.NDoubleWriter) NDoubleWriter(org.apache.drill.exec.store.pojo.Writers.NDoubleWriter) DoubleWriter(org.apache.drill.exec.store.pojo.Writers.DoubleWriter) NBooleanWriter(org.apache.drill.exec.store.pojo.Writers.NBooleanWriter)

Example 63 with SchemaChangeException

use of org.apache.drill.exec.exception.SchemaChangeException in project drill by apache.

The following example shows the nextRowInternally method of the DrillCursor class.

/**
 * Advances the cursor to the next row, pulling and loading the next data
 * batch from the results listener when the current batch is exhausted.
 * <p>
 *   Is to be called (once) from {@link #loadInitialSchema} for
 *   {@link DrillResultSetImpl#execute()}, and then (repeatedly) from
 *   {@link #next()} for {@link AvaticaResultSet#next()}.
 * </p>
 *
 * @return  whether cursor is positioned at a row (false when after end of
 *   results)
 * @throws SQLException if the query failed, the wait for data was
 *   interrupted, or an unexpected error occurred while loading a batch
 */
private boolean nextRowInternally() throws SQLException {
    if (currentRecordNumber + 1 < currentBatchHolder.getRecordCount()) {
        // Have next row in current batch--just advance index and report "at a row."
        currentRecordNumber++;
        return true;
    } else {
        try {
            QueryDataBatch qrb = resultsListener.getNext();
            // the (initial) schema but no rows)).
            if (afterFirstBatch) {
                while (qrb != null && (qrb.getHeader().getRowCount() == 0 || qrb.getData() == null)) {
                    // Empty message--dispose of and try to get another.
                    logger.warn("Spurious batch read: {}", qrb);
                    qrb.release();
                    qrb = resultsListener.getNext();
                }
            }
            afterFirstBatch = true;
            if (qrb == null) {
                // End of batches--clean up, set state to done, report after last row.
                // (We load it so we clear it.)
                currentBatchHolder.clear();
                afterLastRow = true;
                return false;
            } else {
                // Got next (or first) batch--reset record offset to beginning;
                // assimilate schema if changed; set up return value for first call
                // to next().
                currentRecordNumber = 0;
                final boolean schemaChanged;
                try {
                    schemaChanged = currentBatchHolder.load(qrb.getHeader().getDef(), qrb.getData());
                } finally {
                    qrb.release();
                }
                schema = currentBatchHolder.getSchema();
                if (schemaChanged) {
                    updateColumns();
                }
                if (returnTrueForNextCallToNext && currentBatchHolder.getRecordCount() == 0) {
                    returnTrueForNextCallToNext = false;
                }
                return true;
            }
        } catch (UserException e) {
            // error type is accessible, of course. :-( )
            throw new SQLException(e.getMessage(), e);
        } catch (InterruptedException e) {
            // Restore the interrupt status so callers up the stack can observe it;
            // but JDBC client certainly could.
            Thread.currentThread().interrupt();
            throw new SQLException("Interrupted.", e);
        } catch (SchemaChangeException e) {
            // throws SchemaChangeException, so check/clean catch clause.
            // Propagate the cause so the original stack trace is not lost.
            throw new SQLException("Unexpected SchemaChangeException from RecordBatchLoader.load(...)", e);
        } catch (RuntimeException e) {
            throw new SQLException("Unexpected RuntimeException: " + e.toString(), e);
        }
    }
}
Also used : SchemaChangeException(org.apache.drill.exec.exception.SchemaChangeException) QueryDataBatch(org.apache.drill.exec.rpc.user.QueryDataBatch) SQLException(java.sql.SQLException) UserException(org.apache.drill.common.exceptions.UserException)

Example 64 with SchemaChangeException

use of org.apache.drill.exec.exception.SchemaChangeException in project drill by apache.

The following example shows the rowSet method of the QueryBuilder class.

/**
 * Run the query and return the first result set as a
 * {@link DirectRowSet} object that can be inspected directly
 * by the code using a {@link RowSetReader}.
 * <p>
 * An enhancement is to provide a way to read a series of result
 * batches as row sets.
 * @return a row set that represents the first batch returned from
 * the query, or null if the query produced no non-empty batch
 * @throws RpcException if anything goes wrong
 */
public DirectRowSet rowSet() throws RpcException {
    // Ignore all but the first non-empty batch.
    QueryDataBatch dataBatch = null;
    for (QueryDataBatch batch : results()) {
        if (dataBatch == null && batch.getHeader().getRowCount() != 0) {
            dataBatch = batch;
        } else {
            batch.release();
        }
    }
    if (dataBatch == null) {
        return null;
    }
    // Unload the batch and convert to a row set.
    final RecordBatchLoader loader = new RecordBatchLoader(client.allocator());
    try {
        loader.load(dataBatch.getHeader().getDef(), dataBatch.getData());
        VectorContainer container = loader.getContainer();
        container.setRecordCount(loader.getRecordCount());
        return new DirectRowSet(client.allocator(), container);
    } catch (SchemaChangeException e) {
        throw new IllegalStateException(e);
    } finally {
        // Release in finally so the batch's direct memory is not leaked
        // when load() throws; load() has already copied what it needs.
        dataBatch.release();
    }
}
Also used : SchemaChangeException(org.apache.drill.exec.exception.SchemaChangeException) QueryDataBatch(org.apache.drill.exec.rpc.user.QueryDataBatch) RecordBatchLoader(org.apache.drill.exec.record.RecordBatchLoader) DirectRowSet(org.apache.drill.test.rowSet.DirectRowSet) VectorContainer(org.apache.drill.exec.record.VectorContainer)

Example 65 with SchemaChangeException

use of org.apache.drill.exec.exception.SchemaChangeException in project drill by apache.

The following example shows the addField method of the TestOutputMutator class.

/**
 * Creates a new value vector for the given field, verifies it is of the
 * requested vector class, and registers it with this mutator.
 *
 * @param field the materialized field describing the vector to create
 * @param clazz the vector class the caller expects back
 * @return the newly created vector, typed as {@code clazz}
 * @throws SchemaChangeException if the created vector is not assignable to
 *         the requested class
 */
@Override
public <T extends ValueVector> T addField(MaterializedField field, Class<T> clazz) throws SchemaChangeException {
    ValueVector v = TypeHelper.getNewVector(field, allocator);
    if (!clazz.isAssignableFrom(v.getClass())) {
        throw new SchemaChangeException(String.format("The class that was provided %s does not correspond to the expected vector type of %s.", clazz.getSimpleName(), v.getClass().getSimpleName()));
    }
    addField(v);
    // The isAssignableFrom check above guarantees this cast succeeds;
    // Class.cast is checked, avoiding an unchecked-cast warning.
    return clazz.cast(v);
}
Also used : ValueVector(org.apache.drill.exec.vector.ValueVector) SchemaChangeException(org.apache.drill.exec.exception.SchemaChangeException)

Aggregations

SchemaChangeException (org.apache.drill.exec.exception.SchemaChangeException)66 IOException (java.io.IOException)23 MaterializedField (org.apache.drill.exec.record.MaterializedField)20 ErrorCollector (org.apache.drill.common.expression.ErrorCollector)18 ErrorCollectorImpl (org.apache.drill.common.expression.ErrorCollectorImpl)18 LogicalExpression (org.apache.drill.common.expression.LogicalExpression)18 ValueVector (org.apache.drill.exec.vector.ValueVector)18 ClassTransformationException (org.apache.drill.exec.exception.ClassTransformationException)16 TransferPair (org.apache.drill.exec.record.TransferPair)9 HoldingContainer (org.apache.drill.exec.expr.ClassGenerator.HoldingContainer)8 TypedFieldId (org.apache.drill.exec.record.TypedFieldId)8 ExecutionSetupException (org.apache.drill.common.exceptions.ExecutionSetupException)7 Ordering (org.apache.drill.common.logical.data.Order.Ordering)7 JConditional (com.sun.codemodel.JConditional)6 NamedExpression (org.apache.drill.common.logical.data.NamedExpression)6 ValueVectorWriteExpression (org.apache.drill.exec.expr.ValueVectorWriteExpression)6 RecordBatchLoader (org.apache.drill.exec.record.RecordBatchLoader)6 VectorContainer (org.apache.drill.exec.record.VectorContainer)6 SchemaPath (org.apache.drill.common.expression.SchemaPath)5 RecordBatchData (org.apache.drill.exec.physical.impl.sort.RecordBatchData)5