
Example 51 with HiveException

use of org.apache.hadoop.hive.ql.metadata.HiveException in project hive by apache.

the class Utilities method createEmptyBuckets.

/**
   * Check the existence of buckets according to bucket specification. Create empty buckets if
   * needed.
   *
   * @param hconf the Hadoop configuration used to build the JobConf and resolve the output format
   * @param paths the paths at which to create the empty bucket files
   * @param conf The definition of the FileSink.
   * @param reporter The mapreduce reporter object
   * @throws HiveException
   * @throws IOException
   */
private static void createEmptyBuckets(Configuration hconf, List<Path> paths, FileSinkDesc conf, Reporter reporter) throws HiveException, IOException {
    // Both the production path (hconf is already a JobConf) and the test code
    // path construct the JobConf the same way, so the branch collapses to one statement.
    JobConf jc = new JobConf(hconf);
    HiveOutputFormat<?, ?> hiveOutputFormat = null;
    Class<? extends Writable> outputClass = null;
    boolean isCompressed = conf.getCompressed();
    TableDesc tableInfo = conf.getTableInfo();
    try {
        Serializer serializer = (Serializer) tableInfo.getDeserializerClass().newInstance();
        serializer.initialize(null, tableInfo.getProperties());
        outputClass = serializer.getSerializedClass();
        hiveOutputFormat = HiveFileFormatUtils.getHiveOutputFormat(hconf, conf.getTableInfo());
    } catch (SerDeException | InstantiationException | IllegalAccessException e) {
        throw new HiveException(e);
    }
    for (Path path : paths) {
        RecordWriter writer = HiveFileFormatUtils.getRecordWriter(jc, hiveOutputFormat, outputClass, isCompressed, tableInfo.getProperties(), path, reporter);
        writer.close(false);
        LOG.info("created empty bucket for enforcing bucketing at " + path);
    }
}
Also used : Path(org.apache.hadoop.fs.Path) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) RecordWriter(org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter) TableDesc(org.apache.hadoop.hive.ql.plan.TableDesc) JobConf(org.apache.hadoop.mapred.JobConf) SerDeException(org.apache.hadoop.hive.serde2.SerDeException) Serializer(org.apache.hadoop.hive.serde2.Serializer)
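The catch block above collapses serde and reflection failures into the single checked HiveException that Hive operators propagate. A minimal standalone sketch of that wrap-and-rethrow pattern (SerializerFactory and instantiate are illustrative names, not Hive API):

import org.apache.hadoop.hive.ql.metadata.HiveException;

public final class SerializerFactory {
    private SerializerFactory() {
    }

    public static <T> T instantiate(Class<T> clazz) throws HiveException {
        try {
            return clazz.getDeclaredConstructor().newInstance();
        } catch (ReflectiveOperationException e) {
            // InstantiationException, IllegalAccessException, etc. all surface
            // to the caller uniformly as HiveException, as in createEmptyBuckets.
            throw new HiveException(e);
        }
    }
}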

Example 52 with HiveException

use of org.apache.hadoop.hive.ql.metadata.HiveException in project hive by apache.

the class ScriptOperator method close.

@Override
public void close(boolean abort) throws HiveException {
    boolean new_abort = abort;
    if (!abort) {
        if (scriptError != null) {
            throw new HiveException(ErrorMsg.SCRIPT_GENERIC_ERROR.getErrorCodedMsg(), scriptError);
        }
        // everything ok. try normal shutdown
        try {
            try {
                if (scriptOutWriter != null) {
                    scriptOutWriter.close();
                }
            } catch (IOException e) {
                if (isBrokenPipeException(e) && allowPartialConsumption()) {
                    LOG.warn("Got broken pipe: ignoring exception");
                } else {
                    if (isBrokenPipeException(e)) {
                        displayBrokenPipeInfo();
                    }
                    throw e;
                }
            }
            int exitVal = 0;
            if (scriptPid != null) {
                exitVal = scriptPid.waitFor();
            }
            if (exitVal != 0) {
                LOG.error("Script failed with code " + exitVal);
                new_abort = true;
            }
        } catch (IOException e) {
            // Log the failure with its stack trace instead of printing to stderr.
            LOG.error("Got IOException: " + e.getMessage(), e);
            new_abort = true;
        } catch (InterruptedException e) {
            // Shutdown is best effort from here; fall through to the thread cleanup below.
        }
    } else {
        // An error has already occurred; still try to get the
        // exit code of the child process if possible.
        try {
            // Interrupt the current thread after 1 second
            final Thread mythread = Thread.currentThread();
            Timer timer = new Timer(true);
            timer.schedule(new TimerTask() {

                @Override
                public void run() {
                    mythread.interrupt();
                }
            }, 1000);
            // Wait for the child process to finish
            int exitVal = 0;
            if (scriptPid != null) {
                exitVal = scriptPid.waitFor();
            }
            // Cancel the timer
            timer.cancel();
            // Output the exit code
            LOG.error("Script exited with code " + exitVal);
        } catch (InterruptedException e) {
            // The timer fired and interrupted the wait; the child has not exited.
            LOG.error("Script has not exited yet. It will be killed.");
        }
    }
    // Best effort: join the reader threads and destroy the child process.
    try {
        if (outThread != null) {
            outThread.join(0);
        }
    } catch (Exception e) {
        LOG.warn("Exception in closing outThread: " + StringUtils.stringifyException(e));
    }
    try {
        if (errThread != null) {
            errThread.join(0);
        }
    } catch (Exception e) {
        LOG.warn("Exception in closing errThread: " + StringUtils.stringifyException(e));
    }
    try {
        if (scriptPid != null) {
            scriptPid.destroy();
        }
    } catch (Exception e) {
        LOG.warn("Exception in destroying scriptPid: " + StringUtils.stringifyException(e));
    }
    super.close(new_abort);
    if (new_abort && !abort) {
        throw new HiveException(ErrorMsg.SCRIPT_CLOSING_ERROR.getErrorCodedMsg());
    }
}
Also used : HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) Timer(java.util.Timer) TimerTask(java.util.TimerTask) IOException(java.io.IOException) SerDeException(org.apache.hadoop.hive.serde2.SerDeException)
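The abort branch bounds the blocking waitFor() by scheduling a daemon Timer that interrupts the waiting thread after one second. A standalone sketch of that bounded-wait pattern, assuming an already-started Process (BoundedWait and waitForAtMost are illustrative names):

import java.util.Timer;
import java.util.TimerTask;

public final class BoundedWait {
    private BoundedWait() {
    }

    // Wait for the process to exit, but give up after timeoutMillis by
    // interrupting the current thread from a daemon Timer, as close() does.
    // Returns the exit code, or null if the wait timed out.
    public static Integer waitForAtMost(Process process, long timeoutMillis) {
        final Thread waiter = Thread.currentThread();
        Timer timer = new Timer(true);
        timer.schedule(new TimerTask() {
            @Override
            public void run() {
                waiter.interrupt();
            }
        }, timeoutMillis);
        try {
            return process.waitFor();
        } catch (InterruptedException e) {
            // The timer fired first: the process is still running.
            return null;
        } finally {
            timer.cancel();
            Thread.interrupted(); // clear a pending interrupt if the timer lost the race
        }
    }
}

Since Java 8, Process.waitFor(long, TimeUnit) expresses the same bound directly; the timer-interrupt idiom predates it and still works where only the throwing waitFor() is available.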

Example 53 with HiveException

use of org.apache.hadoop.hive.ql.metadata.HiveException in project hive by apache.

the class RCFileMergeOperator method closeOp.

@Override
public void closeOp(boolean abort) throws HiveException {
    // close writer
    if (outWriter == null) {
        return;
    }
    try {
        outWriter.close();
    } catch (IOException e) {
        throw new HiveException("Unable to close RCFileMergeOperator", e);
    }
    outWriter = null;
    super.closeOp(abort);
}
Also used : HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) IOException(java.io.IOException)
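Setting outWriter to null before delegating to super.closeOp makes the close idempotent: a repeated call returns at the null check instead of closing the writer twice. The same guard in isolation (IdempotentClose is an illustrative name):

import java.io.Closeable;
import java.io.IOException;

public class IdempotentClose {
    private Closeable writer;

    public IdempotentClose(Closeable writer) {
        this.writer = writer;
    }

    // Only the first successful call reaches close(); later calls are no-ops.
    public void close() throws IOException {
        if (writer == null) {
            return;
        }
        writer.close();
        // Mark closed, as closeOp does, so a later call cannot double-close.
        writer = null;
    }
}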

Example 54 with HiveException

use of org.apache.hadoop.hive.ql.metadata.HiveException in project hive by apache.

the class ObjectCache method retrieve.

@SuppressWarnings("unchecked")
@Override
public <T> T retrieve(String key, Callable<T> fn) throws HiveException {
    T value;
    try {
        value = (T) registry.get(key);
        if (value == null) {
            value = fn.call();
            LOG.info("Caching key: " + key);
            registry.cacheForVertex(key, value);
        } else {
            LOG.info("Found " + key + " in cache with value: " + value);
        }
    } catch (Exception e) {
        throw new HiveException(e);
    }
    return value;
}
Also used : HiveException(org.apache.hadoop.hive.ql.metadata.HiveException)
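retrieve is a get-or-compute memoizer: a cache miss runs the Callable and stores the result, and any loader failure is rethrown as HiveException. A standalone sketch of the same shape over a ConcurrentHashMap (the static REGISTRY is an illustrative stand-in for Tez's per-vertex cache):

import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

import org.apache.hadoop.hive.ql.metadata.HiveException;

public final class GetOrCompute {
    private static final ConcurrentMap<String, Object> REGISTRY = new ConcurrentHashMap<>();

    private GetOrCompute() {
    }

    @SuppressWarnings("unchecked")
    public static <T> T retrieve(String key, Callable<T> fn) throws HiveException {
        Object value = REGISTRY.get(key);
        if (value == null) {
            try {
                value = fn.call();
            } catch (Exception e) {
                // Normalize loader failures to the single checked type callers expect.
                throw new HiveException(e);
            }
            // Like the Hive code, this is check-then-act: two threads may race
            // to compute the same key, and the last write wins.
            REGISTRY.put(key, value);
        }
        return (T) value;
    }
}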

Example 55 with HiveException

use of org.apache.hadoop.hive.ql.metadata.HiveException in project hive by apache.

the class SparkReduceRecordHandler method processVectors.

/**
   * Processes the values for one reduce key as one or more vectorized batches.
   *
   * @param values the iterator over the serialized values for the current key
   * @param tag identifies which reduce-side input the values belong to
   * @return true if it is not done and can take more inputs
   */
private <E> boolean processVectors(Iterator<E> values, byte tag) throws HiveException {
    VectorizedRowBatch batch = batches[tag];
    batch.reset();
    buffer.reset();
    /* deserialize key into columns */
    VectorizedBatchUtil.addRowToBatchFrom(keyObject, keyStructInspector, 0, 0, batch, buffer);
    for (int i = 0; i < keysColumnOffset; i++) {
        VectorizedBatchUtil.setRepeatingColumn(batch, i);
    }
    int rowIdx = 0;
    try {
        while (values.hasNext()) {
            /* deserialize value into columns */
            BytesWritable valueWritable = (BytesWritable) values.next();
            Object valueObj = deserializeValue(valueWritable, tag);
            VectorizedBatchUtil.addRowToBatchFrom(valueObj, valueStructInspectors[tag], rowIdx, keysColumnOffset, batch, buffer);
            rowIdx++;
            if (rowIdx >= BATCH_SIZE) {
                VectorizedBatchUtil.setBatchSize(batch, rowIdx);
                reducer.process(batch, tag);
                rowIdx = 0;
                if (isLogInfoEnabled) {
                    logMemoryInfo();
                }
            }
        }
        if (rowIdx > 0) {
            VectorizedBatchUtil.setBatchSize(batch, rowIdx);
            reducer.process(batch, tag);
        }
        if (isLogInfoEnabled) {
            logMemoryInfo();
        }
    } catch (Exception e) {
        String rowString = null;
        try {
            rowString = batch.toString();
        } catch (Exception e2) {
            rowString = "[Error getting row data with exception " + StringUtils.stringifyException(e2) + " ]";
        }
        throw new HiveException("Error while processing vector batch (tag=" + tag + ") " + rowString, e);
    }
    // give me more
    return true;
}
Also used : VectorizedRowBatch(org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) BytesWritable(org.apache.hadoop.io.BytesWritable) IOException(java.io.IOException) SerDeException(org.apache.hadoop.hive.serde2.SerDeException)
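The loop fills a batch, hands it to reducer.process every BATCH_SIZE rows, then flushes the remainder once the iterator is exhausted. The same flush-on-threshold skeleton without the Hive vector types (Batcher, flush, and MAX_ROWS are illustrative names):

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

public class Batcher<E> {
    private static final int MAX_ROWS = 1024; // illustrative; the Hive code uses BATCH_SIZE

    private final List<E> batch = new ArrayList<>(MAX_ROWS);

    // Mirrors the loop in processVectors: fill, flush when full, flush the tail.
    public void processAll(Iterator<E> values) {
        while (values.hasNext()) {
            batch.add(values.next());
            if (batch.size() >= MAX_ROWS) {
                flush();
            }
        }
        if (!batch.isEmpty()) {
            flush(); // final partial batch, like the rowIdx > 0 check above
        }
    }

    protected void flush() {
        // Downstream processing goes here (reducer.process in the Hive code).
        batch.clear();
    }
}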

Aggregations

HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 451
IOException (java.io.IOException): 172
ArrayList (java.util.ArrayList): 81
Path (org.apache.hadoop.fs.Path): 68
Table (org.apache.hadoop.hive.ql.metadata.Table): 65
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException): 46
SerDeException (org.apache.hadoop.hive.serde2.SerDeException): 45
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector): 45
MetaException (org.apache.hadoop.hive.metastore.api.MetaException): 42
Partition (org.apache.hadoop.hive.ql.metadata.Partition): 39
FileSystem (org.apache.hadoop.fs.FileSystem): 31
ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc): 29
LinkedHashMap (java.util.LinkedHashMap): 28
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema): 28
InvalidTableException (org.apache.hadoop.hive.ql.metadata.InvalidTableException): 28
FileNotFoundException (java.io.FileNotFoundException): 27
URISyntaxException (java.net.URISyntaxException): 27
HashMap (java.util.HashMap): 26
InvalidOperationException (org.apache.hadoop.hive.metastore.api.InvalidOperationException): 23
StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector): 23