
Example 1 with IllegalCacheConfigurationException

Use of org.apache.hadoop.hive.llap.IllegalCacheConfigurationException in project hive by apache.

From the class OrcEncodedDataReader, the method getOrcTailForPath:

/**
 * Looks up the metadata (tail) for the given ORC file in the LLAP cache, reading it from the
 * file system and caching it on a cache miss.
 * @param path the ORC file path
 * @param jobConf the job configuration
 * @param tag the cache tag to use when the footer is inserted into the cache
 * @param daemonConf the LLAP daemon configuration, used when determining the file id
 * @param metadataCache the LLAP metadata cache
 * @param fileKey the file id of the ORC file; determined from the path if null
 * @return the OrcTail of the file
 * @throws IOException on read failure
 */
public static OrcTail getOrcTailForPath(Path path, Configuration jobConf, CacheTag tag, Configuration daemonConf, MetadataCache metadataCache, Object fileKey) throws IOException {
    Supplier<FileSystem> fsSupplier = getFsSupplier(path, jobConf);
    if (fileKey == null) {
        fileKey = determineFileId(fsSupplier, path, daemonConf);
    }
    if (fileKey == null || metadataCache == null) {
        throw new IllegalCacheConfigurationException("LLAP metadata cache not available for path " + path.toString());
    }
    LlapBufferOrBuffers tailBuffers = metadataCache.getFileMetadata(fileKey);
    try {
        // Cache hit
        if (tailBuffers != null) {
            return getOrcTailFromLlapBuffers(tailBuffers);
        }
        // Cache miss
        throwIfCacheOnlyRead(HiveConf.getBoolVar(jobConf, ConfVars.LLAP_IO_CACHE_ONLY));
        ReaderOptions opts = EncodedOrcFile.readerOptions(jobConf).filesystem(fsSupplier);
        Reader reader = EncodedOrcFile.createReader(path, opts);
        ByteBuffer tailBufferBb = reader.getSerializedFileFooter();
        tailBuffers = metadataCache.putFileMetadata(fileKey, tailBufferBb, tag, new AtomicBoolean(false));
        return getOrcTailFromLlapBuffers(tailBuffers);
    } finally {
        // By this point the buffers have been locked, either during the cache lookup or the cache insert above.
        if (tailBuffers != null) {
            metadataCache.decRefBuffer(tailBuffers);
        }
    }
}
Also used : ReaderOptions(org.apache.hadoop.hive.ql.io.orc.OrcFile.ReaderOptions) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) FileSystem(org.apache.hadoop.fs.FileSystem) Reader(org.apache.hadoop.hive.ql.io.orc.encoded.Reader) LlapDataReader(org.apache.hadoop.hive.ql.io.orc.encoded.LlapDataReader) EncodedReader(org.apache.hadoop.hive.ql.io.orc.encoded.EncodedReader) IllegalCacheConfigurationException(org.apache.hadoop.hive.llap.IllegalCacheConfigurationException) LlapBufferOrBuffers(org.apache.hadoop.hive.llap.io.metadata.MetadataCache.LlapBufferOrBuffers) ByteBuffer(java.nio.ByteBuffer)
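
A minimal caller sketch for the method above. Only the signatures visible in this example are taken from the page; the import path of OrcEncodedDataReader, the CacheTag package and its build factory, and the example class itself are assumptions made for illustration.

// Illustrative caller: resolve an ORC file tail through the LLAP metadata cache.
// Package names marked "assumed" are not confirmed by this page.
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
// assumed package for CacheTag
import org.apache.hadoop.hive.common.io.CacheTag;
import org.apache.hadoop.hive.llap.IllegalCacheConfigurationException;
// assumed package for OrcEncodedDataReader
import org.apache.hadoop.hive.llap.io.encoded.OrcEncodedDataReader;
import org.apache.hadoop.hive.llap.io.metadata.MetadataCache;
import org.apache.orc.impl.OrcTail;

public class OrcTailLookupExample {

    static OrcTail lookupTail(Path path, Configuration jobConf, Configuration daemonConf,
                              MetadataCache metadataCache) throws IOException {
        // CacheTag.build(...) is assumed here; any valid CacheTag for the table works.
        CacheTag tag = CacheTag.build("example_db.example_table");
        try {
            // Passing a null fileKey lets getOrcTailForPath determine the file id itself.
            return OrcEncodedDataReader.getOrcTailForPath(
                    path, jobConf, tag, daemonConf, metadataCache, null);
        } catch (IllegalCacheConfigurationException e) {
            // Thrown when no metadata cache (or no usable file key) is available for the path.
            throw new IOException("LLAP metadata cache unavailable for " + path, e);
        }
    }
}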

Example 2 with IllegalCacheConfigurationException

Use of org.apache.hadoop.hive.llap.IllegalCacheConfigurationException in project hive by apache.

From the class VectorizedOrcAcidRowBatchReader, the method getOrcReaderData:

/**
 * Gets the OrcTail from cache if LLAP IO is enabled, otherwise creates the reader to get the tail.
 * Always store the Reader along with the Tail as part of ReaderData so we can reuse it.
 * @param path The Orc file path we want to get the OrcTail for
 * @param conf The Configuration to access LLAP
 * @param cacheTag The cacheTag needed to get OrcTail from LLAP IO cache
 * @param fileKey fileId of the Orc file (either the Long fileId of HDFS or the SyntheticFileId).
 *                Optional, if it is not provided, it will be generated, see:
 *                {@link org.apache.hadoop.hive.ql.io.HdfsUtils#getFileId()}
 * @return ReaderData object where the orcTail is not null. Reader can be null, but if we had to create
 * one we return that as well for further reuse.
 */
private static ReaderData getOrcReaderData(Path path, Configuration conf, CacheTag cacheTag, Object fileKey) throws IOException {
    ReaderData readerData = new ReaderData();
    if (shouldReadDeleteDeltasWithLlap(conf, true)) {
        try {
            readerData.orcTail = LlapProxy.getIo().getOrcTailFromCache(path, conf, cacheTag, fileKey);
            readerData.reader = OrcFile.createReader(path, OrcFile.readerOptions(conf).orcTail(readerData.orcTail));
        } catch (IllegalCacheConfigurationException icce) {
            throw new IOException("LLAP cache is not configured properly while delete delta caching is turned on", icce);
        }
        // The tail (and a reader reusing it) was obtained through LLAP; return it directly.
        return readerData;
    }
    // LLAP IO not used here: open a plain reader and build the tail from its serialized footer.
    readerData.reader = OrcFile.createReader(path, OrcFile.readerOptions(conf));
    readerData.orcTail = new OrcTail(readerData.reader.getFileTail(), readerData.reader.getSerializedFileFooter());
    return readerData;
}
Also used : IllegalCacheConfigurationException(org.apache.hadoop.hive.llap.IllegalCacheConfigurationException) IOException(java.io.IOException) OrcTail(org.apache.orc.impl.OrcTail)
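
As a condensed illustration of the same pattern outside this private method, the sketch below tries the LLAP cache first and falls back to a plain reader. The getOrcTailFromCache and OrcFile calls are the ones used in the example; LlapProxy's import path, the guard on getIo(), and the class itself are assumptions.

// Illustrative cache-first tail lookup; "assumed" marks anything not shown on this page.
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
// assumed package for CacheTag
import org.apache.hadoop.hive.common.io.CacheTag;
import org.apache.hadoop.hive.llap.IllegalCacheConfigurationException;
// assumed package for LlapProxy
import org.apache.hadoop.hive.llap.io.api.LlapProxy;
import org.apache.hadoop.hive.ql.io.orc.OrcFile;
import org.apache.hadoop.hive.ql.io.orc.Reader;
import org.apache.orc.impl.OrcTail;

public class CacheFirstReaderExample {

    static Reader openReader(Path path, Configuration conf, CacheTag cacheTag, Object fileKey)
            throws IOException {
        // Guarding on a non-null LlapProxy.getIo() is an assumption about how LLAP IO availability is checked.
        if (LlapProxy.getIo() != null) {
            try {
                // Fetch the tail from the LLAP cache and hand it to the reader so the
                // footer is not read again from the file system.
                OrcTail tail = LlapProxy.getIo().getOrcTailFromCache(path, conf, cacheTag, fileKey);
                return OrcFile.createReader(path, OrcFile.readerOptions(conf).orcTail(tail));
            } catch (IllegalCacheConfigurationException icce) {
                // Same translation as in the example above: report a cache misconfiguration as an I/O failure.
                throw new IOException("LLAP cache is not configured properly", icce);
            }
        }
        // No LLAP IO: open a plain reader; the footer is read directly from the file.
        return OrcFile.createReader(path, OrcFile.readerOptions(conf));
    }
}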

Aggregations

IllegalCacheConfigurationException (org.apache.hadoop.hive.llap.IllegalCacheConfigurationException): 2
IOException (java.io.IOException): 1
ByteBuffer (java.nio.ByteBuffer): 1
AtomicBoolean (java.util.concurrent.atomic.AtomicBoolean): 1
FileSystem (org.apache.hadoop.fs.FileSystem): 1
LlapBufferOrBuffers (org.apache.hadoop.hive.llap.io.metadata.MetadataCache.LlapBufferOrBuffers): 1
ReaderOptions (org.apache.hadoop.hive.ql.io.orc.OrcFile.ReaderOptions): 1
EncodedReader (org.apache.hadoop.hive.ql.io.orc.encoded.EncodedReader): 1
LlapDataReader (org.apache.hadoop.hive.ql.io.orc.encoded.LlapDataReader): 1
Reader (org.apache.hadoop.hive.ql.io.orc.encoded.Reader): 1
OrcTail (org.apache.orc.impl.OrcTail): 1