
Example 1 with FlinkHiveException

Use of org.apache.flink.connectors.hive.FlinkHiveException in project flink by apache.

From class HiveParserUtils, method rexSubQueryIn.

/**
 * Proxy to {@link RexSubQuery#in(RelNode, com.google.common.collect.ImmutableList)}.
 */
public static RexSubQuery rexSubQueryIn(RelNode relNode, Collection<RexNode> rexNodes) {
    // Calcite's ImmutableList may be relocated (shaded) inside the Flink distribution,
    // so pick the matching list class before looking up the method reflectively.
    Class[] argTypes = new Class[] {RelNode.class, useShadedImmutableList ? shadedImmutableListClz : immutableListClz};
    Method method = HiveReflectionUtils.tryGetMethod(RexSubQuery.class, "in", argTypes);
    Preconditions.checkState(method != null, "Cannot get the method to create an IN sub-query");
    try {
        return (RexSubQuery) method.invoke(null, relNode, toImmutableList(rexNodes));
    } catch (IllegalAccessException | InvocationTargetException e) {
        throw new FlinkHiveException("Failed to create RexSubQuery", e);
    }
}
Also used: RelNode(org.apache.calcite.rel.RelNode) FlinkHiveException(org.apache.flink.connectors.hive.FlinkHiveException) Method(java.lang.reflect.Method) RexSubQuery(org.apache.calcite.rex.RexSubQuery) InvocationTargetException(java.lang.reflect.InvocationTargetException)
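
Because the proxy above exists only to cope with shading, the call is much simpler when the unshaded com.google.common.collect.ImmutableList is known to be on the classpath. A minimal sketch under that assumption (RexSubQueryHelper is a hypothetical name, not part of Flink):

import com.google.common.collect.ImmutableList;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.rex.RexSubQuery;

import java.util.Collection;

final class RexSubQueryHelper {
    // Direct call; valid only when Calcite sees the unshaded Guava ImmutableList.
    static RexSubQuery rexSubQueryIn(RelNode relNode, Collection<RexNode> rexNodes) {
        return RexSubQuery.in(relNode, ImmutableList.copyOf(rexNodes));
    }
}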

Example 2 with FlinkHiveException

Use of org.apache.flink.connectors.hive.FlinkHiveException in project flink by apache.

From class HiveParser, method startSessionState.

private void startSessionState(HiveConf hiveConf, CatalogManager catalogManager) {
    final ClassLoader contextCL = Thread.currentThread().getContextClassLoader();
    try {
        HiveParserSessionState sessionState = new HiveParserSessionState(hiveConf, contextCL);
        sessionState.initTxnMgr(hiveConf);
        sessionState.setCurrentDatabase(catalogManager.getCurrentDatabase());
        // some Hive functions need the current timestamp
        setCurrentTimestamp(sessionState);
        SessionState.setCurrentSessionState(sessionState);
    } catch (LockException e) {
        throw new FlinkHiveException("Failed to init SessionState", e);
    } finally {
        // don't let SessionState mess with our context classloader
        Thread.currentThread().setContextClassLoader(contextCL);
    }
}
Also used: LockException(org.apache.hadoop.hive.ql.lockmgr.LockException) FlinkHiveException(org.apache.flink.connectors.hive.FlinkHiveException)
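
The try/finally around the context classloader is a reusable pattern in its own right. A minimal generic sketch, detached from Hive (withContextClassLoaderRestored is a hypothetical helper name):

import java.util.function.Supplier;

final class ClassLoaderUtil {
    // Runs the body, then restores the caller's context classloader,
    // no matter what the body swapped in or whether it threw.
    static <T> T withContextClassLoaderRestored(Supplier<T> body) {
        final ClassLoader original = Thread.currentThread().getContextClassLoader();
        try {
            return body.get();
        } finally {
            Thread.currentThread().setContextClassLoader(original);
        }
    }
}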

Example 3 with FlinkHiveException

Use of org.apache.flink.connectors.hive.FlinkHiveException in project flink by apache.

From class HivePartitionUtils, method getAllPartitions.

/**
 * Returns all HiveTablePartitions of a Hive table, or a single HiveTablePartition if the
 * table is not partitioned.
 */
public static List<HiveTablePartition> getAllPartitions(JobConf jobConf, String hiveVersion, ObjectPath tablePath, List<String> partitionColNames, List<Map<String, String>> remainingPartitions) {
    List<HiveTablePartition> allHivePartitions = new ArrayList<>();
    try (HiveMetastoreClientWrapper client = HiveMetastoreClientFactory.create(HiveConfUtils.create(jobConf), hiveVersion)) {
        String dbName = tablePath.getDatabaseName();
        String tableName = tablePath.getObjectName();
        Table hiveTable = client.getTable(dbName, tableName);
        Properties tableProps = HiveReflectionUtils.getTableMetadata(HiveShimLoader.loadHiveShim(hiveVersion), hiveTable);
        if (partitionColNames != null && partitionColNames.size() > 0) {
            List<Partition> partitions = new ArrayList<>();
            if (remainingPartitions != null) {
                for (Map<String, String> spec : remainingPartitions) {
                    partitions.add(client.getPartition(dbName, tableName, partitionSpecToValues(spec, partitionColNames)));
                }
            } else {
                partitions.addAll(client.listPartitions(dbName, tableName, (short) -1));
            }
            for (Partition partition : partitions) {
                HiveTablePartition hiveTablePartition = toHiveTablePartition(partitionColNames, tableProps, partition);
                allHivePartitions.add(hiveTablePartition);
            }
        } else {
            allHivePartitions.add(new HiveTablePartition(hiveTable.getSd(), tableProps));
        }
    } catch (TException e) {
        throw new FlinkHiveException("Failed to collect all partitions from hive metaStore", e);
    }
    return allHivePartitions;
}
Also used: TException(org.apache.thrift.TException) Partition(org.apache.hadoop.hive.metastore.api.Partition) HiveTablePartition(org.apache.flink.connectors.hive.HiveTablePartition) Table(org.apache.hadoop.hive.metastore.api.Table) HiveMetastoreClientWrapper(org.apache.flink.table.catalog.hive.client.HiveMetastoreClientWrapper) FlinkHiveException(org.apache.flink.connectors.hive.FlinkHiveException) ArrayList(java.util.ArrayList) Properties(java.util.Properties)
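
The helper partitionSpecToValues referenced above is not shown on this page. A plausible sketch of what such a helper does, assuming it simply orders the spec's values by the table's partition-column order (an illustration, not the verified Flink implementation):

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

final class PartitionSpecs {
    // Turns a {column -> value} spec into a value list ordered like the partition columns.
    static List<String> partitionSpecToValues(Map<String, String> spec, List<String> partitionColNames) {
        List<String> values = new ArrayList<>(partitionColNames.size());
        for (String col : partitionColNames) {
            String value = spec.get(col);
            if (value == null) {
                throw new IllegalArgumentException("Partition spec is missing column: " + col);
            }
            values.add(value);
        }
        return values;
    }
}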

Example 4 with FlinkHiveException

Use of org.apache.flink.connectors.hive.FlinkHiveException in project flink by apache.

From class HiveWriterFactory, method createRecordWriter.

/**
 * Creates a {@link RecordWriter} for the given path.
 */
public RecordWriter createRecordWriter(Path path) {
    try {
        checkInitialize();
        JobConf conf = new JobConf(confWrapper.conf());
        if (isCompressed) {
            String codecStr = conf.get(HiveConf.ConfVars.COMPRESSINTERMEDIATECODEC.varname);
            if (!StringUtils.isNullOrWhitespaceOnly(codecStr)) {
                // noinspection unchecked
                Class<? extends CompressionCodec> codec = (Class<? extends CompressionCodec>) Class.forName(codecStr, true, Thread.currentThread().getContextClassLoader());
                FileOutputFormat.setOutputCompressorClass(conf, codec);
            }
            String typeStr = conf.get(HiveConf.ConfVars.COMPRESSINTERMEDIATETYPE.varname);
            if (!StringUtils.isNullOrWhitespaceOnly(typeStr)) {
                SequenceFile.CompressionType style = SequenceFile.CompressionType.valueOf(typeStr);
                SequenceFileOutputFormat.setOutputCompressionType(conf, style);
            }
        }
        return hiveShim.getHiveRecordWriter(conf, hiveOutputFormatClz, recordSerDe.getSerializedClass(), isCompressed, tableProperties, path);
    } catch (Exception e) {
        throw new FlinkHiveException(e);
    }
}
Also used: SequenceFile(org.apache.hadoop.io.SequenceFile) FlinkHiveException(org.apache.flink.connectors.hive.FlinkHiveException) CompressionCodec(org.apache.hadoop.io.compress.CompressionCodec) JobConf(org.apache.hadoop.mapred.JobConf) IOException(java.io.IOException) SerDeException(org.apache.hadoop.hive.serde2.SerDeException)
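
For orientation, the two HiveConf-driven branches above map onto plain Hadoop mapred calls. A minimal sketch with hard-coded choices (GzipCodec and BLOCK are illustrative picks, not what Hive would necessarily resolve at runtime):

import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.SequenceFileOutputFormat;

final class CompressionSetup {
    static JobConf compressedConf() {
        JobConf conf = new JobConf();
        // Mirrors the codec branch of createRecordWriter, with a fixed codec.
        FileOutputFormat.setCompressOutput(conf, true);
        FileOutputFormat.setOutputCompressorClass(conf, GzipCodec.class);
        // Mirrors the compression-type branch, with a fixed type.
        SequenceFileOutputFormat.setOutputCompressionType(conf, SequenceFile.CompressionType.BLOCK);
        return conf;
    }
}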

Example 5 with FlinkHiveException

Use of org.apache.flink.connectors.hive.FlinkHiveException in project flink by apache.

From class HiveShimV230, method init.

private static void init() {
    if (!inited) {
        synchronized (HiveShimV230.class) {
            if (!inited) {
                try {
                    isMaterializedView = org.apache.hadoop.hive.ql.metadata.Table.class.getDeclaredMethod("isMaterializedView");
                    inited = true;
                } catch (Exception e) {
                    throw new FlinkHiveException(e);
                }
            }
        }
    }
}
Also used: Table(org.apache.hadoop.hive.metastore.api.Table) FlinkHiveException(org.apache.flink.connectors.hive.FlinkHiveException) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) UnknownDBException(org.apache.hadoop.hive.metastore.api.UnknownDBException) TException(org.apache.thrift.TException) InvocationTargetException(java.lang.reflect.InvocationTargetException) CatalogException(org.apache.flink.table.catalog.exceptions.CatalogException) InvalidOperationException(org.apache.hadoop.hive.metastore.api.InvalidOperationException)
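
A design note on Example 5: double-checked locking is only reliable when the result is published through a volatile field; with a non-volatile inited flag, a reader outside the lock may observe the flag as true before the write to isMaterializedView becomes visible. A minimal generic sketch of the same lazy reflective lookup with that fix (LazyMethod is a hypothetical name, not Flink's class):

import java.lang.reflect.Method;

import org.apache.flink.connectors.hive.FlinkHiveException;

final class LazyMethod {
    // volatile publishes the fully-resolved Method reference to all threads.
    private static volatile Method isMaterializedView;

    static Method get() {
        Method local = isMaterializedView;
        if (local == null) {
            synchronized (LazyMethod.class) {
                local = isMaterializedView;
                if (local == null) {
                    try {
                        local = org.apache.hadoop.hive.ql.metadata.Table.class
                                .getDeclaredMethod("isMaterializedView");
                        isMaterializedView = local;
                    } catch (Exception e) {
                        throw new FlinkHiveException(e);
                    }
                }
            }
        }
        return local;
    }
}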

Aggregations

FlinkHiveException (org.apache.flink.connectors.hive.FlinkHiveException): 17
InvocationTargetException (java.lang.reflect.InvocationTargetException): 9
Method (java.lang.reflect.Method): 4
Timestamp (java.sql.Timestamp): 3
LocalDate (java.time.LocalDate): 3
TException (org.apache.thrift.TException): 3
IOException (java.io.IOException): 2
LocalDateTime (java.time.LocalDateTime): 2
RelNode (org.apache.calcite.rel.RelNode): 2
RelDataType (org.apache.calcite.rel.type.RelDataType): 2
CatalogException (org.apache.flink.table.catalog.exceptions.CatalogException): 2
InvalidOperationException (org.apache.hadoop.hive.metastore.api.InvalidOperationException): 2
MetaException (org.apache.hadoop.hive.metastore.api.MetaException): 2
Table (org.apache.hadoop.hive.metastore.api.Table): 2
HiveFileFormatUtils (org.apache.hadoop.hive.ql.io.HiveFileFormatUtils): 2
Field (java.lang.reflect.Field): 1
Date (java.sql.Date): 1
Instant (java.time.Instant): 1
ArrayList (java.util.ArrayList): 1
Properties (java.util.Properties): 1