
Example 1 with SQLPrimaryKey

Use of org.apache.hadoop.hive.metastore.api.SQLPrimaryKey in the Apache Hive project.

From the class GetColumnsOperation, the method runInternal, which builds the JDBC GetColumns result set and uses the table's SQLPrimaryKey columns to fill in the NULLABLE and IS_NULLABLE fields.

@Override
public void runInternal() throws HiveSQLException {
    setState(OperationState.RUNNING);
    try {
        IMetaStoreClient metastoreClient = getParentSession().getMetaStoreClient();
        String schemaPattern = convertSchemaPattern(schemaName);
        String tablePattern = convertIdentifierPattern(tableName, true);
        Pattern columnPattern = null;
        if (columnName != null) {
            columnPattern = Pattern.compile(convertIdentifierPattern(columnName, false));
        }
        List<String> dbNames = metastoreClient.getDatabases(schemaPattern);
        Collections.sort(dbNames);
        Map<String, List<String>> db2Tabs = new HashMap<>();
        for (String dbName : dbNames) {
            List<String> tableNames = metastoreClient.getTables(dbName, tablePattern);
            Collections.sort(tableNames);
            db2Tabs.put(dbName, tableNames);
        }
        if (isAuthV2Enabled()) {
            List<HivePrivilegeObject> privObjs = getPrivObjs(db2Tabs);
            String cmdStr = "catalog : " + catalogName + ", schemaPattern : " + schemaName + ", tablePattern : " + tableName;
            authorizeMetaGets(HiveOperationType.GET_COLUMNS, privObjs, cmdStr);
        }
        int maxBatchSize = SessionState.get().getConf().getIntVar(ConfVars.METASTORE_BATCH_RETRIEVE_MAX);
        for (Entry<String, List<String>> dbTabs : db2Tabs.entrySet()) {
            String dbName = dbTabs.getKey();
            List<String> tableNames = dbTabs.getValue();
            for (Table table : new TableIterable(metastoreClient, dbName, tableNames, maxBatchSize)) {
                TableSchema schema = new TableSchema(metastoreClient.getSchema(dbName, table.getTableName()));
                List<SQLPrimaryKey> primaryKeys = metastoreClient.getPrimaryKeys(new PrimaryKeysRequest(dbName, table.getTableName()));
                Set<String> pkColNames = new HashSet<>();
                for (SQLPrimaryKey key : primaryKeys) {
                    pkColNames.add(key.getColumn_name().toLowerCase());
                }
                for (ColumnDescriptor column : schema.getColumnDescriptors()) {
                    if (columnPattern != null && !columnPattern.matcher(column.getName()).matches()) {
                        continue;
                    }
                    Object[] rowData = new Object[] {
                        null,                                           // TABLE_CAT
                        table.getDbName(),                              // TABLE_SCHEM
                        table.getTableName(),                           // TABLE_NAME
                        column.getName(),                               // COLUMN_NAME
                        column.getType().toJavaSQLType(),               // DATA_TYPE
                        column.getTypeName(),                           // TYPE_NAME
                        column.getTypeDescriptor().getColumnSize(),     // COLUMN_SIZE
                        null,                                           // BUFFER_LENGTH, unused
                        column.getTypeDescriptor().getDecimalDigits(),  // DECIMAL_DIGITS
                        column.getType().getNumPrecRadix(),             // NUM_PREC_RADIX
                        pkColNames.contains(column.getName().toLowerCase())
                            ? DatabaseMetaData.columnNoNulls
                            : DatabaseMetaData.columnNullable,          // NULLABLE
                        column.getComment(),                            // REMARKS
                        null,                                           // COLUMN_DEF
                        null,                                           // SQL_DATA_TYPE
                        null,                                           // SQL_DATETIME_SUB
                        null,                                           // CHAR_OCTET_LENGTH
                        column.getOrdinalPosition(),                    // ORDINAL_POSITION
                        pkColNames.contains(column.getName().toLowerCase()) ? "NO" : "YES", // IS_NULLABLE
                        null,                                           // SCOPE_CATALOG
                        null,                                           // SCOPE_SCHEMA
                        null,                                           // SCOPE_TABLE
                        null,                                           // SOURCE_DATA_TYPE
                        "NO" };                                         // IS_AUTO_INCREMENT
                    rowSet.addRow(rowData);
                }
            }
        }
        setState(OperationState.FINISHED);
    } catch (Exception e) {
        setState(OperationState.ERROR);
        throw new HiveSQLException(e);
    }
}
Also used: Pattern(java.util.regex.Pattern) SQLPrimaryKey(org.apache.hadoop.hive.metastore.api.SQLPrimaryKey) Table(org.apache.hadoop.hive.metastore.api.Table) TableSchema(org.apache.hive.service.cli.TableSchema) HashMap(java.util.HashMap) ColumnDescriptor(org.apache.hive.service.cli.ColumnDescriptor) HiveSQLException(org.apache.hive.service.cli.HiveSQLException) HivePrivilegeObject(org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject) TableIterable(org.apache.hadoop.hive.ql.metadata.TableIterable) IMetaStoreClient(org.apache.hadoop.hive.metastore.IMetaStoreClient) PrimaryKeysRequest(org.apache.hadoop.hive.metastore.api.PrimaryKeysRequest) ArrayList(java.util.ArrayList) List(java.util.List) HashSet(java.util.HashSet)
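
For context, this operation backs the standard JDBC metadata call: a client that invokes DatabaseMetaData.getColumns against HiveServer2 is served by GetColumnsOperation, and primary-key columns are reported as non-nullable. Below is a minimal client-side sketch, assuming a hypothetical HiveServer2 endpoint at jdbc:hive2://localhost:10000 and the Hive JDBC driver on the classpath.

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.DriverManager;
import java.sql.ResultSet;

public class GetColumnsClient {
    public static void main(String[] args) throws Exception {
        // Hypothetical endpoint and credentials; adjust for your cluster.
        try (Connection conn = DriverManager.getConnection(
                "jdbc:hive2://localhost:10000/default", "hive", "")) {
            DatabaseMetaData meta = conn.getMetaData();
            // SQL wildcard patterns; runInternal converts them to the
            // metastore's own pattern syntax before querying.
            try (ResultSet rs = meta.getColumns(null, "default", "%", "%")) {
                while (rs.next()) {
                    System.out.printf("%s.%s.%s nullable=%s%n",
                            rs.getString("TABLE_SCHEM"),
                            rs.getString("TABLE_NAME"),
                            rs.getString("COLUMN_NAME"),
                            rs.getString("IS_NULLABLE"));
                }
            }
        }
    }
}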

Example 2 with SQLPrimaryKey

Use of org.apache.hadoop.hive.metastore.api.SQLPrimaryKey in the Apache Hive project.

From the class TestObjectStore, the static helper dropAllStoreObjects, which clears a RawStore by dropping functions, indexes, partitions, constraints (including primary keys), tables, databases, and roles.

public static void dropAllStoreObjects(RawStore store) throws MetaException, InvalidObjectException, InvalidInputException {
    try {
        Deadline.registerIfNot(100000);
        List<Function> funcs = store.getAllFunctions();
        for (Function func : funcs) {
            store.dropFunction(func.getDbName(), func.getFunctionName());
        }
        List<String> dbs = store.getAllDatabases();
        for (String db : dbs) {
            List<String> tbls = store.getAllTables(db);
            for (String tbl : tbls) {
                List<Index> indexes = store.getIndexes(db, tbl, 100);
                for (Index index : indexes) {
                    store.dropIndex(db, tbl, index.getIndexName());
                }
            }
            for (String tbl : tbls) {
                Deadline.startTimer("getPartition");
                List<Partition> parts = store.getPartitions(db, tbl, 100);
                for (Partition part : parts) {
                    store.dropPartition(db, tbl, part.getValues());
                }
                // Find any constraints and drop them
                Set<String> constraints = new HashSet<>();
                List<SQLPrimaryKey> pk = store.getPrimaryKeys(db, tbl);
                if (pk != null) {
                    for (SQLPrimaryKey pkcol : pk) {
                        constraints.add(pkcol.getPk_name());
                    }
                }
                List<SQLForeignKey> fks = store.getForeignKeys(null, null, db, tbl);
                if (fks != null) {
                    for (SQLForeignKey fkcol : fks) {
                        constraints.add(fkcol.getFk_name());
                    }
                }
                for (String constraint : constraints) {
                    store.dropConstraint(db, tbl, constraint);
                }
                store.dropTable(db, tbl);
            }
            store.dropDatabase(db);
        }
        List<String> roles = store.listRoleNames();
        for (String role : roles) {
            store.removeRole(role);
        }
    } catch (NoSuchObjectException e) {
        // Ignore: the object is already gone, which is acceptable during cleanup.
    }
}
Also used: Partition(org.apache.hadoop.hive.metastore.api.Partition) SQLPrimaryKey(org.apache.hadoop.hive.metastore.api.SQLPrimaryKey) SQLForeignKey(org.apache.hadoop.hive.metastore.api.SQLForeignKey) Index(org.apache.hadoop.hive.metastore.api.Index) Function(org.apache.hadoop.hive.metastore.api.Function) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) HashSet(java.util.HashSet)
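
A common way to use this helper is as a per-test reset, so every test starts against an empty metastore. A minimal sketch, assuming an embedded Derby-backed metastore configured by a default HiveConf:

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.ObjectStore;
import org.junit.Before;

public class MyStoreTest {
    private ObjectStore store;

    @Before
    public void setUp() throws Exception {
        store = new ObjectStore();
        store.setConf(new HiveConf());
        // Drop functions, indexes, partitions, constraints, tables,
        // databases, and roles left behind by earlier tests.
        TestObjectStore.dropAllStoreObjects(store);
    }
}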

Example 3 with SQLPrimaryKey

Use of org.apache.hadoop.hive.metastore.api.SQLPrimaryKey in the Apache Hive project.

From the class TestHBaseStore, the test createTableWithPrimaryKey, which creates a table with a single-column primary key, verifies that every SQLPrimaryKey field round-trips through the store, and then drops the constraint.

@Test
public void createTableWithPrimaryKey() throws Exception {
    String tableName = "pktable";
    String pkName = "test_pk";
    String[] pkColNames = { "col0" };
    Table table = createMultiColumnTable(tableName, "int");
    List<SQLPrimaryKey> pk = Arrays.asList(new SQLPrimaryKey(DB, tableName, pkColNames[0], 0, pkName, true, false, true));
    store.createTableWithConstraints(table, pk, null);
    pk = store.getPrimaryKeys(DB, tableName);
    Assert.assertNotNull(pk);
    Assert.assertEquals(1, pk.size());
    Assert.assertEquals(DB, pk.get(0).getTable_db());
    Assert.assertEquals(tableName, pk.get(0).getTable_name());
    Assert.assertEquals(pkColNames[0], pk.get(0).getColumn_name());
    Assert.assertEquals(0, pk.get(0).getKey_seq());
    Assert.assertEquals(pkName, pk.get(0).getPk_name());
    Assert.assertTrue(pk.get(0).isEnable_cstr());
    Assert.assertFalse(pk.get(0).isValidate_cstr());
    Assert.assertTrue(pk.get(0).isRely_cstr());
    // Drop the primary key
    store.dropConstraint(DB, tableName, pkName);
    pk = store.getPrimaryKeys(DB, tableName);
    Assert.assertNull(pk);
}
Also used: SQLPrimaryKey(org.apache.hadoop.hive.metastore.api.SQLPrimaryKey) Table(org.apache.hadoop.hive.metastore.api.Table) Test(org.junit.Test)
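
The positional Thrift constructor used above is easy to misread, so here is the same key written field by field; the literal "default" stands in for the test's DB constant:

import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey;

public class PrimaryKeyFields {
    public static void main(String[] args) {
        SQLPrimaryKey key = new SQLPrimaryKey(
                "default",  // table_db: database containing the table
                "pktable",  // table_name
                "col0",     // column_name: the column in the key
                0,          // key_seq: position within a composite key
                "test_pk",  // pk_name: constraint name, later used by dropConstraint
                true,       // enable_cstr: the constraint is enabled
                false,      // validate_cstr: existing data is not validated
                true);      // rely_cstr: the optimizer may rely on the constraint
        System.out.println(key);
    }
}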

Example 4 with SQLPrimaryKey

Use of org.apache.hadoop.hive.metastore.api.SQLPrimaryKey in the Apache Hive project.

From the class DDLTask, the method createTable, which forwards any primary and foreign keys declared in the CreateTableDesc to the metastore when the table is created.

/**
   * Create a new table.
   *
   * @param db
   *          The database in question.
   * @param crtTbl
   *          This is the table we're creating.
   * @return Returns 0 when execution succeeds and above 0 if it fails.
   * @throws HiveException
   *           Throws this exception if an unexpected error occurs.
   */
private int createTable(Hive db, CreateTableDesc crtTbl) throws HiveException {
    // create the table
    Table tbl = crtTbl.toTable(conf);
    List<SQLPrimaryKey> primaryKeys = crtTbl.getPrimaryKeys();
    List<SQLForeignKey> foreignKeys = crtTbl.getForeignKeys();
    LOG.info("creating table " + tbl.getDbName() + "." + tbl.getTableName() + " on " + tbl.getDataLocation());
    if (crtTbl.getReplicationSpec().isInReplicationScope() && (!crtTbl.getReplaceMode())) {
        // if this is a replication spec, then replace-mode semantics might apply.
        // if we're already asking for a table replacement, then we can skip this check.
        // however, otherwise, if in replication scope, and we've not been explicitly asked
        // to replace, we should check if the object we're looking at exists, and if so,
        // trigger replace-mode semantics.
        Table existingTable = db.getTable(tbl.getDbName(), tbl.getTableName(), false);
        if (existingTable != null) {
            if (!crtTbl.getReplicationSpec().allowEventReplacementInto(existingTable)) {
                // no replacement, the existing table state is newer than our update.
                return 0;
            } else {
                // we replace existing table.
                crtTbl.setReplaceMode(true);
            }
        }
    }
    // For Parquet tables, default the INT96 write timezone to UTC (only if the table property is not set)
    if (tbl.getSerializationLib().equals(ParquetHiveSerDe.class.getName())) {
        SessionState ss = SessionState.get();
        if (ss.getConf().getBoolVar(ConfVars.HIVE_PARQUET_INT96_DEFAULT_UTC_WRITE_ZONE)) {
            String parquetTimezone = tbl.getProperty(ParquetTableUtils.PARQUET_INT96_WRITE_ZONE_PROPERTY);
            if (parquetTimezone == null || parquetTimezone.isEmpty()) {
                tbl.setProperty(ParquetTableUtils.PARQUET_INT96_WRITE_ZONE_PROPERTY, ParquetTableUtils.PARQUET_INT96_NO_ADJUSTMENT_ZONE);
            }
        }
    }
    // create the table
    if (crtTbl.getReplaceMode()) {
        // replace-mode creates are really alters using CreateTableDesc.
        try {
            db.alterTable(tbl.getDbName() + "." + tbl.getTableName(), tbl, null);
        } catch (InvalidOperationException e) {
            throw new HiveException("Unable to alter table. " + e.getMessage(), e);
        }
    } else {
        if ((foreignKeys != null && foreignKeys.size() > 0) || (primaryKeys != null && primaryKeys.size() > 0)) {
            db.createTable(tbl, crtTbl.getIfNotExists(), primaryKeys, foreignKeys);
        } else {
            db.createTable(tbl, crtTbl.getIfNotExists());
        }
        if (crtTbl.isCTAS()) {
            Table createdTable = db.getTable(tbl.getDbName(), tbl.getTableName());
            DataContainer dc = new DataContainer(createdTable.getTTable());
            SessionState.get().getLineageState().setLineage(createdTable.getPath(), dc, createdTable.getCols());
        }
    }
    addIfAbsentByName(new WriteEntity(tbl, WriteEntity.WriteType.DDL_NO_LOCK));
    return 0;
}
Also used: SessionState(org.apache.hadoop.hive.ql.session.SessionState) SQLPrimaryKey(org.apache.hadoop.hive.metastore.api.SQLPrimaryKey) DataContainer(org.apache.hadoop.hive.ql.hooks.LineageInfo.DataContainer) Table(org.apache.hadoop.hive.ql.metadata.Table) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) SQLForeignKey(org.apache.hadoop.hive.metastore.api.SQLForeignKey) ParquetHiveSerDe(org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe) InvalidOperationException(org.apache.hadoop.hive.metastore.api.InvalidOperationException) WriteEntity(org.apache.hadoop.hive.ql.hooks.WriteEntity)
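
The primaryKeys and foreignKeys lists in CreateTableDesc originate from constraint clauses in the CREATE TABLE statement itself. A minimal sketch of such DDL issued over JDBC, assuming the same hypothetical HiveServer2 endpoint as above (Hive requires DISABLE NOVALIDATE on these informational constraints):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

public class CreateTableWithPk {
    public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection(
                "jdbc:hive2://localhost:10000/default");
             Statement stmt = conn.createStatement()) {
            // The CONSTRAINT clause is what populates
            // CreateTableDesc.getPrimaryKeys() in DDLTask.createTable.
            stmt.execute("CREATE TABLE pktable (col0 INT, col1 STRING, "
                    + "CONSTRAINT test_pk PRIMARY KEY (col0) DISABLE NOVALIDATE RELY)");
        }
    }
}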

Example 5 with SQLPrimaryKey

Use of org.apache.hadoop.hive.metastore.api.SQLPrimaryKey in the Apache Hive project.

From the class DDLSemanticAnalyzer, the method analyzeAlterTableAddConstraint, which turns an ALTER TABLE ... ADD CONSTRAINT AST into SQLPrimaryKey or SQLForeignKey lists and schedules the corresponding DDL work.

private void analyzeAlterTableAddConstraint(ASTNode ast, String tableName) throws SemanticException {
    ASTNode parent = (ASTNode) ast.getParent();
    ASTNode child = (ASTNode) ast.getChild(0);
    List<SQLPrimaryKey> primaryKeys = new ArrayList<SQLPrimaryKey>();
    List<SQLForeignKey> foreignKeys = new ArrayList<SQLForeignKey>();
    if (child.getToken().getType() == HiveParser.TOK_PRIMARY_KEY) {
        BaseSemanticAnalyzer.processPrimaryKeys(parent, child, primaryKeys);
    } else if (child.getToken().getType() == HiveParser.TOK_FOREIGN_KEY) {
        BaseSemanticAnalyzer.processForeignKeys(parent, child, foreignKeys);
    }
    AlterTableDesc alterTblDesc = new AlterTableDesc(tableName, primaryKeys, foreignKeys);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc), conf));
}
Also used: AlterTableDesc(org.apache.hadoop.hive.ql.plan.AlterTableDesc) SQLPrimaryKey(org.apache.hadoop.hive.metastore.api.SQLPrimaryKey) SQLForeignKey(org.apache.hadoop.hive.metastore.api.SQLForeignKey) DDLWork(org.apache.hadoop.hive.ql.plan.DDLWork) ArrayList(java.util.ArrayList)
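
The AST handled here comes from ALTER TABLE ... ADD CONSTRAINT statements. A minimal sketch of both forms the analyzer distinguishes (TOK_PRIMARY_KEY and TOK_FOREIGN_KEY), again assuming the hypothetical endpoint used earlier:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

public class AddConstraints {
    public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection(
                "jdbc:hive2://localhost:10000/default");
             Statement stmt = conn.createStatement()) {
            // Routed through processPrimaryKeys into a List<SQLPrimaryKey>.
            stmt.execute("ALTER TABLE pktable ADD CONSTRAINT test_pk "
                    + "PRIMARY KEY (col0) DISABLE NOVALIDATE");
            // Routed through processForeignKeys into a List<SQLForeignKey>.
            stmt.execute("ALTER TABLE fktable ADD CONSTRAINT test_fk FOREIGN KEY (col0) "
                    + "REFERENCES pktable (col0) DISABLE NOVALIDATE RELY");
        }
    }
}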

Aggregations

SQLPrimaryKey (org.apache.hadoop.hive.metastore.api.SQLPrimaryKey): 21 uses
SQLForeignKey (org.apache.hadoop.hive.metastore.api.SQLForeignKey): 8 uses
ArrayList (java.util.ArrayList): 7 uses
Table (org.apache.hadoop.hive.metastore.api.Table): 6 uses
IOException (java.io.IOException): 4 uses
HashMap (java.util.HashMap): 3 uses
MetaException (org.apache.hadoop.hive.metastore.api.MetaException): 3 uses
Test (org.junit.Test): 3 uses
HashSet (java.util.HashSet): 2 uses
List (java.util.List): 2 uses
Query (javax.jdo.Query): 2 uses
IMetaStoreClient (org.apache.hadoop.hive.metastore.IMetaStoreClient): 2 uses
Database (org.apache.hadoop.hive.metastore.api.Database): 2 uses
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema): 2 uses
Function (org.apache.hadoop.hive.metastore.api.Function): 2 uses
Index (org.apache.hadoop.hive.metastore.api.Index): 2 uses
NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException): 2 uses
Partition (org.apache.hadoop.hive.metastore.api.Partition): 2 uses
PrimaryKeysRequest (org.apache.hadoop.hive.metastore.api.PrimaryKeysRequest): 2 uses
MConstraint (org.apache.hadoop.hive.metastore.model.MConstraint): 2 uses