Search in sources:

Example 1 with SerDeInfo

use of org.apache.hadoop.hive.metastore.api.SerDeInfo in project hive by apache.

From the class TestAccumuloStorageHandler, the method testNonExternalExistentTable:

/**
 * Expects a MetaException when preCreateTable is invoked for a managed
 * (non-EXTERNAL) Hive table whose backing Accumulo table already exists.
 */
@Test(expected = MetaException.class)
public void testNonExternalExistentTable() throws Exception {
    String tableName = "table";
    MockInstance instance = new MockInstance(test.getMethodName());
    Connector connector = instance.getConnector("root", new PasswordToken(""));
    // Pre-create the Accumulo table so it already exists before preCreateTable runs
    connector.tableOperations().create(tableName);
    // SerDe parameters carrying the required column mapping
    Map<String, String> serdeParams = new HashMap<String, String>();
    serdeParams.put(AccumuloSerDeParameters.COLUMN_MAPPINGS, "cf:cq");
    AccumuloConnectionParameters connParams = Mockito.mock(AccumuloConnectionParameters.class);
    AccumuloStorageHandler handler = Mockito.mock(AccumuloStorageHandler.class);
    StorageDescriptor descriptor = Mockito.mock(StorageDescriptor.class);
    Table hiveTable = Mockito.mock(Table.class);
    SerDeInfo serde = Mockito.mock(SerDeInfo.class);
    // Only preCreateTable executes its real implementation; collaborators are stubbed
    Mockito.doCallRealMethod().when(handler).preCreateTable(hiveTable);
    // Resolve the table name to the pre-created Accumulo table
    Mockito.when(handler.getTableName(hiveTable)).thenReturn(tableName);
    // Managed (non-EXTERNAL) table — the condition under test
    Mockito.when(handler.isExternalTable(hiveTable)).thenReturn(false);
    Mockito.when(hiveTable.getSd()).thenReturn(descriptor);
    // AccumuloStorageHandler expects no explicit table location
    Mockito.when(descriptor.getLocation()).thenReturn(null);
    Mockito.when(descriptor.getSerdeInfo()).thenReturn(serde);
    Mockito.when(serde.getParameters()).thenReturn(serdeParams);
    // Hand back the MockInstance-backed Connector
    Mockito.when(connParams.getConnector()).thenReturn(connector);
    handler.connectionParams = connParams;
    handler.preCreateTable(hiveTable);
}
Also used : Connector(org.apache.accumulo.core.client.Connector) PasswordToken(org.apache.accumulo.core.client.security.tokens.PasswordToken) Table(org.apache.hadoop.hive.metastore.api.Table) MockInstance(org.apache.accumulo.core.client.mock.MockInstance) HashMap(java.util.HashMap) SerDeInfo(org.apache.hadoop.hive.metastore.api.SerDeInfo) StorageDescriptor(org.apache.hadoop.hive.metastore.api.StorageDescriptor) Test(org.junit.Test)

Example 2 with SerDeInfo

use of org.apache.hadoop.hive.metastore.api.SerDeInfo in project hive by apache.

From the class TestHiveMetaStore, the method createStorageDescriptor:

/**
 * Builds a test StorageDescriptor: uncompressed, single bucket on "name",
 * LazySimpleSerDe named after the table, and Hive text input/output formats.
 *
 * @param tableName   used as the SerDe name
 * @param cols        column schemas for the descriptor
 * @param params      storage-descriptor parameters
 * @param serdParams  SerDe parameters (SERIALIZATION_FORMAT is forced to "1")
 * @return the populated descriptor
 */
private StorageDescriptor createStorageDescriptor(String tableName, List<FieldSchema> cols, Map<String, String> params, Map<String, String> serdParams) {
    StorageDescriptor descriptor = new StorageDescriptor();
    descriptor.setCols(cols);
    descriptor.setCompressed(false);
    descriptor.setNumBuckets(1);
    descriptor.setParameters(params);
    // Single bucket column, "name"
    List<String> bucketCols = new ArrayList<String>(2);
    bucketCols.add("name");
    descriptor.setBucketCols(bucketCols);
    // SerDe: named after the table, caller-supplied params plus a fixed format
    SerDeInfo serdeInfo = new SerDeInfo();
    serdeInfo.setName(tableName);
    serdeInfo.setParameters(serdParams);
    serdeInfo.getParameters().put(serdeConstants.SERIALIZATION_FORMAT, "1");
    serdeInfo.setSerializationLib(LazySimpleSerDe.class.getName());
    descriptor.setSerdeInfo(serdeInfo);
    descriptor.setSortCols(new ArrayList<Order>());
    descriptor.setInputFormat(HiveInputFormat.class.getName());
    descriptor.setOutputFormat(HiveOutputFormat.class.getName());
    return descriptor;
}
Also used : Order(org.apache.hadoop.hive.metastore.api.Order) HiveInputFormat(org.apache.hadoop.hive.ql.io.HiveInputFormat) LazySimpleSerDe(org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe) SerDeInfo(org.apache.hadoop.hive.metastore.api.SerDeInfo) StorageDescriptor(org.apache.hadoop.hive.metastore.api.StorageDescriptor) HiveOutputFormat(org.apache.hadoop.hive.ql.io.HiveOutputFormat)

Example 3 with SerDeInfo

use of org.apache.hadoop.hive.metastore.api.SerDeInfo in project hive by apache.

From the class TestDbNotificationListener, the method alterIndex:

@Test
public void alterIndex() throws Exception {
    String indexName = "alterIndex";
    String dbName = "default";
    String tableName = "alterIndexTable";
    String indexTableName = tableName + "__" + indexName + "__";
    int startTime = (int) (System.currentTimeMillis() / 1000);
    List<FieldSchema> cols = new ArrayList<FieldSchema>();
    cols.add(new FieldSchema("col1", "int", ""));
    SerDeInfo serde = new SerDeInfo("serde", "seriallib", null);
    Map<String, String> params = new HashMap<String, String>();
    params.put("key", "value");
    StorageDescriptor sd = new StorageDescriptor(cols, "file:/tmp", "input", "output", false, 17, serde, Arrays.asList("bucketcol"), Arrays.asList(new Order("sortcol", 1)), params);
    Table table = new Table(tableName, dbName, "me", startTime, startTime, 0, sd, null, emptyParameters, null, null, null);
    // Event 1
    msClient.createTable(table);
    Index oldIndex = new Index(indexName, null, "default", tableName, startTime, startTime, indexTableName, sd, emptyParameters, false);
    Table oldIndexTable = new Table(indexTableName, dbName, "me", startTime, startTime, 0, sd, null, emptyParameters, null, null, null);
    // Event 2, 3
    // creates index and index table
    msClient.createIndex(oldIndex, oldIndexTable);
    Index newIndex = new Index(indexName, null, "default", tableName, startTime, startTime + 1, indexTableName, sd, emptyParameters, false);
    // Event 4
    msClient.alter_index(dbName, tableName, indexName, newIndex);
    // Get notifications from metastore
    NotificationEventResponse rsp = msClient.getNextNotification(firstEventId, 0, null);
    assertEquals(4, rsp.getEventsSize());
    NotificationEvent event = rsp.getEvents().get(3);
    assertEquals(firstEventId + 4, event.getEventId());
    assertTrue(event.getEventTime() >= startTime);
    assertEquals(EventType.ALTER_INDEX.toString(), event.getEventType());
    assertEquals(dbName, event.getDbName());
    // Parse the message field
    AlterIndexMessage alterIdxMsg = md.getAlterIndexMessage(event.getMessage());
    Index indexObj = alterIdxMsg.getIndexObjAfter();
    assertEquals(dbName, indexObj.getDbName());
    assertEquals(indexName, indexObj.getIndexName());
    assertEquals(tableName, indexObj.getOrigTableName());
    assertEquals(indexTableName, indexObj.getIndexTableName());
    assertTrue(indexObj.getCreateTime() < indexObj.getLastAccessTime());
    // When hive.metastore.transactional.event.listeners is set,
    // a failed event should not create a new notification
    DummyRawStoreFailEvent.setEventSucceed(false);
    try {
        msClient.alter_index(dbName, tableName, indexName, newIndex);
        fail("Error: alter index should've failed");
    } catch (Exception ex) {
    // expected
    }
    rsp = msClient.getNextNotification(firstEventId, 0, null);
    assertEquals(4, rsp.getEventsSize());
}
Also used : Order(org.apache.hadoop.hive.metastore.api.Order) Table(org.apache.hadoop.hive.metastore.api.Table) HashMap(java.util.HashMap) LinkedHashMap(java.util.LinkedHashMap) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) SerDeInfo(org.apache.hadoop.hive.metastore.api.SerDeInfo) ArrayList(java.util.ArrayList) StorageDescriptor(org.apache.hadoop.hive.metastore.api.StorageDescriptor) Index(org.apache.hadoop.hive.metastore.api.Index) NotificationEvent(org.apache.hadoop.hive.metastore.api.NotificationEvent) NotificationEventResponse(org.apache.hadoop.hive.metastore.api.NotificationEventResponse) AlterIndexMessage(org.apache.hadoop.hive.metastore.messaging.AlterIndexMessage) Test(org.junit.Test)

Example 4 with SerDeInfo

use of org.apache.hadoop.hive.metastore.api.SerDeInfo in project hive by apache.

From the class TestDbNotificationListener, the method dropIndex:

@Test
public void dropIndex() throws Exception {
    String indexName = "dropIndex";
    String dbName = "default";
    String tableName = "dropIndexTable";
    String indexTableName = tableName + "__" + indexName + "__";
    int startTime = (int) (System.currentTimeMillis() / 1000);
    List<FieldSchema> cols = new ArrayList<FieldSchema>();
    cols.add(new FieldSchema("col1", "int", ""));
    SerDeInfo serde = new SerDeInfo("serde", "seriallib", null);
    Map<String, String> params = new HashMap<String, String>();
    params.put("key", "value");
    StorageDescriptor sd = new StorageDescriptor(cols, "file:/tmp", "input", "output", false, 17, serde, Arrays.asList("bucketcol"), Arrays.asList(new Order("sortcol", 1)), params);
    Table table = new Table(tableName, dbName, "me", startTime, startTime, 0, sd, null, emptyParameters, null, null, null);
    // Event 1
    msClient.createTable(table);
    Index index = new Index(indexName, null, "default", tableName, startTime, startTime, indexTableName, sd, emptyParameters, false);
    Table indexTable = new Table(indexTableName, dbName, "me", startTime, startTime, 0, sd, null, emptyParameters, null, null, null);
    // Event 2, 3 (index table and index)
    msClient.createIndex(index, indexTable);
    // Event 4 (drops index and indexTable)
    msClient.dropIndex(dbName, tableName, indexName, true);
    // Get notifications from metastore
    NotificationEventResponse rsp = msClient.getNextNotification(firstEventId, 0, null);
    assertEquals(4, rsp.getEventsSize());
    NotificationEvent event = rsp.getEvents().get(3);
    assertEquals(firstEventId + 4, event.getEventId());
    assertTrue(event.getEventTime() >= startTime);
    assertEquals(EventType.DROP_INDEX.toString(), event.getEventType());
    assertEquals(dbName, event.getDbName());
    // Parse the message field
    DropIndexMessage dropIdxMsg = md.getDropIndexMessage(event.getMessage());
    assertEquals(dbName, dropIdxMsg.getDB());
    assertEquals(indexName.toLowerCase(), dropIdxMsg.getIndexName());
    assertEquals(indexTableName.toLowerCase(), dropIdxMsg.getIndexTableName());
    assertEquals(tableName.toLowerCase(), dropIdxMsg.getOrigTableName());
    // When hive.metastore.transactional.event.listeners is set,
    // a failed event should not create a new notification
    index = new Index("dropIndexTable2", null, "default", tableName, startTime, startTime, "dropIndexTable__dropIndexTable2__", sd, emptyParameters, false);
    Table indexTable2 = new Table("dropIndexTable__dropIndexTable2__", dbName, "me", startTime, startTime, 0, sd, null, emptyParameters, null, null, null);
    msClient.createIndex(index, indexTable2);
    DummyRawStoreFailEvent.setEventSucceed(false);
    try {
        // drops index and indexTable
        msClient.dropIndex(dbName, tableName, "dropIndex2", true);
        fail("Error: drop index should've failed");
    } catch (Exception ex) {
    // expected
    }
    rsp = msClient.getNextNotification(firstEventId, 0, null);
    assertEquals(6, rsp.getEventsSize());
}
Also used : Order(org.apache.hadoop.hive.metastore.api.Order) Table(org.apache.hadoop.hive.metastore.api.Table) HashMap(java.util.HashMap) LinkedHashMap(java.util.LinkedHashMap) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) SerDeInfo(org.apache.hadoop.hive.metastore.api.SerDeInfo) ArrayList(java.util.ArrayList) StorageDescriptor(org.apache.hadoop.hive.metastore.api.StorageDescriptor) Index(org.apache.hadoop.hive.metastore.api.Index) NotificationEvent(org.apache.hadoop.hive.metastore.api.NotificationEvent) NotificationEventResponse(org.apache.hadoop.hive.metastore.api.NotificationEventResponse) DropIndexMessage(org.apache.hadoop.hive.metastore.messaging.DropIndexMessage) Test(org.junit.Test)

Example 5 with SerDeInfo

use of org.apache.hadoop.hive.metastore.api.SerDeInfo in project hive by apache.

From the class HBaseUtils, the method deserializeStorageDescriptor:

/**
 * Rebuilds a thrift StorageDescriptor from its protobuf-serialized form.
 * Optional proto fields (input/output format, serde, skewed info, subdirectory
 * flag) are copied only when present; absent serde name/lib map to null.
 *
 * @param serialized protobuf bytes of HbaseMetastoreProto.StorageDescriptor
 * @return the reconstructed thrift descriptor
 * @throws InvalidProtocolBufferException if the bytes do not parse
 */
static StorageDescriptor deserializeStorageDescriptor(byte[] serialized) throws InvalidProtocolBufferException {
    HbaseMetastoreProto.StorageDescriptor sdProto = HbaseMetastoreProto.StorageDescriptor.parseFrom(serialized);
    StorageDescriptor result = new StorageDescriptor();
    result.setCols(convertFieldSchemaListFromProto(sdProto.getColsList()));
    if (sdProto.hasInputFormat()) {
        result.setInputFormat(sdProto.getInputFormat());
    }
    if (sdProto.hasOutputFormat()) {
        result.setOutputFormat(sdProto.getOutputFormat());
    }
    result.setCompressed(sdProto.getIsCompressed());
    result.setNumBuckets(sdProto.getNumBuckets());
    if (sdProto.hasSerdeInfo()) {
        // Unset proto name/lib become null on the thrift side
        SerDeInfo serdeInfo = new SerDeInfo();
        serdeInfo.setName(sdProto.getSerdeInfo().hasName() ? sdProto.getSerdeInfo().getName() : null);
        serdeInfo.setSerializationLib(sdProto.getSerdeInfo().hasSerializationLib() ? sdProto.getSerdeInfo().getSerializationLib() : null);
        serdeInfo.setParameters(buildParameters(sdProto.getSerdeInfo().getParameters()));
        result.setSerdeInfo(serdeInfo);
    }
    result.setBucketCols(new ArrayList<>(sdProto.getBucketColsList()));
    // Sort columns: (name, order) pairs
    List<Order> sortColumns = new ArrayList<>();
    for (HbaseMetastoreProto.StorageDescriptor.Order orderProto : sdProto.getSortColsList()) {
        sortColumns.add(new Order(orderProto.getColumnName(), orderProto.getOrder()));
    }
    result.setSortCols(sortColumns);
    if (sdProto.hasSkewedInfo()) {
        SkewedInfo skewedInfo = new SkewedInfo();
        skewedInfo.setSkewedColNames(new ArrayList<>(sdProto.getSkewedInfo().getSkewedColNamesList()));
        // Each proto inner list becomes one skewed-column value list
        for (HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList valueList : sdProto.getSkewedInfo().getSkewedColValuesList()) {
            skewedInfo.addToSkewedColValues(new ArrayList<>(valueList.getSkewedColValueList()));
        }
        // Map each skewed value tuple (key list) to its location string
        Map<List<String>, String> valueLocations = new HashMap<>();
        for (HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap entry : sdProto.getSkewedInfo().getSkewedColValueLocationMapsList()) {
            valueLocations.put(new ArrayList<>(entry.getKeyList()), entry.getValue());
        }
        skewedInfo.setSkewedColValueLocationMaps(valueLocations);
        result.setSkewedInfo(skewedInfo);
    }
    if (sdProto.hasStoredAsSubDirectories()) {
        result.setStoredAsSubDirectories(sdProto.getStoredAsSubDirectories());
    }
    return result;
}
Also used : Order(org.apache.hadoop.hive.metastore.api.Order) HashMap(java.util.HashMap) SerDeInfo(org.apache.hadoop.hive.metastore.api.SerDeInfo) StorageDescriptor(org.apache.hadoop.hive.metastore.api.StorageDescriptor) ArrayList(java.util.ArrayList) ByteString(com.google.protobuf.ByteString) SkewedInfo(org.apache.hadoop.hive.metastore.api.SkewedInfo) List(java.util.List) ArrayList(java.util.ArrayList)

Aggregations

SerDeInfo (org.apache.hadoop.hive.metastore.api.SerDeInfo)185 StorageDescriptor (org.apache.hadoop.hive.metastore.api.StorageDescriptor)162 FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema)126 Table (org.apache.hadoop.hive.metastore.api.Table)124 ArrayList (java.util.ArrayList)121 Test (org.junit.Test)110 Partition (org.apache.hadoop.hive.metastore.api.Partition)65 HashMap (java.util.HashMap)51 Order (org.apache.hadoop.hive.metastore.api.Order)32 List (java.util.List)30 ColumnStatistics (org.apache.hadoop.hive.metastore.api.ColumnStatistics)28 ColumnStatisticsData (org.apache.hadoop.hive.metastore.api.ColumnStatisticsData)28 ColumnStatisticsDesc (org.apache.hadoop.hive.metastore.api.ColumnStatisticsDesc)28 ColumnStatisticsObj (org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj)28 AggrStats (org.apache.hadoop.hive.metastore.api.AggrStats)27 Database (org.apache.hadoop.hive.metastore.api.Database)22 MetaException (org.apache.hadoop.hive.metastore.api.MetaException)22 SkewedInfo (org.apache.hadoop.hive.metastore.api.SkewedInfo)17 LongColumnStatsData (org.apache.hadoop.hive.metastore.api.LongColumnStatsData)13 Path (org.apache.hadoop.fs.Path)12